Skip to content

Commit

Permalink
Merge pull request #80 from daisybio/development
Browse files Browse the repository at this point in the history
New version including DIPK
  • Loading branch information
JudithBernett authored Nov 21, 2024
2 parents 74cd9d5 + a13dea0 commit d07ef29
Show file tree
Hide file tree
Showing 88 changed files with 5,677 additions and 3,256 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/labeler.yml
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,6 @@ jobs:
uses: actions/checkout@v4

- name: Run Labeler
uses: crazy-max/ghaction-github-labeler@v5.0.0
uses: crazy-max/ghaction-github-labeler@v5.1.0
with:
skip-delete: true
64 changes: 64 additions & 0 deletions .github/workflows/publish-docker.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,64 @@
---
name: Create and publish a Docker image

# Configures this workflow to run every time a release is published.
on:
  release:
    types: [published]

# Defines two custom environment variables for the workflow.
# These are used for the Container registry domain, and a name for the
# Docker image that this workflow builds.
env:
  REGISTRY: ghcr.io
  IMAGE_NAME: ${{ github.repository }}

# There is a single job in this workflow. It's configured to run on the
# latest available version of Ubuntu.
jobs:
  build-and-push-image:
    runs-on: ubuntu-latest

    # Sets the permissions granted to the `GITHUB_TOKEN` for the actions
    # in this job (write access to packages/attestations is needed to push
    # the image and its provenance to GHCR).
    permissions:
      contents: read
      packages: write
      attestations: write
      id-token: write

    steps:
      # Necessary for buildx
      - name: Checkout repository
        uses: actions/checkout@v4

      # QEMU provides emulation so the non-native platform (arm64 on an
      # amd64 runner) can be built below.
      - name: Setup QEMU
        uses: docker/setup-qemu-action@v3

      # Set up BuildKit Docker container builder to be able to build
      # multi-platform images and export cache
      # https://github.com/docker/setup-buildx-action
      - name: Set up Docker Buildx
        uses: docker/setup-buildx-action@v3

      - name: Log in to the Container registry
        uses: docker/login-action@v3
        with:
          registry: ${{ env.REGISTRY }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}

      - name: Extract metadata (tags, labels) for Docker
        id: meta
        uses: docker/metadata-action@v5
        with:
          images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}

      # This step uses `docker/build-push-action` to build the image from the
      # repository's `Dockerfile` and, on success, push it to GitHub Packages.
      # `context` defines the build context; `tags` and `labels` come from the
      # output of the "meta" step above. See
      # https://github.com/docker/build-push-action#usage
      - name: Build and push Docker image
        id: build-and-push
        uses: docker/build-push-action@v6
        with:
          context: .
          push: true
          tags: ${{ steps.meta.outputs.tags }}
          labels: ${{ steps.meta.outputs.labels }}
          platforms: linux/amd64,linux/arm64
24 changes: 17 additions & 7 deletions .github/workflows/python-package.yml
Original file line number Diff line number Diff line change
Expand Up @@ -18,22 +18,32 @@ jobs:
python-version: ["3.9", "3.10", "3.11"]

steps:
- uses: actions/checkout@v4
- name: Check out the repository
uses: actions/checkout@v4

- name: Set up Python ${{ matrix.python-version }}
uses: actions/setup-python@v5
with:
python-version: ${{ matrix.python-version }}

- name: Install Poetry
run: |
pipx install poetry
poetry --version
- name: Install dependencies
run: |
python -m pip install --upgrade pip
python -m pip install flake8 pytest
if [ -f requirements.txt ]; then pip install -r requirements.txt; fi
poetry install --with development
- name: Build package
run: poetry build --ansi

- name: Lint with flake8
run: |
# stop the build if there are Python syntax errors or undefined names
flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
poetry run flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics
# exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide
flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics --ignore W503
poetry run flake8 . --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics --ignore W503,D212
- name: Test with pytest
run: |
pytest
poetry run pytest
2 changes: 1 addition & 1 deletion .github/workflows/python-publish.yml
Original file line number Diff line number Diff line change
Expand Up @@ -32,7 +32,7 @@ jobs:
- name: Build package
run: python -m build
- name: Publish package
uses: pypa/gh-action-pypi-publish@27b31702a0e7fc50959f5ad993c78deac1bdfc29
uses: pypa/gh-action-pypi-publish@15c56dba361d8335944d31a2ecd17d700fc7bcbc
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}
7 changes: 3 additions & 4 deletions .github/workflows/run_tests.yml
Original file line number Diff line number Diff line change
Expand Up @@ -3,8 +3,6 @@ name: Run drevalpy Tests
on:
push:
branches:
- development
- main
- "release/*"
pull_request:
branches:
Expand All @@ -23,7 +21,6 @@ jobs:
- { python-version: "3.10", os: ubuntu-latest, session: "mypy" }
- { python-version: "3.10", os: ubuntu-latest, session: "tests" }
- { python-version: "3.10", os: windows-latest, session: "tests" }
- { python-version: "3.10", os: macos-latest, session: "tests" }
- { python-version: "3.10", os: ubuntu-latest, session: "typeguard" }
- { python-version: "3.10", os: ubuntu-latest, session: "xdoctest" }
- { python-version: "3.10", os: ubuntu-latest, session: "docs-build" }
Expand Down Expand Up @@ -130,4 +127,6 @@ jobs:
run: nox --force-color --session=coverage -- xml -i

- name: Upload coverage report
uses: codecov/[email protected]
uses: codecov/[email protected]
with:
token: ${{ secrets.CODECOV_TOKEN }}
2 changes: 1 addition & 1 deletion .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -35,7 +35,7 @@ repos:
types: [python]
require_serial: true
args:
- --ignore=W503
- --ignore=D212,W503,C901
- id: pyupgrade
name: pyupgrade
description: Automatically upgrade syntax for newer versions.
Expand Down
53 changes: 53 additions & 0 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -0,0 +1,53 @@
# Multi-stage build following this article's recommendations:
# https://medium.com/@albertazzir/blazing-fast-python-docker-builds-with-poetry-a78a66f5aed0

# The builder image, used to build the virtual environment.
# NOTE: `AS` (uppercase) keeps keyword casing consistent with `FROM`
# (Docker's FromAsCasing lint).
FROM python:3.10-buster AS builder

RUN pip install poetry==1.8.4

# POETRY_VIRTUALENVS_CREATE=1: Makes sure that environment will be as isolated
# as possible and above all that installation will not mess up with the system
# Python or, even worse, with Poetry itself.
# POETRY_CACHE_DIR: When removing the cache folder, make sure this is done in
# the same RUN command. If it's done in a separate RUN command, the cache will
# still be part of the previous Docker layer (the one containing
# `poetry install`), effectively rendering the optimization useless.
ENV POETRY_NO_INTERACTION=1 \
    POETRY_VIRTUALENVS_IN_PROJECT=1 \
    POETRY_VIRTUALENVS_CREATE=1 \
    POETRY_CACHE_DIR=/tmp/poetry_cache

WORKDIR /root

COPY pyproject.toml poetry.lock ./

# First, we install only the dependencies. This way, we can cache this layer
# and avoid re-installing dependencies every time we change application code.
# Because poetry will complain if a README.md is not found, we create a dummy one.
RUN touch README.md

# NOTE(review): the excluded group is named `dev` here, while CI elsewhere
# installs `--with development` — confirm the group name matches pyproject.toml.
RUN poetry install --without dev --no-root && rm -rf $POETRY_CACHE_DIR

# The runtime image, used to run the code.
FROM python:3.10-slim-buster AS runtime

LABEL image.author.name="Judith Bernett"
LABEL image.author.email="[email protected]"

# Put the virtualenv built in the builder stage on PATH.
ENV VIRTUAL_ENV=/root/.venv \
    PATH="/root/.venv/bin:$PATH"

COPY --from=builder ${VIRTUAL_ENV} ${VIRTUAL_ENV}

# Copy all relevant code.
COPY drevalpy ./drevalpy
COPY create_report.py ./
COPY README.md ./
COPY run_suite.py ./
COPY setup.py ./
COPY pyproject.toml ./
COPY poetry.lock ./

# Install drevalpy itself into the runtime image.
RUN pip install .
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ pip install drevalpy
From Source:

```bash
conda env create -f models/simple_neural_network/env.yml
conda env create -f models/SimpleNeuralNetwork/env.yml
pip install .
```

Expand Down
4 changes: 2 additions & 2 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -15,10 +15,10 @@ DrEvalPy: Python Cancer Cell Line Drug Response Prediction Suite
.. |Read the Docs| image:: https://img.shields.io/readthedocs/drevalpy/latest.svg?label=Read%20the%20Docs
:target: https://drevalpy.readthedocs.io/
:alt: Read the documentation at https://drevalpy.readthedocs.io/
.. |Build| image:: https://github.com/daisybio/drevalpy/workflows/Build%20drevalpy%20Package/badge.svg
.. |Build| image:: https://github.com/daisybio/drevalpy/actions/workflows/build_package.yml/badge.svg
:target: https://github.com/daisybio/drevalpy/actions?workflow=Package
:alt: Build Package Status
.. |Tests| image:: https://github.com/daisybio/drevalpy/workflows/Run%20drevalpy%20Tests/badge.svg
.. |Tests| image:: https://github.com/daisybio/drevalpy/actions/workflows/run_tests.yml/badge.svg
:target: https://github.com/daisybio/drevalpy/actions?workflow=Tests
:alt: Run Tests Status
.. |Codecov| image:: https://codecov.io/gh/daisybio/drevalpy/branch/main/graph/badge.svg
Expand Down
Empty file removed configs/CCLE.yaml
Empty file.
37 changes: 0 additions & 37 deletions configs/GDSC.yaml

This file was deleted.

71 changes: 46 additions & 25 deletions create_report.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,8 @@
import argparse
import os

import pandas as pd

from drevalpy.visualization import (
CorrelationComparisonScatter,
CriticalDifferencePlot,
Expand All @@ -14,8 +16,10 @@
from drevalpy.visualization.utils import create_html, create_index_html, parse_results, prep_results, write_results


def create_output_directories(custom_id):
"""If they do not exist yet, make directories for the visualization files.
def create_output_directories(custom_id: str) -> None:
"""
If they do not exist yet, make directories for the visualization files.
:param custom_id: run id passed via command line
"""
os.makedirs(f"results/{custom_id}/violin_plots", exist_ok=True)
Expand All @@ -26,13 +30,22 @@ def create_output_directories(custom_id):
os.makedirs(f"results/{custom_id}/critical_difference_plots", exist_ok=True)


def draw_setting_plots(lpo_lco_ldo, ev_res, ev_res_per_drug, ev_res_per_cell_line, custom_id):
"""Draw all plots for a specific setting (LPO, LCO, LDO).
def draw_setting_plots(
lpo_lco_ldo: str,
ev_res: pd.DataFrame,
ev_res_per_drug: pd.DataFrame,
ev_res_per_cell_line: pd.DataFrame,
custom_id: str,
) -> list[str]:
"""
Draw all plots for a specific setting (LPO, LCO, LDO).
:param lpo_lco_ldo: setting
:param ev_res: overall evaluation results
:param ev_res_per_drug: evaluation results per drug
:param ev_res_per_cell_line: evaluation results per cell line
:param custom_id: run id passed via command line
:returns: list of unique algorithms
"""
ev_res_subset = ev_res[ev_res["LPO_LCO_LDO"] == lpo_lco_ldo]
# PIPELINE: SAVE_TABLES
Expand Down Expand Up @@ -99,8 +112,12 @@ def draw_setting_plots(lpo_lco_ldo, ev_res, ev_res_per_drug, ev_res_per_cell_lin
return eval_results_preds["algorithm"].unique()


def draw_per_grouping_setting_plots(grouping, ev_res_per_group, lpo_lco_ldo, custom_id):
"""Draw plots for a specific grouping (drug or cell line) for a specific setting (LPO, LCO, LDO).
def draw_per_grouping_setting_plots(
grouping: str, ev_res_per_group: pd.DataFrame, lpo_lco_ldo: str, custom_id: str
) -> None:
"""
Draw plots for a specific grouping (drug or cell line) for a specific setting (LPO, LCO, LDO).
:param grouping: drug or cell_line
:param ev_res_per_group: evaluation results per drug or per cell line
:param lpo_lco_ldo: setting
Expand Down Expand Up @@ -132,15 +149,17 @@ def draw_per_grouping_setting_plots(grouping, ev_res_per_group, lpo_lco_ldo, cus


def draw_algorithm_plots(
model,
ev_res,
ev_res_per_drug,
ev_res_per_cell_line,
t_vs_p,
lpo_lco_ldo,
custom_id,
):
"""Draw all plots for a specific algorithm.
model: str,
ev_res: pd.DataFrame,
ev_res_per_drug: pd.DataFrame,
ev_res_per_cell_line: pd.DataFrame,
t_vs_p: pd.DataFrame,
lpo_lco_ldo: str,
custom_id: str,
) -> None:
"""
Draw all plots for a specific algorithm.
:param model: name of the model/algorithm
:param ev_res: overall evaluation results
:param ev_res_per_drug: evaluation results per drug
Expand Down Expand Up @@ -194,15 +213,17 @@ def draw_algorithm_plots(


def draw_per_grouping_algorithm_plots(
grouping_slider,
grouping_scatter_table,
model,
ev_res_per_group,
t_v_p,
lpo_lco_ldo,
custom_id,
grouping_slider: str,
grouping_scatter_table: str,
model: str,
ev_res_per_group: pd.DataFrame,
t_v_p: pd.DataFrame,
lpo_lco_ldo: str,
custom_id: str,
):
"""Draw plots for a specific grouping (drug or cell line) for a specific algorithm.
"""
Draw plots for a specific grouping (drug or cell line) for a specific algorithm.
:param grouping_slider: the grouping variable for the regression plots
:param grouping_scatter_table: the grouping variable for the scatter plots.
If grouping_slider is drug, this should be cell_line and vice versa
Expand Down Expand Up @@ -320,8 +341,8 @@ def draw_per_grouping_algorithm_plots(
custom_id=run_id,
)
# get all html files from results/{run_id}
all_files = []
for _, _, files in os.walk(f"results/{run_id}"):
all_files: list[str] = []
for _, _, files in os.walk(f"results/{run_id}"): # type: ignore[assignment]
for file in files:
if file.endswith(".html") and file not in ["index.html", "LPO.html", "LCO.html", "LDO.html"]:
all_files.append(file)
Expand Down
Loading

0 comments on commit d07ef29

Please sign in to comment.