diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index 213570c9e5..eee0a717d9 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -69,11 +69,8 @@ jobs:
         run: |
           pip install torch==2.2.1
           pip install cellpose==3.0.5
-          pip install csbdeep==0.7.4
-          pip install stardist==0.8.1
-          pip install tensorflow==2.12.1
-          pip install markupsafe==2.0.1
-          pip install h5py==3.6.0
+          pip install csbdeep==0.8.1 stardist==0.9.1 tensorflow==2.12.1 keras==2.12.0
+          pip install markupsafe==2.0.1 h5py==3.6.0
       - name: Display installed packages
         run: pip list
       - if: startsWith(matrix.os, 'windows')
@@ -81,7 +78,7 @@ jobs:
         run: |
           pyinstaller distribution/windows/cellprofiler.spec
           rm ./dist/CellProfiler/jvm.dll
-          iscc /dMyAppVersion="4.2.80001-ai" "distribution/windows/cellprofiler.iss"
+          iscc /dMyAppVersion="4.2.80002-ai" "distribution/windows/cellprofiler.iss"
       - if: startsWith(matrix.os, 'macos')
         name: MacOS pyinstaller build and package
         run: |
@@ -94,13 +91,13 @@ jobs:
         uses: actions/upload-artifact@v4
         name: MacOS tar upload
         with:
-          name: CellProfiler-macOS-4.2.80001-ai.tar.gz
+          name: CellProfiler-macOS-4.2.80002-ai.tar.gz
           path: ./dist/*.tar.gz
       - if: startsWith(matrix.os, 'windows')
         uses: actions/upload-artifact@v4
         name: Windows artifact upload
         with:
-          name: CellProfiler-Windows-4.2.80001-ai.exe
+          name: CellProfiler-Windows-4.2.80002-ai.exe
           path: ./distribution/windows/Output/*.exe
   upload:
     name: upload
diff --git a/cellprofiler/__init__.py b/cellprofiler/__init__.py
index 9b37a08b65..1b6bc14d70 100644
--- a/cellprofiler/__init__.py
+++ b/cellprofiler/__init__.py
@@ -1,3 +1,3 @@
 __test__ = False
 
-__version__ = "4.2.80001"
+__version__ = "4.2.80002"
diff --git a/cellprofiler/modules/runstardist.py b/cellprofiler/modules/runstardist.py
index 1ce7e66da3..09ba6e6eaf 100644
--- a/cellprofiler/modules/runstardist.py
+++ b/cellprofiler/modules/runstardist.py
@@ -3,10 +3,11 @@
 # Imports from useful Python libraries
 #
 #################################
-
+import logging
 import os
-import pathlib
+from pathlib import Path
 from skimage.transform import resize
+import csbdeep
 from csbdeep.utils import normalize
 
 #################################
@@ -25,24 +26,23 @@
 
 
 # Monkey patch csbdeep to avoid re-extracting models on each run and allow
-# specification of a custom cache dir with KERAS_CACHE_DIR.
+# specification of a custom cache dir with KERAS_HOME env var.
 def patched_get_model_folder(cls, key_or_alias):
-    import csbdeep
     key, alias, m = csbdeep.models.pretrained.get_model_details(
         cls, key_or_alias)
-    target = str(pathlib.Path('models') / cls.__name__ / key)
-    cache_dir = os.environ.get('KERAS_CACHE_DIR',
+    target = str(Path('models') / cls.__name__ / key)
+    cache_dir = os.environ.get('KERAS_HOME',
                                os.path.join(os.path.expanduser('~'), '.keras'))
-    expected_dir = os.path.join(cache_dir, target)
-    if os.path.exists(expected_dir) and len(os.listdir(expected_dir)) > 1:
-        return pathlib.Path(expected_dir)
-    else:
-        from keras.utils import get_file
-        path = pathlib.Path(
-            get_file(fname=key+'.zip', origin=m['url'], file_hash=m['hash'],
-                     cache_subdir=target, extract=True, cache_dir=cache_dir))
-        assert path.exists() and path.parent.exists()
-        return path.parent
+    expected_dir = Path(cache_dir, target)
+    print("Searching for ", expected_dir)
+    if expected_dir.is_dir() and Path(expected_dir, "config.json").exists():
+        logging.info(f"Using cached model {key_or_alias}")
+        return Path(expected_dir)
+    from keras.utils import get_file
+    logging.info(f"Fetching model {key_or_alias}")
+    path = Path(get_file(fname=key+'.zip', origin=m['url'], extract=True,
+                         file_hash=m['hash'], cache_subdir=target))
+    return path.parent
 
 
 __doc__ = f"""\
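Note on usage (not part of the patch): the diff is truncated at the __doc__ string, so it does not show where patched_get_model_folder is installed. Below is a minimal sketch of how such a monkey patch is typically wired up, assuming the stock module-level function csbdeep.models.pretrained.get_model_folder is the one being replaced and that patched_get_model_folder from runstardist.py is in scope; the model alias used here is only an illustration.

# Hypothetical wiring, not taken from the patch above.
import csbdeep.models.pretrained
from stardist.models import StarDist2D

# Replace csbdeep's pretrained-model lookup so it checks the local cache
# (under $KERAS_HOME, falling back to ~/.keras) before downloading.
csbdeep.models.pretrained.get_model_folder = patched_get_model_folder

# With the patch applied, loading a pretrained model reuses an already
# extracted copy when config.json is present in the cache directory,
# instead of re-downloading and re-extracting the archive on every run.
model = StarDist2D.from_pretrained("2D_versatile_fluo")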