diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index b80e88d2e..31fe54301 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -25,7 +25,7 @@ jobs:
     runs-on: ubuntu-latest
     strategy:
       matrix:
-        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+        python-version: ["3.9", "3.10", "3.11", "3.12"]
       fail-fast: false
 
     steps:
diff --git a/README.md b/README.md
index 911a8dd35..67683742b 100644
--- a/README.md
+++ b/README.md
@@ -74,7 +74,7 @@ Distilabel is a tool that can be used to **synthesize data and provide AI feedba
 pip install distilabel --upgrade
 ```
 
-Requires Python 3.8+
+Requires Python 3.9+
 
 In addition, the following extras are available:
 
diff --git a/docs/sections/getting_started/installation.md b/docs/sections/getting_started/installation.md
index 07e473795..804aa8de7 100644
--- a/docs/sections/getting_started/installation.md
+++ b/docs/sections/getting_started/installation.md
@@ -9,7 +9,7 @@ hide:
 !!! NOTE
     Since `distilabel` v1.0.0 was recently released, we refactored most of the stuff, so the installation below only applies to `distilabel` v1.0.0 and above.
 
-You will need to have at least Python 3.8 or higher, up to Python 3.12, since support for the latter is still a work in progress.
+You will need to have at least Python 3.9 or higher, up to Python 3.12, since support for the latter is still a work in progress.
 
 To install the latest release of the package from PyPI you can use the following command:
 
@@ -46,7 +46,7 @@ Additionally, as part of `distilabel` some extra dependencies are available, mai
 
 - `llama-cpp`: for using [llama-cpp-python](https://github.com/abetlen/llama-cpp-python) Python bindings for `llama.cpp` via the `LlamaCppLLM` integration.
 
-- `mistralai`: for using models available in [Mistral AI API](https://mistral.ai/news/la-plateforme/) via the `MistralAILLM` integration. Note that the [`mistralai` Python client](https://github.com/mistralai/client-python) can only be installed from Python 3.9 onwards, so this is the only `distilabel` dependency that's not supported in Python 3.8.
+- `mistralai`: for using models available in [Mistral AI API](https://mistral.ai/news/la-plateforme/) via the `MistralAILLM` integration.
 
 - `ollama`: for using [Ollama](https://ollama.com/) and their available models via `OllamaLLM` integration.
 
diff --git a/pyproject.toml b/pyproject.toml
index b94387ff5..58f3b3226 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -6,14 +6,13 @@ build-backend = "hatchling.build"
 name = "distilabel"
 description = "Distilabel is an AI Feedback (AIF) framework for building datasets with and for LLMs."
 readme = "README.md"
-requires-python = ">=3.8"
+requires-python = ">=3.9"
 license = "Apache-2.0"
 keywords = ["llm", "annotation", "alignment", "synthetic", "data", "rlaif"]
 authors = [{ name = "Argilla", email = "admin@argilla.io" }]
 classifiers = [
     "Development Status :: 4 - Beta",
     "Programming Language :: Python",
-    "Programming Language :: Python :: 3.8",
     "Programming Language :: Python :: 3.9",
     "Programming Language :: Python :: 3.10",
     "Programming Language :: Python :: 3.11",
diff --git a/scripts/install_dependencies.sh b/scripts/install_dependencies.sh
index 4da6ad9dd..dc2ef0f3d 100755
--- a/scripts/install_dependencies.sh
+++ b/scripts/install_dependencies.sh
@@ -6,9 +6,6 @@ python_version=$(python -c "import sys; print(sys.version_info[:2])")
 
 python -m pip install uv
 
-uv pip install --system -e ".[dev,tests,anthropic,argilla,cohere,groq,hf-inference-endpoints,hf-transformers,litellm,llama-cpp,ollama,openai,outlines,vertexai]"
-if [ "${python_version}" != "(3, 8)" ]; then
-  uv pip install --system -e .[mistralai,instructor]
-fi
+uv pip install --system -e ".[dev,tests,anthropic,argilla,cohere,groq,hf-inference-endpoints,hf-transformers,litellm,llama-cpp,ollama,openai,outlines,vertexai,mistralai,instructor]"
 
 uv pip install --system git+https://github.com/argilla-io/LLM-Blender.git
diff --git a/src/distilabel/pipeline/local.py b/src/distilabel/pipeline/local.py
index 85cbadb96..874757a6d 100644
--- a/src/distilabel/pipeline/local.py
+++ b/src/distilabel/pipeline/local.py
@@ -101,11 +101,14 @@ def run(
         num_processes = self.dag.get_total_replica_count()
         ctx = mp.get_context()  # type: ignore
 
-        with ctx.Manager() as manager, ctx.Pool(
-            num_processes,
-            initializer=_init_worker,
-            initargs=(log_queue,),
-        ) as pool:
+        with (
+            ctx.Manager() as manager,
+            ctx.Pool(
+                num_processes,
+                initializer=_init_worker,
+                initargs=(log_queue,),
+            ) as pool,
+        ):
             self._manager = manager
             self._pool = pool
             self._output_queue = self.QueueClass()
diff --git a/tests/unit/llms/huggingface/test_inference_endpoints.py b/tests/unit/llms/huggingface/test_inference_endpoints.py
index 87a890a38..ecc5d9759 100644
--- a/tests/unit/llms/huggingface/test_inference_endpoints.py
+++ b/tests/unit/llms/huggingface/test_inference_endpoints.py
@@ -48,8 +48,11 @@ def test_load_with_cached_token(
         )
 
         # Mock `huggingface_hub.constants.HF_TOKEN_PATH` to exist
-        with mock.patch("pathlib.Path.exists", return_value=True), mock.patch(
-            "builtins.open", new_callable=mock.mock_open, read_data="hf_token"
+        with (
+            mock.patch("pathlib.Path.exists", return_value=True),
+            mock.patch(
+                "builtins.open", new_callable=mock.mock_open, read_data="hf_token"
+            ),
         ):
             # Should not raise any errors
             llm.load()
diff --git a/tests/unit/pipeline/test_base.py b/tests/unit/pipeline/test_base.py
index 1f27e423e..90231eec1 100644
--- a/tests/unit/pipeline/test_base.py
+++ b/tests/unit/pipeline/test_base.py
@@ -105,11 +105,14 @@ def test_load_batch_manager(self, use_cache: bool) -> None:
         pipeline._load_batch_manager(use_cache=True)
         pipeline._cache()
 
-        with mock.patch(
-            "distilabel.pipeline.base._BatchManager.load_from_cache"
-        ) as mock_load_from_cache, mock.patch(
-            "distilabel.pipeline.base._BatchManager.from_dag"
-        ) as mock_from_dag,
+        with (
+            mock.patch(
+                "distilabel.pipeline.base._BatchManager.load_from_cache"
+            ) as mock_load_from_cache,
+            mock.patch(
+                "distilabel.pipeline.base._BatchManager.from_dag"
+            ) as mock_from_dag,
+        ):
             pipeline._load_batch_manager(use_cache=use_cache)
 
         if use_cache:
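
Note on the `local.py` and test changes above: they all rewrite multi-manager `with` statements into the parenthesized form, which is only viable once Python 3.8 support is dropped. Parenthesized context managers were formally added to the grammar in Python 3.10, but CPython 3.9's PEG parser already accepts them. Below is a minimal runnable sketch of the syntax; the `resource` helper is a hypothetical stand-in for `ctx.Manager()` / `ctx.Pool()` and is not a distilabel API.

# sketch.py -- parenthesized context managers (illustrative, not from this PR)
from contextlib import contextmanager
from typing import Iterator


@contextmanager
def resource(name: str) -> Iterator[str]:
    # Toy context manager standing in for ctx.Manager() / ctx.Pool() above.
    print(f"enter {name}")
    try:
        yield name
    finally:
        print(f"exit {name}")


# SyntaxError on Python 3.8; accepted by CPython 3.9+ (official grammar in 3.10).
with (
    resource("manager") as manager,
    resource("pool") as pool,
):
    print(manager, pool)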