diff --git a/.editorconfig b/.editorconfig index fb560b2e..2e201952 100644 --- a/.editorconfig +++ b/.editorconfig @@ -18,3 +18,7 @@ indent_size = 2 [{*.yml,*.yaml}] indent_size = 2 + +[*.md] +indent_size = 2 +indent_style = space diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml index 83f4663a..5c89329d 100644 --- a/.github/workflows/tests.yml +++ b/.github/workflows/tests.yml @@ -2,51 +2,44 @@ name: tests on: push: - branches: [main] + paths-ignore: + - "docs/**" + - "*.md" + branches: + - main pull_request: - branches: [main] + branches: + - main workflow_dispatch: jobs: build: runs-on: ubuntu-latest strategy: - max-parallel: 4 matrix: - python-version: [3.6, 3.7, 3.8, 3.9] - + python-version: ["3.6", "3.7", "3.8", "3.9"] + fail-fast: false steps: - - name: Checkout sources - uses: actions/checkout@v2 - - - name: Set up Python - uses: actions/setup-python@v2 + - uses: actions/checkout@v2 + - uses: actions/setup-python@v2 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | - python -m pip install --upgrade pip - python -m pip install poetry + python3 -m pip install --upgrade pip setuptools + python3 -m pip install poetry + poetry run python -m pip install -U pip setuptools poetry install -E textract - - name: General info + - name: Version info run: | - poetry run python main.py list - poetry run python main.py config --path poetry run python main.py --version - - name: Lint with flake8 - run: | - # stop the build if there are Python syntax errors or undefined names - poetry run flake8 --count --select=E9,F63,F7,F82 --show-source --statistics organize - # exit-zero treats all errors as warnings. 
The GitHub editor is 127 chars wide - poetry run flake8 --count --exit-zero --max-complexity=10 --max-line-length=127 --statistics organize - - name: Test with pytest run: | poetry run pytest - name: Check with MyPy run: | - poetry run mypy -porganize + poetry run mypy organize main.py diff --git a/.readthedocs.yml b/.readthedocs.yml index 75475439..e49a71d7 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -1,27 +1,23 @@ -# .readthedocs.yml +# .readthedocs.yaml # Read the Docs configuration file # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details # Required version: 2 -# Build documentation in the docs/ directory with Sphinx -sphinx: - configuration: docs/conf.py - -# Build documentation with MkDocs -#mkdocs: -# configuration: mkdocs.yml - -# Optionally build your docs in additional formats such as PDF and ePub -formats: all - +# Set the version of Python and other tools you might need build: - image: latest + os: ubuntu-20.04 + tools: + python: "3.9" + +mkdocs: + configuration: mkdocs.yml -# Optionally set the version of Python and requirements required to build your docs +# Optionally declare the Python requirements required to build your docs python: - version: 3.7 install: - method: pip path: . + extra_requirements: + - docs diff --git a/CHANGELOG.md b/CHANGELOG.md index f9b0711c..68d74c8d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,56 +1,116 @@ # Changelog +## v2.0.0 (2022-02-07) + +This is a huge update with lots of improvements. +Please backup all your important stuff before running and use the simulate option! 
+ +[**Migration Guide**](docs/updating-from-v1.md) + +### what's new + +- You can now [target directories](docs/rules.md#targeting-directories) with your rules + (copying, renaming, etc a whole folder) +- [Organize inside or between (S)FTP, S3 Buckets, Zip archives and many more](docs/locations.md#remote-filesystems-and-archives) + (list of [available filesystems](https://www.pyfilesystem.org/page/index-of-filesystems/)). +- [`max_depth`](docs/locations.md#location-options) setting when recursing into subfolders +- Respects your rule order - safer, less magic, less surprises. + - (organize v1 tried to be clever. v2 now works your config file from top to bottom) +- [Jinja2 template engine for placeholders](docs/rules.md#templates-and-placeholders). +- Instant start. (does not need to gather all the files before starting) +- [Filters can now be excluded](docs/filters.md#how-to-exclude-filters). +- [Filter modes](docs/rules.md#rule-options): `all`, `any` and `none`. +- [Rule names](docs/rules.md#rule-options). +- new conflict resolution settings in [`move`](docs/actions.md#move), + [`copy`](docs/actions.md#copy) and [`rename`](docs/actions.md#rename) action: + - Options are `skip`, `overwrite`, `trash`, `rename_new` or `rename_existing` + - You can now define a custom `rename_template`. +- The [`python`](docs/actions.md#python) action can now be run in simulation. +- The [`shell`](docs/actions.md#shell) action now returns stdout and errorcode. +- Added filter [`empty`](docs/filters.md#empty) - find empty files and folders +- Added filter [`hash`](docs/filters.md#hash) - generate file hashes +- Added action [`symlink`](docs/actions.md#symlink) - generate symlinks +- Added action [`confirm`](docs/actions.md#confirm) - asks for confirmation +- Many small fixes and improvements! + +### changed + +- The `timezone` keyword for [`lastmodified`](docs/filters.md#lastmodified) and + [`created`](docs/filters.md#created) was removed. 
The timezone is + now the local timezone by default. +- The `filesize` filter was renamed to [`size`](docs/filters.md#size) and can now be + used to get directory sizes as well. +- The `filename` filter was renamed to [`name`](docs/filters.md#name) and can now be + used to get directory names as well. +- The [`size`](docs/filters.md#size) filter now returns multiple formats + +### removed + +- Glob syntax is gone from folders ([no longer needed](docs/locations.md)) +- `"!"` folder exclude syntax is gone ([no longer needed](docs/locations.md)) + ## v1.10.1 (2021-04-21) + - Action `macos_tags` now supports colors and placeholders. - Show full expanded path if folder is not found. ## v1.10.0 (2021-04-20) + - Add filter `mimetype` - Add action `macos_tags` - Support [`simplematch`](https://github.com/tfeldmann/simplematch) syntax in - `filename`-filter. + `filename`-filter. - Updated dependencies - Because installing `textract` is quite hard on some platforms it is now an optional - dependency. Install it with `pip install organize-tool[textract]` + dependency. Install it with `pip install organize-tool[textract]` - This version needs python 3.6 minimum. Some dependencies that were simply backports - (pathlib2, typing) are removed. + (pathlib2, typing) are removed. - Add timezones in created and last_modified filters (Thank you, @win0err!) ## v1.9.1 (2020-11-10) + - Add {env} variable - Add {now} variable ## v1.9 (2020-06-12) + - Add filter `Duplicate`. ## v1.8.2 (2020-04-03) + - Fix a bug in the filename filter config parsing algorithm with digits-only filenames. ## v1.8.1 (2020-03-28) + - Flatten filter and action lists to allow enhanced config file configuration (Thanks to @rawdamedia!) - Add support for multiline content filters (Thanks to @zor-el!) ## v1.8.0 (2020-03-04) + - Added action `Delete`. - Added filter `FileContent`. - Python 3.4 is officially deprecated and no longer supported. 
- `--config-file` command line option now supports `~` for user folder and expansion - of environment variables -- Added `years`, `months`, `weeks` and `seconds` parameter to filter `created` and - `lastmodified` + of environment variables +- Added `years`, `months`, `weeks` and `seconds` parameter to filter `created` and + `lastmodified` ## v1.7.0 (2019-11-26) + - Added filter `Exif` to filter by image exif data. - Placeholder variable properties are now case insensitve. ## v1.6.2 (2019-11-22) + - Fix `Rename` action (`'PosixPath' object has no attribute 'items'`). - Use type hints everywhere. ## v1.6.1 (2019-10-25) + - Shows a warning for missing folders instead of raising an exception. ## v1.6 (2019-08-19) + - Added filter: `Python` - Added filter: `FileSize` - The organize module can now be run directly: `python3 -m organize` @@ -61,31 +121,38 @@ - The "~~ SIMULATION ~~"-banner now takes up the whole terminal width ## v1.5.3 (2019-08-01) + - Filename filter now supports lists. ## v1.5.2 (2019-07-29) + - Environment variables in folder pathes are now expanded (syntax `$name` or `${name}` - and additionally `%name%` on windows). - For example this allows the usage of e.g. `%public/Desktop%` in windows. + and additionally `%name%` on windows). + For example this allows the usage of e.g. `%public/Desktop%` in windows. ## v1.5.1 (2019-07-23) + - New filter "Created" to filter by creation date. - Fixes issue #39 where globstrings don't work most of the time. - Integration test for issue #39 - Support indented config files ## v1.5 (2019-07-17) + - Fixes issue #31 where the {path} variable always resolves to the source path - Updated dependencies - Exclude changelog and readme from published wheel ## v1.4.5 (2019-07-03) + - Filter and Actions names are now case-insensitive ## v1.4.4 (2019-07-02) + - Fixes issues #36 with umlauts in config file on windows ## v1.4.3 (2019-06-05) + - Use safe YAML loader to fix a deprecation warning. (Thanks mope1!) 
- Better error message if a folder does not exist. (Again thanks mope1!) - Fix example code in documentation for LastModified filter. @@ -93,30 +160,37 @@ - `config --debug` now shows the full path to the config file. ## v1.4.2 (2018-11-14) -- Fixes a bug with command line arguments in the ``$EDITOR`` environment - variable. + +- Fixes a bug with command line arguments in the `$EDITOR` environment + variable. - Fixes a bug where an empty config wouldn't show the correct error message. - Fix binary wheel creation in setup.py by using environment markers ## v1.4.1 (2018-10-05) -- A custom separator ``counter_separator`` can now be set in the actions Move, - Copy and Rename. + +- A custom separator `counter_separator` can now be set in the actions Move, + Copy and Rename. ## v1.4 (2018-09-21) + - Fixes a bug where glob wildcards are not detected correctly - Adds support for excluding folders and files via glob syntax. - Makes sure that files are only handled once per rule. ## v1.3 (2018-07-06) + - Glob support in folder configuration. - New variable {relative_path} is now available in actions. ## v1.2 (2018-03-19) + - Shows the relative path to files in subfolders. ## v1.1 (2018-03-13) + - Removes the colon from extension filter output so `{extension.lower}` now - returns `'png'` instead of `'.png'`. + returns `'png'` instead of `'.png'`. ## v1.0 (2018-03-13) + - Initial release. diff --git a/README.md b/README.md index 1879a52b..073454c7 100644 --- a/README.md +++ b/README.md @@ -1,13 +1,21 @@

- organize logo + organize logo

-[![tests](https://github.com/tfeldmann/organize/actions/workflows/tests.yml/badge.svg)](https://github.com/tfeldmann/organize/actions/workflows/tests.yml) -[![Documentation Status](https://readthedocs.org/projects/organize/badge/?version=latest)](https://organize.readthedocs.io/en/latest/?badge=latest) -[![License](https://img.shields.io/badge/license-MIT-blue.svg)](/LICENSE) -[![PyPI Version](https://img.shields.io/pypi/v/organize-tool)](https://pypi.org/project/organize-tool/) + + + + + + + + + + + +
@@ -18,14 +26,27 @@ Full documentation at Read the docs

+- [**organize v2 is released!**](#organize-v2-is-released) - [About](#about) +- [Features](#features) - [Getting started](#getting-started) - [Installation](#installation) - - [Creating your first rule](#creating-your-first-rule) + - [Create your first rule](#create-your-first-rule) - [Example rules](#example-rules) -- [Advanced usage](#advanced-usage) - [Command line interface](#command-line-interface) +## **organize v2 is released!** + +This is a huge update with lots of improvements. + +See [the changelog](https://organize.readthedocs.io/en/v2/changelog/) for all the new +features! + +Unfortunately your configuration may need some small adjustments: +[**Migration Guide**](docs/updating-from-v1.md) + +Please backup all your important stuff before running and use the simulate option! + ## About Your desktop is a mess? You cannot find anything in your downloads and @@ -35,6 +56,19 @@ Time to automate it once and benefit from it forever. **organize** is a command line, open-source alternative to apps like Hazel (macOS) or File Juggler (Windows). +## Features + +Some highlights include: + +- Safe moving, renaming, copying of files and folders with conflict resolution options +- Fast duplicate file detection +- Exif tags extraction +- Categorization via text extracted from PDF, DOCX and many more +- Supports remote file locations like FTP, WebDAV, S3 Buckets, SSH and many more +- Powerful template engine +- Inline python and shell commands as filters and actions for maximum flexibility +- Everything can be simulated before touching your files. + ## Getting started ### Installation @@ -55,43 +89,50 @@ pip3 install -U "organize-tool[textract]" This command can also be used to update to the newest version. Now you can run `organize --help` to check if the installation was successful. 
-### Creating your first rule +### Create your first rule -In your shell, **run `organize config`** to edit the configuration: +In your shell, run `organize edit` to edit the configuration: ```yaml rules: - - folders: ~/Downloads - subfolders: true - filters: - - extension: pdf - actions: - - echo: "Found PDF!" + - name: "Find PDFs" + locations: + - ~/Downloads + subfolders: true + filters: + - extension: pdf + actions: + - echo: "Found PDF!" ``` -> If you have problems editing the configuration you can run `organize config --open-folder` to reveal the configuration folder in your file manager. You can then edit the `config.yaml` in your favourite editor. -> -> Alternatively you can run `organize config --path` to see the full path to -> your `config.yaml`) +> If you have problems editing the configuration you can run `organize reveal` to reveal the configuration folder in your file manager. You can then edit the `config.yaml` in your favourite editor. -**Save your config file and run `organize run`.** +save your config file and run: -You will see a list of all `.pdf` files you have in your downloads folder (+ subfolders). For now we only show the text `Found PDF!` for each file, but this will change soon... +```sh +organize run +``` + +You will see a list of all `.pdf` files you have in your downloads folder (+ subfolders). +For now we only show the text `Found PDF!` for each file, but this will change soon... (If it shows `Nothing to do` you simply don't have any pdfs in your downloads folder). -Run `organize config` again and add a `copy`-action to your rule: +Run `organize edit` again and add a `move`-action to your rule: -```yaml +```yml actions: - - echo: "Found PDF!" - - move: ~/Documents/PDFs/ + - echo: "Found PDF!" + - move: ~/Documents/PDFs/ ``` -**Now run `organize sim` to see what would happen without touching your files**. You will see that your pdf-files would be moved over to your `Documents/PDFs` folder. 
+Now run `organize sim` to see what would happen without touching your files. + +You will see that your pdf-files would be moved over to your `Documents/PDFs` folder. -Congratulations, you just automated your first task. You can now run `organize run` whenever you like and all your pdfs are a bit more organized. It's that easy. +Congratulations, you just automated your first task. You can now run `organize run` +whenever you like and all your pdfs are a bit more organized. It's that easy. -> There is so much more. You want to rename / copy files, run custom shell- or python scripts, match filenames with regular expressions or use placeholder variables? organize has you covered. Have a look at the advanced usage example below! +> There is so much more. You want to rename / copy files, run custom shell- or python scripts, match names with regular expressions or use placeholder variables? organize has you covered. Have a look at the advanced usage example below! ## Example rules @@ -101,133 +142,98 @@ Move all invoices, orders or purchase documents into your documents folder: ```yaml rules: - # sort my invoices and receipts - - folders: ~/Downloads - subfolders: true - filters: - - extension: pdf - - filename: - contains: - - Invoice - - Order - - Purchase - case_sensitive: false - actions: - - move: ~/Documents/Shopping/ + - name: "Sort my invoices and receipts" + locations: ~/Downloads + subfolders: true + filters: + - extension: pdf + - name: + contains: + - Invoice + - Order + - Purchase + case_sensitive: false + actions: + - move: ~/Documents/Shopping/ ``` -Move incomplete downloads older than 30 days into the trash: +Recursively delete all empty directories: ```yaml rules: - # move incomplete downloads older > 30 days into the trash - - folders: ~/Downloads - filters: - - extension: - - download - - crdownload - - part - - lastmodified: - days: 30 - mode: older - actions: - - trash + - name: "Recursively delete all empty directories" + locations: + - path: 
~/Downloads + subfolders: true + filters: + - empty + actions: + - delete ``` -Delete empty files from downloads and desktop: - -```yaml -rules: - # delete empty files from downloads and desktop - - folders: - - ~/Downloads - - ~/Desktop - filters: - - filesize: 0 - actions: - - trash -``` + -This example shows some advanced features like placeholder variables, pluggable -actions, recursion through subfolders and glob syntax: +You'll find many more examples in the full documentation. -```yaml -rules: - - folders: ~/Documents/**/* - filters: - - extension: - - pdf - - docx - - created - actions: - - move: "~/Documents/{extension.upper}/{created.year}{created.month:02}/" - - shell: 'open "{path}"' -``` +## Command line interface -Given we have two files in our `~/Documents` folder (or any of its subfolders) -named `script.docx` from january 2018 and `demo.pdf` from december 2016 this will -happen: +```sh +Usage: organize [OPTIONS] COMMAND [ARGS]... -- `script.docx` will be moved to `~/Documents/DOCX/2018-01/script.docx` -- `demo.pdf` will be moved to `~/Documents/PDF/2016-12/demo.pdf` -- The files will be opened (`open` command in macOS) _from their new location_. -- Note the format syntax for `{created.month}` to make sure the month is prepended with a zero. + organize -## Command line interface - -``` -The file management automation tool. - -Usage: - organize sim [--config-file=] - organize run [--config-file=] - organize config [--open-folder | --path | --debug] [--config-file=] - organize list - organize --help - organize --version - -Arguments: - sim Simulate a run. Does not touch your files. - run Organizes your files according to your rules. - config Open the configuration file in $EDITOR. - list List available filters and actions. - --version Show program version and exit. - -h, --help Show this screen and exit. + The file management automation tool. Options: - -o, --open-folder Open the folder containing the configuration files. 
- -p, --path Show the path to the configuration file. - -d, --debug Debug your configuration file. - -Full documentation: https://organize.readthedocs.io + --version Show the version and exit. + -h, --help Show this message and exit. + +Commands: + run Organizes your files according to your rules. + sim Simulates a run (does not touch your files). + edit Edit the rules. + check Checks whether a given config file is valid. + reveal Reveals the default config file. + schema Prints the json schema for config files. + docs Opens the documentation. ``` diff --git a/docs/Makefile b/docs/Makefile deleted file mode 100644 index e77582a6..00000000 --- a/docs/Makefile +++ /dev/null @@ -1,20 +0,0 @@ -# Minimal makefile for Sphinx documentation -# - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = python3 -msphinx -SPHINXPROJ = organize -SOURCEDIR = . -BUILDDIR = _build - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/_static/organize.svg b/docs/_static/organize.svg deleted file mode 100644 index 4423317f..00000000 --- a/docs/_static/organize.svg +++ /dev/null @@ -1,272 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/docs/actions.md b/docs/actions.md new file mode 100644 index 00000000..435cfcd0 --- /dev/null +++ b/docs/actions.md @@ -0,0 +1,408 @@ +# Actions + +This page shows the specifics of each action. For basic action usage and options have a +look at the [Rules](rules.md) section. 
+ +## confirm + +::: organize.actions.Confirm + +**Examples** + +Confirm before deleting a duplicate + +```yaml +rules: + - name: "Delete duplicates with confirmation" + locations: + - ~/Downloads + - ~/Documents + filters: + - not empty + - duplicate + - name + actions: + - confirm: "Delete {name}?" + - trash +``` + +## copy + +::: organize.actions.Copy + +**Examples:** + +Copy all pdfs into `~/Desktop/somefolder/` and keep filenames + +```yaml +rules: + - locations: ~/Desktop + filters: + - extension: pdf + actions: + - copy: "~/Desktop/somefolder/" +``` + +Use a placeholder to copy all .pdf files into a "PDF" folder and all .jpg files into a "JPG" folder. Existing files will be overwritten. + +```yaml +rules: + - locations: ~/Desktop + filters: + - extension: + - pdf + - jpg + actions: + - copy: + dest: "~/Desktop/{extension.upper()}/" + on_conflict: overwrite +``` + +Copy into the folder `Invoices`. Keep the filename but do not overwrite existing files. +To prevent overwriting files, an index is added to the filename, so `somefile.jpg` becomes `somefile 2.jpg`. +The counter separator is `' '` by default, but can be changed using the `counter_separator` property. + +```yaml +rules: + - locations: ~/Desktop/Invoices + filters: + - extension: + - pdf + actions: + - copy: + dest: "~/Documents/Invoices/" + on_conflict: "rename_new" + rename_template: "{name} {counter}{extension}" +``` + +## delete + +::: organize.actions.delete.Delete + +**Examples:** + +Delete old downloads. 
+ +```yaml +rules: + - locations: "~/Downloads" + filters: + - lastmodified: + days: 365 + - extension: + - png + - jpg + actions: + - delete +``` + +Delete all empty subfolders + +```yaml +rules: + - name: Delete all empty subfolders + locations: + - path: "~/Downloads" + max_depth: null + targets: dirs + filters: + - empty + actions: + - delete +``` + +## echo + +::: organize.actions.Echo + +**Examples:** + +```yaml +rules: + - name: "Find files older than a year" + locations: ~/Desktop + filters: + - lastmodified: + days: 365 + actions: + - echo: "Found old file" +``` + +Prints "Hello World!" and filepath for each file on the desktop: + +```yaml +rules: + - locations: + - ~/Desktop + actions: + - echo: "Hello World! {path}" +``` + +This will print something like `Found a ZIP: "backup"` for each file on your desktop + +```yaml +rules: + - locations: + - ~/Desktop + filters: + - extension + - name + actions: + - echo: 'Found a {extension.upper()}: "{name}"' +``` + +Show the `{relative_path}` and `{path}` of all files in '~/Downloads', '~/Desktop' and their subfolders: + +```yaml +rules: + - locations: + - path: ~/Desktop + max_depth: null + - path: ~/Downloads + max_depth: null + actions: + - echo: "Path: {path}" + - echo: "Relative: {relative_path}" +``` + +## macos_tags + +::: organize.actions.MacOSTags + +**Examples:** + +```yaml +rules: + - name: "add a single tag" + locations: "~/Documents/Invoices" + filters: + - name: + startswith: "Invoice" + - extension: pdf + actions: + - macos_tags: Invoice +``` + +Adding multiple tags ("Invoice" and "Important") + +```yaml +rules: + - locations: "~/Documents/Invoices" + filters: + - name: + startswith: "Invoice" + - extension: pdf + actions: + - macos_tags: + - Important + - Invoice +``` + +Specify tag colors + +```yaml +rules: + - locations: "~/Documents/Invoices" + filters: + - name: + startswith: "Invoice" + - extension: pdf + actions: + - macos_tags: + - Important (green) + - Invoice (purple) +``` + +Add a 
templated tag with color + +```yaml +rules: + - locations: "~/Documents/Invoices" + filters: + - created + actions: + - macos_tags: + - Year-{created.year} (red) +``` + +## move + +::: organize.actions.Move + +**Examples:** + +Move all pdfs and jpgs from the desktop into the folder "~/Desktop/media/". Filenames are not changed. + +```yaml +rules: + - locations: ~/Desktop + filters: + - extension: + - pdf + - jpg + actions: + - move: "~/Desktop/media/" +``` + +Use a placeholder to move all .pdf files into a "PDF" folder and all .jpg files into a +"JPG" folder. Existing files will be overwritten. + +```yaml +rules: + - locations: ~/Desktop + filters: + - extension: + - pdf + - jpg + actions: + - move: + dest: "~/Desktop/{extension.upper()}/" + on_conflict: "overwrite" +``` + +Move pdfs into the folder `Invoices`. Keep the filename but do not overwrite existing files. To prevent overwriting files, an index is added to the filename, so `somefile.jpg` becomes `somefile 2.jpg`. + +```yaml +rules: + - locations: ~/Desktop/Invoices + filters: + - extension: + - pdf + actions: + - move: + dest: "~/Documents/Invoices/" + on_conflict: "rename_new" + rename_template: "{name} {counter}{extension}" +``` + +## python + +::: organize.actions.Python + +**Examples:** + +A basic example that shows how to get the current file path and do some printing in a +for loop. The `|` is yaml syntax for defining a string literal spanning multiple lines. 
+ +```yaml +rules: + - locations: "~/Desktop" + actions: + - python: | + print('The path of the current file is %s' % path) + for _ in range(5): + print('Heyho, its me from the loop') +``` + +```yaml +rules: + - name: "You can access filter data" + locations: ~/Desktop + filters: + - regex: '^(?P.*)\.(?P.*)$' + actions: + - python: | + print('Name: %s' % regex["name"]) + print('Extension: %s' % regex["extension"]) +``` + +Running in simulation and [yaml aliases](rules.md#advanced-aliases): + +```yaml +my_python_script: &script | + print("Hello World!") + print(path) + +rules: + - name: "Run in simulation and yaml alias" + locations: + - ~/Desktop/ + actions: + - python: + code: *script + run_in_simulation: yes +``` + +You have access to all the python magic -- do a google search for each +filename starting with an underscore: + +```yaml +rules: + - locations: ~/Desktop + filters: + - name: + startswith: "_" + actions: + - python: | + import webbrowser + webbrowser.open('https://www.google.com/search?q=%s' % name) +``` + +## rename + +::: organize.actions.Rename + +**Examples:** + +```yaml +rules: + - name: "Convert all .PDF file extensions to lowercase (.pdf)" + locations: "~/Desktop" + filters: + - name + - extension: PDF + actions: + - rename: "{name}.pdf" +``` + +```yaml +rules: + - name: "Convert **all** file extensions to lowercase" + locations: "~/Desktop" + filters: + - name + - extension + actions: + - rename: "{name}.{extension.lower()}" +``` + +## shell + +::: organize.actions.Shell + +**Examples:** + +```yaml +rules: + - name: "On macOS: Open all pdfs on your desktop" + locations: "~/Desktop" + filters: + - extension: pdf + actions: + - shell: 'open "{path}"' +``` + +## symlink + +::: organize.actions.Symlink + +## trash + +::: organize.actions.Trash + +**Examples:** + +```yaml +rules: + - name: Move all JPGs and PNGs on the desktop which are older than one year into the trash + locations: "~/Desktop" + filters: + - lastmodified: + years: 1 + mode: older 
+ - extension: + - png + - jpg + actions: + - trash +``` diff --git a/docs/changelog.md b/docs/changelog.md new file mode 100644 index 00000000..11811532 --- /dev/null +++ b/docs/changelog.md @@ -0,0 +1,4 @@ +{% + include-markdown "../CHANGELOG.md" + rewrite-relative-urls=true +%} diff --git a/docs/conf.py b/docs/conf.py deleted file mode 100644 index 8aee0584..00000000 --- a/docs/conf.py +++ /dev/null @@ -1,177 +0,0 @@ -#!/usr/bin/env python3 -# -*- coding: utf-8 -*- -# -# organize documentation build configuration file, created by -# sphinx-quickstart on Fri Sep 29 15:43:41 2017. -# -# This file is execfile()d with the current directory set to its -# containing dir. -# -# Note that not all possible configuration values are present in this -# autogenerated file. -# -# All configuration values have a default; values that are commented out -# serve to show the default. - -# If extensions (or modules to document with autodoc) are in another directory, -# add these directories to sys.path here. If the directory is relative to the -# documentation root, use os.path.abspath to make it absolute, like shown here. - -import os -import sys - -sys.path.insert(0, os.path.abspath("../organize")) - -# -- General configuration ------------------------------------------------ - -# If your documentation needs a minimal Sphinx version, state it here. -# -# needs_sphinx = '1.0' - -# Add any Sphinx extension module names here, as strings. They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = ["sphinx.ext.autodoc", "sphinx.ext.todo", "sphinx.ext.autosectionlabel"] - -# If true, the current module name will be prepended to all description -# unit titles (such as .. function::). -add_module_names = False - -# Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] - -# The suffix(es) of source filenames. 
-# You can specify multiple suffix as a list of string: -# -# source_suffix = ['.rst', '.md'] -source_suffix = ".rst" - -# The master toctree document. -master_doc = "index" - -# General information about the project. -project = "organize" -copyright = "Thomas Feldmann" -author = "Thomas Feldmann" - -# The version info for the project you're documenting, acts as replacement for -# |version| and |release|, also used in various other places throughout the -# built documents. -# -# The short X.Y version. -src_dir = os.path.realpath(os.path.dirname(os.path.dirname(__file__))) -sys.path.insert(0, src_dir) -from organize.__version__ import __version__ - -version = __version__ -# The full version, including alpha/beta/rc tags. -release = __version__ - -# The language for content autogenerated by Sphinx. Refer to documentation -# for a list of supported languages. -# -# This is also used if you do content translation via gettext catalogs. -# Usually you set "language" from the command line for these cases. -language = None - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This patterns also effect to html_static_path and html_extra_path -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] - -# The name of the Pygments (syntax highlighting) style to use. -pygments_style = "sphinx" - -# If true, `todo` and `todoList` produce output, else they produce nothing. -todo_include_todos = True - - -# -- Options for HTML output ---------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -# html_theme = 'alabaster' -html_theme = "sphinx_rtd_theme" - -# Theme options are theme-specific and customize the look and feel of a theme -# further. For a list of options available for each theme, see the -# documentation. 
-# -# html_theme_options = {} - -# Add any paths that contain custom static files (such as style sheets) here, -# relative to this directory. They are copied after the builtin static files, -# so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] - -# Custom sidebar templates, must be a dictionary that maps document names -# to template names. -# -# This is required for the alabaster theme -# refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars -html_sidebars = { - "**": [ - "about.html", - "navigation.html", - "relations.html", # needs 'show_related': True theme option to display - "searchbox.html", - "donate.html", - ] -} - - -# -- Options for HTMLHelp output ------------------------------------------ - -# Output file base name for HTML help builder. -htmlhelp_basename = "organizedoc" - - -# -- Options for LaTeX output --------------------------------------------- - -latex_elements = { - # The paper size ('letterpaper' or 'a4paper'). - # - # 'papersize': 'letterpaper', - # The font size ('10pt', '11pt' or '12pt'). - # - # 'pointsize': '10pt', - # Additional stuff for the LaTeX preamble. - # - # 'preamble': '', - # Latex figure (float) alignment - # - # 'figure_align': 'htbp', -} - -# Grouping the document tree into LaTeX files. List of tuples -# (source start file, target name, title, -# author, documentclass [howto, manual, or own class]). -latex_documents = [ - (master_doc, "organize.tex", "organize Documentation", "Thomas Feldmann", "manual") -] - - -# -- Options for manual page output --------------------------------------- - -# One entry per manual page. List of tuples -# (source start file, name, description, authors, manual section). -man_pages = [(master_doc, "organize", "organize Documentation", [author], 1)] - - -# -- Options for Texinfo output ------------------------------------------- - -# Grouping the document tree into Texinfo files. 
List of tuples -# (source start file, target name, title, author, -# dir menu entry, description, category) -texinfo_documents = [ - ( - master_doc, - "organize", - "organize Documentation", - author, - "organize", - "One line description of project.", - "Miscellaneous", - ) -] diff --git a/docs/configuration.md b/docs/configuration.md new file mode 100644 index 00000000..0279157d --- /dev/null +++ b/docs/configuration.md @@ -0,0 +1,49 @@ +# Configuration + +## Editing the configuration + +organize has a default config file if no other file is given. + +To edit the default configuration file: + +```sh +$ organize edit # opens in $EDITOR +$ organize edit --editor=vim +$ EDITOR=code organize edit +``` + +To open the folder containing the configuration file: + +```sh +$ organize reveal +$ organize reveal --path # show the full path to the default config +``` + +To check your configuration run: + +```sh +$ organize check +$ organize check --debug # check with debug output +``` + +## Running and simulating + +To run / simulate the default config file: + +```sh +$ organize sim +$ organize run +``` + +To run / simulate a specific config file: + +```sh +$ organize sim [FILE] +$ organize run [FILE] +``` + +## Environment variables + +- `ORGANIZE_CONFIG` - The path to the default config file. +- `NO_COLOR` - if this is set, the output is not colored. +- `EDITOR` - The editor used to edit the config file. diff --git a/docs/filters.md b/docs/filters.md new file mode 100644 index 00000000..1b255d8f --- /dev/null +++ b/docs/filters.md @@ -0,0 +1,632 @@ +# Filters + +This page shows the specifics of each filter. + +## - How to exclude filters - + +To exclude a filter, prefix the filter name with **not** (e.g. `"not empty"`, +`"not extension": jpg`, etc). + +!!! note + + If you want to exclude all filters you can set the rule's `filter_mode` to `none`. 
+ +Example: + +```yaml +rules: + # using filter_mode + - locations: ~/Desktop + filter_mode: "none" # <- excludes all + filters: + - empty + - name: + endswith: "2022" + actions: + - echo: "{name}" + + # Exclude a single filter + - locations: ~/Desktop + filters: + - not extension: jpg # <- matches all non-jpgs + - name: + startswith: "Invoice" + - not empty # <- matches files with content + actions: + - echo: "{name}" +``` + +## created + +::: organize.filters.Created + +**Examples:** + +Show all files on your desktop created at least 10 days ago + +```yaml +rules: + - name: Show all files on your desktop created at least 10 days ago + locations: "~/Desktop" + filters: + - created: + days: 10 + actions: + - echo: "Was created at least 10 days ago" +``` + +Show all files on your desktop which were created within the last 5 hours + +```yaml +rules: + - name: Show all files on your desktop which were created within the last 5 hours + locations: "~/Desktop" + filters: + - created: + hours: 5 + mode: newer + actions: + - echo: "Was created within the last 5 hours" +``` + +Sort pdfs by year of creation + +```yaml +rules: + - name: Sort pdfs by year of creation + locations: "~/Documents" + filters: + - extension: pdf + - created + actions: + - move: "~/Documents/PDF/{created.year}/" +``` + +Formatting the creation date + +```yaml +rules: + - name: Display the creation date + locations: "~/Documents" + filters: + - extension: pdf + - created + actions: + - echo: "{created.strftime('%Y-%m-%d')}" +``` + +## duplicate + +::: organize.filters.Duplicate + +**Examples:** + +Show all duplicate files in your desktop and download folder (and their subfolders) + +```yaml +rules: + - name: Show all duplicate files in your desktop and download folder (and their subfolders) + locations: + - ~/Desktop + - ~/Downloads + subfolders: true + filters: + - duplicate + actions: + - echo: "{path} is a duplicate of {duplicate.original}" +``` + +Check for duplicated files between Desktop and a 
Zip file, select original by creation date + +```yaml +rules: + - name: "Check for duplicated files between Desktop and a Zip file, select original by creation date" + locations: + - ~/Desktop + - zip://~/Desktop/backup.zip + filters: + - duplicate: + detect_original_by: "created" + actions: + - echo: "Duplicate found!" +``` + +## empty + +::: organize.filters.Empty + +**Examples:** + +Recursively delete empty folders + +```yaml +rules: + - targets: dirs + locations: + - path: ~/Desktop + max_depth: null + filters: + - empty + actions: + - delete +``` + +## exif + +::: organize.filters.Exif + +Show available EXIF data of your pictures + +```yaml +rules: + - name: "Show available EXIF data of your pictures" + locations: + - path: ~/Pictures + max_depth: null + filters: + - exif + actions: + - echo: "{exif}" +``` + +Copy all images which contain GPS information while keeping subfolder structure: + +```yaml +rules: + - name: "GPS demo" + locations: + - path: ~/Pictures + max_depth: null + filters: + - exif: gps.gpsdate + actions: + - copy: ~/Pictures/with_gps/{relative_path}/ +``` + +Filter by camera manufacturer + +```yaml +rules: + - name: "Filter by camera manufacturer" + locations: + - path: ~/Pictures + max_depth: null + filters: + - exif: + image.model: Nikon D3200 + actions: + - move: "~/Pictures/My old Nikon/" +``` + +Sort images by camera manufacturer. 
This will create folders for each camera model
+(for example "Nikon D3200", "iPhone 6s", "iPhone 5s", "DMC-GX80") and move the pictures
+accordingly:
+
+```yaml
+rules:
+  - name: "camera sort"
+    locations:
+      - path: ~/Pictures
+        max_depth: null
+    filters:
+      - extension: jpg
+      - exif: image.model
+    actions:
+      - move: "~/Pictures/{exif.image.model}/"
+```
+
+## extension
+
+::: organize.filters.Extension
+
+**Examples:**
+
+Match a single file extension
+
+```yaml
+rules:
+  - name: "Match a single file extension"
+    locations: "~/Desktop"
+    filters:
+      - extension: png
+    actions:
+      - echo: "Found PNG file: {path}"
+```
+
+Match multiple file extensions
+
+```yaml
+rules:
+  - name: "Match multiple file extensions"
+    locations: "~/Desktop"
+    filters:
+      - extension:
+          - .jpg
+          - jpeg
+    actions:
+      - echo: "Found JPG file: {path}"
+```
+
+Make all file extensions lowercase
+
+```yaml
+rules:
+  - name: "Make all file extensions lowercase"
+    locations: "~/Desktop"
+    filters:
+      - extension
+    actions:
+      - rename: "{path.stem}.{extension.lower()}"
+```
+
+Using extension lists ([yaml aliases](rules.md#advanced-aliases))
+
+```yaml
+img_ext: &img
+  - png
+  - jpg
+  - tiff
+
+audio_ext: &audio
+  - mp3
+  - wav
+  - ogg
+
+rules:
+  - name: "Using extension lists"
+    locations: "~/Desktop"
+    filters:
+      - extension:
+          - *img
+          - *audio
+    actions:
+      - echo: "Found media file: {path}"
+```
+
+## filecontent
+
+::: organize.filters.FileContent
+
+**Examples:**
+
+Show the content of all your PDF files
+
+```yaml
+rules:
+  - name: "Show the content of all your PDF files"
+    locations: ~/Documents
+    filters:
+      - extension: pdf
+      - filecontent
+    actions:
+      - echo: "{filecontent}"
+```
+
+Match an invoice with a regular expression and sort by customer
+
+```yaml
+rules:
+  - name: "Match an invoice with a regular expression and sort by customer"
+    locations: "~/Desktop"
+    filters:
+      - filecontent: 'Invoice.*Customer (?P<customer>\w+)'
+    actions:
+      - move: "~/Documents/Invoices/{filecontent.customer}/"
+```
+ +## hash + +::: organize.filters.Hash + +**Examples:** + +Show the hashes of your files: + +```yaml +rules: + - name: "Show the hashes and size of your files" + locations: "~/Desktop" + filters: + - hash + - size + actions: + - echo: "{hash} {size.decimal}" +``` + +## lastmodified + +::: organize.filters.LastModified + +**Examples:** + +```yaml +rules: + - name: "Show all files on your desktop last modified at least 10 days ago" + locations: "~/Desktop" + filters: + - lastmodified: + days: 10 + actions: + - echo: "Was modified at least 10 days ago" +``` + +Show all files on your desktop which were modified within the last 5 hours: + +```yaml +rules: + - locations: "~/Desktop" + filters: + - lastmodified: + hours: 5 + mode: newer + actions: + - echo: "Was modified within the last 5 hours" +``` + +Sort pdfs by year of last modification + +```yaml +rules: + - name: "Sort pdfs by year of last modification" + locations: "~/Documents" + filters: + - extension: pdf + - lastmodified + actions: + - move: "~/Documents/PDF/{lastmodified.year}/" +``` + +Formatting the last modified date + +```yaml +rules: + - name: Formatting the lastmodified date + locations: "~/Documents" + filters: + - extension: pdf + - lastmodified + actions: + - echo: "{lastmodified.strftime('%Y-%m-%d')}" +``` + +## mimetype + +::: organize.filters.MimeType + +**Examples:** + +Show MIME types + +```yaml +rules: + - name: "Show MIME types" + locations: "~/Downloads" + filters: + - mimetype + actions: + - echo: "{mimetype}" +``` + +Filter by 'image' mimetype + +```yaml +rules: + - name: "Filter by 'image' mimetype" + locations: "~/Downloads" + filters: + - mimetype: image + actions: + - echo: "This file is an image: {mimetype}" +``` + +Filter by specific MIME type + +```yaml +rules: + - name: Filter by specific MIME type + locations: "~/Desktop" + filters: + - mimetype: application/pdf + actions: + - echo: "Found a PDF file" +``` + +Filter by multiple specific MIME types + +```yaml +rules: + - name: 
Filter by multiple specific MIME types + locations: "~/Music" + filters: + - mimetype: + - application/pdf + - audio/midi + actions: + - echo: "Found Midi or PDF." +``` + +## name + +::: organize.filters.Name + +**Examples:** + +Match all files starting with 'Invoice': + +```yaml +rules: + - locations: "~/Desktop" + filters: + - name: + startswith: Invoice + actions: + - echo: "This is an invoice" +``` + +Match all files starting with 'A' end containing the string 'hole' +(case insensitive): + +```yaml +rules: + - locations: "~/Desktop" + filters: + - name: + startswith: A + contains: hole + case_sensitive: false + actions: + - echo: "Found a match." +``` + +Match all files starting with 'A' or 'B' containing '5' or '6' and ending with +'\_end': + +```yaml +rules: + - locations: "~/Desktop" + filters: + - name: + startswith: + - "A" + - "B" + contains: + - "5" + - "6" + endswith: _end + case_sensitive: false + actions: + - echo: "Found a match." +``` + +## python + +::: organize.filters.Python + +**Examples:** + +```yaml +rules: + - name: A file name reverser. + locations: ~/Documents + filters: + - extension + - python: | + return {"reversed_name": path.stem[::-1]} + actions: + - rename: "{python.reversed_name}.{extension}" +``` + +A filter for odd student numbers. Assuming the folder `~/Students` contains +the files `student-01.jpg`, `student-01.txt`, `student-02.txt` and +`student-03.txt` this rule will print +`"Odd student numbers: student-01.txt"` and +`"Odd student numbers: student-03.txt"` + +```yaml +rules: + - name: "Filter odd student numbers" + locations: ~/Students/ + filters: + - python: | + return int(path.stem.split('-')[1]) % 2 == 1 + actions: + - echo: "Odd student numbers: {path.name}" +``` + +Advanced usecase. You can access data from previous filters in your python code. +This can be used to match files and capturing names with a regular expression +and then renaming the files with the output of your python script. 
+
+```yaml
+rules:
+  - name: "Access placeholders in python filter"
+    locations: files
+    filters:
+      - extension: txt
+      - regex: (?P<firstname>\w+)-(?P<lastname>\w+)\..*
+      - python: |
+          emails = {
+              "Betts": "dbetts@mail.de",
+              "Cornish": "acornish@google.com",
+              "Bean": "dbean@aol.com",
+              "Frey": "l-frey@frey.org",
+          }
+          if regex.lastname in emails:  # get emails from wherever
+              return {"mail": emails[regex.lastname]}
+    actions:
+      - rename: "{python.mail}.txt"
+```
+
+Result:
+
+- `Devonte-Betts.txt` becomes `dbetts@mail.de.txt`
+- `Alaina-Cornish.txt` becomes `acornish@google.com.txt`
+- `Dimitri-Bean.txt` becomes `dbean@aol.com.txt`
+- `Lowri-Frey.txt` becomes `l-frey@frey.org.txt`
+- `Someunknown-User.txt` remains unchanged because the email is not found
+
+## regex
+
+::: organize.filters.Regex
+
+**Examples:**
+
+Match an invoice with a regular expression:
+
+```yaml
+rules:
+  - locations: "~/Desktop"
+    filters:
+      - regex: '^RG(\d{12})-sig\.pdf$'
+    actions:
+      - move: "~/Documents/Invoices/1und1/"
+```
+
+Match and extract data from filenames with regex named groups:
+This is just like the previous example but we rename the invoice using
+the invoice number extracted via the regular expression and the named
+group `the_number`.
+
+```yaml
+rules:
+  - locations: ~/Desktop
+    filters:
+      - regex: '^RG(?P<the_number>\d{12})-sig\.pdf$'
+    actions:
+      - move: ~/Documents/Invoices/1und1/{regex.the_number}.pdf
+```
+
+## size
+
+::: organize.filters.Size
+
+**Examples:**
+
+Trash big downloads:
+
+```yaml
+rules:
+  - locations: "~/Downloads"
+    targets: files
+    filters:
+      - size: "> 0.5 GB"
+    actions:
+      - trash
+```
+
+Move all JPEGs bigger than 1 MB and smaller than 10 MB. Search all subfolders and keep the
+original relative path.
+ +```yaml +rules: + - locations: + - path: "~/Pictures" + max_depth: null + filters: + - extension: + - jpg + - jpeg + - size: ">1mb, <10mb" + actions: + - move: "~/Pictures/sorted/{relative_path}/" +``` diff --git a/docs/images/organize.pdf b/docs/images/organize.pdf deleted file mode 100644 index 7538b99f..00000000 Binary files a/docs/images/organize.pdf and /dev/null differ diff --git a/docs/_static/organize.pdf b/docs/img/organize.pdf similarity index 100% rename from docs/_static/organize.pdf rename to docs/img/organize.pdf diff --git a/docs/images/organize.svg b/docs/img/organize.svg similarity index 100% rename from docs/images/organize.svg rename to docs/img/organize.svg diff --git a/docs/index.md b/docs/index.md new file mode 100644 index 00000000..62a4b441 --- /dev/null +++ b/docs/index.md @@ -0,0 +1,6 @@ +# Welcome to organize's documentation + +{% + include-markdown "../README.md" + rewrite-relative-urls=false +%} diff --git a/docs/index.rst b/docs/index.rst deleted file mode 100644 index 23946f2f..00000000 --- a/docs/index.rst +++ /dev/null @@ -1,25 +0,0 @@ -.. image:: https://github.com/tfeldmann/organize/raw/main/docs/images/organize.svg?sanitize=true - -organize -======== -organize is a command line utility to automate file organization tasks. - -http://github.com/tfeldmann/organize - -Contents: ---------- -.. toctree:: - - page/quickstart - page/config - page/filters - page/actions - -If you find any bugs or have an idea for a new feature please don't hesitate to `open an issue `_ on GitHub. - -Indices and tables -================== - -* :ref:`genindex` -* :ref:`modindex` -* :ref:`search` diff --git a/docs/locations.md b/docs/locations.md new file mode 100644 index 00000000..8d6b2bec --- /dev/null +++ b/docs/locations.md @@ -0,0 +1,205 @@ +# Locations + +**Locations** are the folders in which organize searches for resources. +You can set multiple locations for each rule if you want. 
+ +A minimum location definition is just a path where to look for files / folders: + +```yml +rules: + - locations: ~/Desktop + actions: ... +``` + +If you want to handle multiple locations in a rule, create a list: + +```yml +rules: + - locations: + - ~/Desktop + - /usr/bin/ + - "%PROGRAMDATA%/test" + actions: ... +``` + +Using options: + +```yml +rules: + - name: "Location list" + locations: + - path: "~/Desktop" + max_depth: 3 + actions: ... +``` + +Note that you can use environment variables in your locations. + +## Location options + +```yml +rules: + - locations: + - path: ... + max_depth: ... + search: ... + exclude_files: ... + exclude_dirs: ... + system_exlude_files: ... + system_exclude_dirs: ... + ignore_errors: ... + filter: ... + filter_dirs: ... + filesystem: ... +``` + +**path** (`str`)
+Path to a local folder or a [Filesystem URL](#filesystems). + +**max_depth** (`int` or `null`)
+Maximum directory depth to search. + +**search** (`"breadth"` or `"depth"`)
+Whether to use breadth or depth search to recurse into subfolders. Note that if you +want to move or delete files from this location, this has to be set to `"depth"`. +_(Default: `"depth"`)_ + +**exclude_files** (`List[str]`)
+A list of filename patterns that should be excluded in this location, e.g. `["~*"]`. + +**exclude_dirs** (`List[str]`)
+A list of patterns that will be used to filter out directory names in this location,
+e.g. `['do-not-move', '*-Important']`
+
+**system_exlude_files** (`List[str]`)<br>
+The list of filename patterns that are excluded by default. Defaults to: +`["thumbs.db", "desktop.ini", "~$*", ".DS_Store", ".localized"]` + +**system_exclude_dirs** (`List[str]`)
+The list of dir names that are excluded by default (`['.git', '.svn']`) + +**ignore_errors** (`bool`)
+If `true`, any errors reading the location will be ignored. + +**filter** (`List[str]`)
+A list of filename patterns that should be used in this location, e.g. `["*.py"]`. +All other files are skipped. + +**filter_dirs** (`List[str]`)
+A list of patterns to match directory names that are included in this location. +All other directories are skipped. + +**filesystem** (str)
+A [Filesystem URL](#filesystems). + +### `filesystem` and `path` + +If you want the location to be the root (`"/"`) of a filesystem, use `path`: + +```yml +rules: + - locations: + - path: zip:///Users/theuser/Downloads/Test.zip +``` + +If you want the location to be a subfolder inside a filesystem, use `path` and `filesystem`: + +```yml +rules: + - locations: + - filesystem: zip:///Users/theuser/Downloads/Test.zip + path: "/folder/in/the/zipfile/" +``` + +### `max_depth` and `subfolders` + +- If `subfolders: true` is specified on the rule, all locations are set to `max_depth: null` + by default. +- A `max_depth` setting in a location is given precedence over the rule's `subfolders` setting. + +## Remote filesystems and archives + +Locations in organize can include: + +- Folders on the harddrive +- ZIP archives +- TAR archives +- FTP servers +- S3 Buckets +- SSH and SMB connections +- IMAP servers +- WebDAV storages +- Dropbox / OneDrive / Google Drive storage (no need to install the client) +- Azure Datalake / Google Cloud Storage +- [and many more!](https://www.pyfilesystem.org/page/index-of-filesystems) + +You can uses these just like the local harddrive, move/copy files or folders between +them or organize them however you want. + +Filesystem URLs are formatted like this: + +```sh +://:@ + +# Examples: +ftp://ftp.example.org/pub +ftps://will:daffodil@ftp.example.org/private +zip://projects.zip +s3://mybucket +dropbox://dropbox.com?access_token= +ssh://[user[:password]@]host[:port] +``` + +!!! note + + The ZIP, TAR, FTP and AppFS filesystems are builtin. + For all other filesystems you need to + [install the appropriate library](https://www.pyfilesystem.org/page/index-of-filesystems). + +**FTP Example** + +Show the size of all JPGs on a remote FTP server and put them into a local ZIP file. 
+ +```yaml +rules: + - locations: "ftps://demo:{env.FTP_PASSWORD}@demo.wftpserver.com" + subfolders: true + filters: + - size + - extension: jpg + actions: + - echo: "Found file! Size: {size.decimal}" + - copy: + dest: "{relative_path}" + filesystem: zip:///Users/thomas/Desktop/ftpfiles.zip +``` + +!!! note + + You should never include a password in a config file. Better pass them in via an + environment variable (`{env.FTP_PASSWORD}`) as you can see above. + +## Relative locations + +Locations can be relative. This allows you to create simple one-off rules that can be +copied between projects. + +There is a command line option to change the working directory should you need it. + +**huge-pic-warner.yaml:** + +```yaml +rules: + - locations: "docs" # here "docs" is relative to the current working dir + filters: + - extension: jpg + - size: ">3 MB" + actions: + - echo: "Warning - huge pic found!" +``` + +Then run it with: + +```sh +organize sim huge-pic-warner.yaml --working-dir=some/other/dir/ +``` diff --git a/docs/make.bat b/docs/make.bat deleted file mode 100644 index 4670531a..00000000 --- a/docs/make.bat +++ /dev/null @@ -1,36 +0,0 @@ -@ECHO OFF - -pushd %~dp0 - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=python -msphinx -) -set SOURCEDIR=. -set BUILDDIR=_build -set SPHINXPROJ=organize - -if "%1" == "" goto help - -%SPHINXBUILD% >NUL 2>NUL -if errorlevel 9009 ( - echo. - echo.The Sphinx module was not found. Make sure you have Sphinx installed, - echo.then set the SPHINXBUILD environment variable to point to the full - echo.path of the 'sphinx-build' executable. Alternatively you may add the - echo.Sphinx directory to PATH. - echo. 
- echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% -goto end - -:help -%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% - -:end -popd diff --git a/docs/page/actions.rst b/docs/page/actions.rst deleted file mode 100644 index 8e027a38..00000000 --- a/docs/page/actions.rst +++ /dev/null @@ -1,41 +0,0 @@ -.. _actions: -.. py:module:: actions - -Actions -======= - -Copy ----- -.. autoclass:: Copy(dest, [overwrite=False], [counter_separator=' ']) - -Delete ------- -.. autoclass:: Delete - -Echo ----- -.. autoclass:: Echo - -Move ----- -.. autoclass:: Move(dest, [overwrite=False], [counter_separator=' ']) - -Python ------- -.. autoclass:: actions.Python - -Rename ------- -.. autoclass:: Rename(dest, [overwrite=False], [counter_separator=' ']) - -Shell ------ -.. autoclass:: Shell - -Trash ------ -.. autoclass:: Trash - -macOS Tags ----------- -.. autoclass:: MacOSTags diff --git a/docs/page/config.rst b/docs/page/config.rst deleted file mode 100644 index 6ed296ec..00000000 --- a/docs/page/config.rst +++ /dev/null @@ -1,350 +0,0 @@ -************* -Configuration -************* - - -Editing the configuration -========================= -All configuration takes place in your `config.yaml` file. - -- To edit your configuration in ``$EDITOR`` run: - - .. code-block:: bash - - $ organize config # example: "EDITOR=vim organize config" - -- To show the full path to your configuration file:: - - $ organize config --path - -- To open the folder containing the configuration file:: - - $ organize config --open-folder - -- To debug your configuration run:: - - $ organize config --debug - - -Environment variables -===================== - -- ``$EDITOR`` - The editor used to edit the config file. -- ``$ORGANIZE_CONFIG`` - The config file path. Is overridden by ``--config-file`` cmd line argument. - - -Rule syntax -=========== -The rule configuration is done in `YAML `_. 
-You need a top-level element ``rules`` which contains a list of rules. -Each rule defines ``folders``, ``filters`` (optional) and ``actions``. - -.. code-block:: yaml - :caption: config.yaml - :emphasize-lines: 1,2,5,10,14,16,18 - - rules: - - folders: - - ~/Desktop - - /some/folder/ - filters: - - lastmodified: - days: 40 - mode: newer - - extension: pdf - actions: - - move: ~/Desktop/Target/ - - trash - - - folders: - - ~/Inbox - filters: - - extension: pdf - actions: - - move: ~/otherinbox - # optional settings: - enabled: true - subfolders: true - system_files: false - -- ``folders`` is a list of folders you want to organize. -- ``filters`` is a list of filters to apply to the files - you can filter by file extension, last modified date, regular expressions and many more. See :ref:`Filters`. -- ``actions`` is a list of actions to apply to the filtered files. You can put them into the trash, move them into another folder and many more. See :ref:`Actions`. - -Other optional per rule settings: - -- ``enabled`` can be used to temporarily disable single rules. Default = true -- ``subfolders`` specifies whether subfolders should be included in the search. Default = false. This setting only applies to folders without glob wildcards. -- ``system_files`` specifies whether to include system files (desktop.ini, thumbs.db, .DS_Store) in the search. Default = false - - -Folder syntax -============= -Every rule in your configuration file needs to know the folders it applies to. -The easiest way is to define the rules like this: - -.. code-block:: yaml - :caption: config.yaml - - rules: - - folders: - - /path/one - - /path/two - filters: ... - actions: ... - - - folders: - - /path/one - - /another/path - filters: ... - actions: ... - -.. note:: - - You can use environment variables in your folder names. On windows this means you can use ``%public%/Desktop``, ``%APPDATA%``, ``%PROGRAMDATA%`` etc. - -Globstrings ------------ -You can use globstrings in the folder lists. 
For example to get all files with filenames ending with ``_ui`` and any file extension you can use: - -.. code-block:: yaml - :caption: config.yaml - - rules: - - folders: - - '~/Downloads/*_ui.*' - actions: - - echo: '{path}' - -You can use globstrings to recurse through subdirectories (alternatively you can use the ``subfolders: true`` setting as shown below) - -.. code-block:: yaml - :caption: config.yaml - - rules: - - folders: - - '~/Downloads/**/*.*' - actions: - - echo: 'base {basedir}, path {path}, relative: {relative_path}' - - # alternative syntax - - folders: - - ~/Downloads - subfolders: true - actions: - - echo: 'base {basedir}, path {path}, relative: {relative_path}' - - -The following example recurses through all subdirectories in your downloads folder and finds files with ending in ``.c`` and ``.h``. - -.. code-block:: yaml - :caption: config.yaml - - rules: - - folders: - - '~/Downloads/**/*.[c|h]' - actions: - - echo: '{path}' - -.. note:: - - You have to target files with the globstring, not folders. So to scan through all folders starting with *log_* you would write ``yourpath/log_*/*`` - - -Excluding files and folders ---------------------------- -Files and folders can be excluded by prepending an exclamation mark. The following example selects all files -in ``~/Downloads`` and its subfolders - excluding the folder ``Software``: - -.. code-block:: yaml - :caption: config.yaml - - rules: - - folders: - - '~/Downloads/**/*' - - '! ~/Downloads/Software' - actions: - - echo: '{path}' - - -Globstrings can be used to exclude only specific files / folders. This example: - - - adds all files in ``~/Downloads`` - - exludes files from that list whose name contains the word ``system`` ending in ``.bak`` - - adds all files from ``~/Documents`` - - excludes the file ``~/Documents/important.txt``. - -.. code-block:: yaml - :caption: config.yaml - - rules: - - folders: - - '~/Downloads/**/*' - - '! ~/Downloads/**/*system*.bak' - - '~/Documents' - - '! 
~/Documents/important.txt' - actions: - - echo: '{path}' - -.. note:: - - Files and folders are included and excluded in the order you specify them! - - Please make sure your are putting the exclamation mark within quotation marks. - - -Aliases -------- -Instead of repeating the same folders in each and every rule you can use an alias for multiple folders which you can then reference in each rule. -Aliases are a standard feature of the YAML syntax. - -.. code-block:: yaml - :caption: config.yaml - - - all_my_messy_folders: &all - - ~/Desktop - - ~/Downloads - - ~/Documents - - ~/Dropbox - - rules: - - folders: *all - filters: ... - actions: ... - - - folders: *all - filters: ... - actions: ... - -You can even use multiple folder lists: - -.. code-block:: yaml - :caption: config.yaml - - private_folders: &private - - '/path/private' - - '~/path/private' - - work_folders: &work - - '/path/work' - - '~/My work folder' - - all_folders: &all - - *private - - *work - - rules: - - folders: *private - filters: ... - actions: ... - - - folders: *work - filters: ... - actions: ... - - - folders: *all - filters: ... - actions: ... - - # same as *all - - folders: - - *work - - *private - filters: ... - actions: ... - - -Filter syntax -============= -``filters`` is a list of :ref:`Filters`. -Filters are defined like this: - -.. code-block:: yaml - :caption: config.yaml - - rules: - - folders: ... - actions: ... - filters: - # filter without parameters - - FilterName - - # filter with a single parameter - - FilterName: parameter - - # filter expecting a list as parameter - - FilterName: - - first - - second - - third - - # filter with multiple parameters - - FilterName: - parameter1: true - option2: 10.51 - third_argument: test string - -.. note:: - Every filter comes with multiple usage examples which should be easy to adapt for your use case! - - -Action syntax -============= -``actions`` is a list of :ref:`Actions`. -Actions can be defined like this: - -.. 
code-block:: yaml - :caption: config.yaml - - rules: - - folders: ... - actions: - # action without parameters - - ActionName - - # action with a single parameter - - ActionName: parameter - - # filter with multiple parameters - - ActionName: - parameter1: true - option2: 10.51 - third_argument: test string - -.. note:: - Every action comes with multiple usage examples which should be easy to adapt for your use case! - -Variable substitution (placeholders) ------------------------------------- -**You can use placeholder variables in your actions.** - -Placeholder variables are used with curly braces ``{var}``. -You always have access to the variables ``{path}``, ``{basedir}`` and ``{relative_path}``: - -- ``{path}`` -- is the full path to the current file -- ``{basedir}`` -- the current base folder (the base folder is the folder you - specify in your configuration). -- ``{relative_path}`` -- the relative path from ``{basedir}`` to ``{path}`` - -Use the dot notation to access properties of ``{path}``, ``{basedir}`` and ``{relative_path}``: - -- ``{path}`` -- the full path to the current file -- ``{path.name}`` -- the full filename including extension -- ``{path.stem}`` -- just the file name without extension -- ``{path.suffix}`` -- the file extension -- ``{path.parent}`` -- the parent folder of the current file -- ``{path.parent.parent}`` -- parent calls are chainable... - -- ``{basedir}`` -- the full path to the current base folder -- ``{basedir.parent}`` -- the full path to the base folder's parent - -and any other property of the python ``pathlib.Path`` (`official documentation -`_) object. - -Additionally :ref:`Filters` may emit placeholder variables when applied to a -path. Check the documentation and examples of the filter to see available -placeholder variables and usage examples. 
- -Some examples include: - -- ``{lastmodified.year}`` -- the year the file was last modified -- ``{regex.yournamedgroup}`` -- anything you can extract via regular expressions -- ``{extension.upper}`` -- the file extension in uppercase -- ... and many more. diff --git a/docs/page/filters.rst b/docs/page/filters.rst deleted file mode 100644 index 3c847548..00000000 --- a/docs/page/filters.rst +++ /dev/null @@ -1,49 +0,0 @@ -.. _filters: -.. py:module:: filters - -Filters -======= - -Created -------- -.. autoclass:: Created - -Duplicate ---------- -.. autoclass:: Duplicate - -Exif ----- -.. autoclass:: Exif - -Extension ---------- -.. autoclass:: Extension - -FileContent ------------ -.. autoclass:: FileContent - -Filename --------- -.. autoclass:: Filename - -FileSize --------- -.. autoclass:: FileSize - -LastModified ------------- -.. autoclass:: LastModified - -MimeType ------------- -.. autoclass:: MimeType - -Python ------- -.. autoclass:: filters.Python - -Regex ------ -.. autoclass:: Regex diff --git a/docs/page/quickstart.rst b/docs/page/quickstart.rst deleted file mode 100644 index 824ff2e3..00000000 --- a/docs/page/quickstart.rst +++ /dev/null @@ -1,64 +0,0 @@ -Quickstart -========== - -Installation ------------- -Requirements: Python 3.6+ - -`organize` is installed via pip: - -``$ pip install organize-tool`` - -If you want all the text extraction capabilities, install with `textract` like this: - -``$ pip3 -U "organize-tool[textract]"`` - - -Creating your first config file -------------------------------- -To edit the configuration in your $EDITOR, run: - - ``$ organize config`` - -For example your configuration file could look like this: - -.. 
code-block:: yaml - :caption: config.yaml - - rules: - # move screenshots into "Screenshots" folder - - folders: - - ~/Desktop - filters: - - filename: - startswith: Screen Shot - actions: - - move: ~/Desktop/Screenshots/ - - # move incomplete downloads older > 30 days into the trash - - folders: - - ~/Downloads - filters: - - extension: - - crdownload - - part - - download - - lastmodified: - days: 30 - actions: - - trash - -.. note:: - You can run ``$ organize config --path`` to show the full path to the configuration file. - - -Simulate and run ----------------- -After you saved the configuration file, run ``$ organize sim`` to show a simulation of how your files would be organized. - -If you like what you see, run ``$ organize run`` to organize your files. - -.. note:: - Congrats! You just automated some tedious cleaning tasks! - Continue to :ref:`Configuration` to see the full potential of organize or skip - directly to the :ref:`Filters` and :ref:`Actions`. diff --git a/docs/rules.md b/docs/rules.md new file mode 100644 index 00000000..607ba92d --- /dev/null +++ b/docs/rules.md @@ -0,0 +1,171 @@ +# Rules + +A organize config file can be written in [YAML](https://learnxinyminutes.com/docs/yaml/) +or [JSON](https://learnxinyminutes.com/docs/json/). See [configuration](configuration.md) +on how to locate your config file. + +The top level element must be a dict with a key "rules". +"rules" contains a list of objects with the required keys "locations" and "actions". + +A minimum config: + +```yaml +rules: + - locations: "~/Desktop" + actions: + - echo: "Hello World!" +``` + +Organize checks your rules from top to bottom. For every resource in each location (top to bottom) +it will check whether the filters apply (top to bottom) and then execute the given actions (top to bottom). + +So with this minimal configuration it will print "Hello World!" for each file it finds in your Desktop. + +## Rule options + +```yml +rules: + # First rule + - name: ... + enabled: ... 
+ targets: ... + locations: ... + subfolders: ... + filter_mode: ... + filters: ... + actions: ... + + # Another rule + - name: ... + enabled: ... + # ... and so on +``` + +The rule options in detail: + +- **name** (`str`): The rule name +- **enabled** (`bool`): Whether the rule is enabled / disabled _(Default: `true`)_ +- **targets** (`str`): `"dirs"` or `"files"` _(Default: `"files"`)_ +- **locations** (`str`|`list`) - A single location string or list of [locations](locations.md) +- **subfolders** (`bool`): Whether to recurse into subfolders of all locations _(Default: `false`)_ +- **filter_mode** (`str`): `"all"`, `"any"` or `"none"` of the filters must apply _(Default: `"all"`)_ +- **filters** (`list`): A list of [filters](filters.md) _(Default: `[]`)_ +- **actions** (`list`): A list of [actions](actions.md) + +## Targeting directories + +When `targets` is set to `dirs`, organize will work on the folders, not on files. + +The filters adjust their meaning automatically. For example the `size` filter sums up +the size of all files contained in the given folder instead of returning the size of a +single file. + +Of course other filters like `exif` or `filecontent` do not work on folders and will +return an error. + +## Templates and placeholders + +Placeholder variables are used with curly braces `{var}`. + +These variables are **always available**: + +`{env}` (`dict`)
+All your environment variables. You can access individual env vars like this: `{env.MY_VARIABLE}`. + +`{path}` ([`pathlib.Path`](https://docs.python.org/3/library/pathlib.html#methods-and-properties))
+The full path to the current file / folder on the local hard drive.
+This is not available for remote locations - in this case use `fs` and `fs_path`.
+
+`{relative_path}` (`str`)<br>
+The relative path of the current file in `{fs}`.
+
+`{now}` (`datetime`)<br>
+The current datetime in the local timezone. + +`{utcnow}` (`datetime`)
+The current UTC datetime. + +`{fs}` (`FS`)
+The filesystem of the current location. Normally you should not need this. + +`{fs_path}` (`str`)
+The path of the current file / folder in related to `fs`. Normally you should not need this. + +In addition to that nearly all filters add new placeholders with information about +the currently handled file / folder. + +Example on how to access the size and hash of a file: + +```yaml +rules: + - locations: ~/Desktop + filters: + - size + - hash + actions: + - echo: "{size} {hash}" +``` + +!!! note + + In order to use a value returned by a filter it must be listed in the filters! + +## Advanced: Aliases + +Instead of repeating the same locations / actions / filters in each and every rule you +can use an alias for multiple locations which you can then reference in each rule. + +Aliases are a standard feature of the YAML syntax. + +```yml +all_my_messy_folders: &all + - ~/Desktop + - ~/Downloads + - ~/Documents + - ~/Dropbox + +rules: + - locations: *all + filters: ... + actions: ... + + - locations: *all + filters: ... + actions: ... +``` + +You can even use multiple folder lists: + +```yml +private_folders: &private + - "/path/private" + - "~/path/private" + +work_folders: &work + - "/path/work" + - "~/My work folder" + +all_folders: &all + - *private + - *work + +rules: + - locations: *private + filters: ... + actions: ... + + - locations: *work + filters: ... + actions: ... + + - locations: *all + filters: ... + actions: ... + + # same as *all + - locations: + - *work + - *private + filters: ... + actions: ... +``` diff --git a/docs/updating-from-v1.md b/docs/updating-from-v1.md new file mode 100644 index 00000000..c3094edd --- /dev/null +++ b/docs/updating-from-v1.md @@ -0,0 +1,96 @@ +# Updating from organize v1.x + +First of all, thank you for being a long time user of `organize`! + +I tried to keep the amount of breaking changes small but could not avoid them +completely. Feel free to pin organize to v1.x, but then you're missing the party. + +Please open a issue on Github if you need help migrating your config file! 
+
+## Folders
+
+Folders have become [Locations](locations.md) in organize v2.
+
+- `folders` must be renamed to `locations` in your config.
+- REMOVED: The glob syntax (`/Docs/**/*.png`).
+  See [Location options](locations.md#location-options).
+- REMOVED: The exclamation mark exclude syntax (`! ~/Desktop/exclude`).
+  See [Location options](locations.md#location-options).
+- All keys (filter names, action names, option names) now must be lowercase.
+
+## Placeholders
+
+organize v2 uses the Jinja template engine. You may need to change some of your
+placeholders.
+
+- `{basedir}` is no longer available.
+- You have to replace undocumented placeholders like this:
+
+```python
+{created.year}-{created.month:02}-{created.day:02}
+```
+
+With this:
+
+```python
+{created.strftime('%Y-%m-%d')}
+```
+
+If you need to left pad other numbers you can now use the following syntax:
+
+```python
+{ "{:02}".format(your_variable) }
+# or
+{ '%02d' % your_variable }
+```
+
+## Filters
+
+- [`filename`](filters.md#name) is renamed to `name`.
+- [`filesize`](filters.md#size) is renamed to `size`.
+- [`created`](filters.md#created) no longer accepts a timezone and uses the local timezone by default.
+- [`lastmodified`](filters.md#lastmodified) no longer accepts a timezone and uses the local timezone by default.
+
+## Actions
+
+The copy, move and rename actions got a whole lot more powerful. You now have several
+conflict options and can specify exactly how a file should be renamed in case of a
+conflict.
+
+This means you might need to change your config to use the new parameters.
+
+- [`copy`](actions.md#copy) arguments changed to support conflict resolution options.
+- [`move`](actions.md#move) arguments changed to support conflict resolution options.
+- [`rename`](actions.md#rename) arguments changed to support conflict resolution options.
+ +Example: + +```yml +rules: + - folders: ~/Desktop + filters: + - extension: pdf + actions: + - move: + dest: ~/Documents/PDFs/ + overwrite: false + counter_seperator: "-" +``` + +becomes (organize v2): + +```yaml +rules: + - locations: ~/Desktop + filters: + - extension: pdf + actions: + - move: + dest: ~/Documents/PDFs/ + on_conflict: rename_new + rename_template: "{name}-{:02}.format(counter){extension}" +``` + +If you used `move`, `copy` or `rename` without arguments, nothing changes for you. + +That's it. Again, feel free to open a issue if you have trouble migrating your config. diff --git a/main.py b/main.py index 3a85f7ec..ad67468d 100644 --- a/main.py +++ b/main.py @@ -1,4 +1,4 @@ -from organize.cli import main +from organize.cli import cli if __name__ == "__main__": - main() + cli() diff --git a/mkdocs.yml b/mkdocs.yml new file mode 100644 index 00000000..6f5262c7 --- /dev/null +++ b/mkdocs.yml @@ -0,0 +1,37 @@ +site_name: organize +repo_url: https://github.com/tfeldmann/organize/ +site_author: "Thomas Feldmann" +nav: + - Home: index.md + - Updating from organize v1.x: updating-from-v1.md + - Configuration: configuration.md + - Rules: rules.md + - Locations: locations.md + - Filters: filters.md + - Actions: actions.md + - Changelog: changelog.md +plugins: + - search + - include-markdown + - autorefs + - mkdocstrings: + default_handler: python + handlers: + python: + selection: + members: false + rendering: + show_bases: false + show_root_toc_entry: false + show_root_heading: false + show_source: false + watch: + - organize + +markdown_extensions: + - admonition + - toc: + permalink: "#" + +theme: + name: readthedocs diff --git a/organize/__init__.py b/organize/__init__.py index 502da78e..b9df3107 100644 --- a/organize/__init__.py +++ b/organize/__init__.py @@ -1,35 +1,11 @@ import logging import logging.config -import os - -import appdirs # type: ignore -import colorama # type: ignore import yaml -from pathlib import Path - 
-colorama.init(autoreset=True) - -# prepare config and log folders -APP_DIRS = appdirs.AppDirs("organize") - -# setting the $ORGANIZE_CONFIG env variable overrides the default config path -if os.getenv("ORGANIZE_CONFIG"): - CONFIG_PATH = Path(os.getenv("ORGANIZE_CONFIG", "")).resolve() - CONFIG_DIR = CONFIG_PATH.parent -else: - CONFIG_DIR = Path(APP_DIRS.user_config_dir) - CONFIG_PATH = CONFIG_DIR / "config.yaml" - -LOG_DIR = Path(APP_DIRS.user_log_dir) -LOG_PATH = LOG_DIR / "organize.log" - -for folder in (CONFIG_DIR, LOG_DIR): - folder.mkdir(parents=True, exist_ok=True) +from fs import appfs -# create empty config file if it does not exist -if not CONFIG_PATH.exists(): - CONFIG_PATH.touch() +with appfs.UserLogFS("organize") as log_fs: + LOG_PATH = log_fs.getsyspath("organize.log") # configure logging LOGGING_CONFIG = """ diff --git a/organize/__main__.py b/organize/__main__.py index 02ea28da..42a01aa0 100644 --- a/organize/__main__.py +++ b/organize/__main__.py @@ -1,6 +1,4 @@ -import sys - if __name__ == "__main__": - from .cli import main + from .cli import cli - sys.exit(main()) + cli() diff --git a/organize/__version__.py b/organize/__version__.py index a0865bba..8c0d5d5b 100644 --- a/organize/__version__.py +++ b/organize/__version__.py @@ -1 +1 @@ -__version__ = "1.10.1" +__version__ = "2.0.0" diff --git a/organize/actions/__init__.py b/organize/actions/__init__.py index 7b92a89d..5141af73 100644 --- a/organize/actions/__init__.py +++ b/organize/actions/__init__.py @@ -1,3 +1,6 @@ +from typing import Dict, Type +from .action import Action +from .confirm import Confirm from .copy import Copy from .delete import Delete from .echo import Echo @@ -6,4 +9,19 @@ from .python import Python from .rename import Rename from .shell import Shell +from .symlink import Symlink from .trash import Trash + +ACTIONS = { + Confirm.name: Confirm, + Copy.name: Copy, + Delete.name: Delete, + Echo.name: Echo, + MacOSTags.name: MacOSTags, + Move.name: Move, + Python.name: Python, 
+ Rename.name: Rename, + Shell.name: Shell, + Symlink.name: Symlink, + Trash.name: Trash, +} # type: Dict[str, Type[Action]] diff --git a/organize/actions/action.py b/organize/actions/action.py index dfdbaccf..775aa870 100644 --- a/organize/actions/action.py +++ b/organize/actions/action.py @@ -1,41 +1,66 @@ -from textwrap import indent -from typing import Any, Mapping, Optional, Callable +import logging +from typing import Any, Dict +from typing import Optional as tyOptional +from typing import Union -from organize.utils import DotDict +from schema import Optional, Or, Schema +from organize.console import pipeline_error, pipeline_message -class Error(Exception): - pass +logger = logging.getLogger(__name__) -class TemplateAttributeError(Error): +class Error(Exception): pass class Action: - pre_print_hook = None # type: Optional[Callable] + name = None # type: Union[str, None] + arg_schema = None + schema_support_instance_without_args = False - def run(self, **kwargs) -> Optional[Mapping[str, Any]]: - return self.pipeline(DotDict(kwargs)) + @classmethod + def get_name(cls): + if cls.name: + return cls.name + return cls.__name__.lower() - def pipeline(self, args: DotDict) -> Optional[Mapping[str, Any]]: + @classmethod + def get_schema(cls): + if cls.arg_schema: + arg_schema = cls.arg_schema + else: + arg_schema = Or( + str, + [str], + Schema({}, ignore_extra_keys=True), + ) + if cls.schema_support_instance_without_args: + return Or( + cls.get_name(), + { + cls.get_name(): arg_schema, + }, + ) + return { + cls.get_name(): arg_schema, + } + + def run(self, simulate: bool, **kwargs) -> tyOptional[Dict[str, Any]]: + return self.pipeline(kwargs, simulate=simulate) + + def pipeline(self, args: dict, simulate: bool) -> tyOptional[Dict[str, Any]]: raise NotImplementedError - def print(self, msg) -> None: - """ print a message for the user """ - if callable(self.pre_print_hook): - self.pre_print_hook() # pylint: disable=not-callable - print(indent("- [%s] %s" % 
(self.__class__.__name__, msg), " " * 4)) - - @staticmethod - def fill_template_tags(msg: str, args) -> str: - try: - return msg.format(**args) - except AttributeError as exc: - cause = exc.args[0] - raise TemplateAttributeError( - 'Missing template variable %s for "%s"' % (cause, msg) - ) + def print(self, *msg) -> None: + """print a message for the user""" + text = " ".join(str(x) for x in msg) + for line in text.splitlines(): + pipeline_message(source=self.get_name(), msg=line) + + def print_error(self, msg: str): + for line in msg.splitlines(): + pipeline_error(source=self.get_name(), msg=line) def __str__(self) -> str: return self.__class__.__name__ diff --git a/organize/actions/confirm.py b/organize/actions/confirm.py new file mode 100644 index 00000000..6fadc54e --- /dev/null +++ b/organize/actions/confirm.py @@ -0,0 +1,40 @@ +from rich.prompt import Prompt +from schema import Optional, Or + +from organize import console +from organize.utils import Template + +from .action import Action + + +class Confirm(Action): + + """Ask for confirmation before continuing.""" + + name = "confirm" + schema_support_instance_without_args = True + + arg_schema = Or( + str, + { + Optional("msg"): str, + Optional("default"): bool, + }, + ) + + def __init__(self, msg="Continue?", default=True): + self.msg = Template.from_string(msg) + self.default = default + + def pipeline(self, args: dict, simulate: bool): + msg = self.msg.render(**args) + result = console.pipeline_confirm( + self.get_name(), + msg, + default=self.default, + ) + if not result: + raise StopIteration("Aborted") + + def __str__(self) -> str: + return 'Confirm(msg="%s")' % self.msg diff --git a/organize/actions/copy.py b/organize/actions/copy.py index a9c3b256..72880965 100644 --- a/organize/actions/copy.py +++ b/organize/actions/copy.py @@ -1,120 +1,123 @@ -import logging -import os -import shutil +from typing import Callable, Union -from organize.utils import Mapping, find_unused_filename, fullpath +from fs 
import open_fs +from fs import errors +from fs.base import FS +from fs.copy import copy_dir, copy_file +from fs.path import dirname +from schema import Optional, Or -from .action import Action -from .trash import Trash +from organize.utils import Template, safe_description, SimulationFS -logger = logging.getLogger(__name__) +from .action import Action +from .copymove_utils import CONFLICT_OPTIONS, check_conflict, dst_from_options class Copy(Action): - """ - Copy a file to a new location. + """Copy a file or dir to a new location. + If the specified path does not exist it will be created. - :param str dest: - The destination where the file should be copied to. - If `dest` ends with a slash / backslash, the file will be copied into - this folder and keep its original name. - - :param bool overwrite: - specifies whether existing files should be overwritten. - Otherwise it will start enumerating files (append a counter to the - filename) to resolve naming conflicts. [Default: False] - - :param str counter_separator: - specifies the separator between filename and the appended counter. - Only relevant if **overwrite** is disabled. [Default: ``\' \'``] - - Examples: - - Copy all pdfs into `~/Desktop/somefolder/` and keep filenames - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: ~/Desktop - filters: - - extension: pdf - actions: - - copy: '~/Desktop/somefolder/' - - - Use a placeholder to copy all .pdf files into a "PDF" folder and all .jpg - files into a "JPG" folder. Existing files will be overwritten. - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: ~/Desktop - filters: - - extension: - - pdf - - jpg - actions: - - copy: - dest: '~/Desktop/{extension.upper}/' - overwrite: true - - - Copy into the folder `Invoices`. Keep the filename but do not - overwrite existing files. To prevent overwriting files, an index is - added to the filename, so `somefile.jpg` becomes `somefile 2.jpg`. 
- The counter separator is `' '` by default, but can be changed using - the `counter_separator` property. - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: ~/Desktop/Invoices - filters: - - extension: - - pdf - actions: - - copy: - dest: '~/Documents/Invoices/' - overwrite: false - counter_separator: '_' + Args: + dest (str): + The destination where the file / dir should be copied to. + If `dest` ends with a slash, it is assumed to be a target directory + and the file / dir will be copied into `dest` and keep its name. + + on_conflict (str): + What should happen in case **dest** already exists. + One of `skip`, `overwrite`, `trash`, `rename_new` and `rename_existing`. + Defaults to `rename_new`. + + rename_template (str): + A template for renaming the file / dir in case of a conflict. + Defaults to `{name} {counter}{extension}`. + + filesystem (str): + (Optional) A pyfilesystem opener url of the filesystem you want to copy to. + If this is not given, the local filesystem is used. + + The next action will work with the created copy. """ - def __init__(self, dest: str, overwrite=False, counter_separator=" ") -> None: - self.dest = dest - self.overwrite = overwrite - self.counter_separator = counter_separator - - def pipeline(self, args: Mapping) -> None: - path = args["path"] - simulate = args["simulate"] - - expanded_dest = self.fill_template_tags(self.dest, args) - # if only a folder path is given we append the filename to have the full - # path. 
We use os.path for that because pathlib removes trailing slashes - if expanded_dest.endswith(("\\", "/")): - expanded_dest = os.path.join(expanded_dest, path.name) - - new_path = fullpath(expanded_dest) - if new_path.exists() and not new_path.samefile(path): - if self.overwrite: - self.print("File already exists") - Trash().run(path=new_path, simulate=simulate) + name = "copy" + arg_schema = Or( + str, + { + "dest": str, + Optional("on_conflict"): Or(*CONFLICT_OPTIONS), + Optional("rename_template"): str, + Optional("filesystem"): object, + }, + ) + + def __init__( + self, + dest: str, + on_conflict="rename_new", + rename_template="{name} {counter}{extension}", + filesystem: Union[str, FS] = "", + ) -> None: + if on_conflict not in CONFLICT_OPTIONS: + raise ValueError( + "on_conflict must be one of %s" % ", ".join(CONFLICT_OPTIONS) + ) + + self.dest = Template.from_string(dest) + self.conflict_mode = on_conflict + self.rename_template = Template.from_string(rename_template) + self.filesystem = filesystem + + def pipeline(self, args: dict, simulate: bool): + src_fs = args["fs"] # type: FS + src_path = args["fs_path"] + + # should we copy a dir or a file? 
+ copy_action: Callable[[FS, str, FS, str], None] + if src_fs.isdir(src_path): + copy_action = copy_dir + elif src_fs.isfile(src_path): + copy_action = copy_file + + dst_fs, dst_path = dst_from_options( + src_path=src_path, + dest=self.dest, + filesystem=self.filesystem, + args=args, + ) + + # check for conflicts + skip, dst_path = check_conflict( + src_fs=src_fs, + src_path=src_path, + dst_fs=dst_fs, + dst_path=dst_path, + conflict_mode=self.conflict_mode, + rename_template=self.rename_template, + simulate=simulate, + print=self.print, + ) + + try: + dst_fs = open_fs(dst_fs, create=False, writeable=True) + except errors.CreateFailed: + if not simulate: + dst_fs = open_fs(dst_fs, create=True, writeable=True) else: - new_path = find_unused_filename( - path=new_path, separator=self.counter_separator - ) + dst_fs = SimulationFS(dst_fs) - self.print('Copy to "%s"' % new_path) - if not simulate: - logger.info("Creating folder if not exists: %s", new_path.parent) - new_path.parent.mkdir(parents=True, exist_ok=True) - logger.info('Copying "%s" to "%s"', path, new_path) - shutil.copy2(src=str(path), dst=str(new_path)) + if not skip: + self.print("Copy to %s" % safe_description(dst_fs, dst_path)) + if not simulate: + dst_fs.makedirs(dirname(dst_path), recreate=True) + copy_action(src_fs, src_path, dst_fs, dst_path) - # the next actions should handle the original file - return None + # the next action should work with the newly created copy + return { + "fs": dst_fs, + "fs_path": dst_path, + } def __str__(self) -> str: - return "Copy(dest=%s, overwrite=%s)" % (self.dest, self.overwrite) + return "Copy(dest=%s, conflict_mode=%s)" % (self.dest, self.conflict_mode) diff --git a/organize/actions/copymove_utils.py b/organize/actions/copymove_utils.py new file mode 100644 index 00000000..3ff404a2 --- /dev/null +++ b/organize/actions/copymove_utils.py @@ -0,0 +1,169 @@ +from typing import Callable, Union + +import jinja2 +from fs import errors, open_fs +from fs.base import FS +from 
fs.move import move_dir, move_file +from fs.path import basename, dirname, join, splitext +from jinja2 import Template + +from organize.utils import expand_args, is_same_resource, safe_description + +from .trash import Trash + +CONFLICT_OPTIONS = ( + "skip", + "overwrite", + "trash", + "rename_new", + "rename_existing", + # "keep_newer", + # "keep_older", +) + + +def next_free_name(fs: FS, template: jinja2.Template, name: str, extension: str) -> str: + """ + Increments {counter} in the template until the given resource does not exist. + + Args: + fs (FS): the filesystem to work on + template (jinja2.Template): + A jinja2 template with placeholders for {name}, {extension} and {counter} + name (str): The wanted filename + extension (str): the wanted extension + + Raises: + ValueError if no free name can be found with the given template.so + + Returns: + (str) A filename according to the given template that does not exist on **fs**. + """ + counter = 1 + prev_candidate = "" + while True: + candidate = template.render(name=name, extension=extension, counter=counter) + if not fs.exists(candidate): + return candidate + if prev_candidate == candidate: + raise ValueError( + "Could not find a free filename for the given template. " + 'Maybe you forgot the "{counter}" placeholder?' + ) + prev_candidate = candidate + counter += 1 + + +def resolve_overwrite_conflict( + src_fs: FS, + src_path: str, + dst_fs: FS, + dst_path: str, + conflict_mode: str, + rename_template: Template, + simulate: bool, + print: Callable, +) -> Union[None, str]: + """ + Returns: + - A new path if applicable + - None if this action should be skipped. + """ + if is_same_resource(src_fs, src_path, dst_fs, dst_path): + print("Same resource: Skipped.") + return None + + if conflict_mode == "trash": + Trash().run(fs=dst_fs, fs_path=dst_path, simulate=simulate) + return dst_path + + elif conflict_mode == "skip": + print("Skipped.") + return None + + elif conflict_mode == "overwrite": + print("Overwrite %s." 
% safe_description(dst_fs, dst_path)) + return dst_path + + elif conflict_mode == "rename_new": + stem, ext = splitext(dst_path) + name = next_free_name( + fs=dst_fs, + name=stem, + extension=ext, + template=rename_template, + ) + return name + + elif conflict_mode == "rename_existing": + stem, ext = splitext(dst_path) + name = next_free_name( + fs=dst_fs, + name=stem, + extension=ext, + template=rename_template, + ) + print('Renaming existing to: "%s"' % name) + if not simulate: + if dst_fs.isdir(dst_path): + move_dir(dst_fs, dst_path, dst_fs, name) + elif dst_fs.isfile(dst_path): + move_file(dst_fs, dst_path, dst_fs, name) + return dst_path + + raise ValueError("Unknown conflict_mode %s" % conflict_mode) + + +def dst_from_options(src_path, dest, filesystem, args: dict): + # append the original resource name if destination is a dir (ends with "/") + dst_path = expand_args(dest, args) + if dst_path.endswith(("\\", "/")): + dst_path = join(dst_path, basename(src_path)) + + if filesystem: + if isinstance(filesystem, str): + dst_fs = expand_args(filesystem, args) + else: + dst_fs = filesystem + else: + dst_fs = dirname(dst_path) + dst_path = basename(dst_path) + return dst_fs, dst_path + + +def check_conflict( + src_fs: FS, + src_path: str, + dst_fs: FS, + dst_path: str, + conflict_mode: str, + rename_template: Template, + simulate: bool, + print: Callable, +): + skip = False + try: + check_fs = open_fs(dst_fs, create=False, writeable=True) + if check_fs.exists(dst_path): + print( + '%s already exists! (conflict mode is "%s").' 
+ % (safe_description(dst_fs, dst_path), conflict_mode) + ) + new_path = resolve_overwrite_conflict( + src_fs=src_fs, + src_path=src_path, + dst_fs=check_fs, + dst_path=dst_path, + conflict_mode=conflict_mode, + rename_template=rename_template, + simulate=simulate, + print=print, + ) + if new_path is not None: + dst_path = new_path + else: + skip = True + except errors.CreateFailed: + pass + + return skip, dst_path diff --git a/organize/actions/delete.py b/organize/actions/delete.py index 997e7969..c2c90598 100644 --- a/organize/actions/delete.py +++ b/organize/actions/delete.py @@ -1,10 +1,7 @@ -import os import logging -from typing import Mapping - -from pathlib import Path - +from fs.base import FS from .action import Action +from organize.utils import safe_description logger = logging.getLogger(__name__) @@ -16,29 +13,22 @@ class Delete(Action): Deleted files have no recovery option! Using the `Trash` action is strongly advised for most use-cases! - - Example: - - Delete all JPGs and PNGs on the desktop which are older than one year: - - .. 
code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Desktop' - - filters: - - lastmodified: - - days: 365 - - extension: - - png - - jpg - - actions: - - delete """ - def pipeline(self, args: Mapping): - path = args["path"] # type: Path - simulate = args["simulate"] # type: bool - self.print('Delete "%s"' % path) + name = "delete" + + @classmethod + def get_schema(cls): + return cls.name + + def pipeline(self, args: dict, simulate: bool): + fs = args["fs"] # type: FS + fs_path = args["fs_path"] # type: str + desc = safe_description(fs=fs, path=fs_path) + self.print('Deleting "%s"' % desc) if not simulate: - logger.info("Deleting file %s.", path) - os.remove(str(path)) + logger.info("Deleting %s.", desc) + if fs.isdir(fs_path): + fs.removetree(fs_path) + elif fs.isfile(fs_path): + fs.remove(fs_path) diff --git a/organize/actions/echo.py b/organize/actions/echo.py index 8c502ba7..81eb0d01 100644 --- a/organize/actions/echo.py +++ b/organize/actions/echo.py @@ -1,82 +1,29 @@ -import logging - +from ..utils import Template from .action import Action -logger = logging.getLogger(__name__) - class Echo(Action): - """ - Prints the given (formatted) message. This can be useful to test your rules, - especially if you use formatted messages. - - :param str msg: The message to print (can be formatted) - - Example: - - Prints "Found old file" for each file older than one year: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: ~/Desktop - filters: - - lastmodified: - days: 365 - actions: - - echo: 'Found old file' + """Prints the given message. - - Prints "Hello World!" and filepath for each file on the desktop: + This can be useful to test your rules, especially in combination with placeholder + variables. - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: - - ~/Desktop - actions: - - echo: 'Hello World! {path}' - - - This will print something like ``Found a PNG: "test.png"`` for each - file on your desktop: - - .. 
code-block:: yaml - :caption: config.yaml - - rules: - - folders: - - ~/Desktop - filters: - - Extension - actions: - - echo: 'Found a {extension.upper}: "{path.name}"' - - - Show the ``{basedir}`` and ``{path}`` of all files in '~/Downloads', - '~/Desktop' and their subfolders: + Args: + msg (str): The message to print. Accepts placeholder variables. + """ - .. code-block:: yaml - :caption: config.yaml + name = "echo" - rules: - - folders: - - ~/Desktop - - ~/Downloads - subfolders: true - actions: - - echo: 'Basedir: {basedir}' - - echo: 'Path: {path}' - """ + @classmethod + def get_schema(cls): + return {cls.name: str} - def __init__(self, msg) -> None: - self.msg = msg - self.log = logging.getLogger(__name__) + def __init__(self, msg): + self.msg = Template.from_string(msg) - def pipeline(self, args) -> None: - path = args["path"] - logger.debug('Echo msg "%s", path: "%s", args: "%s"', self.msg, path, args) - full_msg = self.fill_template_tags(self.msg, args) - logger.info("Console output: %s", full_msg) + def pipeline(self, args: dict, simulate: bool) -> None: + full_msg = self.msg.render(**args) self.print("%s" % full_msg) def __str__(self) -> str: diff --git a/organize/actions/macos_tags.py b/organize/actions/macos_tags.py index 86e18597..61707725 100644 --- a/organize/actions/macos_tags.py +++ b/organize/actions/macos_tags.py @@ -1,9 +1,10 @@ import logging import sys -from pathlib import Path -from typing import Mapping -import simplematch as sm # type: ignore +import simplematch as sm +from schema import Or + +from organize.utils import Template from .action import Action @@ -12,87 +13,45 @@ class MacOSTags(Action): + """Add macOS tags. + + Args: + *tags (str): A list of tags or a single tag. + + The color can be specified in brackets after the tag name, for example: + + ```yaml + macos_tags: "Invoices (red)" + ``` + + Available colors are `none`, `gray`, `green`, `purple`, `blue`, `yellow`, `red` and + `orange`. """ - Add macOS tags. 
- - Example: - - Add a single tag: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Documents/Invoices' - - filters: - - filename: - startswith: "Invoice" - - extension: pdf - - actions: - - macos_tags: Invoice - - - Adding multiple tags ("Invoice" and "Important"): - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Documents/Invoices' - - filters: - - filename: - startswith: "Invoice" - - extension: pdf - - actions: - - macos_tags: - - Important - - Invoice - - - Specify tag colors. Available colors are `none`, `gray`, `green`, `purple`, `blue`, `yellow`, `red`, `orange`. - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Documents/Invoices' - - filters: - - filename: - startswith: "Invoice" - - extension: pdf - - actions: - - macos_tags: - - Important (green) - - Invoice (purple) - - - Add a templated tag with color: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Documents/Invoices' - - filters: - - created - - actions: - - macos_tags: - - Year-{created.year} (red) - """ + + name = "macos_tags" + + @classmethod + def get_schema(cls): + return {cls.name: Or(str, [str])} def __init__(self, *tags): - self.tags = tags + self.tags = [Template(tag) for tag in tags] - def pipeline(self, args: Mapping): - path = args["path"] # type: Path - simulate = args["simulate"] # type: bool + def pipeline(self, args: dict, simulate: bool): + fs = args["fs"] + fs_path = args["fs_path"] + path = fs.getsyspath(fs_path) if sys.platform != "darwin": self.print("The macos_tags action is only available on macOS") return - import macos_tags # type: ignore + import macos_tags COLORS = [c.name.lower() for c in macos_tags.Color] for template in self.tags: - tag = self.fill_template_tags(template, args) + tag = template.render(**args) name, color = self._parse_tag(tag) if color not in COLORS: @@ -105,11 +64,11 @@ def pipeline(self, args: Mapping): _tag = macos_tags.Tag( name=name, 
color=macos_tags.Color[color.upper()], - ) + ) # type: ignore macos_tags.add(_tag, file=str(path)) def _parse_tag(self, s): - """ parse a tag definition and return a tuple (name, color) """ + """parse a tag definition and return a tuple (name, color)""" result = sm.match("{name} ({color})", s) if not result: return s, "none" diff --git a/organize/actions/move.py b/organize/actions/move.py index 3dc55dc1..514aeb11 100644 --- a/organize/actions/move.py +++ b/organize/actions/move.py @@ -1,130 +1,126 @@ -import logging -import os -import shutil -from typing import Mapping +from typing import Callable, Union -from pathlib import Path -from organize.utils import DotDict, find_unused_filename, fullpath +from fs import open_fs +from fs import errors +from fs.base import FS +from fs.move import move_dir, move_file +from fs.path import dirname +from schema import Optional, Or -from .action import Action -from .trash import Trash +from organize.utils import Template, safe_description, SimulationFS -logger = logging.getLogger(__name__) +from .action import Action +from .copymove_utils import CONFLICT_OPTIONS, check_conflict, dst_from_options class Move(Action): - """ - Move a file to a new location. The file can also be renamed. + """Move a file to a new location. + + The file can also be renamed. If the specified path does not exist it will be created. If you only want to rename the file and keep the folder, it is - easier to use the Rename-Action. - - :param str dest: - The destination folder or path. - If `dest` ends with a slash / backslash, the file will be moved into - this folder and not renamed. - - :param bool overwrite: - specifies whether existing files should be overwritten. - Otherwise it will start enumerating files (append a counter to the - filename) to resolve naming conflicts. [Default: False] - - :param str counter_separator: - specifies the separator between filename and the appended counter. - Only relevant if **overwrite** is disabled. 
[Default: ``\' \'``] - - Examples: - - Move all pdfs and jpgs from the desktop into the folder "~/Desktop/media/". - Filenames are not changed. - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: ~/Desktop - filters: - - extension: - - pdf - - jpg - actions: - - move: '~/Desktop/media/' - - - Use a placeholder to move all .pdf files into a "PDF" folder and all - .jpg files into a "JPG" folder. Existing files will be overwritten. - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: ~/Desktop - filters: - - extension: - - pdf - - jpg - actions: - - move: - dest: '~/Desktop/{extension.upper}/' - overwrite: true - - - Move pdfs into the folder `Invoices`. Keep the filename but do not - overwrite existing files. To prevent overwriting files, an index is - added to the filename, so ``somefile.jpg`` becomes ``somefile 2.jpg``. - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: ~/Desktop/Invoices - filters: - - extension: - - pdf - actions: - - move: - dest: '~/Documents/Invoices/' - overwrite: false - counter_separator: '_' + easier to use the `rename` action. + + Args: + dest (str): + The destination where the file / dir should be moved to. + If `dest` ends with a slash, it is assumed to be a target directory + and the file / dir will be moved into `dest` and keep its name. + + on_conflict (str): + What should happen in case **dest** already exists. + One of `skip`, `overwrite`, `trash`, `rename_new` and `rename_existing`. + Defaults to `rename_new`. + + rename_template (str): + A template for renaming the file / dir in case of a conflict. + Defaults to `{name} {counter}{extension}`. + + filesystem (str): + (Optional) A pyfilesystem opener url of the filesystem you want to copy to. + If this is not given, the local filesystem is used. + + The next action will work with the moved file / dir. 
""" - def __init__(self, dest: str, overwrite=False, counter_separator=" "): - self.dest = dest - self.overwrite = overwrite - self.counter_separator = counter_separator - - def pipeline(self, args: DotDict) -> Mapping[str, Path]: - path = args["path"] - simulate = args["simulate"] - - expanded_dest = self.fill_template_tags(self.dest, args) - # if only a folder path is given we append the filename to have the full - # path. We use os.path for that because pathlib removes trailing slashes - if expanded_dest.endswith(("\\", "/")): - expanded_dest = os.path.join(expanded_dest, path.name) - - new_path = fullpath(expanded_dest) - new_path_exists = new_path.exists() - new_path_samefile = new_path_exists and new_path.samefile(path) - if new_path_exists and not new_path_samefile: - if self.overwrite: - self.print("File already exists") - Trash().run(path=new_path, simulate=simulate) + name = "move" + arg_schema = Or( + str, + { + "dest": str, + Optional("on_conflict"): Or(*CONFLICT_OPTIONS), + Optional("rename_template"): str, + Optional("filesystem"): object, + }, + ) + + def __init__( + self, + dest: str, + on_conflict="rename_new", + rename_template="{name} {counter}{extension}", + filesystem: Union[str, FS] = "", + ) -> None: + if on_conflict not in CONFLICT_OPTIONS: + raise ValueError( + "on_conflict must be one of %s" % ", ".join(CONFLICT_OPTIONS) + ) + + self.dest = Template.from_string(dest) + self.conflict_mode = on_conflict + self.rename_template = Template.from_string(rename_template) + self.filesystem = filesystem + + def pipeline(self, args: dict, simulate: bool): + src_fs = args["fs"] # type: FS + src_path = args["fs_path"] + + move_action: Callable[[FS, str, FS, str], None] + if src_fs.isdir(src_path): + move_action = move_dir + elif src_fs.isfile(src_path): + move_action = move_file + + dst_fs, dst_path = dst_from_options( + src_path=src_path, + dest=self.dest, + filesystem=self.filesystem, + args=args, + ) + + # check for conflicts + skip, dst_path = 
check_conflict( + src_fs=src_fs, + src_path=src_path, + dst_fs=dst_fs, + dst_path=dst_path, + conflict_mode=self.conflict_mode, + rename_template=self.rename_template, + simulate=simulate, + print=self.print, + ) + + try: + dst_fs = open_fs(dst_fs, create=False, writeable=True) + except errors.CreateFailed: + if not simulate: + dst_fs = open_fs(dst_fs, create=True, writeable=True) else: - new_path = find_unused_filename( - path=new_path, separator=self.counter_separator - ) - - if new_path_samefile and new_path == path: - self.print("Keep location") - else: - self.print('Move to "%s"' % new_path) + dst_fs = SimulationFS(dst_fs) + + if not skip: + self.print("Move to %s" % safe_description(dst_fs, dst_path)) if not simulate: - logger.info("Creating folder if not exists: %s", new_path.parent) - new_path.parent.mkdir(parents=True, exist_ok=True) - logger.info('Moving "%s" to "%s"', path, new_path) - shutil.move(src=str(path), dst=str(new_path)) + dst_fs.makedirs(dirname(dst_path), recreate=True) + move_action(src_fs, src_path, dst_fs, dst_path) - return {"path": new_path} + # the next action should work with the newly created copy + return { + "fs": dst_fs, + "fs_path": dst_path, + } def __str__(self) -> str: - return "Move(dest=%s, overwrite=%s)" % (self.dest, self.overwrite) + return "Move(dest=%s, conflict_mode=%s)" % (self.dest, self.conflict_mode) diff --git a/organize/actions/python.py b/organize/actions/python.py index f378e1eb..f9598fe3 100644 --- a/organize/actions/python.py +++ b/organize/actions/python.py @@ -1,8 +1,9 @@ import logging import textwrap -from typing import Any, Mapping, Optional, Iterable +from typing import Any, Dict, Iterable +from typing import Optional as tyOptional -from organize.utils import DotDict +from schema import Optional, Or from .action import Action @@ -11,62 +12,28 @@ class Python(Action): - r""" - Execute python code in your config file. + """Execute python code. 
- :param str code: The python code to execute - - Examples: - - A basic example that shows how to get the current file path and do some - printing in a for loop. The ``|`` is yaml syntax for defining a string - literal spanning multiple lines. - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Desktop' - actions: - - python: | - print('The path of the current file is %s' % path) - for _ in range(5): - print('Heyho, its me from the loop') - - - You can access filter data: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: ~/Desktop - filters: - - regex: '^(?P.*)\.(?P.*)$' - actions: - - python: | - print('Name: %s' % regex.name) - print('Extension: %s' % regex.extension) - - - You have access to all the python magic -- do a google search for each - filename starting with an underscore: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: ~/Desktop - filters: - - filename: - startswith: '_' - actions: - - python: | - import webbrowser - webbrowser.open('https://www.google.com/search?q=%s' % path.stem) + Args: + code (str): The python code to execute. + run_in_simulation (bool): + Whether to execute this code in simulation mode (Default false). 
""" - def __init__(self, code) -> None: + name = "python" + arg_schema = Or( + str, + { + "code": str, + Optional("run_in_simulation"): bool, + }, + ) + + def __init__(self, code, run_in_simulation=False) -> None: self.code = textwrap.dedent(code) + self.run_in_simulation = run_in_simulation - def usercode(self, *args, **kwargs) -> Optional[Any]: + def usercode(self, *args, **kwargs): pass # will be overwritten by `create_method` def create_method(self, name: str, argnames: Iterable[str], code: str) -> None: @@ -80,13 +47,12 @@ def create_method(self, name: str, argnames: Iterable[str], code: str) -> None: ) exec(funccode, globals_, locals_) # pylint: disable=exec-used - def pipeline(self, args: DotDict) -> Optional[Mapping[str, Any]]: - simulate = args.simulate - if simulate: - self.print("Code not run in simulation. (Args: %s)" % args) + def pipeline(self, args: dict, simulate: bool) -> tyOptional[Dict[str, Any]]: + if simulate and not self.run_in_simulation: + self.print("** Code not run in simulation. 
**") return None - logger.info('Executing python:\n"""\n%s\n""", args=%s', self.code, args) + logger.info('Executing python:\n"""\n%s\n"""', self.code) self.create_method(name="usercode", argnames=args.keys(), code=self.code) self.print("Running python script.") diff --git a/organize/actions/rename.py b/organize/actions/rename.py index ee2bbb90..9ea869c0 100644 --- a/organize/actions/rename.py +++ b/organize/actions/rename.py @@ -1,103 +1,112 @@ import logging import os -from typing import Mapping +from typing import Callable -from pathlib import Path -from organize.utils import find_unused_filename +from fs import path +from fs.base import FS +from fs.move import move_dir, move_file +from schema import Optional, Or + +from organize.utils import Template, safe_description from .action import Action -from .trash import Trash +from .copymove_utils import CONFLICT_OPTIONS, check_conflict, resolve_overwrite_conflict logger = logging.getLogger(__name__) class Rename(Action): - """ - Renames a file. + """Renames a file. - :param str name: - The new filename. - Can be a format string which uses file attributes from a filter. + Args: + name (str): + The new name for the file / dir. - :param bool overwrite: - specifies whether existing files should be overwritten. - Otherwise it will start enumerating files (append a counter to the - filename) to resolve naming conflicts. [Default: False] + on_conflict (str): + What should happen in case **dest** already exists. + One of `skip`, `overwrite`, `trash`, `rename_new` and `rename_existing`. + Defaults to `rename_new`. - :param str counter_separator: - specifies the separator between filename and the appended counter. - Only relevant if **overwrite** is disabled. [Default: ``\' \'``] + rename_template (str): + A template for renaming the file / dir in case of a conflict. + Defaults to `{name} {counter}{extension}`. 
- Examples: - - Convert all .PDF file extensions to lowercase (.pdf): + The next action will work with the renamed file / dir. + """ - .. code-block:: yaml - :caption: config.yaml + name = "rename" + arg_schema = Or( + str, + { + "name": str, + Optional("on_conflict"): Or(*CONFLICT_OPTIONS), + Optional("rename_template"): str, + }, + ) + + def __init__( + self, + name: str, + on_conflict="rename_new", + rename_template="{name} {counter}{extension}", + ) -> None: + if on_conflict not in CONFLICT_OPTIONS: + raise ValueError( + "on_conflict must be one of %s" % ", ".join(CONFLICT_OPTIONS) + ) - rules: - - folders: '~/Desktop' - filters: - - extension: PDF - actions: - - rename: "{path.stem}.pdf" + self.new_name = Template.from_string(name) + self.conflict_mode = on_conflict + self.rename_template = Template.from_string(rename_template) - - Convert **all** file extensions to lowercase: + def pipeline(self, args: dict, simulate: bool): + fs = args["fs"] # type: FS + src_path = args["fs_path"] - .. code-block:: yaml - :caption: config.yaml + new_name = self.new_name.render(**args) + if "/" in new_name: + raise ValueError( + "The new name cannot contain slashes. " + "To move files or folders use `move`." + ) - rules: - - folders: '~/Desktop' - filters: - - Extension - actions: - - rename: "{path.stem}.{extension.lower}" - """ + dst_path = path.join(path.dirname(src_path), new_name) - def __init__(self, name: str, overwrite=False, counter_separator=" ") -> None: - if os.path.sep in name: - ValueError( - "Rename only takes a filename as argument. To move files between " - "folders use the Move action." 
- ) - self.name = name - self.overwrite = overwrite - self.counter_separator = counter_separator - - def pipeline(self, args: Mapping) -> Mapping[str, Path]: - path = args["path"] # type: Path - simulate = args["simulate"] - expanded_name = self.fill_template_tags(self.name, args) - new_path = path.parent / expanded_name - - # handle filename collisions - new_path_exists = new_path.exists() - new_path_samefile = new_path_exists and new_path.samefile(path) - if new_path_exists and not new_path_samefile: - if self.overwrite: - self.print("File already exists") - Trash().run(path=new_path, simulate=simulate) - else: - new_path = find_unused_filename( - path=new_path, separator=self.counter_separator - ) - - # do nothing if the new name is equal to the old name and the file is - # the same - if new_path_samefile and new_path == path: - self.print("Keep name") + if dst_path == src_path: + self.print("Name did not change") else: - self.print('New name: "%s"' % new_path.name) - if not simulate: - logger.info('Renaming "%s" to "%s".', path, new_path) - path.rename(new_path) + move_action: Callable[[FS, str, FS, str], None] + if fs.isdir(src_path): + move_action = move_dir + elif fs.isfile(src_path): + move_action = move_file + + # check for conflicts + skip, dst_path = check_conflict( + src_fs=fs, + src_path=src_path, + dst_fs=fs, + dst_path=dst_path, + conflict_mode=self.conflict_mode, + rename_template=self.rename_template, + simulate=simulate, + print=self.print, + ) + + if not skip: + self.print("Rename to %s" % safe_description(fs, dst_path)) + if not simulate: + move_action(fs, src_path, fs, dst_path) - return {"path": new_path} + # the next action should work with the renamed file + return { + "fs": fs, + "fs_path": dst_path, + } def __str__(self) -> str: - return "Rename(name=%s, overwrite=%s, sep=%s)" % ( - self.name, - self.overwrite, - self.counter_separator, + return "Rename(new_name=%s, conflict_mode=%s)" % ( + self.new_name, + self.conflict_mode, ) diff --git 
a/organize/actions/shell.py b/organize/actions/shell.py index 0be8290c..b221cad5 100644 --- a/organize/actions/shell.py +++ b/organize/actions/shell.py @@ -1,43 +1,76 @@ +from schema import Or, Optional +import shlex import logging import subprocess -from typing import Mapping +from subprocess import PIPE +from ..utils import Template from .action import Action logger = logging.getLogger(__name__) class Shell(Action): - """ Executes a shell command - :param str cmd: The command to execute. - - Example: - - (macOS) Open all pdfs on your desktop: + Args: + cmd (str): The command to execute. + run_in_simulation (bool): + Whether to execute in simulation mode (default = false) + ignore_errors (bool): + Whether to continue on returncodes != 0. - .. code-block:: yaml - :caption: config.yaml + Returns - rules: - - folders: '~/Desktop' - filters: - - extension: pdf - actions: - - shell: 'open "{path}"' + - `{shell.output}` (`str`): The stdout of the executed process. + - `{shell.returncode}` (`int`): The returncode of the executed process. 
""" - def __init__(self, cmd: str) -> None: - self.cmd = cmd + name = "shell" + arg_schema = Or( + str, + { + "cmd": str, + Optional("run_in_simulation"): bool, + Optional("ignore_errors"): bool, + }, + ) + + def __init__(self, cmd: str, run_in_simulation=False, ignore_errors=False): + self.cmd = Template.from_string(cmd) + self.run_in_simulation = run_in_simulation + self.ignore_errors = ignore_errors - def pipeline(self, args: Mapping) -> None: - full_cmd = self.fill_template_tags(self.cmd, args) + def pipeline(self, args: dict, simulate: bool): + full_cmd = self.cmd.render(**args) self.print("$ %s" % full_cmd) - if not args["simulate"]: + if not simulate or self.run_in_simulation: # we use call instead of run to be compatible with python < 3.5 logger.info('Executing command "%s" in shell.', full_cmd) - subprocess.call(full_cmd, shell=True) + try: + lexed = shlex.split(full_cmd) + call = subprocess.run( + lexed, + check=True, + stdout=PIPE, + stderr=subprocess.STDOUT, + ) + return { + self.get_name(): { + "output": call.stdout.decode("utf-8"), + "returncode": 0, + } + } + except subprocess.CalledProcessError as e: + if not self.ignore_errors: + raise e + return { + self.get_name(): { + "output": e.stdout.decode("utf-8"), + "returncode": e.returncode, + } + } def __str__(self) -> str: return 'Shell(cmd="%s")' % self.cmd diff --git a/organize/actions/symlink.py b/organize/actions/symlink.py new file mode 100644 index 00000000..e2689839 --- /dev/null +++ b/organize/actions/symlink.py @@ -0,0 +1,43 @@ +import os +import logging +from fs.base import FS +from fs import path +from fs.osfs import OSFS +from .action import Action +from organize.utils import Template + +logger = logging.getLogger(__name__) + + +class Symlink(Action): + + """Create a symbolic link. + + Args: + dest (str): + The symlink destination. If **dest** ends with a slash `/``, create the + symlink in the given directory. Can contain placeholders. + + Only the local filesystem is supported. 
+ """ + + name = "symlink" + + def __init__(self, dest): + self._dest = Template.from_string(dest) + + def pipeline(self, args: dict, simulate: bool): + fs = args["fs"] # type: FS + fs_path = args["fs_path"] # type: str + + if not isinstance(fs, OSFS): + raise EnvironmentError("Symlinks only work on the local filesystem.") + + dest = os.path.expanduser(self._dest.render(**args)) + if dest.endswith("/"): + dest = path.join(dest, path.basename(fs_path)) + + self.print("Creating symlink: %s" % dest) + if not simulate: + os.makedirs(os.path.dirname(dest), exist_ok=True) + os.symlink(fs.getsyspath(fs_path), dest) diff --git a/organize/actions/trash.py b/organize/actions/trash.py index b814e2e2..4c8b68f2 100644 --- a/organize/actions/trash.py +++ b/organize/actions/trash.py @@ -1,7 +1,4 @@ import logging -from typing import Mapping - -from pathlib import Path from .action import Action @@ -10,34 +7,23 @@ class Trash(Action): - """ - Move a file into the trash. - - Example: - - Move all JPGs and PNGs on the desktop which are older than one year - into the trash: - - .. 
code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Desktop' - - filters: - - lastmodified: - - days: 365 - - extension: - - png - - jpg - - actions: - - trash - """ - - def pipeline(self, args: Mapping): - path = args["path"] # type: Path - simulate = args["simulate"] # type: bool - from send2trash import send2trash # type: ignore + """Move a file or dir into the trash.""" + + name = "trash" + + @classmethod + def get_schema(cls): + return cls.name + + def trash(self, path: str, simulate: bool): + from send2trash import send2trash self.print('Trash "%s"' % path) if not simulate: logger.info("Moving file %s into trash.", path) - send2trash(str(path)) + send2trash(path) + + def pipeline(self, args: dict, simulate: bool): + fs = args["fs"] + fs_path = args["fs_path"] + self.trash(path=fs.getsyspath(fs_path), simulate=simulate) diff --git a/organize/cli.py b/organize/cli.py index 2be90b2c..57378316 100644 --- a/organize/cli.py +++ b/organize/cli.py @@ -1,155 +1,289 @@ """ -organize -- The file management automation tool. - -Usage: - organize sim [--config-file=] - organize run [--config-file=] - organize config [--open-folder | --path | --debug] [--config-file=] - organize list - organize --help - organize --version - -Arguments: - sim Simulate a run. Does not touch your files. - run Organizes your files according to your rules. - config Open the configuration file in $EDITOR. - list List available filters and actions. - --version Show program version and exit. - -h, --help Show this screen and exit. - -Options: - -o, --open-folder Open the folder containing the configuration files. - -p, --path Show the path to the configuration file. - -d, --debug Debug your configuration file. - -Full documentation: https://organize.readthedocs.io +organize + +The file management automation tool. 
""" -import logging import os import sys -from typing import Union -from colorama import Fore, Style # type: ignore -from docopt import docopt # type: ignore +import click +from fs import appfs, osfs, open_fs +from fs.path import split -from . import CONFIG_DIR, CONFIG_PATH, LOG_PATH +from . import console from .__version__ import __version__ -from pathlib import Path -from .config import Config -from .core import execute_rules -from .utils import flatten, fullpath - -logger = logging.getLogger("organize") +from .migration import NeedsMigrationError +DOCS_URL = "https://tfeldmann.github.io/organize/" # "https://organize.readthedocs.io" +MIGRATE_URL = DOCS_URL + "updating-from-v1/" +DEFAULT_CONFIG = """\ +# organize configuration file +# {docs} -def main(argv=None): - """ entry point for the command line interface """ - args = docopt(__doc__, argv=argv, version=__version__, help=True) +rules: + - locations: + - # your locations here + filters: + - # your filters here + actions: + - # your actions here +""".format( + docs=DOCS_URL +) - # override default config file path - if args["--config-file"]: - expanded_path = os.path.expandvars(args["--config-file"]) - config_path = Path(expanded_path).expanduser().resolve() - config_dir = config_path.parent - else: - config_dir = CONFIG_DIR - config_path = CONFIG_PATH - - # > organize config - if args["config"]: - if args["--open-folder"]: - open_in_filemanager(config_dir) - elif args["--path"]: - print(str(config_path)) - elif args["--debug"]: - config_debug(config_path) - else: - config_edit(config_path) - - # > organize list - elif args["list"]: - list_actions_and_filters() - - # > organize sim / run +try: + config_filename = "config.yaml" + if os.getenv("ORGANIZE_CONFIG"): + dirname, config_filename = os.path.split(os.getenv("ORGANIZE_CONFIG", "")) + config_fs = osfs.OSFS(dirname, create=False) else: - try: - config = Config.from_file(config_path) - execute_rules(config.rules, simulate=args["sim"]) - except Config.Error 
as e: - logger.exception(e) - print_error(e) - print("Try 'organize config --debug' for easier debugging.") - print("Full traceback at: %s" % LOG_PATH) - sys.exit(1) - except Exception as e: # pylint: disable=broad-except - logger.exception(e) - print_error(e) - print("Full traceback at: %s" % LOG_PATH) - sys.exit(1) - - -def config_edit(config_path: Path) -> None: - """ open the config file in $EDITOR or default text editor """ - # attention: the env variable might contain command line arguments. - # https://github.com/tfeldmann/organize/issues/24 - editor = os.getenv("EDITOR") - if editor: - os.system('%s "%s"' % (editor, config_path)) - else: - open_in_filemanager(config_path) + config_fs = appfs.UserConfigFS("organize", create=True) + + # create default config file if it not exists + if not config_fs.exists(config_filename): + config_fs.writetext(config_filename, DEFAULT_CONFIG) + CONFIG_PATH = config_fs.getsyspath(config_filename) +except Exception as e: + console.error(str(e), title="Config file") + sys.exit(1) + + +class NaturalOrderGroup(click.Group): + def list_commands(self, ctx): + return self.commands.keys() + + +CLI_CONFIG = click.argument( + "config", + required=False, + default=CONFIG_PATH, + type=click.Path(exists=True), +) +CLI_WORKING_DIR_OPTION = click.option( + "--working-dir", + default=".", + type=click.Path(exists=True), + help="The working directory", +) +# for CLI backwards compatibility with organize v1.x +CLI_CONFIG_FILE_OPTION = click.option( + "--config-file", + default=None, + hidden=True, + type=click.Path(exists=True), +) + + +def run_local(config_path: str, working_dir: str, simulate: bool): + from . 
import core + from schema import SchemaError + + try: + console.info(config_path=config_path, working_dir=working_dir) + config_dir, config_name = split(config_path) + config = open_fs(config_dir).readtext(config_name) + os.chdir(working_dir) + core.run(rules=config, simulate=simulate) + except NeedsMigrationError as e: + console.error(e, title="Config needs migration") + console.warn( + "Your config file needs some updates to work with organize v2.\n" + "Please see the migration guide at\n\n" + "%s" % MIGRATE_URL + ) + sys.exit(1) + except SchemaError as e: + console.error("Invalid config file!") + for err in e.autos: + if err and len(err) < 200: + core.highlighted_console.print(err) + except Exception as e: + core.highlighted_console.print_exception() + except (EOFError, KeyboardInterrupt): + console.status.stop() + console.warn("Aborted") + + +@click.group( + help=__doc__, + cls=NaturalOrderGroup, + context_settings=dict(help_option_names=["-h", "--help"]), +) +@click.version_option(__version__) +def cli(): + pass + + +@cli.command() +@CLI_CONFIG +@CLI_WORKING_DIR_OPTION +@CLI_CONFIG_FILE_OPTION +def run(config, working_dir, config_file): + """Organizes your files according to your rules.""" + if config_file: + config = config_file + console.deprecated( + "The --config-file option can now be omitted. See organize --help." + ) + run_local(config_path=config, working_dir=working_dir, simulate=False) -def open_in_filemanager(path: Path) -> None: - """ opens the given path in file manager, using the default application """ - import webbrowser # pylint: disable=import-outside-toplevel +@cli.command() +@CLI_CONFIG +@CLI_WORKING_DIR_OPTION +@CLI_CONFIG_FILE_OPTION +def sim(config, working_dir, config_file): + """Simulates a run (does not touch your files).""" + if config_file: + config = config_file + console.deprecated( + "The --config-file option can now be omitted. See organize --help." 
+ ) + run_local(config_path=config, working_dir=working_dir, simulate=True) - webbrowser.open(path.as_uri()) +@cli.command() +@click.argument( + "config", + required=False, + default=CONFIG_PATH, + type=click.Path(), +) +@click.option( + "--editor", + envvar="EDITOR", + help="The editor to use. (Default: $EDITOR)", +) +def edit(config, editor): + """Edit the rules. -def config_debug(config_path: Path) -> None: - """ prints the config with resolved yaml aliases, checks rules syntax and checks - whether the given folders exist + If called without arguments it will open the default config file in $EDITOR. """ - print(str(config_path)) - haserr = False - # check config syntax + click.edit(filename=config, editor=editor) + + +@cli.command() +@CLI_CONFIG +@click.option("--debug", is_flag=True, help="Verbose output") +def check(config, debug): + """Checks whether a given config file is valid. + + If called without arguments it will check the default config file. + """ + print("Checking: " + config) + + from . 
import migration + from .config import load_from_string, cleanup, validate + from .core import highlighted_console as out, replace_with_instances + try: - print(Style.BRIGHT + "Your configuration as seen by the parser:") - config = Config.from_file(config_path) - if not config.config: - print_error("Config file is empty") - return - print(config.yaml()) - rules = config.rules - print("Config file syntax seems fine!") - except Config.Error as e: - haserr = True - print_error(e) + config_dir, config_name = split(str(config)) + config_str = open_fs(config_dir).readtext(config_name) + + if debug: + out.rule("Raw", align="left") + out.print(config_str) + + rules = load_from_string(config_str) + + if debug: + out.print("\n\n") + out.rule("Loaded", align="left") + out.print(rules) + + rules = cleanup(rules) + + if debug: + out.print("\n\n") + out.rule("Cleaned", align="left") + out.print(rules) + + if debug: + out.print("\n\n") + out.rule("Migration from v1", align="left") + + migration.migrate_v1(rules) + + if debug: + out.print("Not needed.") + out.print("\n\n") + out.rule("Schema validation", align="left") + + validate(rules) + + if debug: + out.print("Validtion ok.") + out.print("\n\n") + out.rule("Instantiation", align="left") + + warnings = replace_with_instances(rules) + if debug: + out.print(rules) + for msg in warnings: + out.print("Warning: %s" % msg) + + if debug: + out.print("\n\n") + out.rule("Result", align="left") + out.print("Config is valid.") + + except Exception as e: + out.print_exception() + sys.exit(1) + + +@cli.command() +@click.option("--path", is_flag=True, help="Print the path instead of revealing it.") +def reveal(path): + """Reveals the default config file.""" + if path: + click.echo(CONFIG_PATH) else: - # check whether all folders exists: - allfolders = set(flatten([rule.folders for rule in rules])) - for f in allfolders: - if not fullpath(f).exists(): - haserr = True - print(Fore.YELLOW + 'Warning: "%s" does not exist!' 
% f) + click.launch(str(CONFIG_PATH), locate=True) - if not haserr: - print(Fore.GREEN + Style.BRIGHT + "No config problems found.") +@cli.command() +def schema(): + """Prints the json schema for config files.""" + import json -def list_actions_and_filters() -> None: - """ Prints a list of available actions and filters """ - import inspect # pylint: disable=import-outside-toplevel - from organize import filters, actions # pylint: disable=import-outside-toplevel + from .config import CONFIG_SCHEMA + from .console import console as richconsole - print(Style.BRIGHT + "Filters:") - for name, _ in inspect.getmembers(filters, inspect.isclass): - print(" " + name) - print() - print(Style.BRIGHT + "Actions:") - for name, _ in inspect.getmembers(actions, inspect.isclass): - print(" " + name) + js = json.dumps( + CONFIG_SCHEMA.json_schema( + schema_id="https://tfeldmann.de/organize.schema.json", + ) + ) + richconsole.print_json(js) + + +@cli.command() +def docs(): + """Opens the documentation.""" + click.launch(DOCS_URL) + + +# deprecated - only here for backwards compatibility +@cli.command(hidden=True) +@click.option("--path", is_flag=True, help="Print the default config file path") +@click.option("--debug", is_flag=True, help="Debug the default config file") +@click.option("--open-folder", is_flag=True) +@click.pass_context +def config(ctx, path, debug, open_folder): + """Edit the default configuration file.""" + if open_folder: + ctx.invoke(reveal) + elif path: + ctx.invoke(reveal, path=True) + return + elif debug: + ctx.invoke(check) + else: + ctx.invoke(edit) + console.deprecated("`organize config` is deprecated.") + console.deprecated("Please see `organize --help` for all available commands.") -def print_error(e: Union[Exception, str]) -> None: - print(Style.BRIGHT + Fore.RED + "ERROR:" + Style.RESET_ALL + " %s" % e) +if __name__ == "__main__": + cli() diff --git a/organize/config.py b/organize/config.py index ffdd0690..ae3a5eb7 100644 --- a/organize/config.py +++ 
b/organize/config.py @@ -1,205 +1,102 @@ -import inspect -import logging import textwrap -from typing import Generator, List, Mapping, NamedTuple, Sequence +from collections import defaultdict import yaml - -from . import actions, filters -from .actions.action import Action -from pathlib import Path -from .filters.filter import Filter -from .utils import first_key, flatten - -logger = logging.getLogger(__name__) -Rule = NamedTuple( - "Rule", - [ - ("filters", Sequence[Filter]), - ("actions", Sequence[Action]), - ("folders", Sequence[str]), - ("subfolders", bool), - ("system_files", bool), - ], +from schema import And, Literal, Optional, Or, Schema + +from organize.actions import ACTIONS +from organize.filters import FILTERS + +from .utils import flatten_all_lists_in_dict + +CONFIG_SCHEMA = Schema( + { + Literal("rules", description="All rules are defined here."): [ + { + Optional("name", description="The name of the rule."): str, + Optional("enabled"): bool, + Optional("subfolders"): bool, + Optional("filter_mode", description="The filter mode."): Or( + "all", "any", "none", error="Invalid filter mode" + ), + Optional( + "targets", + description="Whether the rule should apply to directories or folders.", + ): Or("dirs", "files"), + "locations": Or( + str, + [ + Or( + str, + { + "path": And(str, len), + Optional("max_depth"): Or(int, None), + Optional("search"): Or("depth", "breadth"), + Optional("exclude_files"): [str], + Optional("exclude_dirs"): [str], + Optional("system_exlude_files"): [str], + Optional("system_exclude_dirs"): [str], + Optional("ignore_errors"): bool, + Optional("filter"): [str], + Optional("filter_dirs"): [str], + Optional("filesystem"): object, + }, + ), + ], + ), + Optional("filters"): [ + Optional(x.get_schema()) for x in FILTERS.values() + ], + "actions": [Optional(x.get_schema()) for x in ACTIONS.values()], + }, + ], + }, + name="organize rule configuration", ) -# disable yaml constructors for strings starting with exclamation marks -# 
https://stackoverflow.com/a/13281292/300783 + def default_yaml_cnst(loader, tag_suffix, node): + # disable yaml constructors for strings starting with exclamation marks + # https://stackoverflow.com/a/13281292/300783 return str(node.tag) yaml.add_multi_constructor("", default_yaml_cnst, Loader=yaml.SafeLoader) -class Config: - def __init__(self, config: dict) -> None: - self.config = config - self.filter_by_name = { - name.lower(): getattr(filters, name) - for name, _ in inspect.getmembers(filters, inspect.isclass) - } - self.action_by_name = { - name.lower(): getattr(actions, name) - for name, _ in inspect.getmembers(actions, inspect.isclass) - } - - @classmethod - def from_string(cls, config: str) -> "Config": - dedented_config = textwrap.dedent(config) - try: - return cls(yaml.load(dedented_config, Loader=yaml.SafeLoader)) - except yaml.YAMLError as e: - raise cls.ParsingError(e) - - @classmethod - def from_file(cls, path: Path) -> "Config": - with path.open(encoding="utf-8") as f: - return cls.from_string(f.read()) - - def yaml(self) -> str: - if not (self.config and "rules" in self.config): - raise self.NoRulesFoundError() - data = {"rules": self.config["rules"]} - yaml.Dumper.ignore_aliases = lambda self, data: True # type: ignore - return yaml.dump( - data, allow_unicode=True, default_flow_style=False, default_style="'" - ) - - @staticmethod - def parse_folders(rule_item) -> Generator[str, None, None]: - # the folder list is flattened so we can use encapsulated list - # definitions in the config file. 
- yield from flatten(rule_item["folders"]) - - @staticmethod - def sanitize_key(key): - return key.lower().replace("_", "") - - def _get_filter_class_by_name(self, name): - try: - return self.filter_by_name[self.sanitize_key(name)] - except AttributeError as e: - raise self.Error("%s is no valid filter" % name) from e - - def _get_action_class_by_name(self, name): - try: - return self.action_by_name[self.sanitize_key(name)] - except AttributeError as e: - raise self.Error("%s is no valid action" % name) from e - - @staticmethod - def _class_instance_with_args(Cls, args): - if args is None: - return Cls() - elif isinstance(args, list): - return Cls(*args) - elif isinstance(args, dict): - return Cls(**args) - return Cls(args) - - def instantiate_filters(self, rule_item: Mapping) -> Generator[Filter, None, None]: - # filter list can be empty - try: - filter_list = rule_item["filters"] - except KeyError: - return - if not filter_list: - return - if not isinstance(filter_list, list): - raise self.FiltersNoListError() - - for filter_item in flatten(filter_list): - if filter_item is None: - # TODO: don't know what this should be - continue - # filter with arguments - elif isinstance(filter_item, dict): - name = first_key(filter_item) - args = filter_item[name] - filter_class = self._get_filter_class_by_name(name) - yield self._class_instance_with_args(filter_class, args) - # only given filter name without args - elif isinstance(filter_item, str): - name = filter_item - filter_class = self._get_filter_class_by_name(name) - yield filter_class() - else: - raise self.Error("Unknown filter: %s" % filter_item) - - def instantiate_actions(self, rule_item: Mapping) -> Generator[Action, None, None]: - action_list = rule_item["actions"] - if not isinstance(action_list, list): - raise self.ActionsNoListError() - - for action_item in flatten(action_list): - if isinstance(action_item, dict): - name = first_key(action_item) - args = action_item[name] - action_class = 
self._get_action_class_by_name(name) - yield self._class_instance_with_args(action_class, args) - elif isinstance(action_item, str): - name = action_item - action_class = self._get_action_class_by_name(name) - yield action_class() - else: - raise self.Error("Unknown action: %s" % action_item) - - @property - def rules(self) -> List[Rule]: - """ :returns: A list of instantiated Rules """ - if not (self.config and "rules" in self.config): - raise self.NoRulesFoundError() - result = [] - for i, rule_item in enumerate(self.config["rules"]): - # skip disabled rules - if not rule_item.get("enabled", True): - continue - - rule_folders = list(self.parse_folders(rule_item)) - rule_filters = list(self.instantiate_filters(rule_item)) - rule_actions = list(self.instantiate_actions(rule_item)) - - if not rule_folders: - logger.warning("No folders given for rule %s!", i + 1) - if not rule_filters: - logger.warning("No filters given for rule %s!", i + 1) - if not rule_actions: - logger.warning("No actions given for rule %s!", i + 1) - - rule = Rule( - folders=rule_folders, - filters=rule_filters, - actions=rule_actions, - subfolders=rule_item.get("subfolders", False), - system_files=rule_item.get("system_files", False), - ) - result.append(rule) - return result - - class Error(Exception): - pass - - class NoRulesFoundError(Error): - def __str__(self): - return "No rules found in configuration file" - - class ParsingError(Error): - pass - - class NoFoldersFoundError(Error): - pass - - class NoFiltersFoundError(Error): - pass - - class NoActionsFoundError(Error): - pass - - class FiltersNoListError(Error): - def __str__(self): - return "Please specify your filters as a YAML list" - - class ActionsNoListError(Error): - def __str__(self): - return "Please specify your actions as a YAML list" +def load_from_string(config: str) -> dict: + dedented_config = textwrap.dedent(config) + return yaml.load(dedented_config, Loader=yaml.SafeLoader) + + +def lowercase_keys(obj): + if 
isinstance(obj, dict): + obj = {key.lower(): value for key, value in obj.items()} + for key, value in obj.items(): + if isinstance(value, list): + for i, item in enumerate(value): + value[i] = lowercase_keys(item) + obj[key] = lowercase_keys(value) + return obj + + +def cleanup(config: dict) -> dict: + result = defaultdict(list) + + # delete every root key except "rules" + for rule in config.get("rules", []): + # delete disabled rules + if rule.get("enabled", True): + result["rules"].append(rule) + + if not result: + raise ValueError("No rules defined.") + + result = lowercase_keys(result) + + # flatten all lists everywhere + return flatten_all_lists_in_dict(dict(result)) + + +def validate(config: dict): + return CONFIG_SCHEMA.validate(config) diff --git a/organize/console.py b/organize/console.py new file mode 100644 index 00000000..ce2bc02b --- /dev/null +++ b/organize/console.py @@ -0,0 +1,227 @@ +from fs.base import FS +from fs.path import basename, dirname, forcedir, relpath +from rich.console import Console +from rich.panel import Panel +from rich.text import Text +from rich.theme import Theme +from rich.status import Status +from rich.prompt import Confirm as RichConfirm, Prompt as RichPrompt +from .utils import safe_description + +from organize.__version__ import __version__ + +ICON_DIR = "🗁" +ICON_FILE = "" +INDENT = " " * 2 + +theme = Theme( + { + "info": "dim cyan", + "warning": "yellow", + "error": "bold red", + "simulation": "bold green", + "status": "bold green", + "rule": "bold cyan", + "location.fs": "yellow", + "location.base": "green", + "location.main": "bold green", + "path.base": "dim green", + "path.main": "green", + "path.icon": "green", + "pipeline.source": "cyan", + "pipeline.msg": "white", + "pipeline.error": "bold red", + "pipeline.prompt": "bold yellow", + "summary.done": "bold green", + "summary.fail": "red", + } +) +console = Console(theme=theme, highlight=False) +status = Status("", console=console) + + +class Prefixer: + def 
__init__(self): + self.reset() + + def reset(self): + self._args = None + self._kwargs = None + + def set_prefix(self, *args, **kwargs): + self._args = args + self._kwargs = kwargs + + def ensure_prefix(self): + if self._args is not None: + console.print(*self._args, **self._kwargs) + self.reset() + + def print(self, *args, **kwargs): + self.ensure_prefix() + console.print(*args, **kwargs) + + +with_path = Prefixer() +with_newline = Prefixer() + + +class PipelineMixin: + @classmethod + def set_source(cls, source): + cls.validate_error_message = Text.assemble( + _pipeline_base(source), + ("Please enter Y or N", "prompt.invalid"), + ) + + def pre_prompt(self): + with_path.ensure_prefix() + + +class Confirm(PipelineMixin, RichConfirm): + pass + + +class Prompt(PipelineMixin, RichPrompt): + pass + + +def _highlight_path(path, base_style, main_style, relative=False): + dir_ = forcedir(dirname(path)) + if relative: + dir_ = relpath(dir_) + name = basename(path) + return Text.assemble( + (dir_, base_style), + (name, main_style), + ) + + +def info(config_path, working_dir): + console.print("organize {}".format(__version__)) + console.print('Config: "{}"'.format(config_path)) + if working_dir != ".": + console.print('Working dir: "{}"'.format(working_dir)) + + +def warn(msg, title="Warning"): + console.print("[warning][b]{}:[/b] {}[/warning]".format(title, msg)) + + +def deprecated(msg): + warn(msg, title="Deprecated") + + +def error(msg, title="Error"): + console.print("[error]{}: {}[/error]".format(title, msg)) + + +def simulation_banner(): + console.print() + console.print(Panel("SIMULATION", style="simulation")) + + +def spinner(simulate: bool): + status_verb = "simulating" if simulate else "organizing" + status.update(Text(status_verb, "status")) + status.start() + + +def rule(rule): + console.print() + console.rule("[rule]:gear: %s" % rule, align="left", style="rule") + with_newline.reset() + + +def location(fs: FS, fs_path: str): + result = Text() + if 
fs.hassyspath(fs_path): + syspath = fs.getsyspath(fs_path) + result = _highlight_path(syspath.rstrip("/"), "location.base", "location.main") + else: + result = Text.assemble( + (str(fs), "location.fs"), + " ", + _highlight_path(fs_path.rstrip("/"), "location.base", "location.main"), + ) + with_newline.print(result) + + +def path(fs: FS, fs_path: str): + icon = ICON_DIR if fs.isdir(fs_path) else ICON_FILE + msg = Text.assemble( + INDENT, + _highlight_path(fs_path, "path.base", "path.main", relative=True), + " ", + (icon, "path.icon"), + ) + with_path.set_prefix(msg) + + +def path_changed_during_pipeline( + fs: FS, fs_path: str, new_fs: FS, new_path: str, reason="deferred from" +): + icon = ICON_DIR if new_fs.isdir(new_path) else ICON_FILE + msg = Text.assemble( + INDENT, + _highlight_path( + safe_description(new_fs, new_path), "path.base", "path.main", relative=True + ), + (" <- %s " % reason, "yellow"), + _highlight_path(fs_path, "path.base", "path.main", relative=True), + " ", + (icon, "path.icon"), + ) + with_path.set_prefix(msg) + + +def _pipeline_base(source: str): + return Text.assemble( + INDENT * 2, + ("- ({}) ".format(source), "pipeline.source"), + ) + + +def pipeline_message(source: str, msg: str) -> None: + line = Text.assemble( + _pipeline_base(source), + (msg, "pipeline.msg"), + ) + with_path.print(line) + with_newline.set_prefix("") + + +def pipeline_error(source: str, msg: str): + line = _pipeline_base(source) + line.append("ERROR! 
{}".format(msg), "pipeline.error") + with_path.print(line) + with_newline.set_prefix("") + + +def pipeline_confirm(source: str, msg: str, default: bool): + status.stop() + line = _pipeline_base(source) + line.append(msg, "pipeline.prompt") + Confirm.set_source(source) + result = Confirm.ask( + line, + console=console, + default=default, + ) + with_newline.set_prefix("") + status.start() + return result + + +def summary(count: dict): + status.stop() + console.print() + if not sum(count.values()): + console.print("Nothing to do.") + else: + result = Text.assemble( + ("success {done}".format(**count), "summary.done"), + " / ", + ("fail {fail}".format(**count), "summary.fail"), + ) + console.print(result) diff --git a/organize/core.py b/organize/core.py index eaaa7192..d8d2a7c2 100644 --- a/organize/core.py +++ b/organize/core.py @@ -1,204 +1,309 @@ import logging -import os -import shutil -from copy import deepcopy -from datetime import datetime -from textwrap import indent -from typing import Generator, Iterable, List, NamedTuple, Optional, Sequence, Set, Tuple +from collections import Counter +from pathlib import Path +from typing import Iterable, NamedTuple, Union -from colorama import Fore, Style # type: ignore +from fs import path as fspath +from fs.base import FS +from fs.errors import NoSysPath +from fs.walk import Walker +from rich.console import Console +from . 
import config, console +from .actions import ACTIONS from .actions.action import Action -from pathlib import Path -from .config import Rule +from .filters import FILTERS from .filters.filter import Filter -from .utils import DotDict, splitglob - -logger = logging.getLogger(__name__) -SYSTEM_FILES = ("thumbs.db", "desktop.ini", ".DS_Store") - -Job = NamedTuple( - "Job", - [ - ("folderstr", str), - ("basedir", Path), - ("path", Path), - ("filters", Sequence[Filter]), - ("actions", Sequence[Action]), - ], +from .migration import migrate_v1 +from .utils import ( + basic_args, + deep_merge_inplace, + ensure_dict, + ensure_list, + fs_path_from_options, + to_args, ) -Job.__doc__ = """ - :param str folderstr: the original folder definition specified in the config - :param Path basedir: the job's base folder - :param Path path: the path of the file to handle - :param list filters: the filters that apply to the path - :param list actions: the actions which should be executed -""" - - -class OutputHelper: - """ - class to track the current folder / file and print only changes. - This is needed because we only want to output the current folder and file if the - filter or action prints something. 
- """ - - def __init__(self) -> None: - self.not_found = set() # type: Set[str] - self.curr_folder = None # type: Optional[Path] - self.curr_path = None # type: Optional[Path] - self.prev_folder = None # type: Optional[Path] - self.prev_path = None # type: Optional[Path] - - def set_location(self, folder: Path, path: Path) -> None: - self.curr_folder = folder - self.curr_path = path - - def pre_print(self) -> None: - """ - pre-print hook that is called everytime the moment before a filter or action is - about to print something to the cli - """ - if self.curr_folder != self.prev_folder: - if self.prev_folder is not None: - print() # ensure newline between folders - print("Folder %s%s:" % (Style.BRIGHT, self.curr_folder)) - self.prev_folder = self.curr_folder - - if self.curr_path != self.prev_path: - print(indent("File %s%s:" % (Style.BRIGHT, self.curr_path), " " * 2)) - self.prev_path = self.curr_path - - def print_path_not_found(self, folderstr: str) -> None: - if folderstr not in self.not_found: - self.not_found.add(folderstr) - msg = "Path not found: {}".format(folderstr) - print(Fore.YELLOW + Style.BRIGHT + msg) - logger.warning(msg) - - -output_helper = OutputHelper() - - -def execute_rules(rules: Iterable[Rule], simulate: bool) -> None: - cols, _ = shutil.get_terminal_size(fallback=(79, 20)) - simulation_msg = Fore.GREEN + Style.BRIGHT + " SIMULATION ".center(cols, "~") - - jobs = create_jobs(rules=rules) - - if simulate: - print(simulation_msg) - - failed, succeded = run_jobs(jobs=jobs, simulate=simulate) - if succeded == failed == 0: - msg = "Nothing to do." 
- logger.info(msg) - print(msg) - - if simulate: - print(simulation_msg) - - -def create_jobs(rules: Iterable[Rule]) -> Generator[Job, None, None]: - """ creates `Job` data structures for every path handled in each rule """ - for rule in rules: - for folderstr, basedir, path in all_files_for_rule(rule): - yield Job( - folderstr=folderstr, - basedir=basedir, - path=path, - filters=rule.filters, - actions=rule.actions, - ) - -def all_files_for_rule(rule: Rule) -> Generator[Tuple[str, Path, Path], None, None]: - files = dict() - for folderstr in rule.folders: - folderstr = folderstr.strip() - - # check whether the file / folder is prefixed with `!` to be excluded - exclude_flag = folderstr.startswith("!") - - # assemble glob expression - basedir, globstr = splitglob(folderstr.lstrip("!").strip()) - if basedir.is_dir(): - if not globstr: - globstr = "**/*" if rule.subfolders else "*" - elif basedir.is_file(): - # this allows specifying single files - globstr = basedir.name - basedir = basedir.parent - else: - output_helper.print_path_not_found(str(basedir)) - continue - - # iterate files in basedir and add to / remove from result dict - for path in basedir.glob(globstr): - if path.is_file() and (rule.system_files or path.name not in SYSTEM_FILES): - if not exclude_flag: - files[path] = (folderstr, basedir) - elif path in files: - del files[path] - - for path, (folderstr, basedir) in files.items(): - yield (folderstr, basedir, path) - - -def run_jobs(jobs: Iterable[Job], simulate: bool) -> List[int]: - """ :returns: The number of successfully handled files """ - count = [0, 0] - Action.pre_print_hook = output_helper.pre_print - Filter.pre_print_hook = output_helper.pre_print - - for job in sorted(jobs, key=lambda x: (x.folderstr, x.basedir, x.path)): - args = DotDict( - path=job.path, - basedir=job.basedir, - simulate=simulate, - relative_path=job.path.relative_to(job.basedir), - env=os.environ, - now=datetime.now(), - ) - - output_helper.set_location(job.basedir, 
args.relative_path) - match = filter_pipeline(filters=job.filters, args=args) - if match: - success = action_pipeline(actions=job.actions, args=args) - count[success] += 1 - return count - - -def filter_pipeline(filters: Iterable[Filter], args: DotDict) -> bool: +logger = logging.getLogger(__name__) +highlighted_console = Console() + + +class Location(NamedTuple): + walker: Walker + fs: FS + fs_path: str + + +DEFAULT_SYSTEM_EXCLUDE_FILES = [ + "thumbs.db", + "desktop.ini", + "~$*", + ".DS_Store", + ".localized", +] + +DEFAULT_SYSTEM_EXCLUDE_DIRS = [ + ".git", + ".svn", +] + + +def convert_options_to_walker_args(options: dict): + # combine system_exclude and exclude into a single list + excludes = options.get("system_exlude_files", DEFAULT_SYSTEM_EXCLUDE_FILES) + excludes.extend(options.get("exclude_files", [])) + exclude_dirs = options.get("system_exclude_dirs", DEFAULT_SYSTEM_EXCLUDE_DIRS) + exclude_dirs.extend(options.get("exclude_dirs", [])) + # return all the default options + return { + "ignore_errors": options.get("ignore_errors", False), + "on_error": options.get("on_error", None), + "search": options.get("search", "depth"), + "exclude": excludes, + "exclude_dirs": exclude_dirs, + "max_depth": options.get("max_depth", None), + "filter": None, + "filter_dirs": None, + } + + +def instantiate_location(options: Union[str, dict], default_max_depth=0) -> Location: + if isinstance(options, Location): + return options + if isinstance(options, str): + options = {"path": options} + + # set default max depth from rule + if not "max_depth" in options: + options["max_depth"] = default_max_depth + + if "walker" not in options: + args = convert_options_to_walker_args(options) + walker = Walker(**args) + else: + walker = options["walker"] + + fs, fs_path = fs_path_from_options( + path=options.get("path", "/"), + filesystem=options.get("filesystem"), + ) + return Location(walker=walker, fs=fs, fs_path=fs_path) + + +def instantiate_filter(filter_config): + if 
isinstance(filter_config, Filter): + return filter_config + spec = ensure_dict(filter_config) + name, value = next(iter(spec.items())) + parts = name.split(maxsplit=1) + invert = False + if len(parts) == 2 and parts[0] == "not": + name = parts[1] + invert = True + args, kwargs = to_args(value) + instance = FILTERS[name](*args, **kwargs) + instance.set_logic(inverted=invert) + return instance + + +def instantiate_action(action_config): + if isinstance(action_config, Action): + return action_config + spec = ensure_dict(action_config) + name, value = next(iter(spec.items())) + args, kwargs = to_args(value) + return ACTIONS[name](*args, **kwargs) + + +def syspath_or_exception(fs, path): + try: + return Path(fs.getsyspath(path)) + except NoSysPath as e: + return e + + +def replace_with_instances(config: dict): + warnings = [] + + for rule in config["rules"]: + default_depth = None if rule.get("subfolders", False) else 0 + + _locations = [] + for options in ensure_list(rule["locations"]): + try: + instance = instantiate_location( + options=options, + default_max_depth=default_depth, + ) + _locations.append(instance) + except Exception as e: + if isinstance(options, dict) and options.get("ignore_errors", False): + warnings.append(str(e)) + else: + raise ValueError("Invalid location %s" % options) from e + + # filters are optional + _filters = [] + for x in ensure_list(rule.get("filters", [])): + try: + _filters.append(instantiate_filter(x)) + except Exception as e: + raise ValueError("Invalid filter %s (%s)" % (x, e)) from e + + # actions + _actions = [] + for x in ensure_list(rule["actions"]): + try: + _actions.append(instantiate_action(x)) + except Exception as e: + raise ValueError("Invalid action %s (%s)" % (x, e)) from e + + rule["locations"] = _locations + rule["filters"] = _filters + rule["actions"] = _actions + + return warnings + + +def filter_pipeline(filters: Iterable[Filter], args: dict, filter_mode: str) -> bool: """ run the filter pipeline. 
Returns True on a match, False otherwise and updates `args` in the process. """ + results = [] for filter_ in filters: try: - result = filter_.pipeline(deepcopy(args)) - if isinstance(result, dict): - args.update(result) - elif not result: - # filters might return a simple True / False. - # Exit early if a filter does not match. + # update dynamic path args + args["path"] = syspath_or_exception(args["fs"], args["fs_path"]) + args["relative_path"] = fspath.frombase( + args["fs_base_path"], args["fs_path"] + ) + + match, updates = filter_.pipeline(args) + result = match ^ filter_.inverted + # we cannot exit early on "any". + if (filter_mode == "none" and result) or ( + filter_mode == "all" and not result + ): return False + results.append(result) + deep_merge_inplace(args, updates) except Exception as e: # pylint: disable=broad-except logger.exception(e) - filter_.print(Fore.RED + Style.BRIGHT + "ERROR! %s" % e) + # console.print_exception() + filter_.print_error(str(e)) return False + + if filter_mode == "any": + return any(results) return True -def action_pipeline(actions: Iterable[Action], args: DotDict) -> bool: +def action_pipeline(actions: Iterable[Action], args: dict, simulate: bool) -> bool: for action in actions: try: - updates = action.pipeline(deepcopy(args)) + # update dynamic path args + args["path"] = syspath_or_exception(args["fs"], args["fs_path"]) + args["relative_path"] = fspath.frombase( + args["fs_base_path"], args["fs_path"] + ) + + updates = action.pipeline(args, simulate=simulate) # jobs may return a dict with updates that should be merged into args if updates is not None: - args.update(updates) + deep_merge_inplace(args, updates) except Exception as e: # pylint: disable=broad-except logger.exception(e) - action.print(Fore.RED + Style.BRIGHT + "ERROR! 
%s" % e) + action.print_error(str(e)) return False return True + + +def run_rules(rules: dict, simulate: bool = True): + count = Counter(done=0, fail=0) # type: Counter + + if simulate: + console.simulation_banner() + + console.spinner(simulate=simulate) + for rule_nr, rule in enumerate(rules["rules"], start=1): + target = rule.get("targets", "files") + console.rule(rule.get("name", "Rule %s" % rule_nr)) + filter_mode = rule.get("filter_mode", "all") + + for walker, walker_fs, walker_path in rule["locations"]: + console.location(walker_fs, walker_path) + walk = walker.files if target == "files" else walker.dirs + for path in walk(fs=walker_fs, path=walker_path): + if walker_fs.islink(path): + continue + # tell the user which resource we're handling + console.path(walker_fs, path) + + # assemble the available args + args = basic_args() + args.update( + fs=walker_fs, + fs_path=path, + fs_base_path=walker_path, + ) + + # run resource through the filter pipeline + match = filter_pipeline( + filters=rule["filters"], + args=args, + filter_mode=filter_mode, + ) + + # if the currently handled resource changed we adjust the prefix message + if args.get("resource_changed"): + console.path_changed_during_pipeline( + fs=walker_fs, + fs_path=path, + new_fs=args["fs"], + new_path=args["fs_path"], + reason=args.get("resource_changed"), + ) + args.pop("resource_changed", None) + + # run resource through the action pipeline + if match: + is_success = action_pipeline( + actions=rule["actions"], + args=args, + simulate=simulate, + ) + if is_success: + count["done"] += 1 + else: + count["fail"] += 1 + + if simulate: + console.simulation_banner() + + return count + + +def run(rules: Union[str, dict], simulate: bool, validate=True): + # load and validate + if isinstance(rules, str): + rules = config.load_from_string(rules) + + rules = config.cleanup(rules) + + migrate_v1(rules) + + if validate: + config.validate(rules) + + # instantiate + warnings = replace_with_instances(rules) + for 
msg in warnings: + console.warn(msg) + + # run + count = run_rules(rules=rules, simulate=simulate) + console.summary(count) + + if count["fail"]: + raise RuntimeWarning("Some actions failed.") diff --git a/organize/filters/__init__.py b/organize/filters/__init__.py index 23067f3f..9aafe5f0 100644 --- a/organize/filters/__init__.py +++ b/organize/filters/__init__.py @@ -1,11 +1,32 @@ +from typing import Dict, Type + from .created import Created from .duplicate import Duplicate +from .empty import Empty from .exif import Exif from .extension import Extension -from .file_content import FileContent -from .filename import Filename -from .filesize import FileSize -from .last_modified import LastModified +from .filecontent import FileContent +from .filter import Filter +from .hash import Hash +from .lastmodified import LastModified from .mimetype import MimeType +from .name import Name from .python import Python from .regex import Regex +from .size import Size + +FILTERS = { + Created.name: Created, + Duplicate.name: Duplicate, + Empty.name: Empty, + Exif.name: Exif, + Extension.name: Extension, + FileContent.name: FileContent, + Hash.name: Hash, + Name.name: Name, + Size.name: Size, + LastModified.name: LastModified, + MimeType.name: MimeType, + Python.name: Python, + Regex.name: Regex, +} # type: Dict[str, Type[Filter]] diff --git a/organize/filters/created.py b/organize/filters/created.py index f6cfc1fc..72783915 100644 --- a/organize/filters/created.py +++ b/organize/filters/created.py @@ -1,110 +1,45 @@ -import sys -from typing import Dict, Optional, SupportsFloat +from datetime import datetime, timedelta +from typing import Union -import pendulum # type: ignore -from pathlib import Path -from organize.utils import DotDict +from fs.base import FS +from schema import Optional, Or -from .filter import Filter +from .filter import Filter, FilterResult +from .utils import age_condition_applies class Created(Filter): - + """Matches files / folders by created date + + Args: 
+ years (int): specify number of years + months (int): specify number of months + weeks (float): specify number of weeks + days (float): specify number of days + hours (float): specify number of hours + minutes (float): specify number of minutes + seconds (float): specify number of seconds + mode (str): + either 'older' or 'newer'. 'older' matches files / folders created before the given + time, 'newer' matches files / folders created within the given time. + (default = 'older') + + Returns: + {created}: The datetime the file / folder was created. """ - Matches files by created date - - :param int years: - specify number of years - - :param int months: - specify number of months - - :param float weeks: - specify number of weeks - - :param float days: - specify number of days - - :param float hours: - specify number of hours - - :param float minutes: - specify number of minutes - - :param float seconds: - specify number of seconds - - :param str mode: - either 'older' or 'newer'. 'older' matches all files created before the given - time, 'newer' matches all files created within the given time. - (default = 'older') - - :param str timezone: - specify timezone - - :returns: - - ``{created.year}`` -- the year the file was created - - ``{created.month}`` -- the month the file was created - - ``{created.day}`` -- the day the file was created - - ``{created.hour}`` -- the hour the file was created - - ``{created.minute}`` -- the minute the file was created - - ``{created.second}`` -- the second the file was created - - Examples: - - Show all files on your desktop created at least 10 days ago: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Desktop' - filters: - - created: - days: 10 - actions: - - echo: 'Was created at least 10 days ago' - - Show all files on your desktop which were created within the last 5 hours: - - .. 
code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Desktop' - filters: - - created: - hours: 5 - mode: newer - actions: - - echo: 'Was created within the last 5 hours' - - - Sort pdfs by year of creation: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Documents' - filters: - - extension: pdf - - created - actions: - - move: '~/Documents/PDF/{created.year}/' - - - Use specific timezone when processing files - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Documents' - filters: - - extension: pdf - - created: - timezone: "Europe/Moscow" - actions: - - move: '~/Documents/PDF/{created.day}/{created.hour}/' - """ + name = "created" + schema_support_instance_without_args = True + arg_schema = { + Optional("years"): int, + Optional("months"): int, + Optional("weeks"): int, + Optional("days"): int, + Optional("hours"): int, + Optional("minutes"): int, + Optional("seconds"): int, + Optional("mode"): Or("older", "newer"), + } def __init__( self, @@ -116,54 +51,48 @@ def __init__( minutes=0, seconds=0, mode="older", - timezone=pendulum.tz.local_timezone(), - ) -> None: - self._mode = mode.strip().lower() - if self._mode not in ("older", "newer"): - raise ValueError("Unknown option for 'mode': must be 'older' or 'newer'.") - self.is_older = self._mode == "older" - self.timezone = timezone - self.timedelta = pendulum.duration( - years=years, - months=months, - weeks=weeks, + ): + self.age = timedelta( + weeks=52 * years + 4 * months + weeks, # quick and a bit dirty days=days, hours=hours, minutes=minutes, seconds=seconds, ) - print(bool(self.timedelta)) - - def pipeline(self, args: DotDict) -> Optional[Dict[str, pendulum.DateTime]]: - created_date = self._created(args.path) - # Pendulum bug: https://github.com/sdispater/pendulum/issues/387 - # in_words() is a workaround: total_seconds() returns 0 if years are given - if self.timedelta.in_words(): - is_past = (created_date + self.timedelta).is_past() - 
match = self.is_older == is_past - else: - match = True - if match: - return {"created": created_date} - return None + self.mode = mode.strip().lower() + if self.mode not in ("older", "newer"): + raise ValueError("Unknown option for 'mode': must be 'older' or 'newer'.") - def _created(self, path: Path) -> pendulum.DateTime: - # see https://stackoverflow.com/a/39501288/300783 - stat = path.stat() - time = 0 # type: SupportsFloat - if sys.platform.startswith("win"): - time = stat.st_ctime - else: - try: - time = stat.st_birthtime - except AttributeError: - # We're probably on Linux. No easy way to get creation dates here, - # so we'll settle for when its content was last modified. - time = stat.st_mtime - return pendulum.from_timestamp(float(time), tz=self.timezone) + def matches_created_time(self, created: Union[None, datetime]): + match = True + if self.age.total_seconds(): + if not created: + match = False + else: + match = age_condition_applies( + dt=created, + age=self.age, + mode=self.mode, + reference=datetime.now(), + ) + return match + + def pipeline(self, args: dict) -> FilterResult: + fs = args["fs"] # type: FS + fs_path = args["fs_path"] + + created = fs.getinfo(fs_path, namespaces=["details"]).created + if created: + created = created.astimezone() + + match = self.matches_created_time(created) + return FilterResult( + matches=match, + updates={self.get_name(): created}, + ) def __str__(self): - return "[Created] All files %s than %s" % ( + return "[Created] All files / folders %s than %s" % ( self._mode, - self.timedelta.in_words(), + self.timedelta, ) diff --git a/organize/filters/duplicate.py b/organize/filters/duplicate.py index 3b38dca6..8354bf5f 100644 --- a/organize/filters/duplicate.py +++ b/organize/filters/duplicate.py @@ -4,145 +4,209 @@ Based on this stackoverflow answer: https://stackoverflow.com/a/36113168/300783 -Which was updated for python3 in: +Which I updated for python3 in: 
https://gist.github.com/tfeldmann/fc875e6630d11f2256e746f67a09c1ae - -The script on stackoverflow has a bug which could lead to false positives. This is fixed -here by using a tuple (file_size, hash) as key in the comparison dictionaries. """ import hashlib -import os from collections import defaultdict -from typing import DefaultDict as DDict -from typing import Dict, List, Set, Tuple, Union +from typing import Dict, NamedTuple, Set, Union -from organize.utils import fullpath +from fs.base import FS +from fs.path import basename -from .filter import Filter +from organize.utils import is_same_resource +from .filter import Filter, FilterResult -def chunk_reader(fobj, chunk_size=1024): - """ Generator that reads a file in chunks of bytes """ - while True: - chunk = fobj.read(chunk_size) - if not chunk: - return - yield chunk +HASH_ALGORITHM = "sha1" +DETECTION_METHODS = ("first_seen", "name", "created", "lastmodified") +DETECTION_METHOD_REGEX = r"(-?)\s*?({})".format("|".join(DETECTION_METHODS)) -def get_hash(filename, first_chunk_only=False, hash_algo=hashlib.sha1): - hashobj = hash_algo() - with open(filename, "rb") as f: - if first_chunk_only: - hashobj.update(f.read(1024)) - else: - for chunk in chunk_reader(f): - hashobj.update(chunk) - return hashobj.digest() +class File(NamedTuple): + fs: FS + path: str + base_path: str + @property + def lastmodified(self): + return self.fs.getmodified(self.path) -class Duplicate(Filter): + @property + def created(self): + return self.fs.getinfo(self.path, namespaces=["details"]).created - """ - Finds duplicate files. 
+ @property + def name(self): + return basename(self.path) + + +def getsize(f: File): + return f.fs.getsize(f.path) + + +def full_hash(f: File): + return f.fs.hash(f.path, name=HASH_ALGORITHM) + + +def first_chunk_hash(f: File): + hash_object = hashlib.new(HASH_ALGORITHM) + with f.fs.openbin(f.path) as file_: + hash_object.update(file_.read(1024)) + return hash_object.hexdigest() + + +def detect_original(known: File, new: File, method: str, reverse: bool): + """Returns a tuple (original file, duplicate)""" + + if method == "first_seen": + return (known, new) if not reverse else (new, known) + elif method == "name": + return tuple(sorted((known, new), key=lambda x: x.name, reverse=reverse)) + elif method == "created": + return tuple(sorted((known, new), key=lambda x: x.created, reverse=reverse)) + elif method == "lastmodified": + return tuple( + sorted((known, new), key=lambda x: x.lastmodified, reverse=reverse) + ) + else: + raise ValueError("Unknown original detection method: %s" % method) + + +class Duplicate(Filter): + """A fast duplicate file finder. This filter compares files byte by byte and finds identical files with potentially different filenames. - :returns: - - ``{duplicate}`` -- path to the duplicate source - - Examples: - - Show all duplicate files in your desktop and download folder (and their - subfolders). - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: - - ~/Desktop - - ~/Downloads - subfolders: true - filters: - - duplicate - actions: - - echo: "{path} is a duplicate of {duplicate}" - - Note: - This filter uses the glob.glob() function to retrieve the list of files. Duplicate list could be returned in arbitrary order depending on the filesystem and the Python glob.glob() function itself. If you have several duplicates of the same file in a folder, you can get arbitrary sorting of results. 
For a detailed explanation and proposed fix check: https://github.com/tfeldmann/organize/issues/158 + Args: + detect_original_by (str): + Detection method to distinguish between original and duplicate. + Possible values are: + + - `"first_seen"`: Whatever file is visited first is the original. This + depends on the order of your location entries. + - `"name"`: The first entry sorted by name is the original. + - `"created"`: The first entry sorted by creation date is the original. + - `"lastmodified"`: The first file sorted by date of last modification is the original. + + You can reverse the sorting method by prefixing a `-`. + + So with `detect_original_by: "-created"` the file with the older creation date is + the original and the younger file is the duplicate. This works on all methods, for + example `"-first_seen"`, `"-name"`, `"-created"`, `"-lastmodified"`. + + **Returns:** + + `{duplicate.original}` - The path to the original """ - def __init__(self) -> None: - self.files_for_size = defaultdict(list) # type: DDict[int, List[str]] + name = "duplicate" + schema_support_instance_without_args = True - # to prevent false positives the keys must be tuples of (file_size, hash). - self.files_for_small_hash = defaultdict( - list - ) # type: DDict[Tuple[int, bytes], List[str]] - self.file_for_full_hash = dict() # type: Dict[Tuple[int, bytes], str] + def __init__(self, detect_original_by="first_seen"): + if detect_original_by.startswith("-"): + self.detect_original_by = detect_original_by[1:] + self.select_orignal_reverse = True + else: + self.detect_original_by = detect_original_by + self.select_orignal_reverse = False - # we keep track of which files we already computed the hashes for so we only do - # that once. 
- self.small_hash_known = set() # type: Set[str] - self.full_hash_known = set() # type: Set[str] + self.files_for_size = defaultdict(list) + self.files_for_chunk = defaultdict(list) + self.file_for_hash = dict() - def matches(self, path: str) -> Union[bool, Dict[str, str]]: - # the exact same path has already been handled. This might happen if path is a - # symlink which resolves to file that is already known. We skip these. - if path in self.small_hash_known: + # we keep track of the files we already computed the hashes for so we only do + # that once. + self.seen_files = set() # type: Set[File] + self.first_chunk_known = set() # type: Set[File] + self.hash_known = set() # type: Set[File] + + def matches(self, fs: FS, path: str, base_path: str): + file_ = File(fs=fs, path=path, base_path=base_path) + # the exact same path has already been handled. This happens if multiple + # locations emit this file in a single rule or if we follow symlinks. + # We skip these. + if file_ in self.seen_files or any( + is_same_resource(file_.fs, file_.path, x.fs, x.path) + for x in self.seen_files + ): return False + self.seen_files.add(file_) + # check for files with equal size - file_size = os.path.getsize(path) # type: int + file_size = getsize(file_) same_size = self.files_for_size[file_size] - candidates_fsize = same_size[:] - same_size.append(path) - if not candidates_fsize: + same_size.append(file_) + if len(same_size) == 1: # the file is unique in size and cannot be a duplicate return False - # for all other files with the same file size, get their hash of the first 1024 - # bytes - for c in candidates_fsize: - if c not in self.small_hash_known: - try: - c_small_hash = get_hash(c, first_chunk_only=True) - self.files_for_small_hash[(file_size, c_small_hash)].append(c) - self.small_hash_known.add(c) - except OSError: - pass - - # check small hash collisions with the current file - small_hash = get_hash(path, first_chunk_only=True) - same_small_hash = 
self.files_for_small_hash[(file_size, small_hash)] - candidates_shash = same_small_hash[:] - same_small_hash.append(path) - self.small_hash_known.add(path) - if not candidates_shash: + # for all other files with the same file size: + # make sure we know their hash of their first 1024 byte chunk + for f in same_size[:-1]: + if f not in self.first_chunk_known: + chunk_hash = first_chunk_hash(f) + self.first_chunk_known.add(f) + self.files_for_chunk[chunk_hash].append(f) + + # check first chunk hash collisions with the current file + chunk_hash = first_chunk_hash(file_) + same_first_chunk = self.files_for_chunk[chunk_hash] + same_first_chunk.append(file_) + self.first_chunk_known.add(file_) + if len(same_first_chunk) == 1: # the file has a unique small hash and cannot be a duplicate return False - # For all other files with the same file size and small hash get the full hash - for c in candidates_shash: - if c not in self.full_hash_known: - try: - c_full_hash = get_hash(c, first_chunk_only=False) - self.file_for_full_hash[(file_size, c_full_hash)] = c - self.full_hash_known.add(c) - except OSError: - pass + # Ensure we know the full hashes of all files with the same first chunk as + # the investigated file + for f in same_first_chunk[:-1]: + if f not in self.hash_known: + hash_ = full_hash(f) + self.hash_known.add(f) + self.file_for_hash[hash_] = f # check full hash collisions with the current file - full_hash = get_hash(path, first_chunk_only=False) - duplicate = self.file_for_full_hash.get((file_size, full_hash)) - if duplicate: - return {"duplicate": duplicate} - self.file_for_full_hash[(file_size, full_hash)] = path + hash_ = full_hash(file_) + self.hash_known.add(file_) + known = self.file_for_hash.get(hash_) + if known: + original, duplicate = detect_original( + known=known, + new=file_, + method=self.detect_original_by, + reverse=self.select_orignal_reverse, + ) + if known != original: + self.file_for_hash[hash_] = original + + resource_changed_reason = 
"duplicate of" if known != original else None + from organize.core import syspath_or_exception + + return { + "fs": duplicate.fs, + "fs_path": duplicate.path, + "fs_base_path": duplicate.base_path, + "resource_changed": resource_changed_reason, + self.get_name(): { + "original": syspath_or_exception(original.fs, original.path) + }, + } + return False def pipeline(self, args): - return self.matches(str(fullpath(args["path"]))) + fs = args["fs"] + fs_path = args["fs_path"] + fs_base_path = args["fs_base_path"] + if fs.isdir(fs_path): + raise EnvironmentError("Dirs are not supported") + result = self.matches(fs=fs, path=fs_path, base_path=fs_base_path) + if result is False: + return FilterResult(matches=False, updates={}) + return FilterResult(matches=True, updates=result) def __str__(self) -> str: return "Duplicate()" diff --git a/organize/filters/empty.py b/organize/filters/empty.py new file mode 100644 index 00000000..490f5659 --- /dev/null +++ b/organize/filters/empty.py @@ -0,0 +1,28 @@ +from fs.base import FS + +from .filter import Filter, FilterResult + + +class Empty(Filter): + + """Finds empty dirs and files""" + + name = "empty" + + @classmethod + def get_schema(cls): + return cls.get_name_schema() + + def pipeline(self, args: dict) -> FilterResult: + fs = args["fs"] # type: FS + fs_path = args["fs_path"] # type: str + + if fs.isdir(fs_path): + result = fs.isempty(fs_path) + else: + result = fs.getsize(fs_path) == 0 + + return FilterResult(matches=result, updates={}) + + def __str__(self) -> str: + return "Empty()" diff --git a/organize/filters/exif.py b/organize/filters/exif.py index 39f147ef..071b7606 100644 --- a/organize/filters/exif.py +++ b/organize/filters/exif.py @@ -1,19 +1,17 @@ import collections from typing import Any, DefaultDict, Dict, Mapping, Optional, Union -import exifread # type: ignore +import exifread from pathlib import Path -from .filter import Filter +from .filter import Filter, FilterResult ExifDict = Mapping[str, Union[str, 
Mapping[str, str]]] class Exif(Filter): - - """ - Filter by image EXIF data + """Filter by image EXIF data The `exif` filter can be used as a filter as well as a way to get exif information into your actions. @@ -26,91 +24,30 @@ class Exif(Filter): - ``{exif.exif}`` -- Exif information - ``{exif.gps}`` -- GPS information - ``{exif.interoperability}`` -- Interoperability information - - Examples: - - Show available EXIF data of your pictures: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: ~/Pictures - subfolders: true - filters: - - exif - actions: - - echo: "{exif}" - - - Copy all images which contain GPS information while keeping subfolder - structure: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: ~/Pictures - subfolders: true - filters: - - exif: - gps.gpsdate - actions: - - copy: ~/Pictures/with_gps/{relative_path}/ - - - Filter by camera manufacturer: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: ~/Pictures - subfolders: true - filters: - - exif: - image.model: Nikon D3200 - actions: - - move: '~/Pictures/My old Nikon/' - - - Sort images by camera manufacturer. This will create folders for each camera - model (for example "Nikon D3200", "iPhone 6s", "iPhone 5s", "DMC-GX80") and - move the pictures accordingly: - - .. 
code-block:: yaml - :caption: config.yaml - - rules: - - folders: ~/Pictures - subfolders: true - filters: - - extension: jpg - - exif: - image.model - actions: - - move: '~/Pictures/{exif.image.model}/' """ + name = "exif" + arg_schema = None + schema_support_instance_without_args = True + def __init__(self, *required_tags: str, **tag_filters: str) -> None: self.args = required_tags # expected exif keys self.kwargs = tag_filters # exif keys with expected values def category_dict(self, tags: Mapping[str, str]) -> ExifDict: - result = collections.defaultdict(dict) # type: DefaultDict[str, Dict[str, str]] + result = collections.defaultdict(dict) # type: DefaultDict for key, value in tags.items(): if " " in key: category, field = key.split(" ", maxsplit=1) result[category][field] = value else: - result[key] = value # type: ignore - return result - - def matches(self, path: Path) -> Union[bool, ExifDict]: - # NOTE: This should return Union[Literal[False], ExifDict] but Literal is only - # available in Python>=3.8. 
- with path.open("rb") as f: - exiftags = exifread.process_file(f, details=False) # type: Dict + result[key] = value + return dict(result) + + def matches(self, exiftags: dict) -> bool: if not exiftags: return False - - tags = {k.lower(): v.printable for k, v in exiftags.items()} + tags = {k.lower(): v for k, v in exiftags.items()} # no match if expected tag is not found normkey = lambda k: k.replace(".", " ").lower() @@ -122,13 +59,22 @@ def matches(self, path: Path) -> Union[bool, ExifDict]: key = normkey(key) if not (key in tags and tags[key].lower() == value.lower()): return False - return self.category_dict(tags) + return True + + def pipeline(self, args: dict) -> FilterResult: + fs = args["fs"] + fs_path = args["fs_path"] + with fs.openbin(fs_path) as f: + exiftags = exifread.process_file(f, details=False) + + tags = {k.lower(): v.printable for k, v in exiftags.items()} + matches = self.matches(tags) + exif_result = self.category_dict(tags) - def pipeline(self, args: Mapping[str, Any]) -> Optional[Dict[str, ExifDict]]: - tags = self.matches(args["path"]) - if isinstance(tags, dict): - return {"exif": tags} - return None + return FilterResult( + matches=matches, + updates={self.get_name(): exif_result}, + ) def __str__(self) -> str: return "EXIF(%s)" % ", ".join("%s=%s" % (k, v) for k, v in self.kwargs.items()) diff --git a/organize/filters/extension.py b/organize/filters/extension.py index f8f74946..a937c077 100644 --- a/organize/filters/extension.py +++ b/organize/filters/extension.py @@ -1,127 +1,58 @@ -from typing import Dict, Optional, Union +from typing import Union -from pathlib import Path -from organize.utils import DotDict, flatten +from fs.base import FS -from .filter import Filter +from organize.utils import flatten - -class ExtensionResult: - def __init__(self, ext): - self.ext = ext[1:] if ext.startswith(".") else ext - - @property - def lower(self): - return self.ext.lower() - - @property - def upper(self): - return self.ext.upper() - - def 
__str__(self): - return self.ext +from .filter import Filter, FilterResult class Extension(Filter): + """Filter by file extension - """ - Filter by file extension - - :param extensions: - The file extensions to match (does not need to start with a colon). - - :returns: - - ``{extension}`` -- the original file extension (without colon) - - ``{extension.lower}`` -- the file extension in lowercase - - ``{extension.upper}`` -- the file extension in UPPERCASE - - Examples: - - Match a single file extension: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Desktop' - filters: - - extension: png - actions: - - echo: 'Found PNG file: {path}' + Args: + *extensions (list(str) or str): + The file extensions to match (does not need to start with a colon). - - Match multiple file extensions: + **Returns:** - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Desktop' - filters: - - extension: - - .jpg - - jpeg - actions: - - echo: 'Found JPG file: {path}' - - - Make all file extensions lowercase: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Desktop' - filters: - - Extension - actions: - - rename: '{path.stem}.{extension.lower}' - - - Using extension lists: - - .. 
code-block:: yaml - :caption: config.yaml - - img_ext: &img - - png - - jpg - - tiff - - audio_ext: &audio - - mp3 - - wav - - ogg - - rules: - - folders: '~/Desktop' - filters: - - extension: - - *img - - *audio - actions: - - echo: 'Found media file: {path}' + - `{extension}`: the original file extension (without colon) """ + name = "extension" + schema_support_instance_without_args = True + def __init__(self, *extensions) -> None: self.extensions = list(map(self.normalize_extension, flatten(list(extensions)))) @staticmethod def normalize_extension(ext: str) -> str: - """ strip colon and convert to lowercase """ + """strip colon and convert to lowercase""" if ext.startswith("."): return ext[1:].lower() else: return ext.lower() - def matches(self, path: Path) -> Union[bool, str]: + def matches(self, ext: str) -> Union[bool, str]: if not self.extensions: return True - if not path.suffix: + if not ext: return False - return self.normalize_extension(path.suffix) in self.extensions - - def pipeline(self, args: DotDict) -> Optional[Dict[str, ExtensionResult]]: - if self.matches(args.path): - result = ExtensionResult(args.path.suffix) - return {"extension": result} - return None + return self.normalize_extension(ext) in self.extensions + + def pipeline(self, args: dict) -> FilterResult: + fs = args["fs"] # type: FS + fs_path = args["fs_path"] + if fs.isdir(fs_path): + raise ValueError("Dirs not supported") + + # suffix is the extension with dot + suffix = fs.getinfo(fs_path).suffix + ext = suffix[1:] + return FilterResult( + matches=bool(self.matches(ext)), + updates={self.get_name(): ext}, + ) def __str__(self): return "Extension(%s)" % ", ".join(self.extensions) diff --git a/organize/filters/file_content.py b/organize/filters/file_content.py deleted file mode 100644 index 9ba8fff9..00000000 --- a/organize/filters/file_content.py +++ /dev/null @@ -1,82 +0,0 @@ -import re -from typing import Any, Dict, Mapping, Optional - -from pathlib import Path - -from .filter import 
Filter - - -# not supported: .gif, .jpg, .mp3, .ogg, .png, .tiff, .wav -SUPPORTED_EXTENSIONS = ( - ".csv .doc .docx .eml .epub .json .html .msg .odt .pdf .pptx .ps .rtf .txt .xlsx .xls" -).split() - - -class FileContent(Filter): - - r""" - Matches file content with the given regular expression - - :param str expr: - The regular expression to be matched. - - Any named groups in your regular expression will be returned like this: - - :returns: - - ``{filecontent.yourgroupname}`` -- The text matched with the named group - ``(?P)`` - - Examples: - - - Show the content of all your PDF files: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: ~/Documents - filters: - - extension: pdf - - filecontent: '(?P.*)' - actions: - - echo: "{filecontent.all}" - - - - Match an invoice with a regular expression and sort by customer: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Desktop' - filters: - - filecontent: 'Invoice.*Customer (?P\w+)' - actions: - - move: '~/Documents/Invoices/{filecontent.customer}/' - """ - - def __init__(self, expr) -> None: - self.expr = re.compile(expr, re.MULTILINE | re.DOTALL) - - def matches(self, path: Path) -> Any: - if path.suffix.lower() not in SUPPORTED_EXTENSIONS: - return - try: - import textract # type: ignore - - content = textract.process(str(path), errors="ignore") - return self.expr.search(content.decode("utf-8", errors="ignore")) - except ImportError as e: - raise ImportError( - "textract is not installed. 
" - "Install with pip install organize-tool[textract]" - ) from e - except textract.exceptions.CommandLineError: - pass - - def pipeline(self, args: Mapping) -> Optional[Dict[str, Dict]]: - match = self.matches(args["path"]) - if match: - result = match.groupdict() - return {"filecontent": result} - return None diff --git a/organize/filters/filecontent.py b/organize/filters/filecontent.py new file mode 100644 index 00000000..9742e367 --- /dev/null +++ b/organize/filters/filecontent.py @@ -0,0 +1,72 @@ +import re +from typing import Any, Dict, Mapping, Optional + +from fs.base import FS +from fs.errors import NoSysPath + +from .filter import Filter, FilterResult + +SUPPORTED_EXTENSIONS = ( + # not supported: .gif, .jpg, .mp3, .ogg, .png, .tiff, .wav + ".csv .doc .docx .eml .epub .json .html .msg .odt .pdf .pptx .ps .rtf .txt .xlsx .xls" +).split() + + +class FileContent(Filter): + """Matches file content with the given regular expression + + Args: + expr (str): The regular expression to be matched. + + Any named groups (`(?P.*)`) in your regular expression will + be returned like this: + + **Returns:** + + - `{filecontent.groupname}`: The text matched with the named group + `(?P)` + """ + + name = "filecontent" + schema_support_instance_without_args = True + + def __init__(self, expr="(?P.*)") -> None: + self.expr = re.compile(expr, re.MULTILINE | re.DOTALL) + + def matches(self, path: str, extension: str) -> Any: + if extension not in SUPPORTED_EXTENSIONS: + return + try: + import textract + + content = textract.process( + str(path), + extension=extension, + errors="ignore", + ) + return self.expr.search(content.decode("utf-8", errors="ignore")) + except ImportError as e: + raise ImportError( + "textract is not installed. 
" + "Install with pip install organize-tool[textract]" + ) from e + except textract.exceptions.CommandLineError: + pass + + def pipeline(self, args: dict) -> FilterResult: + fs = args["fs"] # type: FS + fs_path = args["fs_path"] + if fs.isdir(fs_path): + raise ValueError("Dirs not supported") + extension = fs.getinfo(fs_path).suffix + try: + syspath = fs.getsyspath(fs_path) + except NoSysPath as e: + raise EnvironmentError( + "filecontent only supports the local filesystem" + ) from e + match = self.matches(path=syspath, extension=extension) + return FilterResult( + matches=bool(match), + updates={self.get_name(): match.groupdict()}, + ) diff --git a/organize/filters/filename.py b/organize/filters/filename.py deleted file mode 100644 index a5a9125c..00000000 --- a/organize/filters/filename.py +++ /dev/null @@ -1,122 +0,0 @@ -from typing import Any, List, Union, Optional, Dict - -import simplematch # type: ignore - -from pathlib import Path - -from .filter import Filter - - -class Filename(Filter): - - """ - Match files by filename - - :param str match: - A matching string in `simplematch`-syntax - (https://github.com/tfeldmann/simplematch) - - :param str startswith: - The filename must begin with the given string - - :param str contains: - The filename must contain the given string - - :param str endswith: - The filename (without extension) must end with the given string - - :param bool case_sensitive = True: - By default, the matching is case sensitive. Change this to False to use - case insensitive matching. - - Examples: - - Match all files starting with 'Invoice': - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Desktop' - filters: - - filename: - startswith: Invoice - actions: - - echo: 'This is an invoice' - - - Match all files starting with 'A' end containing the string 'hole' - (case insensitive) - - .. 
code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Desktop' - filters: - - filename: - startswith: A - contains: hole - case_sensitive: false - actions: - - echo: 'Found a match.' - - - Match all files starting with 'A' or 'B' containing '5' or '6' and ending with - '_end' - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Desktop' - filters: - - filename: - startswith: - - A - - B - contains: - - 5 - - 6 - endswith: _end - case_sensitive: false - actions: - - echo: 'Found a match.' - """ - - def __init__( - self, match="*", *, startswith="", contains="", endswith="", case_sensitive=True - ) -> None: - self.matcher = simplematch.Matcher(match, case_sensitive=case_sensitive) - self.startswith = self.create_list(startswith, case_sensitive) - self.contains = self.create_list(contains, case_sensitive) - self.endswith = self.create_list(endswith, case_sensitive) - self.case_sensitive = case_sensitive - - def matches(self, path: Path) -> bool: - filename = path.stem - if not self.case_sensitive: - filename = filename.lower() - - is_match = ( - self.matcher.test(filename) - and any(x in filename for x in self.contains) - and any(filename.startswith(x) for x in self.startswith) - and any(filename.endswith(x) for x in self.endswith) - ) - return is_match - - def pipeline(self, args: Dict) -> Optional[Dict[str, Any]]: - path = args["path"] - result = self.matches(path) - if result: - return {"filename": self.matcher.match(path.stem)} - return None - - @staticmethod - def create_list(x: Union[int, str, List[Any]], case_sensitive: bool) -> List[str]: - if isinstance(x, (int, float)): - x = str(x) - if isinstance(x, str): - x = [x] - x = [str(x) for x in x] - if not case_sensitive: - x = [x.lower() for x in x] - return x diff --git a/organize/filters/filesize.py b/organize/filters/filesize.py deleted file mode 100644 index cee7d884..00000000 --- a/organize/filters/filesize.py +++ /dev/null @@ -1,123 +0,0 @@ -import operator -import re 
-from typing import Callable, Dict, Optional, Sequence, Set, Tuple - -from organize.utils import DotDict, flattened_string_list, fullpath - -from .filter import Filter - -OPERATORS = { - "<": operator.lt, - "<=": operator.le, - "==": operator.eq, - "=": operator.eq, - "": operator.eq, - ">=": operator.ge, - ">": operator.gt, -} -SIZE_REGEX = re.compile( - r"^(?P[<>=]*)(?P(\d*\.)?\d+)(?P[kmgtpezy]?i?)b?$" -) - - -def create_constrains(inp: str) -> Set[Tuple[Callable[[int, int], bool], int]]: - """ - Given an input string it returns a list of tuples (comparison operator, - number of bytes). - - Accepted formats are: '30k', '>= 5 TiB, <10tb', '< 60 tb', ... - Calculation is in bytes, even if the 'b' is lowercase. If an 'i' is present - we calculate base 1024. - """ - result = set() # type: Set[Tuple[Callable[[int, int], bool], int]] - parts = inp.replace(" ", "").lower().split(",") - for part in parts: - try: - reg_match = SIZE_REGEX.match(part) - if reg_match: - match = reg_match.groupdict() - op = OPERATORS[match["op"]] - num = float(match["num"]) if "." in match["num"] else int(match["num"]) - unit = match["unit"] - base = 1024 if unit.endswith("i") else 1000 - exp = "kmgtpezy".index(unit[0]) + 1 if unit else 0 - numbytes = num * base ** exp - result.add((op, numbytes)) - except (AttributeError, KeyError, IndexError, ValueError, TypeError) as e: - raise ValueError("Invalid size format: %s" % part) from e - return result - - -def satisfies_constrains(size, constrains): - return all(op(size, p_size) for op, p_size in constrains) - - -class FileSize(Filter): - """ - Matches files by file size - - :param str conditions: - - Accepts file size conditions, e.g: ``'>= 500 MB'``, ``'< 20k'``, ``'>0'``, - ``'= 10 KiB'``. - - It is possible to define both lower and upper conditions like this: - ``'>20k, < 1 TB'``, ``'>= 20 Mb, <25 Mb'``. The filter will match if all given - conditions are satisfied. - - - Accepts all units from KB to YB. 
- - If no unit is given, kilobytes are assumend. - - If binary prefix is given (KiB, GiB) the size is calculated using base 1024. - - :returns: - - ``{filesize.bytes}`` -- File size in bytes - - Examples: - - Trash big downloads: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Downloads' - filters: - - filesize: '> 0.5 GB' - actions: - - trash - - - Move all JPEGS bigger > 1MB and <10 MB. Search all subfolders and keep the´ - original relative path. - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Pictures' - subfolders: true - filters: - - extension: - - jpg - - jpeg - - filesize: '>1mb, <10mb' - actions: - - move: '~/Pictures/sorted/{relative_path}/' - - """ - - def __init__(self, *conditions: Sequence[str]) -> None: - self.conditions = ", ".join(flattened_string_list(list(conditions))) - self.constrains = create_constrains(self.conditions) - if not self.constrains: - raise ValueError("No size(s) given!") - - def matches(self, filesize: int) -> bool: - return all(op(filesize, c_size) for op, c_size in self.constrains) - - def pipeline(self, args: DotDict) -> Optional[Dict[str, Dict[str, int]]]: - file_size = fullpath(args.path).stat().st_size - if self.matches(file_size): - return {"filesize": {"bytes": file_size}} - return None - - def __str__(self) -> str: - return "FileSize({})".format(" ".join(self.conditions)) diff --git a/organize/filters/filter.py b/organize/filters/filter.py index a2c6ee87..47eba6ab 100644 --- a/organize/filters/filter.py +++ b/organize/filters/filter.py @@ -1,29 +1,79 @@ +from schema import Schema, Optional, Or from textwrap import indent -from typing import Any, Callable, Dict, Optional, Union +from typing import Any, Dict, Union, NamedTuple +from organize.console import pipeline_message, pipeline_error -from organize.utils import DotDict -FilterResult = Union[Dict[str, Any], bool, None] +class FilterResult(NamedTuple): + matches: bool + updates: dict class Filter: - pre_print_hook 
= None # type: Optional[Callable] + name = None # type: Union[str, None] + arg_schema = None # type: Union[Schema, None] + schema_support_instance_without_args = False + + @classmethod + def get_name(cls): + if cls.name: + return cls.name + return cls.__name__.lower() + + @classmethod + def get_name_schema(cls): + return Schema( + Or("not " + cls.get_name(), cls.get_name()), + name=cls.get_name(), + description=cls.get_description(), + ) + + @classmethod + def get_schema(cls): + name = cls.get_name_schema() + + if cls.arg_schema: + arg_schema = cls.arg_schema + else: + arg_schema = Or( + str, + [str], + Schema({}, ignore_extra_keys=True), + ) + + if cls.schema_support_instance_without_args: + return Or(name, {name: arg_schema}) + return { + name: arg_schema, + } + + @classmethod + def get_description(cls): + """the first line of the class docstring""" + return cls.__doc__.splitlines()[0] def run(self, **kwargs: Dict) -> FilterResult: - return self.pipeline(DotDict(kwargs)) + return self.pipeline(dict(kwargs)) - def pipeline(self, args: DotDict) -> FilterResult: + def pipeline(self, args: dict) -> FilterResult: raise NotImplementedError - def print(self, msg: str) -> None: - """ print a message for the user """ - if callable(self.pre_print_hook): - self.pre_print_hook() # pylint: disable=not-callable - print(indent("- (%s) %s" % (self.__class__.__name__, msg), " " * 4)) + def print(self, *msg: str) -> None: + """print a message for the user""" + text = " ".join(str(x) for x in msg) + for line in text.splitlines(): + pipeline_message(self.get_name(), line) + + def print_error(self, msg: str): + for line in msg.splitlines(): + pipeline_error(self.get_name(), line) + + def set_logic(self, inverted=False): + self.inverted = inverted def __str__(self) -> str: - """ Return filter name and properties """ - return self.__class__.__name__ + """Return filter name and properties""" + return self.get_name() def __repr__(self) -> str: return "<%s>" % str(self) diff --git 
a/organize/filters/hash.py b/organize/filters/hash.py new file mode 100644 index 00000000..e5a0a6e4 --- /dev/null +++ b/organize/filters/hash.py @@ -0,0 +1,58 @@ +import logging + +from fs.base import FS + +from organize.utils import Template + +from .filter import Filter, FilterResult + +logger = logging.getLogger(__name__) + + +class Hash(Filter): + + """Calculates the hash of a file. + + Args: + algorithm (str): Any hashing algorithm available to python's `hashlib`. + `md5` by default. + + Algorithms guaranteed to be available are + `shake_256`, `sha3_256`, `sha1`, `sha3_224`, `sha384`, `sha512`, `blake2b`, + `blake2s`, `sha256`, `sha224`, `shake_128`, `sha3_512`, `sha3_384` and `md5`. + + Depending on your python installation and installed libs there may be additional + hash algorithms to chose from. + + To list the available algorithms on your installation run this in a python + interpreter: + + ```py + >>> import hashlib + >>> hashlib.algorithms_available + {'shake_256', 'whirlpool', 'mdc2', 'blake2s', 'sha224', 'shake_128', 'sha3_512', 'sha3_224', 'sha384', 'md5', 'sha1', 'sha512_256', 'blake2b', 'sha256', 'sha512_224', 'ripemd160', 'sha3_384', 'md4', 'sm3', 'sha3_256', 'md5-sha1', 'sha512'} + ``` + + **Returns:** + + - `{hash}`: The hash of the file. 
+ """ + + name = "hash" + schema_support_instance_without_args = True + + def __init__(self, algorithm="md5"): + self.algorithm = Template.from_string(algorithm) + + def pipeline(self, args: dict): + fs = args["fs"] # type: FS + fs_path = args["fs_path"] # type: str + algo = self.algorithm.render(**args) + hash_ = fs.hash(fs_path, name=algo) + return FilterResult( + matches=True, + updates={self.get_name(): hash_}, + ) + + def __str__(self) -> str: + return "Hash(algorithm={})".format(self.algorithm) diff --git a/organize/filters/last_modified.py b/organize/filters/last_modified.py deleted file mode 100644 index 8f079a75..00000000 --- a/organize/filters/last_modified.py +++ /dev/null @@ -1,156 +0,0 @@ -from typing import Dict, Optional - -import pendulum # type: ignore -from pathlib import Path -from organize.utils import DotDict - -from .filter import Filter - - -class LastModified(Filter): - - """ - Matches files by last modified date - - :param int years: - specify number of years - - :param int months: - specify number of months - - :param float weeks: - specify number of weeks - - :param float days: - specify number of days - - :param float hours: - specify number of hours - - :param float minutes: - specify number of minutes - - :param float seconds: - specify number of seconds - - :param str mode: - either 'older' or 'newer'. 'older' matches all files last modified - before the given time, 'newer' matches all files last modified within - the given time. 
(default = 'older') - - :param str timezone: - specify timezone - - :returns: - - ``{lastmodified.year}`` -- the year the file was last modified - - ``{lastmodified.month}`` -- the month the file was last modified - - ``{lastmodified.day}`` -- the day the file was last modified - - ``{lastmodified.hour}`` -- the hour the file was last modified - - ``{lastmodified.minute}`` -- the minute the file was last modified - - ``{lastmodified.second}`` -- the second the file was last modified - - Examples: - - Show all files on your desktop last modified at least 10 days ago: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Desktop' - filters: - - lastmodified: - days: 10 - actions: - - echo: 'Was modified at least 10 days ago' - - - Show all files on your desktop which were modified within the last - 5 hours: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Desktop' - filters: - - lastmodified: - hours: 5 - mode: newer - actions: - - echo: 'Was modified within the last 5 hours' - - - Sort pdfs by year of last modification - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Documents' - filters: - - extension: pdf - - LastModified - actions: - - move: '~/Documents/PDF/{lastmodified.year}/' - - - Use specific timezone when processing files - - .. 
code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Documents' - filters: - - extension: pdf - - lastmodified: - timezone: "Europe/Moscow" - actions: - - move: '~/Documents/PDF/{lastmodified.day}/{lastmodified.hour}/' - """ - - def __init__( - self, - years=0, - months=0, - weeks=0, - days=0, - hours=0, - minutes=0, - seconds=0, - mode="older", - timezone=pendulum.tz.local_timezone(), - ) -> None: - self._mode = mode.strip().lower() - if self._mode not in ("older", "newer"): - raise ValueError("Unknown option for 'mode': must be 'older' or 'newer'.") - self.is_older = self._mode == "older" - self.timezone = timezone - self.timedelta = pendulum.duration( - years=years, - months=months, - weeks=weeks, - days=days, - hours=hours, - minutes=minutes, - seconds=seconds, - ) - - def pipeline(self, args: DotDict) -> Optional[Dict[str, pendulum.DateTime]]: - file_modified = self._last_modified(args.path) - # Pendulum bug: https://github.com/sdispater/pendulum/issues/387 - # in_words() is a workaround: total_seconds() returns 0 if years are given - if self.timedelta.in_words(): - is_past = (file_modified + self.timedelta).is_past() - match = self.is_older == is_past - else: - match = True - if match: - return {"lastmodified": file_modified} - return None - - def _last_modified(self, path: Path) -> pendulum.DateTime: - return pendulum.from_timestamp(float(path.stat().st_mtime), tz=self.timezone) - - def __str__(self): - return "[LastModified] All files last modified %s than %s" % ( - self._mode, - self.timedelta.in_words(), - ) diff --git a/organize/filters/lastmodified.py b/organize/filters/lastmodified.py new file mode 100644 index 00000000..dd749aa9 --- /dev/null +++ b/organize/filters/lastmodified.py @@ -0,0 +1,99 @@ +from datetime import datetime, timedelta +from typing import Union + +from fs.base import FS +from schema import Optional, Or + +from .filter import Filter, FilterResult +from .utils import age_condition_applies + + +class 
class LastModified(Filter):

    """Matches files by last modified date

    Args:
        years (int): specify number of years
        months (int): specify number of months
        weeks (float): specify number of weeks
        days (float): specify number of days
        hours (float): specify number of hours
        minutes (float): specify number of minutes
        seconds (float): specify number of seconds
        mode (str):
            either 'older' or 'newer'. 'older' matches files / folders last modified
            before the given time, 'newer' matches files / folders last modified
            within the given time. (default = 'older')

    Returns:
        {lastmodified}: The datetime the files / folders was last modified.
    """

    name = "lastmodified"
    schema_support_instance_without_args = True
    arg_schema = {
        Optional("mode"): Or("older", "newer"),
        Optional("years"): int,
        Optional("months"): int,
        Optional("weeks"): int,
        Optional("days"): int,
        Optional("hours"): int,
        Optional("minutes"): int,
        Optional("seconds"): int,
    }

    def __init__(
        self,
        years=0,
        months=0,
        weeks=0,
        days=0,
        hours=0,
        minutes=0,
        seconds=0,
        mode="older",
    ):
        # timedelta has no calendar awareness, so years / months are
        # approximated in weeks ("quick and a bit dirty", as before).
        self.age = timedelta(
            weeks=52 * years + 4 * months + weeks,
            days=days,
            hours=hours,
            minutes=minutes,
            seconds=seconds,
        )
        self.mode = mode.strip().lower()
        if self.mode not in ("older", "newer"):
            raise ValueError("Unknown option for 'mode': must be 'older' or 'newer'.")

    def matches_lastmodified_time(self, lastmodified: Union[None, datetime]) -> bool:
        """Whether `lastmodified` satisfies the configured age condition.

        A zero age (no time args given) matches everything; a missing
        mtime never matches a nonzero age condition.
        """
        match = True
        if self.age.total_seconds():
            if not lastmodified:
                match = False
            else:
                match = age_condition_applies(
                    dt=lastmodified,
                    age=self.age,
                    mode=self.mode,
                    reference=datetime.now(),
                )
        return match

    def pipeline(self, args: dict) -> FilterResult:
        fs = args["fs"]  # type: FS
        fs_path = args["fs_path"]

        modified = fs.getmodified(fs_path)
        if modified:
            # normalize the (aware) datetime to the local timezone
            modified = modified.astimezone()

        match = self.matches_lastmodified_time(modified)
        return FilterResult(
            matches=match,
            updates={self.get_name(): modified},
        )

    def __str__(self):
        # BUG FIX: previously referenced the nonexistent attributes
        # `self._mode` and `self.timedelta` (AttributeError at runtime);
        # __init__ assigns `self.mode` and `self.age`.
        return "[LastModified] All files / folders last modified %s than %s" % (
            self.mode,
            self.age,
        )
code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Desktop' - filters: - - mimetype: application/pdf - actions: - - echo: 'Found a PDF file' - - - Filter by multiple specific MIME types: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Music' - filters: - - mimetype: - - application/pdf - - audio/midi - actions: - - echo: 'Found Midi or PDF.' + - `{mimetype}`: The MIME type of the file. """ + name = "mimetype" + schema_support_instance_without_args = True + def __init__(self, *mimetypes): self.mimetypes = list(map(str.lower, flatten(list(mimetypes)))) @@ -82,19 +38,23 @@ def mimetype(path): type_, _ = mimetypes.guess_type(path, strict=False) return type_ - def matches(self, path: Path): - mimetype = self.mimetype(path) + def matches(self, mimetype) -> bool: if mimetype is None: return False if not self.mimetypes: return True return any(mimetype.startswith(x) for x in self.mimetypes) - def pipeline(self, args: DotDict): - if self.matches(args.path): - result = self.mimetype(args.path) - return {"mimetype": result} - return None + def pipeline(self, args: dict) -> FilterResult: + fs = args["fs"] + fs_path = args["fs_path"] + if fs.isdir(fs_path): + raise ValueError("Dirs not supported.") + mimetype = self.mimetype(fs_path) + return FilterResult( + matches=self.matches(mimetype), + updates={self.get_name(): mimetype}, + ) def __str__(self): return "MimeType(%s)" % ", ".join(self.mimetypes) diff --git a/organize/filters/name.py b/organize/filters/name.py new file mode 100644 index 00000000..a70d7da4 --- /dev/null +++ b/organize/filters/name.py @@ -0,0 +1,97 @@ +from schema import Or, Optional +from typing import Any, List, Union, Dict + +import simplematch +from fs import path + +from .filter import Filter, FilterResult + + +class Name(Filter): + """Match files and folders by name + + Args: + match (str): + A matching string in [simplematch-syntax](https://github.com/tfeldmann/simplematch) + + startswith (str): + The 
filename must begin with the given string + + contains (str): + The filename must contain the given string + + endswith (str): + The filename (without extension) must end with the given string + + case_sensitive (bool): + By default, the matching is case sensitive. Change this to False to use + case insensitive matching. + """ + + name = "name" + schema_support_instance_without_args = True + + arg_schema = Or( + str, + { + Optional("match"): str, + Optional("startswith"): Or(str, [str]), + Optional("contains"): Or(str, [str]), + Optional("endswith"): Or(str, [str]), + Optional("case_sensitive"): bool, + }, + ) + + def __init__( + self, + match="*", + *, + startswith="", + contains="", + endswith="", + case_sensitive=True, + ) -> None: + self.matcher = simplematch.Matcher(match, case_sensitive=case_sensitive) + self.startswith = self.create_list(startswith, case_sensitive) + self.contains = self.create_list(contains, case_sensitive) + self.endswith = self.create_list(endswith, case_sensitive) + self.case_sensitive = case_sensitive + + def matches(self, name: str) -> bool: + if not self.case_sensitive: + name = name.lower() + + is_match = ( + self.matcher.test(name) + and any(x in name for x in self.contains) + and any(name.startswith(x) for x in self.startswith) + and any(name.endswith(x) for x in self.endswith) + ) + return is_match + + def pipeline(self, args: Dict) -> FilterResult: + fs = args["fs"] + fs_path = args["fs_path"] + if fs.isdir(fs_path): + name = path.basename(fs_path) + else: + name, _ = path.splitext(path.basename(fs_path)) + result = self.matches(name) + m = self.matcher.match(name) + if m == {}: + m = name + return FilterResult( + matches=result, + updates={self.get_name(): m}, + ) + + @staticmethod + def create_list(x: Union[int, str, List[Any]], case_sensitive: bool) -> List[str]: + if isinstance(x, (int, float)): + x = str(x) + if isinstance(x, str): + x = [x] + x = [str(x) for x in x] + if not case_sensitive: + x = [x.lower() for x in x] + 
return x diff --git a/organize/filters/python.py b/organize/filters/python.py index ed3c5a65..f5b39620 100644 --- a/organize/filters/python.py +++ b/organize/filters/python.py @@ -1,98 +1,39 @@ import textwrap -from typing import Any, Dict, Optional, Sequence +from schema import Or +from typing import Any, Optional as tyOpt, Sequence -from .filter import Filter +from .filter import Filter, FilterResult class Python(Filter): - r""" - Use python code to filter files. + """Use python code to filter files. - :param str code: - The python code to execute. The code must contain a ``return`` statement. + Args: + code (str): + The python code to execute. The code must contain a `return` statement. - :returns: - - If your code returns ``False`` or ``None`` the file is filtered out, - otherwise the file is passed on to the next filters. - - ``{python}`` contains the returned value. If you return a dictionary (for - example ``return {"some_key": some_value, "nested": {"k": 2}}``) it will be - accessible via dot syntax in your actions: ``{python.some_key}``, - ``{python.nested.k}``. + **Returns:** - Examples: - - A file name reverser. - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: ~/Documents - filters: - - extension - - python: | - return {"reversed_name": path.stem[::-1]} - actions: - - rename: '{python.reversed_name}.{extension}' - - - A filter for odd student numbers. Assuming the folder ``~/Students`` contains - the files ``student-01.jpg``, ``student-01.txt``, ``student-02.txt`` and - ``student-03.txt`` this rule will print - ``"Odd student numbers: student-01.txt"`` and - ``"Odd student numbers: student-03.txt"`` - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: ~/Students/ - filters: - - python: | - return int(path.stem.split('-')[1]) % 2 == 1 - actions: - - echo: 'Odd student numbers: {path.name}' - - - - Advanced usecase. You can access data from previous filters in your python code. 
- This can be used to match files and capturing names with a regular expression - and then renaming the files with the output of your python script. - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: files - filters: - - extension: txt - - regex: (?P\w+)-(?P\w+)\..* - - python: | - emails = { - "Betts": "dbetts@mail.de", - "Cornish": "acornish@google.com", - "Bean": "dbean@aol.com", - "Frey": "l-frey@frey.org", - } - if regex.lastname in emails: # get emails from wherever - return {"mail": emails[regex.lastname]} - actions: - - rename: '{python.mail}.txt' + - If your code returns `False` or `None` the file is filtered out, + otherwise the file is passed on to the next filters. + - `{python}` contains the returned value. If you return a dictionary (for + example `return {"some_key": some_value, "nested": {"k": 2}}`) it will be + accessible via dot syntax in your actions: `{python.some_key}`, + `{python.nested.k}`. + """ - Result: - - ``Devonte-Betts.txt`` becomes ``dbetts@mail.de.txt`` - - ``Alaina-Cornish.txt`` becomes ``acornish@google.com.txt`` - - ``Dimitri-Bean.txt`` becomes ``dbean@aol.com.txt`` - - ``Lowri-Frey.txt`` becomes ``l-frey@frey.org.txt`` - - ``Someunknown-User.txt`` remains unchanged because the email is not found + name = "python" - """ + arg_schema = Or(str, {"code": str}) def __init__(self, code) -> None: self.code = textwrap.dedent(code) if "return" not in self.code: raise ValueError("No return statement found in your code!") - def usercode(self, *args, **kwargs) -> Optional[Any]: + def usercode(self, *args, **kwargs) -> tyOpt[Any]: pass # will be overwritten by `create_method` def create_method(self, name: str, argnames: Sequence[str], code: str) -> None: @@ -107,9 +48,9 @@ def create_method(self, name: str, argnames: Sequence[str], code: str) -> None: ) exec(funccode, globals_, locals_) # pylint: disable=exec-used - def pipeline(self, args) -> Optional[Dict[str, Any]]: + def pipeline(self, args) -> FilterResult: 
self.create_method(name="usercode", argnames=args.keys(), code=self.code) result = self.usercode(**args) # pylint: disable=assignment-from-no-return if result not in (False, None): - return {"python": result} - return None + return FilterResult(matches=True, updates={self.get_name(): result}) + return FilterResult(matches=False, updates={}) diff --git a/organize/filters/regex.py b/organize/filters/regex.py index 0805762e..b571d0a6 100644 --- a/organize/filters/regex.py +++ b/organize/filters/regex.py @@ -1,63 +1,42 @@ import re -from typing import Any, Dict, Mapping, Optional -from pathlib import Path +from fs.path import basename -from .filter import Filter +from .filter import Filter, FilterResult class Regex(Filter): - r""" - Matches filenames with the given regular expression + """Matches filenames with the given regular expression - :param str expr: - The regular expression to be matched. + Args: + expr (str): The regular expression to be matched. + + **Returns:** Any named groups in your regular expression will be returned like this: - :returns: - - ``{regex.yourgroupname}`` -- The text matched with the named group - ``(?P)`` - - Examples: - - Match an invoice with a regular expression: - - .. code-block:: yaml - :caption: config.yaml - - rules: - - folders: '~/Desktop' - filters: - - regex: '^RG(\d{12})-sig\.pdf$' - actions: - - move: '~/Documents/Invoices/1und1/' - - - Match and extract data from filenames with regex named groups: - This is just like the previous example but we rename the invoice using - the invoice number extracted via the regular expression and the named - group ``the_number``. - - .. 
code-block:: yaml - :caption: config.yaml - - rules: - - folders: ~/Desktop - filters: - - regex: '^RG(?P\d{12})-sig\.pdf$' - actions: - - move: ~/Documents/Invoices/1und1/{regex.the_number}.pdf + - `{regex.groupname}`: The text matched with the named + group `(?P.*)` + """ + name = "regex" + + arg_schema = str + def __init__(self, expr) -> None: self.expr = re.compile(expr, flags=re.UNICODE) - def matches(self, path: Path) -> Any: - return self.expr.search(path.name) - - def pipeline(self, args: Mapping) -> Optional[Dict[str, Dict]]: - match = self.matches(args["path"]) - if match: - result = match.groupdict() - return {"regex": result} - return None + def matches(self, path: str): + return self.expr.search(path) + + def pipeline(self, args: dict) -> FilterResult: + fs_path = args["fs_path"] + match = self.matches(basename(fs_path)) + return FilterResult( + matches=bool(match), + updates={ + self.get_name(): match.groupdict() if match else "", + }, + ) diff --git a/organize/filters/size.py b/organize/filters/size.py new file mode 100644 index 00000000..7fb9e82c --- /dev/null +++ b/organize/filters/size.py @@ -0,0 +1,124 @@ +import operator +import re +from typing import Callable, Dict +from typing import Optional as Opt +from typing import Sequence, Set, Tuple + +from fs.filesize import binary, decimal, traditional +from schema import Optional, Or + +from organize.utils import flattened_string_list + +from .filter import Filter, FilterResult + +OPERATORS = { + "<": operator.lt, + "<=": operator.le, + "==": operator.eq, + "=": operator.eq, + "": operator.eq, + ">=": operator.ge, + ">": operator.gt, +} +SIZE_REGEX = re.compile( + r"^(?P[<>=]*)(?P(\d*\.)?\d+)(?P[kmgtpezy]?i?)b?$" +) + + +def create_constraints(inp: str) -> Set[Tuple[Callable[[int, int], bool], int]]: + """ + Given an input string it returns a list of tuples (comparison operator, + number of bytes). + + Accepted formats are: "30k", ">= 5 TiB, <10tb", "< 60 tb", ... 
OPERATORS = {
    "<": operator.lt,
    "<=": operator.le,
    "==": operator.eq,
    "=": operator.eq,
    "": operator.eq,
    ">=": operator.ge,
    ">": operator.gt,
}
# BUG FIX: the named groups `op`, `num` and `unit` are required —
# create_constraints reads them via groupdict(); the angle brackets
# had been lost (`(?P[<>=]*)` is not even a valid group syntax).
SIZE_REGEX = re.compile(
    r"^(?P<op>[<>=]*)(?P<num>(\d*\.)?\d+)(?P<unit>[kmgtpezy]?i?)b?$"
)


def create_constraints(inp: str) -> Set[Tuple[Callable[[int, int], bool], int]]:
    """
    Given an input string it returns a set of tuples (comparison operator,
    number of bytes).

    Accepted formats are: "30k", ">= 5 TiB, <10tb", "< 60 tb", ...
    Calculation is in bytes, even if the "b" is lowercase. If an "i" is present
    we calculate base 1024; otherwise base 1000.

    Parts that do not match SIZE_REGEX at all are silently skipped (so
    garbage input yields an empty set); a part that matches but cannot be
    converted raises ValueError.
    """
    result = set()  # type: Set[Tuple[Callable[[int, int], bool], int]]
    parts = str(inp).replace(" ", "").lower().split(",")
    for part in parts:
        try:
            reg_match = SIZE_REGEX.match(part)
            if reg_match:
                match = reg_match.groupdict()
                op = OPERATORS[match["op"]]
                # keep ints exact; only use float when a decimal point is given
                num = float(match["num"]) if "." in match["num"] else int(match["num"])
                unit = match["unit"]
                base = 1024 if unit.endswith("i") else 1000
                exp = "kmgtpezy".index(unit[0]) + 1 if unit else 0
                numbytes = num * base**exp
                result.add((op, numbytes))
        except (AttributeError, KeyError, IndexError, ValueError, TypeError) as e:
            raise ValueError("Invalid size format: %s" % part) from e
    return result


def satisfies_constraints(size, constraints):
    """True if `size` (in bytes) satisfies every (operator, bytes) constraint."""
    return all(op(size, p_size) for op, p_size in constraints)
class Size(Filter):
    """Matches files and folders by size

    Args:
        *conditions (list(str) or str):
            The size constraints.

            Accepts file size conditions, e.g: `">= 500 MB"`, `"< 20k"`, `">0"`,
            `"= 10 KiB"`.

            It is possible to define both lower and upper conditions like this:
            `">20k, < 1 TB"`, `">= 20 Mb, <25 Mb"`. The filter will match if all
            given conditions are satisfied.

            - Accepts all units from KB to YB.
            - If no unit is given the number is taken as bytes.
              (NOTE(review): earlier docs claimed kilobytes are assumed, but
              `create_constraints` uses exponent 0 for unitless input — confirm
              intended behavior.)
            - If a binary prefix is given (KiB, GiB) the size is calculated
              using base 1024.

    **Returns:**

    - `{size.bytes}`: (int) Size in bytes
    - `{size.traditional}`: (str) Size with unit (powers of 1024, JEDEC prefixes)
    - `{size.binary}`: (str) Size with unit (powers of 1024, IEC prefixes)
    - `{size.decimal}`: (str) Size with unit (powers of 1000, SI prefixes)
    """

    name = "size"
    arg_schema = Or(object, [object])
    schema_support_instance_without_args = True

    def __init__(self, *conditions: Sequence[str]) -> None:
        # conditions is stored as a single comma-joined string and parsed
        # once into (operator, bytes) constraint tuples.
        self.conditions = ", ".join(flattened_string_list(list(conditions)))
        self.constraints = create_constraints(self.conditions)

    def matches(self, filesize: int) -> bool:
        """True if `filesize` satisfies all constraints (no constraints = match all)."""
        if not self.constraints:
            return True
        return all(op(filesize, c_size) for op, c_size in self.constraints)

    def pipeline(self, args: dict) -> FilterResult:
        fs = args["fs"]
        fs_path = args["fs_path"]

        if fs.isdir(fs_path):
            # directory size = recursive sum of all contained file sizes
            size = sum(
                info.size
                for _, info in fs.walk.info(path=fs_path, namespaces=["details"])
            )
        else:
            size = fs.getsize(fs_path)

        return FilterResult(
            matches=self.matches(size),
            updates={
                self.get_name(): {
                    "bytes": size,
                    "traditional": traditional(size),
                    "binary": binary(size),
                    "decimal": decimal(size),
                },
            },
        )

    def __str__(self) -> str:
        # BUG FIX: previously returned "FileSize({})".format(" ".join(self.conditions)).
        # That used the pre-rename class name, and since self.conditions is a
        # *string*, " ".join() interleaved a space between every character.
        return "Size({})".format(self.conditions)


def age_condition_applies(dt: datetime, age: timedelta, mode: str, reference: datetime):
    """
    Returns whether `dt` is older / newer (`mode`) than `age` as measured on
    `reference`. Raises ValueError for any other `mode`.
    """
    if mode not in ("older", "newer"):
        raise ValueError(mode)

    # dt + age still in the past relative to `reference` => dt is "older"
    is_past = (dt + age).timestamp() < reference.timestamp()
    return (mode == "older") == is_past
class MigrationWarning(UserWarning):
    """Warning category for config entries that changed between versions."""

    pass


class NeedsMigrationError(Exception):
    """Raised when a v1 config uses syntax that must be migrated by hand."""

    pass


def entry_name_args(entry):
    """Return ``(name, argument_keys)`` for a filter/action config entry.

    Config entries are either plain strings (``"trash"``) or single-key
    dicts (``{"move": "~/x"}`` / ``{"move": {"dest": ..., ...}}``).
    Name and argument keys are lowercased. Entries whose argument is a
    scalar, list or ``None`` have no keyword arguments to inspect and
    yield an empty args list.
    """
    if isinstance(entry, str):
        return (entry.lower(), [])
    if isinstance(entry, dict):
        name, value = next(iter(entry.items()))
        if isinstance(value, dict):
            return (name.lower(), [key.lower() for key in value.keys()])
        # BUG FIX: list-valued args (e.g. {"extension": ["pdf", "jpg"]})
        # previously fell through and returned None, which crashed the
        # tuple unpack in migrate_v1.
        return (name.lower(), [])


def migrate_v1(config: dict):
    """Raise ``NeedsMigrationError`` if ``config`` uses organize v1 syntax.

    Checks for renamed top-level keys (``folders`` -> ``locations``),
    renamed filters (``filename`` -> ``name``, ``filesize`` -> ``size``)
    and removed action arguments. Returns ``None`` if nothing needs
    migration.
    """
    for rule in config.get("rules", []):
        if "folders" in rule:
            raise NeedsMigrationError("`folders` are now `locations`")
        # `or []` guards against an explicit `filters:`/`actions:` key
        # with a None value (empty YAML section).
        for fil in rule.get("filters", []) or []:
            name, _ = entry_name_args(fil)
            if name == "filename":
                raise NeedsMigrationError("`filename` is now `name`")
            if name == "filesize":
                raise NeedsMigrationError("`filesize` is now `size`")
        for act in rule.get("actions", []) or []:
            name, args = entry_name_args(act)
            if name in ("move", "copy", "rename"):
                # NOTE(review): "counter_seperator" reproduces the original
                # (misspelled) key; the correctly spelled variant is checked
                # as well in case configs used it — confirm against v1 docs.
                if (
                    "overwrite" in args
                    or "counter_seperator" in args
                    or "counter_separator" in args
                ):
                    # BUG FIX: the message contained a bare "%s" that was
                    # never interpolated with the action name.
                    raise NeedsMigrationError(
                        "`%s` does not support `overwrite` and "
                        "`counter_seperator` anymore. Please use the new arguments."
                        % name
                    )
+ if isinstance(x, Exception): + raise x + return x -from pathlib import Path -WILDCARD_REGEX = re.compile(r"(? Tuple[Path, str]: - """ split a string with wildcards into a base folder and globstring """ - path = fullpath(globstr.strip()) - parts = path.parts - for i, part in enumerate(parts): - if WILDCARD_REGEX.search(part): - return (Path(*parts[:i]), str(Path(*parts[i:]))) - return (path, "") +def basic_args(): + """The basic args which are guaranteed to be available.""" + return { + "env": os.environ, + "now": datetime.now(), + "utcnow": datetime.utcnow(), + } -def fullpath(path: Union[str, Path]) -> Path: - """ Expand '~' and resolve the given path. Path can be a string or a Path obj. """ - return Path(os.path.expandvars(str(path))).expanduser().resolve(strict=False) +class SimulationFS(MemoryFS): + def __init__(self, fs_url, *args, **kwargs): + super().__init__(*args, **kwargs) + self.fs_url = fs_url -def flatten(arr: List[Any]) -> List[Any]: - if arr == []: - return [] - if not isinstance(arr, list): - return [arr] - return flatten(arr[0]) + flatten(arr[1:]) + def __str__(self): + if not self.fs_url: + return "" + elif "://" in self.fs_url: + return "<%s>" % self.fs_url + return self.fs_url -def flattened_string_list(x, case_sensitive=True) -> Sequence[str]: - x = [str(x) for x in flatten(x)] - if not case_sensitive: - x = [x.lower() for x in x] - return x +def open_fs_or_sim(fs_url, *args, simulate=False, **kwargs): + if simulate: + simFS = SimulationFS(fs_url) + return simFS + return open_fs(fs_url, *args, **kwargs) -def first_key(dic: Mapping) -> Hashable: - return list(dic.keys())[0] +def expand_user(fs_url: str) -> str: + fs_url = os.path.expanduser(fs_url) + if fs_url.startswith("zip://~"): + fs_url = fs_url.replace("zip://~", "zip://" + os.path.expanduser("~")) + elif fs_url.startswith("tar://~"): + fs_url = fs_url.replace("tar://~", "tar://" + os.path.expanduser("~")) + return fs_url -class DotDict(dict): - """ - Quick and dirty implementation of a 
dot-able dict, which allows access and - assignment via object properties rather than dict indexing. - Keys are case insensitive. - """ +def expand_args(template: Union[str, jinja2.environment.Template], args=None): + if not args: + args = basic_args() - def __init__(self, *args, **kwargs): - super().__init__() - # we could just call super(DotDict, self).__init__(*args, **kwargs) - # but that won't get us nested dotdict objects - od = dict(*args, **kwargs) - for key, val in od.items(): - if isinstance(val, Mapping): - value = DotDict(val) - else: - value = val - self[self.normkey(key)] = value - - @staticmethod - def normkey(key): - if isinstance(key, str): - return key.lower() - else: - return key + if isinstance(template, str): + text = Template.from_string(template).render(**args) + else: + text = template.render(**args) - def __delattr__(self, key): - try: - del self[self.normkey(key)] - except KeyError as ex: - raise AttributeError("No attribute called: %s" % key) from ex + # expand user and fill environment vars + text = expand_user(text) + text = os.path.expandvars(text) - def __getattr__(self, key): - try: - return self[self.normkey(key)] - except KeyError as ex: - raise AttributeError("No attribute called: %s" % key) from ex - - def __setattr__(self, key, value) -> None: - self[self.normkey(key)] = value - - def update(self, other): - """ recursively update the dotdict instance with another dicts items """ - for key, val in other.items(): - normkey = self.normkey(key) - if isinstance(val, Mapping): - if isinstance(self.get(normkey), dict): - self[normkey].update(val) - else: - self[normkey] = __class__(val) - else: - self[normkey] = val - - def merge(self, other) -> Mapping: - """ recursively merge values from another dict and return a new instance """ - new_dct = deepcopy(self) - new_dct.update(other) - return new_dct - - -def increment_filename_version(path: Path, separator=" ") -> Path: - stem = path.stem - try: - # try to find any existing counter - 
splitstem = stem.split(separator) # raises ValueError on missing sep - if len(splitstem) < 2: - raise ValueError() - counter = int(splitstem[-1]) - stem = separator.join(splitstem[:-1]) - except (ValueError, IndexError): - # not found, we start with 1 - counter = 1 - return path.with_name( - "{stem}{sep}{cnt}{suffix}".format( - stem=stem, sep=separator, cnt=(counter + 1), suffix=path.suffix - ) - ) - - -def find_unused_filename(path: Path, separator=" ") -> Path: + return text + + +def fs_path_from_options( + path: str, filesystem: Union[FS, str, None] = "" +) -> Tuple[FS, str]: """ - We assume the given path already exists. This function adds a counter to the - filename until we find a unused filename. + path can be a fs_url a normal fs_path + filesystem is optional and may be a fs_url. + + - user tilde is expanded + - if a filesystem is given, we use that. + - otherwise we treat the path as a filesystem. """ - # TODO: Check whether the assumption can be eliminated for cleaner code. - # TODO: Optimization: The counter only needs to be parsed once. - tmp = path - while True: - tmp = increment_filename_version(tmp, separator=separator) - if not tmp.exists(): - return tmp + path = expand_user(path) + + if not filesystem: + return (open_fs(path), "/") + else: + if isinstance(filesystem, str): + filesystem = expand_user(filesystem) + return (open_fs(filesystem), path) + return (filesystem, path) + + +def is_same_resource(fs1: FS, path1: str, fs2: FS, path2: str): + from fs.errors import NoSysPath, NoURL + from fs.tarfs import ReadTarFS, WriteTarFS + from fs.zipfs import ReadZipFS, WriteZipFS + from fs.wrapfs import WrapFS + from fs.path import abspath + + # def unwrap(fs, path): + # base = "/" + # if isinstance(fs, WrapFS): + # fs, base = fs.delegate_path("/") + # return fs, normpath(join(base, path)) # to support ".." 
in path + + def unwrap(fs, path): + if isinstance(fs, WrapFS): + fs, path = fs.delegate_path(path) + return fs, abspath(path) + + # completely unwrap WrapFS instances + fs1, path1 = unwrap(fs1, path1) + fs2, path2 = unwrap(fs2, path2) + + # obvious check + if fs1 == fs2 and path1 == path2: + return True + + # check all fs with syspath support + try: + return fs1.getsyspath(path1) == fs2.getsyspath(path2) + except NoSysPath: + pass + + # check zip and tar + Tar = (WriteTarFS, ReadTarFS) + Zip = (WriteZipFS, ReadZipFS) + if (isinstance(fs1, Tar) and isinstance(fs2, Tar)) or ( + isinstance(fs1, Zip) and isinstance(fs2, Zip) + ): + return path1 == path2 and fs1._file == fs2._file + + # check all fs with url support + if isinstance(fs1, fs2.__class__): + try: + return fs1.geturl(path1) == fs2.geturl(path2) + except NoURL: + pass + return False -def dict_merge(dct, merge_dct, add_keys=True): - """ Recursive dict merge. +def safe_description(fs: FS, path): + try: + if isinstance(fs, SimulationFS): + return "%s%s" % (str(fs), fspath.abspath(path)) + return fs.getsyspath(path) + except Exception as e: + return '{} in "{}"'.format(path, fs) + + +def ensure_list(inp): + if not isinstance(inp, list): + return [inp] + return inp + + +def ensure_dict(inp): + if isinstance(inp, dict): + return inp + elif isinstance(inp, str): + return {inp: {}} + raise ValueError("Cannot ensure dict: %s" % inp) + + +def to_args(inp): + """Convert a argument into a (args, kwargs) tuple. 
+ + >>> to_args(None) + ([], {}) + >>> to_args('test') + (['test'], {}) + >>> to_args([1, 2, 3]) + ([1, 2, 3], {}) + >>> to_args({'a': {'b': 'c'}}) + ([], {'a': {'b': 'c'}}) + >>> to_args([[1, 2, [3, 4], [5, 6]]]) + ([1, 2, 3, 4, 5, 6], {}) + """ + if inp is None: + return ([], {}) + if isinstance(inp, dict): + return ([], inp) + return (flatten(ensure_list(inp)), {}) - Inspired by :meth:``dict.update()``, instead of - updating only top-level keys, dict_merge recurses down into dicts nested - to an arbitrary depth, updating keys. The ``merge_dct`` is merged into - ``dct``. - This version will return a copy of the dictionary and leave the original - arguments untouched. +def flatten(arr: List[Any]) -> List[Any]: + if arr == []: + return [] + if not isinstance(arr, list): + return [arr] + return flatten(arr[0]) + flatten(arr[1:]) - The optional argument ``add_keys``, determines whether keys which are - present in ``merge_dict`` but not ``dct`` should be included in the - new dict. - Args: - dct (dict) onto which the merge is executed - merge_dct (dict): dct merged into dct - add_keys (bool): whether to add new keys +def flattened_string_list(x, case_sensitive=True) -> Sequence[str]: + x = [str(x) for x in flatten(x)] + if not case_sensitive: + x = [x.lower() for x in x] + return x - Returns: - dict: updated dict - Taken from comment thread: https://gist.github.com/angstwad/bf22d1822c38a92ec0a9 +def flatten_all_lists_in_dict(obj): """ - dct = deepcopy(dct) - if not add_keys: - merge_dct = {k: merge_dct[k] for k in set(dct).intersection(set(merge_dct))} - - for k, v in merge_dct.items(): - if isinstance(dct.get(k), dict) and isinstance(v, Mapping): - dct[k] = dict_merge(dct[k], v, add_keys=add_keys) + >>> flatten_all_lists_in_dict({1: [[2], [3, {5: [5, 6]}]]}) + {1: [2, 3, {5: [5, 6]}]} + """ + if isinstance(obj, dict): + for key, value in obj.items(): + obj[key] = flatten_all_lists_in_dict(value) + return obj + elif isinstance(obj, list): + return 
[flatten_all_lists_in_dict(x) for x in flatten(obj)] + else: + return obj + + +def deep_merge(a: dict, b: dict, *, add_keys=True) -> dict: + result = deepcopy(a) + for bk, bv in b.items(): + av = result.get(bk) + if isinstance(av, dict) and isinstance(bv, dict): + result[bk] = deep_merge(av, bv, add_keys=add_keys) + elif (av is not None) or add_keys: + result[bk] = deepcopy(bv) + return result + + +def deep_merge_inplace(base: dict, updates: dict) -> None: + for bk, bv in updates.items(): + av = base.get(bk) + if isinstance(av, dict) and isinstance(bv, dict): + deep_merge_inplace(av, bv) else: - dct[k] = v - - return dct + base[bk] = bv diff --git a/poetry.lock b/poetry.lock index 8255623b..f288a67b 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1,11 +1,3 @@ -[[package]] -name = "alabaster" -version = "0.7.12" -description = "A configurable sidebar-enabled Sphinx theme" -category = "dev" -optional = false -python-versions = "*" - [[package]] name = "appdirs" version = "1.4.4" @@ -14,17 +6,9 @@ category = "main" optional = false python-versions = "*" -[[package]] -name = "appnope" -version = "0.1.2" -description = "Disable App Nap on macOS >= 10.9" -category = "dev" -optional = false -python-versions = "*" - [[package]] name = "argcomplete" -version = "1.10.0" +version = "1.10.3" description = "Bash tab completion for argparse" category = "main" optional = true @@ -34,17 +18,15 @@ python-versions = "*" test = ["coverage", "flake8", "pexpect", "wheel"] [[package]] -name = "astroid" -version = "2.5.6" -description = "An abstract syntax tree for Python with inference support." 
+name = "astunparse" +version = "1.6.3" +description = "An AST unparser for Python" category = "dev" optional = false -python-versions = "~=3.6" +python-versions = "*" [package.dependencies] -lazy-object-proxy = ">=1.4.0" -typed-ast = {version = ">=1.4.0,<1.5", markers = "implementation_name == \"cpython\" and python_version < \"3.8\""} -wrapt = ">=1.11,<1.13" +six = ">=1.6.1,<2.0" [[package]] name = "atomicwrites" @@ -56,40 +38,35 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "attrs" -version = "20.3.0" +version = "21.4.0" description = "Classes Without Boilerplate" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.extras] -dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface", "furo", "sphinx", "pre-commit"] -docs = ["furo", "sphinx", "zope.interface"] -tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "zope.interface"] -tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six"] +dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit", "cloudpickle"] +docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] +tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "cloudpickle"] +tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "cloudpickle"] [[package]] -name = "babel" -version = "2.9.0" -description = "Internationalization utilities" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +name = "backports.zoneinfo" +version = 
"0.2.1" +description = "Backport of the standard library zoneinfo module" +category = "main" +optional = true +python-versions = ">=3.6" [package.dependencies] -pytz = ">=2015.7" +importlib-resources = {version = "*", markers = "python_version < \"3.7\""} -[[package]] -name = "backcall" -version = "0.2.0" -description = "Specifications for callback functions passed in to an API" -category = "dev" -optional = false -python-versions = "*" +[package.extras] +tzdata = ["tzdata"] [[package]] name = "beautifulsoup4" -version = "4.8.0" +version = "4.8.2" description = "Screen-scraping library" category = "main" optional = true @@ -102,9 +79,17 @@ soupsieve = ">=1.2" html5lib = ["html5lib"] lxml = ["lxml"] +[[package]] +name = "cached-property" +version = "1.5.2" +description = "A decorator for caching properties in classes." +category = "dev" +optional = false +python-versions = "*" + [[package]] name = "certifi" -version = "2020.12.5" +version = "2021.10.8" description = "Python package for providing Mozilla's CA Bundle." category = "dev" optional = false @@ -112,7 +97,7 @@ python-versions = "*" [[package]] name = "cffi" -version = "1.14.5" +version = "1.15.0" description = "Foreign Function Interface for Python calling C code." category = "main" optional = false @@ -126,9 +111,32 @@ name = "chardet" version = "3.0.4" description = "Universal encoding detector for Python 2 and 3" category = "main" -optional = false +optional = true python-versions = "*" +[[package]] +name = "charset-normalizer" +version = "2.0.11" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+category = "dev" +optional = false +python-versions = ">=3.5.0" + +[package.extras] +unicode_backport = ["unicodedata2"] + +[[package]] +name = "click" +version = "8.0.3" +description = "Composable command line interface toolkit" +category = "main" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + [[package]] name = "colorama" version = "0.4.4" @@ -138,28 +146,39 @@ optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] -name = "decorator" -version = "5.0.7" -description = "Decorators for Humans" -category = "dev" +name = "commonmark" +version = "0.9.1" +description = "Python parser for the CommonMark Markdown spec" +category = "main" optional = false -python-versions = ">=3.5" +python-versions = "*" + +[package.extras] +test = ["flake8 (==3.7.8)", "hypothesis (==3.55.3)"] [[package]] -name = "docopt" -version = "0.6.2" -description = "Pythonic argument parser, that will make you smile" +name = "compressed-rtf" +version = "1.0.6" +description = "Compressed Rich Text Format (RTF) compression and decompression package" category = "main" -optional = false +optional = true python-versions = "*" [[package]] -name = "docutils" -version = "0.16" -description = "Docutils -- Python Documentation Utilities" -category = "dev" +name = "contextlib2" +version = "21.6.0" +description = "Backports and enhancements for the contextlib module" +category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" + +[[package]] +name = "dataclasses" +version = "0.8" +description = "A backport of the dataclasses module for Python 3.6" +category = "main" +optional = false +python-versions = ">=3.6, <3.7" [[package]] name = "docx2txt" @@ -169,6 +188,14 @@ category = "main" optional = true python-versions = "*" 
+[[package]] +name = "ebcdic" +version = "1.1.1" +description = "Additional EBCDIC codecs" +category = "main" +optional = true +python-versions = "*" + [[package]] name = "ebooklib" version = "0.17.1" @@ -191,46 +218,56 @@ python-versions = "*" [[package]] name = "extract-msg" -version = "0.23.1" +version = "0.29.0" description = "Extracts emails and attachments saved in Microsoft Outlook's .msg files" category = "main" optional = true python-versions = "*" [package.dependencies] +compressed-rtf = ">=1.0.6" +ebcdic = ">=1.1.1" imapclient = "2.1.0" -olefile = "0.46" -tzlocal = "1.5.1" +olefile = ">=0.46" +tzlocal = ">=2.1" [[package]] -name = "flake8" -version = "3.9.1" -description = "the modular source code checker: pep8 pyflakes and co" -category = "dev" +name = "fs" +version = "2.4.14" +description = "Python's filesystem abstraction layer" +category = "main" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" +python-versions = "*" [package.dependencies] -importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} -mccabe = ">=0.6.0,<0.7.0" -pycodestyle = ">=2.7.0,<2.8.0" -pyflakes = ">=2.3.0,<2.4.0" +appdirs = ">=1.4.3,<1.5.0" +pytz = "*" +six = ">=1.10,<2.0" + +[package.extras] +scandir = ["scandir (>=1.5,<2.0)"] [[package]] -name = "idna" -version = "2.10" -description = "Internationalized Domain Names in Applications (IDNA)" +name = "ghp-import" +version = "2.0.2" +description = "Copy your docs directly to the gh-pages branch." 
category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = "*" + +[package.dependencies] +python-dateutil = ">=2.8.1" + +[package.extras] +dev = ["twine", "markdown", "flake8", "wheel"] [[package]] -name = "imagesize" -version = "1.2.0" -description = "Getting image size from png/jpeg/jpeg2000/gif file" +name = "idna" +version = "3.3" +description = "Internationalized Domain Names in Applications (IDNA)" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.5" [[package]] name = "imapclient" @@ -249,9 +286,9 @@ test = ["mock (>=1.3.0)"] [[package]] name = "importlib-metadata" -version = "4.0.1" +version = "4.8.3" description = "Read metadata from Python packages" -category = "dev" +category = "main" optional = false python-versions = ">=3.6" @@ -261,111 +298,49 @@ zipp = ">=0.5" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] +perf = ["ipython"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] -name = "ipdb" -version = "0.12.3" -description = "IPython-enabled pdb" -category = "dev" -optional = false -python-versions = ">=2.7" - -[package.dependencies] -ipython = {version = ">=5.1.0", markers = "python_version >= \"3.4\""} - -[[package]] -name = "ipython" -version = "7.16.1" -description = "IPython: Productive Interactive Computing" -category = "dev" -optional = false +name = "importlib-resources" +version = "5.4.0" +description = "Read resources from 
Python packages" +category = "main" +optional = true python-versions = ">=3.6" [package.dependencies] -appnope = {version = "*", markers = "sys_platform == \"darwin\""} -backcall = "*" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -decorator = "*" -jedi = ">=0.10" -pexpect = {version = "*", markers = "sys_platform != \"win32\""} -pickleshare = "*" -prompt-toolkit = ">=2.0.0,<3.0.0 || >3.0.0,<3.0.1 || >3.0.1,<3.1.0" -pygments = "*" -traitlets = ">=4.2" - -[package.extras] -all = ["Sphinx (>=1.3)", "ipykernel", "ipyparallel", "ipywidgets", "nbconvert", "nbformat", "nose (>=0.10.1)", "notebook", "numpy (>=1.14)", "pygments", "qtconsole", "requests", "testpath"] -doc = ["Sphinx (>=1.3)"] -kernel = ["ipykernel"] -nbconvert = ["nbconvert"] -nbformat = ["nbformat"] -notebook = ["notebook", "ipywidgets"] -parallel = ["ipyparallel"] -qtconsole = ["qtconsole"] -test = ["nose (>=0.10.1)", "requests", "testpath", "pygments", "nbformat", "ipykernel", "numpy (>=1.14)"] - -[[package]] -name = "ipython-genutils" -version = "0.2.0" -description = "Vestigial utilities from IPython" -category = "dev" -optional = false -python-versions = "*" - -[[package]] -name = "isort" -version = "5.8.0" -description = "A Python utility / library to sort Python imports." -category = "dev" -optional = false -python-versions = ">=3.6,<4.0" +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] -pipfile_deprecated_finder = ["pipreqs", "requirementslib"] -requirements_deprecated_finder = ["pipreqs", "pip-api"] -colors = ["colorama (>=0.4.3,<0.5.0)"] +docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] +testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] [[package]] -name = "jedi" -version = "0.18.0" -description = "An autocompletion tool for Python that can be used for text editors." 
+name = "iniconfig" +version = "1.1.1" +description = "iniconfig: brain-dead simple config-ini parsing" category = "dev" optional = false -python-versions = ">=3.6" - -[package.dependencies] -parso = ">=0.8.0,<0.9.0" - -[package.extras] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["Django (<3.1)", "colorama", "docopt", "pytest (<6.0.0)"] +python-versions = "*" [[package]] name = "jinja2" -version = "2.11.3" +version = "3.0.3" description = "A very fast and expressive template engine." -category = "dev" +category = "main" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=3.6" [package.dependencies] -MarkupSafe = ">=0.23" +MarkupSafe = ">=2.0" [package.extras] -i18n = ["Babel (>=0.8)"] - -[[package]] -name = "lazy-object-proxy" -version = "1.6.0" -description = "A fast and thorough lazy object proxy." -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +i18n = ["Babel (>=2.7)"] [[package]] name = "lxml" -version = "4.6.3" +version = "4.7.1" description = "Powerful and Pythonic XML processing library combining libxml2/libxslt with the ElementTree API." category = "main" optional = true @@ -390,52 +365,124 @@ mdfind-wrapper = ">=0.1.3,<0.2.0" xattr = ">=0.9.7,<0.10.0" [[package]] -name = "markupsafe" -version = "1.1.1" -description = "Safely add untrusted strings to HTML/XML markup." +name = "markdown" +version = "3.3.6" +description = "Python implementation of Markdown." 
category = "dev" optional = false -python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*" +python-versions = ">=3.6" + +[package.dependencies] +importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} + +[package.extras] +testing = ["coverage", "pyyaml"] [[package]] -name = "mccabe" -version = "0.6.1" -description = "McCabe checker, plugin for flake8" -category = "dev" +name = "markupsafe" +version = "2.0.1" +description = "Safely add untrusted strings to HTML/XML markup." +category = "main" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] name = "mdfind-wrapper" -version = "0.1.4" +version = "0.1.5" description = "A python library that wraps the mdfind." category = "main" optional = false python-versions = ">=3.6" [[package]] -name = "more-itertools" -version = "8.7.0" -description = "More routines for operating on iterables, beyond itertools" +name = "mergedeep" +version = "1.3.4" +description = "A deep merge function for 🐍." category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" + +[[package]] +name = "mkdocs" +version = "1.2.3" +description = "Project documentation with Markdown." +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +click = ">=3.3" +ghp-import = ">=1.0" +importlib-metadata = ">=3.10" +Jinja2 = ">=2.10.1" +Markdown = ">=3.2.1" +mergedeep = ">=1.3.4" +packaging = ">=20.5" +PyYAML = ">=3.10" +pyyaml-env-tag = ">=0.1" +watchdog = ">=2.0" + +[package.extras] +i18n = ["babel (>=2.9.0)"] + +[[package]] +name = "mkdocs-autorefs" +version = "0.3.1" +description = "Automatically link across pages in MkDocs." +category = "dev" +optional = false +python-versions = ">=3.6.2,<4.0.0" + +[package.dependencies] +Markdown = ">=3.3,<4.0" +mkdocs = ">=1.1,<2.0" + +[[package]] +name = "mkdocs-include-markdown-plugin" +version = "3.2.3" +description = "Mkdocs Markdown includer plugin." 
+category = "dev" +optional = false +python-versions = ">=3.6" + +[package.extras] +dev = ["bump2version (==1.0.1)", "flake8 (==3.9.2)", "flake8-implicit-str-concat (==0.2.0)", "flake8-print (==4.0.0)", "isort (==5.9.1)", "mdpo (==0.3.61)", "pre-commit (==2.13.0)", "pytest (==6.2.4)", "pytest-cov (==2.12.1)", "pyupgrade (==2.19.4)", "yamllint (==1.26.1)"] +test = ["pytest (==6.2.4)", "pytest-cov (==2.12.1)"] + +[[package]] +name = "mkdocstrings" +version = "0.17.0" +description = "Automatic documentation from sources, for MkDocs." +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +Jinja2 = ">=2.11.1" +Markdown = ">=3.3" +MarkupSafe = ">=1.1" +mkdocs = ">=1.2" +mkdocs-autorefs = ">=0.1" +pymdown-extensions = ">=6.3" +pytkdocs = ">=0.14.0" [[package]] name = "mypy" -version = "0.812" +version = "0.931" description = "Optional static typing for Python" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" [package.dependencies] -mypy-extensions = ">=0.4.3,<0.5.0" -typed-ast = ">=1.4.0,<1.5.0" -typing-extensions = ">=3.7.4" +mypy-extensions = ">=0.4.3" +tomli = ">=1.1.0" +typed-ast = {version = ">=1.4.0,<2", markers = "python_version < \"3.8\""} +typing-extensions = ">=3.10" [package.extras] dmypy = ["psutil (>=4.0)"] +python2 = ["typed-ast (>=1.4.0,<2)"] [[package]] name = "mypy-extensions" @@ -455,74 +502,36 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "packaging" -version = "20.9" +version = "21.3" description = "Core utilities for Python packages" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[package.dependencies] -pyparsing = ">=2.0.2" - -[[package]] -name = "parso" -version = "0.8.2" -description = "A Python Parser" -category = "dev" -optional = false python-versions = ">=3.6" -[package.extras] -qa = ["flake8 (==3.8.3)", "mypy (==0.782)"] -testing = ["docopt", "pytest (<6.0.0)"] 
+[package.dependencies] +pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" [[package]] name = "pdfminer.six" -version = "20181108" +version = "20191110" description = "PDF parser and analyzer" category = "main" optional = true python-versions = "*" [package.dependencies] +chardet = {version = "*", markers = "python_version > \"3.0\""} pycryptodome = "*" six = "*" sortedcontainers = "*" -[[package]] -name = "pendulum" -version = "2.1.2" -description = "Python datetimes made easy" -category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" - -[package.dependencies] -python-dateutil = ">=2.6,<3.0" -pytzdata = ">=2020.1" - -[[package]] -name = "pexpect" -version = "4.8.0" -description = "Pexpect allows easy control of interactive console applications." -category = "dev" -optional = false -python-versions = "*" - -[package.dependencies] -ptyprocess = ">=0.5" - -[[package]] -name = "pickleshare" -version = "0.7.5" -description = "Tiny 'shelve'-like database with concurrency support" -category = "dev" -optional = false -python-versions = "*" +[package.extras] +dev = ["nose", "tox"] +docs = ["sphinx", "sphinx-argparse"] [[package]] name = "pillow" -version = "8.2.0" +version = "8.4.0" description = "Python Imaging Library (Fork)" category = "main" optional = true @@ -530,56 +539,30 @@ python-versions = ">=3.6" [[package]] name = "pluggy" -version = "0.13.1" +version = "1.0.0" description = "plugin and hook calling mechanisms for python" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=3.6" [package.dependencies] importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] dev = ["pre-commit", "tox"] - -[[package]] -name = "prompt-toolkit" -version = "3.0.3" -description = "Library for building powerful interactive command lines in Python" -category = "dev" -optional = false -python-versions = ">=3.6" - -[package.dependencies] 
-wcwidth = "*" - -[[package]] -name = "ptyprocess" -version = "0.7.0" -description = "Run a subprocess in a pseudo terminal" -category = "dev" -optional = false -python-versions = "*" +testing = ["pytest", "pytest-benchmark"] [[package]] name = "py" -version = "1.10.0" +version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - -[[package]] -name = "pycodestyle" -version = "2.7.0" -description = "Python style guide checker" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "pycparser" -version = "2.20" +version = "2.21" description = "C parser in Python" category = "main" optional = false @@ -587,79 +570,69 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pycryptodome" -version = "3.10.1" +version = "3.14.1" description = "Cryptographic library for Python" category = "main" optional = true python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -[[package]] -name = "pyflakes" -version = "2.3.1" -description = "passive checker of Python programs" -category = "dev" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" - [[package]] name = "pygments" -version = "2.8.1" +version = "2.11.2" description = "Pygments is a syntax highlighting package written in Python." -category = "dev" +category = "main" optional = false python-versions = ">=3.5" [[package]] -name = "pylint" -version = "2.8.2" -description = "python code static checker" +name = "pymdown-extensions" +version = "9.1" +description = "Extension pack for Python Markdown." 
category = "dev" optional = false -python-versions = "~=3.6" +python-versions = ">=3.6" [package.dependencies] -astroid = ">=2.5.6,<2.7" -colorama = {version = "*", markers = "sys_platform == \"win32\""} -isort = ">=4.2.5,<6" -mccabe = ">=0.6,<0.7" -toml = ">=0.7.1" +Markdown = ">=3.2" [[package]] name = "pyparsing" -version = "2.4.7" +version = "3.0.7" description = "Python parsing module" category = "dev" optional = false -python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" +python-versions = ">=3.6" + +[package.extras] +diagrams = ["jinja2", "railroad-diagrams"] [[package]] name = "pytest" -version = "4.6.11" +version = "6.2.5" description = "pytest: simple powerful testing with Python" category = "dev" optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.7" +python-versions = ">=3.6" [package.dependencies] -atomicwrites = ">=1.0" -attrs = ">=17.4.0" -colorama = {version = "*", markers = "sys_platform == \"win32\" and python_version != \"3.4\""} +atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} +attrs = ">=19.2.0" +colorama = {version = "*", markers = "sys_platform == \"win32\""} importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} -more-itertools = {version = ">=4.0.0", markers = "python_version > \"2.7\""} +iniconfig = "*" packaging = "*" -pluggy = ">=0.12,<1.0" -py = ">=1.5.0" -six = ">=1.10.0" -wcwidth = "*" +pluggy = ">=0.12,<2.0" +py = ">=1.8.2" +toml = "*" [package.extras] -testing = ["argcomplete", "hypothesis (>=3.56)", "nose", "requests", "mock"] +testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] [[package]] name = "python-dateutil" -version = "2.8.1" +version = "2.8.2" description = "Extensions to the standard Python datetime module" -category = "main" +category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" @@ -668,7 +641,7 @@ six = ">=1.5" [[package]] name = "python-pptx" -version = "0.6.18" +version = "0.6.21" 
description = "Generate and manipulate Open XML PowerPoint (.pptx) files" category = "main" optional = true @@ -679,21 +652,42 @@ lxml = ">=3.1.0" Pillow = ">=3.3.2" XlsxWriter = ">=0.5.7" +[[package]] +name = "pytkdocs" +version = "0.15.0" +description = "Load Python objects documentation." +category = "dev" +optional = false +python-versions = ">=3.6.2" + +[package.dependencies] +astunparse = {version = ">=1.6", markers = "python_version < \"3.9\""} +cached-property = {version = ">=1.5", markers = "python_version < \"3.8\""} +dataclasses = {version = ">=0.7", markers = "python_version < \"3.7\""} +typing-extensions = {version = ">=3.7", markers = "python_version < \"3.8\""} + +[package.extras] +numpy-style = ["docstring_parser (>=0.7)"] + [[package]] name = "pytz" -version = "2021.1" +version = "2021.3" description = "World timezone definitions, modern and historical" category = "main" optional = false python-versions = "*" [[package]] -name = "pytzdata" -version = "2020.1" -description = "The Olson timezone database for Python." +name = "pytz-deprecation-shim" +version = "0.1.0.post0" +description = "Shims to make deprecation of pytz easier" category = "main" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" +optional = true +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" + +[package.dependencies] +"backports.zoneinfo" = {version = "*", markers = "python_version >= \"3.6\" and python_version < \"3.9\""} +tzdata = {version = "*", markers = "python_version >= \"3.6\""} [[package]] name = "pyyaml" @@ -703,32 +697,77 @@ category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" +[[package]] +name = "pyyaml-env-tag" +version = "0.1" +description = "A custom YAML tag for referencing environment variables in YAML files. 
" +category = "dev" +optional = false +python-versions = ">=3.6" + +[package.dependencies] +pyyaml = "*" + [[package]] name = "requests" -version = "2.25.1" +version = "2.27.1" description = "Python HTTP for Humans." category = "dev" optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" [package.dependencies] certifi = ">=2017.4.17" -chardet = ">=3.0.2,<5" -idna = ">=2.5,<3" +charset-normalizer = {version = ">=2.0.0,<2.1.0", markers = "python_version >= \"3\""} +idna = {version = ">=2.5,<4", markers = "python_version >= \"3\""} urllib3 = ">=1.21.1,<1.27" [package.extras] -security = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)"] socks = ["PySocks (>=1.5.6,!=1.5.7)", "win-inet-pton"] +use_chardet_on_py3 = ["chardet (>=3.0.2,<5)"] + +[[package]] +name = "rich" +version = "11.1.0" +description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" +category = "main" +optional = false +python-versions = ">=3.6.2,<4.0.0" + +[package.dependencies] +colorama = ">=0.4.0,<0.5.0" +commonmark = ">=0.9.0,<0.10.0" +dataclasses = {version = ">=0.7,<0.9", markers = "python_version < \"3.7\""} +pygments = ">=2.6.0,<3.0.0" +typing-extensions = {version = ">=3.7.4,<5.0", markers = "python_version < \"3.8\""} + +[package.extras] +jupyter = ["ipywidgets (>=7.5.1,<8.0.0)"] + +[[package]] +name = "schema" +version = "0.7.5" +description = "Simple data validation library" +category = "main" +optional = false +python-versions = "*" + +[package.dependencies] +contextlib2 = ">=0.5.5" [[package]] name = "send2trash" -version = "1.5.0" +version = "1.8.0" description = "Send file to trash natively under Mac OS X, Windows and Linux." 
category = "main" optional = false python-versions = "*" +[package.extras] +nativelib = ["pyobjc-framework-cocoa", "pywin32"] +objc = ["pyobjc-framework-cocoa"] +win32 = ["pywin32"] + [[package]] name = "simplematch" version = "1.3" @@ -745,17 +784,9 @@ category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*" -[[package]] -name = "snowballstemmer" -version = "2.1.0" -description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." -category = "dev" -optional = false -python-versions = "*" - [[package]] name = "sortedcontainers" -version = "2.3.0" +version = "2.4.0" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" category = "main" optional = true @@ -763,7 +794,7 @@ python-versions = "*" [[package]] name = "soupsieve" -version = "2.2.1" +version = "2.3.1" description = "A modern CSS selector implementation for Beautiful Soup." category = "main" optional = true @@ -777,143 +808,26 @@ category = "main" optional = true python-versions = "*" -[[package]] -name = "sphinx" -version = "3.5.4" -description = "Python documentation generator" -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.dependencies] -alabaster = ">=0.7,<0.8" -babel = ">=1.3" -colorama = {version = ">=0.3.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.12,<0.17" -imagesize = "*" -Jinja2 = ">=2.3" -packaging = "*" -Pygments = ">=2.0" -requests = ">=2.5.0" -snowballstemmer = ">=1.1" -sphinxcontrib-applehelp = "*" -sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = "*" -sphinxcontrib-jsmath = "*" -sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = "*" - -[package.extras] -docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=3.5.0)", "isort", "mypy (>=0.800)", "docutils-stubs"] -test = ["pytest", "pytest-cov", "html5lib", "cython", "typed-ast"] - -[[package]] -name = "sphinx-rtd-theme" -version = "0.5.2" -description = "Read the Docs theme for Sphinx" -category = "dev" 
-optional = false -python-versions = "*" - -[package.dependencies] -docutils = "<0.17" -sphinx = "*" - -[package.extras] -dev = ["transifex-client", "sphinxcontrib-httpdomain", "bump2version"] - -[[package]] -name = "sphinxcontrib-applehelp" -version = "1.0.2" -description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-devhelp" -version = "1.0.2" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-htmlhelp" -version = "1.0.3" -description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] -test = ["pytest", "html5lib"] - -[[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -description = "A sphinx extension which renders display math in HTML via JavaScript" -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.extras] -test = ["pytest", "flake8", "mypy"] - -[[package]] -name = "sphinxcontrib-qthelp" -version = "1.0.3" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." -category = "dev" -optional = false -python-versions = ">=3.5" - -[package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-serializinghtml" -version = "1.1.4" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." 
-category = "dev" -optional = false -python-versions = ">=3.5" - -[package.extras] -lint = ["flake8", "mypy", "docutils-stubs"] -test = ["pytest"] - [[package]] name = "textract" -version = "1.6.3" +version = "1.6.4" description = "extract text from any document. no muss. no fuss." category = "main" optional = true python-versions = "*" [package.dependencies] -argcomplete = "1.10.0" -beautifulsoup4 = "4.8.0" -chardet = "3.0.4" -docx2txt = "0.8" -EbookLib = "0.17.1" -extract-msg = "0.23.1" -"pdfminer.six" = "20181108" -python-pptx = "0.6.18" -six = "1.12.0" -SpeechRecognition = "3.8.1" -xlrd = "1.2.0" +argcomplete = ">=1.10.0,<1.11.0" +beautifulsoup4 = ">=4.8.0,<4.9.0" +chardet = ">=3.0.0,<4.0.0" +docx2txt = ">=0.8,<1.0" +EbookLib = "<1.0.0" +extract-msg = "<=0.29" +"pdfminer.six" = "20191110" +python-pptx = ">=0.6.18,<0.7.0" +six = ">=1.12.0,<1.13.0" +SpeechRecognition = ">=3.8.1,<3.9.0" +xlrd = ">=1.2.0,<1.3.0" [package.extras] pocketsphinx = ["pocketsphinx (==0.1.15)"] @@ -927,87 +841,96 @@ optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] -name = "traitlets" -version = "4.3.3" -description = "Traitlets Python config system" +name = "tomli" +version = "1.2.3" +description = "A lil' TOML parser" category = "dev" optional = false -python-versions = "*" - -[package.dependencies] -decorator = "*" -ipython-genutils = "*" -six = "*" - -[package.extras] -test = ["pytest", "mock"] +python-versions = ">=3.6" [[package]] name = "typed-ast" -version = "1.4.3" +version = "1.5.2" description = "a fork of Python 2 and 3 ast modules with type comment support" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" [[package]] -name = "typing-extensions" -version = "3.7.4.3" -description = "Backported and Experimental Type Hints for Python 3.5+" +name = "types-pyyaml" +version = "6.0.4" +description = "Typing stubs for PyYAML" category = "dev" optional = false python-versions = "*" +[[package]] +name = 
"typing-extensions" +version = "4.0.1" +description = "Backported and Experimental Type Hints for Python 3.6+" +category = "main" +optional = false +python-versions = ">=3.6" + +[[package]] +name = "tzdata" +version = "2021.5" +description = "Provider of IANA time zone data" +category = "main" +optional = true +python-versions = ">=2" + [[package]] name = "tzlocal" -version = "1.5.1" +version = "4.1" description = "tzinfo object for the local timezone" category = "main" optional = true -python-versions = "*" +python-versions = ">=3.6" [package.dependencies] -pytz = "*" +"backports.zoneinfo" = {version = "*", markers = "python_version < \"3.9\""} +pytz-deprecation-shim = "*" +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["black", "pyroma", "pytest-cov", "zest.releaser"] +test = ["pytest-mock (>=3.3)", "pytest (>=4.3)"] [[package]] name = "urllib3" -version = "1.26.4" +version = "1.26.8" description = "HTTP library with thread-safe connection pooling, file post, and more." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] +brotli = ["brotlipy (>=0.6.0)"] secure = ["pyOpenSSL (>=0.14)", "cryptography (>=1.3.4)", "idna (>=2.0.0)", "certifi", "ipaddress"] socks = ["PySocks (>=1.5.6,!=1.5.7,<2.0)"] -brotli = ["brotlipy (>=0.6.0)"] [[package]] -name = "wcwidth" -version = "0.2.5" -description = "Measures the displayed width of unicode strings in a terminal" +name = "watchdog" +version = "2.1.6" +description = "Filesystem events monitoring" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" -[[package]] -name = "wrapt" -version = "1.12.1" -description = "Module for decorators, wrappers and monkey patching." 
-category = "dev" -optional = false -python-versions = "*" +[package.extras] +watchmedo = ["PyYAML (>=3.10)"] [[package]] name = "xattr" -version = "0.9.7" +version = "0.9.9" description = "Python wrapper for extended filesystem attributes" category = "main" optional = false python-versions = "*" [package.dependencies] -cffi = ">=1.0.0" +cffi = ">=1.0" [[package]] name = "xlrd" @@ -1019,138 +942,173 @@ python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "xlsxwriter" -version = "1.4.0" +version = "3.0.2" description = "A Python module for creating Excel XLSX files." category = "main" optional = true -python-versions = "*" +python-versions = ">=3.4" [[package]] name = "zipp" -version = "3.4.1" +version = "3.6.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" +category = "main" optional = false python-versions = ">=3.6" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] -testing = ["pytest (>=4.6)", "pytest-checkdocs (>=1.2.3)", "pytest-flake8", "pytest-cov", "pytest-enabler", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] +testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [extras] textract = ["textract"] [metadata] lock-version = "1.1" -python-versions = "^3.6" -content-hash = "d1e76e5a1ba4b03a7cb81610e1e3b8f4e28202d088a53295a0fe12d5c0ade8e5" +python-versions = "^3.6.2" +content-hash = "06d46cffe503d5f1dfde1d798c57a742c2caf464373543ca23a4763dc5410642" [metadata.files] -alabaster = [ - {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, - {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, -] appdirs = [ {file = 
"appdirs-1.4.4-py2.py3-none-any.whl", hash = "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128"}, {file = "appdirs-1.4.4.tar.gz", hash = "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, ] -appnope = [ - {file = "appnope-0.1.2-py2.py3-none-any.whl", hash = "sha256:93aa393e9d6c54c5cd570ccadd8edad61ea0c4b9ea7a01409020c9aa019eb442"}, - {file = "appnope-0.1.2.tar.gz", hash = "sha256:dd83cd4b5b460958838f6eb3000c660b1f9caf2a5b1de4264e941512f603258a"}, -] argcomplete = [ - {file = "argcomplete-1.10.0-py2.py3-none-any.whl", hash = "sha256:2f2052ea5156eb5cc7edce9c0ddc937e30c49c1097d51b24f34350a08632a264"}, - {file = "argcomplete-1.10.0.tar.gz", hash = "sha256:45836de8cc63d2f6e06b898cef1e4ce1e9907d246ec77ac8e64f23f153d6bec1"}, + {file = "argcomplete-1.10.3-py2.py3-none-any.whl", hash = "sha256:d8ea63ebaec7f59e56e7b2a386b1d1c7f1a7ae87902c9ee17d377eaa557f06fa"}, + {file = "argcomplete-1.10.3.tar.gz", hash = "sha256:a37f522cf3b6a34abddfedb61c4546f60023b3799b22d1cd971eacdc0861530a"}, ] -astroid = [ - {file = "astroid-2.5.6-py3-none-any.whl", hash = "sha256:4db03ab5fc3340cf619dbc25e42c2cc3755154ce6009469766d7143d1fc2ee4e"}, - {file = "astroid-2.5.6.tar.gz", hash = "sha256:8a398dfce302c13f14bab13e2b14fe385d32b73f4e4853b9bdfb64598baa1975"}, +astunparse = [ + {file = "astunparse-1.6.3-py2.py3-none-any.whl", hash = "sha256:c2652417f2c8b5bb325c885ae329bdf3f86424075c4fd1a128674bc6fba4b8e8"}, + {file = "astunparse-1.6.3.tar.gz", hash = "sha256:5ad93a8456f0d084c3456d059fd9a92cce667963232cbf763eac3bc5b7940872"}, ] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, {file = "atomicwrites-1.4.0.tar.gz", hash = "sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, ] attrs = [ - {file = "attrs-20.3.0-py2.py3-none-any.whl", hash = "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6"}, - {file = 
"attrs-20.3.0.tar.gz", hash = "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700"}, -] -babel = [ - {file = "Babel-2.9.0-py2.py3-none-any.whl", hash = "sha256:9d35c22fcc79893c3ecc85ac4a56cde1ecf3f19c540bba0922308a6c06ca6fa5"}, - {file = "Babel-2.9.0.tar.gz", hash = "sha256:da031ab54472314f210b0adcff1588ee5d1d1d0ba4dbd07b94dba82bde791e05"}, -] -backcall = [ - {file = "backcall-0.2.0-py2.py3-none-any.whl", hash = "sha256:fbbce6a29f263178a1f7915c1940bde0ec2b2a967566fe1c65c1dfb7422bd255"}, - {file = "backcall-0.2.0.tar.gz", hash = "sha256:5cbdbf27be5e7cfadb448baf0aa95508f91f2bbc6c6437cd9cd06e2a4c215e1e"}, + {file = "attrs-21.4.0-py2.py3-none-any.whl", hash = "sha256:2d27e3784d7a565d36ab851fe94887c5eccd6a463168875832a1be79c82828b4"}, + {file = "attrs-21.4.0.tar.gz", hash = "sha256:626ba8234211db98e869df76230a137c4c40a12d72445c45d5f5b716f076e2fd"}, +] +"backports.zoneinfo" = [ + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:da6013fd84a690242c310d77ddb8441a559e9cb3d3d59ebac9aca1a57b2e18bc"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:89a48c0d158a3cc3f654da4c2de1ceba85263fafb861b98b59040a5086259722"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:1c5742112073a563c81f786e77514969acb58649bcdf6cdf0b4ed31a348d4546"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win32.whl", hash = "sha256:e8236383a20872c0cdf5a62b554b27538db7fa1bbec52429d8d106effbaeca08"}, + {file = "backports.zoneinfo-0.2.1-cp36-cp36m-win_amd64.whl", hash = "sha256:8439c030a11780786a2002261569bdf362264f605dfa4d65090b64b05c9f79a7"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:f04e857b59d9d1ccc39ce2da1021d196e47234873820cbeaad210724b1ee28ac"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:17746bd546106fa389c51dbea67c8b7c8f0d14b5526a579ca6ccf5ed72c526cf"}, + {file = 
"backports.zoneinfo-0.2.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5c144945a7752ca544b4b78c8c41544cdfaf9786f25fe5ffb10e838e19a27570"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win32.whl", hash = "sha256:e55b384612d93be96506932a786bbcde5a2db7a9e6a4bb4bffe8b733f5b9036b"}, + {file = "backports.zoneinfo-0.2.1-cp37-cp37m-win_amd64.whl", hash = "sha256:a76b38c52400b762e48131494ba26be363491ac4f9a04c1b7e92483d169f6582"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = "sha256:8961c0f32cd0336fb8e8ead11a1f8cd99ec07145ec2931122faaac1c8f7fd987"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e81b76cace8eda1fca50e345242ba977f9be6ae3945af8d46326d776b4cf78d1"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7b0a64cda4145548fed9efc10322770f929b944ce5cee6c0dfe0c87bf4c0c8c9"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win32.whl", hash = "sha256:1b13e654a55cd45672cb54ed12148cd33628f672548f373963b0bff67b217328"}, + {file = "backports.zoneinfo-0.2.1-cp38-cp38-win_amd64.whl", hash = "sha256:4a0f800587060bf8880f954dbef70de6c11bbe59c673c3d818921f042f9954a6"}, + {file = "backports.zoneinfo-0.2.1.tar.gz", hash = "sha256:fadbfe37f74051d024037f223b8e001611eac868b5c5b06144ef4d8b799862f2"}, ] beautifulsoup4 = [ - {file = "beautifulsoup4-4.8.0-py2-none-any.whl", hash = "sha256:05668158c7b85b791c5abde53e50265e16f98ad601c402ba44d70f96c4159612"}, - {file = "beautifulsoup4-4.8.0-py3-none-any.whl", hash = "sha256:f040590be10520f2ea4c2ae8c3dae441c7cfff5308ec9d58a0ec0c1b8f81d469"}, - {file = "beautifulsoup4-4.8.0.tar.gz", hash = "sha256:25288c9e176f354bf277c0a10aa96c782a6a18a17122dba2e8cec4a97e03343b"}, + {file = "beautifulsoup4-4.8.2-py2-none-any.whl", hash = "sha256:e1505eeed31b0f4ce2dbb3bc8eb256c04cc2b3b72af7d551a4ab6efd5cbe5dae"}, + {file = "beautifulsoup4-4.8.2-py3-none-any.whl", hash = "sha256:9fbb4d6e48ecd30bcacc5b63b94088192dcda178513b2ae3c394229f8911b887"}, + {file = 
"beautifulsoup4-4.8.2.tar.gz", hash = "sha256:05fd825eb01c290877657a56df4c6e4c311b3965bda790c613a3d6fb01a5462a"}, +] +cached-property = [ + {file = "cached-property-1.5.2.tar.gz", hash = "sha256:9fa5755838eecbb2d234c3aa390bd80fbd3ac6b6869109bfc1b499f7bd89a130"}, + {file = "cached_property-1.5.2-py2.py3-none-any.whl", hash = "sha256:df4f613cf7ad9a588cc381aaf4a512d26265ecebd5eb9e1ba12f1319eb85a6a0"}, ] certifi = [ - {file = "certifi-2020.12.5-py2.py3-none-any.whl", hash = "sha256:719a74fb9e33b9bd44cc7f3a8d94bc35e4049deebe19ba7d8e108280cfd59830"}, - {file = "certifi-2020.12.5.tar.gz", hash = "sha256:1a4995114262bffbc2413b159f2a1a480c969de6e6eb13ee966d470af86af59c"}, + {file = "certifi-2021.10.8-py2.py3-none-any.whl", hash = "sha256:d62a0163eb4c2344ac042ab2bdf75399a71a2d8c7d47eac2e2ee91b9d6339569"}, + {file = "certifi-2021.10.8.tar.gz", hash = "sha256:78884e7c1d4b00ce3cea67b44566851c4343c120abd683433ce934a68ea58872"}, ] cffi = [ - {file = "cffi-1.14.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:bb89f306e5da99f4d922728ddcd6f7fcebb3241fc40edebcb7284d7514741991"}, - {file = "cffi-1.14.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:34eff4b97f3d982fb93e2831e6750127d1355a923ebaeeb565407b3d2f8d41a1"}, - {file = "cffi-1.14.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:99cd03ae7988a93dd00bcd9d0b75e1f6c426063d6f03d2f90b89e29b25b82dfa"}, - {file = "cffi-1.14.5-cp27-cp27m-win32.whl", hash = "sha256:65fa59693c62cf06e45ddbb822165394a288edce9e276647f0046e1ec26920f3"}, - {file = "cffi-1.14.5-cp27-cp27m-win_amd64.whl", hash = "sha256:51182f8927c5af975fece87b1b369f722c570fe169f9880764b1ee3bca8347b5"}, - {file = "cffi-1.14.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:43e0b9d9e2c9e5d152946b9c5fe062c151614b262fda2e7b201204de0b99e482"}, - {file = "cffi-1.14.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:cbde590d4faaa07c72bf979734738f328d239913ba3e043b1e98fe9a39f8b2b6"}, - {file = "cffi-1.14.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = 
"sha256:5de7970188bb46b7bf9858eb6890aad302577a5f6f75091fd7cdd3ef13ef3045"}, - {file = "cffi-1.14.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:a465da611f6fa124963b91bf432d960a555563efe4ed1cc403ba5077b15370aa"}, - {file = "cffi-1.14.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:d42b11d692e11b6634f7613ad8df5d6d5f8875f5d48939520d351007b3c13406"}, - {file = "cffi-1.14.5-cp35-cp35m-win32.whl", hash = "sha256:72d8d3ef52c208ee1c7b2e341f7d71c6fd3157138abf1a95166e6165dd5d4369"}, - {file = "cffi-1.14.5-cp35-cp35m-win_amd64.whl", hash = "sha256:29314480e958fd8aab22e4a58b355b629c59bf5f2ac2492b61e3dc06d8c7a315"}, - {file = "cffi-1.14.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:3d3dd4c9e559eb172ecf00a2a7517e97d1e96de2a5e610bd9b68cea3925b4892"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:48e1c69bbacfc3d932221851b39d49e81567a4d4aac3b21258d9c24578280058"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:69e395c24fc60aad6bb4fa7e583698ea6cc684648e1ffb7fe85e3c1ca131a7d5"}, - {file = "cffi-1.14.5-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:9e93e79c2551ff263400e1e4be085a1210e12073a31c2011dbbda14bda0c6132"}, - {file = "cffi-1.14.5-cp36-cp36m-win32.whl", hash = "sha256:58e3f59d583d413809d60779492342801d6e82fefb89c86a38e040c16883be53"}, - {file = "cffi-1.14.5-cp36-cp36m-win_amd64.whl", hash = "sha256:005a36f41773e148deac64b08f233873a4d0c18b053d37da83f6af4d9087b813"}, - {file = "cffi-1.14.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2894f2df484ff56d717bead0a5c2abb6b9d2bf26d6960c4604d5c48bbc30ee73"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0857f0ae312d855239a55c81ef453ee8fd24136eaba8e87a2eceba644c0d4c06"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:cd2868886d547469123fadc46eac7ea5253ea7fcb139f12e1dfc2bbd406427d1"}, - {file = "cffi-1.14.5-cp37-cp37m-manylinux2014_aarch64.whl", hash = 
"sha256:35f27e6eb43380fa080dccf676dece30bef72e4a67617ffda586641cd4508d49"}, - {file = "cffi-1.14.5-cp37-cp37m-win32.whl", hash = "sha256:9ff227395193126d82e60319a673a037d5de84633f11279e336f9c0f189ecc62"}, - {file = "cffi-1.14.5-cp37-cp37m-win_amd64.whl", hash = "sha256:9cf8022fb8d07a97c178b02327b284521c7708d7c71a9c9c355c178ac4bbd3d4"}, - {file = "cffi-1.14.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:8b198cec6c72df5289c05b05b8b0969819783f9418e0409865dac47288d2a053"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:ad17025d226ee5beec591b52800c11680fca3df50b8b29fe51d882576e039ee0"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6c97d7350133666fbb5cf4abdc1178c812cb205dc6f41d174a7b0f18fb93337e"}, - {file = "cffi-1.14.5-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:8ae6299f6c68de06f136f1f9e69458eae58f1dacf10af5c17353eae03aa0d827"}, - {file = "cffi-1.14.5-cp38-cp38-win32.whl", hash = "sha256:b85eb46a81787c50650f2392b9b4ef23e1f126313b9e0e9013b35c15e4288e2e"}, - {file = "cffi-1.14.5-cp38-cp38-win_amd64.whl", hash = "sha256:1f436816fc868b098b0d63b8920de7d208c90a67212546d02f84fe78a9c26396"}, - {file = "cffi-1.14.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:1071534bbbf8cbb31b498d5d9db0f274f2f7a865adca4ae429e147ba40f73dea"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:9de2e279153a443c656f2defd67769e6d1e4163952b3c622dcea5b08a6405322"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:6e4714cc64f474e4d6e37cfff31a814b509a35cb17de4fb1999907575684479c"}, - {file = "cffi-1.14.5-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:158d0d15119b4b7ff6b926536763dc0714313aa59e320ddf787502c70c4d4bee"}, - {file = "cffi-1.14.5-cp39-cp39-win32.whl", hash = "sha256:afb29c1ba2e5a3736f1c301d9d0abe3ec8b86957d04ddfa9d7a6a42b9367e396"}, - {file = "cffi-1.14.5-cp39-cp39-win_amd64.whl", hash = "sha256:f2d45f97ab6bb54753eab54fffe75aaf3de4ff2341c9daee1987ee1837636f1d"}, - {file = 
"cffi-1.14.5.tar.gz", hash = "sha256:fd78e5fee591709f32ef6edb9a015b4aa1a5022598e36227500c8f4e02328d9c"}, + {file = "cffi-1.15.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:c2502a1a03b6312837279c8c1bd3ebedf6c12c4228ddbad40912d671ccc8a962"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:23cfe892bd5dd8941608f93348c0737e369e51c100d03718f108bf1add7bd6d0"}, + {file = "cffi-1.15.0-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:41d45de54cd277a7878919867c0f08b0cf817605e4eb94093e7516505d3c8d14"}, + {file = "cffi-1.15.0-cp27-cp27m-win32.whl", hash = "sha256:4a306fa632e8f0928956a41fa8e1d6243c71e7eb59ffbd165fc0b41e316b2474"}, + {file = "cffi-1.15.0-cp27-cp27m-win_amd64.whl", hash = "sha256:e7022a66d9b55e93e1a845d8c9eba2a1bebd4966cd8bfc25d9cd07d515b33fa6"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:14cd121ea63ecdae71efa69c15c5543a4b5fbcd0bbe2aad864baca0063cecf27"}, + {file = "cffi-1.15.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d4d692a89c5cf08a8557fdeb329b82e7bf609aadfaed6c0d79f5a449a3c7c023"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0104fb5ae2391d46a4cb082abdd5c69ea4eab79d8d44eaaf79f1b1fd806ee4c2"}, + {file = "cffi-1.15.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:91ec59c33514b7c7559a6acda53bbfe1b283949c34fe7440bcf917f96ac0723e"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f5c7150ad32ba43a07c4479f40241756145a1f03b43480e058cfd862bf5041c7"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:00c878c90cb53ccfaae6b8bc18ad05d2036553e6d9d1d9dbcf323bbe83854ca3"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abb9a20a72ac4e0fdb50dae135ba5e77880518e742077ced47eb1499e29a443c"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:a5263e363c27b653a90078143adb3d076c1a748ec9ecc78ea2fb916f9b861962"}, + {file = "cffi-1.15.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f54a64f8b0c8ff0b64d18aa76675262e1700f3995182267998c31ae974fbc382"}, + {file = "cffi-1.15.0-cp310-cp310-win32.whl", hash = "sha256:c21c9e3896c23007803a875460fb786118f0cdd4434359577ea25eb556e34c55"}, + {file = "cffi-1.15.0-cp310-cp310-win_amd64.whl", hash = "sha256:5e069f72d497312b24fcc02073d70cb989045d1c91cbd53979366077959933e0"}, + {file = "cffi-1.15.0-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:64d4ec9f448dfe041705426000cc13e34e6e5bb13736e9fd62e34a0b0c41566e"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2756c88cbb94231c7a147402476be2c4df2f6078099a6f4a480d239a8817ae39"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b96a311ac60a3f6be21d2572e46ce67f09abcf4d09344c49274eb9e0bf345fc"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75e4024375654472cc27e91cbe9eaa08567f7fbdf822638be2814ce059f58032"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:59888172256cac5629e60e72e86598027aca6bf01fa2465bdb676d37636573e8"}, + {file = "cffi-1.15.0-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:27c219baf94952ae9d50ec19651a687b826792055353d07648a5695413e0c605"}, + {file = "cffi-1.15.0-cp36-cp36m-win32.whl", hash = "sha256:4958391dbd6249d7ad855b9ca88fae690783a6be9e86df65865058ed81fc860e"}, + {file = "cffi-1.15.0-cp36-cp36m-win_amd64.whl", hash = "sha256:f6f824dc3bce0edab5f427efcfb1d63ee75b6fcb7282900ccaf925be84efb0fc"}, + {file = "cffi-1.15.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:06c48159c1abed75c2e721b1715c379fa3200c7784271b3c46df01383b593636"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.whl", hash = 
"sha256:c2051981a968d7de9dd2d7b87bcb9c939c74a34626a6e2f8181455dd49ed69e4"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:fd8a250edc26254fe5b33be00402e6d287f562b6a5b2152dec302fa15bb3e997"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91d77d2a782be4274da750752bb1650a97bfd8f291022b379bb8e01c66b4e96b"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:45db3a33139e9c8f7c09234b5784a5e33d31fd6907800b316decad50af323ff2"}, + {file = "cffi-1.15.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:263cc3d821c4ab2213cbe8cd8b355a7f72a8324577dc865ef98487c1aeee2bc7"}, + {file = "cffi-1.15.0-cp37-cp37m-win32.whl", hash = "sha256:17771976e82e9f94976180f76468546834d22a7cc404b17c22df2a2c81db0c66"}, + {file = "cffi-1.15.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3415c89f9204ee60cd09b235810be700e993e343a408693e80ce7f6a40108029"}, + {file = "cffi-1.15.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4238e6dab5d6a8ba812de994bbb0a79bddbdf80994e4ce802b6f6f3142fcc880"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0808014eb713677ec1292301ea4c81ad277b6cdf2fdd90fd540af98c0b101d20"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:57e9ac9ccc3101fac9d6014fba037473e4358ef4e89f8e181f8951a2c0162024"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b6c2ea03845c9f501ed1313e78de148cd3f6cad741a75d43a29b43da27f2e1e"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:10dffb601ccfb65262a27233ac273d552ddc4d8ae1bf93b21c94b8511bffe728"}, + {file = "cffi-1.15.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:786902fb9ba7433aae840e0ed609f45c7bcd4e225ebb9c753aa39725bb3e6ad6"}, + {file = 
"cffi-1.15.0-cp38-cp38-win32.whl", hash = "sha256:da5db4e883f1ce37f55c667e5c0de439df76ac4cb55964655906306918e7363c"}, + {file = "cffi-1.15.0-cp38-cp38-win_amd64.whl", hash = "sha256:181dee03b1170ff1969489acf1c26533710231c58f95534e3edac87fff06c443"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:45e8636704eacc432a206ac7345a5d3d2c62d95a507ec70d62f23cd91770482a"}, + {file = "cffi-1.15.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:31fb708d9d7c3f49a60f04cf5b119aeefe5644daba1cd2a0fe389b674fd1de37"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:6dc2737a3674b3e344847c8686cf29e500584ccad76204efea14f451d4cc669a"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74fdfdbfdc48d3f47148976f49fab3251e550a8720bebc99bf1483f5bfb5db3e"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ffaa5c925128e29efbde7301d8ecaf35c8c60ffbcd6a1ffd3a552177c8e5e796"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3f7d084648d77af029acb79a0ff49a0ad7e9d09057a9bf46596dac9514dc07df"}, + {file = "cffi-1.15.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:ef1f279350da2c586a69d32fc8733092fd32cc8ac95139a00377841f59a3f8d8"}, + {file = "cffi-1.15.0-cp39-cp39-win32.whl", hash = "sha256:2a23af14f408d53d5e6cd4e3d9a24ff9e05906ad574822a10563efcef137979a"}, + {file = "cffi-1.15.0-cp39-cp39-win_amd64.whl", hash = "sha256:3773c4d81e6e818df2efbc7dd77325ca0dcb688116050fb2b3011218eda36139"}, + {file = "cffi-1.15.0.tar.gz", hash = "sha256:920f0d66a896c2d99f0adbb391f990a84091179542c205fa53ce5787aff87954"}, ] chardet = [ {file = "chardet-3.0.4-py2.py3-none-any.whl", hash = "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"}, {file = "chardet-3.0.4.tar.gz", hash = "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae"}, ] 
+charset-normalizer = [ + {file = "charset-normalizer-2.0.11.tar.gz", hash = "sha256:98398a9d69ee80548c762ba991a4728bfc3836768ed226b3945908d1a688371c"}, + {file = "charset_normalizer-2.0.11-py3-none-any.whl", hash = "sha256:2842d8f5e82a1f6aa437380934d5e1cd4fcf2003b06fed6940769c164a480a45"}, +] +click = [ + {file = "click-8.0.3-py3-none-any.whl", hash = "sha256:353f466495adaeb40b6b5f592f9f91cb22372351c84caeb068132442a4518ef3"}, + {file = "click-8.0.3.tar.gz", hash = "sha256:410e932b050f5eed773c4cda94de75971c89cdb3155a72a0831139a79e5ecb5b"}, +] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, + {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, +] +commonmark = [ + {file = "commonmark-0.9.1-py2.py3-none-any.whl", hash = "sha256:da2f38c92590f83de410ba1a3cbceafbc74fee9def35f9251ba9a971d6d66fd9"}, + {file = "commonmark-0.9.1.tar.gz", hash = "sha256:452f9dc859be7f06631ddcb328b6919c67984aca654e5fefb3914d54691aed60"}, ] -decorator = [ - {file = "decorator-5.0.7-py3-none-any.whl", hash = "sha256:945d84890bb20cc4a2f4a31fc4311c0c473af65ea318617f13a7257c9a58bc98"}, - {file = "decorator-5.0.7.tar.gz", hash = "sha256:6f201a6c4dac3d187352661f508b9364ec8091217442c9478f1f83c003a0f060"}, +compressed-rtf = [ + {file = "compressed_rtf-1.0.6.tar.gz", hash = "sha256:c1c827f1d124d24608981a56e8b8691eb1f2a69a78ccad6440e7d92fde1781dd"}, ] -docopt = [ - {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, +contextlib2 = [ + {file = "contextlib2-21.6.0-py2.py3-none-any.whl", hash = "sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f"}, + {file = "contextlib2-21.6.0.tar.gz", hash = "sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869"}, ] -docutils = [ - {file = "docutils-0.16-py2.py3-none-any.whl", hash = 
"sha256:0c5b78adfbf7762415433f5515cd5c9e762339e23369dbe8000d84a4bf4ab3af"}, - {file = "docutils-0.16.tar.gz", hash = "sha256:c2de3a60e9e7d07be26b7f2b00ca0309c207e06c100f9cc2a94931fc75a478fc"}, +dataclasses = [ + {file = "dataclasses-0.8-py3-none-any.whl", hash = "sha256:0201d89fa866f68c8ebd9d08ee6ff50c0b255f8ec63a71c16fda7af82bb887bf"}, + {file = "dataclasses-0.8.tar.gz", hash = "sha256:8479067f342acf957dc82ec415d355ab5edb7e7646b90dc6e2fd1d96ad084c97"}, ] docx2txt = [ {file = "docx2txt-0.8.tar.gz", hash = "sha256:2c06d98d7cfe2d3947e5760a57d924e3ff07745b379c8737723922e7009236e5"}, ] +ebcdic = [ + {file = "ebcdic-1.1.1-py2.py3-none-any.whl", hash = "sha256:33b4cb729bc2d0bf46cc1847b0e5946897cb8d3f53520c5b9aa5fa98d7e735f1"}, +] ebooklib = [ {file = "EbookLib-0.17.1.tar.gz", hash = "sha256:fe23e22c28050196c68db3e7b13b257bf39426d927cb395c6f2cc13ac11327f1"}, ] @@ -1159,188 +1117,227 @@ exifread = [ {file = "ExifRead-2.3.2.tar.gz", hash = "sha256:a0f74af5040168d3883bbc980efe26d06c89f026dc86ba28eb34107662d51766"}, ] extract-msg = [ - {file = "extract_msg-0.23.1-py2.py3-none-any.whl", hash = "sha256:0e733743d4b5b7ca62265d1477d4b99f03e44f3202fa53ee97d54f5b2c75b1b3"}, - {file = "extract_msg-0.23.1.tar.gz", hash = "sha256:3746d5f68266740575ef9097516f39c5f601fa031e188cea338a13b66de16ada"}, + {file = "extract_msg-0.29.0-py2.py3-none-any.whl", hash = "sha256:a8885dc385d0c88c4b87fb2a573727c0115cd2ef5157956cf183878f940eef28"}, + {file = "extract_msg-0.29.0.tar.gz", hash = "sha256:ae6ce5f78fddb582350cb49bbf2776eadecdbf3c74b7a305dced42bd187a5401"}, ] -flake8 = [ - {file = "flake8-3.9.1-py2.py3-none-any.whl", hash = "sha256:3b9f848952dddccf635be78098ca75010f073bfe14d2c6bda867154bea728d2a"}, - {file = "flake8-3.9.1.tar.gz", hash = "sha256:1aa8990be1e689d96c745c5682b687ea49f2e05a443aff1f8251092b0014e378"}, +fs = [ + {file = "fs-2.4.14-py2.py3-none-any.whl", hash = "sha256:b298013377f51125b3d7f0c86920de4e3e2d4a83731bd5caf1f1e5bddabe7798"}, + {file = "fs-2.4.14.tar.gz", hash = 
"sha256:9555dc2bc58c58cac03478ac7e9f622d29fe2d20a4384c24c90ab50de2c7b36c"}, ] -idna = [ - {file = "idna-2.10-py2.py3-none-any.whl", hash = "sha256:b97d804b1e9b523befed77c48dacec60e6dcb0b5391d57af6a65a312a90648c0"}, - {file = "idna-2.10.tar.gz", hash = "sha256:b307872f855b18632ce0c21c5e45be78c0ea7ae4c15c828c20788b26921eb3f6"}, +ghp-import = [ + {file = "ghp-import-2.0.2.tar.gz", hash = "sha256:947b3771f11be850c852c64b561c600fdddf794bab363060854c1ee7ad05e071"}, + {file = "ghp_import-2.0.2-py3-none-any.whl", hash = "sha256:5f8962b30b20652cdffa9c5a9812f7de6bcb56ec475acac579807719bf242c46"}, ] -imagesize = [ - {file = "imagesize-1.2.0-py2.py3-none-any.whl", hash = "sha256:6965f19a6a2039c7d48bca7dba2473069ff854c36ae6f19d2cde309d998228a1"}, - {file = "imagesize-1.2.0.tar.gz", hash = "sha256:b1f6b5a4eab1f73479a50fb79fcf729514a900c341d8503d62a62dbc4127a2b1"}, +idna = [ + {file = "idna-3.3-py3-none-any.whl", hash = "sha256:84d9dd047ffa80596e0f246e2eab0b391788b0503584e8945f2368256d2735ff"}, + {file = "idna-3.3.tar.gz", hash = "sha256:9d643ff0a55b762d5cdb124b8eaa99c66322e2157b69160bc32796e824360e6d"}, ] imapclient = [ {file = "IMAPClient-2.1.0-py2.py3-none-any.whl", hash = "sha256:3eeb97b9aa8faab0caa5024d74bfde59408fbd542781246f6960873c7bf0dd01"}, {file = "IMAPClient-2.1.0.zip", hash = "sha256:60ba79758cc9f13ec910d7a3df9acaaf2bb6c458720d9a02ec33a41352fd1b99"}, ] importlib-metadata = [ - {file = "importlib_metadata-4.0.1-py3-none-any.whl", hash = "sha256:d7eb1dea6d6a6086f8be21784cc9e3bcfa55872b52309bc5fad53a8ea444465d"}, - {file = "importlib_metadata-4.0.1.tar.gz", hash = "sha256:8c501196e49fb9df5df43833bdb1e4328f64847763ec8a50703148b73784d581"}, -] -ipdb = [ - {file = "ipdb-0.12.3.tar.gz", hash = "sha256:5d9a4a0e3b7027a158fc6f2929934341045b9c3b0b86ed5d7e84e409653f72fd"}, + {file = "importlib_metadata-4.8.3-py3-none-any.whl", hash = "sha256:65a9576a5b2d58ca44d133c42a241905cc45e34d2c06fd5ba2bafa221e5d7b5e"}, + {file = "importlib_metadata-4.8.3.tar.gz", hash = 
"sha256:766abffff765960fcc18003801f7044eb6755ffae4521c8e8ce8e83b9c9b0668"}, ] -ipython = [ - {file = "ipython-7.16.1-py3-none-any.whl", hash = "sha256:2dbcc8c27ca7d3cfe4fcdff7f45b27f9a8d3edfa70ff8024a71c7a8eb5f09d64"}, - {file = "ipython-7.16.1.tar.gz", hash = "sha256:9f4fcb31d3b2c533333893b9172264e4821c1ac91839500f31bd43f2c59b3ccf"}, +importlib-resources = [ + {file = "importlib_resources-5.4.0-py3-none-any.whl", hash = "sha256:33a95faed5fc19b4bc16b29a6eeae248a3fe69dd55d4d229d2b480e23eeaad45"}, + {file = "importlib_resources-5.4.0.tar.gz", hash = "sha256:d756e2f85dd4de2ba89be0b21dba2a3bbec2e871a42a3a16719258a11f87506b"}, ] -ipython-genutils = [ - {file = "ipython_genutils-0.2.0-py2.py3-none-any.whl", hash = "sha256:72dd37233799e619666c9f639a9da83c34013a73e8bbc79a7a6348d93c61fab8"}, - {file = "ipython_genutils-0.2.0.tar.gz", hash = "sha256:eb2e116e75ecef9d4d228fdc66af54269afa26ab4463042e33785b887c628ba8"}, -] -isort = [ - {file = "isort-5.8.0-py3-none-any.whl", hash = "sha256:2bb1680aad211e3c9944dbce1d4ba09a989f04e238296c87fe2139faa26d655d"}, - {file = "isort-5.8.0.tar.gz", hash = "sha256:0a943902919f65c5684ac4e0154b1ad4fac6dcaa5d9f3426b732f1c8b5419be6"}, -] -jedi = [ - {file = "jedi-0.18.0-py2.py3-none-any.whl", hash = "sha256:18456d83f65f400ab0c2d3319e48520420ef43b23a086fdc05dff34132f0fb93"}, - {file = "jedi-0.18.0.tar.gz", hash = "sha256:92550a404bad8afed881a137ec9a461fed49eca661414be45059329614ed0707"}, +iniconfig = [ + {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, + {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] jinja2 = [ - {file = "Jinja2-2.11.3-py2.py3-none-any.whl", hash = "sha256:03e47ad063331dd6a3f04a43eddca8a966a26ba0c5b7207a9a9e4e08f1b29419"}, - {file = "Jinja2-2.11.3.tar.gz", hash = "sha256:a6d58433de0ae800347cab1fa3043cebbabe8baa9d29e668f1c768cb87a333c6"}, -] -lazy-object-proxy = [ - {file = 
"lazy-object-proxy-1.6.0.tar.gz", hash = "sha256:489000d368377571c6f982fba6497f2aa13c6d1facc40660963da62f5c379726"}, - {file = "lazy_object_proxy-1.6.0-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:c6938967f8528b3668622a9ed3b31d145fab161a32f5891ea7b84f6b790be05b"}, - {file = "lazy_object_proxy-1.6.0-cp27-cp27m-win32.whl", hash = "sha256:ebfd274dcd5133e0afae738e6d9da4323c3eb021b3e13052d8cbd0e457b1256e"}, - {file = "lazy_object_proxy-1.6.0-cp27-cp27m-win_amd64.whl", hash = "sha256:ed361bb83436f117f9917d282a456f9e5009ea12fd6de8742d1a4752c3017e93"}, - {file = "lazy_object_proxy-1.6.0-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:d900d949b707778696fdf01036f58c9876a0d8bfe116e8d220cfd4b15f14e741"}, - {file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:5743a5ab42ae40caa8421b320ebf3a998f89c85cdc8376d6b2e00bd12bd1b587"}, - {file = "lazy_object_proxy-1.6.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:bf34e368e8dd976423396555078def5cfc3039ebc6fc06d1ae2c5a65eebbcde4"}, - {file = "lazy_object_proxy-1.6.0-cp36-cp36m-win32.whl", hash = "sha256:b579f8acbf2bdd9ea200b1d5dea36abd93cabf56cf626ab9c744a432e15c815f"}, - {file = "lazy_object_proxy-1.6.0-cp36-cp36m-win_amd64.whl", hash = "sha256:4f60460e9f1eb632584c9685bccea152f4ac2130e299784dbaf9fae9f49891b3"}, - {file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:d7124f52f3bd259f510651450e18e0fd081ed82f3c08541dffc7b94b883aa981"}, - {file = "lazy_object_proxy-1.6.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:22ddd618cefe54305df49e4c069fa65715be4ad0e78e8d252a33debf00f6ede2"}, - {file = "lazy_object_proxy-1.6.0-cp37-cp37m-win32.whl", hash = "sha256:9d397bf41caad3f489e10774667310d73cb9c4258e9aed94b9ec734b34b495fd"}, - {file = "lazy_object_proxy-1.6.0-cp37-cp37m-win_amd64.whl", hash = "sha256:24a5045889cc2729033b3e604d496c2b6f588c754f7a62027ad4437a7ecc4837"}, - {file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux1_x86_64.whl", hash = 
"sha256:17e0967ba374fc24141738c69736da90e94419338fd4c7c7bef01ee26b339653"}, - {file = "lazy_object_proxy-1.6.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:410283732af311b51b837894fa2f24f2c0039aa7f220135192b38fcc42bd43d3"}, - {file = "lazy_object_proxy-1.6.0-cp38-cp38-win32.whl", hash = "sha256:85fb7608121fd5621cc4377a8961d0b32ccf84a7285b4f1d21988b2eae2868e8"}, - {file = "lazy_object_proxy-1.6.0-cp38-cp38-win_amd64.whl", hash = "sha256:d1c2676e3d840852a2de7c7d5d76407c772927addff8d742b9808fe0afccebdf"}, - {file = "lazy_object_proxy-1.6.0-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:b865b01a2e7f96db0c5d12cfea590f98d8c5ba64ad222300d93ce6ff9138bcad"}, - {file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:4732c765372bd78a2d6b2150a6e99d00a78ec963375f236979c0626b97ed8e43"}, - {file = "lazy_object_proxy-1.6.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:9698110e36e2df951c7c36b6729e96429c9c32b3331989ef19976592c5f3c77a"}, - {file = "lazy_object_proxy-1.6.0-cp39-cp39-win32.whl", hash = "sha256:1fee665d2638491f4d6e55bd483e15ef21f6c8c2095f235fef72601021e64f61"}, - {file = "lazy_object_proxy-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:f5144c75445ae3ca2057faac03fda5a902eff196702b0a24daf1d6ce0650514b"}, + {file = "Jinja2-3.0.3-py3-none-any.whl", hash = "sha256:077ce6014f7b40d03b47d1f1ca4b0fc8328a692bd284016f806ed0eaca390ad8"}, + {file = "Jinja2-3.0.3.tar.gz", hash = "sha256:611bb273cd68f3b993fabdc4064fc858c5b47a973cb5aa7999ec1ba405c87cd7"}, ] lxml = [ - {file = "lxml-4.6.3-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:df7c53783a46febb0e70f6b05df2ba104610f2fb0d27023409734a3ecbb78fb2"}, - {file = "lxml-4.6.3-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:1b7584d421d254ab86d4f0b13ec662a9014397678a7c4265a02a6d7c2b18a75f"}, - {file = "lxml-4.6.3-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:079f3ae844f38982d156efce585bc540c16a926d4436712cf4baee0cce487a3d"}, - {file = "lxml-4.6.3-cp27-cp27m-win32.whl", hash = 
"sha256:bc4313cbeb0e7a416a488d72f9680fffffc645f8a838bd2193809881c67dd106"}, - {file = "lxml-4.6.3-cp27-cp27m-win_amd64.whl", hash = "sha256:8157dadbb09a34a6bd95a50690595e1fa0af1a99445e2744110e3dca7831c4ee"}, - {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:7728e05c35412ba36d3e9795ae8995e3c86958179c9770e65558ec3fdfd3724f"}, - {file = "lxml-4.6.3-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:4bff24dfeea62f2e56f5bab929b4428ae6caba2d1eea0c2d6eb618e30a71e6d4"}, - {file = "lxml-4.6.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:74f7d8d439b18fa4c385f3f5dfd11144bb87c1da034a466c5b5577d23a1d9b51"}, - {file = "lxml-4.6.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f90ba11136bfdd25cae3951af8da2e95121c9b9b93727b1b896e3fa105b2f586"}, - {file = "lxml-4.6.3-cp35-cp35m-win32.whl", hash = "sha256:f2380a6376dfa090227b663f9678150ef27543483055cc327555fb592c5967e2"}, - {file = "lxml-4.6.3-cp35-cp35m-win_amd64.whl", hash = "sha256:c4f05c5a7c49d2fb70223d0d5bcfbe474cf928310ac9fa6a7c6dddc831d0b1d4"}, - {file = "lxml-4.6.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:d2e35d7bf1c1ac8c538f88d26b396e73dd81440d59c1ef8522e1ea77b345ede4"}, - {file = "lxml-4.6.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:289e9ca1a9287f08daaf796d96e06cb2bc2958891d7911ac7cae1c5f9e1e0ee3"}, - {file = "lxml-4.6.3-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:bccbfc27563652de7dc9bdc595cb25e90b59c5f8e23e806ed0fd623755b6565d"}, - {file = "lxml-4.6.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:820628b7b3135403540202e60551e741f9b6d3304371712521be939470b454ec"}, - {file = "lxml-4.6.3-cp36-cp36m-win32.whl", hash = "sha256:5a0a14e264069c03e46f926be0d8919f4105c1623d620e7ec0e612a2e9bf1c04"}, - {file = "lxml-4.6.3-cp36-cp36m-win_amd64.whl", hash = "sha256:92e821e43ad382332eade6812e298dc9701c75fe289f2a2d39c7960b43d1e92a"}, - {file = "lxml-4.6.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:efd7a09678fd8b53117f6bae4fa3825e0a22b03ef0a932e070c0bdbb3a35e654"}, - {file = 
"lxml-4.6.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:efac139c3f0bf4f0939f9375af4b02c5ad83a622de52d6dfa8e438e8e01d0eb0"}, - {file = "lxml-4.6.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:0fbcf5565ac01dff87cbfc0ff323515c823081c5777a9fc7703ff58388c258c3"}, - {file = "lxml-4.6.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:122fba10466c7bd4178b07dba427aa516286b846b2cbd6f6169141917283aae2"}, - {file = "lxml-4.6.3-cp37-cp37m-win32.whl", hash = "sha256:3439c71103ef0e904ea0a1901611863e51f50b5cd5e8654a151740fde5e1cade"}, - {file = "lxml-4.6.3-cp37-cp37m-win_amd64.whl", hash = "sha256:4289728b5e2000a4ad4ab8da6e1db2e093c63c08bdc0414799ee776a3f78da4b"}, - {file = "lxml-4.6.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b007cbb845b28db4fb8b6a5cdcbf65bacb16a8bd328b53cbc0698688a68e1caa"}, - {file = "lxml-4.6.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:76fa7b1362d19f8fbd3e75fe2fb7c79359b0af8747e6f7141c338f0bee2f871a"}, - {file = "lxml-4.6.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:26e761ab5b07adf5f555ee82fb4bfc35bf93750499c6c7614bd64d12aaa67927"}, - {file = "lxml-4.6.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:66e575c62792c3f9ca47cb8b6fab9e35bab91360c783d1606f758761810c9791"}, - {file = "lxml-4.6.3-cp38-cp38-win32.whl", hash = "sha256:89b8b22a5ff72d89d48d0e62abb14340d9e99fd637d046c27b8b257a01ffbe28"}, - {file = "lxml-4.6.3-cp38-cp38-win_amd64.whl", hash = "sha256:2a9d50e69aac3ebee695424f7dbd7b8c6d6eb7de2a2eb6b0f6c7db6aa41e02b7"}, - {file = "lxml-4.6.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ce256aaa50f6cc9a649c51be3cd4ff142d67295bfc4f490c9134d0f9f6d58ef0"}, - {file = "lxml-4.6.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:7610b8c31688f0b1be0ef882889817939490a36d0ee880ea562a4e1399c447a1"}, - {file = "lxml-4.6.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f8380c03e45cf09f8557bdaa41e1fa7c81f3ae22828e1db470ab2a6c96d8bc23"}, - {file = "lxml-4.6.3-cp39-cp39-manylinux2014_aarch64.whl", hash = 
"sha256:884ab9b29feaca361f7f88d811b1eea9bfca36cf3da27768d28ad45c3ee6f969"}, - {file = "lxml-4.6.3-cp39-cp39-win32.whl", hash = "sha256:33bb934a044cf32157c12bfcfbb6649807da20aa92c062ef51903415c704704f"}, - {file = "lxml-4.6.3-cp39-cp39-win_amd64.whl", hash = "sha256:542d454665a3e277f76954418124d67516c5f88e51a900365ed54a9806122b83"}, - {file = "lxml-4.6.3.tar.gz", hash = "sha256:39b78571b3b30645ac77b95f7c69d1bffc4cf8c3b157c435a34da72e78c82468"}, + {file = "lxml-4.7.1-cp27-cp27m-macosx_10_14_x86_64.whl", hash = "sha256:d546431636edb1d6a608b348dd58cc9841b81f4116745857b6cb9f8dadb2725f"}, + {file = "lxml-4.7.1-cp27-cp27m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6308062534323f0d3edb4e702a0e26a76ca9e0e23ff99be5d82750772df32a9e"}, + {file = "lxml-4.7.1-cp27-cp27m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:f76dbe44e31abf516114f6347a46fa4e7c2e8bceaa4b6f7ee3a0a03c8eba3c17"}, + {file = "lxml-4.7.1-cp27-cp27m-win32.whl", hash = "sha256:d5618d49de6ba63fe4510bdada62d06a8acfca0b4b5c904956c777d28382b419"}, + {file = "lxml-4.7.1-cp27-cp27m-win_amd64.whl", hash = "sha256:9393a05b126a7e187f3e38758255e0edf948a65b22c377414002d488221fdaa2"}, + {file = "lxml-4.7.1-cp27-cp27mu-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:50d3dba341f1e583265c1a808e897b4159208d814ab07530202b6036a4d86da5"}, + {file = "lxml-4.7.1-cp27-cp27mu-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:44f552e0da3c8ee3c28e2eb82b0b784200631687fc6a71277ea8ab0828780e7d"}, + {file = "lxml-4.7.1-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:e662c6266e3a275bdcb6bb049edc7cd77d0b0f7e119a53101d367c841afc66dc"}, + {file = "lxml-4.7.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:4c093c571bc3da9ebcd484e001ba18b8452903cd428c0bc926d9b0141bcb710e"}, + {file = "lxml-4.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = 
"sha256:3e26ad9bc48d610bf6cc76c506b9e5ad9360ed7a945d9be3b5b2c8535a0145e3"}, + {file = "lxml-4.7.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a5f623aeaa24f71fce3177d7fee875371345eb9102b355b882243e33e04b7175"}, + {file = "lxml-4.7.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7b5e2acefd33c259c4a2e157119c4373c8773cf6793e225006a1649672ab47a6"}, + {file = "lxml-4.7.1-cp310-cp310-win32.whl", hash = "sha256:67fa5f028e8a01e1d7944a9fb616d1d0510d5d38b0c41708310bd1bc45ae89f6"}, + {file = "lxml-4.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:b1d381f58fcc3e63fcc0ea4f0a38335163883267f77e4c6e22d7a30877218a0e"}, + {file = "lxml-4.7.1-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:38d9759733aa04fb1697d717bfabbedb21398046bd07734be7cccc3d19ea8675"}, + {file = "lxml-4.7.1-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:dfd0d464f3d86a1460683cd742306d1138b4e99b79094f4e07e1ca85ee267fe7"}, + {file = "lxml-4.7.1-cp35-cp35m-win32.whl", hash = "sha256:534e946bce61fd162af02bad7bfd2daec1521b71d27238869c23a672146c34a5"}, + {file = "lxml-4.7.1-cp35-cp35m-win_amd64.whl", hash = "sha256:6ec829058785d028f467be70cd195cd0aaf1a763e4d09822584ede8c9eaa4b03"}, + {file = "lxml-4.7.1-cp36-cp36m-macosx_10_14_x86_64.whl", hash = "sha256:ade74f5e3a0fd17df5782896ddca7ddb998845a5f7cd4b0be771e1ffc3b9aa5b"}, + {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:41358bfd24425c1673f184d7c26c6ae91943fe51dfecc3603b5e08187b4bcc55"}, + {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:6e56521538f19c4a6690f439fefed551f0b296bd785adc67c1777c348beb943d"}, + {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:5b0f782f0e03555c55e37d93d7a57454efe7495dab33ba0ccd2dbe25fc50f05d"}, + {file = "lxml-4.7.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = 
"sha256:490712b91c65988012e866c411a40cc65b595929ececf75eeb4c79fcc3bc80a6"}, + {file = "lxml-4.7.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:34c22eb8c819d59cec4444d9eebe2e38b95d3dcdafe08965853f8799fd71161d"}, + {file = "lxml-4.7.1-cp36-cp36m-win32.whl", hash = "sha256:2a906c3890da6a63224d551c2967413b8790a6357a80bf6b257c9a7978c2c42d"}, + {file = "lxml-4.7.1-cp36-cp36m-win_amd64.whl", hash = "sha256:36b16fecb10246e599f178dd74f313cbdc9f41c56e77d52100d1361eed24f51a"}, + {file = "lxml-4.7.1-cp37-cp37m-macosx_10_14_x86_64.whl", hash = "sha256:a5edc58d631170de90e50adc2cc0248083541affef82f8cd93bea458e4d96db8"}, + {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:87c1b0496e8c87ec9db5383e30042357b4839b46c2d556abd49ec770ce2ad868"}, + {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:0a5f0e4747f31cff87d1eb32a6000bde1e603107f632ef4666be0dc065889c7a"}, + {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:bf6005708fc2e2c89a083f258b97709559a95f9a7a03e59f805dd23c93bc3986"}, + {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:fc15874816b9320581133ddc2096b644582ab870cf6a6ed63684433e7af4b0d3"}, + {file = "lxml-4.7.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:0b5e96e25e70917b28a5391c2ed3ffc6156513d3db0e1476c5253fcd50f7a944"}, + {file = "lxml-4.7.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ec9027d0beb785a35aa9951d14e06d48cfbf876d8ff67519403a2522b181943b"}, + {file = "lxml-4.7.1-cp37-cp37m-win32.whl", hash = "sha256:9fbc0dee7ff5f15c4428775e6fa3ed20003140560ffa22b88326669d53b3c0f4"}, + {file = "lxml-4.7.1-cp37-cp37m-win_amd64.whl", hash = "sha256:1104a8d47967a414a436007c52f533e933e5d52574cab407b1e49a4e9b5ddbd1"}, + {file = "lxml-4.7.1-cp38-cp38-macosx_10_14_x86_64.whl", hash = 
"sha256:fc9fb11b65e7bc49f7f75aaba1b700f7181d95d4e151cf2f24d51bfd14410b77"}, + {file = "lxml-4.7.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:317bd63870b4d875af3c1be1b19202de34c32623609ec803b81c99193a788c1e"}, + {file = "lxml-4.7.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:610807cea990fd545b1559466971649e69302c8a9472cefe1d6d48a1dee97440"}, + {file = "lxml-4.7.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:09b738360af8cb2da275998a8bf79517a71225b0de41ab47339c2beebfff025f"}, + {file = "lxml-4.7.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:6a2ab9d089324d77bb81745b01f4aeffe4094306d939e92ba5e71e9a6b99b71e"}, + {file = "lxml-4.7.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:eed394099a7792834f0cb4a8f615319152b9d801444c1c9e1b1a2c36d2239f9e"}, + {file = "lxml-4.7.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:735e3b4ce9c0616e85f302f109bdc6e425ba1670a73f962c9f6b98a6d51b77c9"}, + {file = "lxml-4.7.1-cp38-cp38-win32.whl", hash = "sha256:772057fba283c095db8c8ecde4634717a35c47061d24f889468dc67190327bcd"}, + {file = "lxml-4.7.1-cp38-cp38-win_amd64.whl", hash = "sha256:13dbb5c7e8f3b6a2cf6e10b0948cacb2f4c9eb05029fe31c60592d08ac63180d"}, + {file = "lxml-4.7.1-cp39-cp39-macosx_10_14_x86_64.whl", hash = "sha256:718d7208b9c2d86aaf0294d9381a6acb0158b5ff0f3515902751404e318e02c9"}, + {file = "lxml-4.7.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:5bee1b0cbfdb87686a7fb0e46f1d8bd34d52d6932c0723a86de1cc532b1aa489"}, + {file = "lxml-4.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:e410cf3a2272d0a85526d700782a2fa92c1e304fdcc519ba74ac80b8297adf36"}, + {file = "lxml-4.7.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = 
"sha256:585ea241ee4961dc18a95e2f5581dbc26285fcf330e007459688096f76be8c42"}, + {file = "lxml-4.7.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a555e06566c6dc167fbcd0ad507ff05fd9328502aefc963cb0a0547cfe7f00db"}, + {file = "lxml-4.7.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:adaab25be351fff0d8a691c4f09153647804d09a87a4e4ea2c3f9fe9e8651851"}, + {file = "lxml-4.7.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:82d16a64236970cb93c8d63ad18c5b9f138a704331e4b916b2737ddfad14e0c4"}, + {file = "lxml-4.7.1-cp39-cp39-win32.whl", hash = "sha256:59e7da839a1238807226f7143c68a479dee09244d1b3cf8c134f2fce777d12d0"}, + {file = "lxml-4.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:a1bbc4efa99ed1310b5009ce7f3a1784698082ed2c1ef3895332f5df9b3b92c2"}, + {file = "lxml-4.7.1-pp37-pypy37_pp73-macosx_10_14_x86_64.whl", hash = "sha256:0607ff0988ad7e173e5ddf7bf55ee65534bd18a5461183c33e8e41a59e89edf4"}, + {file = "lxml-4.7.1-pp37-pypy37_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:6c198bfc169419c09b85ab10cb0f572744e686f40d1e7f4ed09061284fc1303f"}, + {file = "lxml-4.7.1-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:a58d78653ae422df6837dd4ca0036610b8cb4962b5cfdbd337b7b24de9e5f98a"}, + {file = "lxml-4.7.1-pp38-pypy38_pp73-macosx_10_14_x86_64.whl", hash = "sha256:e18281a7d80d76b66a9f9e68a98cf7e1d153182772400d9a9ce855264d7d0ce7"}, + {file = "lxml-4.7.1-pp38-pypy38_pp73-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:8e54945dd2eeb50925500957c7c579df3cd07c29db7810b83cf30495d79af267"}, + {file = "lxml-4.7.1-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_24_x86_64.whl", hash = "sha256:447d5009d6b5447b2f237395d0018901dcc673f7d9f82ba26c1b9f9c3b444b60"}, + {file = "lxml-4.7.1.tar.gz", hash = "sha256:a1613838aa6b89af4ba10a0f3a972836128801ed008078f8c1244e65958f1b24"}, ] macos-tags = [ {file = 
"macos-tags-1.5.1.tar.gz", hash = "sha256:f144c5bc05d01573966d8aca2483cb345b20b76a5b32e9967786e086a38712e7"}, {file = "macos_tags-1.5.1-py3-none-any.whl", hash = "sha256:56419233af32242b703dd35bcf38c9f198abd969faddbe986eb8aaa6d95349cf"}, ] +markdown = [ + {file = "Markdown-3.3.6-py3-none-any.whl", hash = "sha256:9923332318f843411e9932237530df53162e29dc7a4e2b91e35764583c46c9a3"}, + {file = "Markdown-3.3.6.tar.gz", hash = "sha256:76df8ae32294ec39dcf89340382882dfa12975f87f45c3ed1ecdb1e8cefc7006"}, +] markupsafe = [ - {file = "MarkupSafe-1.1.1-cp27-cp27m-macosx_10_6_intel.whl", hash = "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161"}, - {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"}, - {file = "MarkupSafe-1.1.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183"}, - {file = "MarkupSafe-1.1.1-cp27-cp27m-win32.whl", hash = "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b"}, - {file = "MarkupSafe-1.1.1-cp27-cp27m-win_amd64.whl", hash = "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e"}, - {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f"}, - {file = "MarkupSafe-1.1.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1"}, - {file = "MarkupSafe-1.1.1-cp34-cp34m-macosx_10_6_intel.whl", hash = "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5"}, - {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_i686.whl", hash = "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1"}, - {file = "MarkupSafe-1.1.1-cp34-cp34m-manylinux1_x86_64.whl", hash = "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735"}, - {file = 
"MarkupSafe-1.1.1-cp34-cp34m-win32.whl", hash = "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21"}, - {file = "MarkupSafe-1.1.1-cp34-cp34m-win_amd64.whl", hash = "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235"}, - {file = "MarkupSafe-1.1.1-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b"}, - {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f"}, - {file = "MarkupSafe-1.1.1-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905"}, - {file = "MarkupSafe-1.1.1-cp35-cp35m-win32.whl", hash = "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1"}, - {file = "MarkupSafe-1.1.1-cp35-cp35m-win_amd64.whl", hash = "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d"}, - {file = "MarkupSafe-1.1.1-cp36-cp36m-macosx_10_6_intel.whl", hash = "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff"}, - {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473"}, - {file = "MarkupSafe-1.1.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e"}, - {file = "MarkupSafe-1.1.1-cp36-cp36m-win32.whl", hash = "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66"}, - {file = "MarkupSafe-1.1.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5"}, - {file = "MarkupSafe-1.1.1-cp37-cp37m-macosx_10_6_intel.whl", hash = "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d"}, - {file = "MarkupSafe-1.1.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e"}, - {file = 
"MarkupSafe-1.1.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6"}, - {file = "MarkupSafe-1.1.1-cp37-cp37m-win32.whl", hash = "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2"}, - {file = "MarkupSafe-1.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c"}, - {file = "MarkupSafe-1.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6788b695d50a51edb699cb55e35487e430fa21f1ed838122d722e0ff0ac5ba15"}, - {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_i686.whl", hash = "sha256:cdb132fc825c38e1aeec2c8aa9338310d29d337bebbd7baa06889d09a60a1fa2"}, - {file = "MarkupSafe-1.1.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:13d3144e1e340870b25e7b10b98d779608c02016d5184cfb9927a9f10c689f42"}, - {file = "MarkupSafe-1.1.1-cp38-cp38-win32.whl", hash = "sha256:596510de112c685489095da617b5bcbbac7dd6384aeebeda4df6025d0256a81b"}, - {file = "MarkupSafe-1.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:e8313f01ba26fbbe36c7be1966a7b7424942f670f38e666995b88d012765b9be"}, - {file = "MarkupSafe-1.1.1.tar.gz", hash = "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b"}, -] -mccabe = [ - {file = "mccabe-0.6.1-py2.py3-none-any.whl", hash = "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42"}, - {file = "mccabe-0.6.1.tar.gz", hash = "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d8446c54dc28c01e5a2dbac5a25f071f6653e6e40f3a8818e8b45d790fe6ef53"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:36bc903cbb393720fad60fc28c10de6acf10dc6cc883f3e24ee4012371399a38"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d7d807855b419fc2ed3e631034685db6079889a1f01d5d9dac950f764da3dad"}, + {file = 
"MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:add36cb2dbb8b736611303cd3bfcee00afd96471b09cda130da3581cbdc56a6d"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:168cd0a3642de83558a5153c8bd34f175a9a6e7f6dc6384b9655d2697312a646"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:4dc8f9fb58f7364b63fd9f85013b780ef83c11857ae79f2feda41e270468dd9b"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:20dca64a3ef2d6e4d5d615a3fd418ad3bde77a47ec8a23d984a12b5b4c74491a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:cdfba22ea2f0029c9261a4bd07e830a8da012291fbe44dc794e488b6c9bb353a"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win32.whl", hash = "sha256:99df47edb6bda1249d3e80fdabb1dab8c08ef3975f69aed437cb69d0a5de1e28"}, + {file = "MarkupSafe-2.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:e0f138900af21926a02425cf736db95be9f4af72ba1bb21453432a07f6082134"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f9081981fe268bd86831e5c75f7de206ef275defcb82bc70740ae6dc507aee51"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:0955295dd5eec6cb6cc2fe1698f4c6d84af2e92de33fbcac4111913cd100a6ff"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:0446679737af14f45767963a1a9ef7620189912317d095f2d9ffa183a4d25d2b"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:f826e31d18b516f653fe296d967d700fddad5901ae07c622bb3705955e1faa94"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:fa130dd50c57d53368c9d59395cb5526eda596d3ffe36666cd81a44d56e48872"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux2014_aarch64.whl", hash = 
"sha256:905fec760bd2fa1388bb5b489ee8ee5f7291d692638ea5f67982d968366bef9f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf5d821ffabf0ef3533c39c518f3357b171a1651c1ff6827325e4489b0e46c3c"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:0d4b31cc67ab36e3392bbf3862cfbadac3db12bdd8b02a2731f509ed5b829724"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:baa1a4e8f868845af802979fcdbf0bb11f94f1cb7ced4c4b8a351bb60d108145"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:deb993cacb280823246a026e3b2d81c493c53de6acfd5e6bfe31ab3402bb37dd"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:63f3268ba69ace99cab4e3e3b5840b03340efed0948ab8f78d2fd87ee5442a4f"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:8d206346619592c6200148b01a2142798c989edcb9c896f9ac9722a99d4e77e6"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win32.whl", hash = "sha256:6c4ca60fa24e85fe25b912b01e62cb969d69a23a5d5867682dd3e80b5b02581d"}, + {file = "MarkupSafe-2.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:b2f4bf27480f5e5e8ce285a8c8fd176c0b03e93dcc6646477d4630e83440c6a9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0717a7390a68be14b8c793ba258e075c6f4ca819f15edfc2a3a027c823718567"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:6557b31b5e2c9ddf0de32a691f2312a32f77cd7681d8af66c2692efdbef84c18"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:49e3ceeabbfb9d66c3aef5af3a60cc43b85c33df25ce03d0031a608b0a8b2e3f"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:d7f9850398e85aba693bb640262d3611788b1f29a79f0c93c565694658f4071f"}, + {file = 
"MarkupSafe-2.0.1-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:6a7fae0dd14cf60ad5ff42baa2e95727c3d81ded453457771d02b7d2b3f9c0c2"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:b7f2d075102dc8c794cbde1947378051c4e5180d52d276987b8d28a3bd58c17d"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e9936f0b261d4df76ad22f8fee3ae83b60d7c3e871292cd42f40b81b70afae85"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:2a7d351cbd8cfeb19ca00de495e224dea7e7d919659c2841bbb7f420ad03e2d6"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:60bf42e36abfaf9aff1f50f52644b336d4f0a3fd6d8a60ca0d054ac9f713a864"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d6c7ebd4e944c85e2c3421e612a7057a2f48d478d79e61800d81468a8d842207"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:f0567c4dc99f264f49fe27da5f735f414c4e7e7dd850cfd8e69f0862d7c74ea9"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:89c687013cb1cd489a0f0ac24febe8c7a666e6e221b783e53ac50ebf68e45d86"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win32.whl", hash = "sha256:a30e67a65b53ea0a5e62fe23682cfe22712e01f453b95233b25502f7c61cb415"}, + {file = "MarkupSafe-2.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:611d1ad9a4288cf3e3c16014564df047fe08410e628f89805e475368bd304914"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:5bb28c636d87e840583ee3adeb78172efc47c8b26127267f54a9c0ec251d41a9"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:be98f628055368795d818ebf93da628541e10b75b41c559fdf36d104c5787066"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_i686.whl", hash = 
"sha256:1d609f577dc6e1aa17d746f8bd3c31aa4d258f4070d61b2aa5c4166c1539de35"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7d91275b0245b1da4d4cfa07e0faedd5b0812efc15b702576d103293e252af1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:01a9b8ea66f1658938f65b93a85ebe8bc016e6769611be228d797c9d998dd298"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:47ab1e7b91c098ab893b828deafa1203de86d0bc6ab587b160f78fe6c4011f75"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:97383d78eb34da7e1fa37dd273c20ad4320929af65d156e35a5e2d89566d9dfb"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6fcf051089389abe060c9cd7caa212c707e58153afa2c649f00346ce6d260f1b"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5855f8438a7d1d458206a2466bf82b0f104a3724bf96a1c781ab731e4201731a"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:3dd007d54ee88b46be476e293f48c85048603f5f516008bee124ddd891398ed6"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:aca6377c0cb8a8253e493c6b451565ac77e98c2951c45f913e0b52facdcff83f"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:04635854b943835a6ea959e948d19dcd311762c5c0c6e1f0e16ee57022669194"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6300b8454aa6930a24b9618fbb54b5a68135092bc666f7b06901f897fa5c2fee"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win32.whl", hash = "sha256:023cb26ec21ece8dc3907c0e8320058b2e0cb3c55cf9564da612bc325bed5e64"}, + {file = "MarkupSafe-2.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:984d76483eb32f1bcb536dc27e4ad56bba4baa70be32fa87152832cdd9db0833"}, + {file = 
"MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:2ef54abee730b502252bcdf31b10dacb0a416229b72c18b19e24a4509f273d26"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3c112550557578c26af18a1ccc9e090bfe03832ae994343cfdacd287db6a6ae7"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_i686.whl", hash = "sha256:53edb4da6925ad13c07b6d26c2a852bd81e364f95301c66e930ab2aef5b5ddd8"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f5653a225f31e113b152e56f154ccbe59eeb1c7487b39b9d9f9cdb58e6c79dc5"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:4efca8f86c54b22348a5467704e3fec767b2db12fc39c6d963168ab1d3fc9135"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:ab3ef638ace319fa26553db0624c4699e31a28bb2a835c5faca8f8acf6a5a902"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:f8ba0e8349a38d3001fae7eadded3f6606f0da5d748ee53cc1dab1d6527b9509"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c47adbc92fc1bb2b3274c4b3a43ae0e4573d9fbff4f54cd484555edbf030baf1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:37205cac2a79194e3750b0af2a5720d95f786a55ce7df90c3af697bfa100eaac"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:1f2ade76b9903f39aa442b4aadd2177decb66525062db244b35d71d0ee8599b6"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:4296f2b1ce8c86a6aea78613c34bb1a672ea0e3de9c6ba08a960efe0b0a09047"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f02365d4e99430a12647f09b6cc8bab61a6564363f313126f775eb4f6ef798e"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:5b6d930f030f8ed98e3e6c98ffa0652bdb82601e7a016ec2ab5d7ff23baa78d1"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win32.whl", hash = "sha256:10f82115e21dc0dfec9ab5c0223652f7197feb168c940f3ef61563fc2d6beb74"}, + {file = "MarkupSafe-2.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:693ce3f9e70a6cf7d2fb9e6c9d8b204b6b39897a2c4a1aa65728d5ac97dcc1d8"}, + {file = "MarkupSafe-2.0.1.tar.gz", hash = "sha256:594c67807fb16238b30c44bdf74f36c02cdf22d1c8cda91ef8a0ed8dabf5620a"}, ] mdfind-wrapper = [ - {file = "mdfind-wrapper-0.1.4.tar.gz", hash = "sha256:7b8f37e6e5037fea9722821f6d26c538abd1a08385a20820ab73158d70267653"}, - {file = "mdfind_wrapper-0.1.4-py3-none-any.whl", hash = "sha256:8100c30333a7c82fd3af897cf84f6cecaa300fd6bb2ec762884d4ca6affe7a3c"}, + {file = "mdfind-wrapper-0.1.5.tar.gz", hash = "sha256:c0dbd5bc99c6d1fb4678bfa1841a3380ccac61e9b43a26a8d658aa9cafe27441"}, + {file = "mdfind_wrapper-0.1.5-py3-none-any.whl", hash = "sha256:fd00e65684b47f2d286eb7394eb172f4766f2926d95eddff6eb948352f620cbc"}, +] +mergedeep = [ + {file = "mergedeep-1.3.4-py3-none-any.whl", hash = "sha256:70775750742b25c0d8f36c55aed03d24c3384d17c951b3175d898bd778ef0307"}, + {file = "mergedeep-1.3.4.tar.gz", hash = "sha256:0096d52e9dad9939c3d975a774666af186eda617e6ca84df4c94dec30004f2a8"}, +] +mkdocs = [ + {file = "mkdocs-1.2.3-py3-none-any.whl", hash = "sha256:a1fa8c2d0c1305d7fc2b9d9f607c71778572a8b110fb26642aa00296c9e6d072"}, + {file = "mkdocs-1.2.3.tar.gz", hash = "sha256:89f5a094764381cda656af4298727c9f53dc3e602983087e1fe96ea1df24f4c1"}, +] +mkdocs-autorefs = [ + {file = "mkdocs-autorefs-0.3.1.tar.gz", hash = "sha256:12baad29359f468b44d980ed35b713715409097a1d8e3d0ef90962db95205eda"}, + {file = "mkdocs_autorefs-0.3.1-py3-none-any.whl", hash = "sha256:f0fd7c115eaafda7fb16bf5ff5d70eda55d7c0599eac64f8b25eacf864312a85"}, +] +mkdocs-include-markdown-plugin = [ + {file = "mkdocs_include_markdown_plugin-3.2.3-py3-none-any.whl", hash = "sha256:5a8b0c60d8981225c012b8f657b6557910997e46dacae4aff039b181487236cf"}, 
+ {file = "mkdocs_include_markdown_plugin-3.2.3.tar.gz", hash = "sha256:64dd8c408a1b5b7422d4a5a826c434e5372af2fb7bd244dd5c87e09ff8f13302"}, ] -more-itertools = [ - {file = "more-itertools-8.7.0.tar.gz", hash = "sha256:c5d6da9ca3ff65220c3bfd2a8db06d698f05d4d2b9be57e1deb2be5a45019713"}, - {file = "more_itertools-8.7.0-py3-none-any.whl", hash = "sha256:5652a9ac72209ed7df8d9c15daf4e1aa0e3d2ccd3c87f8265a0673cd9cbc9ced"}, +mkdocstrings = [ + {file = "mkdocstrings-0.17.0-py3-none-any.whl", hash = "sha256:103fc1dd58cb23b7e0a6da5292435f01b29dc6fa0ba829132537f3f556f985de"}, + {file = "mkdocstrings-0.17.0.tar.gz", hash = "sha256:75b5cfa2039aeaf3a5f5cf0aa438507b0330ce76c8478da149d692daa7213a98"}, ] mypy = [ - {file = "mypy-0.812-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:a26f8ec704e5a7423c8824d425086705e381b4f1dfdef6e3a1edab7ba174ec49"}, - {file = "mypy-0.812-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:28fb5479c494b1bab244620685e2eb3c3f988d71fd5d64cc753195e8ed53df7c"}, - {file = "mypy-0.812-cp35-cp35m-manylinux2010_x86_64.whl", hash = "sha256:9743c91088d396c1a5a3c9978354b61b0382b4e3c440ce83cf77994a43e8c521"}, - {file = "mypy-0.812-cp35-cp35m-win_amd64.whl", hash = "sha256:d7da2e1d5f558c37d6e8c1246f1aec1e7349e4913d8fb3cb289a35de573fe2eb"}, - {file = "mypy-0.812-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:4eec37370483331d13514c3f55f446fc5248d6373e7029a29ecb7b7494851e7a"}, - {file = "mypy-0.812-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:d65cc1df038ef55a99e617431f0553cd77763869eebdf9042403e16089fe746c"}, - {file = "mypy-0.812-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:61a3d5b97955422964be6b3baf05ff2ce7f26f52c85dd88db11d5e03e146a3a6"}, - {file = "mypy-0.812-cp36-cp36m-win_amd64.whl", hash = "sha256:25adde9b862f8f9aac9d2d11971f226bd4c8fbaa89fb76bdadb267ef22d10064"}, - {file = "mypy-0.812-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:552a815579aa1e995f39fd05dde6cd378e191b063f031f2acfe73ce9fb7f9e56"}, - {file = 
"mypy-0.812-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:499c798053cdebcaa916eef8cd733e5584b5909f789de856b482cd7d069bdad8"}, - {file = "mypy-0.812-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:5873888fff1c7cf5b71efbe80e0e73153fe9212fafdf8e44adfe4c20ec9f82d7"}, - {file = "mypy-0.812-cp37-cp37m-win_amd64.whl", hash = "sha256:9f94aac67a2045ec719ffe6111df543bac7874cee01f41928f6969756e030564"}, - {file = "mypy-0.812-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d23e0ea196702d918b60c8288561e722bf437d82cb7ef2edcd98cfa38905d506"}, - {file = "mypy-0.812-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:674e822aa665b9fd75130c6c5f5ed9564a38c6cea6a6432ce47eafb68ee578c5"}, - {file = "mypy-0.812-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:abf7e0c3cf117c44d9285cc6128856106183938c68fd4944763003decdcfeb66"}, - {file = "mypy-0.812-cp38-cp38-win_amd64.whl", hash = "sha256:0d0a87c0e7e3a9becdfbe936c981d32e5ee0ccda3e0f07e1ef2c3d1a817cf73e"}, - {file = "mypy-0.812-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7ce3175801d0ae5fdfa79b4f0cfed08807af4d075b402b7e294e6aa72af9aa2a"}, - {file = "mypy-0.812-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:b09669bcda124e83708f34a94606e01b614fa71931d356c1f1a5297ba11f110a"}, - {file = "mypy-0.812-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:33f159443db0829d16f0a8d83d94df3109bb6dd801975fe86bacb9bf71628e97"}, - {file = "mypy-0.812-cp39-cp39-win_amd64.whl", hash = "sha256:3f2aca7f68580dc2508289c729bd49ee929a436208d2b2b6aab15745a70a57df"}, - {file = "mypy-0.812-py3-none-any.whl", hash = "sha256:2f9b3407c58347a452fc0736861593e105139b905cca7d097e413453a1d650b4"}, - {file = "mypy-0.812.tar.gz", hash = "sha256:cd07039aa5df222037005b08fbbfd69b3ab0b0bd7a07d7906de75ae52c4e3119"}, + {file = "mypy-0.931-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:3c5b42d0815e15518b1f0990cff7a705805961613e701db60387e6fb663fe78a"}, + {file = "mypy-0.931-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:c89702cac5b302f0c5d33b172d2b55b5df2bede3344a2fbed99ff96bddb2cf00"}, + {file = "mypy-0.931-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:300717a07ad09525401a508ef5d105e6b56646f7942eb92715a1c8d610149714"}, + {file = "mypy-0.931-cp310-cp310-win_amd64.whl", hash = "sha256:7b3f6f557ba4afc7f2ce6d3215d5db279bcf120b3cfd0add20a5d4f4abdae5bc"}, + {file = "mypy-0.931-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:1bf752559797c897cdd2c65f7b60c2b6969ffe458417b8d947b8340cc9cec08d"}, + {file = "mypy-0.931-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:4365c60266b95a3f216a3047f1d8e3f895da6c7402e9e1ddfab96393122cc58d"}, + {file = "mypy-0.931-cp36-cp36m-win_amd64.whl", hash = "sha256:1b65714dc296a7991000b6ee59a35b3f550e0073411ac9d3202f6516621ba66c"}, + {file = "mypy-0.931-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e839191b8da5b4e5d805f940537efcaa13ea5dd98418f06dc585d2891d228cf0"}, + {file = "mypy-0.931-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:50c7346a46dc76a4ed88f3277d4959de8a2bd0a0fa47fa87a4cde36fe247ac05"}, + {file = "mypy-0.931-cp37-cp37m-win_amd64.whl", hash = "sha256:d8f1ff62f7a879c9fe5917b3f9eb93a79b78aad47b533911b853a757223f72e7"}, + {file = "mypy-0.931-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:f9fe20d0872b26c4bba1c1be02c5340de1019530302cf2dcc85c7f9fc3252ae0"}, + {file = "mypy-0.931-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:1b06268df7eb53a8feea99cbfff77a6e2b205e70bf31743e786678ef87ee8069"}, + {file = "mypy-0.931-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8c11003aaeaf7cc2d0f1bc101c1cc9454ec4cc9cb825aef3cafff8a5fdf4c799"}, + {file = "mypy-0.931-cp38-cp38-win_amd64.whl", hash = "sha256:d9d2b84b2007cea426e327d2483238f040c49405a6bf4074f605f0156c91a47a"}, + {file = 
"mypy-0.931-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ff3bf387c14c805ab1388185dd22d6b210824e164d4bb324b195ff34e322d166"}, + {file = "mypy-0.931-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5b56154f8c09427bae082b32275a21f500b24d93c88d69a5e82f3978018a0266"}, + {file = "mypy-0.931-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:8ca7f8c4b1584d63c9a0f827c37ba7a47226c19a23a753d52e5b5eddb201afcd"}, + {file = "mypy-0.931-cp39-cp39-win_amd64.whl", hash = "sha256:74f7eccbfd436abe9c352ad9fb65872cc0f1f0a868e9d9c44db0893440f0c697"}, + {file = "mypy-0.931-py3-none-any.whl", hash = "sha256:1171f2e0859cfff2d366da2c7092b06130f232c636a3f7301e3feb8b41f6377d"}, + {file = "mypy-0.931.tar.gz", hash = "sha256:0038b21890867793581e4cb0d810829f5fd4441aa75796b53033af3aa30430ce"}, ] mypy-extensions = [ {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, @@ -1350,174 +1347,134 @@ olefile = [ {file = "olefile-0.46.zip", hash = "sha256:133b031eaf8fd2c9399b78b8bc5b8fcbe4c31e85295749bb17a87cba8f3c3964"}, ] packaging = [ - {file = "packaging-20.9-py2.py3-none-any.whl", hash = "sha256:67714da7f7bc052e064859c05c595155bd1ee9f69f76557e21f051443c20947a"}, - {file = "packaging-20.9.tar.gz", hash = "sha256:5b327ac1320dc863dca72f4514ecc086f31186744b84a230374cc1fd776feae5"}, -] -parso = [ - {file = "parso-0.8.2-py2.py3-none-any.whl", hash = "sha256:a8c4922db71e4fdb90e0d0bc6e50f9b273d3397925e5e60a717e719201778d22"}, - {file = "parso-0.8.2.tar.gz", hash = "sha256:12b83492c6239ce32ff5eed6d3639d6a536170723c6f3f1506869f1ace413398"}, + {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, + {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, ] "pdfminer.six" = [ - {file = "pdfminer.six-20181108-py2-none-any.whl", hash = 
"sha256:d12653375fcc00615d76dbd48fc551a2d5ffd6f572c11660d417ccf91a600f9b"}, - {file = "pdfminer.six-20181108-py2.py3-none-any.whl", hash = "sha256:f04d029d1d3e58c87da51bdefef2e9a1dbf2d7b63f727dd2a3e36054f5ae96ea"}, - {file = "pdfminer.six-20181108.tar.gz", hash = "sha256:9cc58857cf0a360213008061d903282462abee55cdcc7e0b6e08d6834e55050d"}, -] -pendulum = [ - {file = "pendulum-2.1.2-cp27-cp27m-macosx_10_15_x86_64.whl", hash = "sha256:b6c352f4bd32dff1ea7066bd31ad0f71f8d8100b9ff709fb343f3b86cee43efe"}, - {file = "pendulum-2.1.2-cp27-cp27m-win_amd64.whl", hash = "sha256:318f72f62e8e23cd6660dbafe1e346950281a9aed144b5c596b2ddabc1d19739"}, - {file = "pendulum-2.1.2-cp35-cp35m-macosx_10_15_x86_64.whl", hash = "sha256:0731f0c661a3cb779d398803655494893c9f581f6488048b3fb629c2342b5394"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:3481fad1dc3f6f6738bd575a951d3c15d4b4ce7c82dce37cf8ac1483fde6e8b0"}, - {file = "pendulum-2.1.2-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:9702069c694306297ed362ce7e3c1ef8404ac8ede39f9b28b7c1a7ad8c3959e3"}, - {file = "pendulum-2.1.2-cp35-cp35m-win_amd64.whl", hash = "sha256:fb53ffa0085002ddd43b6ca61a7b34f2d4d7c3ed66f931fe599e1a531b42af9b"}, - {file = "pendulum-2.1.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:c501749fdd3d6f9e726086bf0cd4437281ed47e7bca132ddb522f86a1645d360"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:c807a578a532eeb226150d5006f156632df2cc8c5693d778324b43ff8c515dd0"}, - {file = "pendulum-2.1.2-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:2d1619a721df661e506eff8db8614016f0720ac171fe80dda1333ee44e684087"}, - {file = "pendulum-2.1.2-cp36-cp36m-win_amd64.whl", hash = "sha256:f888f2d2909a414680a29ae74d0592758f2b9fcdee3549887779cd4055e975db"}, - {file = "pendulum-2.1.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:e95d329384717c7bf627bf27e204bc3b15c8238fa8d9d9781d93712776c14002"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_i686.whl", hash = 
"sha256:4c9c689747f39d0d02a9f94fcee737b34a5773803a64a5fdb046ee9cac7442c5"}, - {file = "pendulum-2.1.2-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:1245cd0075a3c6d889f581f6325dd8404aca5884dea7223a5566c38aab94642b"}, - {file = "pendulum-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:db0a40d8bcd27b4fb46676e8eb3c732c67a5a5e6bfab8927028224fbced0b40b"}, - {file = "pendulum-2.1.2-cp38-cp38-macosx_10_15_x86_64.whl", hash = "sha256:f5e236e7730cab1644e1b87aca3d2ff3e375a608542e90fe25685dae46310116"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:de42ea3e2943171a9e95141f2eecf972480636e8e484ccffaf1e833929e9e052"}, - {file = "pendulum-2.1.2-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7c5ec650cb4bec4c63a89a0242cc8c3cebcec92fcfe937c417ba18277d8560be"}, - {file = "pendulum-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:33fb61601083f3eb1d15edeb45274f73c63b3c44a8524703dc143f4212bf3269"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:29c40a6f2942376185728c9a0347d7c0f07905638c83007e1d262781f1e6953a"}, - {file = "pendulum-2.1.2-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:94b1fc947bfe38579b28e1cccb36f7e28a15e841f30384b5ad6c5e31055c85d7"}, - {file = "pendulum-2.1.2.tar.gz", hash = "sha256:b06a0ca1bfe41c990bbf0c029f0b6501a7f2ec4e38bfec730712015e8860f207"}, -] -pexpect = [ - {file = "pexpect-4.8.0-py2.py3-none-any.whl", hash = "sha256:0b48a55dcb3c05f3329815901ea4fc1537514d6ba867a152b581d69ae3710937"}, - {file = "pexpect-4.8.0.tar.gz", hash = "sha256:fc65a43959d153d0114afe13997d439c22823a27cefceb5ff35c2178c6784c0c"}, -] -pickleshare = [ - {file = "pickleshare-0.7.5-py2.py3-none-any.whl", hash = "sha256:9649af414d74d4df115d5d718f82acb59c9d418196b7b4290ed47a12ce62df56"}, - {file = "pickleshare-0.7.5.tar.gz", hash = "sha256:87683d47965c1da65cdacaf31c8441d12b8044cdec9aca500cd78fc2c683afca"}, + {file = "pdfminer.six-20191110-py2.py3-none-any.whl", hash = 
"sha256:ca2ca58f3ac66a486bce53a6ddba95dc2b27781612915fa41c444790ba9cd2a8"}, + {file = "pdfminer.six-20191110.tar.gz", hash = "sha256:141a53ec491bee6d45bf9b2c7f82601426fb5d32636bcf6b9c8a8f3b6431fea6"}, ] pillow = [ - {file = "Pillow-8.2.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:dc38f57d8f20f06dd7c3161c59ca2c86893632623f33a42d592f097b00f720a9"}, - {file = "Pillow-8.2.0-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a013cbe25d20c2e0c4e85a9daf438f85121a4d0344ddc76e33fd7e3965d9af4b"}, - {file = "Pillow-8.2.0-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:8bb1e155a74e1bfbacd84555ea62fa21c58e0b4e7e6b20e4447b8d07990ac78b"}, - {file = "Pillow-8.2.0-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c5236606e8570542ed424849f7852a0ff0bce2c4c8d0ba05cc202a5a9c97dee9"}, - {file = "Pillow-8.2.0-cp36-cp36m-win32.whl", hash = "sha256:12e5e7471f9b637762453da74e390e56cc43e486a88289995c1f4c1dc0bfe727"}, - {file = "Pillow-8.2.0-cp36-cp36m-win_amd64.whl", hash = "sha256:5afe6b237a0b81bd54b53f835a153770802f164c5570bab5e005aad693dab87f"}, - {file = "Pillow-8.2.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:cb7a09e173903541fa888ba010c345893cd9fc1b5891aaf060f6ca77b6a3722d"}, - {file = "Pillow-8.2.0-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:0d19d70ee7c2ba97631bae1e7d4725cdb2ecf238178096e8c82ee481e189168a"}, - {file = "Pillow-8.2.0-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:083781abd261bdabf090ad07bb69f8f5599943ddb539d64497ed021b2a67e5a9"}, - {file = "Pillow-8.2.0-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:c6b39294464b03457f9064e98c124e09008b35a62e3189d3513e5148611c9388"}, - {file = "Pillow-8.2.0-cp37-cp37m-win32.whl", hash = "sha256:01425106e4e8cee195a411f729cff2a7d61813b0b11737c12bd5991f5f14bcd5"}, - {file = "Pillow-8.2.0-cp37-cp37m-win_amd64.whl", hash = "sha256:3b570f84a6161cf8865c4e08adf629441f56e32f180f7aa4ccbd2e0a5a02cba2"}, - {file = "Pillow-8.2.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = 
"sha256:031a6c88c77d08aab84fecc05c3cde8414cd6f8406f4d2b16fed1e97634cc8a4"}, - {file = "Pillow-8.2.0-cp38-cp38-manylinux1_i686.whl", hash = "sha256:66cc56579fd91f517290ab02c51e3a80f581aba45fd924fcdee01fa06e635812"}, - {file = "Pillow-8.2.0-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:6c32cc3145928c4305d142ebec682419a6c0a8ce9e33db900027ddca1ec39178"}, - {file = "Pillow-8.2.0-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:624b977355cde8b065f6d51b98497d6cd5fbdd4f36405f7a8790e3376125e2bb"}, - {file = "Pillow-8.2.0-cp38-cp38-win32.whl", hash = "sha256:5cbf3e3b1014dddc45496e8cf38b9f099c95a326275885199f427825c6522232"}, - {file = "Pillow-8.2.0-cp38-cp38-win_amd64.whl", hash = "sha256:463822e2f0d81459e113372a168f2ff59723e78528f91f0bd25680ac185cf797"}, - {file = "Pillow-8.2.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:95d5ef984eff897850f3a83883363da64aae1000e79cb3c321915468e8c6add5"}, - {file = "Pillow-8.2.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b91c36492a4bbb1ee855b7d16fe51379e5f96b85692dc8210831fbb24c43e484"}, - {file = "Pillow-8.2.0-cp39-cp39-manylinux1_i686.whl", hash = "sha256:d68cb92c408261f806b15923834203f024110a2e2872ecb0bd2a110f89d3c602"}, - {file = "Pillow-8.2.0-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:f217c3954ce5fd88303fc0c317af55d5e0204106d86dea17eb8205700d47dec2"}, - {file = "Pillow-8.2.0-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:5b70110acb39f3aff6b74cf09bb4169b167e2660dabc304c1e25b6555fa781ef"}, - {file = "Pillow-8.2.0-cp39-cp39-win32.whl", hash = "sha256:a7d5e9fad90eff8f6f6106d3b98b553a88b6f976e51fce287192a5d2d5363713"}, - {file = "Pillow-8.2.0-cp39-cp39-win_amd64.whl", hash = "sha256:238c197fc275b475e87c1453b05b467d2d02c2915fdfdd4af126145ff2e4610c"}, - {file = "Pillow-8.2.0-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", hash = "sha256:0e04d61f0064b545b989126197930807c86bcbd4534d39168f4aa5fda39bb8f9"}, - {file = "Pillow-8.2.0-pp36-pypy36_pp73-manylinux2010_i686.whl", hash = 
"sha256:63728564c1410d99e6d1ae8e3b810fe012bc440952168af0a2877e8ff5ab96b9"}, - {file = "Pillow-8.2.0-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:c03c07ed32c5324939b19e36ae5f75c660c81461e312a41aea30acdd46f93a7c"}, - {file = "Pillow-8.2.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:4d98abdd6b1e3bf1a1cbb14c3895226816e666749ac040c4e2554231068c639b"}, - {file = "Pillow-8.2.0-pp37-pypy37_pp73-manylinux2010_i686.whl", hash = "sha256:aac00e4bc94d1b7813fe882c28990c1bc2f9d0e1aa765a5f2b516e8a6a16a9e4"}, - {file = "Pillow-8.2.0-pp37-pypy37_pp73-manylinux2010_x86_64.whl", hash = "sha256:22fd0f42ad15dfdde6c581347eaa4adb9a6fc4b865f90b23378aa7914895e120"}, - {file = "Pillow-8.2.0-pp37-pypy37_pp73-win32.whl", hash = "sha256:e98eca29a05913e82177b3ba3d198b1728e164869c613d76d0de4bde6768a50e"}, - {file = "Pillow-8.2.0.tar.gz", hash = "sha256:a787ab10d7bb5494e5f76536ac460741788f1fbce851068d73a87ca7c35fc3e1"}, + {file = "Pillow-8.4.0-cp310-cp310-macosx_10_10_universal2.whl", hash = "sha256:81f8d5c81e483a9442d72d182e1fb6dcb9723f289a57e8030811bac9ea3fef8d"}, + {file = "Pillow-8.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:3f97cfb1e5a392d75dd8b9fd274d205404729923840ca94ca45a0af57e13dbe6"}, + {file = "Pillow-8.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eb9fc393f3c61f9054e1ed26e6fe912c7321af2f41ff49d3f83d05bacf22cc78"}, + {file = "Pillow-8.4.0-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d82cdb63100ef5eedb8391732375e6d05993b765f72cb34311fab92103314649"}, + {file = "Pillow-8.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:62cc1afda735a8d109007164714e73771b499768b9bb5afcbbee9d0ff374b43f"}, + {file = "Pillow-8.4.0-cp310-cp310-win32.whl", hash = "sha256:e3dacecfbeec9a33e932f00c6cd7996e62f53ad46fbe677577394aaa90ee419a"}, + {file = "Pillow-8.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:620582db2a85b2df5f8a82ddeb52116560d7e5e6b055095f04ad828d1b0baa39"}, + {file = 
"Pillow-8.4.0-cp36-cp36m-macosx_10_10_x86_64.whl", hash = "sha256:1bc723b434fbc4ab50bb68e11e93ce5fb69866ad621e3c2c9bdb0cd70e345f55"}, + {file = "Pillow-8.4.0-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:72cbcfd54df6caf85cc35264c77ede902452d6df41166010262374155947460c"}, + {file = "Pillow-8.4.0-cp36-cp36m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70ad9e5c6cb9b8487280a02c0ad8a51581dcbbe8484ce058477692a27c151c0a"}, + {file = "Pillow-8.4.0-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:25a49dc2e2f74e65efaa32b153527fc5ac98508d502fa46e74fa4fd678ed6645"}, + {file = "Pillow-8.4.0-cp36-cp36m-win32.whl", hash = "sha256:93ce9e955cc95959df98505e4608ad98281fff037350d8c2671c9aa86bcf10a9"}, + {file = "Pillow-8.4.0-cp36-cp36m-win_amd64.whl", hash = "sha256:2e4440b8f00f504ee4b53fe30f4e381aae30b0568193be305256b1462216feff"}, + {file = "Pillow-8.4.0-cp37-cp37m-macosx_10_10_x86_64.whl", hash = "sha256:8c803ac3c28bbc53763e6825746f05cc407b20e4a69d0122e526a582e3b5e153"}, + {file = "Pillow-8.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c8a17b5d948f4ceeceb66384727dde11b240736fddeda54ca740b9b8b1556b29"}, + {file = "Pillow-8.4.0-cp37-cp37m-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1394a6ad5abc838c5cd8a92c5a07535648cdf6d09e8e2d6df916dfa9ea86ead8"}, + {file = "Pillow-8.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:792e5c12376594bfcb986ebf3855aa4b7c225754e9a9521298e460e92fb4a488"}, + {file = "Pillow-8.4.0-cp37-cp37m-win32.whl", hash = "sha256:d99ec152570e4196772e7a8e4ba5320d2d27bf22fdf11743dd882936ed64305b"}, + {file = "Pillow-8.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:7b7017b61bbcdd7f6363aeceb881e23c46583739cb69a3ab39cb384f6ec82e5b"}, + {file = "Pillow-8.4.0-cp38-cp38-macosx_10_10_x86_64.whl", hash = "sha256:d89363f02658e253dbd171f7c3716a5d340a24ee82d38aab9183f7fdf0cdca49"}, + {file = 
"Pillow-8.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:0a0956fdc5defc34462bb1c765ee88d933239f9a94bc37d132004775241a7585"}, + {file = "Pillow-8.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b7bb9de00197fb4261825c15551adf7605cf14a80badf1761d61e59da347779"}, + {file = "Pillow-8.4.0-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:72b9e656e340447f827885b8d7a15fc8c4e68d410dc2297ef6787eec0f0ea409"}, + {file = "Pillow-8.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a5a4532a12314149d8b4e4ad8ff09dde7427731fcfa5917ff16d0291f13609df"}, + {file = "Pillow-8.4.0-cp38-cp38-win32.whl", hash = "sha256:82aafa8d5eb68c8463b6e9baeb4f19043bb31fefc03eb7b216b51e6a9981ae09"}, + {file = "Pillow-8.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:066f3999cb3b070a95c3652712cffa1a748cd02d60ad7b4e485c3748a04d9d76"}, + {file = "Pillow-8.4.0-cp39-cp39-macosx_10_10_x86_64.whl", hash = "sha256:5503c86916d27c2e101b7f71c2ae2cddba01a2cf55b8395b0255fd33fa4d1f1a"}, + {file = "Pillow-8.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4acc0985ddf39d1bc969a9220b51d94ed51695d455c228d8ac29fcdb25810e6e"}, + {file = "Pillow-8.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0b052a619a8bfcf26bd8b3f48f45283f9e977890263e4571f2393ed8898d331b"}, + {file = "Pillow-8.4.0-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:493cb4e415f44cd601fcec11c99836f707bb714ab03f5ed46ac25713baf0ff20"}, + {file = "Pillow-8.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b8831cb7332eda5dc89b21a7bce7ef6ad305548820595033a4b03cf3091235ed"}, + {file = "Pillow-8.4.0-cp39-cp39-win32.whl", hash = "sha256:5e9ac5f66616b87d4da618a20ab0a38324dbe88d8a39b55be8964eb520021e02"}, + {file = "Pillow-8.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:3eb1ce5f65908556c2d8685a8f0a6e989d887ec4057326f6c22b24e8a172c66b"}, + {file = "Pillow-8.4.0-pp36-pypy36_pp73-macosx_10_10_x86_64.whl", 
hash = "sha256:ddc4d832a0f0b4c52fff973a0d44b6c99839a9d016fe4e6a1cb8f3eea96479c2"}, + {file = "Pillow-8.4.0-pp36-pypy36_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a3e5ddc44c14042f0844b8cf7d2cd455f6cc80fd7f5eefbe657292cf601d9ad"}, + {file = "Pillow-8.4.0-pp36-pypy36_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c70e94281588ef053ae8998039610dbd71bc509e4acbc77ab59d7d2937b10698"}, + {file = "Pillow-8.4.0-pp37-pypy37_pp73-macosx_10_10_x86_64.whl", hash = "sha256:3862b7256046fcd950618ed22d1d60b842e3a40a48236a5498746f21189afbbc"}, + {file = "Pillow-8.4.0-pp37-pypy37_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4901622493f88b1a29bd30ec1a2f683782e57c3c16a2dbc7f2595ba01f639df"}, + {file = "Pillow-8.4.0-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:84c471a734240653a0ec91dec0996696eea227eafe72a33bd06c92697728046b"}, + {file = "Pillow-8.4.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:244cf3b97802c34c41905d22810846802a3329ddcb93ccc432870243211c79fc"}, + {file = "Pillow-8.4.0.tar.gz", hash = "sha256:b8e2f83c56e141920c39464b852de3719dfbfb6e3c99a2d8da0edf4fb33176ed"}, ] pluggy = [ - {file = "pluggy-0.13.1-py2.py3-none-any.whl", hash = "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"}, - {file = "pluggy-0.13.1.tar.gz", hash = "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0"}, -] -prompt-toolkit = [ - {file = "prompt_toolkit-3.0.3-py3-none-any.whl", hash = "sha256:c93e53af97f630f12f5f62a3274e79527936ed466f038953dfa379d4941f651a"}, - {file = "prompt_toolkit-3.0.3.tar.gz", hash = "sha256:a402e9bf468b63314e37460b68ba68243d55b2f8c4d0192f85a019af3945050e"}, -] -ptyprocess = [ - {file = "ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35"}, - {file = "ptyprocess-0.7.0.tar.gz", hash = "sha256:5c5d0a3b48ceee0b48485e0c26037c0acd7d29765ca3fbb5cb3831d347423220"}, + {file = 
"pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, + {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] py = [ - {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, - {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, -] -pycodestyle = [ - {file = "pycodestyle-2.7.0-py2.py3-none-any.whl", hash = "sha256:514f76d918fcc0b55c6680472f0a37970994e07bbb80725808c17089be302068"}, - {file = "pycodestyle-2.7.0.tar.gz", hash = "sha256:c389c1d06bf7904078ca03399a4816f974a1d590090fecea0c63ec26ebaf1cef"}, + {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, + {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] pycparser = [ - {file = "pycparser-2.20-py2.py3-none-any.whl", hash = "sha256:7582ad22678f0fcd81102833f60ef8d0e57288b6b5fb00323d101be910e35705"}, - {file = "pycparser-2.20.tar.gz", hash = "sha256:2d475327684562c3a96cc71adf7dc8c4f0565175cf86b6d7a404ff4c771f15f0"}, + {file = "pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9"}, + {file = "pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206"}, ] pycryptodome = [ - {file = "pycryptodome-3.10.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:1c5e1ca507de2ad93474be5cfe2bfa76b7cf039a1a32fc196f40935944871a06"}, - {file = "pycryptodome-3.10.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:6260e24d41149268122dd39d4ebd5941e9d107f49463f7e071fd397e29923b0c"}, - {file = "pycryptodome-3.10.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:3f840c49d38986f6e17dbc0673d37947c88bc9d2d9dba1c01b979b36f8447db1"}, - {file = 
"pycryptodome-3.10.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:2dea65df54349cdfa43d6b2e8edb83f5f8d6861e5cf7b1fbc3e34c5694c85e27"}, - {file = "pycryptodome-3.10.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:e61e363d9a5d7916f3a4ce984a929514c0df3daf3b1b2eb5e6edbb131ee771cf"}, - {file = "pycryptodome-3.10.1-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:2603c98ae04aac675fefcf71a6c87dc4bb74a75e9071ae3923bbc91a59f08d35"}, - {file = "pycryptodome-3.10.1-cp27-cp27m-win32.whl", hash = "sha256:38661348ecb71476037f1e1f553159b80d256c00f6c0b00502acac891f7116d9"}, - {file = "pycryptodome-3.10.1-cp27-cp27m-win_amd64.whl", hash = "sha256:1723ebee5561628ce96748501cdaa7afaa67329d753933296321f0be55358dce"}, - {file = "pycryptodome-3.10.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:77997519d8eb8a4adcd9a47b9cec18f9b323e296986528186c0e9a7a15d6a07e"}, - {file = "pycryptodome-3.10.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:99b2f3fc51d308286071d0953f92055504a6ffe829a832a9fc7a04318a7683dd"}, - {file = "pycryptodome-3.10.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e0a4d5933a88a2c98bbe19c0c722f5483dc628d7a38338ac2cb64a7dbd34064b"}, - {file = "pycryptodome-3.10.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:d3d6958d53ad307df5e8469cc44474a75393a434addf20ecd451f38a72fe29b8"}, - {file = "pycryptodome-3.10.1-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:a8eb8b6ea09ec1c2535bf39914377bc8abcab2c7d30fa9225eb4fe412024e427"}, - {file = "pycryptodome-3.10.1-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:31c1df17b3dc5f39600a4057d7db53ac372f492c955b9b75dd439f5d8b460129"}, - {file = "pycryptodome-3.10.1-cp35-abi3-manylinux1_i686.whl", hash = "sha256:a3105a0eb63eacf98c2ecb0eb4aa03f77f40fbac2bdde22020bb8a536b226bb8"}, - {file = "pycryptodome-3.10.1-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:a92d5c414e8ee1249e850789052608f582416e82422502dc0ac8c577808a9067"}, - {file = "pycryptodome-3.10.1-cp35-abi3-manylinux2010_i686.whl", hash = 
"sha256:60386d1d4cfaad299803b45a5bc2089696eaf6cdd56f9fc17479a6f89595cfc8"}, - {file = "pycryptodome-3.10.1-cp35-abi3-manylinux2010_x86_64.whl", hash = "sha256:501ab36aae360e31d0ec370cf5ce8ace6cb4112060d099b993bc02b36ac83fb6"}, - {file = "pycryptodome-3.10.1-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:fc7489a50323a0df02378bc2fff86eb69d94cc5639914346c736be981c6a02e7"}, - {file = "pycryptodome-3.10.1-cp35-abi3-win32.whl", hash = "sha256:9b6f711b25e01931f1c61ce0115245a23cdc8b80bf8539ac0363bdcf27d649b6"}, - {file = "pycryptodome-3.10.1-cp35-abi3-win_amd64.whl", hash = "sha256:7fd519b89585abf57bf47d90166903ec7b43af4fe23c92273ea09e6336af5c07"}, - {file = "pycryptodome-3.10.1-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:09c1555a3fa450e7eaca41ea11cd00afe7c91fef52353488e65663777d8524e0"}, - {file = "pycryptodome-3.10.1-pp27-pypy_73-manylinux1_x86_64.whl", hash = "sha256:758949ca62690b1540dfb24ad773c6da9cd0e425189e83e39c038bbd52b8e438"}, - {file = "pycryptodome-3.10.1-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:e3bf558c6aeb49afa9f0c06cee7fb5947ee5a1ff3bd794b653d39926b49077fa"}, - {file = "pycryptodome-3.10.1-pp27-pypy_73-win32.whl", hash = "sha256:f977cdf725b20f6b8229b0c87acb98c7717e742ef9f46b113985303ae12a99da"}, - {file = "pycryptodome-3.10.1-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:6d2df5223b12437e644ce0a3be7809471ffa71de44ccd28b02180401982594a6"}, - {file = "pycryptodome-3.10.1-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:98213ac2b18dc1969a47bc65a79a8fca02a414249d0c8635abb081c7f38c91b6"}, - {file = "pycryptodome-3.10.1-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:12222a5edc9ca4a29de15fbd5339099c4c26c56e13c2ceddf0b920794f26165d"}, - {file = "pycryptodome-3.10.1-pp36-pypy36_pp73-win32.whl", hash = "sha256:6bbf7fee7b7948b29d7e71fcacf48bac0c57fb41332007061a933f2d996f9713"}, - {file = "pycryptodome-3.10.1.tar.gz", hash = "sha256:3e2e3a06580c5f190df843cdb90ea28d61099cf4924334d5297a995de68e4673"}, -] -pyflakes 
= [ - {file = "pyflakes-2.3.1-py2.py3-none-any.whl", hash = "sha256:7893783d01b8a89811dd72d7dfd4d84ff098e5eed95cfa8905b22bbffe52efc3"}, - {file = "pyflakes-2.3.1.tar.gz", hash = "sha256:f5bc8ecabc05bb9d291eb5203d6810b49040f6ff446a756326104746cc00c1db"}, + {file = "pycryptodome-3.14.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:75a3a364fee153e77ed889c957f6f94ec6d234b82e7195b117180dcc9fc16f96"}, + {file = "pycryptodome-3.14.1-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:aae395f79fa549fb1f6e3dc85cf277f0351e15a22e6547250056c7f0c990d6a5"}, + {file = "pycryptodome-3.14.1-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:f403a3e297a59d94121cb3ee4b1cf41f844332940a62d71f9e4a009cc3533493"}, + {file = "pycryptodome-3.14.1-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ce7a875694cd6ccd8682017a7c06c6483600f151d8916f2b25cf7a439e600263"}, + {file = "pycryptodome-3.14.1-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:a36ab51674b014ba03da7f98b675fcb8eabd709a2d8e18219f784aba2db73b72"}, + {file = "pycryptodome-3.14.1-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:50a5346af703330944bea503106cd50c9c2212174cfcb9939db4deb5305a8367"}, + {file = "pycryptodome-3.14.1-cp27-cp27m-win32.whl", hash = "sha256:36e3242c4792e54ed906c53f5d840712793dc68b726ec6baefd8d978c5282d30"}, + {file = "pycryptodome-3.14.1-cp27-cp27m-win_amd64.whl", hash = "sha256:c880a98376939165b7dc504559f60abe234b99e294523a273847f9e7756f4132"}, + {file = "pycryptodome-3.14.1-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:dcd65355acba9a1d0fc9b923875da35ed50506e339b35436277703d7ace3e222"}, + {file = "pycryptodome-3.14.1-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:766a8e9832128c70012e0c2b263049506cbf334fb21ff7224e2704102b6ef59e"}, + {file = "pycryptodome-3.14.1-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:2562de213960693b6d657098505fd4493c45f3429304da67efcbeb61f0edfe89"}, + {file = "pycryptodome-3.14.1-cp27-cp27mu-manylinux2010_x86_64.whl", hash = 
"sha256:d1b7739b68a032ad14c5e51f7e4e1a5f92f3628bba024a2bda1f30c481fc85d8"}, + {file = "pycryptodome-3.14.1-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:27e92c1293afcb8d2639baf7eb43f4baada86e4de0f1fb22312bfc989b95dae2"}, + {file = "pycryptodome-3.14.1-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:f2772af1c3ef8025c85335f8b828d0193fa1e43256621f613280e2c81bfad423"}, + {file = "pycryptodome-3.14.1-cp35-abi3-manylinux1_i686.whl", hash = "sha256:9ec761a35dbac4a99dcbc5cd557e6e57432ddf3e17af8c3c86b44af9da0189c0"}, + {file = "pycryptodome-3.14.1-cp35-abi3-manylinux1_x86_64.whl", hash = "sha256:e64738207a02a83590df35f59d708bf1e7ea0d6adce712a777be2967e5f7043c"}, + {file = "pycryptodome-3.14.1-cp35-abi3-manylinux2010_i686.whl", hash = "sha256:e24d4ec4b029611359566c52f31af45c5aecde7ef90bf8f31620fd44c438efe7"}, + {file = "pycryptodome-3.14.1-cp35-abi3-manylinux2010_x86_64.whl", hash = "sha256:8b5c28058102e2974b9868d72ae5144128485d466ba8739abd674b77971454cc"}, + {file = "pycryptodome-3.14.1-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:924b6aad5386fb54f2645f22658cb0398b1f25bc1e714a6d1522c75d527deaa5"}, + {file = "pycryptodome-3.14.1-cp35-abi3-win32.whl", hash = "sha256:53dedbd2a6a0b02924718b520a723e88bcf22e37076191eb9b91b79934fb2192"}, + {file = "pycryptodome-3.14.1-cp35-abi3-win_amd64.whl", hash = "sha256:ea56a35fd0d13121417d39a83f291017551fa2c62d6daa6b04af6ece7ed30d84"}, + {file = "pycryptodome-3.14.1-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:028dcbf62d128b4335b61c9fbb7dd8c376594db607ef36d5721ee659719935d5"}, + {file = "pycryptodome-3.14.1-pp27-pypy_73-manylinux1_x86_64.whl", hash = "sha256:69f05aaa90c99ac2f2af72d8d7f185f729721ad7c4be89e9e3d0ab101b0ee875"}, + {file = "pycryptodome-3.14.1-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:12ef157eb1e01a157ca43eda275fa68f8db0dd2792bc4fe00479ab8f0e6ae075"}, + {file = "pycryptodome-3.14.1-pp27-pypy_73-win32.whl", hash = "sha256:f572a3ff7b6029dd9b904d6be4e0ce9e309dcb847b03e3ac8698d9d23bb36525"}, + 
{file = "pycryptodome-3.14.1-pp36-pypy36_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9924248d6920b59c260adcae3ee231cd5af404ac706ad30aa4cd87051bf09c50"}, + {file = "pycryptodome-3.14.1-pp36-pypy36_pp73-manylinux1_x86_64.whl", hash = "sha256:e0c04c41e9ade19fbc0eff6aacea40b831bfcb2c91c266137bcdfd0d7b2f33ba"}, + {file = "pycryptodome-3.14.1-pp36-pypy36_pp73-manylinux2010_x86_64.whl", hash = "sha256:893f32210de74b9f8ac869ed66c97d04e7d351182d6d39ebd3b36d3db8bda65d"}, + {file = "pycryptodome-3.14.1-pp36-pypy36_pp73-win32.whl", hash = "sha256:7fb90a5000cc9c9ff34b4d99f7f039e9c3477700e309ff234eafca7b7471afc0"}, + {file = "pycryptodome-3.14.1.tar.gz", hash = "sha256:e04e40a7f8c1669195536a37979dd87da2c32dbdc73d6fe35f0077b0c17c803b"}, ] pygments = [ - {file = "Pygments-2.8.1-py3-none-any.whl", hash = "sha256:534ef71d539ae97d4c3a4cf7d6f110f214b0e687e92f9cb9d2a3b0d3101289c8"}, - {file = "Pygments-2.8.1.tar.gz", hash = "sha256:2656e1a6edcdabf4275f9a3640db59fd5de107d88e8663c5d4e9a0fa62f77f94"}, + {file = "Pygments-2.11.2-py3-none-any.whl", hash = "sha256:44238f1b60a76d78fc8ca0528ee429702aae011c265fe6a8dd8b63049ae41c65"}, + {file = "Pygments-2.11.2.tar.gz", hash = "sha256:4e426f72023d88d03b2fa258de560726ce890ff3b630f88c21cbb8b2503b8c6a"}, ] -pylint = [ - {file = "pylint-2.8.2-py3-none-any.whl", hash = "sha256:f7e2072654a6b6afdf5e2fb38147d3e2d2d43c89f648637baab63e026481279b"}, - {file = "pylint-2.8.2.tar.gz", hash = "sha256:586d8fa9b1891f4b725f587ef267abe2a1bad89d6b184520c7f07a253dd6e217"}, +pymdown-extensions = [ + {file = "pymdown-extensions-9.1.tar.gz", hash = "sha256:74247f2c80f1d9e3c7242abe1c16317da36c6f26c7ad4b8a7f457f0ec20f0365"}, + {file = "pymdown_extensions-9.1-py3-none-any.whl", hash = "sha256:b03e66f91f33af4a6e7a0e20c740313522995f69a03d86316b1449766c473d0e"}, ] pyparsing = [ - {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, - {file = "pyparsing-2.4.7.tar.gz", hash = 
"sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, + {file = "pyparsing-3.0.7-py3-none-any.whl", hash = "sha256:a6c06a88f252e6c322f65faf8f418b16213b51bdfaece0524c1c1bc30c63c484"}, + {file = "pyparsing-3.0.7.tar.gz", hash = "sha256:18ee9022775d270c55187733956460083db60b37d0d0fb357445f3094eed3eea"}, ] pytest = [ - {file = "pytest-4.6.11-py2.py3-none-any.whl", hash = "sha256:a00a7d79cbbdfa9d21e7d0298392a8dd4123316bfac545075e6f8f24c94d8c97"}, - {file = "pytest-4.6.11.tar.gz", hash = "sha256:50fa82392f2120cc3ec2ca0a75ee615be4c479e66669789771f1758332be4353"}, + {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, + {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, ] python-dateutil = [ - {file = "python-dateutil-2.8.1.tar.gz", hash = "sha256:73ebfe9dbf22e832286dafa60473e4cd239f8592f699aa5adaf10050e6e1823c"}, - {file = "python_dateutil-2.8.1-py2.py3-none-any.whl", hash = "sha256:75bb3f31ea686f1197762692a9ee6a7550b59fc6ca3a1f4b5d7e32fb98e2da2a"}, + {file = "python-dateutil-2.8.2.tar.gz", hash = "sha256:0123cacc1627ae19ddf3c27a5de5bd67ee4586fbdd6440d9748f8abb483d3e86"}, + {file = "python_dateutil-2.8.2-py2.py3-none-any.whl", hash = "sha256:961d03dc3453ebbc59dbdea9e4e11c5651520a876d0f4db161e8674aae935da9"}, ] python-pptx = [ - {file = "python-pptx-0.6.18.tar.gz", hash = "sha256:a857d69e52d7e8a8fb32fca8182fdd4a3c68c689de8d4e4460e9b4a95efa7bc4"}, + {file = "python-pptx-0.6.21.tar.gz", hash = "sha256:7798a2aaf89563565b3c7120c0acfe9aff775db0db3580544e3bf4840c2e378f"}, +] +pytkdocs = [ + {file = "pytkdocs-0.15.0-py3-none-any.whl", hash = "sha256:d6b2aec34448ec89acb8c1c25062cc1e70c6b26395d46fc7ee753b7e5a4e736a"}, + {file = "pytkdocs-0.15.0.tar.gz", hash = "sha256:4b45af89d6fa5fa50f979b0f9f54539286b84e245c81991bb838149aa2d9d9c9"}, ] pytz = [ - {file = "pytz-2021.1-py2.py3-none-any.whl", hash = 
"sha256:eb10ce3e7736052ed3623d49975ce333bcd712c7bb19a58b9e2089d4057d0798"}, - {file = "pytz-2021.1.tar.gz", hash = "sha256:83a4a90894bf38e243cf052c8b58f381bfe9a7a483f6a9cab140bc7f702ac4da"}, + {file = "pytz-2021.3-py2.py3-none-any.whl", hash = "sha256:3672058bc3453457b622aab7a1c3bfd5ab0bdae451512f6cf25f64ed37f5b87c"}, + {file = "pytz-2021.3.tar.gz", hash = "sha256:acad2d8b20a1af07d4e4c9d2e9285c5ed9104354062f275f3fcd88dcef4f1326"}, ] -pytzdata = [ - {file = "pytzdata-2020.1-py2.py3-none-any.whl", hash = "sha256:e1e14750bcf95016381e4d472bad004eef710f2d6417240904070b3d6654485f"}, - {file = "pytzdata-2020.1.tar.gz", hash = "sha256:3efa13b335a00a8de1d345ae41ec78dd11c9f8807f522d39850f2dd828681540"}, +pytz-deprecation-shim = [ + {file = "pytz_deprecation_shim-0.1.0.post0-py2.py3-none-any.whl", hash = "sha256:8314c9692a636c8eb3bda879b9f119e350e93223ae83e70e80c31675a0fdc1a6"}, + {file = "pytz_deprecation_shim-0.1.0.post0.tar.gz", hash = "sha256:af097bae1b616dde5c5744441e2ddc69e74dfdcb0c263129610d85b87445a59d"}, ] pyyaml = [ {file = "PyYAML-5.4.1-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:3b2b1824fe7112845700f815ff6a489360226a5609b96ec2190a45e62a9fc922"}, @@ -1550,13 +1507,25 @@ pyyaml = [ {file = "PyYAML-5.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:c20cfa2d49991c8b4147af39859b167664f2ad4561704ee74c1de03318e898db"}, {file = "PyYAML-5.4.1.tar.gz", hash = "sha256:607774cbba28732bfa802b54baa7484215f530991055bb562efbed5b2f20a45e"}, ] +pyyaml-env-tag = [ + {file = "pyyaml_env_tag-0.1-py3-none-any.whl", hash = "sha256:af31106dec8a4d68c60207c1886031cbf839b68aa7abccdb19868200532c2069"}, + {file = "pyyaml_env_tag-0.1.tar.gz", hash = "sha256:70092675bda14fdec33b31ba77e7543de9ddc88f2e5b99160396572d11525bdb"}, +] requests = [ - {file = "requests-2.25.1-py2.py3-none-any.whl", hash = "sha256:c210084e36a42ae6b9219e00e48287def368a26d03a048ddad7bfee44f75871e"}, - {file = "requests-2.25.1.tar.gz", hash = "sha256:27973dd4a904a4f13b263a19c866c13b92a39ed1c964655f025f3f8d3d75b804"}, + 
{file = "requests-2.27.1-py2.py3-none-any.whl", hash = "sha256:f22fa1e554c9ddfd16e6e41ac79759e17be9e492b3587efa038054674760e72d"}, + {file = "requests-2.27.1.tar.gz", hash = "sha256:68d7c56fd5a8999887728ef304a6d12edc7be74f1cfa47714fc8b414525c9a61"}, +] +rich = [ + {file = "rich-11.1.0-py3-none-any.whl", hash = "sha256:365ebcdbfb3aa8d4b0ed2490e0fbf7b886a39d14eb7ea5fb7aece950835e1eed"}, + {file = "rich-11.1.0.tar.gz", hash = "sha256:43e03d8eec12e21beaecc22c828a41c4247356414a12d5879834863d4ad53816"}, +] +schema = [ + {file = "schema-0.7.5-py2.py3-none-any.whl", hash = "sha256:f3ffdeeada09ec34bf40d7d79996d9f7175db93b7a5065de0faa7f41083c1e6c"}, + {file = "schema-0.7.5.tar.gz", hash = "sha256:f06717112c61895cabc4707752b88716e8420a8819d71404501e114f91043197"}, ] send2trash = [ - {file = "Send2Trash-1.5.0-py3-none-any.whl", hash = "sha256:f1691922577b6fa12821234aeb57599d887c4900b9ca537948d2dac34aea888b"}, - {file = "Send2Trash-1.5.0.tar.gz", hash = "sha256:60001cc07d707fe247c94f74ca6ac0d3255aabcb930529690897ca2a39db28b2"}, + {file = "Send2Trash-1.8.0-py3-none-any.whl", hash = "sha256:f20eaadfdb517eaca5ce077640cb261c7d2698385a6a0f072a4a5447fd49fa08"}, + {file = "Send2Trash-1.8.0.tar.gz", hash = "sha256:d2c24762fd3759860a0aff155e45871447ea58d2be6bdd39b5c8f966a0c99c2d"}, ] simplematch = [ {file = "simplematch-1.3-py3-none-any.whl", hash = "sha256:be1d9a7e5055aaf9b35d16f565d6fc198d03e2b5804e954557e1c972d2f868f9"}, @@ -1566,131 +1535,165 @@ six = [ {file = "six-1.12.0-py2.py3-none-any.whl", hash = "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c"}, {file = "six-1.12.0.tar.gz", hash = "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73"}, ] -snowballstemmer = [ - {file = "snowballstemmer-2.1.0-py2.py3-none-any.whl", hash = "sha256:b51b447bea85f9968c13b650126a888aabd4cb4463fca868ec596826325dedc2"}, - {file = "snowballstemmer-2.1.0.tar.gz", hash = "sha256:e997baa4f2e9139951b6f4c631bad912dfd3c792467e2f03d7239464af90e914"}, -] 
sortedcontainers = [ - {file = "sortedcontainers-2.3.0-py2.py3-none-any.whl", hash = "sha256:37257a32add0a3ee490bb170b599e93095eed89a55da91fa9f48753ea12fd73f"}, - {file = "sortedcontainers-2.3.0.tar.gz", hash = "sha256:59cc937650cf60d677c16775597c89a960658a09cf7c1a668f86e1e4464b10a1"}, + {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, + {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, ] soupsieve = [ - {file = "soupsieve-2.2.1-py3-none-any.whl", hash = "sha256:c2c1c2d44f158cdbddab7824a9af8c4f83c76b1e23e049479aa432feb6c4c23b"}, - {file = "soupsieve-2.2.1.tar.gz", hash = "sha256:052774848f448cf19c7e959adf5566904d525f33a3f8b6ba6f6f8f26ec7de0cc"}, + {file = "soupsieve-2.3.1-py3-none-any.whl", hash = "sha256:1a3cca2617c6b38c0343ed661b1fa5de5637f257d4fe22bd9f1338010a1efefb"}, + {file = "soupsieve-2.3.1.tar.gz", hash = "sha256:b8d49b1cd4f037c7082a9683dfa1801aa2597fb11c3a1155b7a5b94829b4f1f9"}, ] speechrecognition = [ {file = "SpeechRecognition-3.8.1-py2.py3-none-any.whl", hash = "sha256:4d8f73a0c05ec70331c3bacaa89ecc06dfa8d9aba0899276664cda06ab597e8e"}, ] -sphinx = [ - {file = "Sphinx-3.5.4-py3-none-any.whl", hash = "sha256:2320d4e994a191f4b4be27da514e46b3d6b420f2ff895d064f52415d342461e8"}, - {file = "Sphinx-3.5.4.tar.gz", hash = "sha256:19010b7b9fa0dc7756a6e105b2aacd3a80f798af3c25c273be64d7beeb482cb1"}, -] -sphinx-rtd-theme = [ - {file = "sphinx_rtd_theme-0.5.2-py2.py3-none-any.whl", hash = "sha256:4a05bdbe8b1446d77a01e20a23ebc6777c74f43237035e76be89699308987d6f"}, - {file = "sphinx_rtd_theme-0.5.2.tar.gz", hash = "sha256:32bd3b5d13dc8186d7a42fc816a23d32e83a4827d7d9882948e7b837c232da5a"}, -] -sphinxcontrib-applehelp = [ - {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, - {file = 
"sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, -] -sphinxcontrib-devhelp = [ - {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, - {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, -] -sphinxcontrib-htmlhelp = [ - {file = "sphinxcontrib-htmlhelp-1.0.3.tar.gz", hash = "sha256:e8f5bb7e31b2dbb25b9cc435c8ab7a79787ebf7f906155729338f3156d93659b"}, - {file = "sphinxcontrib_htmlhelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:3c0bc24a2c41e340ac37c85ced6dafc879ab485c095b1d65d2461ac2f7cca86f"}, -] -sphinxcontrib-jsmath = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] -sphinxcontrib-qthelp = [ - {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, - {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, -] -sphinxcontrib-serializinghtml = [ - {file = "sphinxcontrib-serializinghtml-1.1.4.tar.gz", hash = "sha256:eaa0eccc86e982a9b939b2b82d12cc5d013385ba5eadcc7e4fed23f4405f77bc"}, - {file = "sphinxcontrib_serializinghtml-1.1.4-py2.py3-none-any.whl", hash = "sha256:f242a81d423f59617a8e5cf16f5d4d74e28ee9a66f9e5b637a18082991db5a9a"}, -] textract = [ - {file = "textract-1.6.3-py3-none-any.whl", hash = "sha256:ff2f4c61d720d3291e2deb870d3b24d0c63397cb4c094966e96c1bdb2f89df38"}, - {file = "textract-1.6.3.tar.gz", hash = "sha256:6213b2f923b85af8e5e380241db9361e3f5dbd444a74108745fd4121ae151310"}, + {file = "textract-1.6.4.tar.gz", hash = 
"sha256:35ac0302e2dbe53eb8d513b4cf0741264ea89a695fd89a3d48e3bd94d517cef6"}, ] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] -traitlets = [ - {file = "traitlets-4.3.3-py2.py3-none-any.whl", hash = "sha256:70b4c6a1d9019d7b4f6846832288f86998aa3b9207c6821f3578a6a6a467fe44"}, - {file = "traitlets-4.3.3.tar.gz", hash = "sha256:d023ee369ddd2763310e4c3eae1ff649689440d4ae59d7485eb4cfbbe3e359f7"}, +tomli = [ + {file = "tomli-1.2.3-py3-none-any.whl", hash = "sha256:e3069e4be3ead9668e21cb9b074cd948f7b3113fd9c8bba083f48247aab8b11c"}, + {file = "tomli-1.2.3.tar.gz", hash = "sha256:05b6166bff487dc068d322585c7ea4ef78deed501cc124060e0f238e89a9231f"}, ] typed-ast = [ - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:2068531575a125b87a41802130fa7e29f26c09a2833fea68d9a40cf33902eba6"}, - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:c907f561b1e83e93fad565bac5ba9c22d96a54e7ea0267c708bffe863cbe4075"}, - {file = "typed_ast-1.4.3-cp35-cp35m-manylinux2014_aarch64.whl", hash = "sha256:1b3ead4a96c9101bef08f9f7d1217c096f31667617b58de957f690c92378b528"}, - {file = "typed_ast-1.4.3-cp35-cp35m-win32.whl", hash = "sha256:dde816ca9dac1d9c01dd504ea5967821606f02e510438120091b84e852367428"}, - {file = "typed_ast-1.4.3-cp35-cp35m-win_amd64.whl", hash = "sha256:777a26c84bea6cd934422ac2e3b78863a37017618b6e5c08f92ef69853e765d3"}, - {file = "typed_ast-1.4.3-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f8afcf15cc511ada719a88e013cec87c11aff7b91f019295eb4530f96fe5ef2f"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:52b1eb8c83f178ab787f3a4283f68258525f8d70f778a2f6dd54d3b5e5fb4341"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux1_x86_64.whl", hash = 
"sha256:01ae5f73431d21eead5015997ab41afa53aa1fbe252f9da060be5dad2c730ace"}, - {file = "typed_ast-1.4.3-cp36-cp36m-manylinux2014_aarch64.whl", hash = "sha256:c190f0899e9f9f8b6b7863debfb739abcb21a5c054f911ca3596d12b8a4c4c7f"}, - {file = "typed_ast-1.4.3-cp36-cp36m-win32.whl", hash = "sha256:398e44cd480f4d2b7ee8d98385ca104e35c81525dd98c519acff1b79bdaac363"}, - {file = "typed_ast-1.4.3-cp36-cp36m-win_amd64.whl", hash = "sha256:bff6ad71c81b3bba8fa35f0f1921fb24ff4476235a6e94a26ada2e54370e6da7"}, - {file = "typed_ast-1.4.3-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:0fb71b8c643187d7492c1f8352f2c15b4c4af3f6338f21681d3681b3dc31a266"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:760ad187b1041a154f0e4d0f6aae3e40fdb51d6de16e5c99aedadd9246450e9e"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:5feca99c17af94057417d744607b82dd0a664fd5e4ca98061480fd8b14b18d04"}, - {file = "typed_ast-1.4.3-cp37-cp37m-manylinux2014_aarch64.whl", hash = "sha256:95431a26309a21874005845c21118c83991c63ea800dd44843e42a916aec5899"}, - {file = "typed_ast-1.4.3-cp37-cp37m-win32.whl", hash = "sha256:aee0c1256be6c07bd3e1263ff920c325b59849dc95392a05f258bb9b259cf39c"}, - {file = "typed_ast-1.4.3-cp37-cp37m-win_amd64.whl", hash = "sha256:9ad2c92ec681e02baf81fdfa056fe0d818645efa9af1f1cd5fd6f1bd2bdfd805"}, - {file = "typed_ast-1.4.3-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:b36b4f3920103a25e1d5d024d155c504080959582b928e91cb608a65c3a49e1a"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_i686.whl", hash = "sha256:067a74454df670dcaa4e59349a2e5c81e567d8d65458d480a5b3dfecec08c5ff"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:7538e495704e2ccda9b234b82423a4038f324f3a10c43bc088a1636180f11a41"}, - {file = "typed_ast-1.4.3-cp38-cp38-manylinux2014_aarch64.whl", hash = "sha256:af3d4a73793725138d6b334d9d247ce7e5f084d96284ed23f22ee626a7b88e39"}, - {file = "typed_ast-1.4.3-cp38-cp38-win32.whl", hash = 
"sha256:f2362f3cb0f3172c42938946dbc5b7843c2a28aec307c49100c8b38764eb6927"}, - {file = "typed_ast-1.4.3-cp38-cp38-win_amd64.whl", hash = "sha256:dd4a21253f42b8d2b48410cb31fe501d32f8b9fbeb1f55063ad102fe9c425e40"}, - {file = "typed_ast-1.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f328adcfebed9f11301eaedfa48e15bdece9b519fb27e6a8c01aa52a17ec31b3"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_i686.whl", hash = "sha256:2c726c276d09fc5c414693a2de063f521052d9ea7c240ce553316f70656c84d4"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux1_x86_64.whl", hash = "sha256:cae53c389825d3b46fb37538441f75d6aecc4174f615d048321b716df2757fb0"}, - {file = "typed_ast-1.4.3-cp39-cp39-manylinux2014_aarch64.whl", hash = "sha256:b9574c6f03f685070d859e75c7f9eeca02d6933273b5e69572e5ff9d5e3931c3"}, - {file = "typed_ast-1.4.3-cp39-cp39-win32.whl", hash = "sha256:209596a4ec71d990d71d5e0d312ac935d86930e6eecff6ccc7007fe54d703808"}, - {file = "typed_ast-1.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:9c6d1a54552b5330bc657b7ef0eae25d00ba7ffe85d9ea8ae6540d2197a3788c"}, - {file = "typed_ast-1.4.3.tar.gz", hash = "sha256:fb1bbeac803adea29cedd70781399c99138358c26d05fcbd23c13016b7f5ec65"}, + {file = "typed_ast-1.5.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:183b183b7771a508395d2cbffd6db67d6ad52958a5fdc99f450d954003900266"}, + {file = "typed_ast-1.5.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:676d051b1da67a852c0447621fdd11c4e104827417bf216092ec3e286f7da596"}, + {file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bc2542e83ac8399752bc16e0b35e038bdb659ba237f4222616b4e83fb9654985"}, + {file = "typed_ast-1.5.2-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:74cac86cc586db8dfda0ce65d8bcd2bf17b58668dfcc3652762f3ef0e6677e76"}, + {file = "typed_ast-1.5.2-cp310-cp310-win_amd64.whl", hash = "sha256:18fe320f354d6f9ad3147859b6e16649a0781425268c4dde596093177660e71a"}, + {file 
= "typed_ast-1.5.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:31d8c6b2df19a777bc8826770b872a45a1f30cfefcfd729491baa5237faae837"}, + {file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:963a0ccc9a4188524e6e6d39b12c9ca24cc2d45a71cfdd04a26d883c922b4b78"}, + {file = "typed_ast-1.5.2-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:0eb77764ea470f14fcbb89d51bc6bbf5e7623446ac4ed06cbd9ca9495b62e36e"}, + {file = "typed_ast-1.5.2-cp36-cp36m-win_amd64.whl", hash = "sha256:294a6903a4d087db805a7656989f613371915fc45c8cc0ddc5c5a0a8ad9bea4d"}, + {file = "typed_ast-1.5.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:26a432dc219c6b6f38be20a958cbe1abffcc5492821d7e27f08606ef99e0dffd"}, + {file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7407cfcad702f0b6c0e0f3e7ab876cd1d2c13b14ce770e412c0c4b9728a0f88"}, + {file = "typed_ast-1.5.2-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f30ddd110634c2d7534b2d4e0e22967e88366b0d356b24de87419cc4410c41b7"}, + {file = "typed_ast-1.5.2-cp37-cp37m-win_amd64.whl", hash = "sha256:8c08d6625bb258179b6e512f55ad20f9dfef019bbfbe3095247401e053a3ea30"}, + {file = "typed_ast-1.5.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:90904d889ab8e81a956f2c0935a523cc4e077c7847a836abee832f868d5c26a4"}, + {file = "typed_ast-1.5.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:bbebc31bf11762b63bf61aaae232becb41c5bf6b3461b80a4df7e791fabb3aca"}, + {file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c29dd9a3a9d259c9fa19d19738d021632d673f6ed9b35a739f48e5f807f264fb"}, + {file = "typed_ast-1.5.2-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:58ae097a325e9bb7a684572d20eb3e1809802c5c9ec7108e85da1eb6c1a3331b"}, + {file = 
"typed_ast-1.5.2-cp38-cp38-win_amd64.whl", hash = "sha256:da0a98d458010bf4fe535f2d1e367a2e2060e105978873c04c04212fb20543f7"}, + {file = "typed_ast-1.5.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:33b4a19ddc9fc551ebabca9765d54d04600c4a50eda13893dadf67ed81d9a098"}, + {file = "typed_ast-1.5.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:1098df9a0592dd4c8c0ccfc2e98931278a6c6c53cb3a3e2cf7e9ee3b06153344"}, + {file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42c47c3b43fe3a39ddf8de1d40dbbfca60ac8530a36c9b198ea5b9efac75c09e"}, + {file = "typed_ast-1.5.2-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:f290617f74a610849bd8f5514e34ae3d09eafd521dceaa6cf68b3f4414266d4e"}, + {file = "typed_ast-1.5.2-cp39-cp39-win_amd64.whl", hash = "sha256:df05aa5b241e2e8045f5f4367a9f6187b09c4cdf8578bb219861c4e27c443db5"}, + {file = "typed_ast-1.5.2.tar.gz", hash = "sha256:525a2d4088e70a9f75b08b3f87a51acc9cde640e19cc523c7e41aa355564ae27"}, +] +types-pyyaml = [ + {file = "types-PyYAML-6.0.4.tar.gz", hash = "sha256:6252f62d785e730e454dfa0c9f0fb99d8dae254c5c3c686903cf878ea27c04b7"}, + {file = "types_PyYAML-6.0.4-py3-none-any.whl", hash = "sha256:693b01c713464a6851f36ff41077f8adbc6e355eda929addfb4a97208aea9b4b"}, ] typing-extensions = [ - {file = "typing_extensions-3.7.4.3-py2-none-any.whl", hash = "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f"}, - {file = "typing_extensions-3.7.4.3-py3-none-any.whl", hash = "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918"}, - {file = "typing_extensions-3.7.4.3.tar.gz", hash = "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c"}, + {file = "typing_extensions-4.0.1-py3-none-any.whl", hash = "sha256:7f001e5ac290a0c0401508864c7ec868be4e701886d5b573a9528ed3973d9d3b"}, + {file = "typing_extensions-4.0.1.tar.gz", hash = 
"sha256:4ca091dea149f945ec56afb48dae714f21e8692ef22a395223bcd328961b6a0e"}, +] +tzdata = [ + {file = "tzdata-2021.5-py2.py3-none-any.whl", hash = "sha256:3eee491e22ebfe1e5cfcc97a4137cd70f092ce59144d81f8924a844de05ba8f5"}, + {file = "tzdata-2021.5.tar.gz", hash = "sha256:68dbe41afd01b867894bbdfd54fa03f468cfa4f0086bfb4adcd8de8f24f3ee21"}, ] tzlocal = [ - {file = "tzlocal-1.5.1.tar.gz", hash = "sha256:4ebeb848845ac898da6519b9b31879cf13b6626f7184c496037b818e238f2c4e"}, + {file = "tzlocal-4.1-py3-none-any.whl", hash = "sha256:28ba8d9fcb6c9a782d6e0078b4f6627af1ea26aeaa32b4eab5324abc7df4149f"}, + {file = "tzlocal-4.1.tar.gz", hash = "sha256:0f28015ac68a5c067210400a9197fc5d36ba9bc3f8eaf1da3cbd59acdfed9e09"}, ] urllib3 = [ - {file = "urllib3-1.26.4-py2.py3-none-any.whl", hash = "sha256:2f4da4594db7e1e110a944bb1b551fdf4e6c136ad42e4234131391e21eb5b0df"}, - {file = "urllib3-1.26.4.tar.gz", hash = "sha256:e7b021f7241115872f92f43c6508082facffbd1c048e3c6e2bb9c2a157e28937"}, -] -wcwidth = [ - {file = "wcwidth-0.2.5-py2.py3-none-any.whl", hash = "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784"}, - {file = "wcwidth-0.2.5.tar.gz", hash = "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83"}, -] -wrapt = [ - {file = "wrapt-1.12.1.tar.gz", hash = "sha256:b62ffa81fb85f4332a4f609cab4ac40709470da05643a082ec1eb88e6d9b97d7"}, + {file = "urllib3-1.26.8-py2.py3-none-any.whl", hash = "sha256:000ca7f471a233c2251c6c7023ee85305721bfdf18621ebff4fd17a8653427ed"}, + {file = "urllib3-1.26.8.tar.gz", hash = "sha256:0e7c33d9a63e7ddfcb86780aac87befc2fbddf46c58dbb487e0855f7ceec283c"}, +] +watchdog = [ + {file = "watchdog-2.1.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:9693f35162dc6208d10b10ddf0458cc09ad70c30ba689d9206e02cd836ce28a3"}, + {file = "watchdog-2.1.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:aba5c812f8ee8a3ff3be51887ca2d55fb8e268439ed44110d3846e4229eb0e8b"}, + {file = "watchdog-2.1.6-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:4ae38bf8ba6f39d5b83f78661273216e7db5b00f08be7592062cb1fc8b8ba542"}, + {file = "watchdog-2.1.6-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:ad6f1796e37db2223d2a3f302f586f74c72c630b48a9872c1e7ae8e92e0ab669"}, + {file = "watchdog-2.1.6-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:922a69fa533cb0c793b483becaaa0845f655151e7256ec73630a1b2e9ebcb660"}, + {file = "watchdog-2.1.6-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:b2fcf9402fde2672545b139694284dc3b665fd1be660d73eca6805197ef776a3"}, + {file = "watchdog-2.1.6-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3386b367e950a11b0568062b70cc026c6f645428a698d33d39e013aaeda4cc04"}, + {file = "watchdog-2.1.6-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:8f1c00aa35f504197561060ca4c21d3cc079ba29cf6dd2fe61024c70160c990b"}, + {file = "watchdog-2.1.6-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b52b88021b9541a60531142b0a451baca08d28b74a723d0c99b13c8c8d48d604"}, + {file = "watchdog-2.1.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8047da932432aa32c515ec1447ea79ce578d0559362ca3605f8e9568f844e3c6"}, + {file = "watchdog-2.1.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e92c2d33858c8f560671b448205a268096e17870dcf60a9bb3ac7bfbafb7f5f9"}, + {file = "watchdog-2.1.6-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b7d336912853d7b77f9b2c24eeed6a5065d0a0cc0d3b6a5a45ad6d1d05fb8cd8"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_aarch64.whl", hash = "sha256:cca7741c0fcc765568350cb139e92b7f9f3c9a08c4f32591d18ab0a6ac9e71b6"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_armv7l.whl", hash = "sha256:25fb5240b195d17de949588628fdf93032ebf163524ef08933db0ea1f99bd685"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_i686.whl", hash = "sha256:be9be735f827820a06340dff2ddea1fb7234561fa5e6300a62fe7f54d40546a0"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_ppc64.whl", hash = "sha256:d0d19fb2441947b58fbf91336638c2b9f4cc98e05e1045404d7a4cb7cddc7a65"}, + {file = 
"watchdog-2.1.6-py3-none-manylinux2014_ppc64le.whl", hash = "sha256:3becdb380d8916c873ad512f1701f8a92ce79ec6978ffde92919fd18d41da7fb"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_s390x.whl", hash = "sha256:ae67501c95606072aafa865b6ed47343ac6484472a2f95490ba151f6347acfc2"}, + {file = "watchdog-2.1.6-py3-none-manylinux2014_x86_64.whl", hash = "sha256:e0f30db709c939cabf64a6dc5babb276e6d823fd84464ab916f9b9ba5623ca15"}, + {file = "watchdog-2.1.6-py3-none-win32.whl", hash = "sha256:e02794ac791662a5eafc6ffeaf9bcc149035a0e48eb0a9d40a8feb4622605a3d"}, + {file = "watchdog-2.1.6-py3-none-win_amd64.whl", hash = "sha256:bd9ba4f332cf57b2c1f698be0728c020399ef3040577cde2939f2e045b39c1e5"}, + {file = "watchdog-2.1.6-py3-none-win_ia64.whl", hash = "sha256:a0f1c7edf116a12f7245be06120b1852275f9506a7d90227648b250755a03923"}, + {file = "watchdog-2.1.6.tar.gz", hash = "sha256:a36e75df6c767cbf46f61a91c70b3ba71811dfa0aca4a324d9407a06a8b7a2e7"}, ] xattr = [ - {file = "xattr-0.9.7-cp27-cp27m-macosx_10_13_x86_64.whl", hash = "sha256:1b2cd125150aa9bbfb02929627101b3303920a68487e9c865ddd170188ddd796"}, - {file = "xattr-0.9.7-cp36-cp36m-macosx_10_13_x86_64.whl", hash = "sha256:e2c72a3a501bac715489180ca2b646e48a1ca3a794c1103dd6f0f987d43f570c"}, - {file = "xattr-0.9.7-cp37-cp37m-macosx_10_13_x86_64.whl", hash = "sha256:1e11ba8ab86dfe74419704c53722ea9b5915833db07416e7c10db5dfb02218bb"}, - {file = "xattr-0.9.7.tar.gz", hash = "sha256:b0bbca828e04ef2d484a6522ae7b3a7ccad5e43fa1c6f54d78e24bb870f49d44"}, + {file = "xattr-0.9.9-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:58a9fb4fd19b467e88f4b75b5243706caa57e312d3aee757b53b57c7fd0f4ba9"}, + {file = "xattr-0.9.9-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:e71efca59705c7abde5b7f76323ebe00ed2977f10cba4204b9421dada036b5ca"}, + {file = "xattr-0.9.9-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:1aad96b6603961c3d1ca1aaa8369b1a8d684a7b37357b2428087c286bf0e561c"}, + {file = "xattr-0.9.9-cp27-cp27m-manylinux2010_i686.whl", hash = 
"sha256:46cb74f98d31d9d70f975ec3e6554360a9bdcbb4b9fb50a69fabe54f9f928c97"}, + {file = "xattr-0.9.9-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:80c2db56058a687d7439be041f916cbeb2943fbe2623e53d5da721a4552d8991"}, + {file = "xattr-0.9.9-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:c360d1cc42e885b64d84f64de3c501dd7bce576248327ef583b4625ee63aa023"}, + {file = "xattr-0.9.9-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:debd87afe6bdf88c3689bde52eecf2b166388b13ef7388259d23223374db417d"}, + {file = "xattr-0.9.9-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:4280c9f33a8678828f1bbc3d3dc8b823b5e4a113ee5ecb0fb98bff60cc2b9ad1"}, + {file = "xattr-0.9.9-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:e0916ec1656d2071cd3139d1f52426825985d8ed076f981ef7f0bc13dfa8e96c"}, + {file = "xattr-0.9.9-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a517916fbf2f58a3222bb2048fe1eeff4e23e07a4ce6228a27de004c80bf53ab"}, + {file = "xattr-0.9.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e886c882b3b28c7a684c3e3daf46347da5428a46b88bc6d62c4867d574b90c54"}, + {file = "xattr-0.9.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:373e3d1fd9258438fc38d1438142d3659f36743f374a20457346ef26741ed441"}, + {file = "xattr-0.9.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a7beeb54ca140273b2f6320bb98b701ec30628af2ebe4eb30f7051419eb4ef3"}, + {file = "xattr-0.9.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ef3ca29cdaae9c47c625d84bb6c9046f7275cccde0ea805caa23ca58d3671f3f"}, + {file = "xattr-0.9.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5c381d890931cd18b137ce3fb5c5f08b672c3c61e2e47b1a7442ee46e827abfe"}, + {file = "xattr-0.9.9-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:59c5783ccf57cf2700ce57d51a92134900ed26f6ab20d209f383fb898903fea6"}, + {file = 
"xattr-0.9.9-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:966b885b69d95362e2a12d39f84889cf857090e57263b5ac33409498aa00c160"}, + {file = "xattr-0.9.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efaaf0cb1ea8e9febb7baad301ae8cc9ad7a96fdfc5c6399d165e7a19e3e61ce"}, + {file = "xattr-0.9.9-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:f19fa75ed1e9db86354efab29869cb2be6976d456bd7c89e67b118d5384a1d98"}, + {file = "xattr-0.9.9-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ca28ad06828244b315214ee35388f57e81e90aac2ceac3f32e42ae394e31b9c"}, + {file = "xattr-0.9.9-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:532c7f1656dd2fe937116b9e210229f716d7fc7ac142f9cdace7da92266d32e8"}, + {file = "xattr-0.9.9-cp36-cp36m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:11c28033c17e98c67e0def9d6ebd415ad3c006a7bc3fee6bad79c5e52d0dff49"}, + {file = "xattr-0.9.9-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:473cabb30e544ea08c8c01c1ef18053147cdc8552d443ac97815e46fbb13c7d4"}, + {file = "xattr-0.9.9-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:c4a308522b444d090fbd66a385c9519b6b977818226921b0d2fc403667c93564"}, + {file = "xattr-0.9.9-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:82493434488aca72d88b5129dac8f212e7b8bdca7ceffe7bb977c850f2452e4e"}, + {file = "xattr-0.9.9-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:e41d289706c7e8940f4d08e865da6a8ae988123e40a44f9a97ddc09e67795d7d"}, + {file = "xattr-0.9.9-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ef08698e360cf43688dca3db3421b156b29948a714d5d089348073f463c11646"}, + {file = "xattr-0.9.9-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4eb10ac16ca8d534c0395425d52121e0c1981f808e1b3f577f6a5ec33d3853e4"}, + {file = 
"xattr-0.9.9-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5605fec07b0e964bd980cc70ec335b9eb1b7ac7c6f314c7c2d8f54b09104fe4c"}, + {file = "xattr-0.9.9-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:974e7d577ddb15e4552fb0ec10a4cfe09bdf6267365aa2b8394bb04637785aad"}, + {file = "xattr-0.9.9-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:ad6777de922c638bfa87a0d7faebc5722ddef04a1210b2a8909289b58b769af0"}, + {file = "xattr-0.9.9-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:3887e70873ebf0efbde32f9929ec1c7e45ec0013561743e2cc0406a91e51113b"}, + {file = "xattr-0.9.9-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:83caa8e93a45a0f25f91b92d9b45f490c87bff74f02555df6312efeba0dacc31"}, + {file = "xattr-0.9.9-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:e33ec0a1d913d946d1ab7509f37ee37306c45af735347f13b963df34ffe6e029"}, + {file = "xattr-0.9.9-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:263c58dca83372260c5c195e0b59959e38e1f107f0b7350de82e3db38479036c"}, + {file = "xattr-0.9.9-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:125dfb9905428162349d3b8b825d9a18280893f0cb0db2a2467d5ef253fa6ce2"}, + {file = "xattr-0.9.9-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e243524e0dde16d7a2e1b52512ad2c6964df2143dd1c79b820dcb4c6c0822c20"}, + {file = "xattr-0.9.9-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:01ec07d24a14406bdc6a123041c63a88e1c4a3f820e4a7d30f7609d57311b499"}, + {file = "xattr-0.9.9-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:85c1df5f1d209345ea96de137419e886a27bb55076b3ae01faacf35aafcf3a61"}, + {file = "xattr-0.9.9-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:ca74d3eff92d6dc16e271fbad9cbab547fb9a0c983189c4031c3ff3d150dd871"}, + {file = "xattr-0.9.9-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:7d17505e49ac70c0e71939c5aac96417a863583fb30a2d6304d5ac881230548f"}, + {file = "xattr-0.9.9-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:1ae47a6398d3c04623fa386a4aa2f66e5cd3cdb1a7e69d1bfaeb8c73983bf271"}, + {file = "xattr-0.9.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:809e2537d0aff9fca97dacf3245cbbaf711bbced5d1b0235a8d1906b04e26114"}, + {file = "xattr-0.9.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:de3af84364f06d67b3662ccf7c1a73e1d389d8d274394e952651e7bf1bbd2718"}, + {file = "xattr-0.9.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3b62cdad232d2d2dedd39b543701db8e3883444ec0d57ce3fab8f75e5f8b0301"}, + {file = "xattr-0.9.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6b11d2eda397d47f7075743409683c233519ca52aa1dac109b413a4d8c15b740"}, + {file = "xattr-0.9.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:661c0a939aefdf071887121f534bb10588d69c7b2dfca5c486af2fc81a0786e8"}, + {file = "xattr-0.9.9-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:5db7c2db320a8d5264d437d71f1eb7270a7e4a6545296e7766161d17752590b7"}, + {file = "xattr-0.9.9-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:83203e60cbaca9536d297e5039b285a600ff84e6e9e8536fe2d521825eeeb437"}, + {file = "xattr-0.9.9-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:42bfb4e4da06477e739770ac6942edbdc71e9fc3b497b67db5fba712fa8109c2"}, + {file = "xattr-0.9.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:67047d04d1c56ad4f0f5886085e91b0077238ab3faaec6492c3c21920c6566eb"}, + {file = "xattr-0.9.9-pp37-pypy37_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:885782bc82ded1a3f684d54a1af259ae9fcc347fa54b5a05b8aad82b8a42044c"}, + {file = "xattr-0.9.9-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:5bc84ccec618b5aa089e7cee8b07fcc92d4069aac4053da604c8143a0d6b1381"}, + {file = "xattr-0.9.9-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baeff3e5dda8ea7e9424cfaee51829f46afe3836c30d02f343f9049c685681ca"}, + {file = "xattr-0.9.9.tar.gz", hash = "sha256:09cb7e1efb3aa1b4991d6be4eb25b73dc518b4fe894f0915f5b0dcede972f346"}, ] xlrd = [ {file = "xlrd-1.2.0-py2.py3-none-any.whl", hash = "sha256:e551fb498759fa3a5384a94ccd4c3c02eb7c00ea424426e212ac0c57be9dfbde"}, {file = "xlrd-1.2.0.tar.gz", hash = "sha256:546eb36cee8db40c3eaa46c351e67ffee6eeb5fa2650b71bc4c758a29a1b29b2"}, ] xlsxwriter = [ - {file = "XlsxWriter-1.4.0-py2.py3-none-any.whl", hash = "sha256:1a6dd98892e8010d3e089d1cb61385baa8f76fa547598df2c221cc37238c72d3"}, - {file = "XlsxWriter-1.4.0.tar.gz", hash = "sha256:82be5a58c09bdc2ff8afc25acc815c465275239ddfc56d6e7b2a7e6c5d2e213b"}, + {file = "XlsxWriter-3.0.2-py3-none-any.whl", hash = "sha256:1aa65166697c42284e82f5bf9a33c2e913341eeef2b262019c3f5b5334768765"}, + {file = "XlsxWriter-3.0.2.tar.gz", hash = "sha256:53005f03e8eb58f061ebf41d5767c7495ee0772c2396fe26b7e0ca22fa9c2570"}, ] zipp = [ - {file = "zipp-3.4.1-py3-none-any.whl", hash = "sha256:51cb66cc54621609dd593d1787f286ee42a5c0adbb4b29abea5a63edc3e03098"}, - {file = "zipp-3.4.1.tar.gz", hash = "sha256:3607921face881ba3e026887d8150cca609d517579abe052ac81fc5aeffdbd76"}, + {file = "zipp-3.6.0-py3-none-any.whl", hash = "sha256:9fe5ea21568a0a70e50f273397638d39b03353731e6cbbb3fd8502a33fec40bc"}, + {file = "zipp-3.6.0.tar.gz", hash = "sha256:71c644c5369f4a6e07636f0aa966270449561fcea2e3d6747b8d23efaa9d7832"}, ] diff --git a/pyproject.toml b/pyproject.toml index d6ef558e..0da71c7a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,16 +1,22 @@ [tool.poetry] name = "organize-tool" -version = "1.10.1" +version = "2.0.0" description = "The file management automation tool" -packages = [ - { include = "organize" }, -] +packages = [{ include = 
"organize" }] authors = ["Thomas Feldmann "] license = "MIT" readme = "README.md" repository = "https://github.com/tfeldmann/organize" documentation = "https://organize.readthedocs.io" -keywords = ["file", "management", "automation", "tool", "organization", "rules", "yaml"] +keywords = [ + "file", + "management", + "automation", + "tool", + "organization", + "rules", + "yaml", +] classifiers = [ # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers "Development Status :: 5 - Production/Stable", @@ -23,32 +29,63 @@ classifiers = [ ] [tool.poetry.scripts] -organize = "organize.cli:main" +organize = "organize.cli:cli" [tool.poetry.dependencies] -python = "^3.6" -appdirs = "^1.4.4" -docopt = "^0.6.2" +python = "^3.6.2" +fs = "^2.4.14" +rich = "^11.0.0" PyYAML = "^5.4.1" -Send2Trash = "^1.5.0" -colorama = "^0.4.4" -exifread = "^2.1" -textract = { version = "^1.6.3", optional = true } -pendulum = "^2.0.5" +Send2Trash = "^1.8.0" +ExifRead = "^2.3.2" +textract = { version = "^1.6.4", optional = true } simplematch = "^1.3" -macos-tags = { version = "^1.5.1", markers = "sys_platform == 'darwin'"} +macos-tags = { version = "^1.5.1", markers = "sys_platform == 'darwin'" } +schema = "^0.7.5" +Jinja2 = "^3.0.3" +click = "^8.0.3" + +# must be in main dependencies for readthedocs. 
+mkdocs = { version = "^1.2.3", optional = true } +mkdocstrings = { version = "^0.17.0", optional = true } +mkdocs-include-markdown-plugin = { version = "^3.2.3", optional = true } +mkdocs-autorefs = { version = "^0.3.1", optional = true } [tool.poetry.extras] textract = ["textract"] +docs = [ + "mkdocs", + "mkdocstrings", + "mkdocs-include-markdown-plugin", + "mkdocs-autorefs", +] [tool.poetry.dev-dependencies] -pytest = "^4.6" -pylint = "^2.3" -ipdb = "^0.12.0" -sphinx = "^3.1.0" -sphinx-rtd-theme = "^0.5.2" -mypy = "^0.812" -flake8 = "^3.9.1" +pytest = "^6.2.5" +mypy = "^0.931" +requests = "^2.27.1" +types-PyYAML = "^6.0.3" + +[tool.mypy] +python_version = "3.6" + +[[tool.mypy.overrides]] +module = [ + "schema", + "simplematch", + "appdirs", + "send2trash", + "exifread", + "textract", + "requests", + "macos_tags", +] +ignore_missing_imports = true + +[tool.pytest.ini_options] +addopts = "--doctest-modules" +testpaths = ["tests", "organize"] +norecursedirs = ["tests/todo", "organize/filters", ".configs"] [build-system] requires = ["poetry-core>=1.0.0"] diff --git a/tests/actions/test_copy.py b/tests/actions/test_copy.py index d9596c56..a892a43f 100644 --- a/tests/actions/test_copy.py +++ b/tests/actions/test_copy.py @@ -1,150 +1,67 @@ -import os - -from organize.actions import Copy -from pathlib import Path - -USER_DIR = os.path.expanduser("~") - -DEFAULT_ARGS = { - "basedir": Path.home(), - "path": Path.home() / "test.py", - "simulate": False, -} - - -def test_tilde_expansion(mock_exists, mock_samefile, mock_copy, mock_trash, mock_mkdir): - mock_exists.return_value = False - mock_samefile.return_value = False - copy = Copy(dest="~/newname.py", overwrite=False) - updates = copy.run(**DEFAULT_ARGS) - mock_mkdir.assert_called_with(exist_ok=True, parents=True) - mock_exists.assert_called_with() - mock_trash.assert_not_called() - mock_copy.assert_called_with( - src=os.path.join(USER_DIR, "test.py"), dst=os.path.join(USER_DIR, "newname.py") - ) - # keep old file path 
- assert updates is None - - -def test_into_folder(mock_exists, mock_samefile, mock_copy, mock_trash, mock_mkdir): - mock_exists.return_value = False - mock_samefile.return_value = False - copy = Copy(dest="~/somefolder/", overwrite=False) - copy.run(**DEFAULT_ARGS) - mock_mkdir.assert_called_with(exist_ok=True, parents=True) - mock_exists.assert_called_with() - mock_trash.assert_not_called() - mock_copy.assert_called_with( - src=os.path.join(USER_DIR, "test.py"), - dst=os.path.join(USER_DIR, "somefolder", "test.py"), - ) - - -def test_overwrite(mock_exists, mock_samefile, mock_copy, mock_trash, mock_mkdir): - mock_exists.return_value = True - mock_samefile.return_value = False - copy = Copy(dest="~/somefolder/", overwrite=True) - copy.run(**DEFAULT_ARGS) - mock_mkdir.assert_called_with(exist_ok=True, parents=True) - mock_exists.assert_called_with() - mock_trash.assert_called_with(os.path.join(USER_DIR, "somefolder", "test.py")) - mock_copy.assert_called_with( - src=os.path.join(USER_DIR, "test.py"), - dst=os.path.join(USER_DIR, "somefolder", "test.py"), - ) - - -def test_already_exists(mock_exists, mock_samefile, mock_copy, mock_trash, mock_mkdir): - mock_exists.side_effect = [True, False] - mock_samefile.return_value = False - copy = Copy(dest="~/folder/", overwrite=False) - copy.run(**DEFAULT_ARGS) - mock_mkdir.assert_called_with(exist_ok=True, parents=True) - mock_exists.assert_called_with() - mock_trash.assert_not_called() - mock_copy.assert_called_with( - src=os.path.join(USER_DIR, "test.py"), - dst=os.path.join(USER_DIR, "folder", "test 2.py"), - ) - - -def test_already_exists_multiple( - mock_exists, mock_samefile, mock_copy, mock_trash, mock_mkdir -): - mock_exists.side_effect = [True, True, True, False] - mock_samefile.return_value = False - copy = Copy(dest="~/folder/", overwrite=False) - copy.run(**DEFAULT_ARGS) - mock_mkdir.assert_called_with(exist_ok=True, parents=True) - mock_exists.assert_called_with() - mock_trash.assert_not_called() - 
mock_copy.assert_called_with( - src=os.path.join(USER_DIR, "test.py"), - dst=os.path.join(USER_DIR, "folder", "test 4.py"), - ) - - -def test_already_exists_multiple_with_separator( - mock_exists, mock_samefile, mock_copy, mock_trash, mock_mkdir -): - args = { - "basedir": Path.home(), - "path": Path.home() / "test_2.py", - "simulate": False, +from copy import deepcopy +import fs +from conftest import make_files, read_files +from organize import core + +files = { + "files": { + "test.txt": "", + "file.txt": "Hello world\nAnother line", + "another.txt": "", + "folder": { + "x.txt": "", + }, } - mock_exists.side_effect = [True, True, True, False] - mock_samefile.return_value = False - copy = Copy(dest="~/folder/", overwrite=False, counter_separator="_") - copy.run(**args) - mock_mkdir.assert_called_with(exist_ok=True, parents=True) - mock_exists.assert_called_with() - mock_trash.assert_not_called() - mock_copy.assert_called_with( - src=os.path.join(USER_DIR, "test_2.py"), - dst=os.path.join(USER_DIR, "folder", "test_5.py"), - ) - - -def test_makedirs(mock_parent, mock_copy, mock_trash): - copy = Copy(dest="~/some/new/folder/", overwrite=False) - copy.run(**DEFAULT_ARGS) - mock_parent.mkdir.assert_called_with(parents=True, exist_ok=True) - mock_trash.assert_not_called() - mock_copy.assert_called_with( - src=os.path.join(USER_DIR, "test.py"), - dst=os.path.join(USER_DIR, "some", "new", "folder", "test.py"), - ) - - -def test_args(mock_exists, mock_samefile, mock_copy, mock_trash, mock_mkdir): - args = { - "basedir": Path.home(), - "path": Path.home() / "test.py", - "simulate": False, - "nr": {"upper": 1}, - } - mock_exists.return_value = False - mock_samefile.return_value = False - copy = Copy(dest="~/{nr.upper}-name.py", overwrite=False) - copy.run(**args) - mock_mkdir.assert_called_with(exist_ok=True, parents=True) - mock_exists.assert_called_with() - mock_trash.assert_not_called() - mock_copy.assert_called_with( - src=os.path.join(USER_DIR, "test.py"), 
dst=os.path.join(USER_DIR, "1-name.py") - ) +} -def test_path(mock_exists, mock_samefile, mock_copy, mock_trash, mock_mkdir): - mock_exists.return_value = False - mock_samefile.return_value = False - copy = Copy(dest="~/{path.stem}/{path.suffix}/{path.name}", overwrite=False) - copy.run(**DEFAULT_ARGS) - mock_mkdir.assert_called_with(exist_ok=True, parents=True) - mock_exists.assert_called_with() - mock_trash.assert_not_called() - mock_copy.assert_called_with( - src=os.path.join(USER_DIR, "test.py"), - dst=os.path.join(USER_DIR, "test", ".py", "test.py"), - ) +def test_copy_on_itself(): + with fs.open_fs("mem://") as mem: + config = { + "rules": [ + { + "locations": [ + {"path": "files", "filesystem": mem}, + ], + "actions": [ + {"copy": {"dest": "files/", "filesystem": mem}}, + ], + }, + ] + } + make_files(mem, files) + core.run(config, simulate=False) + result = read_files(mem) + assert result == files + + +def test_does_not_create_folder_in_simulation(): + with fs.open_fs("mem://") as mem: + config = { + "rules": [ + { + "locations": [ + {"path": "files", "filesystem": mem}, + ], + "actions": [ + {"copy": {"dest": "files/new-subfolder/", "filesystem": mem}}, + {"copy": {"dest": "files/copyhere/", "filesystem": mem}}, + ], + }, + ] + } + make_files(mem, files) + core.run(config, simulate=True) + result = read_files(mem) + assert result == files + + core.run(config, simulate=False, validate=False) + result = read_files(mem) + + expected = deepcopy(files) + expected["files"]["new-subfolder"] = deepcopy(files["files"]) + expected["files"]["new-subfolder"].pop("folder") + expected["files"]["copyhere"] = deepcopy(files["files"]) + expected["files"]["copyhere"].pop("folder") + + assert result == expected diff --git a/tests/actions/test_echo.py b/tests/actions/test_echo.py index a8533bb9..0751f3af 100644 --- a/tests/actions/test_echo.py +++ b/tests/actions/test_echo.py @@ -1,25 +1,24 @@ +from datetime import datetime from organize.actions import Echo -from pathlib 
import Path - from unittest.mock import patch def test_echo_basic(): echo = Echo("Hello World") with patch.object(echo, "print") as m: - echo.run(path=Path("~"), simulate=False) + echo.run(simulate=False) m.assert_called_with("Hello World") def test_echo_args(): - echo = Echo("This is the year {year}") + echo = Echo('Date formatting: {now.strftime("%Y-%m-%d")}') with patch.object(echo, "print") as m: - echo.run(path=Path("~"), simulate=False, year=2017) - m.assert_called_with("This is the year 2017") + echo.run(simulate=False, now=datetime(2019, 1, 5)) + m.assert_called_with("Date formatting: 2019-01-05") def test_echo_path(): - echo = Echo("{path.stem} for {year}") + echo = Echo("{year}") with patch.object(echo, "print") as m: - echo.run(simulate=False, path=Path("/this/isafile.txt"), year=2017) - m.assert_called_with("isafile for 2017") + echo.run(simulate=False, year=2017) + m.assert_called_with("2017") diff --git a/tests/actions/test_move.py b/tests/actions/test_move.py index 764a396e..0abe50ea 100644 --- a/tests/actions/test_move.py +++ b/tests/actions/test_move.py @@ -1,170 +1,43 @@ -import os - -from organize.actions import Move -from pathlib import Path -from organize.utils import DotDict - -USER_DIR = os.path.expanduser("~") - -ARGS = DotDict(basedir=Path.home(), path=Path.home() / "test.py", simulate=False) - - -def test_tilde_expansion(mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir): - mock_exists.return_value = False - mock_samefile.return_value = False - move = Move(dest="~/newname.py", overwrite=False) - updates = move.run(**ARGS) - mock_mkdir.assert_called_with(exist_ok=True, parents=True) - mock_exists.assert_called_with() - mock_trash.assert_not_called() - mock_move.assert_called_with( - src=os.path.join(USER_DIR, "test.py"), dst=os.path.join(USER_DIR, "newname.py") - ) - assert updates == {"path": Path("~/newname.py").expanduser()} - - -def test_into_folder(mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir): - 
mock_exists.return_value = False - mock_samefile.return_value = False - move = Move(dest="~/somefolder/", overwrite=False) - updates = move.run(**ARGS) - mock_mkdir.assert_called_with(exist_ok=True, parents=True) - mock_exists.assert_called_with() - mock_trash.assert_not_called() - mock_move.assert_called_with( - src=os.path.join(USER_DIR, "test.py"), - dst=os.path.join(USER_DIR, "somefolder", "test.py"), - ) - assert updates == {"path": Path(USER_DIR) / "somefolder" / "test.py"} - - -def test_overwrite(mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir): - mock_exists.return_value = True - mock_samefile.return_value = False - move = Move(dest="~/somefolder/", overwrite=True) - updates = move.run(**ARGS) - mock_mkdir.assert_called_with(exist_ok=True, parents=True) - mock_exists.assert_called_with() - mock_trash.assert_called_with(os.path.join(USER_DIR, "somefolder", "test.py")) - mock_move.assert_called_with( - src=os.path.join(USER_DIR, "test.py"), - dst=os.path.join(USER_DIR, "somefolder", "test.py"), - ) - assert updates is not None - - -def test_already_exists(mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir): - mock_exists.side_effect = [True, False] - mock_samefile.return_value = False - move = Move(dest="~/folder/", overwrite=False) - updates = move.run(**ARGS) - mock_mkdir.assert_called_with(exist_ok=True, parents=True) - mock_exists.assert_called_with() - mock_trash.assert_not_called() - mock_move.assert_called_with( - src=os.path.join(USER_DIR, "test.py"), - dst=os.path.join(USER_DIR, "folder", "test 2.py"), - ) - assert updates is not None - - -def test_already_exists_multiple( - mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir -): - mock_exists.side_effect = [True, True, True, False] - mock_samefile.return_value = False - move = Move(dest="~/folder/", overwrite=False) - updates = move.run(**ARGS) - mock_mkdir.assert_called_with(exist_ok=True, parents=True) - mock_exists.assert_called_with() - 
mock_trash.assert_not_called() - mock_move.assert_called_with( - src=os.path.join(USER_DIR, "test.py"), - dst=os.path.join(USER_DIR, "folder", "test 4.py"), - ) - assert updates is not None - - -def test_already_exists_multiple_separator( - mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir -): - mock_exists.side_effect = [True, True, True, False] - mock_samefile.return_value = False - move = Move(dest="~/folder/", overwrite=False, counter_separator="_") - updates = move.run(**ARGS) - mock_mkdir.assert_called_with(exist_ok=True, parents=True) - mock_exists.assert_called_with() - mock_trash.assert_not_called() - mock_move.assert_called_with( - src=os.path.join(USER_DIR, "test.py"), - dst=os.path.join(USER_DIR, "folder", "test_4.py"), - ) - assert updates is not None - - -def test_makedirs(mock_parent, mock_move, mock_trash): - move = Move(dest="~/some/new/folder/", overwrite=False) - updates = move.run(**ARGS) - mock_parent.mkdir.assert_called_with(parents=True, exist_ok=True) - mock_trash.assert_not_called() - mock_move.assert_called_with( - src=os.path.join(USER_DIR, "test.py"), - dst=os.path.join(USER_DIR, "some", "new", "folder", "test.py"), - ) - assert updates is not None - - -def test_args(mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir): - args = ARGS.merge({"nr": {"upper": 1}}) - mock_exists.return_value = False - mock_samefile.return_value = False - move = Move(dest="~/{nr.upper}-name.py", overwrite=False) - updates = move.run(**args) - mock_mkdir.assert_called_with(exist_ok=True, parents=True) - mock_exists.assert_called_with() - mock_trash.assert_not_called() - mock_move.assert_called_with( - src=os.path.join(USER_DIR, "test.py"), dst=os.path.join(USER_DIR, "1-name.py") - ) - assert updates is not None - - -def test_path(mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir): - mock_exists.return_value = False - mock_samefile.return_value = False - move = Move(dest="~/{path.stem}/{path.suffix}/{path.name}", 
overwrite=False) - updates = move.run(**ARGS) - mock_mkdir.assert_called_with(exist_ok=True, parents=True) - mock_exists.assert_called_with() - mock_trash.assert_not_called() - mock_move.assert_called_with( - src=os.path.join(USER_DIR, "test.py"), - dst=os.path.join(USER_DIR, "test", ".py", "test.py"), - ) - assert updates is not None - - -def test_keep_location(mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir): - mock_exists.return_value = True - mock_samefile.return_value = True - move = Move(dest="~/test.py") - updates = move.run(**ARGS) - mock_mkdir.assert_not_called() - mock_exists.assert_called_with() - mock_trash.assert_not_called() - mock_move.assert_not_called() - assert updates is not None - - -def test_dont_keep_case_sensitive( - mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir -): - mock_exists.return_value = True - mock_samefile.return_value = True - move = Move(dest="~/TEST.PY") - updates = move.run(**ARGS) - assert mock_mkdir.call_count > 0 - mock_exists.assert_called_with() - mock_trash.assert_not_called() - assert mock_move.call_count > 0 - assert updates is not None +import fs +from conftest import make_files, read_files +from organize import core + + +def test_move_on_itself(): + files = { + "files": { + "test.txt": "", + "file.txt": "Hello world\nAnother line", + "another.txt": "", + "folder": { + "x.txt": "", + }, + } + } + with fs.open_fs("mem://") as mem: + config = { + "rules": [ + { + "locations": [ + {"path": "files", "filesystem": mem}, + ], + "actions": [ + {"copy": {"dest": "files/", "filesystem": mem}}, + ], + }, + ] + } + make_files(mem, files) + core.run(config, simulate=False) + result = read_files(mem) + + assert result == { + "files": { + "test.txt": "", + "file.txt": "Hello world\nAnother line", + "another.txt": "", + "folder": { + "x.txt": "", + }, + } + } diff --git a/tests/actions/test_shell.py b/tests/actions/test_shell.py index 1063745b..51be3089 100644 --- a/tests/actions/test_shell.py +++ 
b/tests/actions/test_shell.py @@ -5,21 +5,15 @@ def test_shell_basic(): - with patch("subprocess.call") as m: - shell = Shell("echo 'Hello World'") - shell.run(path=Path.home(), simulate=False) - m.assert_called_with("echo 'Hello World'", shell=True) + shell = Shell("echo 'Hello World'") + result = shell.run(simulate=True) + assert not result + result = shell.run(simulate=False) + assert result["shell"] == {"output": "Hello World\n", "returncode": 0} -def test_shell_args(): - with patch("subprocess.call") as m: - shell = Shell("echo {year}") - shell.run(path=Path.home(), year=2017, simulate=False) - m.assert_called_with("echo 2017", shell=True) - -def test_shell_path(): - with patch("subprocess.call") as m: - shell = Shell("echo {path.stem} for {year}") - shell.run(path=Path("/") / "this" / "isafile.txt", year=2017, simulate=False) - m.assert_called_with("echo isafile for 2017", shell=True) +def test_shell_template_simulation(): + shell = Shell("echo '{msg}'", run_in_simulation=True) + result = shell.run(msg="Hello", simulate=True) + assert result["shell"] == {"output": "Hello\n", "returncode": 0} diff --git a/tests/actions/test_trash.py b/tests/actions/test_trash.py index 31dca3ba..258a1025 100644 --- a/tests/actions/test_trash.py +++ b/tests/actions/test_trash.py @@ -1,12 +1,10 @@ -import os +from unittest.mock import patch from organize.actions import Trash -from pathlib import Path -USER_DIR = os.path.expanduser("~") - -def test_trash(mock_trash): - trash = Trash() - trash.run(path=Path.home() / "this" / "file.tar.gz", simulate=False) - mock_trash.assert_called_with(os.path.join(USER_DIR, "this", "file.tar.gz")) +def test_trash(): + with patch("send2trash.send2trash") as mck: + trash = Trash() + trash.trash(path="~/Desktop/Test.zip", simulate=False) + mck.assert_called_with("~/Desktop/Test.zip") diff --git a/tests/conftest.py b/tests/conftest.py index 220b63a4..f23b4337 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,96 +1,81 @@ -import os -from 
typing import Iterable, Tuple, Union from unittest.mock import patch import pytest +from fs.base import FS +from fs.path import basename, join -from pathlib import Path -from organize.utils import DotDict - -TESTS_FOLDER = os.path.dirname(os.path.abspath(__file__)) - -TESTS_FOLDER = os.path.dirname(os.path.abspath(__file__)) - - -def create_filesystem(tmp_path, files, config): - # create files - for f in files: - try: - name, content = f - except Exception: - name = f - content = "" - p = tmp_path / "files" / Path(name) - p.parent.mkdir(parents=True, exist_ok=True) - with p.open("w") as ptr: - ptr.write(content) - # create config - with (tmp_path / "config.yaml").open("w") as f: - f.write(config) - # change working directory - os.chdir(str(tmp_path)) - - -def assertdir(path, *files): - os.chdir(str(path / "files")) - assert set(files) == set(str(x) for x in Path(".").glob("**/*") if x.is_file()) - - -@pytest.fixture -def mock_exists(): - with patch.object(Path, "exists") as mck: - yield mck - - -@pytest.fixture -def mock_samefile(): - with patch.object(Path, "samefile") as mck: - yield mck - - -@pytest.fixture -def mock_rename(): - with patch.object(Path, "rename") as mck: - yield mck - - -@pytest.fixture -def mock_move(): - with patch("shutil.move") as mck: - yield mck - - -@pytest.fixture -def mock_copy(): - with patch("shutil.copy2") as mck: - yield mck - - -@pytest.fixture -def mock_remove(): - with patch("os.remove") as mck: - yield mck - - -@pytest.fixture -def mock_trash(): - with patch("send2trash.send2trash") as mck: - yield mck - - -@pytest.fixture -def mock_parent(): - with patch.object(Path, "parent") as mck: - yield mck - - -@pytest.fixture -def mock_mkdir(): - with patch.object(Path, "mkdir") as mck: - yield mck +from organize import config @pytest.fixture def mock_echo(): with patch("organize.actions.Echo.print") as mck: yield mck + + +def make_files(fs: FS, layout: dict, path="/"): + """ + layout = { + "folder": { + "subfolder": { + "test.txt": "", 
+ "other.pdf": b"binary", + }, + }, + "file.txt": "Hello world\nAnother line", + } + """ + fs.makedirs(path, recreate=True) + for k, v in layout.items(): + respath = join(path, k) + + # folders are dicts + if isinstance(v, dict): + make_files(fs=fs, layout=v, path=respath) + + # everything else is a file + elif v is None: + fs.touch(respath) + elif isinstance(v, bytes): + fs.writebytes(respath, v) + elif isinstance(v, str): + fs.writetext(respath, v) + else: + raise ValueError("Unknown file data %s" % v) + + +def read_files(fs: FS, path="/"): + result = dict() + for x in fs.walk.files(path, max_depth=0): + result[basename(x)] = fs.readtext(x) + for x in fs.walk.dirs(path, max_depth=0): + result[basename(x)] = read_files(fs, path=join(path, x)) + return result + + +def rules_shortcut(fs: FS, filters, actions, location="files", max_depth=0): + if isinstance(filters, str): + filters = config.load_from_string(filters) + if isinstance(actions, str): + actions = config.load_from_string(actions) + + # for action in actions: + # for opts in action.values(): + # if "filesystem" in opts and opts["filesystem"] == "mem": + # opts["filesystem"] = fs + + return { + "rules": [ + { + "locations": [ + { + "path": location, + "filesystem": fs, + "max_depth": max_depth, + } + ], + "actions": actions, + "filters": filters, + } + ] + } diff --git a/tests/core/test_config.py b/tests/core/test_config.py index 1a7e4420..b67cc52c 100644 --- a/tests/core/test_config.py +++ b/tests/core/test_config.py @@ -1,104 +1,53 @@ +from fs import open_fs import pytest -from organize.actions import Echo, Move, Shell, Trash, Rename -from organize.config import Config, Rule -from organize.filters import Extension, LastModified, FileContent, Filename +from organize import config, core +from schema import SchemaError + + +def validate_and_convert(string: str): + conf = config.load_from_string(string) + conf = config.cleanup(conf) + config.validate(conf) + core.replace_with_instances(conf) + return conf def 
test_basic(): - config = """ + STR = """ rules: - - folders: '~/Desktop' + - locations: '~/' filters: - extension: - jpg - png - extension: txt actions: - - move: {dest: '~/Desktop/New Folder', overwrite: true} - - echo: 'Moved {path}/{extension.upper}' - - folders: - - '~/test1' - - '/test2' - filters: + - move: + dest: '~/New Folder' + - echo: 'Moved {path}/{extension.upper()}' + - locations: + - path: '~/test1' + ignore_errors: true actions: - shell: cmd: 'say {path.stem}' """ - conf = Config.from_string(config) - assert conf.rules == [ - Rule( - folders=["~/Desktop"], - filters=[Extension(".JPG", "PNG"), Extension("txt")], - actions=[ - Move(dest="~/Desktop/New Folder", overwrite=True), - Echo(msg="Moved {path}/{extension.upper}"), - ], - subfolders=False, - system_files=False, - ), - Rule( - folders=["~/test1", "/test2"], - filters=[], - actions=[Shell(cmd="say {path.stem}")], - subfolders=False, - system_files=False, - ), - ] - - -def test_case_insensitive(): - config = """ - rules: - - folders: '~/Desktop' - filters: - - extension: ['JPg', 'png'] - - Extension: txt - actions: - - moVe: {dest: '~/Desktop/New Folder', overwrite: true} - - EC_HO: 'Moved {path}/{extension.upper}' - - folders: - - '~/test1' - - /test2 - filters: - actions: - - SHELL: - cmd: 'say {path.stem}' - """ - conf = Config.from_string(config) - assert conf.rules == [ - Rule( - folders=["~/Desktop"], - filters=[Extension(".JPG", "PNG"), Extension("txt")], - actions=[ - Move(dest="~/Desktop/New Folder", overwrite=True), - Echo(msg="Moved {path}/{extension.upper}"), - ], - subfolders=False, - system_files=False, - ), - Rule( - folders=["~/test1", "/test2"], - filters=[], - actions=[Shell(cmd="say {path.stem}")], - subfolders=False, - system_files=False, - ), - ] + validate_and_convert(STR) def test_yaml_ref(): - config = """ + STR = """ media: &media - wav - png all_folders: &all - - ~/Desktop - - ~/Documents + - "~" + - "/" rules: - - folders: *all + - locations: *all filters: - extension: 
*media - extension: @@ -107,238 +56,215 @@ def test_yaml_ref(): - lastmodified: days: 10 actions: - - echo: - msg: 'Hello World' - - folders: + - echo: 'Hello World' + - locations: - *all - - /more/more - filters: + - path: /more/more + ignore_errors: true actions: - trash """ - conf = Config.from_string(config) - assert conf.rules == [ - Rule( - folders=["~/Desktop", "~/Documents"], - filters=[ - Extension(".wav", ".PNG"), - Extension(".wav", ".PNG", "jpg"), - LastModified(days=10), - ], - actions=[Echo(msg="Hello World")], - subfolders=False, - system_files=False, - ), - Rule( - folders=["~/Desktop", "~/Documents", "/more/more"], - filters=[], - actions=[Trash()], - subfolders=False, - system_files=False, - ), - ] + validate_and_convert(STR) def test_error_filter_dict(): - conf = Config.from_string( - """ + STR = """ rules: - - folders: '/' + - locations: '/' filters: - Extension: 'jpg' + extension: 'jpg' actions: - trash """ - ) - with pytest.raises(Config.FiltersNoListError): - _ = conf.rules + with pytest.raises(SchemaError): + validate_and_convert(STR) -def test_error_action_dict(): - conf = Config.from_string( - """ - rules: - - folders: '/' - filters: - - extension: 'jpg' - actions: - Trash - """ - ) - with pytest.raises(Config.ActionsNoListError): - _ = conf.rules +# def test_error_action_dict(): +# conf = Config.from_string( +# """ +# rules: +# - folders: '/' +# filters: +# - extension: 'jpg' +# actions: +# Trash +# """ +# ) +# with pytest.raises(Config.ActionsNoListError): +# _ = conf.rules -def test_empty_filters(): - conf = """ - rules: - - folders: '/' - filters: - actions: - - trash - - folders: '~/' - actions: - - trash - """ - assert Config.from_string(conf).rules == [ - Rule( - folders=["/"], - filters=[], - actions=[Trash()], - subfolders=False, - system_files=False, - ), - Rule( - folders=["~/"], - filters=[], - actions=[Trash()], - subfolders=False, - system_files=False, - ), - ] +# def test_empty_filters(): +# conf = """ +# rules: +# - 
folders: '/' +# filters: +# actions: +# - trash +# - folders: '~/' +# actions: +# - trash +# """ +# assert Config.from_string(conf).rules == [ +# Rule( +# folders=["/"], +# filters=[], +# actions=[Trash()], +# subfolders=False, +# system_files=False, +# ), +# Rule( +# folders=["~/"], +# filters=[], +# actions=[Trash()], +# subfolders=False, +# system_files=False, +# ), +# ] -@pytest.mark.skip -def test_flatten_filters_and_actions(): - config = """ - folder_aliases: - Downloads: &downloads ~/Downloads/ - Payables_due: &payables_due ~/PayablesDue/ - Payables_paid: &payables_paid ~/Accounting/Expenses/ - Receivables_due: &receivables_due ~/Receivables/ - Receivables_paid: &receivables_paid ~/Accounting/Income/ +# @pytest.mark.skip +# def test_flatten_filters_and_actions(): +# config = """ +# folder_aliases: +# Downloads: &downloads ~/Downloads/ +# Payables_due: &payables_due ~/PayablesDue/ +# Payables_paid: &payables_paid ~/Accounting/Expenses/ +# Receivables_due: &receivables_due ~/Receivables/ +# Receivables_paid: &receivables_paid ~/Accounting/Income/ - defaults: - filters: &default_filters - - extension: pdf - - filecontent: '(?P...)' - actions: &default_actions - - echo: 'Dated: {filecontent.date}' - - echo: 'Stem of filename: {filecontent.stem}' - post_actions: &default_sorting - - rename: '{python.timestamp}-{filecontent.stem}.{extension.lower}' - - move: '{path.parent}/{python.quarter}/' +# defaults: +# filters: &default_filters +# - extension: pdf +# - filecontent: '(?P...)' +# actions: &default_actions +# - echo: 'Dated: {filecontent.date}' +# - echo: 'Stem of filename: {filecontent.stem}' +# post_actions: &default_sorting +# - rename: '{python.timestamp}-{filecontent.stem}.{extension.lower}' +# - move: '{path.parent}/{python.quarter}/' - rules: - - folders: *downloads - filters: - - *default_filters - - filecontent: 'Due Date' # regex to id as payable - - filecontent: '(?P...)' # regex to extract supplier - actions: - - *default_actions - - move: 
*payables_due - - *default_sorting +# rules: +# - folders: *downloads +# filters: +# - *default_filters +# - filecontent: 'Due Date' # regex to id as payable +# - filecontent: '(?P...)' # regex to extract supplier +# actions: +# - *default_actions +# - move: *payables_due +# - *default_sorting - - folders: *downloads - filters: - - *default_filters - - filecontent: 'Account: 000000000' # regex to id as receivables due - - filecontent: '(?P...)' # regex to extract customer - actions: - - *default_actions - - move: *receivables_due - - *default_sorting +# - folders: *downloads +# filters: +# - *default_filters +# - filecontent: 'Account: 000000000' # regex to id as receivables due +# - filecontent: '(?P...)' # regex to extract customer +# actions: +# - *default_actions +# - move: *receivables_due +# - *default_sorting - - folders: *downloads - filters: - - *default_filters - - filecontent: 'PAID' # regex to id as receivables paid - - filecontent: '(?P...)' # regex to extract customer - - filecontent: '(?P...)' # regex to extract date paid - - filename: - startswith: 2020 - actions: - - *default_actions - - move: *receivables_paid - - *default_sorting - - rename: '{filecontent.paid}_{filecontent.stem}.{extension}' - """ - conf = Config.from_string(config) - assert conf.rules == [ - Rule( - folders=["~/Downloads/"], - filters=[ - # default_filters - Extension("pdf"), - FileContent(expr="(?P...)"), - # added filters - FileContent(expr="Due Date"), - FileContent(expr="(?P...)"), - ], - actions=[ - # default_actions - Echo(msg="Dated: {filecontent.date}"), - Echo(msg="Stem of filename: {filecontent.stem}"), - # added actions - Move(dest="~/PayablesDue/", overwrite=False), - # default_sorting - Rename( - name="{python.timestamp}-{filecontent.stem}.{extension.lower}", - overwrite=False, - ), - Move(dest="{path.parent}/{python.quarter}/", overwrite=False), - ], - subfolders=False, - system_files=False, - ), - Rule( - folders=["~/Downloads/"], - filters=[ - # default_filters 
- Extension("pdf"), - FileContent(expr="(?P...)"), - # added filters - FileContent(expr="Account: 000000000"), - FileContent(expr="(?P...)"), - ], - actions=[ - # default_actions - Echo(msg="Dated: {filecontent.date}"), - Echo(msg="Stem of filename: {filecontent.stem}"), - # added actions - Move(dest="~/Receivables/", overwrite=False), - # default_sorting - Rename( - name="{python.timestamp}-{filecontent.stem}.{extension.lower}", - overwrite=False, - ), - Move(dest="{path.parent}/{python.quarter}/", overwrite=False), - ], - subfolders=False, - system_files=False, - ), - Rule( - folders=["~/Downloads/"], - filters=[ - # default_filters - Extension("pdf"), - FileContent(expr="(?P...)"), - # added filters - FileContent(expr="PAID"), - FileContent(expr="(?P...)"), - FileContent(expr="(?P...)"), - Filename(startswith="2020"), - ], - actions=[ - # default_actions - Echo(msg="Dated: {filecontent.date}"), - Echo(msg="Stem of filename: {filecontent.stem}"), - # added actions - Move(dest="~/Accounting/Income/", overwrite=False), - # default_sorting - Rename( - name="{python.timestamp}-{filecontent.stem}.{extension.lower}", - overwrite=False, - ), - Move(dest="{path.parent}/{python.quarter}/", overwrite=False), - # added actions - Rename( - name="{filecontent.paid}_{filecontent.stem}.{extension}", - overwrite=False, - ), - ], - subfolders=False, - system_files=False, - ), - ] +# - folders: *downloads +# filters: +# - *default_filters +# - filecontent: 'PAID' # regex to id as receivables paid +# - filecontent: '(?P...)' # regex to extract customer +# - filecontent: '(?P...)' # regex to extract date paid +# - filename: +# startswith: 2020 +# actions: +# - *default_actions +# - move: *receivables_paid +# - *default_sorting +# - rename: '{filecontent.paid}_{filecontent.stem}.{extension}' +# """ +# conf = Config.from_string(config) +# assert conf.rules == [ +# Rule( +# folders=["~/Downloads/"], +# filters=[ +# # default_filters +# Extension("pdf"), +# FileContent(expr="(?P...)"), 
+# # added filters +# FileContent(expr="Due Date"), +# FileContent(expr="(?P...)"), +# ], +# actions=[ +# # default_actions +# Echo(msg="Dated: {filecontent.date}"), +# Echo(msg="Stem of filename: {filecontent.stem}"), +# # added actions +# Move(dest="~/PayablesDue/", overwrite=False), +# # default_sorting +# Rename( +# name="{python.timestamp}-{filecontent.stem}.{extension.lower}", +# overwrite=False, +# ), +# Move(dest="{path.parent}/{python.quarter}/", overwrite=False), +# ], +# subfolders=False, +# system_files=False, +# ), +# Rule( +# folders=["~/Downloads/"], +# filters=[ +# # default_filters +# Extension("pdf"), +# FileContent(expr="(?P...)"), +# # added filters +# FileContent(expr="Account: 000000000"), +# FileContent(expr="(?P...)"), +# ], +# actions=[ +# # default_actions +# Echo(msg="Dated: {filecontent.date}"), +# Echo(msg="Stem of filename: {filecontent.stem}"), +# # added actions +# Move(dest="~/Receivables/", overwrite=False), +# # default_sorting +# Rename( +# name="{python.timestamp}-{filecontent.stem}.{extension.lower}", +# overwrite=False, +# ), +# Move(dest="{path.parent}/{python.quarter}/", overwrite=False), +# ], +# subfolders=False, +# system_files=False, +# ), +# Rule( +# folders=["~/Downloads/"], +# filters=[ +# # default_filters +# Extension("pdf"), +# FileContent(expr="(?P...)"), +# # added filters +# FileContent(expr="PAID"), +# FileContent(expr="(?P...)"), +# FileContent(expr="(?P...)"), +# Filename(startswith="2020"), +# ], +# actions=[ +# # default_actions +# Echo(msg="Dated: {filecontent.date}"), +# Echo(msg="Stem of filename: {filecontent.stem}"), +# # added actions +# Move(dest="~/Accounting/Income/", overwrite=False), +# # default_sorting +# Rename( +# name="{python.timestamp}-{filecontent.stem}.{extension.lower}", +# overwrite=False, +# ), +# Move(dest="{path.parent}/{python.quarter}/", overwrite=False), +# # added actions +# Rename( +# name="{filecontent.paid}_{filecontent.stem}.{extension}", +# overwrite=False, +# ), +# ], +# 
subfolders=False, +# system_files=False, +# ), +# ] diff --git a/tests/core/test_utils.py b/tests/core/test_utils.py deleted file mode 100644 index cf6ca059..00000000 --- a/tests/core/test_utils.py +++ /dev/null @@ -1,164 +0,0 @@ -from organize.utils import ( - DotDict, - Path, - dict_merge, - find_unused_filename, - increment_filename_version, - splitglob, -) - - -def test_splitglob(): - assert splitglob("~/Downloads") == (Path.home() / "Downloads", "") - assert splitglob(r"/Test/\* tmp\*/*[!H]/**/*.*") == ( - Path(r"/Test/\* tmp\*"), - "*[!H]/**/*.*", - ) - assert splitglob("~/Downloads/Program 0.1*.exe") == ( - Path.home() / "Downloads", - "Program 0.1*.exe", - ) - assert splitglob("~/Downloads/Program[ms].exe") == ( - Path.home() / "Downloads", - "Program[ms].exe", - ) - assert splitglob("~/Downloads/Program.exe") == ( - Path.home() / "Downloads" / "Program.exe", - "", - ) - # https://github.com/tfeldmann/organize/issues/40 - assert splitglob("~/Ältere/Erträgnisaufstellung_*.pdf") == ( - Path.home() / "Ältere", - "Erträgnisaufstellung_*.pdf", - ) - # https://github.com/tfeldmann/organize/issues/39 - assert splitglob("~/Downloads/*.pdf") == (Path.home() / "Downloads", "*.pdf") - - -def test_unused_filename_basic(mock_exists): - mock_exists.return_value = False - assert find_unused_filename(Path("somefile.jpg")) == Path("somefile 2.jpg") - - -def test_unused_filename_separator(mock_exists): - mock_exists.return_value = False - assert find_unused_filename(Path("somefile.jpg"), separator="_") == Path( - "somefile_2.jpg" - ) - - -def test_unused_filename_multiple(mock_exists): - mock_exists.side_effect = [True, True, False] - assert find_unused_filename(Path("somefile.jpg")) == Path("somefile 4.jpg") - - -def test_unused_filename_increase(mock_exists): - mock_exists.side_effect = [True, False] - assert find_unused_filename(Path("somefile 7.jpg")) == Path("somefile 9.jpg") - - -def test_unused_filename_increase_digit(mock_exists): - mock_exists.side_effect = [True, 
False] - assert find_unused_filename(Path("7.gif")) == Path("7 3.gif") - - -def test_increment_filename_version(): - assert ( - increment_filename_version(Path.home() / "f3" / "test_123.7z") - == Path.home() / "f3" / "test_123 2.7z" - ) - assert ( - increment_filename_version(Path.home() / "f3" / "test_123_2 10.7z") - == Path.home() / "f3" / "test_123_2 11.7z" - ) - - -def test_increment_filename_version_separator(): - assert increment_filename_version(Path("test_123.7z"), separator="_") == Path( - "test_124.7z" - ) - assert increment_filename_version(Path("test_123_2.7z"), separator="_") == Path( - "test_123_3.7z" - ) - - -def test_increment_filename_version_no_separator(): - assert increment_filename_version(Path("test.7z"), separator="") == Path("test2.7z") - assert increment_filename_version(Path("test 10.7z"), separator="") == Path( - "test 102.7z" - ) - - -def test_merges_dicts(): - a = {"a": 1, "b": {"b1": 2, "b2": 3}} - b = {"a": 1, "b": {"b1": 4}} - - assert dict_merge(a, b)["a"] == 1 - assert dict_merge(a, b)["b"]["b2"] == 3 - assert dict_merge(a, b)["b"]["b1"] == 4 - - -def test_returns_copy(): - a = {"regex": {"first": "A", "second": "B"}} - b = {"regex": {"third": "C"}} - - x = dict_merge(a, b) - a["regex"]["first"] = "X" - assert x["regex"]["first"] == "A" - assert x["regex"]["second"] == "B" - assert x["regex"]["third"] == "C" - - -def test_inserts_new_keys(): - """Will it insert new keys by default?""" - a = {"a": 1, "b": {"b1": 2, "b2": 3}} - b = {"a": 1, "b": {"b1": 4, "b3": 5}, "c": 6} - - assert dict_merge(a, b)["a"] == 1 - assert dict_merge(a, b)["b"]["b2"] == 3 - assert dict_merge(a, b)["b"]["b1"] == 4 - assert dict_merge(a, b)["b"]["b3"] == 5 - assert dict_merge(a, b)["c"] == 6 - - -def test_does_not_insert_new_keys(): - """Will it avoid inserting new keys when required?""" - a = {"a": 1, "b": {"b1": 2, "b2": 3}} - b = {"a": 1, "b": {"b1": 4, "b3": 5}, "c": 6} - - assert dict_merge(a, b, add_keys=False)["a"] == 1 - assert dict_merge(a, b, 
add_keys=False)["b"]["b2"] == 3 - assert dict_merge(a, b, add_keys=False)["b"]["b1"] == 4 - try: - assert dict_merge(a, b, add_keys=False)["b"]["b3"] == 5 - except KeyError: - pass - else: - raise Exception("New keys added when they should not be") - - try: - assert dict_merge(a, b, add_keys=False)["b"]["b3"] == 6 - except KeyError: - pass - else: - raise Exception("New keys added when they should not be") - - -def test_dotdict_merge(): - a = DotDict() - b = {1: {2: 2, 3: 3, 4: {5: "fin."}}} - a.update(b) - assert a == b - b[1][2] = 5 - assert a != b - - a.update({1: {4: {5: "new.", 6: "fin."}, 2: "x"}}) - assert a == {1: {2: "x", 3: 3, 4: {5: "new.", 6: "fin."}}} - - -def test_dotdict_keeptype(): - a = DotDict() - a.update({"nr": {"upper": 1}}) - assert a.nr.upper == 1 - - assert "{nr.upper}".format(**a) == "1" diff --git a/tests/docs/__init__.py b/tests/docs/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/docs/test_docs.py b/tests/docs/test_docs.py new file mode 100644 index 00000000..38bc441f --- /dev/null +++ b/tests/docs/test_docs.py @@ -0,0 +1,49 @@ +""" +Tests all snippets in the docs and readme like this: + +```yaml +rules: +``` + +To exclude, use shorthand `yml`. 
+""" + +import re + +import fs +from schema import SchemaError + +from organize.actions import ACTIONS +from organize.config import CONFIG_SCHEMA, load_from_string +from organize.filters import FILTERS + +RE_CONFIG = re.compile(r"```yaml\n(?Prules:(?:.*?\n)+?)```", re.MULTILINE) + + +def test_examples_are_valid(): + docdir = fs.open_fs(".") + for f in docdir.walk.files(filter=["*.md"], max_depth=2): + text = docdir.readtext(f) + for match in RE_CONFIG.findall(text): + err = "" + try: + config = load_from_string(match) + CONFIG_SCHEMA.validate(config) + except SchemaError as e: + print(f"{f}:\n({match})") + err = e.autos[-1] + assert not err + + +def test_all_filters_documented(): + docdir = fs.open_fs("docs") + filter_docs = docdir.readtext("filters.md") + for name in FILTERS.keys(): + assert "## {}".format(name) in filter_docs + + +def test_all_actions_documented(): + docdir = fs.open_fs("docs") + action_docs = docdir.readtext("actions.md") + for name in ACTIONS.keys(): + assert "## {}".format(name) in action_docs diff --git a/tests/filters/test_created.py b/tests/filters/test_created.py index f49726ad..d3a3fc8b 100644 --- a/tests/filters/test_created.py +++ b/tests/filters/test_created.py @@ -1,26 +1,17 @@ -from unittest.mock import patch +from datetime import datetime, timedelta -import pendulum - -from pathlib import Path from organize.filters import Created def test_min(): - now = pendulum.now() - created = Created(days=10, hours=12, mode="older") - with patch.object(created, "_created") as mock_cr: - mock_cr.return_value = now - pendulum.duration(days=10, hours=0) - assert created.run(path=Path("~")) is None - mock_cr.return_value = now - pendulum.duration(days=10, hours=13) - assert created.run(path=Path("~")) + now = datetime.now() + ct = Created(days=10, hours=12, mode="older") + assert not ct.matches_created_time(now - timedelta(days=10, hours=0)) + assert ct.matches_created_time(now - timedelta(days=10, hours=13)) def test_max(): - now = pendulum.now() - 
created = Created(days=10, hours=12, mode="newer") - with patch.object(created, "_created") as mock_cr: - mock_cr.return_value = now - pendulum.duration(days=10, hours=0) - assert created.run(path=Path("~")) - mock_cr.return_value = now - pendulum.duration(days=10, hours=13) - assert created.run(path=Path("~")) is None + now = datetime.now() + ct = Created(days=10, hours=12, mode="newer") + assert ct.matches_created_time(now - timedelta(days=10, hours=0)) + assert not ct.matches_created_time(now - timedelta(days=10, hours=13)) diff --git a/tests/filters/test_extension.py b/tests/filters/test_extension.py index 516b57dc..1f468fb4 100644 --- a/tests/filters/test_extension.py +++ b/tests/filters/test_extension.py @@ -1,37 +1,47 @@ -from pathlib import Path +from fs import open_fs +from fs.path import dirname from organize.filters import Extension def test_extension(): extension = Extension("JPG", ".gif", "pdf") testpathes = [ - (Path("~/somefile.pdf"), True), - (Path("/home/test/somefile.pdf.jpeg"), False), - (Path("/home/test/gif.TXT"), False), - (Path("/home/test/txt.GIF"), True), - (Path("~/somefile.pdf"), True), + ("/somefile.pdf", True), + ("/home/test/somefile.pdf.jpeg", False), + ("/home/test/gif.TXT", False), + ("/home/test/txt.GIF", True), + ("/somefile.pdf", True), ] - for path, match in testpathes: - assert bool(extension.matches(path)) == match + with open_fs("mem://", writeable=True, create=True) as mem: + for f, match in testpathes: + mem.makedirs(dirname(f), recreate=True) + mem.touch(f) + assert extension.run(fs=mem, fs_path=f).matches == match def test_extension_empty(): - extension = Extension() - assert extension.matches(Path("~/test.txt")) + with open_fs("mem://") as mem: + mem.touch("test.txt") + extension = Extension() + assert extension.run(fs=mem, fs_path="test.txt").matches def test_extension_result(): - path = Path("~/somefile.TxT") - extension = Extension("txt") - assert extension.matches(path) - result = 
extension.run(path=path)["extension"] - assert str(result) == "TxT" - assert result.lower == "txt" - assert result.upper == "TXT" - - extension = Extension(".txt") - assert extension.matches(path) - result = extension.run(path=path)["extension"] - assert str(result) == "TxT" - assert result.lower == "txt" - assert result.upper == "TXT" + with open_fs("mem://") as mem: + + path = "somefile.TxT" + mem.touch(path) + + extension = Extension("txt") + assert extension.matches(".TxT") + result = extension.run(fs=mem, fs_path=path).updates["extension"] + assert str(result) == "TxT" + assert result.lower() == "txt" + assert result.upper() == "TXT" + + extension = Extension(".txt") + assert extension.matches(".TXT") + result = extension.run(fs=mem, fs_path=path).updates["extension"] + assert str(result) == "TxT" + assert result.lower() == "txt" + assert result.upper() == "TXT" diff --git a/tests/filters/test_filename.py b/tests/filters/test_filename.py deleted file mode 100644 index 43efcc64..00000000 --- a/tests/filters/test_filename.py +++ /dev/null @@ -1,102 +0,0 @@ -from pathlib import Path -from organize.filters import Filename - - -def test_filename_startswith(): - filename = Filename(startswith="begin") - assert filename.matches(Path("~/here/beginhere.pdf")) - assert not filename.matches(Path("~/here/.beginhere.pdf")) - assert not filename.matches(Path("~/here/herebegin.begin")) - - -def test_filename_contains(): - filename = Filename(contains="begin") - assert filename.matches(Path("~/here/beginhere.pdf")) - assert filename.matches(Path("~/here/.beginhere.pdf")) - assert filename.matches(Path("~/here/herebegin.begin")) - assert not filename.matches(Path("~/here/other.begin")) - - -def test_filename_endswith(): - filename = Filename(endswith="end") - assert filename.matches(Path("~/here/hereend.pdf")) - assert not filename.matches(Path("~/here/end.tar.gz")) - assert not filename.matches(Path("~/here/theendishere.txt")) - - -def test_filename_multiple(): - filename = 
Filename(startswith="begin", contains="con", endswith="end") - assert filename.matches(Path("~/here/begin_somethgin_con_end.pdf")) - assert not filename.matches(Path("~/here/beginend.pdf")) - assert not filename.matches(Path("~/here/begincon.begin")) - assert not filename.matches(Path("~/here/conend.begin")) - assert filename.matches(Path("~/here/beginconend.begin")) - - -def test_filename_case(): - filename = Filename( - startswith="star", contains="con", endswith="end", case_sensitive=False - ) - assert filename.matches(Path("~/STAR_conEnD.dpf")) - assert not filename.matches(Path("~/here/STAREND.pdf")) - assert not filename.matches(Path("~/here/STARCON.begin")) - assert not filename.matches(Path("~/here/CONEND.begin")) - assert filename.matches(Path("~/here/STARCONEND.begin")) - - -def test_filename_list(): - filename = Filename( - startswith="_", - contains=["1", "A", "3", "6"], - endswith=["5", "6"], - case_sensitive=False, - ) - assert filename.matches(Path("~/_15.dpf")) - assert filename.matches(Path("~/_A5.dpf")) - assert filename.matches(Path("~/_A6.dpf")) - assert filename.matches(Path("~/_a6.dpf")) - assert filename.matches(Path("~/_35.dpf")) - assert filename.matches(Path("~/_36.dpf")) - assert filename.matches(Path("~/_somethinga56")) - assert filename.matches(Path("~/_6")) - assert not filename.matches(Path("~/")) - assert not filename.matches(Path("~/a_5")) - - -def test_filename_list_case_sensitive(): - filename = Filename( - startswith="_", - contains=["1", "A", "3", "7"], - endswith=["5", "6"], - case_sensitive=True, - ) - assert filename.matches(Path("~/_15.dpf")) - assert filename.matches(Path("~/_A5.dpf")) - assert filename.matches(Path("~/_A6.dpf")) - assert not filename.matches(Path("~/_a6.dpf")) - assert filename.matches(Path("~/_35.dpf")) - assert filename.matches(Path("~/_36.dpf")) - assert filename.matches(Path("~/_somethingA56")) - assert not filename.matches(Path("~/_6")) - assert not filename.matches(Path("~/_a5.dpf")) - assert not 
filename.matches(Path("~/-A5.dpf")) - assert not filename.matches(Path("~/")) - assert not filename.matches(Path("~/_a5")) - - -def test_filename_match(): - fn = Filename("Invoice_*_{year:int}_{month}_{day}") - p = "~/Documents/Invoice_RE1001_2021_01_31.pdf" - assert fn.matches(Path(p)) - assert fn.run(path=Path(p)) == { - "filename": {"year": 2021, "month": "01", "day": "31"} - } - - -def test_filename_match_case_insensitive(): - case = Filename("upper_{m1}_{m2}", case_sensitive=True) - icase = Filename("upper_{m1}_{m2}", case_sensitive=False) - p = "~/Documents/UPPER_MiXed_lower.pdf" - assert icase.matches(Path(p)) - assert icase.run(path=Path(p)) == {"filename": {"m1": "MiXed", "m2": "lower"}} - assert not case.matches(Path(p)) diff --git a/tests/filters/test_filesize.py b/tests/filters/test_filesize.py deleted file mode 100644 index fa03a895..00000000 --- a/tests/filters/test_filesize.py +++ /dev/null @@ -1,28 +0,0 @@ -from organize.filters import FileSize - - -def test_constrains_mope1(): - assert not FileSize("<1b,>2b").matches(1) - assert FileSize(">=1b,<2b").matches(1) - assert not FileSize(">1.000001b").matches(1) - assert FileSize("<1.000001B").matches(1) - assert FileSize("<1.000001").matches(1) - assert FileSize("<=1,>=0.001kb").matches(1) - assert FileSize("<1").matches(0) - assert not FileSize(">1").matches(0) - assert not FileSize("<1,>1b").matches(0) - assert FileSize(">99.99999GB").matches(100000000000) - assert FileSize("0").matches(0) - - -def test_constrains_base(): - assert FileSize(">1kb,<1kib").matches(1010) - assert FileSize(">1k,<1ki").matches(1010) - assert FileSize("1k").matches(1000) - assert FileSize("1000").matches(1000) - - -def test_other(): - assert FileSize("<100 Mb").matches(20) - assert FileSize("<100 Mb, <10 mb, <1 mb, > 0").matches(20) - assert FileSize(["<100 Mb", ">= 0 Tb"]).matches(20) diff --git a/tests/filters/test_last_modified.py b/tests/filters/test_last_modified.py deleted file mode 100644 index 7e8f105f..00000000 --- 
a/tests/filters/test_last_modified.py +++ /dev/null @@ -1,26 +0,0 @@ -from unittest.mock import patch - -import pendulum - -from pathlib import Path -from organize.filters import LastModified - - -def test_min(): - now = pendulum.now() - last_modified = LastModified(days=10, hours=12, mode="older") - with patch.object(last_modified, "_last_modified") as mock_lm: - mock_lm.return_value = now - pendulum.duration(days=10, hours=0) - assert not last_modified.run(path=Path("~")) - mock_lm.return_value = now - pendulum.duration(days=10, hours=13) - assert last_modified.run(path=Path("~")) - - -def test_max(): - now = pendulum.now() - last_modified = LastModified(days=10, hours=12, mode="newer") - with patch.object(last_modified, "_last_modified") as mock_lm: - mock_lm.return_value = now - pendulum.duration(days=10, hours=0) - assert last_modified.run(path=Path("~")) - mock_lm.return_value = now - pendulum.duration(days=10, hours=13) - assert not last_modified.run(path=Path("~")) diff --git a/tests/filters/test_lastmodified.py b/tests/filters/test_lastmodified.py new file mode 100644 index 00000000..67cb4499 --- /dev/null +++ b/tests/filters/test_lastmodified.py @@ -0,0 +1,17 @@ +from datetime import datetime, timedelta + +from organize.filters import LastModified + + +def test_min(): + now = datetime.now() + lm = LastModified(days=10, hours=12, mode="older") + assert not lm.matches_lastmodified_time(now - timedelta(days=10, hours=0)) + assert lm.matches_lastmodified_time(now - timedelta(days=10, hours=13)) + + +def test_max(): + now = datetime.now() + lm = LastModified(days=10, hours=12, mode="newer") + assert lm.matches_lastmodified_time(now - timedelta(days=10, hours=0)) + assert not lm.matches_lastmodified_time(now - timedelta(days=10, hours=13)) diff --git a/tests/filters/test_name.py b/tests/filters/test_name.py new file mode 100644 index 00000000..7c4c8253 --- /dev/null +++ b/tests/filters/test_name.py @@ -0,0 +1,108 @@ +import fs +from organize.filters import Name 
+ + +def test_name_startswith(): + name = Name(startswith="begin") + assert name.matches("beginhere") + assert not name.matches(".beginhere") + assert not name.matches("herebegin") + + +def test_name_contains(): + name = Name(contains="begin") + assert name.matches("beginhere") + assert name.matches(".beginhere") + assert name.matches("herebegin") + assert not name.matches("other") + + +def test_name_endswith(): + name = Name(endswith="end") + assert name.matches("hereend") + assert name.matches("end") + assert not name.matches("theendishere") + + +def test_name_multiple(): + name = Name(startswith="begin", contains="con", endswith="end") + assert name.matches("begin_somethgin_con_end") + assert not name.matches("beginend") + assert not name.matches("begincon") + assert not name.matches("conend") + assert name.matches("beginconend") + + +def test_name_case(): + name = Name(startswith="star", contains="con", endswith="end", case_sensitive=False) + assert name.matches("STAR_conEnD") + assert not name.matches("STAREND") + assert not name.matches("STARCON") + assert not name.matches("CONEND") + assert name.matches("STARCONEND") + + +def test_name_list(): + name = Name( + startswith="_", + contains=["1", "A", "3", "6"], + endswith=["5", "6"], + case_sensitive=False, + ) + assert name.matches("_15") + assert name.matches("_A5") + assert name.matches("_A6") + assert name.matches("_a6") + assert name.matches("_35") + assert name.matches("_36") + assert name.matches("_somethinga56") + assert name.matches("_6") + assert not name.matches("") + assert not name.matches("a_5") + + +def test_name_list_case_sensitive(): + name = Name( + startswith="_", + contains=["1", "A", "3", "7"], + endswith=["5", "6"], + case_sensitive=True, + ) + assert name.matches("_15") + assert name.matches("_A5") + assert name.matches("_A6") + assert not name.matches("_a6") + assert name.matches("_35") + assert name.matches("_36") + assert name.matches("_somethingA56") + assert not name.matches("_6") + 
assert not name.matches("_a5") + assert not name.matches("-A5") + assert not name.matches("") + assert not name.matches("_a5") + + +def test_name_match(): + with fs.open_fs("mem://") as mem: + p = "Invoice_RE1001_2021_01_31" + fs_path = p + ".txt" + mem.touch(fs_path) + fn = Name("Invoice_*_{year:int}_{month}_{day}") + assert fn.matches(p) + result = fn.run(fs=mem, fs_path=fs_path) + assert result.matches + assert result.updates == {"name": {"year": 2021, "month": "01", "day": "31"}} + + +def test_name_match_case_insensitive(): + with fs.open_fs("mem://") as mem: + p = "UPPER_MiXed_lower" + fs_path = p + ".txt" + mem.touch(fs_path) + case = Name("upper_{m1}_{m2}", case_sensitive=True) + icase = Name("upper_{m1}_{m2}", case_sensitive=False) + assert icase.matches(p) + result = icase.run(fs=mem, fs_path=fs_path) + assert result.matches + assert result.updates == {"name": {"m1": "MiXed", "m2": "lower"}} + assert not case.matches(p) diff --git a/tests/filters/test_python.py b/tests/filters/test_python.py index a408ae8b..dfe68b3a 100644 --- a/tests/filters/test_python.py +++ b/tests/filters/test_python.py @@ -1,13 +1,12 @@ from organize.filters import Python -from pathlib import Path def test_basic(): p = Python( """ - print(path) - return 1 + return "some-string" """ ) - assert p.run(path=Path.home()) - assert p.run(path=Path.home()) == {"python": 1} + result = p.run() + assert result.matches + assert result.updates == {"python": "some-string"} diff --git a/tests/filters/test_regex.py b/tests/filters/test_regex.py index e3c02957..bd98db27 100644 --- a/tests/filters/test_regex.py +++ b/tests/filters/test_regex.py @@ -1,21 +1,20 @@ from pathlib import Path from organize.filters import Regex -from organize.utils import DotDict TESTDATA = [ - (Path("~/Invoices/RG123456123456-sig.pdf"), True, "123456123456"), - (Path("~/Invoices/RG002312321542-sig.pdf"), True, "002312321542"), - (Path("~/Invoices/RG002312321542.pdf"), False, None), + ("RG123456123456-sig.pdf", True, 
"123456123456"), + ("RG002312321542-sig.pdf", True, "002312321542"), + ("RG002312321542.pdf", False, None), ] def test_regex_backslash(): regex = Regex(r"^\.pdf$") - assert regex.matches(Path(".pdf")) - assert not regex.matches(Path("+pdf")) - assert not regex.matches(Path("/pdf")) - assert not regex.matches(Path("\\pdf")) + assert regex.matches(".pdf") + assert not regex.matches("+pdf") + assert not regex.matches("/pdf") + assert not regex.matches("\\pdf") def test_regex_basic(): @@ -26,15 +25,17 @@ def test_regex_basic(): def test_regex_return(): regex = Regex(r"^RG(?P\d{12})-sig\.pdf$") - for path, valid, result in TESTDATA: + for path, valid, test_result in TESTDATA: if valid: - dct = regex.run(path=path) - assert dct == {"regex": {"the_number": result}} + result = regex.run(fs_path=path) + assert result.updates == {"regex": {"the_number": test_result}} + assert result.matches == True def test_regex_umlaut(): regex = Regex(r"^Erträgnisaufstellung-(?P\d*)\.pdf") - doc = Path("~/Documents/Erträgnisaufstellung-1998.pdf") + doc = "Erträgnisaufstellung-1998.pdf" assert regex.matches(doc) - dct = regex.run(path=doc) - assert dct == {"regex": {"year": "1998"}} + result = regex.run(fs_path=doc) + assert result.updates == {"regex": {"year": "1998"}} + assert result.matches diff --git a/tests/filters/test_size.py b/tests/filters/test_size.py new file mode 100644 index 00000000..1bbd41eb --- /dev/null +++ b/tests/filters/test_size.py @@ -0,0 +1,28 @@ +from organize.filters import Size + + +def test_constrains_mope1(): + assert not Size("<1b,>2b").matches(1) + assert Size(">=1b,<2b").matches(1) + assert not Size(">1.000001b").matches(1) + assert Size("<1.000001B").matches(1) + assert Size("<1.000001").matches(1) + assert Size("<=1,>=0.001kb").matches(1) + assert Size("<1").matches(0) + assert not Size(">1").matches(0) + assert not Size("<1,>1b").matches(0) + assert Size(">99.99999GB").matches(100000000000) + assert Size("0").matches(0) + + +def test_constrains_base(): + 
assert Size(">1kb,<1kib").matches(1010) + assert Size(">1k,<1ki").matches(1010) + assert Size("1k").matches(1000) + assert Size("1000").matches(1000) + + +def test_other(): + assert Size("<100 Mb").matches(20) + assert Size("<100 Mb, <10 mb, <1 mb, > 0").matches(20) + assert Size(["<100 Mb", ">= 0 Tb"]).matches(20) diff --git a/tests/integration/test_codepost_usecase.py b/tests/integration/test_codepost_usecase.py index 4aa40f0b..e9f09eba 100644 --- a/tests/integration/test_codepost_usecase.py +++ b/tests/integration/test_codepost_usecase.py @@ -1,21 +1,25 @@ -from conftest import assertdir, create_filesystem -from organize.cli import main +import fs +from conftest import make_files, read_files +from organize import config, core -def test_codepost_usecase(tmp_path): - create_filesystem( - tmp_path, - files=[ - "Devonte-Betts.txt", - "Alaina-Cornish.txt", - "Dimitri-Bean.txt", - "Lowri-Frey.txt", - "Someunknown-User.txt", - ], - config=r""" - rules: - - folders: files - filters: + +def test_codepost_usecase(): + files = { + "files": { + "Devonte-Betts.txt": "", + "Alaina-Cornish.txt": "", + "Dimitri-Bean.txt": "", + "Lowri-Frey.txt": "", + "Someunknown-User.txt": "", + } + } + + with fs.open_fs("temp://") as mem: + make_files(mem, files) + + filters = config.load_from_string( + """ - extension: txt - regex: (?P\w+)-(?P\w+)\..* - python: | @@ -25,18 +29,33 @@ def test_codepost_usecase(tmp_path): "Bean": "dbean@aol.com", "Frey": "l-frey@frey.org", } - if regex.lastname in emails: - return {"mail": emails[regex.lastname]} - actions: - - rename: '{python.mail}.txt' - """, - ) - main(["run", "--config-file=%s" % (tmp_path / "config.yaml")]) - assertdir( - tmp_path, - "dbetts@mail.de.txt", - "acornish@google.com.txt", - "dbean@aol.com.txt", - "l-frey@frey.org.txt", - "Someunknown-User.txt", # no email found -> keep file - ) + if regex["lastname"] in emails: + return {"mail": emails[regex["lastname"]]} + """ + ) + conf = { + "rules": [ + { + "locations": [ + {"path": 
"files", "filesystem": mem}, + ], + "filters": filters, + "actions": [ + {"move": {"dest": "files/{python.mail}.txt", "filesystem": mem}} + ], + }, + ] + } + core.run(conf, simulate=False) + result = read_files(mem) + mem.tree() + + assert result == { + "files": { + "dbetts@mail.de.txt": "", + "acornish@google.com.txt": "", + "dbean@aol.com.txt": "", + "l-frey@frey.org.txt": "", + "Someunknown-User.txt": "", + } + } diff --git a/tests/integration/test_delete.py b/tests/integration/test_delete.py new file mode 100644 index 00000000..c2d15707 --- /dev/null +++ b/tests/integration/test_delete.py @@ -0,0 +1,44 @@ +import fs +from conftest import make_files, read_files +from organize import core + + +def test_delete(): + files = { + "files": { + "folder": { + "subfolder": { + "test.txt": "", + "other.pdf": "binary", + }, + "file.txt": "Hello world\nAnother line", + }, + } + } + with fs.open_fs("mem://") as mem: + config = { + "rules": [ + { + "locations": [{"path": "files", "filesystem": mem}], + "actions": ["delete"], + }, + { + "locations": [{"path": "files", "filesystem": mem}], + "targets": "dirs", + "actions": ["delete"], + }, + ] + } + make_files(mem, files) + + # simulate + core.run(config, simulate=True) + result = read_files(mem) + assert result == files + + # run + core.run(config, simulate=False, validate=False) + result = read_files(mem) + assert result == { + "files": {}, + } diff --git a/tests/integration/test_dict_merge.py b/tests/integration/test_dict_merge.py index 839e8043..dbe4f3e3 100644 --- a/tests/integration/test_dict_merge.py +++ b/tests/integration/test_dict_merge.py @@ -1,26 +1,32 @@ from unittest.mock import call -from conftest import create_filesystem -from organize.cli import main +import fs +from conftest import make_files, rules_shortcut +from organize import core -def test_multiple_regex_placeholders(tmp_path, mock_echo): - create_filesystem( - tmp_path, - files=["test-123.jpg", "other-456.pdf"], - config=r""" - rules: - - folders: files 
- filters: - - regex: (?P\w+)-(?P\d+).* - - regex: (?P.+?)\.\w{3} - - extension - actions: - - echo: '{regex.word} {regex.number} {regex.all} {extension}' - """, - ) - main(["run", "--config-file=%s" % (tmp_path / "config.yaml")]) - mock_echo.assert_has_calls( - (call("test 123 test-123 jpg"), call("other 456 other-456 pdf"),), - any_order=True, - ) +def test_multiple_regex_placeholders(mock_echo): + files = { + "files": {"test-123.jpg": "", "other-456.pdf": ""}, + } + with fs.open_fs("mem://") as mem: + rules = rules_shortcut( + fs=mem, + filters=r""" + - regex: (?P\w+)-(?P\d+).* + - regex: (?P.+?)\.\w{3} + - extension + """, + actions=""" + - echo: '{regex.word} {regex.number} {regex.all} {extension}' + """, + ) + make_files(mem, files) + core.run(rules, simulate=False, validate=False) + mock_echo.assert_has_calls( + ( + call("test 123 test-123 jpg"), + call("other 456 other-456 pdf"), + ), + any_order=True, + ) diff --git a/tests/integration/test_duplicate.py b/tests/integration/test_duplicate.py index a0633c57..174e59e1 100644 --- a/tests/integration/test_duplicate.py +++ b/tests/integration/test_duplicate.py @@ -1,60 +1,75 @@ -from conftest import create_filesystem, assertdir -from organize.cli import main +import fs +from conftest import make_files, rules_shortcut, read_files +from organize import core CONTENT_SMALL = "COPY CONTENT" CONTENT_LARGE = "XYZ" * 3000 -def test_duplicate_smallfiles(tmp_path): - create_filesystem( - tmp_path, - files=[ - ("unique.txt", "I'm unique."), - ("unique_too.txt", "I'm unique, too."), - ("a.txt", CONTENT_SMALL), - ("copy2.txt", CONTENT_SMALL), - ("other/copy.txt", CONTENT_SMALL), - ("other/copy.jpg", CONTENT_SMALL), - ("large_unique.txt", CONTENT_LARGE), - ("other/large.txt", CONTENT_LARGE), - ], - config=""" - rules: - - folders: files - subfolders: true - filters: - - duplicate - actions: - - trash - """, - ) - main(["run", "--config-file=%s" % (tmp_path / "config.yaml")]) - assertdir( - tmp_path, "unique.txt", 
"unique_too.txt", "a.txt", "large_unique.txt" - ) - - -def test_duplicate_largefiles(tmp_path): - create_filesystem( - tmp_path, - files=[ - ("unique.txt", CONTENT_LARGE + "1"), - ("unique_too.txt", CONTENT_LARGE + "2"), - ("a.txt", CONTENT_LARGE), - ("copy2.txt", CONTENT_LARGE), - ("other/copy.txt", CONTENT_LARGE), - ("other/copy.jpg", CONTENT_LARGE), - ("other/large.txt", CONTENT_LARGE), - ], - config=""" - rules: - - folders: files - subfolders: true - filters: - - duplicate - actions: - - trash - """, - ) - main(["run", "--config-file=%s" % (tmp_path / "config.yaml")]) - assertdir(tmp_path, "unique.txt", "unique_too.txt", "a.txt") +def test_duplicate_smallfiles(): + files = { + "files": { + "unique.txt": "I'm unique.", + "unique_too.txt": "I'm unique: too.", + "a.txt": CONTENT_SMALL, + "copy2.txt": CONTENT_SMALL, + "other": { + "copy.txt": CONTENT_SMALL, + "copy.jpg": CONTENT_SMALL, + "large.txt": CONTENT_LARGE, + }, + "large_unique.txt": CONTENT_LARGE, + }, + } + + with fs.open_fs("mem://") as mem: + make_files(mem, files) + rules = rules_shortcut( + mem, + filters="- duplicate", + actions="- echo: '{fs_path} is duplicate of {duplicate}'\n- delete", + max_depth=None, + ) + core.run(rules, simulate=False, validate=False) + result = read_files(mem) + mem.tree() + assert result == { + "files": { + "unique.txt": "I'm unique.", + "unique_too.txt": "I'm unique: too.", + "a.txt": CONTENT_SMALL, + "other": { + "large.txt": CONTENT_LARGE, + }, + }, + } + + +# main(["run", "--config-file=%s" % (tmp_path / "config.yaml")]) +# assertdir(tmp_path,) + + +# def test_duplicate_largefiles(tmp_path): +# create_filesystem( +# tmp_path, +# files=[ +# ("unique.txt", CONTENT_LARGE + "1"), +# ("unique_too.txt", CONTENT_LARGE + "2"), +# ("a.txt", CONTENT_LARGE), +# ("copy2.txt", CONTENT_LARGE), +# ("other/copy.txt", CONTENT_LARGE), +# ("other/copy.jpg", CONTENT_LARGE), +# ("other/large.txt", CONTENT_LARGE), +# ], +# config=""" +# rules: +# - folders: files +# subfolders: true +# 
filters: +# - duplicate +# actions: +# - trash +# """, +# ) +# main(["run", "--config-file=%s" % (tmp_path / "config.yaml")]) +# assertdir(tmp_path, "unique.txt", "unique_too.txt", "a.txt") diff --git a/tests/integration/test_rename.py b/tests/integration/test_rename.py index 977eebb8..5e8df5bf 100644 --- a/tests/integration/test_rename.py +++ b/tests/integration/test_rename.py @@ -1,33 +1,44 @@ -from conftest import create_filesystem, assertdir -from organize.cli import main +import fs +from conftest import rules_shortcut, make_files, read_files +from organize import core -def test_rename_issue51(tmp_path): +def test_rename_issue52(): # test for issue https://github.com/tfeldmann/organize/issues/51 - create_filesystem( - tmp_path, - files=["19asd_WF_test2.pdf", "other.pdf", "18asd_WFX_test2.pdf",], - config=r""" - rules: - - folders: files - filters: - - filename: - startswith: "19" - contains: - - "_WF_" - actions: - - rename: "{path.stem}_unread{path.suffix}" - - copy: - dest: "files/copy/" - overwrite: false - counter_separator: "_" - """, - ) - main(["run", "--config-file=%s" % (tmp_path / "config.yaml")]) - assertdir( - tmp_path, - "19asd_WF_test2_unread.pdf", - "other.pdf", - "copy/19asd_WF_test2_unread.pdf", - "18asd_WFX_test2.pdf", - ) + files = { + "files": { + "19asd_WF_test2.PDF": "", + "other.pdf": "", + "18asd_WFX_test2.pdf": "", + } + } + with fs.open_fs("temp://") as mem: + make_files(mem, files) + config = rules_shortcut( + mem, + filters=""" + - extension + - name: + startswith: "19" + contains: + - "_WF_" + """, + actions=[ + {"rename": "{path.stem}_unread.{extension.lower()}"}, + {"copy": {"dest": "files/copy/", "filesystem": mem}}, + ], + ) + core.run(config, simulate=False) + mem.tree() + result = read_files(mem) + + assert result == { + "files": { + "copy": { + "19asd_WF_test2_unread.pdf": "", + }, + "19asd_WF_test2_unread.pdf": "", + "other.pdf": "", + "18asd_WFX_test2.pdf": "", + } + } diff --git a/tests/todo/integration/__init__.py 
b/tests/todo/integration/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/integration/test_exif.py b/tests/todo/integration/test_exif.py similarity index 100% rename from tests/integration/test_exif.py rename to tests/todo/integration/test_exif.py diff --git a/tests/integration/test_extension.py b/tests/todo/integration/test_extension.py similarity index 100% rename from tests/integration/test_extension.py rename to tests/todo/integration/test_extension.py diff --git a/tests/integration/test_file_content.py b/tests/todo/integration/test_file_content.py similarity index 100% rename from tests/integration/test_file_content.py rename to tests/todo/integration/test_file_content.py diff --git a/tests/integration/test_filesize.py b/tests/todo/integration/test_filesize.py similarity index 100% rename from tests/integration/test_filesize.py rename to tests/todo/integration/test_filesize.py diff --git a/tests/integration/test_globstrings.py b/tests/todo/integration/test_globstrings.py similarity index 100% rename from tests/integration/test_globstrings.py rename to tests/todo/integration/test_globstrings.py diff --git a/tests/integration/test_integration.py b/tests/todo/integration/test_integration.py similarity index 100% rename from tests/integration/test_integration.py rename to tests/todo/integration/test_integration.py diff --git a/tests/integration/test_python_filter.py b/tests/todo/integration/test_python_filter.py similarity index 100% rename from tests/integration/test_python_filter.py rename to tests/todo/integration/test_python_filter.py diff --git a/tests/integration/test_regex.py b/tests/todo/integration/test_regex.py similarity index 100% rename from tests/integration/test_regex.py rename to tests/todo/integration/test_regex.py diff --git a/tests/todo/integration/test_rename.py b/tests/todo/integration/test_rename.py new file mode 100644 index 00000000..977eebb8 --- /dev/null +++ b/tests/todo/integration/test_rename.py @@ -0,0 +1,33 
@@ +from conftest import create_filesystem, assertdir +from organize.cli import main + + +def test_rename_issue51(tmp_path): + # test for issue https://github.com/tfeldmann/organize/issues/51 + create_filesystem( + tmp_path, + files=["19asd_WF_test2.pdf", "other.pdf", "18asd_WFX_test2.pdf",], + config=r""" + rules: + - folders: files + filters: + - filename: + startswith: "19" + contains: + - "_WF_" + actions: + - rename: "{path.stem}_unread{path.suffix}" + - copy: + dest: "files/copy/" + overwrite: false + counter_separator: "_" + """, + ) + main(["run", "--config-file=%s" % (tmp_path / "config.yaml")]) + assertdir( + tmp_path, + "19asd_WF_test2_unread.pdf", + "other.pdf", + "copy/19asd_WF_test2_unread.pdf", + "18asd_WFX_test2.pdf", + ) diff --git a/tests/integration/test_startswith.py b/tests/todo/integration/test_startswith.py similarity index 100% rename from tests/integration/test_startswith.py rename to tests/todo/integration/test_startswith.py diff --git a/tests/integration/test_unicode.py b/tests/todo/integration/test_unicode.py similarity index 100% rename from tests/integration/test_unicode.py rename to tests/todo/integration/test_unicode.py diff --git a/tests/todo/todo_copy.py b/tests/todo/todo_copy.py new file mode 100644 index 00000000..d9596c56 --- /dev/null +++ b/tests/todo/todo_copy.py @@ -0,0 +1,150 @@ +import os + +from organize.actions import Copy +from pathlib import Path + +USER_DIR = os.path.expanduser("~") + +DEFAULT_ARGS = { + "basedir": Path.home(), + "path": Path.home() / "test.py", + "simulate": False, +} + + +def test_tilde_expansion(mock_exists, mock_samefile, mock_copy, mock_trash, mock_mkdir): + mock_exists.return_value = False + mock_samefile.return_value = False + copy = Copy(dest="~/newname.py", overwrite=False) + updates = copy.run(**DEFAULT_ARGS) + mock_mkdir.assert_called_with(exist_ok=True, parents=True) + mock_exists.assert_called_with() + mock_trash.assert_not_called() + mock_copy.assert_called_with( + 
src=os.path.join(USER_DIR, "test.py"), dst=os.path.join(USER_DIR, "newname.py") + ) + # keep old file path + assert updates is None + + +def test_into_folder(mock_exists, mock_samefile, mock_copy, mock_trash, mock_mkdir): + mock_exists.return_value = False + mock_samefile.return_value = False + copy = Copy(dest="~/somefolder/", overwrite=False) + copy.run(**DEFAULT_ARGS) + mock_mkdir.assert_called_with(exist_ok=True, parents=True) + mock_exists.assert_called_with() + mock_trash.assert_not_called() + mock_copy.assert_called_with( + src=os.path.join(USER_DIR, "test.py"), + dst=os.path.join(USER_DIR, "somefolder", "test.py"), + ) + + +def test_overwrite(mock_exists, mock_samefile, mock_copy, mock_trash, mock_mkdir): + mock_exists.return_value = True + mock_samefile.return_value = False + copy = Copy(dest="~/somefolder/", overwrite=True) + copy.run(**DEFAULT_ARGS) + mock_mkdir.assert_called_with(exist_ok=True, parents=True) + mock_exists.assert_called_with() + mock_trash.assert_called_with(os.path.join(USER_DIR, "somefolder", "test.py")) + mock_copy.assert_called_with( + src=os.path.join(USER_DIR, "test.py"), + dst=os.path.join(USER_DIR, "somefolder", "test.py"), + ) + + +def test_already_exists(mock_exists, mock_samefile, mock_copy, mock_trash, mock_mkdir): + mock_exists.side_effect = [True, False] + mock_samefile.return_value = False + copy = Copy(dest="~/folder/", overwrite=False) + copy.run(**DEFAULT_ARGS) + mock_mkdir.assert_called_with(exist_ok=True, parents=True) + mock_exists.assert_called_with() + mock_trash.assert_not_called() + mock_copy.assert_called_with( + src=os.path.join(USER_DIR, "test.py"), + dst=os.path.join(USER_DIR, "folder", "test 2.py"), + ) + + +def test_already_exists_multiple( + mock_exists, mock_samefile, mock_copy, mock_trash, mock_mkdir +): + mock_exists.side_effect = [True, True, True, False] + mock_samefile.return_value = False + copy = Copy(dest="~/folder/", overwrite=False) + copy.run(**DEFAULT_ARGS) + 
mock_mkdir.assert_called_with(exist_ok=True, parents=True) + mock_exists.assert_called_with() + mock_trash.assert_not_called() + mock_copy.assert_called_with( + src=os.path.join(USER_DIR, "test.py"), + dst=os.path.join(USER_DIR, "folder", "test 4.py"), + ) + + +def test_already_exists_multiple_with_separator( + mock_exists, mock_samefile, mock_copy, mock_trash, mock_mkdir +): + args = { + "basedir": Path.home(), + "path": Path.home() / "test_2.py", + "simulate": False, + } + mock_exists.side_effect = [True, True, True, False] + mock_samefile.return_value = False + copy = Copy(dest="~/folder/", overwrite=False, counter_separator="_") + copy.run(**args) + mock_mkdir.assert_called_with(exist_ok=True, parents=True) + mock_exists.assert_called_with() + mock_trash.assert_not_called() + mock_copy.assert_called_with( + src=os.path.join(USER_DIR, "test_2.py"), + dst=os.path.join(USER_DIR, "folder", "test_5.py"), + ) + + +def test_makedirs(mock_parent, mock_copy, mock_trash): + copy = Copy(dest="~/some/new/folder/", overwrite=False) + copy.run(**DEFAULT_ARGS) + mock_parent.mkdir.assert_called_with(parents=True, exist_ok=True) + mock_trash.assert_not_called() + mock_copy.assert_called_with( + src=os.path.join(USER_DIR, "test.py"), + dst=os.path.join(USER_DIR, "some", "new", "folder", "test.py"), + ) + + +def test_args(mock_exists, mock_samefile, mock_copy, mock_trash, mock_mkdir): + args = { + "basedir": Path.home(), + "path": Path.home() / "test.py", + "simulate": False, + "nr": {"upper": 1}, + } + mock_exists.return_value = False + mock_samefile.return_value = False + copy = Copy(dest="~/{nr.upper}-name.py", overwrite=False) + copy.run(**args) + mock_mkdir.assert_called_with(exist_ok=True, parents=True) + mock_exists.assert_called_with() + mock_trash.assert_not_called() + mock_copy.assert_called_with( + src=os.path.join(USER_DIR, "test.py"), dst=os.path.join(USER_DIR, "1-name.py") + ) + + +def test_path(mock_exists, mock_samefile, mock_copy, mock_trash, mock_mkdir): + 
mock_exists.return_value = False + mock_samefile.return_value = False + copy = Copy(dest="~/{path.stem}/{path.suffix}/{path.name}", overwrite=False) + copy.run(**DEFAULT_ARGS) + mock_mkdir.assert_called_with(exist_ok=True, parents=True) + mock_exists.assert_called_with() + mock_trash.assert_not_called() + mock_copy.assert_called_with( + src=os.path.join(USER_DIR, "test.py"), + dst=os.path.join(USER_DIR, "test", ".py", "test.py"), + ) diff --git a/tests/todo/todo_move.py b/tests/todo/todo_move.py new file mode 100644 index 00000000..764a396e --- /dev/null +++ b/tests/todo/todo_move.py @@ -0,0 +1,170 @@ +import os + +from organize.actions import Move +from pathlib import Path +from organize.utils import DotDict + +USER_DIR = os.path.expanduser("~") + +ARGS = DotDict(basedir=Path.home(), path=Path.home() / "test.py", simulate=False) + + +def test_tilde_expansion(mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir): + mock_exists.return_value = False + mock_samefile.return_value = False + move = Move(dest="~/newname.py", overwrite=False) + updates = move.run(**ARGS) + mock_mkdir.assert_called_with(exist_ok=True, parents=True) + mock_exists.assert_called_with() + mock_trash.assert_not_called() + mock_move.assert_called_with( + src=os.path.join(USER_DIR, "test.py"), dst=os.path.join(USER_DIR, "newname.py") + ) + assert updates == {"path": Path("~/newname.py").expanduser()} + + +def test_into_folder(mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir): + mock_exists.return_value = False + mock_samefile.return_value = False + move = Move(dest="~/somefolder/", overwrite=False) + updates = move.run(**ARGS) + mock_mkdir.assert_called_with(exist_ok=True, parents=True) + mock_exists.assert_called_with() + mock_trash.assert_not_called() + mock_move.assert_called_with( + src=os.path.join(USER_DIR, "test.py"), + dst=os.path.join(USER_DIR, "somefolder", "test.py"), + ) + assert updates == {"path": Path(USER_DIR) / "somefolder" / "test.py"} + + +def 
test_overwrite(mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir): + mock_exists.return_value = True + mock_samefile.return_value = False + move = Move(dest="~/somefolder/", overwrite=True) + updates = move.run(**ARGS) + mock_mkdir.assert_called_with(exist_ok=True, parents=True) + mock_exists.assert_called_with() + mock_trash.assert_called_with(os.path.join(USER_DIR, "somefolder", "test.py")) + mock_move.assert_called_with( + src=os.path.join(USER_DIR, "test.py"), + dst=os.path.join(USER_DIR, "somefolder", "test.py"), + ) + assert updates is not None + + +def test_already_exists(mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir): + mock_exists.side_effect = [True, False] + mock_samefile.return_value = False + move = Move(dest="~/folder/", overwrite=False) + updates = move.run(**ARGS) + mock_mkdir.assert_called_with(exist_ok=True, parents=True) + mock_exists.assert_called_with() + mock_trash.assert_not_called() + mock_move.assert_called_with( + src=os.path.join(USER_DIR, "test.py"), + dst=os.path.join(USER_DIR, "folder", "test 2.py"), + ) + assert updates is not None + + +def test_already_exists_multiple( + mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir +): + mock_exists.side_effect = [True, True, True, False] + mock_samefile.return_value = False + move = Move(dest="~/folder/", overwrite=False) + updates = move.run(**ARGS) + mock_mkdir.assert_called_with(exist_ok=True, parents=True) + mock_exists.assert_called_with() + mock_trash.assert_not_called() + mock_move.assert_called_with( + src=os.path.join(USER_DIR, "test.py"), + dst=os.path.join(USER_DIR, "folder", "test 4.py"), + ) + assert updates is not None + + +def test_already_exists_multiple_separator( + mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir +): + mock_exists.side_effect = [True, True, True, False] + mock_samefile.return_value = False + move = Move(dest="~/folder/", overwrite=False, counter_separator="_") + updates = move.run(**ARGS) + 
mock_mkdir.assert_called_with(exist_ok=True, parents=True) + mock_exists.assert_called_with() + mock_trash.assert_not_called() + mock_move.assert_called_with( + src=os.path.join(USER_DIR, "test.py"), + dst=os.path.join(USER_DIR, "folder", "test_4.py"), + ) + assert updates is not None + + +def test_makedirs(mock_parent, mock_move, mock_trash): + move = Move(dest="~/some/new/folder/", overwrite=False) + updates = move.run(**ARGS) + mock_parent.mkdir.assert_called_with(parents=True, exist_ok=True) + mock_trash.assert_not_called() + mock_move.assert_called_with( + src=os.path.join(USER_DIR, "test.py"), + dst=os.path.join(USER_DIR, "some", "new", "folder", "test.py"), + ) + assert updates is not None + + +def test_args(mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir): + args = ARGS.merge({"nr": {"upper": 1}}) + mock_exists.return_value = False + mock_samefile.return_value = False + move = Move(dest="~/{nr.upper}-name.py", overwrite=False) + updates = move.run(**args) + mock_mkdir.assert_called_with(exist_ok=True, parents=True) + mock_exists.assert_called_with() + mock_trash.assert_not_called() + mock_move.assert_called_with( + src=os.path.join(USER_DIR, "test.py"), dst=os.path.join(USER_DIR, "1-name.py") + ) + assert updates is not None + + +def test_path(mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir): + mock_exists.return_value = False + mock_samefile.return_value = False + move = Move(dest="~/{path.stem}/{path.suffix}/{path.name}", overwrite=False) + updates = move.run(**ARGS) + mock_mkdir.assert_called_with(exist_ok=True, parents=True) + mock_exists.assert_called_with() + mock_trash.assert_not_called() + mock_move.assert_called_with( + src=os.path.join(USER_DIR, "test.py"), + dst=os.path.join(USER_DIR, "test", ".py", "test.py"), + ) + assert updates is not None + + +def test_keep_location(mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir): + mock_exists.return_value = True + mock_samefile.return_value = True + move = 
Move(dest="~/test.py") + updates = move.run(**ARGS) + mock_mkdir.assert_not_called() + mock_exists.assert_called_with() + mock_trash.assert_not_called() + mock_move.assert_not_called() + assert updates is not None + + +def test_dont_keep_case_sensitive( + mock_exists, mock_samefile, mock_move, mock_trash, mock_mkdir +): + mock_exists.return_value = True + mock_samefile.return_value = True + move = Move(dest="~/TEST.PY") + updates = move.run(**ARGS) + assert mock_mkdir.call_count > 0 + mock_exists.assert_called_with() + mock_trash.assert_not_called() + assert mock_move.call_count > 0 + assert updates is not None diff --git a/tests/actions/test_rename.py b/tests/todo/todo_rename.py similarity index 100% rename from tests/actions/test_rename.py rename to tests/todo/todo_rename.py diff --git a/tests/utils/test_deep_merge.py b/tests/utils/test_deep_merge.py new file mode 100644 index 00000000..b57bf522 --- /dev/null +++ b/tests/utils/test_deep_merge.py @@ -0,0 +1,68 @@ +from organize.utils import deep_merge, deep_merge_inplace + + +def test_merges_dicts(): + a = {"a": 1, "b": {"b1": 2, "b2": 3}} + b = {"a": 1, "b": {"b1": 4}} + + print(deep_merge(a, b)) + assert deep_merge(a, b)["a"] == 1 + assert deep_merge(a, b)["b"]["b2"] == 3 + assert deep_merge(a, b)["b"]["b1"] == 4 + + +def test_returns_copy(): + a = {"regex": {"first": "A", "second": "B"}} + b = {"regex": {"third": "C"}} + + x = deep_merge(a, b) + a["regex"]["first"] = "X" + assert x["regex"]["first"] == "A" + assert x["regex"]["second"] == "B" + assert x["regex"]["third"] == "C" + + +def test_inserts_new_keys(): + """Will it insert new keys by default?""" + a = {"a": 1, "b": {"b1": 2, "b2": 3}} + b = {"a": 1, "b": {"b1": 4, "b3": 5}, "c": 6} + + assert deep_merge(a, b)["a"] == 1 + assert deep_merge(a, b)["b"]["b2"] == 3 + assert deep_merge(a, b)["b"]["b1"] == 4 + assert deep_merge(a, b)["b"]["b3"] == 5 + assert deep_merge(a, b)["c"] == 6 + + +def test_does_not_insert_new_keys(): + """Will it avoid inserting new 
keys when required?""" + a = {"a": 1, "b": {"b1": 2, "b2": 3}} + b = {"a": 1, "b": {"b1": 4, "b3": 5}, "c": 6} + + assert deep_merge(a, b, add_keys=True) == { + "a": 1, + "b": {"b1": 4, "b2": 3, "b3": 5}, + "c": 6, + } + assert deep_merge(a, b, add_keys=False) == { + "a": 1, + "b": {"b1": 4, "b2": 3}, + } + + +def test_inplace_merge(): + a = {} + b = {1: {2: 2, 3: 3, 4: {5: "fin."}}} + a = deep_merge(a, b) + assert a == b + b[1][2] = 5 + assert a != b + + deep_merge_inplace(a, {1: {4: {5: "new.", 6: "fin."}, 2: "x"}}) + assert a == {1: {2: "x", 3: 3, 4: {5: "new.", 6: "fin."}}} + + +def test_inplace_keeptype(): + a = {} + deep_merge_inplace(a, {"nr": {"upper": 1}}) + assert a["nr"]["upper"] == 1 diff --git a/tests/utils/test_is_same_resource.py b/tests/utils/test_is_same_resource.py new file mode 100644 index 00000000..6334dbcb --- /dev/null +++ b/tests/utils/test_is_same_resource.py @@ -0,0 +1,57 @@ +from fs import open_fs +from fs.memoryfs import MemoryFS +from organize.utils import is_same_resource + + +def test_mem(): + a = open_fs("mem://") + a.touch("file1") + b = a.makedir("b") + b.touch("file2") + c = a + + assert is_same_resource(a, "b/file2", a, "b/file2") + assert is_same_resource(a, "b/file2", b, "file2") + assert is_same_resource(a, "file1", c, "file1") + + +def test_mem2(): + mem = MemoryFS() + fs1, path1 = mem.makedir("files"), "test.txt" + fs2, path2 = mem, "files/test.txt" + + assert is_same_resource(fs1, path1, fs2, path2) + + +def test_osfs(): + a = open_fs("~").makedir("Desktop", recreate=True) + b = open_fs("~/") + c = b.opendir("Desktop") + + assert is_same_resource(a, "file.txt", a, "file.txt") + assert is_same_resource(a, "file.txt", b, "Desktop/file.txt") + assert is_same_resource(a, "file.txt", c, "file.txt") + + +def test_inter(): + a = open_fs("temp://") + b = open_fs(a.getsyspath("/")) + a_dir = a.makedir("a") + + assert is_same_resource(a, "test.txt", b, "test.txt") + assert is_same_resource(b, "a/subfile.txt", a_dir, "subfile.txt") + 
# assert is_same_resource(a, "test.txt", a_dir, "../test.txt") + + +def test_nested(): + for protocol in ("mem://", "temp://"): + with open_fs(protocol) as mem: + x = mem.makedir("sub1") + x = x.makedir("sub2") + x = x.makedir("sub3") + x.touch("file") + + y = mem.opendir("sub1") + + assert is_same_resource(mem, "sub1/sub2/sub3/file", x, "file") + assert is_same_resource(y, "sub2/sub3/file", x, "file")