From 4a2293023656428bfdea34f81b5f683718ef3e35 Mon Sep 17 00:00:00 2001
From: George Pchelkin
Date: Mon, 20 Feb 2023 00:03:41 +0300
Subject: [PATCH] Release v0.15.0 (#698)

---
 .editorconfig                 |    2 +-
 .env.sample                   |  116 +-
 .github/dependabot.yml        |    6 +-
 .github/workflows/build.yml   |    5 +-
 .github/workflows/release.yml |    2 +-
 .gitignore                    |    7 +-
 AUTHORS.rst                   |    2 +-
 CHANGELOG.rst                 |    4 +
 LICENSE                       |    8 +-
 Makefile                      |   19 +-
 README.rst                    |    8 +-
 app.json                      |   76 +-
 docs/_templates/moreinfo.html |    3 -
 docs/conf.py                  |   92 +-
 docs/pages/changelog.rst      |    1 -
 poetry.lock                   | 1044 ++++++++++------
 pyproject.toml                |  212 ++--
 render.yaml                   |   44 +-
 requirements-dev.txt          |   55 +-
 requirements-docs.txt         |   35 +-
 requirements.txt              |   59 +-
 runtime.txt                   |    2 +-
 scdlbot.service.sample        |   24 +-
 scdlbot/__init__.py           |    7 -
 scdlbot/__main__.py           |   96 --
 scdlbot/exceptions.py         |  122 --
 scdlbot/scdlbot.py            | 2161 ++++++++++++++++++++-------------
 scdlbot/texts/failed.txt      |    1 +
 scdlbot/texts/no_audio.txt    |    1 -
 scdlbot/texts/no_urls.txt     |    5 +-
 scdlbot/texts/settings.tg.md  |    9 +-
 scdlbot/utils.py              |  162 ---
 setup.cfg                     |    2 +-
 tmpreaper.conf.sample         |    4 +-
 34 files changed, 2364 insertions(+), 2032 deletions(-)
 delete mode 100644 scdlbot/__init__.py
 delete mode 100755 scdlbot/__main__.py
 delete mode 100644 scdlbot/exceptions.py
 mode change 100644 => 100755 scdlbot/scdlbot.py
 create mode 100644 scdlbot/texts/failed.txt
 delete mode 100644 scdlbot/texts/no_audio.txt
 delete mode 100644 scdlbot/utils.py

diff --git a/.editorconfig b/.editorconfig
index ff9efb2a5..c9156b4dd 100644
--- a/.editorconfig
+++ b/.editorconfig
@@ -10,7 +10,7 @@ indent_style = space
 indent_size = 2
 trim_trailing_whitespace = true
 
-[*.{py, pyi}]
+[*.{py,pyi}]
 indent_style = space
 indent_size = 4
 
diff --git a/.env.sample b/.env.sample
index 14dd64799..f1efc39c9 100644
--- a/.env.sample
+++ b/.env.sample
@@ -1,84 +1,70 @@
+# TODO generate heroku app.json from sample
 # Telegram Bot API Token, obtain it here: https://t.me/BotFather. This is the only required configuration variable.
-TG_BOT_TOKEN="166849652:AAEPCgHuDf3K6HvI2OkrJmDN2k9R6mcfmLs"
+TG_BOT_TOKEN="166849652:AAEPCgHuDf3K6HvI2OkrJmDN2k9Rexample"
+# TODO
 #TG_BOT_API="https://api.telegram.org"
 #TG_BOT_API="http://127.0.0.1:8081"
+# Chat ID of bot owner for alerts and permissions
+TG_BOT_OWNER_CHAT_ID="1265343"
+# TODO
+CHAT_STORAGE="/home/gpchelkin/scdlbot.pickle"
 # (Absolute?) path to parent directory for downloads directories, default: /tmp/scdlbot
 DL_DIR="/tmp/scdlbot"
-
+# TODO
+BIN_PATH=""
+# TODO
+WORKERS="2"
 # Download timeout in seconds, stop downloading if it takes longer than allowed
 DL_TIMEOUT="300"
-
+# TODO
+CHECK_URL_TIMEOUT="30"
+# TODO
+COMMON_CONNECTION_TIMEOUT="10"
+# Telegram upload file size limit (in bytes). API limit is 50 MB, Local mode API limit is 2000 MB (so you can change it to 1900_000_000).
+MAX_TG_FILE_SIZE = "45_000_000"
+# Bot will not try to split and send files bigger than this (in bytes)
+MAX_CONVERT_FILE_SIZE="80_000_000"
 # Comma-separated chat IDs with no replying and caption spam
 NO_FLOOD_CHAT_IDS="-10018859218,-1011068201"
-
-ALERT_CHAT_IDS="1265343,3265143"
-
-BIN_PATH=""
-
-CHAT_STORAGE="/home/gpchelkin/scdlbotdata"
-
-# For using inline mode bot needs to store audios somewhere. ID of that chat.
-STORE_CHAT_ID="-1795100"
-
+# HTTP or local path with cookies file for Instagram and/or Yandex.Music
+COOKIES_FILE="https://example.com/cookies.txt"
+# TODO
+PROXIES="http://127.0.0.1:3187,http://127.0.0.1:3188,"
+# TODO
+SOURCE_IPS="9.21.18.2,9.21.16.9"
+# A space separated list of domains which should be considered whitelisted - the bot will only process these domains. Example of domain: example.com if the domain has a subdomain this needs to be included: subdomain.example.com **NOTE** that if both whitelist and blacklist will be used, only the blacklist will be taken into consideration.
+WHITELIST_DOMAINS="example.com,subdomain.example.com"
+# A space separated list of domains which should be considered blacklisted - the bot will not process these domains. Example of domain: example.com if the domain has a subdomain this needs to be included: subdomain.example.com **NOTE** that if both whitelist and blacklist will be used, only the blacklist will be taken into consideration.
+BLACKLIST_DOMAINS="example.com,subdomain.example.com"
+#BLACKLIST_DOMAINS="invidious.tube,invidious.kavin.rocks,invidious.himiko.cloud,invidious.namazso.eu,dev.viewtube.io,tube.cadence.moe,piped.kavin.rocks"
 # A space separated list of chat_ids which should be considered whitelisted - the bot will only join those chats **NOTE** that if both whitelist and blacklist will be used, only the blacklist will be taken into consideration.
-WHITELIST_CHATS ="-1795100, 1795102"
+WHITELIST_CHATS ="-1795100,1795102"
 # A space separated list of chat_ids which should be considered blacklisted - the bot will not join those chats. **NOTE** that if both whitelist and blacklist will be used, only the blacklist will be taken into consideration.
-BLACKLIST_CHATS ="-1795100, 1795102"
-
-SOURCE_IPS="9.21.18.2,9.21.16.9"
-PROXIES="socks5://127.0.0.1:1080,socks5://127.0.0.1:1081,"
-SERVE_AUDIO="0"
-# Host and port for metrics
-METRICS_HOST="127.0.0.1"
-METRICS_PORT="8000"
-
-# HTTP or local path with cookies file for Yandex.Music
-COOKIES_FILE="http://test.test.com/cookies.txt"
-
-# Don't try to split and send files over this number of bytes
-MAX_CONVERT_FILE_SIZE="80_000_000"
-
-# Telegram upload file size limit
-MAX_TG_FILE_SIZE = "45_000_000"
-#MAX_TG_FILE_SIZE = "1900_000_000"
-
-#Hostname to show in Syslog messages. In most cases it is already set in environment, you may want to use it on Heroku.
-HOSTNAME="prod-aws"
-
-# Syslog server, for example: logsX.papertrailapp.com:55555
-SYSLOG_ADDRESS="logs2.papertrailapp.com:51181"
-
-# Set to 1 to enable verbose debug logging
-SYSLOG_DEBUG="0"
-
-
-# Webhooks: These links should help. In NGINX use TG_BOT_TOKEN without ":" symbol instead of TOKEN1, and port in proxy_pass according to PORT environment variable.
-# https://github.com/python-telegram-bot/python-telegram-bot/wiki/Webhooks#using-nginx-with-one-domainport-for-all-bots
-# https://nginx.org/en/linux_packages.html#mainline
-# https://certbot.eff.org/lets-encrypt/ubuntubionic-nginx
+BLACKLIST_CHATS ="-1795100,1795102"
+### Webhook:
 # Use webhook for bot updates: 1, use polling (default): 0, more info https://core.telegram.org/bots/api#getting-updates
-USE_WEBHOOK="0"
-
+WEBHOOK_ENABLE="0"
 # Host and port for webhook to listen to; Heroku sets PORT automatically for web dynos, so you shouldn't set it for Heroku
 HOST="0.0.0.0"
 PORT="5000"
-
-# Your host URL like https://scdlbot.herokuapp.com/, required for webhook mode
-APP_URL="https://yourapp.heroku.com/"
-
-# A space separated list of domains which should be considered whitelisted - the bot will only process these domains. Example of domain: example.com if the domain has a subdomain this needs to be included: subdomain.example.com **NOTE** that if both whitelist and blacklist will be used, only the blacklist will be taken into consideration.
-WHITELIST_DOMS="example.com subdomain.example.com"
-# A space separated list of domains which should be considered blacklisted - the bot will not process these domains. Example of domain: example.com if the domain has a subdomain this needs to be included: subdomain.example.com **NOTE** that if both whitelist and blacklist will be used, only the blacklist will be taken into consideration.
-BLACKLIST_DOMS="example.com subdomain.example.com"
+# Your host URL like https://scdlbot.herokuapp.com, required for webhook mode
+WEBHOOK_APP_URL_ROOT="https://yourapp.heroku.com"
 # Your URL path for webhook to listen
-URL_PATH="166849652AAEPCgHuDf3K6HvI2OkrJmDN2k9R6mcfmLs"
-
-# Amount of threads in the thread pool for functions decorated with @run_async:
-# https://docs.python-telegram-bot.org/en/stable/telegram.ext.updater.html
-WORKERS=4
+WEBHOOK_APP_URL_PATH="166849652AAEPCgHuDf3K6HvI2OkrJmDN2k9Rexample"
+# TODO
+WEBHOOK_SECRET_TOKEN="CHANGEME"
+# These links should help. In NGINX use WEBHOOK_APP_URL_PATH (TG_BOT_TOKEN without ":"), and port in proxy_pass according to PORT environment variable.
+# https://github.com/python-telegram-bot/python-telegram-bot/wiki/Webhooks#using-nginx-with-one-domainport-for-all-bots
-# Variables for some specific deployments, you don't need it:
-#LC_ALL="en_US.UTF8"
-#PYENV_VERSION="3.8.2"
+### Monitoring and logging:
+# Host and port for metrics in Prometheus OpenMetrics format:
+METRICS_HOST="127.0.0.1"
+METRICS_PORT="8000"
+# Set to DEBUG to enable verbose debug logging
+LOGLEVEL="INFO"
+# Syslog server, for example: logsX.papertrailapp.com:55555
+SYSLOG_ADDRESS="logs2.papertrailapp.com:51181"
+# Hostname to show in Syslog messages. In most cases it is already set in environment, you may want to set it manually in Heroku.
+HOSTNAME="prod-aws"
diff --git a/.github/dependabot.yml b/.github/dependabot.yml
index c32a074e7..f52d72a54 100644
--- a/.github/dependabot.yml
+++ b/.github/dependabot.yml
@@ -4,7 +4,7 @@ updates:
   directory: "/"
   schedule:
     interval: daily
-    time: '12:00'
+    time: "12:00"
     timezone: Europe/Moscow
   open-pull-requests-limit: 99
   reviewers:
@@ -12,11 +12,11 @@
   assignees:
   - gpchelkin
 
-- package-ecosystem: "github-actions"
+- package-ecosystem: github-actions
   directory: "/"
   schedule:
     interval: "daily"
-    time: '12:00'
+    time: "12:00"
     timezone: Europe/Moscow
   open-pull-requests-limit: 99
   reviewers:
diff --git a/.github/workflows/build.yml b/.github/workflows/build.yml
index ba4b6c7f8..fd2975b23 100644
--- a/.github/workflows/build.yml
+++ b/.github/workflows/build.yml
@@ -12,6 +12,9 @@ concurrency:
   group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
   cancel-in-progress: true
 
+permissions:
+  contents: read
+
 jobs:
   build:
     runs-on: ubuntu-latest
@@ -32,7 +35,7 @@ jobs:
       run: |
         pip install --upgrade pip
         pip install --upgrade setuptools wheel
-        export POETRY_VERSION=1.3.1
+        export POETRY_VERSION=1.3.2
         wget --output-document=install-poetry.py https://install.python-poetry.org
         python "install-poetry.py"
         rm -f "install-poetry.py"
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index cf4ae5c42..735efe680 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -23,7 +23,7 @@ jobs:
       run: |
         pip install --upgrade pip
         pip install --upgrade setuptools wheel
-        export POETRY_VERSION=1.3.1
+        export POETRY_VERSION=1.3.2
         wget --output-document=install-poetry.py https://install.python-poetry.org
         python "install-poetry.py"
         rm -f "install-poetry.py"
diff --git a/.gitignore b/.gitignore
index 465911628..2cdc42fb7 100644
--- a/.gitignore
+++ b/.gitignore
@@ -148,9 +148,8 @@ ENV/
 .idea
 
 #### scdlbot ####
-scdlbotdata
-scdlbotdata.dat
-scdlbotdata.dir
-scdlbotdata.bak
+*.pickle
 *.mp3
 *.mp4
+*.webm
+notes.txt
diff --git a/AUTHORS.rst b/AUTHORS.rst
index 1a887ad67..881dc9d32 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -10,4 +10,4 @@ Contributors
 ------------
 
 * Leonid Runyshkin @leovp
-* Vadim Larionov @vadimlarionov
+* Vadim Larionov @vadimlarionov
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
index 99134e6ef..13cfb0d77 100644
--- a/CHANGELOG.rst
+++ b/CHANGELOG.rst
@@ -1,6 +1,10 @@
 Version history
 ===============
 
+0.15.0 (2023-02-20)
+-----------------------
+* refactor and fix many things
+
 0.14.2 (2022-12-19)
 -----------------------
 * merge #508:
diff --git a/LICENSE b/LICENSE
index 015a68a8e..943b4f827 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,17 +1,17 @@
 The MIT License (MIT)
 
-Copyright (c) 2019 George Pchelkin
-
+Copyright (c) 2023 George Pchelkin
+
 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal
 in the Software without restriction, including without limitation the rights
 to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
 copies of the Software, and to permit persons to whom the Software is
 furnished to do so, subject to the following conditions:
-
+
 The above copyright notice and this permission notice shall be included in all
 copies or substantial portions of the Software.
-
+
 THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
diff --git a/Makefile b/Makefile
index 2ffe9dc1f..5ae4aad31 100644
--- a/Makefile
+++ b/Makefile
@@ -3,7 +3,7 @@ SHELL:=/usr/bin/env bash
 .PHONY: format
 format:
 	poetry run isort .
-	poetry run black .
+	poetry run black --extend-exclude docs/ .
 
 .PHONY: lint
 lint:
@@ -18,8 +18,25 @@ package:
 	poetry run pip check
 	poetry run safety check --full-report
 
+.PHONY: update
+update:
+	poetry self lock
+	poetry self install --sync
+	poetry self update
+	poetry update --with main,dev,docs
+	poetry export --only main --without-hashes -f requirements.txt -o requirements.txt
+	poetry export --only docs --without-hashes -f requirements.txt -o requirements-docs.txt
+	poetry export --only dev --without-hashes -f requirements.txt -o requirements-dev.txt
+
 .PHONY: test
 test: lint package
 
+.PHONY: run_dev
+run_dev:
+	ps -ef | grep '[s]cdlbot/scdlbot.py' | grep -v bash | awk '{print $$2}' | xargs --no-run-if-empty kill -15
+	set -o allexport; \
+	source .env-dev; \
+	poetry run python scdlbot/scdlbot.py
+
 .DEFAULT:
 	@cd docs && $(MAKE) $@
diff --git a/README.rst b/README.rst
index 0b372ed44..7aab78711 100644
--- a/README.rst
+++ b/README.rst
@@ -82,8 +82,8 @@ scdlbot is standing on the shoulders of giants:
 - `Bandcamp `__: `bandcamp-dl `__
 - `YouTube `__, `Yandex.Music `__,
-  `Mixcloud `__, and almost everything from this `list `__:
-  `youtube-dl `__
+  `Mixcloud `__, and almost `everything from this list `__:
+  `yt-dlp `__
 
 Run your own scdlbot
 --------------------
 
@@ -163,9 +163,9 @@ You will need `Heroku CLI `__ installed.
 
     # If you've installed from PyPI - download Procfile first (otherwise already present in Git repository):
     curl -O https://raw.githubusercontent.com/gpchelkin/scdlbot/master/Procfile
-    # For long polling mode (when USE_WEBHOOK=0):
+    # For long polling mode (when WEBHOOK_ENABLE=0):
     heroku local -e .env worker
-    # For webhook mode (when USE_WEBHOOK=1):
+    # For webhook mode (when WEBHOOK_ENABLE=1):
     heroku local -e .env web
 
 Using Python only
diff --git a/app.json b/app.json
index 4735e9d1f..4e10658bb 100644
--- a/app.json
+++ b/app.json
@@ -29,52 +29,18 @@
       "description": "Telegram Bot API Token",
       "required": true
     },
-    "DL_DIR": {
-      "description": "Parent directory for downloads directories",
-      "value": "/tmp/scdlbot",
-      "required": false
-    },
-    "DL_TIMEOUT": {
-      "description": "Download timeout in seconds, stop downloading if it takes longer than allowed",
-      "value": "300",
-      "required": false
-    },
-    "NO_FLOOD_CHAT_IDS": {
-      "description": "Comma-separated chat IDs with no replying and caption hashtags",
-      "required": false
-    },
-    "ALERT_CHAT_IDS": {
-      "description": "Comma-separated chat IDs with no replying and caption hashtags",
-      "required": false
-    },
-    "BIN_PATH": {
-      "description": "Custom directory where scdl and bandcamp-dl binaries are available",
-      "required": false
-    },
-    "CHAT_STORAGE": {
-      "description": "Chat storage",
-      "value": "/tmp/scdlbotdata",
-      "required": false
-    },
-    "STORE_CHAT_ID": {
-      "description": "Chat ID for storing audios for inline mode",
-      "required": false
-    },
-    "WHITELIST_CHATS": {
-      "description": "A space separated list of chat_ids which should be considered whitelisted - the bot will only join those chats **NOTE** that if both whitelist and blacklist will be used, only the blacklist will be taken into consideration.",
-      "required": false
-    },
-    "BLACKLIST_CHATS": {
-      "description": "A space separated list of chat_ids which should be considered blacklisted - the bot will not join those chats. **NOTE** that if both whitelist and blacklist will be used, only the blacklist will be taken into consideration.",
+    "WEBHOOK_ENABLE": {
+      "description": "Use webhook for bot updates: 1, use polling (default): 0",
+      "value": "0",
       "required": false
     },
-    "COOKIES_FILE": {
-      "description": "HTTP or local path with cookies file for Yandex.Music",
+    "WEBHOOK_APP_URL_ROOT": {
+      "description": "Your host URL like https://scdlbot.herokuapp.com, required for webhook mode",
       "required": false
     },
-    "MAX_CONVERT_FILE_SIZE": {
-      "description": "Don't try to split and send files over this number of bytes",
-      "value": "80_000_000",
+    "HOST": {
+      "description": "Hostname to show in Syslog messages",
+      "value": "0.0.0.0",
       "required": false
     },
     "HOSTNAME": {
@@ -82,32 +48,6 @@
       "value": "heroku",
       "required": false
     },
-    "SYSLOG_ADDRESS": {
-      "description": "Syslog server, for example: logsX.papertrailapp.com:55555",
-      "required": false
-    },
-    "SYSLOG_DEBUG": {
-      "description": "Set to 1 to enable verbose debug logging",
-      "value": "0",
-      "required": false
-    },
-    "USE_WEBHOOK": {
-      "description": "Use webhook for bot updates: 1, use polling (default): 0",
-      "value": "0",
-      "required": false
-    },
-    "APP_URL": {
-      "description": "Your host URL like https://scdlbot.herokuapp.com/, required for webhook mode",
-      "required": false
-    },
-    "WHITELIST_DOMS": {
-      "description": "A space separated list of domains which should be considered whitelisted - the bot will only process these domains. Example of domain: example.com if the domain has a subdomain this needs to be included: subdomain.example.com **NOTE** that if both whitelist and blacklist will be used, only the blacklist will be taken into consideration.",
-      "required": false
-    },
-    "BLACKLIST_DOMS": {
-      "description": "A space separated list of domains which should be considered blacklisted - the bot will not process these domains. Example of domain: example.com if the domain has a subdomain this needs to be included: subdomain.example.com **NOTE** that if both whitelist and blacklist will be used, only the blacklist will be taken into consideration.",
-      "required": false
-    }
   },
   "formation": {
     "web": {
diff --git a/docs/_templates/moreinfo.html b/docs/_templates/moreinfo.html
index f233d5042..8ba066a86 100644
--- a/docs/_templates/moreinfo.html
+++ b/docs/_templates/moreinfo.html
@@ -8,7 +8,4 @@

  • PyPI
  • -
  • - Coverage -
  • diff --git a/docs/conf.py b/docs/conf.py index 6caf1ebc5..6f3101f2c 100755 --- a/docs/conf.py +++ b/docs/conf.py @@ -13,62 +13,63 @@ import os import sys -import tomlkit +import tomli -sys.path.insert(0, os.path.abspath("..")) +sys.path.insert(0, os.path.abspath('..')) # -- Project information ----------------------------------------------------- - def _get_project_meta(): - with open("../pyproject.toml") as pyproject: - file_contents = pyproject.read() - - return tomlkit.parse(file_contents)["tool"]["poetry"] + with open('../pyproject.toml', mode='rb') as pyproject: + return tomli.load(pyproject)['tool']['poetry'] pkg_meta = _get_project_meta() -project = str(pkg_meta["name"]) -copyright = "2020, gpchelkin" # noqa: A001 # skipcq: PYL-W0622 -author = "gpchelkin" +project = str(pkg_meta['name']) +copyright = '2023, gpchelkin' # noqa: A001 # skipcq: PYL-W0622 +author = 'gpchelkin' # The short X.Y version -version = str(pkg_meta["version"]) +version = str(pkg_meta['version']) # The full version, including alpha/beta/rc tags release = str(version) + # -- General configuration --------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. -needs_sphinx = "3.2" +needs_sphinx = '3.3' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ - "sphinx.ext.autodoc", - "sphinx.ext.doctest", - "sphinx.ext.todo", - "sphinx.ext.coverage", - "sphinx.ext.viewcode", - "sphinx.ext.autosummary", + 'sphinx.ext.autodoc', + 'sphinx.ext.doctest', + 'sphinx.ext.todo', + 'sphinx.ext.coverage', + 'sphinx.ext.viewcode', + 'sphinx.ext.autosummary', + # Used to write beautiful docstrings: - "sphinx.ext.napoleon", + 'sphinx.ext.napoleon', + # Used to include .md files: - "m2r2", + 'm2r2', + # Used to insert typehints into the final docs: - "sphinx_autodoc_typehints", + 'sphinx_autodoc_typehints', ] -autoclass_content = "class" -autodoc_member_order = "bysource" +autoclass_content = 'class' +autodoc_member_order = 'bysource' -autodoc_member_order = "bysource" +autodoc_member_order = 'bysource' autodoc_default_flags = { - "members": "", - "undoc-members": "code,error_template", - "exclude-members": "__dict__,__weakref__", + 'members': '', + 'undoc-members': 'code,error_template', + 'exclude-members': '__dict__,__weakref__', } # Set `typing.TYPE_CHECKING` to `True`: @@ -76,55 +77,56 @@ def _get_project_meta(): set_type_checking_flag = False # Add any paths that contain templates here, relative to this directory. -templates_path = ["_templates"] +templates_path = ['_templates'] # The suffix(es) of source filenames. # You can specify multiple suffix as a list of string: -source_suffix = [".rst", ".md"] +source_suffix = ['.rst', '.md'] # The master toctree document. -master_doc = "index" +master_doc = 'index' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. -language = "en" +language = 'en' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. # This pattern also affects html_static_path and html_extra_path . -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] +exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] # The name of the Pygments (syntax highlighting) style to use. 
-pygments_style = "sphinx" +pygments_style = 'sphinx' add_module_names = False autodoc_default_options = { - "show-inheritance": True, + 'show-inheritance': True, } + # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. -html_theme = "alabaster" +html_theme = 'alabaster' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. html_theme_options = { - "sidebar_collapse": False, - "show_powered_by": False, + 'sidebar_collapse': False, + 'show_powered_by': False, } # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". -html_static_path = ["_static"] +html_static_path = ['_static'] # Custom sidebar templates, must be a dictionary that maps document names # to template names. @@ -132,13 +134,13 @@ def _get_project_meta(): # This is required for the alabaster theme # refs: http://alabaster.readthedocs.io/en/latest/installation.html#sidebars html_sidebars = { - "**": [ - "about.html", - "badges.html", - "navigation.html", - "moreinfo.html", - "github.html", - "searchbox.html", + '**': [ + 'about.html', + 'badges.html', + 'navigation.html', + 'moreinfo.html', + 'github.html', + 'searchbox.html', ], } diff --git a/docs/pages/changelog.rst b/docs/pages/changelog.rst index 7e79282aa..09929fe43 100644 --- a/docs/pages/changelog.rst +++ b/docs/pages/changelog.rst @@ -1,2 +1 @@ .. include:: ../../CHANGELOG.rst - diff --git a/poetry.lock b/poetry.lock index 1ce33bfa0..79691aca2 100644 --- a/poetry.lock +++ b/poetry.lock @@ -2,16 +2,37 @@ [[package]] name = "alabaster" -version = "0.7.12" +version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.6" files = [ - {file = "alabaster-0.7.12-py2.py3-none-any.whl", hash = "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359"}, - {file = "alabaster-0.7.12.tar.gz", hash = "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02"}, + {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, + {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, ] +[[package]] +name = "anyio" +version = "3.6.2" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +category = "main" +optional = false +python-versions = ">=3.6.2" +files = [ + {file = "anyio-3.6.2-py3-none-any.whl", hash = "sha256:fbbe32bd270d2a2ef3ed1c5d45041250284e31fc0a4df4a5a6071842051a51e3"}, + {file = "anyio-3.6.2.tar.gz", hash = "sha256:25ea0d673ae30af41a0c442f81cf3b38c7e79fdc7b60335a4c14e05eb0947421"}, +] + +[package.dependencies] +idna = ">=2.8" +sniffio = ">=1.1" + +[package.extras] +doc = ["packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx-rtd-theme"] +test = ["contextlib2", "coverage[toml] (>=4.5)", "hypothesis (>=4.0)", "mock (>=4)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "uvloop (<0.15)", "uvloop (>=0.15)"] +trio = ["trio (>=0.16,<0.22)"] + [[package]] name = "appdirs" version = "1.4.4" @@ -24,6 +45,36 @@ files = [ {file = "appdirs-1.4.4.tar.gz", hash = 
"sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41"}, ] +[[package]] +name = "apscheduler" +version = "3.10.0" +description = "In-process task scheduler with Cron-like capabilities" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "APScheduler-3.10.0-py3-none-any.whl", hash = "sha256:575299f20073c60a2cc9d4fa5906024cdde33c5c0ce6087c4e3c14be3b50fdd4"}, + {file = "APScheduler-3.10.0.tar.gz", hash = "sha256:a49fc23269218416f0e41890eea7a75ed6b284f10630dcfe866ab659621a3696"}, +] + +[package.dependencies] +pytz = "*" +setuptools = ">=0.7" +six = ">=1.4.0" +tzlocal = ">=2.0,<3.0.0 || >=4.0.0" + +[package.extras] +doc = ["sphinx", "sphinx-rtd-theme"] +gevent = ["gevent"] +mongodb = ["pymongo (>=3.0)"] +redis = ["redis (>=3.0)"] +rethinkdb = ["rethinkdb (>=2.4.0)"] +sqlalchemy = ["sqlalchemy (>=1.4)"] +testing = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-tornado5"] +tornado = ["tornado (>=4.3)"] +twisted = ["twisted"] +zookeeper = ["kazoo"] + [[package]] name = "args" version = "0.1.0" @@ -49,21 +100,22 @@ files = [ [[package]] name = "attrs" -version = "22.1.0" +version = "22.2.0" description = "Classes Without Boilerplate" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.6" files = [ - {file = "attrs-22.1.0-py2.py3-none-any.whl", hash = "sha256:86efa402f67bf2df34f51a335487cf46b1ec130d02b8d39fd248abfd30da551c"}, - {file = "attrs-22.1.0.tar.gz", hash = "sha256:29adc2665447e5191d0e7c568fde78b21f9672d344281d0c6e1ab085429b22b6"}, + {file = "attrs-22.2.0-py3-none-any.whl", hash = "sha256:29e95c7f6778868dbd49170f98f8818f78f3dc5e0e37c0b1f474e3561b240836"}, + {file = "attrs-22.2.0.tar.gz", hash = "sha256:c9227bfc2f01993c03f68db37d1d15c9690188323c067c641f1a35ca58185f99"}, ] [package.extras] -dev = ["cloudpickle", "coverage[toml] (>=5.0.2)", "furo", "hypothesis", "mypy (>=0.900,!=0.940)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "sphinx", "sphinx-notfound-page", "zope.interface"] -docs = ["furo", "sphinx", "sphinx-notfound-page", "zope.interface"] -tests = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "zope.interface"] -tests-no-zope = ["cloudpickle", "coverage[toml] (>=5.0.2)", "hypothesis", "mypy (>=0.900,!=0.940)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins"] +cov = ["attrs[tests]", "coverage-enable-subprocess", "coverage[toml] (>=5.3)"] +dev = ["attrs[docs,tests]"] +docs = ["furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier", "zope.interface"] +tests = ["attrs[tests-no-zope]", "zope.interface"] +tests-no-zope = ["cloudpickle", "cloudpickle", "hypothesis", "hypothesis", "mypy (>=0.971,<0.990)", "mypy (>=0.971,<0.990)", "pympler", "pympler", "pytest (>=4.3.0)", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-mypy-plugins", "pytest-xdist[psutil]", "pytest-xdist[psutil]"] [[package]] name = "autorepr" @@ -144,14 +196,14 @@ yaml = ["PyYAML"] [[package]] name = "beautifulsoup4" -version = "4.11.1" +version = "4.11.2" description = "Screen-scraping library" category = "main" optional = false python-versions = ">=3.6.0" files = [ - {file = "beautifulsoup4-4.11.1-py3-none-any.whl", hash = "sha256:58d5c3d29f5a36ffeb94f02f0d786cd53014cf9b3b3951d42e0080d8a9498d30"}, - {file = "beautifulsoup4-4.11.1.tar.gz", hash = "sha256:ad9aa55b65ef2808eb405f46cf74df7fcb7044d5cbc26487f96eb2ef2e436693"}, + {file = "beautifulsoup4-4.11.2-py3-none-any.whl", hash 
= "sha256:0e79446b10b3ecb499c1556f7e228a53e64a2bfcebd455f370d8927cb5b59e39"}, + {file = "beautifulsoup4-4.11.2.tar.gz", hash = "sha256:bc4bdda6717de5a2987436fb8d72f45dc90dd856bdfd512a1314ce90349a0106"}, ] [package.dependencies] @@ -163,32 +215,46 @@ lxml = ["lxml"] [[package]] name = "black" -version = "22.12.0" +version = "23.1.0" description = "The uncompromising code formatter." category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "black-22.12.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9eedd20838bd5d75b80c9f5487dbcb06836a43833a37846cf1d8c1cc01cef59d"}, - {file = "black-22.12.0-cp310-cp310-win_amd64.whl", hash = "sha256:159a46a4947f73387b4d83e87ea006dbb2337eab6c879620a3ba52699b1f4351"}, - {file = "black-22.12.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d30b212bffeb1e252b31dd269dfae69dd17e06d92b87ad26e23890f3efea366f"}, - {file = "black-22.12.0-cp311-cp311-win_amd64.whl", hash = "sha256:7412e75863aa5c5411886804678b7d083c7c28421210180d67dfd8cf1221e1f4"}, - {file = "black-22.12.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c116eed0efb9ff870ded8b62fe9f28dd61ef6e9ddd28d83d7d264a38417dcee2"}, - {file = "black-22.12.0-cp37-cp37m-win_amd64.whl", hash = "sha256:1f58cbe16dfe8c12b7434e50ff889fa479072096d79f0a7f25e4ab8e94cd8350"}, - {file = "black-22.12.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:77d86c9f3db9b1bf6761244bc0b3572a546f5fe37917a044e02f3166d5aafa7d"}, - {file = "black-22.12.0-cp38-cp38-win_amd64.whl", hash = "sha256:82d9fe8fee3401e02e79767016b4907820a7dc28d70d137eb397b92ef3cc5bfc"}, - {file = "black-22.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:101c69b23df9b44247bd88e1d7e90154336ac4992502d4197bdac35dd7ee3320"}, - {file = "black-22.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:559c7a1ba9a006226f09e4916060982fd27334ae1998e7a38b3f33a37f7a2148"}, - {file = "black-22.12.0-py3-none-any.whl", hash = "sha256:436cc9167dd28040ad90d3b404aec22cedf24a6e4d7de221bec2730ec0c97bcf"}, - {file = "black-22.12.0.tar.gz", hash = "sha256:229351e5a18ca30f447bf724d007f890f97e13af070bb6ad4c0a441cd7596a2f"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:b6a92a41ee34b883b359998f0c8e6eb8e99803aa8bf3123bf2b2e6fec505a221"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:57c18c5165c1dbe291d5306e53fb3988122890e57bd9b3dcb75f967f13411a26"}, + {file = "black-23.1.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:9880d7d419bb7e709b37e28deb5e68a49227713b623c72b2b931028ea65f619b"}, + {file = "black-23.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e6663f91b6feca5d06f2ccd49a10f254f9298cc1f7f49c46e498a0771b507104"}, + {file = "black-23.1.0-cp310-cp310-win_amd64.whl", hash = "sha256:9afd3f493666a0cd8f8df9a0200c6359ac53940cbde049dcb1a7eb6ee2dd7074"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:bfffba28dc52a58f04492181392ee380e95262af14ee01d4bc7bb1b1c6ca8d27"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c1c476bc7b7d021321e7d93dc2cbd78ce103b84d5a4cf97ed535fbc0d6660648"}, + {file = "black-23.1.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:382998821f58e5c8238d3166c492139573325287820963d2f7de4d518bd76958"}, + {file = "black-23.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2bf649fda611c8550ca9d7592b69f0637218c2369b7744694c5e4902873b2f3a"}, + {file = "black-23.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:121ca7f10b4a01fd99951234abdbd97728e1240be89fde18480ffac16503d481"}, + {file = "black-23.1.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:a8471939da5e824b891b25751955be52ee7f8a30a916d570a5ba8e0f2eb2ecad"}, + {file = "black-23.1.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8178318cb74f98bc571eef19068f6ab5613b3e59d4f47771582f04e175570ed8"}, + {file = "black-23.1.0-cp37-cp37m-win_amd64.whl", hash = "sha256:a436e7881d33acaf2536c46a454bb964a50eff59b21b51c6ccf5a40601fbef24"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:a59db0a2094d2259c554676403fa2fac3473ccf1354c1c63eccf7ae65aac8ab6"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:0052dba51dec07ed029ed61b18183942043e00008ec65d5028814afaab9a22fd"}, + {file = "black-23.1.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:49f7b39e30f326a34b5c9a4213213a6b221d7ae9d58ec70df1c4a307cf2a1580"}, + {file = "black-23.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:162e37d49e93bd6eb6f1afc3e17a3d23a823042530c37c3c42eeeaf026f38468"}, + {file = "black-23.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:8b70eb40a78dfac24842458476135f9b99ab952dd3f2dab738c1881a9b38b753"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:a29650759a6a0944e7cca036674655c2f0f63806ddecc45ed40b7b8aa314b651"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:bb460c8561c8c1bec7824ecbc3ce085eb50005883a6203dcfb0122e95797ee06"}, + {file = "black-23.1.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:c91dfc2c2a4e50df0026f88d2215e166616e0c80e86004d0003ece0488db2739"}, + {file = "black-23.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a951cc83ab535d248c89f300eccbd625e80ab880fbcfb5ac8afb5f01a258ac9"}, + {file = "black-23.1.0-cp39-cp39-win_amd64.whl", hash = "sha256:0680d4380db3719ebcfb2613f34e86c8e6d15ffeabcf8ec59355c5e7b85bb555"}, + {file = "black-23.1.0-py3-none-any.whl", hash = "sha256:7a0f701d314cfa0896b9001df70a530eb2472babb76086344e688829efd97d32"}, + {file = "black-23.1.0.tar.gz", hash = "sha256:b0bd97bea8903f5a2ba7219257a44e3f1f9d00073d6cc1add68f0beec69692ac"}, ] [package.dependencies] click = ">=8.0.0" mypy-extensions = ">=0.4.3" +packaging = ">=22.0" pathspec = ">=0.9.0" platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_full_version < \"3.11.0a7\""} +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} [package.extras] @@ -233,6 +299,16 @@ files = [ {file = "Brotli-1.0.9-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:b83bb06a0192cccf1eb8d0a28672a1b79c74c3a8a5f2619625aeb6f28b3a82bb"}, {file = "Brotli-1.0.9-cp310-cp310-win32.whl", hash = "sha256:26d168aac4aaec9a4394221240e8a5436b5634adc3cd1cdf637f6645cecbf181"}, {file = "Brotli-1.0.9-cp310-cp310-win_amd64.whl", hash = "sha256:622a231b08899c864eb87e85f81c75e7b9ce05b001e59bbfbf43d4a71f5f32b2"}, + {file = "Brotli-1.0.9-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:cc0283a406774f465fb45ec7efb66857c09ffefbe49ec20b7882eff6d3c86d3a"}, + {file = "Brotli-1.0.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:11d3283d89af7033236fa4e73ec2cbe743d4f6a81d41bd234f24bf63dde979df"}, + {file = 
"Brotli-1.0.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c1306004d49b84bd0c4f90457c6f57ad109f5cc6067a9664e12b7b79a9948ad"}, + {file = "Brotli-1.0.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b1375b5d17d6145c798661b67e4ae9d5496920d9265e2f00f1c2c0b5ae91fbde"}, + {file = "Brotli-1.0.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cab1b5964b39607a66adbba01f1c12df2e55ac36c81ec6ed44f2fca44178bf1a"}, + {file = "Brotli-1.0.9-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:8ed6a5b3d23ecc00ea02e1ed8e0ff9a08f4fc87a1f58a2530e71c0f48adf882f"}, + {file = "Brotli-1.0.9-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:cb02ed34557afde2d2da68194d12f5719ee96cfb2eacc886352cb73e3808fc5d"}, + {file = "Brotli-1.0.9-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:b3523f51818e8f16599613edddb1ff924eeb4b53ab7e7197f85cbc321cdca32f"}, + {file = "Brotli-1.0.9-cp311-cp311-win32.whl", hash = "sha256:ba72d37e2a924717990f4d7482e8ac88e2ef43fb95491eb6e0d124d77d2a150d"}, + {file = "Brotli-1.0.9-cp311-cp311-win_amd64.whl", hash = "sha256:3ffaadcaeafe9d30a7e4e1e97ad727e4f5610b9fa2f7551998471e3736738679"}, {file = "Brotli-1.0.9-cp35-cp35m-macosx_10_6_intel.whl", hash = "sha256:c83aa123d56f2e060644427a882a36b3c12db93727ad7a7b9efd7d7f3e9cc2c4"}, {file = "Brotli-1.0.9-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:6b2ae9f5f67f89aade1fab0f7fd8f2832501311c363a21579d02defa844d9296"}, {file = "Brotli-1.0.9-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:68715970f16b6e92c574c30747c95cf8cf62804569647386ff032195dc89a430"}, @@ -277,7 +353,17 @@ files = [ {file = "Brotli-1.0.9-cp39-cp39-win32.whl", hash = "sha256:cfc391f4429ee0a9370aa93d812a52e1fee0f37a81861f4fdd1f4fb28e8547c3"}, {file = "Brotli-1.0.9-cp39-cp39-win_amd64.whl", hash = "sha256:854c33dad5ba0fbd6ab69185fec8dab89e13cda6b7d191ba111987df74f38761"}, {file = "Brotli-1.0.9-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:9749a124280a0ada4187a6cfd1ffd35c350fb3af79c706589d98e088c5044267"}, + {file = "Brotli-1.0.9-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:73fd30d4ce0ea48010564ccee1a26bfe39323fde05cb34b5863455629db61dc7"}, + {file = "Brotli-1.0.9-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:02177603aaca36e1fd21b091cb742bb3b305a569e2402f1ca38af471777fb019"}, {file = "Brotli-1.0.9-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:76ffebb907bec09ff511bb3acc077695e2c32bc2142819491579a695f77ffd4d"}, + {file = "Brotli-1.0.9-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:b43775532a5904bc938f9c15b77c613cb6ad6fb30990f3b0afaea82797a402d8"}, + {file = "Brotli-1.0.9-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:5bf37a08493232fbb0f8229f1824b366c2fc1d02d64e7e918af40acd15f3e337"}, + {file = "Brotli-1.0.9-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:330e3f10cd01da535c70d09c4283ba2df5fb78e915bea0a28becad6e2ac010be"}, + {file = "Brotli-1.0.9-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:e1abbeef02962596548382e393f56e4c94acd286bd0c5afba756cffc33670e8a"}, + {file = "Brotli-1.0.9-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:3148362937217b7072cf80a2dcc007f09bb5ecb96dae4617316638194113d5be"}, + {file = 
"Brotli-1.0.9-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:336b40348269f9b91268378de5ff44dc6fbaa2268194f85177b53463d313842a"}, + {file = "Brotli-1.0.9-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b8b09a16a1950b9ef495a0f8b9d0a87599a9d1f179e2d4ac014b2ec831f87e7"}, + {file = "Brotli-1.0.9-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c8e521a0ce7cf690ca84b8cc2272ddaf9d8a50294fd086da67e517439614c755"}, {file = "Brotli-1.0.9.zip", hash = "sha256:4d1b810aa0ed773f81dceda2cc7b403d01057458730e309856356d4ef4188438"}, ] @@ -443,19 +529,102 @@ files = [ [[package]] name = "charset-normalizer" -version = "2.1.1" +version = "3.0.1" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." category = "main" optional = false -python-versions = ">=3.6.0" +python-versions = "*" files = [ - {file = "charset-normalizer-2.1.1.tar.gz", hash = "sha256:5a3d016c7c547f69d6f81fb0db9449ce888b418b5b9952cc5e6e66843e9dd845"}, - {file = "charset_normalizer-2.1.1-py3-none-any.whl", hash = "sha256:83e9a75d1911279afd89352c68b45348559d1fc0506b054b346651b5e7fee29f"}, + {file = "charset-normalizer-3.0.1.tar.gz", hash = "sha256:ebea339af930f8ca5d7a699b921106c6e29c617fe9606fa7baa043c1cdae326f"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:88600c72ef7587fe1708fd242b385b6ed4b8904976d5da0893e31df8b3480cb6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:c75ffc45f25324e68ab238cb4b5c0a38cd1c3d7f1fb1f72b5541de469e2247db"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:db72b07027db150f468fbada4d85b3b2729a3db39178abf5c543b784c1254539"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62595ab75873d50d57323a91dd03e6966eb79c41fa834b7a1661ed043b2d404d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ff6f3db31555657f3163b15a6b7c6938d08df7adbfc9dd13d9d19edad678f1e8"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:772b87914ff1152b92a197ef4ea40efe27a378606c39446ded52c8f80f79702e"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70990b9c51340e4044cfc394a81f614f3f90d41397104d226f21e66de668730d"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:292d5e8ba896bbfd6334b096e34bffb56161c81408d6d036a7dfa6929cff8783"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:2edb64ee7bf1ed524a1da60cdcd2e1f6e2b4f66ef7c077680739f1641f62f555"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:31a9ddf4718d10ae04d9b18801bd776693487cbb57d74cc3458a7673f6f34639"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:44ba614de5361b3e5278e1241fda3dc1838deed864b50a10d7ce92983797fa76"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:12db3b2c533c23ab812c2b25934f60383361f8a376ae272665f8e48b88e8e1c6"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:c512accbd6ff0270939b9ac214b84fb5ada5f0409c44298361b2f5e13f9aed9e"}, + {file = 
"charset_normalizer-3.0.1-cp310-cp310-win32.whl", hash = "sha256:502218f52498a36d6bf5ea77081844017bf7982cdbe521ad85e64cabee1b608b"}, + {file = "charset_normalizer-3.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:601f36512f9e28f029d9481bdaf8e89e5148ac5d89cffd3b05cd533eeb423b59"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0298eafff88c99982a4cf66ba2efa1128e4ddaca0b05eec4c456bbc7db691d8d"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a8d0fc946c784ff7f7c3742310cc8a57c5c6dc31631269876a88b809dbeff3d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:87701167f2a5c930b403e9756fab1d31d4d4da52856143b609e30a1ce7160f3c"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:14e76c0f23218b8f46c4d87018ca2e441535aed3632ca134b10239dfb6dadd6b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0c0a590235ccd933d9892c627dec5bc7511ce6ad6c1011fdf5b11363022746c1"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8c7fe7afa480e3e82eed58e0ca89f751cd14d767638e2550c77a92a9e749c317"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:79909e27e8e4fcc9db4addea88aa63f6423ebb171db091fb4373e3312cb6d603"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8ac7b6a045b814cf0c47f3623d21ebd88b3e8cf216a14790b455ea7ff0135d18"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:72966d1b297c741541ca8cf1223ff262a6febe52481af742036a0b296e35fa5a"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:f9d0c5c045a3ca9bedfc35dca8526798eb91a07aa7a2c0fee134c6c6f321cbd7"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:5995f0164fa7df59db4746112fec3f49c461dd6b31b841873443bdb077c13cfc"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4a8fcf28c05c1f6d7e177a9a46a1c52798bfe2ad80681d275b10dcf317deaf0b"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:761e8904c07ad053d285670f36dd94e1b6ab7f16ce62b9805c475b7aa1cffde6"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win32.whl", hash = "sha256:71140351489970dfe5e60fc621ada3e0f41104a5eddaca47a7acb3c1b851d6d3"}, + {file = "charset_normalizer-3.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:9ab77acb98eba3fd2a85cd160851816bfce6871d944d885febf012713f06659c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:84c3990934bae40ea69a82034912ffe5a62c60bbf6ec5bc9691419641d7d5c9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:74292fc76c905c0ef095fe11e188a32ebd03bc38f3f3e9bcb85e4e6db177b7ea"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c95a03c79bbe30eec3ec2b7f076074f4281526724c8685a42872974ef4d36b72"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f4c39b0e3eac288fedc2b43055cfc2ca7a60362d0e5e87a637beac5d801ef478"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:df2c707231459e8a4028eabcd3cfc827befd635b3ef72eada84ab13b52e1574d"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:93ad6d87ac18e2a90b0fe89df7c65263b9a99a0eb98f0a3d2e079f12a0735837"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:59e5686dd847347e55dffcc191a96622f016bc0ad89105e24c14e0d6305acbc6"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_i686.whl", hash = "sha256:cd6056167405314a4dc3c173943f11249fa0f1b204f8b51ed4bde1a9cd1834dc"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_ppc64le.whl", hash = "sha256:083c8d17153ecb403e5e1eb76a7ef4babfc2c48d58899c98fcaa04833e7a2f9a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_s390x.whl", hash = "sha256:f5057856d21e7586765171eac8b9fc3f7d44ef39425f85dbcccb13b3ebea806c"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:7eb33a30d75562222b64f569c642ff3dc6689e09adda43a082208397f016c39a"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win32.whl", hash = "sha256:95dea361dd73757c6f1c0a1480ac499952c16ac83f7f5f4f84f0658a01b8ef41"}, + {file = "charset_normalizer-3.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:eaa379fcd227ca235d04152ca6704c7cb55564116f8bc52545ff357628e10602"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:3e45867f1f2ab0711d60c6c71746ac53537f1684baa699f4f668d4c6f6ce8e14"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cadaeaba78750d58d3cc6ac4d1fd867da6fc73c88156b7a3212a3cd4819d679d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:911d8a40b2bef5b8bbae2e36a0b103f142ac53557ab421dc16ac4aafee6f53dc"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:503e65837c71b875ecdd733877d852adbc465bd82c768a067badd953bf1bc5a3"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a60332922359f920193b1d4826953c507a877b523b2395ad7bc716ddd386d866"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:16a8663d6e281208d78806dbe14ee9903715361cf81f6d4309944e4d1e59ac5b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:a16418ecf1329f71df119e8a65f3aa68004a3f9383821edcb20f0702934d8087"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:9d9153257a3f70d5f69edf2325357251ed20f772b12e593f3b3377b5f78e7ef8"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:02a51034802cbf38db3f89c66fb5d2ec57e6fe7ef2f4a44d070a593c3688667b"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:2e396d70bc4ef5325b72b593a72c8979999aa52fb8bcf03f701c1b03e1166918"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:11b53acf2411c3b09e6af37e4b9005cba376c872503c8f28218c7243582df45d"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win32.whl", hash = "sha256:0bf2dae5291758b6f84cf923bfaa285632816007db0330002fa1de38bfcb7154"}, + {file = "charset_normalizer-3.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:2c03cc56021a4bd59be889c2b9257dae13bf55041a3372d3295416f86b295fb5"}, + {file = 
"charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:024e606be3ed92216e2b6952ed859d86b4cfa52cd5bc5f050e7dc28f9b43ec42"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:4b0d02d7102dd0f997580b51edc4cebcf2ab6397a7edf89f1c73b586c614272c"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:358a7c4cb8ba9b46c453b1dd8d9e431452d5249072e4f56cfda3149f6ab1405e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81d6741ab457d14fdedc215516665050f3822d3e56508921cc7239f8c8e66a58"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b8af03d2e37866d023ad0ddea594edefc31e827fee64f8de5611a1dbc373174"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9cf4e8ad252f7c38dd1f676b46514f92dc0ebeb0db5552f5f403509705e24753"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e696f0dd336161fca9adbb846875d40752e6eba585843c768935ba5c9960722b"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c22d3fe05ce11d3671297dc8973267daa0f938b93ec716e12e0f6dee81591dc1"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:109487860ef6a328f3eec66f2bf78b0b72400280d8f8ea05f69c51644ba6521a"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:37f8febc8ec50c14f3ec9637505f28e58d4f66752207ea177c1d67df25da5aed"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:f97e83fa6c25693c7a35de154681fcc257c1c41b38beb0304b9c4d2d9e164479"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:a152f5f33d64a6be73f1d30c9cc82dfc73cec6477ec268e7c6e4c7d23c2d2291"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:39049da0ffb96c8cbb65cbf5c5f3ca3168990adf3551bd1dee10c48fce8ae820"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win32.whl", hash = "sha256:4457ea6774b5611f4bed5eaa5df55f70abde42364d498c5134b7ef4c6958e20e"}, + {file = "charset_normalizer-3.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:e62164b50f84e20601c1ff8eb55620d2ad25fb81b59e3cd776a1902527a788af"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:8eade758719add78ec36dc13201483f8e9b5d940329285edcd5f70c0a9edbd7f"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8499ca8f4502af841f68135133d8258f7b32a53a1d594aa98cc52013fff55678"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:3fc1c4a2ffd64890aebdb3f97e1278b0cc72579a08ca4de8cd2c04799a3a22be"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:00d3ffdaafe92a5dc603cb9bd5111aaa36dfa187c8285c543be562e61b755f6b"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2ac1b08635a8cd4e0cbeaf6f5e922085908d48eb05d44c5ae9eabab148512ca"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f6f45710b4459401609ebebdbcfb34515da4fc2aa886f95107f556ac69a9147e"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:3ae1de54a77dc0d6d5fcf623290af4266412a7c4be0b1ff7444394f03f5c54e3"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3b590df687e3c5ee0deef9fc8c547d81986d9a1b56073d82de008744452d6541"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:ab5de034a886f616a5668aa5d098af2b5385ed70142090e2a31bcbd0af0fdb3d"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9cb3032517f1627cc012dbc80a8ec976ae76d93ea2b5feaa9d2a5b8882597579"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:608862a7bf6957f2333fc54ab4399e405baad0163dc9f8d99cb236816db169d4"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:0f438ae3532723fb6ead77e7c604be7c8374094ef4ee2c5e03a3a17f1fca256c"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:356541bf4381fa35856dafa6a965916e54bed415ad8a24ee6de6e37deccf2786"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win32.whl", hash = "sha256:39cf9ed17fe3b1bc81f33c9ceb6ce67683ee7526e65fde1447c772afc54a1bb8"}, + {file = "charset_normalizer-3.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:0a11e971ed097d24c534c037d298ad32c6ce81a45736d31e0ff0ad37ab437d59"}, + {file = "charset_normalizer-3.0.1-py3-none-any.whl", hash = "sha256:7e189e2e1d3ed2f4aebabd2d5b0f931e883676e51c7624826e0a4e5fe8a0bf24"}, ] -[package.extras] -unicode-backport = ["unicodedata2"] - [[package]] name = "click" version = "8.1.3" @@ -527,67 +696,20 @@ files = [ [package.extras] testing = ["flake8", "pytest", "pytest-cov", "pytest-virtualenv", "pytest-xdist", "sphinx"] -[[package]] -name = "cryptography" -version = "38.0.4" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-category = "main" -optional = false -python-versions = ">=3.6" -files = [ - {file = "cryptography-38.0.4-cp36-abi3-macosx_10_10_universal2.whl", hash = "sha256:2fa36a7b2cc0998a3a4d5af26ccb6273f3df133d61da2ba13b3286261e7efb70"}, - {file = "cryptography-38.0.4-cp36-abi3-macosx_10_10_x86_64.whl", hash = "sha256:1f13ddda26a04c06eb57119caf27a524ccae20533729f4b1e4a69b54e07035eb"}, - {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:2ec2a8714dd005949d4019195d72abed84198d877112abb5a27740e217e0ea8d"}, - {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50a1494ed0c3f5b4d07650a68cd6ca62efe8b596ce743a5c94403e6f11bf06c1"}, - {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a10498349d4c8eab7357a8f9aa3463791292845b79597ad1b98a543686fb1ec8"}, - {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_24_x86_64.whl", hash = "sha256:10652dd7282de17990b88679cb82f832752c4e8237f0c714be518044269415db"}, - {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:bfe6472507986613dc6cc00b3d492b2f7564b02b3b3682d25ca7f40fa3fd321b"}, - {file = "cryptography-38.0.4-cp36-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:ce127dd0a6a0811c251a6cddd014d292728484e530d80e872ad9806cfb1c5b3c"}, - {file = "cryptography-38.0.4-cp36-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:53049f3379ef05182864d13bb9686657659407148f901f3f1eee57a733fb4b00"}, - {file = "cryptography-38.0.4-cp36-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:8a4b2bdb68a447fadebfd7d24855758fe2d6fecc7fed0b78d190b1af39a8e3b0"}, - {file = "cryptography-38.0.4-cp36-abi3-win32.whl", hash = "sha256:1d7e632804a248103b60b16fb145e8df0bc60eed790ece0d12efe8cd3f3e7744"}, - {file = "cryptography-38.0.4-cp36-abi3-win_amd64.whl", hash = "sha256:8e45653fb97eb2f20b8c96f9cd2b3a0654d742b47d638cf2897afbd97f80fa6d"}, - {file = "cryptography-38.0.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ca57eb3ddaccd1112c18fc80abe41db443cc2e9dcb1917078e02dfa010a4f353"}, - {file = "cryptography-38.0.4-pp37-pypy37_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:c9e0d79ee4c56d841bd4ac6e7697c8ff3c8d6da67379057f29e66acffcd1e9a7"}, - {file = "cryptography-38.0.4-pp37-pypy37_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:0e70da4bdff7601b0ef48e6348339e490ebfb0cbe638e083c9c41fb49f00c8bd"}, - {file = "cryptography-38.0.4-pp38-pypy38_pp73-macosx_10_10_x86_64.whl", hash = "sha256:998cd19189d8a747b226d24c0207fdaa1e6658a1d3f2494541cb9dfbf7dcb6d2"}, - {file = "cryptography-38.0.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67461b5ebca2e4c2ab991733f8ab637a7265bb582f07c7c88914b5afb88cb95b"}, - {file = "cryptography-38.0.4-pp38-pypy38_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4eb85075437f0b1fd8cd66c688469a0c4119e0ba855e3fef86691971b887caf6"}, - {file = "cryptography-38.0.4-pp38-pypy38_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:3178d46f363d4549b9a76264f41c6948752183b3f587666aff0555ac50fd7876"}, - {file = "cryptography-38.0.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:6391e59ebe7c62d9902c24a4d8bcbc79a68e7c4ab65863536127c8a9cd94043b"}, - {file = "cryptography-38.0.4-pp39-pypy39_pp73-macosx_10_10_x86_64.whl", hash = "sha256:78e47e28ddc4ace41dd38c42e6feecfdadf9c3be2af389abbfeef1ff06822285"}, - {file = "cryptography-38.0.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2fb481682873035600b5502f0015b664abc26466153fab5c6bc92c1ea69d478b"}, - {file = "cryptography-38.0.4-pp39-pypy39_pp73-manylinux_2_24_x86_64.whl", hash = "sha256:4367da5705922cf7070462e964f66e4ac24162e22ab0a2e9d31f1b270dd78083"}, - {file = "cryptography-38.0.4-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:b4cad0cea995af760f82820ab4ca54e5471fc782f70a007f31531957f43e9dee"}, - {file = "cryptography-38.0.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:80ca53981ceeb3241998443c4964a387771588c4e4a5d92735a493af868294f9"}, - {file = "cryptography-38.0.4.tar.gz", hash = "sha256:175c1a818b87c9ac80bb7377f5520b7f31b3ef2a0004e2420319beadedb67290"}, -] - -[package.dependencies] -cffi = ">=1.12" - -[package.extras] -docs = ["sphinx (>=1.6.5,!=1.8.0,!=3.1.0,!=3.1.1)", "sphinx-rtd-theme"] -docstest = ["pyenchant (>=1.6.11)", "sphinxcontrib-spelling (>=4.0.1)", "twine (>=1.12.0)"] -pep8test = ["black", "flake8", "flake8-import-order", "pep8-naming"] -sdist = ["setuptools-rust (>=0.11.4)"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["hypothesis (>=1.11.4,!=3.79.2)", "iso8601", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-subtests", "pytest-xdist", "pytz"] - [[package]] name = "dacite" -version = "1.6.0" +version = "1.8.0" description = "Simple creation of data classes from dictionaries." category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "dacite-1.6.0-py3-none-any.whl", hash = "sha256:4331535f7aabb505c732fa4c3c094313fc0a1d5ea19907bf4726a7819a68b93f"}, - {file = "dacite-1.6.0.tar.gz", hash = "sha256:d48125ed0a0352d3de9f493bf980038088f45f3f9d7498f090b50a847daaa6df"}, + {file = "dacite-1.8.0-py3-none-any.whl", hash = "sha256:f7b1205cc5d9b62835aac8cbc1e6e37c1da862359a401f1edbe2ae08fbdc6193"}, + {file = "dacite-1.8.0.tar.gz", hash = "sha256:6257a5e505b61a8cafee7ef3ad08cf32ee9b885718f42395d017e0a9b4c6af65"}, ] [package.extras] -dev = ["black", "coveralls", "mypy", "pylint", "pytest (>=5)", "pytest-cov"] +dev = ["black", "coveralls", "mypy", "pre-commit", "pylint", "pytest (>=5)", "pytest-benchmark", "pytest-cov"] [[package]] name = "darglint" @@ -601,18 +723,6 @@ files = [ {file = "darglint-1.8.1.tar.gz", hash = "sha256:080d5106df149b199822e7ee7deb9c012b49891538f14a11be681044f0bb20da"}, ] -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -category = "main" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - [[package]] name = "demjson3" version = "3.0.6" @@ -644,14 +754,14 @@ tests = ["check-manifest (>=0.42)", "mock (>=1.3.0)", "pytest (==5.4.3)", "pytes [[package]] name = "doc8" -version = "1.0.0" +version = "1.1.1" description = "Style checker for Sphinx (or other) RST documentation" category = "dev" optional = false -python-versions = ">=3.7" +python-versions = ">=3.8" files = [ - {file = "doc8-1.0.0-py3-none-any.whl", hash = "sha256:0c6c3104fa7f7bb2103589c0a8e272c105fdff3ddd1ef4808e51b2782185e9ab"}, - {file = "doc8-1.0.0.tar.gz", hash = "sha256:1e999a14fe415ea96d89d5053c790d01061f19b6737706b817d1579c2a07cc16"}, + {file = "doc8-1.1.1-py3-none-any.whl", hash = "sha256:e493aa3f36820197c49f407583521bb76a0fde4fffbcd0e092be946ff95931ac"}, + {file = "doc8-1.1.1.tar.gz", hash = "sha256:d97a93e8f5a2efc4713a0804657dedad83745cca4cd1d88de9186f77f9776004"}, ] 
[package.dependencies] @@ -706,14 +816,14 @@ pipenv = ["pipenv"] [[package]] name = "dpath" -version = "2.1.3" +version = "2.1.4" description = "Filesystem-like pathing and searching for dictionaries" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "dpath-2.1.3-py3-none-any.whl", hash = "sha256:d9560e03ccd83b3c6f29988b0162ce9b34fd28b9d8dbda46663b20c68d9cdae3"}, - {file = "dpath-2.1.3.tar.gz", hash = "sha256:d1a7a0e6427d0a4156c792c82caf1f0109603f68ace792e36ca4596fd2cb8d9d"}, + {file = "dpath-2.1.4-py3-none-any.whl", hash = "sha256:3a4f6cc07e3a1b34bc73baa3a6854ee0a48fb2cf18a8c9b1911b66fd72afaa85"}, + {file = "dpath-2.1.4.tar.gz", hash = "sha256:3380a77d0db4abf104125860ff6eb4bd07c97c65b81aad42a609717089a1bed0"}, ] [[package]] @@ -742,19 +852,34 @@ files = [ [[package]] name = "exceptiongroup" -version = "1.0.4" +version = "1.1.0" description = "Backport of PEP 654 (exception groups)" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "exceptiongroup-1.0.4-py3-none-any.whl", hash = "sha256:542adf9dea4055530d6e1279602fa5cb11dab2395fa650b8674eaec35fc4a828"}, - {file = "exceptiongroup-1.0.4.tar.gz", hash = "sha256:bd14967b79cd9bdb54d97323216f8fdf533e278df937aa2a90089e7d6e06e5ec"}, + {file = "exceptiongroup-1.1.0-py3-none-any.whl", hash = "sha256:327cbda3da756e2de031a3107b81ab7b3770a602c4d16ca618298c526f4bec1e"}, + {file = "exceptiongroup-1.1.0.tar.gz", hash = "sha256:bcb67d800a4497e1b404c2dd44fca47d3b7a5e5433dbab67f96c1a685cdfdf23"}, ] [package.extras] test = ["pytest (>=6)"] +[[package]] +name = "fake-useragent" +version = "1.1.1" +description = "Up-to-date simple useragent faker with real world database" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "fake-useragent-1.1.1.tar.gz", hash = "sha256:579c72b18ba792a5bd54ba48e63e464d21933e336472c974091a6757f31bfcdc"}, + {file = "fake_useragent-1.1.1-py3-none-any.whl", hash = "sha256:9f9b3667d3741ba81e34ebf9a6aa32658ecf7835499257826dd72642af629d59"}, +] + +[package.dependencies] +importlib-resources = {version = ">=5.0", markers = "python_version < \"3.10\""} + [[package]] name = "ffmpeg-python" version = "0.2.0" @@ -890,14 +1015,14 @@ pycodestyle = "*" [[package]] name = "flake8-docstrings" -version = "1.6.0" +version = "1.7.0" description = "Extension for flake8 which uses pydocstyle to check docstrings" category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "flake8-docstrings-1.6.0.tar.gz", hash = "sha256:9fe7c6a306064af8e62a055c2f61e9eb1da55f84bb39caef2b84ce53708ac34b"}, - {file = "flake8_docstrings-1.6.0-py2.py3-none-any.whl", hash = "sha256:99cac583d6c7e32dd28bbfbef120a7c0d1b6dde4adb5a9fd441c4227a6534bde"}, + {file = "flake8_docstrings-1.7.0-py2.py3-none-any.whl", hash = "sha256:51f2344026da083fc084166a9353f5082b01f72901df422f74b4d953ae88ac75"}, + {file = "flake8_docstrings-1.7.0.tar.gz", hash = "sha256:4c8cc748dc16e6869728699e5d0d685da9a10b0ea718e090b1ba088e67a941af"}, ] [package.dependencies] @@ -957,13 +1082,13 @@ flake8 = "*" [[package]] name = "flake8-quotes" -version = "3.3.1" +version = "3.3.2" description = "Flake8 lint for quotes." 
category = "dev" optional = false python-versions = "*" files = [ - {file = "flake8-quotes-3.3.1.tar.gz", hash = "sha256:633adca6fb8a08131536af0d750b44d6985b9aba46f498871e21588c3e6f525a"}, + {file = "flake8-quotes-3.3.2.tar.gz", hash = "sha256:6e26892b632dacba517bf27219c459a8396dcfac0f5e8204904c5a4ba9b480e1"}, ] [package.dependencies] @@ -1057,13 +1182,13 @@ six = ">=1.8.0" [[package]] name = "future" -version = "0.18.2" +version = "0.18.3" description = "Clean single-source support for Python 3 and 2" category = "main" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" files = [ - {file = "future-0.18.2.tar.gz", hash = "sha256:b1bead90b70cf6ec3f0710ae53a525360fa360d306a86583adc6bf83a4db537d"}, + {file = "future-0.18.3.tar.gz", hash = "sha256:34a17436ed1e96697a86f9de3d15a3b0be01d8bc8de9c1dffd59fb8234ed5307"}, ] [[package]] @@ -1083,29 +1208,128 @@ smmap = ">=3.0.1,<6" [[package]] name = "gitpython" -version = "3.1.29" -description = "GitPython is a python library used to interact with Git repositories" +version = "3.1.31" +description = "GitPython is a Python library used to interact with Git repositories" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "GitPython-3.1.29-py3-none-any.whl", hash = "sha256:41eea0deec2deea139b459ac03656f0dd28fc4a3387240ec1d3c259a2c47850f"}, - {file = "GitPython-3.1.29.tar.gz", hash = "sha256:cc36bfc4a3f913e66805a28e84703e419d9c264c1077e537b54f0e1af85dbefd"}, + {file = "GitPython-3.1.31-py3-none-any.whl", hash = "sha256:f04893614f6aa713a60cbbe1e6a97403ef633103cdd0ef5eb6efe0deb98dbe8d"}, + {file = "GitPython-3.1.31.tar.gz", hash = "sha256:8ce3bcf69adfdf7c7d503e78fd3b1c492af782d58893b650adb2ac8912ddd573"}, ] [package.dependencies] gitdb = ">=4.0.1,<5" +[[package]] +name = "h11" +version = "0.14.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "h11-0.14.0-py3-none-any.whl", hash = "sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, + {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, +] + +[[package]] +name = "h2" +version = "4.1.0" +description = "HTTP/2 State-Machine based protocol implementation" +category = "main" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "h2-4.1.0-py3-none-any.whl", hash = "sha256:03a46bcf682256c95b5fd9e9a99c1323584c3eec6440d379b9903d709476bc6d"}, + {file = "h2-4.1.0.tar.gz", hash = "sha256:a83aca08fbe7aacb79fec788c9c0bac936343560ed9ec18b82a13a12c28d2abb"}, +] + +[package.dependencies] +hpack = ">=4.0,<5" +hyperframe = ">=6.0,<7" + +[[package]] +name = "hpack" +version = "4.0.0" +description = "Pure-Python HPACK header compression" +category = "main" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hpack-4.0.0-py3-none-any.whl", hash = "sha256:84a076fad3dc9a9f8063ccb8041ef100867b1878b25ef0ee63847a5d53818a6c"}, + {file = "hpack-4.0.0.tar.gz", hash = "sha256:fc41de0c63e687ebffde81187a948221294896f6bdc0ae2312708df339430095"}, +] + +[[package]] +name = "httpcore" +version = "0.16.3" +description = "A minimal low-level HTTP client." 
+category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpcore-0.16.3-py3-none-any.whl", hash = "sha256:da1fb708784a938aa084bde4feb8317056c55037247c787bd7e19eb2c2949dc0"}, + {file = "httpcore-0.16.3.tar.gz", hash = "sha256:c5d6f04e2fc530f39e0c077e6a30caa53f1451096120f1f38b954afd0b17c0cb"}, +] + +[package.dependencies] +anyio = ">=3.0,<5.0" +certifi = "*" +h11 = ">=0.13,<0.15" +sniffio = ">=1.0.0,<2.0.0" + +[package.extras] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] +name = "httpx" +version = "0.23.3" +description = "The next generation HTTP client." +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "httpx-0.23.3-py3-none-any.whl", hash = "sha256:a211fcce9b1254ea24f0cd6af9869b3d29aba40154e947d2a07bb499b3e310d6"}, + {file = "httpx-0.23.3.tar.gz", hash = "sha256:9818458eb565bb54898ccb9b8b251a28785dd4a55afbc23d0eb410754fe7d0f9"}, +] + +[package.dependencies] +certifi = "*" +h2 = {version = ">=3,<5", optional = true, markers = "extra == \"http2\""} +httpcore = ">=0.15.0,<0.17.0" +rfc3986 = {version = ">=1.3,<2", extras = ["idna2008"]} +sniffio = "*" + +[package.extras] +brotli = ["brotli", "brotlicffi"] +cli = ["click (>=8.0.0,<9.0.0)", "pygments (>=2.0.0,<3.0.0)", "rich (>=10,<13)"] +http2 = ["h2 (>=3,<5)"] +socks = ["socksio (>=1.0.0,<2.0.0)"] + +[[package]] +name = "hyperframe" +version = "6.0.1" +description = "HTTP/2 framing layer for Python" +category = "main" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "hyperframe-6.0.1-py3-none-any.whl", hash = "sha256:0ec6bafd80d8ad2195c4f03aacba3a8265e57bc4cff261e802bf39970ed02a15"}, + {file = "hyperframe-6.0.1.tar.gz", hash = "sha256:ae510046231dc8e9ecb1a6586f63d2347bf4c8905914aa84ba585ae85f28a914"}, +] + [[package]] name = "identify" -version = "2.5.10" +version = "2.5.18" description = "File identification library for Python" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "identify-2.5.10-py2.py3-none-any.whl", hash = "sha256:fb7c2feaeca6976a3ffa31ec3236a6911fbc51aec9acc111de2aed99f244ade2"}, - {file = "identify-2.5.10.tar.gz", hash = "sha256:dce9e31fee7dbc45fea36a9e855c316b8fbf807e65a862f160840bb5a2bf5dfd"}, + {file = "identify-2.5.18-py2.py3-none-any.whl", hash = "sha256:93aac7ecf2f6abf879b8f29a8002d3c6de7086b8c28d88e1ad15045a15ab63f9"}, + {file = "identify-2.5.18.tar.gz", hash = "sha256:89e144fa560cc4cffb6ef2ab5e9fb18ed9f9b3cb054384bab4b95c12f6c309fe"}, ] [package.extras] @@ -1137,39 +1361,58 @@ files = [ [[package]] name = "importlib-metadata" -version = "5.1.0" +version = "6.0.0" description = "Read metadata from Python packages" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "importlib_metadata-5.1.0-py3-none-any.whl", hash = "sha256:d84d17e21670ec07990e1044a99efe8d615d860fd176fc29ef5c306068fda313"}, - {file = "importlib_metadata-5.1.0.tar.gz", hash = "sha256:d5059f9f1e8e41f80e9c56c2ee58811450c31984dfa625329ffd7c0dad88a73b"}, + {file = "importlib_metadata-6.0.0-py3-none-any.whl", hash = "sha256:7efb448ec9a5e313a57655d35aa54cd3e01b7e1fbcf72dce1bf06119420f5bad"}, + {file = "importlib_metadata-6.0.0.tar.gz", hash = "sha256:e354bedeb60efa6affdcc8ae121b73544a7aa74156d047311948f6d711cd378d"}, ] [package.dependencies] zipp = ">=0.5" [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx 
(>=3.5)", "sphinx-lint"] perf = ["ipython"] testing = ["flake8 (<5)", "flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)"] +[[package]] +name = "importlib-resources" +version = "5.12.0" +description = "Read resources from Python packages" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = "importlib_resources-5.12.0-py3-none-any.whl", hash = "sha256:7b1deeebbf351c7578e09bf2f63fa2ce8b5ffec296e0d349139d43cca061a81a"}, + {file = "importlib_resources-5.12.0.tar.gz", hash = "sha256:4be82589bf5c1d7999aedf2a45159d10cb3ca4f19b2271f8792bc8e6da7b22f6"}, +] + +[package.dependencies] +zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["flake8 (<5)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + [[package]] name = "isort" -version = "5.11.3" +version = "5.12.0" description = "A Python utility / library to sort Python imports." category = "dev" optional = false -python-versions = ">=3.7.0" +python-versions = ">=3.8.0" files = [ - {file = "isort-5.11.3-py3-none-any.whl", hash = "sha256:83155ffa936239d986b0f190347a3f2285f42a9b9e1725c89d865b27dd0627e5"}, - {file = "isort-5.11.3.tar.gz", hash = "sha256:a8ca25fbfad0f7d5d8447a4314837298d9f6b23aed8618584c894574f626b64b"}, + {file = "isort-5.12.0-py3-none-any.whl", hash = "sha256:f84c2818376e66cf843d497486ea8fed8700b340f308f076c6fb1229dff318b6"}, + {file = "isort-5.12.0.tar.gz", hash = "sha256:8bef7dde241278824a6d83f44a544709b065191b95b6e50894bdc722fcba0504"}, ] [package.extras] -colors = ["colorama (>=0.4.3,<0.5.0)"] -pipfile-deprecated-finder = ["pipreqs", "requirementslib"] +colors = ["colorama (>=0.4.3)"] +pipfile-deprecated-finder = ["pip-shims (>=0.5.2)", "pipreqs", "requirementslib"] plugins = ["setuptools"] requirements-deprecated-finder = ["pip-api", "pipreqs"] @@ -1255,6 +1498,7 @@ files = [ {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ca989b91cf3a3ba28930a9fc1e9aeafc2a395448641df1f387a2d394638943b0"}, {file = "lxml-4.9.2-cp35-cp35m-manylinux_2_5_x86_64.manylinux1_x86_64.whl", hash = "sha256:822068f85e12a6e292803e112ab876bc03ed1f03dddb80154c395f891ca6b31e"}, {file = "lxml-4.9.2-cp35-cp35m-win32.whl", hash = "sha256:be7292c55101e22f2a3d4d8913944cbea71eea90792bf914add27454a13905df"}, + {file = "lxml-4.9.2-cp35-cp35m-win_amd64.whl", hash = "sha256:998c7c41910666d2976928c38ea96a70d1aa43be6fe502f21a651e17483a43c5"}, {file = "lxml-4.9.2-cp36-cp36m-macosx_10_15_x86_64.whl", hash = "sha256:b26a29f0b7fc6f0897f043ca366142d2b609dc60756ee6e4e90b5f762c6adc53"}, {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:ab323679b8b3030000f2be63e22cdeea5b47ee0abd2d6a1dc0c8103ddaa56cd7"}, {file = "lxml-4.9.2-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:689bb688a1db722485e4610a503e3e9210dcc20c520b45ac8f7533c837be76fe"}, @@ -1264,6 +1508,7 @@ files = [ {file = "lxml-4.9.2-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:58bfa3aa19ca4c0f28c5dde0ff56c520fbac6f0daf4fac66ed4c8d2fb7f22e74"}, {file = 
"lxml-4.9.2-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = "sha256:bc718cd47b765e790eecb74d044cc8d37d58562f6c314ee9484df26276d36a38"}, {file = "lxml-4.9.2-cp36-cp36m-win32.whl", hash = "sha256:d5bf6545cd27aaa8a13033ce56354ed9e25ab0e4ac3b5392b763d8d04b08e0c5"}, + {file = "lxml-4.9.2-cp36-cp36m-win_amd64.whl", hash = "sha256:3ab9fa9d6dc2a7f29d7affdf3edebf6ece6fb28a6d80b14c3b2fb9d39b9322c3"}, {file = "lxml-4.9.2-cp37-cp37m-macosx_10_15_x86_64.whl", hash = "sha256:05ca3f6abf5cf78fe053da9b1166e062ade3fa5d4f92b4ed688127ea7d7b1d03"}, {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_24_i686.whl", hash = "sha256:a5da296eb617d18e497bcf0a5c528f5d3b18dadb3619fbdadf4ed2356ef8d941"}, {file = "lxml-4.9.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.manylinux_2_24_aarch64.whl", hash = "sha256:04876580c050a8c5341d706dd464ff04fd597095cc8c023252566a8826505726"}, @@ -1315,14 +1560,14 @@ source = ["Cython (>=0.29.7)"] [[package]] name = "m2r2" -version = "0.3.3" +version = "0.3.3.post2" description = "Markdown and reStructuredText in a single file." category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "m2r2-0.3.3-py3-none-any.whl", hash = "sha256:2ee32a5928c3598b67c70e6d22981ec936c03d5bfd2f64229e77678731952f16"}, - {file = "m2r2-0.3.3.tar.gz", hash = "sha256:f9b6e9efbc2b6987dbd43d2fd15a6d115ba837d8158ae73295542635b4086e75"}, + {file = "m2r2-0.3.3.post2-py3-none-any.whl", hash = "sha256:86157721eb6eabcd54d4eea7195890cc58fa6188b8d0abea633383cfbb5e11e3"}, + {file = "m2r2-0.3.3.post2.tar.gz", hash = "sha256:e62bcb0e74b3ce19cda0737a0556b04cf4a43b785072fcef474558f2c1482ca8"}, ] [package.dependencies] @@ -1331,52 +1576,62 @@ mistune = "0.8.4" [[package]] name = "markupsafe" -version = "2.1.1" +version = "2.1.2" description = "Safely add untrusted strings to HTML/XML markup." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:86b1f75c4e7c2ac2ccdaec2b9022845dbb81880ca318bb7a0a01fbf7813e3812"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f121a1420d4e173a5d96e47e9a0c0dcff965afdf1626d28de1460815f7c4ee7a"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a49907dd8420c5685cfa064a1335b6754b74541bbb3706c259c02ed65b644b3e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c1bfff05d95783da83491be968e8fe789263689c02724e0c691933c52994f5"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b7bd98b796e2b6553da7225aeb61f447f80a1ca64f41d83612e6139ca5213aa4"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b09bf97215625a311f669476f44b8b318b075847b49316d3e28c08e41a7a573f"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:694deca8d702d5db21ec83983ce0bb4b26a578e71fbdbd4fdcd387daa90e4d5e"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:efc1913fd2ca4f334418481c7e595c00aad186563bbc1ec76067848c7ca0a933"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win32.whl", hash = "sha256:4a33dea2b688b3190ee12bd7cfa29d39c9ed176bda40bfa11099a3ce5d3a7ac6"}, - {file = "MarkupSafe-2.1.1-cp310-cp310-win_amd64.whl", hash = "sha256:dda30ba7e87fbbb7eab1ec9f58678558fd9a6b8b853530e176eabd064da81417"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:671cd1187ed5e62818414afe79ed29da836dde67166a9fac6d435873c44fdd02"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3799351e2336dc91ea70b034983ee71cf2f9533cdff7c14c90ea126bfd95d65a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e72591e9ecd94d7feb70c1cbd7be7b3ebea3f548870aa91e2732960fa4d57a37"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6fbf47b5d3728c6aea2abb0589b5d30459e369baa772e0f37a0320185e87c980"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:d5ee4f386140395a2c818d149221149c54849dfcfcb9f1debfe07a8b8bd63f9a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:bcb3ed405ed3222f9904899563d6fc492ff75cce56cba05e32eff40e6acbeaa3"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:e1c0b87e09fa55a220f058d1d49d3fb8df88fbfab58558f1198e08c1e1de842a"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win32.whl", hash = "sha256:8dc1c72a69aa7e082593c4a203dcf94ddb74bb5c8a731e4e1eb68d031e8498ff"}, - {file = "MarkupSafe-2.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:97a68e6ada378df82bc9f16b800ab77cbf4b2fada0081794318520138c088e4a"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:e8c843bbcda3a2f1e3c2ab25913c80a3c5376cd00c6e8c4a86a89a28c8dc5452"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:0212a68688482dc52b2d45013df70d169f542b7394fc744c02a57374a4207003"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e576a51ad59e4bfaac456023a78f6b5e6e7651dcd383bcc3e18d06f9b55d6d1"}, - {file = 
"MarkupSafe-2.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b9fe39a2ccc108a4accc2676e77da025ce383c108593d65cc909add5c3bd601"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:96e37a3dc86e80bf81758c152fe66dbf60ed5eca3d26305edf01892257049925"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:6d0072fea50feec76a4c418096652f2c3238eaa014b2f94aeb1d56a66b41403f"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:089cf3dbf0cd6c100f02945abeb18484bd1ee57a079aefd52cffd17fba910b88"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:6a074d34ee7a5ce3effbc526b7083ec9731bb3cbf921bbe1d3005d4d2bdb3a63"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win32.whl", hash = "sha256:421be9fbf0ffe9ffd7a378aafebbf6f4602d564d34be190fc19a193232fd12b1"}, - {file = "MarkupSafe-2.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:fc7b548b17d238737688817ab67deebb30e8073c95749d55538ed473130ec0c7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e04e26803c9c3851c931eac40c695602c6295b8d432cbe78609649ad9bd2da8a"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b87db4360013327109564f0e591bd2a3b318547bcef31b468a92ee504d07ae4f"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:99a2a507ed3ac881b975a2976d59f38c19386d128e7a9a18b7df6fff1fd4c1d6"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:56442863ed2b06d19c37f94d999035e15ee982988920e12a5b4ba29b62ad1f77"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3ce11ee3f23f79dbd06fb3d63e2f6af7b12db1d46932fe7bd8afa259a5996603"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:33b74d289bd2f5e527beadcaa3f401e0df0a89927c1559c8566c066fa4248ab7"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:43093fb83d8343aac0b1baa75516da6092f58f41200907ef92448ecab8825135"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8e3dcf21f367459434c18e71b2a9532d96547aef8a871872a5bd69a715c15f96"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win32.whl", hash = "sha256:d4306c36ca495956b6d568d276ac11fdd9c30a36f1b6eb928070dc5360b22e1c"}, - {file = "MarkupSafe-2.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:46d00d6cfecdde84d40e572d63735ef81423ad31184100411e6e3388d405e247"}, - {file = "MarkupSafe-2.1.1.tar.gz", hash = "sha256:7f91197cc9e48f989d12e4e6fbc46495c446636dfc81b9ccf50bb0ec74b91d4b"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:665a36ae6f8f20a4676b53224e33d456a6f5a72657d9c83c2aa00765072f31f7"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:340bea174e9761308703ae988e982005aedf427de816d1afe98147668cc03036"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22152d00bf4a9c7c83960521fc558f55a1adbc0631fbb00a9471e097b19d72e1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:28057e985dace2f478e042eaa15606c7efccb700797660629da387eb289b9323"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:ca244fa73f50a800cf8c3ebf7fd93149ec37f5cb9596aa8873ae2c1d23498601"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:d9d971ec1e79906046aa3ca266de79eac42f1dbf3612a05dc9368125952bd1a1"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:7e007132af78ea9df29495dbf7b5824cb71648d7133cf7848a2a5dd00d36f9ff"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:7313ce6a199651c4ed9d7e4cfb4aa56fe923b1adf9af3b420ee14e6d9a73df65"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win32.whl", hash = "sha256:c4a549890a45f57f1ebf99c067a4ad0cb423a05544accaf2b065246827ed9603"}, + {file = "MarkupSafe-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:835fb5e38fd89328e9c81067fd642b3593c33e1e17e2fdbf77f5676abb14a156"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:2ec4f2d48ae59bbb9d1f9d7efb9236ab81429a764dedca114f5fdabbc3788013"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:608e7073dfa9e38a85d38474c082d4281f4ce276ac0010224eaba11e929dd53a"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:65608c35bfb8a76763f37036547f7adfd09270fbdbf96608be2bead319728fcd"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f2bfb563d0211ce16b63c7cb9395d2c682a23187f54c3d79bfec33e6705473c6"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:da25303d91526aac3672ee6d49a2f3db2d9502a4a60b55519feb1a4c7714e07d"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:9cad97ab29dfc3f0249b483412c85c8ef4766d96cdf9dcf5a1e3caa3f3661cf1"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:085fd3201e7b12809f9e6e9bc1e5c96a368c8523fad5afb02afe3c051ae4afcc"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1bea30e9bf331f3fef67e0a3877b2288593c98a21ccb2cf29b74c581a4eb3af0"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win32.whl", hash = "sha256:7df70907e00c970c60b9ef2938d894a9381f38e6b9db73c5be35e59d92e06625"}, + {file = "MarkupSafe-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:e55e40ff0cc8cc5c07996915ad367fa47da6b3fc091fdadca7f5403239c5fec3"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a6e40afa7f45939ca356f348c8e23048e02cb109ced1eb8420961b2f40fb373a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cf877ab4ed6e302ec1d04952ca358b381a882fbd9d1b07cccbfd61783561f98a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63ba06c9941e46fa389d389644e2d8225e0e3e5ebcc4ff1ea8506dce646f8c8a"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f1cd098434e83e656abf198f103a8207a8187c0fc110306691a2e94a78d0abb2"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:55f44b440d491028addb3b88f72207d71eeebfb7b5dbf0643f7c023ae1fba619"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a6f2fcca746e8d5910e18782f976489939d54a91f9411c32051b4aab2bd7c513"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:0b462104ba25f1ac006fdab8b6a01ebbfbce9ed37fd37fd4acd70c67c973e460"}, + {file = 
"MarkupSafe-2.1.2-cp37-cp37m-win32.whl", hash = "sha256:7668b52e102d0ed87cb082380a7e2e1e78737ddecdde129acadb0eccc5423859"}, + {file = "MarkupSafe-2.1.2-cp37-cp37m-win_amd64.whl", hash = "sha256:6d6607f98fcf17e534162f0709aaad3ab7a96032723d8ac8750ffe17ae5a0666"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:a806db027852538d2ad7555b203300173dd1b77ba116de92da9afbc3a3be3eed"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:a4abaec6ca3ad8660690236d11bfe28dfd707778e2442b45addd2f086d6ef094"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f03a532d7dee1bed20bc4884194a16160a2de9ffc6354b3878ec9682bb623c54"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4cf06cdc1dda95223e9d2d3c58d3b178aa5dacb35ee7e3bbac10e4e1faacb419"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22731d79ed2eb25059ae3df1dfc9cb1546691cc41f4e3130fe6bfbc3ecbbecfa"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:f8ffb705ffcf5ddd0e80b65ddf7bed7ee4f5a441ea7d3419e861a12eaf41af58"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8db032bf0ce9022a8e41a22598eefc802314e81b879ae093f36ce9ddf39ab1ba"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:2298c859cfc5463f1b64bd55cb3e602528db6fa0f3cfd568d3605c50678f8f03"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win32.whl", hash = "sha256:50c42830a633fa0cf9e7d27664637532791bfc31c731a87b202d2d8ac40c3ea2"}, + {file = "MarkupSafe-2.1.2-cp38-cp38-win_amd64.whl", hash = "sha256:bb06feb762bade6bf3c8b844462274db0c76acc95c52abe8dbed28ae3d44a147"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:99625a92da8229df6d44335e6fcc558a5037dd0a760e11d84be2260e6f37002f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8bca7e26c1dd751236cfb0c6c72d4ad61d986e9a41bbf76cb445f69488b2a2bd"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40627dcf047dadb22cd25ea7ecfe9cbf3bbbad0482ee5920b582f3809c97654f"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40dfd3fefbef579ee058f139733ac336312663c6706d1163b82b3003fb1925c4"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:090376d812fb6ac5f171e5938e82e7f2d7adc2b629101cec0db8b267815c85e2"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2e7821bffe00aa6bd07a23913b7f4e01328c3d5cc0b40b36c0bd81d362faeb65"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:c0a33bc9f02c2b17c3ea382f91b4db0e6cde90b63b296422a939886a7a80de1c"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:b8526c6d437855442cdd3d87eede9c425c4445ea011ca38d937db299382e6fa3"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win32.whl", hash = "sha256:137678c63c977754abe9086a3ec011e8fd985ab90631145dfb9294ad09c102a7"}, + {file = "MarkupSafe-2.1.2-cp39-cp39-win_amd64.whl", hash = "sha256:0576fe974b40a400449768941d5d0858cc624e3249dfd1e0c33674e5c7ca7aed"}, + {file = "MarkupSafe-2.1.2.tar.gz", hash = "sha256:abcabc8c2b26036d62d4c746381a6f7cf60aafcc653198ad678306986b09450d"}, ] [[package]] @@ -1440,20 +1695,20 @@ files = [ [[package]] name = 
"mock" -version = "4.0.3" +version = "5.0.1" description = "Rolling backport of unittest.mock for all Pythons" category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "mock-4.0.3-py3-none-any.whl", hash = "sha256:122fcb64ee37cfad5b3f48d7a7d51875d7031aaf3d8be7c42e2bee25044eee62"}, - {file = "mock-4.0.3.tar.gz", hash = "sha256:7d3fbbde18228f4ff2f1f119a45cdffa458b4c0dee32eb4d2bb2f82554bac7bc"}, + {file = "mock-5.0.1-py3-none-any.whl", hash = "sha256:c41cfb1e99ba5d341fbcc5308836e7d7c9786d302f995b2c271ce2144dece9eb"}, + {file = "mock-5.0.1.tar.gz", hash = "sha256:e3ea505c03babf7977fd21674a69ad328053d414f05e6433c30d8fa14a534a6b"}, ] [package.extras] build = ["blurb", "twine", "wheel"] docs = ["sphinx"] -test = ["pytest (<5.4)", "pytest-cov"] +test = ["pytest", "pytest-cov"] [[package]] name = "more-itertools" @@ -1481,14 +1736,14 @@ files = [ [[package]] name = "mypy-extensions" -version = "0.4.3" -description = "Experimental type system extensions for programs checked with the mypy typechecker." +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." category = "dev" optional = false -python-versions = "*" +python-versions = ">=3.5" files = [ - {file = "mypy_extensions-0.4.3-py2.py3-none-any.whl", hash = "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d"}, - {file = "mypy_extensions-0.4.3.tar.gz", hash = "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8"}, + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] @@ -1551,29 +1806,26 @@ six = ">=1.8.0" [[package]] name = "packaging" -version = "21.3" +version = "23.0" description = "Core utilities for Python packages" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "packaging-21.3-py3-none-any.whl", hash = "sha256:ef103e05f519cdc783ae24ea4e2e0f508a9c99b2d4969652eed6a2e1ea5bd522"}, - {file = "packaging-21.3.tar.gz", hash = "sha256:dd47c42927d89ab911e606518907cc2d3a1f38bbd026385970643f9c5b8ecfeb"}, + {file = "packaging-23.0-py3-none-any.whl", hash = "sha256:714ac14496c3e68c99c29b00845f7a2b85f3bb6f1078fd9f72fd20f0570002b2"}, + {file = "packaging-23.0.tar.gz", hash = "sha256:b6ad297f8907de0fa2fe1ccbd26fdaf387f5f47c7275fedf8cce89f99446cf97"}, ] -[package.dependencies] -pyparsing = ">=2.0.2,<3.0.5 || >3.0.5" - [[package]] name = "pathspec" -version = "0.10.3" +version = "0.11.0" description = "Utility library for gitignore style pattern matching of file paths." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "pathspec-0.10.3-py3-none-any.whl", hash = "sha256:3c95343af8b756205e2aba76e843ba9520a24dd84f68c22b9f93251507509dd6"}, - {file = "pathspec-0.10.3.tar.gz", hash = "sha256:56200de4077d9d0791465aa9095a01d421861e405b5096955051deefd697d6f6"}, + {file = "pathspec-0.11.0-py3-none-any.whl", hash = "sha256:3a66eb970cbac598f9e5ccb5b2cf58930cd8e3ed86d393d541eaf2d8b1705229"}, + {file = "pathspec-0.11.0.tar.gz", hash = "sha256:64d338d4e0914e91c1792321e6907b5a593f1ab1851de7fc269557a21b30ebbc"}, ] [[package]] @@ -1591,28 +1843,16 @@ files = [ [package.extras] test = ["allpairspy", "click", "faker", "pytest (>=6.0.1)", "pytest-discord (>=0.0.6)", "pytest-md-report (>=0.0.12)"] -[[package]] -name = "patool" -version = "1.12" -description = "portable archive file manager" -category = "main" -optional = false -python-versions = "*" -files = [ - {file = "patool-1.12-py2.py3-none-any.whl", hash = "sha256:3f642549c9a78f5b8bef1af92df385b521d360520d1f34e4dba3fd1dee2a21bc"}, - {file = "patool-1.12.tar.gz", hash = "sha256:e3180cf8bfe13bedbcf6f5628452fca0c2c84a3b5ae8c2d3f55720ea04cb1097"}, -] - [[package]] name = "pbr" -version = "5.11.0" +version = "5.11.1" description = "Python Build Reasonableness" category = "dev" optional = false python-versions = ">=2.6" files = [ - {file = "pbr-5.11.0-py2.py3-none-any.whl", hash = "sha256:db2317ff07c84c4c63648c9064a79fe9d9f5c7ce85a9099d4b6258b3db83225a"}, - {file = "pbr-5.11.0.tar.gz", hash = "sha256:b97bc6695b2aff02144133c2e7399d5885223d42b7912ffaec2ca3898e673bfe"}, + {file = "pbr-5.11.1-py2.py3-none-any.whl", hash = "sha256:567f09558bae2b3ab53cb3c1e2e33e726ff3338e7bae3db5dc954b3a44eef12b"}, + {file = "pbr-5.11.1.tar.gz", hash = "sha256:aefc51675b0b533d56bb5fd1c8c6c0522fe31896679882e1c4c63d5e4a0fccb3"}, ] [[package]] @@ -1632,19 +1872,19 @@ flake8 = ">=3.9.1" [[package]] name = "platformdirs" -version = "2.6.0" +version = "3.0.0" description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "platformdirs-2.6.0-py3-none-any.whl", hash = "sha256:1a89a12377800c81983db6be069ec068eee989748799b946cce2a6e80dcc54ca"}, - {file = "platformdirs-2.6.0.tar.gz", hash = "sha256:b46ffafa316e6b83b47489d240ce17173f123a9b9c83282141c3daf26ad9ac2e"}, + {file = "platformdirs-3.0.0-py3-none-any.whl", hash = "sha256:b1d5eb14f221506f50d6604a561f4c5786d9e80355219694a1b244bcd96f4567"}, + {file = "platformdirs-3.0.0.tar.gz", hash = "sha256:8a1228abb1ef82d788f74139988b137e78692984ec7b08eaa6c65f1723af28f9"}, ] [package.extras] -docs = ["furo (>=2022.9.29)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] -test = ["appdirs (==1.4.4)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.22,!=1.23.4)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] [[package]] name = "pluggy" @@ -1664,14 +1904,14 @@ testing = ["pytest", "pytest-benchmark"] [[package]] name = "plumbum" -version = "1.8.0" +version = "1.8.1" description = "Plumbum: shell combinators library" category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "plumbum-1.8.0-py3-none-any.whl", hash = "sha256:0f6b59c8a03bfcdddd1efc04a126062663348e892ce7ddef49ec60e47b9e2c09"}, - {file = "plumbum-1.8.0.tar.gz", hash = "sha256:f1da1f167a2afe731a85de3f56810f424926c0a1a8fd1999ceb2ef20b618246d"}, + {file = "plumbum-1.8.1-py3-none-any.whl", hash = "sha256:07cf5f50bf739e91fb83ce304fc66b41dbd12db4d4546ff5266087dd9d148314"}, + {file = "plumbum-1.8.1.tar.gz", hash = "sha256:88a40fc69247d0cd585e21ca169b3820f46c484535102e16455d2202727bb37b"}, ] [package.dependencies] @@ -1679,19 +1919,19 @@ pywin32 = {version = "*", markers = "platform_system == \"Windows\" and platform [package.extras] dev = ["paramiko", "psutil", "pytest (>=6.0)", "pytest-cov", "pytest-mock", "pytest-timeout"] -docs = ["Sphinx (>=4.0.0)", "sphinx-rtd-theme (>=1.0.0)"] +docs = ["sphinx (>=4.0.0)", "sphinx-rtd-theme (>=1.0.0)"] ssh = ["paramiko"] [[package]] name = "prometheus-client" -version = "0.15.0" +version = "0.16.0" description = "Python client for the Prometheus monitoring system." 
category = "main" optional = false python-versions = ">=3.6" files = [ - {file = "prometheus_client-0.15.0-py3-none-any.whl", hash = "sha256:db7c05cbd13a0f79975592d112320f2605a325969b270a94b71dcabc47b931d2"}, - {file = "prometheus_client-0.15.0.tar.gz", hash = "sha256:be26aa452490cfcf6da953f9436e95a9f2b4d578ca80094b4458930e5f584ab1"}, + {file = "prometheus_client-0.16.0-py3-none-any.whl", hash = "sha256:0836af6eb2c8f4fed712b2f279f6c0a8bbab29f9f4aa15276b91c7cb0d1616ab"}, + {file = "prometheus_client-0.16.0.tar.gz", hash = "sha256:a03e35b359f14dd1630898543e2120addfdeacd1a6069c1367ae90fd93ad3f48"}, ] [package.extras] @@ -1723,57 +1963,64 @@ files = [ [[package]] name = "pycryptodomex" -version = "3.16.0" +version = "3.17" description = "Cryptographic library for Python" category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ - {file = "pycryptodomex-3.16.0-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b3d04c00d777c36972b539fb79958790126847d84ec0129fce1efef250bfe3ce"}, - {file = "pycryptodomex-3.16.0-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:e5a670919076b71522c7d567a9043f66f14b202414a63c3a078b5831ae342c03"}, - {file = "pycryptodomex-3.16.0-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:ce338a9703f54b2305a408fc9890eb966b727ce72b69f225898bb4e9d9ed3f1f"}, - {file = "pycryptodomex-3.16.0-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:a1c0ae7123448ecb034c75c713189cb00ebe2d415b11682865b6c54d200d9c93"}, - {file = "pycryptodomex-3.16.0-cp27-cp27m-win32.whl", hash = "sha256:8851585ff19871e5d69e1790f4ca5f6fd1699d6b8b14413b472a4c0dbc7ea780"}, - {file = "pycryptodomex-3.16.0-cp27-cp27m-win_amd64.whl", hash = "sha256:8dd2d9e3c617d0712ed781a77efd84ea579e76c5f9b2a4bc0b684ebeddf868b2"}, - {file = "pycryptodomex-3.16.0-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:2ad9bb86b355b6104796567dd44c215b3dc953ef2fae5e0bdfb8516731df92cf"}, - {file = "pycryptodomex-3.16.0-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:e25a2f5667d91795f9417cb856f6df724ccdb0cdd5cbadb212ee9bf43946e9f8"}, - {file = "pycryptodomex-3.16.0-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:b0789a8490114a2936ed77c87792cfe77582c829cb43a6d86ede0f9624ba8aa3"}, - {file = "pycryptodomex-3.16.0-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:0da835af786fdd1c9930994c78b23e88d816dc3f99aa977284a21bbc26d19735"}, - {file = "pycryptodomex-3.16.0-cp35-abi3-manylinux2014_aarch64.whl", hash = "sha256:22aed0868622d95179217c298e37ed7410025c7b29dac236d3230617d1e4ed56"}, - {file = "pycryptodomex-3.16.0-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:1619087fb5b31510b0b0b058a54f001a5ffd91e6ffee220d9913064519c6a69d"}, - {file = "pycryptodomex-3.16.0-cp35-abi3-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:70288d9bfe16b2fd0d20b6c365db614428f1bcde7b20d56e74cf88ade905d9eb"}, - {file = "pycryptodomex-3.16.0-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:7993d26dae4d83b8f4ce605bb0aecb8bee330bb3c95475ef06f3694403621e71"}, - {file = "pycryptodomex-3.16.0-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:1cda60207be8c1cf0b84b9138f9e3ca29335013d2b690774a5e94678ff29659a"}, - {file = "pycryptodomex-3.16.0-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:04610536921c1ec7adba158ef570348550c9f3a40bc24be9f8da2ef7ab387981"}, - {file = "pycryptodomex-3.16.0-cp35-abi3-win32.whl", hash = 
"sha256:daa67f5ebb6fbf1ee9c90decaa06ca7fc88a548864e5e484d52b0920a57fe8a5"}, - {file = "pycryptodomex-3.16.0-cp35-abi3-win_amd64.whl", hash = "sha256:231dc8008cbdd1ae0e34645d4523da2dbc7a88c325f0d4a59635a86ee25b41dd"}, - {file = "pycryptodomex-3.16.0-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:4dbbe18cc232b5980c7633972ae5417d0df76fe89e7db246eefd17ef4d8e6d7a"}, - {file = "pycryptodomex-3.16.0-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:893f8a97d533c66cc3a56e60dd3ed40a3494ddb4aafa7e026429a08772f8a849"}, - {file = "pycryptodomex-3.16.0-pp27-pypy_73-win32.whl", hash = "sha256:6a465e4f856d2a4f2a311807030c89166529ccf7ccc65bef398de045d49144b6"}, - {file = "pycryptodomex-3.16.0-pp37-pypy37_pp73-macosx_10_9_x86_64.whl", hash = "sha256:ba57ac7861fd2c837cdb33daf822f2a052ff57dd769a2107807f52a36d0e8d38"}, - {file = "pycryptodomex-3.16.0-pp37-pypy37_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_12_i686.manylinux2010_i686.whl", hash = "sha256:f2b971a7b877348a27dcfd0e772a0343fb818df00b74078e91c008632284137d"}, - {file = "pycryptodomex-3.16.0-pp37-pypy37_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_12_x86_64.manylinux2010_x86_64.whl", hash = "sha256:e2453162f473c1eae4826eb10cd7bce19b5facac86d17fb5f29a570fde145abd"}, - {file = "pycryptodomex-3.16.0-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:0ba28aa97cdd3ff5ed1a4f2b7f5cd04e721166bd75bd2b929e2734433882b583"}, - {file = "pycryptodomex-3.16.0.tar.gz", hash = "sha256:e9ba9d8ed638733c9e95664470b71d624a6def149e2db6cc52c1aca5a6a2df1d"}, + {file = "pycryptodomex-3.17-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:12056c38e49d972f9c553a3d598425f8a1c1d35b2e4330f89d5ff1ffb70de041"}, + {file = "pycryptodomex-3.17-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:ab33c2d9f275e05e235dbca1063753b5346af4a5cac34a51fa0da0d4edfb21d7"}, + {file = "pycryptodomex-3.17-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:caa937ff29d07a665dfcfd7a84f0d4207b2ebf483362fa9054041d67fdfacc20"}, + {file = "pycryptodomex-3.17-cp27-cp27m-manylinux2014_aarch64.whl", hash = "sha256:db23d7341e21b273d2440ec6faf6c8b1ca95c8894da612e165be0b89a8688340"}, + {file = "pycryptodomex-3.17-cp27-cp27m-musllinux_1_1_aarch64.whl", hash = "sha256:f854c8476512cebe6a8681cc4789e4fcff6019c17baa0fd72b459155dc605ab4"}, + {file = "pycryptodomex-3.17-cp27-cp27m-win32.whl", hash = "sha256:a57e3257bacd719769110f1f70dd901c5b6955e9596ad403af11a3e6e7e3311c"}, + {file = "pycryptodomex-3.17-cp27-cp27m-win_amd64.whl", hash = "sha256:d38ab9e53b1c09608ba2d9b8b888f1e75d6f66e2787e437adb1fecbffec6b112"}, + {file = "pycryptodomex-3.17-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:3c2516b42437ae6c7a29ef3ddc73c8d4714e7b6df995b76be4695bbe4b3b5cd2"}, + {file = "pycryptodomex-3.17-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:5c23482860302d0d9883404eaaa54b0615eefa5274f70529703e2c43cc571827"}, + {file = "pycryptodomex-3.17-cp27-cp27mu-manylinux2014_aarch64.whl", hash = "sha256:7a8dc3ee7a99aae202a4db52de5a08aa4d01831eb403c4d21da04ec2f79810db"}, + {file = "pycryptodomex-3.17-cp27-cp27mu-musllinux_1_1_aarch64.whl", hash = "sha256:7cc28dd33f1f3662d6da28ead4f9891035f63f49d30267d3b41194c8778997c8"}, + {file = "pycryptodomex-3.17-cp35-abi3-macosx_10_9_universal2.whl", hash = "sha256:2d4d395f109faba34067a08de36304e846c791808524614c731431ee048fe70a"}, + {file = "pycryptodomex-3.17-cp35-abi3-macosx_10_9_x86_64.whl", hash = "sha256:55eed98b4150a744920597c81b3965b632038781bab8a08a12ea1d004213c600"}, + {file = "pycryptodomex-3.17-cp35-abi3-manylinux2014_aarch64.whl", hash = 
"sha256:7fa0b52df90343fafe319257b31d909be1d2e8852277fb0376ba89d26d2921db"}, + {file = "pycryptodomex-3.17-cp35-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:78f0ddd4adc64baa39b416f3637aaf99f45acb0bcdc16706f0cc7ebfc6f10109"}, + {file = "pycryptodomex-3.17-cp35-abi3-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a4fa037078e92c7cc49f6789a8bac3de06856740bb2038d05f2d9a2e4b165d59"}, + {file = "pycryptodomex-3.17-cp35-abi3-musllinux_1_1_aarch64.whl", hash = "sha256:88b0d5bb87eaf2a31e8a759302b89cf30c97f2f8ca7d83b8c9208abe8acb447a"}, + {file = "pycryptodomex-3.17-cp35-abi3-musllinux_1_1_i686.whl", hash = "sha256:6feedf4b0e36b395329b4186a805f60f900129cdf0170e120ecabbfcb763995d"}, + {file = "pycryptodomex-3.17-cp35-abi3-musllinux_1_1_x86_64.whl", hash = "sha256:7a6651a07f67c28b6e978d63aa3a3fccea0feefed9a8453af3f7421a758461b7"}, + {file = "pycryptodomex-3.17-cp35-abi3-win32.whl", hash = "sha256:32e764322e902bbfac49ca1446604d2839381bbbdd5a57920c9daaf2e0b778df"}, + {file = "pycryptodomex-3.17-cp35-abi3-win_amd64.whl", hash = "sha256:4b51e826f0a04d832eda0790bbd0665d9bfe73e5a4d8ea93b6a9b38beeebe935"}, + {file = "pycryptodomex-3.17-pp27-pypy_73-macosx_10_9_x86_64.whl", hash = "sha256:d4cf0128da167562c49b0e034f09e9cedd733997354f2314837c2fa461c87bb1"}, + {file = "pycryptodomex-3.17-pp27-pypy_73-manylinux2010_x86_64.whl", hash = "sha256:c92537b596bd5bffb82f8964cabb9fef1bca8a28a9e0a69ffd3ec92a4a7ad41b"}, + {file = "pycryptodomex-3.17-pp27-pypy_73-win32.whl", hash = "sha256:599bb4ae4bbd614ca05f49bd4e672b7a250b80b13ae1238f05fd0f09d87ed80a"}, + {file = "pycryptodomex-3.17-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4c4674f4b040321055c596aac926d12f7f6859dfe98cd12f4d9453b43ab6adc8"}, + {file = "pycryptodomex-3.17-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:67a3648025e4ddb72d43addab764336ba2e670c8377dba5dd752e42285440d31"}, + {file = "pycryptodomex-3.17-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40e8a11f578bd0851b02719c862d55d3ee18d906c8b68a9c09f8c564d6bb5b92"}, + {file = "pycryptodomex-3.17-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:23d83b610bd97704f0cd3acc48d99b76a15c8c1540d8665c94d514a49905bad7"}, + {file = "pycryptodomex-3.17-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:fd29d35ac80755e5c0a99d96b44fb9abbd7e871849581ea6a4cb826d24267537"}, + {file = "pycryptodomex-3.17-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64b876d57cb894b31056ad8dd6a6ae1099b117ae07a3d39707221133490e5715"}, + {file = "pycryptodomex-3.17-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ee8bf4fdcad7d66beb744957db8717afc12d176e3fd9c5d106835133881a049b"}, + {file = "pycryptodomex-3.17-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:c84689c73358dfc23f9fdcff2cb9e7856e65e2ce3b5ed8ff630d4c9bdeb1867b"}, + {file = "pycryptodomex-3.17.tar.gz", hash = "sha256:0af93aad8d62e810247beedef0261c148790c52f3cd33643791cc6396dd217c1"}, ] [[package]] name = "pydocstyle" -version = "6.1.1" +version = "6.3.0" description = "Python docstring style checker" category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "pydocstyle-6.1.1-py3-none-any.whl", hash = "sha256:6987826d6775056839940041beef5c08cc7e3d71d63149b48e36727f70144dc4"}, - {file = "pydocstyle-6.1.1.tar.gz", hash = 
"sha256:1d41b7c459ba0ee6c345f2eb9ae827cab14a7533a88c5c6f7e94923f72df92dc"}, + {file = "pydocstyle-6.3.0-py3-none-any.whl", hash = "sha256:118762d452a49d6b05e194ef344a55822987a462831ade91ec5c06fd2169d019"}, + {file = "pydocstyle-6.3.0.tar.gz", hash = "sha256:7ce43f0c0ac87b07494eb9c0b462c0b73e6ff276807f204d6b53edc72b7e44e1"}, ] [package.dependencies] -snowballstemmer = "*" +snowballstemmer = ">=2.2.0" [package.extras] -toml = ["toml"] +toml = ["tomli (>=1.2.3)"] [[package]] name = "pyflakes" @@ -1789,34 +2036,19 @@ files = [ [[package]] name = "pygments" -version = "2.13.0" +version = "2.14.0" description = "Pygments is a syntax highlighting package written in Python." category = "dev" optional = false python-versions = ">=3.6" files = [ - {file = "Pygments-2.13.0-py3-none-any.whl", hash = "sha256:f643f331ab57ba3c9d89212ee4a2dabc6e94f117cf4eefde99a0574720d14c42"}, - {file = "Pygments-2.13.0.tar.gz", hash = "sha256:56a8508ae95f98e2b9bdf93a6be5ae3f7d8af858b43e02c5a2ff083726be40c1"}, + {file = "Pygments-2.14.0-py3-none-any.whl", hash = "sha256:fa7bd7bd2771287c0de303af8bfdfc731f51bd2c6a47ab69d117138893b82717"}, + {file = "Pygments-2.14.0.tar.gz", hash = "sha256:b3ed06a9e8ac9a9aae5a6f5dbe78a8a58655d17b43b93c078f094ddc476ae297"}, ] [package.extras] plugins = ["importlib-metadata"] -[[package]] -name = "pyparsing" -version = "3.0.9" -description = "pyparsing module - Classes and methods to define and execute parsing grammars" -category = "dev" -optional = false -python-versions = ">=3.6.8" -files = [ - {file = "pyparsing-3.0.9-py3-none-any.whl", hash = "sha256:5026bae9a10eeaefb61dab2f09052b9f4307d44aee4eda64b309723d8d206bbc"}, - {file = "pyparsing-3.0.9.tar.gz", hash = "sha256:2b020ecf7d21b687f219b71ecad3631f644a47f01403fa1d1036b0c6416d70fb"}, -] - -[package.extras] -diagrams = ["jinja2", "railroad-diagrams"] - [[package]] name = "pysocks" version = "1.7.1" @@ -1847,14 +2079,14 @@ six = ">=1.5" [[package]] name = "python-slugify" -version = "7.0.0" +version = "8.0.0" description = "A Python slugify application that also handles Unicode" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "python-slugify-7.0.0.tar.gz", hash = "sha256:7a0f21a39fa6c1c4bf2e5984c9b9ae944483fd10b54804cb0e23a3ccd4954f0b"}, - {file = "python_slugify-7.0.0-py2.py3-none-any.whl", hash = "sha256:003aee64f9fd955d111549f96c4b58a3f40b9319383c70fad6277a4974bbf570"}, + {file = "python-slugify-8.0.0.tar.gz", hash = "sha256:f1da83f3c7ab839b3f84543470cd95bdb5a81f1a0b80fed502f78b7dca256062"}, + {file = "python_slugify-8.0.0-py2.py3-none-any.whl", hash = "sha256:51f217508df20a6c166c7821683384b998560adcf8f19a6c2ca8b460528ccd9c"}, ] [package.dependencies] @@ -1865,52 +2097,59 @@ unidecode = ["Unidecode (>=1.1.1)"] [[package]] name = "python-telegram-bot" -version = "12.8" +version = "20.1" description = "We have made you a wrapper you can't refuse" category = "main" optional = false -python-versions = "*" +python-versions = ">=3.7" files = [ - {file = "python-telegram-bot-12.8.tar.gz", hash = "sha256:327186c56469216207dcdf8706892e58e0a62e51ef46f5143268e387bbb4edc3"}, - {file = "python_telegram_bot-12.8-py2.py3-none-any.whl", hash = "sha256:7eebed539ccacf77896cff9e41d1f68746b8ff3ca4da1e2e59285e9c749cb050"}, + {file = "python-telegram-bot-20.1.tar.gz", hash = "sha256:f9caf2ce867926b31717e24f3f9341e619e7be128f6e892fad958c6053ef06e0"}, + {file = "python_telegram_bot-20.1-py3-none-any.whl", hash = "sha256:b5096cf726f02b66a4dd0260027c853ec86ffa30bf651b8ac88b1dc558950b7d"}, ] [package.dependencies] -certifi = "*" 
-cryptography = "*" -decorator = ">=4.4.0" -tornado = ">=5.1" +APScheduler = {version = ">=3.10.0,<3.11.0", optional = true, markers = "extra == \"job-queue\""} +httpx = {version = ">=0.23.3,<0.24.0", extras = ["http2"]} +pytz = {version = ">=2018.6", optional = true, markers = "extra == \"job-queue\""} +tornado = {version = ">=6.2,<7.0", optional = true, markers = "extra == \"webhooks\""} [package.extras] -json = ["ujson"] -socks = ["PySocks"] +all = ["APScheduler (>=3.10.0,<3.11.0)", "aiolimiter (>=1.0.0,<1.1.0)", "cachetools (>=5.3.0,<5.4.0)", "cryptography (>=39.0.1)", "httpx[socks]", "pytz (>=2018.6)", "tornado (>=6.2,<7.0)"] +callback-data = ["cachetools (>=5.3.0,<5.4.0)"] +ext = ["APScheduler (>=3.10.0,<3.11.0)", "aiolimiter (>=1.0.0,<1.1.0)", "cachetools (>=5.3.0,<5.4.0)", "pytz (>=2018.6)", "tornado (>=6.2,<7.0)"] +job-queue = ["APScheduler (>=3.10.0,<3.11.0)", "pytz (>=2018.6)"] +passport = ["cryptography (>=39.0.1)"] +rate-limiter = ["aiolimiter (>=1.0.0,<1.1.0)"] +socks = ["httpx[socks]"] +webhooks = ["tornado (>=6.2,<7.0)"] [[package]] -name = "python-telegram-handler" -version = "2.2.1" -description = "A python logging handler that sends logs via Telegram Bot Api." +name = "pytz" +version = "2022.7.1" +description = "World timezone definitions, modern and historical" category = "main" optional = false python-versions = "*" files = [ - {file = "python-telegram-handler-2.2.1.tar.gz", hash = "sha256:f6e9ca60e15fa4e4595e323cc57362fe20cca3ca16e06158ad726caa48b3b16e"}, + {file = "pytz-2022.7.1-py2.py3-none-any.whl", hash = "sha256:78f4f37d8198e0627c5f1143240bb0206b8691d8d7ac6d78fee88b78733f8c4a"}, + {file = "pytz-2022.7.1.tar.gz", hash = "sha256:01a0681c4b9684a28304615eba55d1ab31ae00bf68ec157ec3708a8182dbbcd0"}, ] -[package.dependencies] -requests = "*" - [[package]] -name = "pytz" -version = "2022.7" -description = "World timezone definitions, modern and historical" -category = "dev" +name = "pytz-deprecation-shim" +version = "0.1.0.post0" +description = "Shims to make deprecation of pytz easier" +category = "main" optional = false -python-versions = "*" +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,>=2.7" files = [ - {file = "pytz-2022.7-py2.py3-none-any.whl", hash = "sha256:93007def75ae22f7cd991c84e02d434876818661f8df9ad5df9e950ff4e52cfd"}, - {file = "pytz-2022.7.tar.gz", hash = "sha256:7ccfae7b4b2c067464a6733c6261673fdb8fd1be905460396b97a073e9fa683a"}, + {file = "pytz_deprecation_shim-0.1.0.post0-py2.py3-none-any.whl", hash = "sha256:8314c9692a636c8eb3bda879b9f119e350e93223ae83e70e80c31675a0fdc1a6"}, + {file = "pytz_deprecation_shim-0.1.0.post0.tar.gz", hash = "sha256:af097bae1b616dde5c5744441e2ddc69e74dfdcb0c263129610d85b87445a59d"}, ] +[package.dependencies] +tzdata = {version = "*", markers = "python_version >= \"3.6\""} + [[package]] name = "pywin32" version = "305" @@ -2004,19 +2243,19 @@ sphinx = ">=1.3.1" [[package]] name = "requests" -version = "2.28.1" +version = "2.28.2" description = "Python HTTP for Humans." 
category = "main" optional = false python-versions = ">=3.7, <4" files = [ - {file = "requests-2.28.1-py3-none-any.whl", hash = "sha256:8fefa2a1a1365bf5520aac41836fbee479da67864514bdb821f31ce07ce65349"}, - {file = "requests-2.28.1.tar.gz", hash = "sha256:7c5599b102feddaa661c826c56ab4fee28bfd17f5abca1ebbe3e7f19d7c97983"}, + {file = "requests-2.28.2-py3-none-any.whl", hash = "sha256:64299f4909223da747622c030b781c0d7811e359c37124b4bd368fb8c6518baa"}, + {file = "requests-2.28.2.tar.gz", hash = "sha256:98b1b2782e3c6c4904938b84c0eb932721069dfdb9134313beff7c83c2df24bf"}, ] [package.dependencies] certifi = ">=2017.4.17" -charset-normalizer = ">=2,<3" +charset-normalizer = ">=2,<4" idna = ">=2.5,<4" PySocks = {version = ">=1.5.6,<1.5.7 || >1.5.7", optional = true, markers = "extra == \"socks\""} urllib3 = ">=1.21.1,<1.27" @@ -2027,14 +2266,14 @@ use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "requests-cache" -version = "0.9.7" +version = "0.9.8" description = "A transparent persistent cache for the requests library" category = "dev" optional = false python-versions = ">=3.7,<4.0" files = [ - {file = "requests_cache-0.9.7-py3-none-any.whl", hash = "sha256:3f57badcd8406ecda7f8eaa8145afd0b180c5ae4ff05165a2c4d40f3dc88a6e5"}, - {file = "requests_cache-0.9.7.tar.gz", hash = "sha256:b7c26ea98143bac7058fad6e773d56c3442eabc0da9ea7480af5edfc134ff515"}, + {file = "requests_cache-0.9.8-py3-none-any.whl", hash = "sha256:3a16021a4b5014b5b32af9c34f07cb911e99a69074d664dfd4fddb62a2997c21"}, + {file = "requests_cache-0.9.8.tar.gz", hash = "sha256:eaed4eb5fd5c392ba5e7cfa000d4ab96b1d32c1a1620f37aa558c43741ac362b"}, ] [package.dependencies] @@ -2070,6 +2309,24 @@ files = [ [package.dependencies] docutils = ">=0.11,<1.0" +[[package]] +name = "rfc3986" +version = "1.5.0" +description = "Validating URI References per RFC 3986" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "rfc3986-1.5.0-py2.py3-none-any.whl", hash = "sha256:a86d6e1f5b1dc238b218b012df0aa79409667bb209e58da56d0b94704e712a97"}, + {file = "rfc3986-1.5.0.tar.gz", hash = "sha256:270aaf10d87d0d4e095063c65bf3ddbc6ee3d0b226328ce21e036f946e421835"}, +] + +[package.dependencies] +idna = {version = "*", optional = true, markers = "extra == \"idna2008\""} + +[package.extras] +idna2008 = ["idna"] + [[package]] name = "ruamel-yaml" version = "0.17.21" @@ -2135,20 +2392,20 @@ files = [ [[package]] name = "safety" -version = "2.3.5" +version = "2.3.4" description = "Checks installed dependencies for known vulnerabilities and licenses." 
category = "dev" optional = false python-versions = "*" files = [ - {file = "safety-2.3.5-py3-none-any.whl", hash = "sha256:2227fcac1b22b53c1615af78872b48348661691450aa25d6704a5504dbd1f7e2"}, - {file = "safety-2.3.5.tar.gz", hash = "sha256:a60c11f8952f412cbb165d70cb1f673a3b43a2ba9a93ce11f97e6a4de834aa3a"}, + {file = "safety-2.3.4-py3-none-any.whl", hash = "sha256:6224dcd9b20986a2b2c5e7acfdfba6bca42bb11b2783b24ed04f32317e5167ea"}, + {file = "safety-2.3.4.tar.gz", hash = "sha256:b9e74e794e82f54d11f4091c5d820c4d2d81de9f953bf0b4f33ac8bc402ae72c"}, ] [package.dependencies] Click = ">=8.0.2" dparse = ">=0.6.2" -packaging = ">=21.0,<22.0" +packaging = ">=21.0" requests = "*" "ruamel.yaml" = ">=0.17.21" setuptools = ">=19.3" @@ -2178,20 +2435,31 @@ requests = "*" soundcloud-v2 = ">=1.3.0" termcolor = "*" +[[package]] +name = "sdnotify" +version = "0.3.2" +description = "A pure Python implementation of systemd's service notification protocol (sd_notify)" +category = "main" +optional = false +python-versions = "*" +files = [ + {file = "sdnotify-0.3.2.tar.gz", hash = "sha256:73977fc746b36cc41184dd43c3fe81323e7b8b06c2bb0826c4f59a20c56bb9f1"}, +] + [[package]] name = "setuptools" -version = "65.6.3" +version = "67.3.2" description = "Easily download, build, install, upgrade, and uninstall Python packages" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "setuptools-65.6.3-py3-none-any.whl", hash = "sha256:57f6f22bde4e042978bcd50176fdb381d7c21a9efa4041202288d3737a0c6a54"}, - {file = "setuptools-65.6.3.tar.gz", hash = "sha256:a7620757bf984b58deaf32fc8a4577a9bbc0850cf92c20e1ce41c38c19e5fb75"}, + {file = "setuptools-67.3.2-py3-none-any.whl", hash = "sha256:bb6d8e508de562768f2027902929f8523932fcd1fb784e6d573d2cafac995a48"}, + {file = "setuptools-67.3.2.tar.gz", hash = "sha256:95f00380ef2ffa41d9bba85d95b27689d923c93dfbafed4aecd7cf988a25e012"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8 (<5)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] @@ -2219,6 +2487,18 @@ files = [ {file = "smmap-5.0.0.tar.gz", hash = "sha256:c840e62059cd3be204b0c9c9f74be2c09d5648eddd4580d9314c3ecde0b30936"}, ] +[[package]] +name = "sniffio" +version = "1.3.0" +description = "Sniff out which async library your code is running under" +category = "main" +optional = false +python-versions = ">=3.7" +files = [ + {file = 
"sniffio-1.3.0-py3-none-any.whl", hash = "sha256:eecefdce1e5bbfb7ad2eeaabf7c1eeb404d7757c379bd1f7e5cce9d8bf425384"}, + {file = "sniffio-1.3.0.tar.gz", hash = "sha256:e60305c5e5d314f5389259b7f22aaa33d8f7dee49763119234af3755c55b9101"}, +] + [[package]] name = "snowballstemmer" version = "2.2.0" @@ -2265,39 +2545,39 @@ test = ["coveralls", "pytest", "pytest-dotenv"] [[package]] name = "soupsieve" -version = "2.3.2.post1" +version = "2.4" description = "A modern CSS selector implementation for Beautiful Soup." category = "main" optional = false -python-versions = ">=3.6" +python-versions = ">=3.7" files = [ - {file = "soupsieve-2.3.2.post1-py3-none-any.whl", hash = "sha256:3b2503d3c7084a42b1ebd08116e5f81aadfaea95863628c80a3b774a11b7c759"}, - {file = "soupsieve-2.3.2.post1.tar.gz", hash = "sha256:fc53893b3da2c33de295667a0e19f078c14bf86544af307354de5fcf12a3f30d"}, + {file = "soupsieve-2.4-py3-none-any.whl", hash = "sha256:49e5368c2cda80ee7e84da9dbe3e110b70a4575f196efb74e51b94549d921955"}, + {file = "soupsieve-2.4.tar.gz", hash = "sha256:e28dba9ca6c7c00173e34e4ba57448f0688bb681b7c5e8bf4971daafc093d69a"}, ] [[package]] name = "sphinx" -version = "5.3.0" +version = "6.1.3" description = "Python documentation generator" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "Sphinx-5.3.0.tar.gz", hash = "sha256:51026de0a9ff9fc13c05d74913ad66047e104f56a129ff73e174eb5c3ee794b5"}, - {file = "sphinx-5.3.0-py3-none-any.whl", hash = "sha256:060ca5c9f7ba57a08a1219e547b269fadf125ae25b06b9fa7f66768efb652d6d"}, + {file = "Sphinx-6.1.3.tar.gz", hash = "sha256:0dac3b698538ffef41716cf97ba26c1c7788dba73ce6f150c1ff5b4720786dd2"}, + {file = "sphinx-6.1.3-py3-none-any.whl", hash = "sha256:807d1cb3d6be87eb78a381c3e70ebd8d346b9a25f3753e9947e866b2786865fc"}, ] [package.dependencies] alabaster = ">=0.7,<0.8" babel = ">=2.9" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -docutils = ">=0.14,<0.20" +docutils = ">=0.18,<0.20" imagesize = ">=1.3" importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} Jinja2 = ">=3.0" packaging = ">=21.0" -Pygments = ">=2.12" -requests = ">=2.5.0" +Pygments = ">=2.13" +requests = ">=2.25.0" snowballstemmer = ">=2.0" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" @@ -2308,39 +2588,39 @@ sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] -lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-bugbear", "flake8-comprehensions", "flake8-simplify", "isort", "mypy (>=0.981)", "sphinx-lint", "types-requests", "types-typed-ast"] -test = ["cython", "html5lib", "pytest (>=4.6)", "typed_ast"] +lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"] +test = ["cython", "html5lib", "pytest (>=4.6)"] [[package]] name = "sphinx-autodoc-typehints" -version = "1.19.5" +version = "1.22" description = "Type hints (PEP 484) support for the Sphinx autodoc extension" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "sphinx_autodoc_typehints-1.19.5-py3-none-any.whl", hash = "sha256:ea55b3cc3f485e3a53668bcdd08de78121ab759f9724392fdb5bf3483d786328"}, - {file = "sphinx_autodoc_typehints-1.19.5.tar.gz", hash = "sha256:38a227378e2bc15c84e29af8cb1d7581182da1107111fd1c88b19b5eb7076205"}, + {file = "sphinx_autodoc_typehints-1.22-py3-none-any.whl", hash = "sha256:ef4a8b9d52de66065aa7d3adfabf5a436feb8a2eff07c2ddc31625d8807f2b69"}, + {file = 
"sphinx_autodoc_typehints-1.22.tar.gz", hash = "sha256:71fca2d5eee9b034204e4c686ab20b4d8f5eb9409396216bcae6c87c38e18ea6"}, ] [package.dependencies] sphinx = ">=5.3" [package.extras] -docs = ["furo (>=2022.9.29)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.4)"] -testing = ["covdefaults (>=2.2)", "coverage (>=6.5)", "diff-cover (>=7.0.1)", "nptyping (>=2.3.1)", "pytest (>=7.2)", "pytest-cov (>=4)", "sphobjinv (>=2.2.2)", "typing-extensions (>=4.4)"] +docs = ["furo (>=2022.12.7)", "sphinx (>=6.1.3)", "sphinx-autodoc-typehints (>=1.21)"] +testing = ["covdefaults (>=2.2.2)", "coverage (>=7.0.5)", "diff-cover (>=7.3)", "nptyping (>=2.4.1)", "pytest (>=7.2.1)", "pytest-cov (>=4)", "sphobjinv (>=2.3.1)", "typing-extensions (>=4.4)"] type-comment = ["typed-ast (>=1.5.4)"] [[package]] name = "sphinxcontrib-applehelp" -version = "1.0.2" -description = "sphinxcontrib-applehelp is a sphinx extension which outputs Apple help books" +version = "1.0.4" +description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" category = "dev" optional = false -python-versions = ">=3.5" +python-versions = ">=3.8" files = [ - {file = "sphinxcontrib-applehelp-1.0.2.tar.gz", hash = "sha256:a072735ec80e7675e3f432fcae8610ecf509c5f1869d17e2eecff44389cdbc58"}, - {file = "sphinxcontrib_applehelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:806111e5e962be97c29ec4c1e7fe277bfd19e9652fb1a4392105b43e01af885a"}, + {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, + {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, ] [package.extras] @@ -2365,14 +2645,14 @@ test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" -version = "2.0.0" +version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" category = "dev" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "sphinxcontrib-htmlhelp-2.0.0.tar.gz", hash = "sha256:f5f8bb2d0d629f398bf47d0d69c07bc13b65f75a81ad9e2f71a63d4b7a2f6db2"}, - {file = "sphinxcontrib_htmlhelp-2.0.0-py2.py3-none-any.whl", hash = "sha256:d412243dfb797ae3ec2b59eca0e52dac12e75a241bf0e4eb861e450d06c6ed07"}, + {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, + {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, ] [package.extras] @@ -2428,14 +2708,14 @@ test = ["pytest"] [[package]] name = "stevedore" -version = "4.1.1" +version = "5.0.0" description = "Manage dynamic plugins for Python applications" category = "dev" optional = false python-versions = ">=3.8" files = [ - {file = "stevedore-4.1.1-py3-none-any.whl", hash = "sha256:aa6436565c069b2946fe4ebff07f5041e0c8bf18c7376dd29edf80cf7d524e4e"}, - {file = "stevedore-4.1.1.tar.gz", hash = "sha256:7f8aeb6e3f90f96832c301bff21a7eb5eefbe894c88c506483d355565d88cc1a"}, + {file = "stevedore-5.0.0-py3-none-any.whl", hash = "sha256:bd5a71ff5e5e5f5ea983880e4a1dd1bb47f8feebbb3d95b592398e2f02194771"}, + {file = "stevedore-5.0.0.tar.gz", hash = "sha256:2c428d2338976279e8eb2196f7a94910960d9f7ba2f41f3988511e95ca447021"}, ] [package.dependencies] @@ -2460,14 +2740,14 @@ test = ["pylint", "pytest", "pytest-black", "pytest-cov", "pytest-pylint"] [[package]] name = "termcolor" -version = "2.1.1" +version = "2.2.0" 
description = "ANSI color formatting for output in terminal" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "termcolor-2.1.1-py3-none-any.whl", hash = "sha256:fa852e957f97252205e105dd55bbc23b419a70fec0085708fc0515e399f304fd"}, - {file = "termcolor-2.1.1.tar.gz", hash = "sha256:67cee2009adc6449c650f6bcf3bdeed00c8ba53a8cda5362733c53e0a39fb70b"}, + {file = "termcolor-2.2.0-py3-none-any.whl", hash = "sha256:91ddd848e7251200eac969846cbae2dacd7d71c2871e92733289e7e3666f48e7"}, + {file = "termcolor-2.2.0.tar.gz", hash = "sha256:dfc8ac3f350788f23b2947b3e6cfa5a53b630b612e6cd8965a015a776020b99a"}, ] [package.extras] @@ -2544,16 +2824,48 @@ files = [ [[package]] name = "typing-extensions" -version = "4.4.0" +version = "4.5.0" description = "Backported and Experimental Type Hints for Python 3.7+" category = "dev" optional = false python-versions = ">=3.7" files = [ - {file = "typing_extensions-4.4.0-py3-none-any.whl", hash = "sha256:16fa4864408f655d35ec496218b85f79b3437c829e93320c7c9215ccfd92489e"}, - {file = "typing_extensions-4.4.0.tar.gz", hash = "sha256:1511434bb92bf8dd198c12b1cc812e800d4181cfcb867674e0f8279cc93087aa"}, + {file = "typing_extensions-4.5.0-py3-none-any.whl", hash = "sha256:fb33085c39dd998ac16d1431ebc293a8b3eedd00fd4a32de0ff79002c19511b4"}, + {file = "typing_extensions-4.5.0.tar.gz", hash = "sha256:5cb5f4a79139d699607b3ef622a1dedafa84e115ab0024e0d9c044a9479ca7cb"}, ] +[[package]] +name = "tzdata" +version = "2022.7" +description = "Provider of IANA time zone data" +category = "main" +optional = false +python-versions = ">=2" +files = [ + {file = "tzdata-2022.7-py2.py3-none-any.whl", hash = "sha256:2b88858b0e3120792a3c0635c23daf36a7d7eeeca657c323da299d2094402a0d"}, + {file = "tzdata-2022.7.tar.gz", hash = "sha256:fe5f866eddd8b96e9fcba978f8e503c909b19ea7efda11e52e39494bad3a7bfa"}, +] + +[[package]] +name = "tzlocal" +version = "4.2" +description = "tzinfo object for the local timezone" +category = "main" +optional = false +python-versions = ">=3.6" +files = [ + {file = "tzlocal-4.2-py3-none-any.whl", hash = "sha256:89885494684c929d9191c57aa27502afc87a579be5cdd3225c77c463ea043745"}, + {file = "tzlocal-4.2.tar.gz", hash = "sha256:ee5842fa3a795f023514ac2d801c4a81d1743bbe642e3940143326b3a00addd7"}, +] + +[package.dependencies] +pytz-deprecation-shim = "*" +tzdata = {version = "*", markers = "platform_system == \"Windows\""} + +[package.extras] +devenv = ["black", "pyroma", "pytest-cov", "zest.releaser"] +test = ["pytest (>=4.3)", "pytest-mock (>=3.3)"] + [[package]] name = "unicode-slugify" version = "0.1.5" @@ -2599,14 +2911,14 @@ six = "*" [[package]] name = "urllib3" -version = "1.26.13" +version = "1.26.14" description = "HTTP library with thread-safe connection pooling, file post, and more." 
category = "main" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" files = [ - {file = "urllib3-1.26.13-py2.py3-none-any.whl", hash = "sha256:47cc05d99aaa09c9e72ed5809b60e7ba354e64b59c9c173ac3018642d8bb41fc"}, - {file = "urllib3-1.26.13.tar.gz", hash = "sha256:c083dd0dce68dbfbe1129d5271cb90f9447dea7d52097c6e0126120c521ddea8"}, + {file = "urllib3-1.26.14-py2.py3-none-any.whl", hash = "sha256:75edcdc2f7d85b137124a6c3c9fc3933cdeaa12ecb9a6a959f22797a0feca7e1"}, + {file = "urllib3-1.26.14.tar.gz", hash = "sha256:076907bf8fd355cde77728471316625a4d2f7e713c125f51953bb5b3eecf4f72"}, ] [package.extras] @@ -2743,14 +3055,14 @@ dev = ["black (>=19.3b0)", "pytest (>=4.6.2)"] [[package]] name = "yt-dlp" -version = "2022.11.11" +version = "2023.2.17" description = "A youtube-dl fork with additional features and patches" category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "yt-dlp-2022.11.11.tar.gz", hash = "sha256:f6b962023c17a77151476f0f6ed71be87d017629ba5d9994528bc548521191b6"}, - {file = "yt_dlp-2022.11.11-py2.py3-none-any.whl", hash = "sha256:8bb7bd9ab2e6ecf4db7627e9151ce00572ae7ee24dedc78f611e7467b0ccd7d9"}, + {file = "yt-dlp-2023.2.17.tar.gz", hash = "sha256:9af92de5effc193bdb51216d9ebf28874d96180d202fae752b0d9f2a63380f3a"}, + {file = "yt_dlp-2023.2.17-py2.py3-none-any.whl", hash = "sha256:3b2df037c80922f0f83f63ee2f9253496b4a8668c0fe8d2a836ba9040f853b07"}, ] [package.dependencies] @@ -2763,21 +3075,21 @@ websockets = "*" [[package]] name = "zipp" -version = "3.11.0" +version = "3.14.0" description = "Backport of pathlib-compatible object wrapper for zip files" -category = "dev" +category = "main" optional = false python-versions = ">=3.7" files = [ - {file = "zipp-3.11.0-py3-none-any.whl", hash = "sha256:83a28fcb75844b5c0cdaf5aa4003c2d728c77e05f5aeabe8e95e56727005fbaa"}, - {file = "zipp-3.11.0.tar.gz", hash = "sha256:a7a22e05929290a67401440b39690ae6563279bced5f314609d9d03798f56766"}, + {file = "zipp-3.14.0-py3-none-any.whl", hash = "sha256:188834565033387710d046e3fe96acfc9b5e86cbca7f39ff69cf21a4128198b7"}, + {file = "zipp-3.14.0.tar.gz", hash = "sha256:9e5421e176ef5ab4c0ad896624e87a7b2f07aca746c9b2aa305952800cb8eecb"}, ] [package.extras] -docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)"] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] testing = ["flake8 (<5)", "func-timeout", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "29fce0c89261224ef918b4fcd657027cfef452d2b2267d1b1b89ce4519863cf1" +content-hash = "bbd552326505f16f9ef9ab4170bd40924ed03b3c0dd722b80c7147f88955047b" diff --git a/pyproject.toml b/pyproject.toml index e17ea952d..4d345709a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -2,120 +2,66 @@ requires = ["poetry-core>=1.4.0"] build-backend = "poetry.core.masonry.api" -[tool.isort] -# isort configuration: -# https://github.com/timothycrosley/isort/wiki/isort-Settings -include_trailing_comma = true -use_parentheses = true -# See https://github.com/timothycrosley/isort#multi-line-output-modes -multi_line_output = 3 -# match flake8 max-line-length: -line_length = 180 -[tool.black] -# match flake8 max-line-length: -line-length = 180 -[tool.nitpick] 
-style = "https://raw.githubusercontent.com/wemake-services/wemake-python-styleguide/master/styles/nitpick-style-wemake.toml" - -# TODO flakeheaven -# https://github.com/flakeheaven/flakeheaven -# https://wemake-python-styleguide.readthedocs.io/en/latest/pages/usage/integrations/flakehell.html -# https://github.com/wemake-services/wemake-python-styleguide/blob/master/styles/flakehell.toml -[tool.flakeheaven] -format = "grouped" -show_source = true -statistics = false -doctests = true -enable_extensions = "G" -accept_encodings = "utf-8" -max_complexity = 6 -max_line_length = 180 -ignore = "D100, D104, D401, W504, RST303, RST304, DAR103, DAR203" -# poetry run flakeheaven baseline > .flakeheaven_baseline -baseline = ".flakeheaven_baseline" -[tool.flakeheaven.plugins] -"flake8-*" = ["+*"] -mccabe = ["+*"] -nitpick = ["+*"] -"pep8-naming" = ["+*"] -pycodestyle = ["+*"] -pyflakes = ["+*"] -"wemake-python-styleguide" = ["+*"] - -[tool.tox] -legacy_tox_ini = """ -[tox] -skipsdist = true -envlist = py39, py310, py311 - -[gh-actions] -#tox-gh-actions: run tox only on envs which match github actions workflow python version (one env per job) -python = - 3.9: py39 - 3.10: py310 - 3.11: py311 - -[testenv] -allowlist_externals = - make - poetry -# either use command 'poetry install', -# or use tox-poetry - it will install main and dev deps to venv with poetry -commands = - poetry install --with main,dev --verbose - make test -""" - [tool.poetry] name = "scdlbot" description = "Telegram Bot for downloading MP3 rips of tracks/sets from SoundCloud, Bandcamp, YouTube with tags and artwork" -version = "0.14.2" +version = "0.15.0" license = "MIT" authors = ["George Pchelkin "] readme = "README.rst" -repository = "https://github.com/gpchelkin/scdlbot" homepage = "https://github.com/gpchelkin/scdlbot" documentation = "https://scdlbot.readthedocs.io" +repository = "https://github.com/gpchelkin/scdlbot" keywords = ["scdlbot", "telegram", "bot", "soundcloud", "bandcamp", "youtube", "mixcloud", "yandex", "audio", "music", "download"] classifiers = [ - "Development Status :: 4 - Beta", - "Intended Audience :: End Users/Desktop", - "Topic :: Multimedia :: Sound/Audio", - "Topic :: Internet", - "Operating System :: OS Independent", - "Natural Language :: English", + "Development Status :: 4 - Beta", + "Intended Audience :: Developers", + "Intended Audience :: System Administrators", + "Topic :: Multimedia :: Sound/Audio", + "Topic :: Internet", + "Operating System :: OS Independent", + "Natural Language :: English", ] include = ["AUTHORS.rst", "CONTRIBUTING.md", "CHANGELOG.rst", "LICENSE", "README.rst", "tests/*", "docs/*.rst", "docs/conf.py", "docs/Makefile", "docs/*.jpg", "docs/*.png", "docs/*.gif", "scdlbot/texts/*"] +[tool.poetry.urls] +# https://github.com/python-poetry/poetry/blob/master/docs/pyproject.md#urls +"Issues" = "https://github.com/gpchelkin/scdlbot/issues" +"CI" = "https://github.com/gpchelkin/scdlbot/actions" +"Changelog" = "https://github.com/gpchelkin/scdlbot/releases" +#"Changelog" = "https://github.com/gpchelkin/scdlbot/blob/master/CHANGELOG.rst" + [tool.poetry.scripts] -scdlbot = "scdlbot.__main__:main" +scdlbot = "scdlbot.scdlbot:main" [tool.poetry.dependencies] python = "^3.9" - -# Requirements of project's code: -python-telegram-bot = "12.8" -#python-telegram-bot = "13.11" -python-telegram-handler = "2.2.1" -boltons = "21.0.0" -plumbum = "^1.8.0" -ffmpeg-python = "0.2.0" +python-telegram-bot = { version = "20.1", extras = ["webhooks", "job-queue"] } +#ptbcontrib = { git = 
"https://github.com/python-telegram-bot/ptbcontrib.git", branch = "main", extras = ["postgres-persistence"] } +# https://python-poetry.org/docs/dependency-specification#path-dependencies +#ptbcontrib = { path = "../ptbcontrib/", develop = false } +requests = { version = "^2.28.2", extras = ["socks"] } +boltons = "^21.0.0" mutagen = "^1.46.0" -patool = "1.12" -requests = { version = "^2.28.1", extras = ["socks"] } -prometheus_client = "^0.15.0" +plumbum = "^1.8.1" +ffmpeg-python = "0.2.0" +prometheus_client = "^0.16.0" +sdnotify = "^0.3.2" +fake-useragent = "^1.1.1" +#python-telegram-handler = "^2.2.1" +#patool = "1.12" #pyshorteners = "1.0.1" #transliterate = "1.10.2" -#fake-useragent = { git = "https://github.com/Jordan9675/fake-useragent.git", branch = "master" } ### For publishing to PyPI at release: -yt-dlp = "2022.11.11" +# https://github.com/yt-dlp/yt-dlp/wiki/Forks +yt-dlp = "2023.2.17" ##youtube_dl = "2021.12.17" ##youtube-dlc = "2020.11.11.post3" scdl = "2.7.3" @@ -125,7 +71,6 @@ bandcamp-downloader = "0.0.13" #yt-dlp = { git = "https://github.com/yt-dlp/yt-dlp.git", branch = "master" } ##yt-dlp = { git = "https://github.com/pritam20ps05/yt-dlp.git", branch = "instagram_fix" } ##youtube_dl = { git = "https://github.com/l1ving/youtube-dl.git" } -## https://github.com/ytdl-org/youtube-dl/pull/26684 ##youtube_dl = { git = "https://github.com/gilou/youtube-dl.git", branch = "bandcamp_update" } ##youtube_dl = { git = "https://github.com/gpchelkin/youtube-dl.git", branch = "bandcamp_update" } ##youtube-dlc = { git = "https://github.com/blackjack4494/yt-dlc.git", branch = "master" } @@ -135,28 +80,99 @@ bandcamp-downloader = "0.0.13" ##scdl = { git = "https://github.com/7x11x13/scdl.git", branch = "master" } #bandcamp-downloader = { git = "https://github.com/iheanyi/bandcamp-dl.git", branch = "master" } ##bandcamp-downloader = { git = "https://github.com/gpchelkin/bandcamp-dl.git", branch = "demjson3" } +#fake-useragent = { git = "https://github.com/fake-useragent/fake-useragent.git", tag = "1.1.2" } [tool.poetry.group.dev] # must be false if tox-poetry is used (it doesn't support groups): optional = true + [tool.poetry.group.dev.dependencies] # make additional external tools: poetry, tox, (tox-poetry) # make format: -isort = "^5.11" -black = "^22.12" +isort = "^5.12" +black = "^23.1" # make lint: wemake-python-styleguide = "^0.17" -doc8 = "^1.0.0" -nitpick = "^0.32.0" -safety = "^2.3.5" +doc8 = "^1.1" +nitpick = "^0.32" +safety = "^2.3" flakeheaven = "^3.2" [tool.poetry.group.docs] optional = true + [tool.poetry.group.docs.dependencies] -sphinx = { version = "^5.3" } -sphinx-autodoc-typehints = { version = "^1.19" } -recommonmark = { version = "^0.7" } -m2r2 = { version = "^0.3" } -tomlkit = { version = "^0.11" } -typing-extensions = { version = "^4.4" } +sphinx = "^6.1" +sphinx-autodoc-typehints = "^1.22" +m2r2 = "^0.3" +tomli = "^2.0" +recommonmark = "^0.7" +typing-extensions = "^4.5" + + +[tool.isort] +# isort configuration: +# https://pycqa.github.io/isort/docs/configuration/options.html +# https://pycqa.github.io/isort/docs/configuration/profiles.html +# https://pycqa.github.io/isort/docs/configuration/multi_line_output_modes.html#3-vertical-hanging-indent +multi_line_output = 3 +include_trailing_comma = true +use_parentheses = true +# match flake8 max-line-length: +line_length = 180 + +[tool.black] +# match flake8 max-line-length: +line-length = 180 + +[tool.nitpick] +style = 
"https://raw.githubusercontent.com/wemake-services/wemake-python-styleguide/master/styles/nitpick-style-wemake.toml" + +[tool.flakeheaven] +# TODO flakeheaven +# https://github.com/flakeheaven/flakeheaven +# https://wemake-python-styleguide.readthedocs.io/en/latest/pages/usage/integrations/flakehell.html +# https://github.com/wemake-services/wemake-python-styleguide/blob/master/styles/flakehell.toml +format = "grouped" +show_source = true +statistics = false +doctests = true +enable_extensions = "G" +accept_encodings = "utf-8" +max_complexity = 6 +max_line_length = 180 +ignore = "D100, D104, D401, W504, RST303, RST304, DAR103, DAR203" +# poetry run flakeheaven baseline > .flakeheaven_baseline +baseline = ".flakeheaven_baseline" +[tool.flakeheaven.plugins] +"flake8-*" = ["+*"] +mccabe = ["+*"] +nitpick = ["+*"] +"pep8-naming" = ["+*"] +pycodestyle = ["+*"] +pyflakes = ["+*"] +"wemake-python-styleguide" = ["+*"] + +[tool.tox] +legacy_tox_ini = """ +[tox] +skipsdist = true +envlist = py39, py310, py311 + +[gh-actions] +#tox-gh-actions: run tox only on envs which match github actions workflow python version (one env per job) +python = + 3.9: py39 + 3.10: py310 + 3.11: py311 + +[testenv] +allowlist_externals = + make + poetry +# either use command 'poetry install', +# or use tox-poetry - it will install main and dev deps to venv with poetry +commands = + poetry install --with main,dev --verbose + make test +""" diff --git a/render.yaml b/render.yaml index eaebe4a32..916b4b715 100644 --- a/render.yaml +++ b/render.yaml @@ -2,38 +2,30 @@ # https://www.npmjs.com/package/@renderinc/heroku-import # Schema documented at https://render.com/docs/yaml-spec services: - - type: web # valid values: https://render.com/docs/yaml-spec#type - name: scdlbot - env: docker # valid values: https://render.com/docs/yaml-spec#environment - dockerfilePath: Dockerfile.render - plan: free # optional; defaults to starter + - name: scdlbot + plan: free numInstances: 1 + type: web + env: docker + dockerfilePath: Dockerfile.render envVars: - - key: ALERT_CHAT_IDS + - key: TG_BOT_OWNER_CHAT_ID value: 1306343 - - key: APP_URL - value: https://scdlbot.herokuapp.com/ - - key: DL_DIR - value: /tmp/scdlbot - - key: DL_TIMEOUT - value: 900 + - key: WEBHOOK_ENABLE + value: 1 + - key: WEBHOOK_APP_URL_ROOT + value: https://scdlbot.onrender.com - key: HOST value: 0.0.0.0 - key: HOSTNAME - value: test-heroku + value: test-render + - key: WORKERS + value: 3 + - key: DL_TIMEOUT + value: 900 - key: MAX_CONVERT_FILE_SIZE - value: 300_000_000 + value: 150_000_000 - key: MAX_TG_FILE_SIZE value: 45_000_000 - - key: NO_FLOOD_CHAT_IDS - value: -1001108859218,-1001106680201 - - key: STORE_CHAT_ID - value: -172951900 - - key: SYSLOG_ADDRESS - value: logs6.papertrailapp.com:54882 - - key: SYSLOG_DEBUG - value: 1 - - key: USE_WEBHOOK - value: 1 - - key: WORKERS - value: 2 + - key: LOGLEVEL + value: INFO diff --git a/requirements-dev.txt b/requirements-dev.txt index f9221705a..b910edfd8 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,35 +1,35 @@ appdirs==1.4.4 ; python_version >= "3.9" and python_version < "4.0" astor==0.8.1 ; python_version >= "3.9" and python_version < "4.0" -attrs==22.1.0 ; python_version >= "3.9" and python_version < "4.0" +attrs==22.2.0 ; python_version >= "3.9" and python_version < "4.0" autorepr==0.3.0 ; python_version >= "3.9" and python_version < "4.0" bandit==1.7.4 ; python_version >= "3.9" and python_version < "4.0" -black==22.12.0 ; python_version >= "3.9" and python_version < "4.0" +black==23.1.0 ; 
python_version >= "3.9" and python_version < "4.0" cattrs==22.2.0 ; python_version >= "3.9" and python_version < "4.0" certifi==2022.12.7 ; python_version >= "3.9" and python_version < "4" -charset-normalizer==2.1.1 ; python_version >= "3.9" and python_version < "4" +charset-normalizer==3.0.1 ; python_version >= "3.9" and python_version < "4" click==8.1.3 ; python_version >= "3.9" and python_version < "4.0" colorama==0.4.6 ; python_version >= "3.9" and python_version < "4.0" configupdater==3.1.1 ; python_version >= "3.9" and python_version < "4.0" darglint==1.8.1 ; python_version >= "3.9" and python_version < "4.0" dictdiffer==0.9.0 ; python_version >= "3.9" and python_version < "4.0" -doc8==1.0.0 ; python_version >= "3.9" and python_version < "4.0" +doc8==1.1.1 ; python_version >= "3.9" and python_version < "4.0" docutils==0.19 ; python_version >= "3.9" and python_version < "4.0" dparse==0.6.2 ; python_version >= "3.9" and python_version < "4.0" -dpath==2.1.3 ; python_version >= "3.9" and python_version < "4.0" +dpath==2.1.4 ; python_version >= "3.9" and python_version < "4.0" entrypoints==0.4 ; python_version >= "3.9" and python_version < "4.0" eradicate==2.1.0 ; python_version >= "3.9" and python_version < "4.0" -exceptiongroup==1.0.4 ; python_version >= "3.9" and python_version < "3.11" +exceptiongroup==1.1.0 ; python_version >= "3.9" and python_version < "3.11" flake8-bandit==3.0.0 ; python_version >= "3.9" and python_version < "4.0" flake8-broken-line==0.5.0 ; python_version >= "3.9" and python_version < "4.0" flake8-bugbear==22.12.6 ; python_version >= "3.9" and python_version < "4.0" flake8-commas==2.1.0 ; python_version >= "3.9" and python_version < "4.0" flake8-comprehensions==3.10.1 ; python_version >= "3.9" and python_version < "4.0" flake8-debugger==4.1.2 ; python_version >= "3.9" and python_version < "4.0" -flake8-docstrings==1.6.0 ; python_version >= "3.9" and python_version < "4.0" +flake8-docstrings==1.7.0 ; python_version >= "3.9" and python_version < "4.0" flake8-eradicate==1.4.0 ; python_version >= "3.9" and python_version < "4.0" flake8-isort==4.2.0 ; python_version >= "3.9" and python_version < "4.0" flake8-polyfill==1.0.2 ; python_version >= "3.9" and python_version < "4.0" -flake8-quotes==3.3.1 ; python_version >= "3.9" and python_version < "4.0" +flake8-quotes==3.3.2 ; python_version >= "3.9" and python_version < "4.0" flake8-rst-docstrings==0.2.7 ; python_version >= "3.9" and python_version < "4.0" flake8-string-format==0.3.0 ; python_version >= "3.9" and python_version < "4.0" flake8==4.0.1 ; python_version >= "3.9" and python_version < "4.0" @@ -37,51 +37,50 @@ flakeheaven==3.2.1 ; python_version >= "3.9" and python_version < "4.0" flatten-dict==0.4.2 ; python_version >= "3.9" and python_version < "4.0" furl==2.1.3 ; python_version >= "3.9" and python_version < "4.0" gitdb==4.0.10 ; python_version >= "3.9" and python_version < "4.0" -gitpython==3.1.29 ; python_version >= "3.9" and python_version < "4.0" -identify==2.5.10 ; python_version >= "3.9" and python_version < "4.0" +gitpython==3.1.31 ; python_version >= "3.9" and python_version < "4.0" +identify==2.5.18 ; python_version >= "3.9" and python_version < "4.0" idna==3.4 ; python_version >= "3.9" and python_version < "4" -isort==5.11.3 ; python_version >= "3.9" and python_version < "4.0" +isort==5.12.0 ; python_version >= "3.9" and python_version < "4.0" jmespath==1.0.1 ; python_version >= "3.9" and python_version < "4.0" loguru==0.6.0 ; python_version >= "3.9" and python_version < "4.0" 
marshmallow-polyfield==5.11 ; python_version >= "3.9" and python_version < "4.0" marshmallow==3.19.0 ; python_version >= "3.9" and python_version < "4.0" mccabe==0.6.1 ; python_version >= "3.9" and python_version < "4.0" more-itertools==9.0.0 ; python_version >= "3.9" and python_version < "4.0" -mypy-extensions==0.4.3 ; python_version >= "3.9" and python_version < "4.0" +mypy-extensions==1.0.0 ; python_version >= "3.9" and python_version < "4.0" nitpick==0.32.0 ; python_version >= "3.9" and python_version < "4.0" orderedmultidict==1.0.1 ; python_version >= "3.9" and python_version < "4.0" -packaging==21.3 ; python_version >= "3.9" and python_version < "4.0" -pathspec==0.10.3 ; python_version >= "3.9" and python_version < "4.0" -pbr==5.11.0 ; python_version >= "3.9" and python_version < "4.0" +packaging==23.0 ; python_version >= "3.9" and python_version < "4.0" +pathspec==0.11.0 ; python_version >= "3.9" and python_version < "4.0" +pbr==5.11.1 ; python_version >= "3.9" and python_version < "4.0" pep8-naming==0.13.2 ; python_version >= "3.9" and python_version < "4.0" -platformdirs==2.6.0 ; python_version >= "3.9" and python_version < "4.0" +platformdirs==3.0.0 ; python_version >= "3.9" and python_version < "4.0" pluggy==1.0.0 ; python_version >= "3.9" and python_version < "4.0" pycodestyle==2.8.0 ; python_version >= "3.9" and python_version < "4.0" -pydocstyle==6.1.1 ; python_version >= "3.9" and python_version < "4.0" +pydocstyle==6.3.0 ; python_version >= "3.9" and python_version < "4.0" pyflakes==2.4.0 ; python_version >= "3.9" and python_version < "4.0" -pygments==2.13.0 ; python_version >= "3.9" and python_version < "4.0" -pyparsing==3.0.9 ; python_version >= "3.9" and python_version < "4.0" -python-slugify==7.0.0 ; python_version >= "3.9" and python_version < "4.0" +pygments==2.14.0 ; python_version >= "3.9" and python_version < "4.0" +python-slugify==8.0.0 ; python_version >= "3.9" and python_version < "4.0" pyyaml==6.0 ; python_version >= "3.9" and python_version < "4.0" -requests-cache==0.9.7 ; python_version >= "3.9" and python_version < "4.0" -requests==2.28.1 ; python_version >= "3.9" and python_version < "4" +requests-cache==0.9.8 ; python_version >= "3.9" and python_version < "4.0" +requests==2.28.2 ; python_version >= "3.9" and python_version < "4" restructuredtext-lint==1.4.0 ; python_version >= "3.9" and python_version < "4.0" ruamel-yaml-clib==0.2.7 ; platform_python_implementation == "CPython" and python_version < "3.11" and python_version >= "3.9" ruamel-yaml==0.17.21 ; python_version >= "3.9" and python_version < "4.0" -safety==2.3.5 ; python_version >= "3.9" and python_version < "4.0" -setuptools==65.6.3 ; python_version >= "3.9" and python_version < "4.0" +safety==2.3.4 ; python_version >= "3.9" and python_version < "4.0" +setuptools==67.3.2 ; python_version >= "3.9" and python_version < "4.0" six==1.16.0 ; python_version >= "3.9" and python_version < "4.0" smmap==5.0.0 ; python_version >= "3.9" and python_version < "4.0" snowballstemmer==2.2.0 ; python_version >= "3.9" and python_version < "4.0" sortedcontainers==2.4.0 ; python_version >= "3.9" and python_version < "4.0" -stevedore==4.1.1 ; python_version >= "3.9" and python_version < "4.0" +stevedore==5.0.0 ; python_version >= "3.9" and python_version < "4.0" strenum==0.4.9 ; python_version >= "3.9" and python_version < "4.0" text-unidecode==1.3 ; python_version >= "3.9" and python_version < "4.0" toml==0.10.2 ; python_version >= "3.9" and python_version < "4.0" -tomli==2.0.1 ; python_version >= "3.9" and 
python_full_version < "3.11.0a7" +tomli==2.0.1 ; python_version >= "3.9" and python_version < "3.11" tomlkit==0.11.6 ; python_version >= "3.9" and python_version < "4.0" -typing-extensions==4.4.0 ; python_version >= "3.9" and python_version < "4.0" +typing-extensions==4.5.0 ; python_version >= "3.9" and python_version < "4.0" url-normalize==1.4.3 ; python_version >= "3.9" and python_version < "4.0" -urllib3==1.26.13 ; python_version >= "3.9" and python_version < "4.0" +urllib3==1.26.14 ; python_version >= "3.9" and python_version < "4.0" wemake-python-styleguide==0.17.0 ; python_version >= "3.9" and python_version < "4.0" win32-setctime==1.1.0 ; python_version >= "3.9" and python_version < "4.0" and sys_platform == "win32" diff --git a/requirements-docs.txt b/requirements-docs.txt index aeec64a53..17a8282e8 100644 --- a/requirements-docs.txt +++ b/requirements-docs.txt @@ -1,33 +1,32 @@ -alabaster==0.7.12 ; python_version >= "3.9" and python_version < "4.0" +alabaster==0.7.13 ; python_version >= "3.9" and python_version < "4.0" babel==2.11.0 ; python_version >= "3.9" and python_version < "4.0" certifi==2022.12.7 ; python_version >= "3.9" and python_version < "4" -charset-normalizer==2.1.1 ; python_version >= "3.9" and python_version < "4" +charset-normalizer==3.0.1 ; python_version >= "3.9" and python_version < "4" colorama==0.4.6 ; python_version >= "3.9" and python_version < "4.0" and sys_platform == "win32" commonmark==0.9.1 ; python_version >= "3.9" and python_version < "4.0" docutils==0.19 ; python_version >= "3.9" and python_version < "4.0" idna==3.4 ; python_version >= "3.9" and python_version < "4" imagesize==1.4.1 ; python_version >= "3.9" and python_version < "4.0" -importlib-metadata==5.1.0 ; python_version >= "3.9" and python_version < "3.10" +importlib-metadata==6.0.0 ; python_version >= "3.9" and python_version < "3.10" jinja2==3.1.2 ; python_version >= "3.9" and python_version < "4.0" -m2r2==0.3.3 ; python_version >= "3.9" and python_version < "4.0" -markupsafe==2.1.1 ; python_version >= "3.9" and python_version < "4.0" +m2r2==0.3.3.post2 ; python_version >= "3.9" and python_version < "4.0" +markupsafe==2.1.2 ; python_version >= "3.9" and python_version < "4.0" mistune==0.8.4 ; python_version >= "3.9" and python_version < "4.0" -packaging==21.3 ; python_version >= "3.9" and python_version < "4.0" -pygments==2.13.0 ; python_version >= "3.9" and python_version < "4.0" -pyparsing==3.0.9 ; python_version >= "3.9" and python_version < "4.0" -pytz==2022.7 ; python_version >= "3.9" and python_version < "4.0" +packaging==23.0 ; python_version >= "3.9" and python_version < "4.0" +pygments==2.14.0 ; python_version >= "3.9" and python_version < "4.0" +pytz==2022.7.1 ; python_version >= "3.9" and python_version < "4.0" recommonmark==0.7.1 ; python_version >= "3.9" and python_version < "4.0" -requests==2.28.1 ; python_version >= "3.9" and python_version < "4" +requests==2.28.2 ; python_version >= "3.9" and python_version < "4" snowballstemmer==2.2.0 ; python_version >= "3.9" and python_version < "4.0" -sphinx-autodoc-typehints==1.19.5 ; python_version >= "3.9" and python_version < "4.0" -sphinx==5.3.0 ; python_version >= "3.9" and python_version < "4.0" -sphinxcontrib-applehelp==1.0.2 ; python_version >= "3.9" and python_version < "4.0" +sphinx-autodoc-typehints==1.22 ; python_version >= "3.9" and python_version < "4.0" +sphinx==6.1.3 ; python_version >= "3.9" and python_version < "4.0" +sphinxcontrib-applehelp==1.0.4 ; python_version >= "3.9" and python_version < "4.0" 
sphinxcontrib-devhelp==1.0.2 ; python_version >= "3.9" and python_version < "4.0" -sphinxcontrib-htmlhelp==2.0.0 ; python_version >= "3.9" and python_version < "4.0" +sphinxcontrib-htmlhelp==2.0.1 ; python_version >= "3.9" and python_version < "4.0" sphinxcontrib-jsmath==1.0.1 ; python_version >= "3.9" and python_version < "4.0" sphinxcontrib-qthelp==1.0.3 ; python_version >= "3.9" and python_version < "4.0" sphinxcontrib-serializinghtml==1.1.5 ; python_version >= "3.9" and python_version < "4.0" -tomlkit==0.11.6 ; python_version >= "3.9" and python_version < "4.0" -typing-extensions==4.4.0 ; python_version >= "3.9" and python_version < "4.0" -urllib3==1.26.13 ; python_version >= "3.9" and python_version < "4" -zipp==3.11.0 ; python_version >= "3.9" and python_version < "3.10" +tomli==2.0.1 ; python_version >= "3.9" and python_version < "4.0" +typing-extensions==4.5.0 ; python_version >= "3.9" and python_version < "4.0" +urllib3==1.26.14 ; python_version >= "3.9" and python_version < "4" +zipp==3.14.0 ; python_version >= "3.9" and python_version < "3.10" diff --git a/requirements.txt b/requirements.txt index 5b7026a61..43085bbaf 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,46 +1,61 @@ +anyio==3.6.2 ; python_version >= "3.9" and python_version < "4.0" +apscheduler==3.10.0 ; python_version >= "3.9" and python_version < "4.0" args==0.1.0 ; python_version >= "3.9" and python_version < "4.0" bandcamp-downloader==0.0.13 ; python_version >= "3.9" and python_version < "4.0" -beautifulsoup4==4.11.1 ; python_version >= "3.9" and python_version < "4.0" +beautifulsoup4==4.11.2 ; python_version >= "3.9" and python_version < "4.0" boltons==21.0.0 ; python_version >= "3.9" and python_version < "4.0" brotli==1.0.9 ; python_version >= "3.9" and python_version < "4.0" and platform_python_implementation == "CPython" brotlicffi==1.0.9.2 ; python_version >= "3.9" and python_version < "4.0" and platform_python_implementation != "CPython" -certifi==2022.12.7 ; python_version >= "3.9" and python_version < "4.0" -cffi==1.15.1 ; python_version >= "3.9" and python_version < "4.0" +certifi==2022.12.7 ; python_version >= "3.9" and python_version < "4" +cffi==1.15.1 ; python_version >= "3.9" and python_version < "4.0" and platform_python_implementation != "CPython" chardet==5.1.0 ; python_version >= "3.9" and python_version < "4.0" -charset-normalizer==2.1.1 ; python_version >= "3.9" and python_version < "4" +charset-normalizer==3.0.1 ; python_version >= "3.9" and python_version < "4" clint==0.5.1 ; python_version >= "3.9" and python_version < "4.0" -cryptography==38.0.4 ; python_version >= "3.9" and python_version < "4.0" -dacite==1.6.0 ; python_version >= "3.9" and python_version < "4.0" -decorator==5.1.1 ; python_version >= "3.9" and python_version < "4.0" +dacite==1.8.0 ; python_version >= "3.9" and python_version < "4.0" demjson3==3.0.6 ; python_version >= "3.9" and python_version < "4.0" docopt==0.6.2 ; python_version >= "3.9" and python_version < "4.0" +fake-useragent==1.1.1 ; python_version >= "3.9" and python_version < "4.0" ffmpeg-python==0.2.0 ; python_version >= "3.9" and python_version < "4.0" -future==0.18.2 ; python_version >= "3.9" and python_version < "4.0" +future==0.18.3 ; python_version >= "3.9" and python_version < "4.0" +h11==0.14.0 ; python_version >= "3.9" and python_version < "4.0" +h2==4.1.0 ; python_version >= "3.9" and python_version < "4.0" +hpack==4.0.0 ; python_version >= "3.9" and python_version < "4.0" +httpcore==0.16.3 ; python_version >= "3.9" and python_version < "4.0" 
+httpx[http2]==0.23.3 ; python_version >= "3.9" and python_version < "4.0" +hyperframe==6.0.1 ; python_version >= "3.9" and python_version < "4.0" idna==3.4 ; python_version >= "3.9" and python_version < "4" +importlib-resources==5.12.0 ; python_version >= "3.9" and python_version < "3.10" lxml==4.9.2 ; python_version >= "3.9" and python_version < "4.0" -mock==4.0.3 ; python_version >= "3.9" and python_version < "4.0" +mock==5.0.1 ; python_version >= "3.9" and python_version < "4.0" mutagen==1.46.0 ; python_version >= "3.9" and python_version < "4.0" pathvalidate==2.5.2 ; python_version >= "3.9" and python_version < "4.0" -patool==1.12 ; python_version >= "3.9" and python_version < "4.0" -plumbum==1.8.0 ; python_version >= "3.9" and python_version < "4.0" -prometheus-client==0.15.0 ; python_version >= "3.9" and python_version < "4.0" -pycparser==2.21 ; python_version >= "3.9" and python_version < "4.0" -pycryptodomex==3.16.0 ; python_version >= "3.9" and python_version < "4.0" +plumbum==1.8.1 ; python_version >= "3.9" and python_version < "4.0" +prometheus-client==0.16.0 ; python_version >= "3.9" and python_version < "4.0" +pycparser==2.21 ; python_version >= "3.9" and python_version < "4.0" and platform_python_implementation != "CPython" +pycryptodomex==3.17 ; python_version >= "3.9" and python_version < "4.0" pysocks==1.7.1 ; python_version >= "3.9" and python_version < "4" python-dateutil==2.8.2 ; python_version >= "3.9" and python_version < "4.0" -python-telegram-bot==12.8 ; python_version >= "3.9" and python_version < "4.0" -python-telegram-handler==2.2.1 ; python_version >= "3.9" and python_version < "4.0" +python-telegram-bot[job-queue,webhooks]==20.1 ; python_version >= "3.9" and python_version < "4.0" +pytz-deprecation-shim==0.1.0.post0 ; python_version >= "3.9" and python_version < "4.0" +pytz==2022.7.1 ; python_version >= "3.9" and python_version < "4.0" pywin32==305 ; platform_system == "Windows" and platform_python_implementation != "PyPy" and python_version >= "3.9" and python_version < "4.0" -requests==2.28.1 ; python_version >= "3.9" and python_version < "4" -requests[socks]==2.28.1 ; python_version >= "3.9" and python_version < "4" +requests==2.28.2 ; python_version >= "3.9" and python_version < "4" +requests[socks]==2.28.2 ; python_version >= "3.9" and python_version < "4" +rfc3986[idna2008]==1.5.0 ; python_version >= "3.9" and python_version < "4.0" scdl==2.7.3 ; python_version >= "3.9" and python_version < "4.0" +sdnotify==0.3.2 ; python_version >= "3.9" and python_version < "4.0" +setuptools==67.3.2 ; python_version >= "3.9" and python_version < "4.0" six==1.16.0 ; python_version >= "3.9" and python_version < "4.0" +sniffio==1.3.0 ; python_version >= "3.9" and python_version < "4.0" soundcloud-v2==1.3.1 ; python_version >= "3.9" and python_version < "4.0" -soupsieve==2.3.2.post1 ; python_version >= "3.9" and python_version < "4.0" -termcolor==2.1.1 ; python_version >= "3.9" and python_version < "4.0" +soupsieve==2.4 ; python_version >= "3.9" and python_version < "4.0" +termcolor==2.2.0 ; python_version >= "3.9" and python_version < "4.0" tornado==6.2 ; python_version >= "3.9" and python_version < "4.0" +tzdata==2022.7 ; python_version >= "3.9" and python_version < "4.0" +tzlocal==4.2 ; python_version >= "3.9" and python_version < "4.0" unicode-slugify==0.1.5 ; python_version >= "3.9" and python_version < "4.0" unidecode==1.3.6 ; python_version >= "3.9" and python_version < "4.0" -urllib3==1.26.13 ; python_version >= "3.9" and python_version < "4" +urllib3==1.26.14 ; 
python_version >= "3.9" and python_version < "4" websockets==10.4 ; python_version >= "3.9" and python_version < "4.0" -yt-dlp==2022.11.11 ; python_version >= "3.9" and python_version < "4.0" +yt-dlp==2023.2.17 ; python_version >= "3.9" and python_version < "4.0" +zipp==3.14.0 ; python_version >= "3.9" and python_version < "3.10" diff --git a/runtime.txt b/runtime.txt index 335156c09..4b44813f0 100644 --- a/runtime.txt +++ b/runtime.txt @@ -1 +1 @@ -python-3.11.0 +python-3.11.1 diff --git a/scdlbot.service.sample b/scdlbot.service.sample index a75dbafef..317b660be 100644 --- a/scdlbot.service.sample +++ b/scdlbot.service.sample @@ -1,10 +1,9 @@ - # This is systemd service file. How to install service: # sudo cp scdlbot.service.sample /etc/systemd/system/scdlbot.service # sudo nano /etc/systemd/system/scdlbot.service # sudo systemctl daemon-reload # sudo systemctl enable scdlbot -# sudo systemctl start scdlbot +# sudo systemctl restart scdlbot # sudo systemctl status scdlbot [Unit] @@ -15,15 +14,22 @@ After=network.target User=www-data Group=www-data Type=simple -EnvironmentFile=-/etc/default/scdlbot -Environment=PYTHONUNBUFFERED=1 -Environment=XDG_CONFIG_HOME=/scdlbothome +EnvironmentFile=/etc/default/scdlbot +#Environment=SYSTEMD_LOG_LEVEL=debug ExecStart=/usr/local/bin/scdlbot -#StandardOutput=syslog -#StandardError=syslog +#ExecStart=/opt/pyenv/versions/3.11.0/bin/scdlbot +WatchdogSec=300 +#NotifyAccess=all Restart=always -MemoryHigh=60% -MemoryMax=70% +RestartSec=10 +CPUQuotaPeriodSec=1000ms +CPUQuota=165% +MemoryHigh=2000M +MemoryMax=2400M +TasksMax=infinity +LimitAS=infinity +LimitRSS=infinity +LimitNOFILE=65536 [Install] WantedBy=multi-user.target diff --git a/scdlbot/__init__.py b/scdlbot/__init__.py deleted file mode 100644 index ff32063cf..000000000 --- a/scdlbot/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# -*- coding: utf-8 -*- - -"""Top-level package for Music Downloader Telegram Bot.""" - -__author__ = """George Pchelkin""" -__email__ = "george@pchelk.in" -__version__ = "0.14.2" diff --git a/scdlbot/__main__.py b/scdlbot/__main__.py deleted file mode 100755 index f59d0350f..000000000 --- a/scdlbot/__main__.py +++ /dev/null @@ -1,96 +0,0 @@ -# -*- coding: utf-8 -*- - -import logging -import os -from logging.handlers import SysLogHandler - -from prometheus_client import start_http_server -from telegram_handler import TelegramHandler - -from scdlbot.scdlbot import ScdlBot - -logging_handlers = [] - -console_formatter = logging.Formatter("[%(name)s] %(levelname)s: %(message)s") -console_handler = logging.StreamHandler() -console_handler.setFormatter(console_formatter) -console_handler.setLevel(logging.DEBUG) -logging_handlers.append(console_handler) - -tg_bot_token = os.environ["TG_BOT_TOKEN"] -alert_chat_ids = list(map(int, os.getenv("ALERT_CHAT_IDS", "0").split(","))) -telegram_handler = TelegramHandler(token=tg_bot_token, chat_id=str(alert_chat_ids[0])) -telegram_handler.setLevel(logging.WARNING) -logging_handlers.append(telegram_handler) - -syslog_debug = bool(int(os.getenv("SYSLOG_DEBUG", "0"))) -syslog_logging_level = logging.DEBUG if syslog_debug else logging.INFO -syslog_hostname = os.getenv("HOSTNAME", "test-host") -syslog_formatter = logging.Formatter("%(asctime)s " + syslog_hostname + " %(name)s: %(message)s", datefmt="%b %d %H:%M:%S") - -syslog_address = os.getenv("SYSLOG_ADDRESS", "") -if syslog_address: - syslog_host, syslog_udp_port = syslog_address.split(":") - syslog_handler = SysLogHandler(address=(syslog_host, int(syslog_udp_port))) - 
syslog_handler.setFormatter(syslog_formatter) - syslog_handler.setLevel(syslog_logging_level) - logging_handlers.append(syslog_handler) - -logging.basicConfig(format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", datefmt="%Y-%m-%d %H:%M:%S", level=logging.DEBUG, handlers=logging_handlers) - - -def main(): - # expose prometheus/openmetrics metrics: - metrics_host = os.getenv("METRICS_HOST", "127.0.0.1") - metrics_port = int(os.getenv("METRICS_PORT", "8000")) - start_http_server(metrics_port, addr=metrics_host) - - store_chat_id = int(os.getenv("STORE_CHAT_ID", "0")) - no_flood_chat_ids = list(map(int, os.getenv("NO_FLOOD_CHAT_IDS", "0").split(","))) - dl_timeout = int(os.getenv("DL_TIMEOUT", "300")) - dl_dir = os.path.expanduser(os.getenv("DL_DIR", "/tmp/scdlbot")) - chat_storage_file = os.path.expanduser(os.getenv("CHAT_STORAGE", "/tmp/scdlbotdata")) - serve_audio = bool(int(os.getenv("SERVE_AUDIO", "0"))) - app_url = os.getenv("APP_URL", "") - tg_bot_api = os.getenv("TG_BOT_API", "https://api.telegram.org") - max_tg_file_size = int(os.getenv("MAX_TG_FILE_SIZE", "45_000_000")) - max_convert_file_size = int(os.getenv("MAX_CONVERT_FILE_SIZE", "80_000_000")) - proxies = os.getenv("PROXIES", None) - if proxies: - proxies = proxies.split(",") - source_ips = os.getenv("SOURCE_IPS", None) - if source_ips: - source_ips = source_ips.split(",") - cookies_file = os.getenv("COOKIES_FILE", "") - workers = int(os.getenv("WORKERS", 4)) - - scdlbot = ScdlBot( - tg_bot_token, - tg_bot_api, - proxies, - store_chat_id, - no_flood_chat_ids, - alert_chat_ids, - dl_dir, - dl_timeout, - max_tg_file_size, - max_convert_file_size, - chat_storage_file, - app_url, - serve_audio, - cookies_file, - source_ips, - workers, - ) - - use_webhook = bool(int(os.getenv("USE_WEBHOOK", "0"))) - webhook_host = os.getenv("HOST", "127.0.0.1") - webhook_port = int(os.getenv("PORT", "5000")) - cert_file = os.getenv("CERT_FILE", "") - cert_key_file = os.getenv("CERT_KEY_FILE", "") - url_path = os.getenv("URL_PATH", tg_bot_token.replace(":", "")) - scdlbot.start(use_webhook, webhook_host, webhook_port, cert_file, cert_key_file, url_path) - - -if __name__ == "__main__": - main() diff --git a/scdlbot/exceptions.py b/scdlbot/exceptions.py deleted file mode 100644 index ea75c2a0e..000000000 --- a/scdlbot/exceptions.py +++ /dev/null @@ -1,122 +0,0 @@ -class Error(Exception): - """Base class for exceptions in this module.""" - - -class FileNotSupportedError(Error): - """Exception raised for errors in the input. - - Attributes: - expression -- input expression in which the error occurred - message -- explanation of the error - """ - - def __init__(self, file_format): - self.file_format = file_format - - -class FileTooLargeError(Error): - """Exception raised for errors in the input. - - Attributes: - expression -- input expression in which the error occurred - message -- explanation of the error - """ - - def __init__(self, file_size): - self.file_size = file_size - - -class FileSplittedPartiallyError(Error): - """Exception raised for errors in the input. - - Attributes: - expression -- input expression in which the error occurred - message -- explanation of the error - """ - - def __init__(self, file_parts): - self.file_parts = file_parts - - -class FileNotConvertedError(Error): - """Exception raised for errors in the input. 
- - Attributes: - expression -- input expression in which the error occurred - message -- explanation of the error - """ - - def __init__(self): - pass - - -class FileSentPartiallyError(Error): - """Exception raised for errors in the input. - - Attributes: - expression -- input expression in which the error occurred - message -- explanation of the error - """ - - def __init__(self, sent_audio_ids): - self.sent_audio_ids = sent_audio_ids - - -class URLError(Error): - """Exception raised for errors in the input. - - Attributes: - expression -- input expression in which the error occurred - message -- explanation of the error - """ - - def __init__(self): - self.status = "" - - -class URLDirectError(URLError): - """Exception raised for errors in the input. - - Attributes: - expression -- input expression in which the error occurred - message -- explanation of the error - """ - - def __init__(self): - self.status = "direct" - - -class URLCountryError(URLError): - """Exception raised for errors in the input. - - Attributes: - expression -- input expression in which the error occurred - message -- explanation of the error - """ - - def __init__(self): - self.status = "country" - - -class URLLiveError(URLError): - """Exception raised for errors in the input. - - Attributes: - expression -- input expression in which the error occurred - message -- explanation of the error - """ - - def __init__(self): - self.status = "live" - - -class URLTimeoutError(URLError): - """Exception raised for errors in the input. - - Attributes: - expression -- input expression in which the error occurred - message -- explanation of the error - """ - - def __init__(self): - self.status = "timeout" diff --git a/scdlbot/scdlbot.py b/scdlbot/scdlbot.py old mode 100644 new mode 100755 index a30e02cf4..bc76232a1 --- a/scdlbot/scdlbot.py +++ b/scdlbot/scdlbot.py @@ -1,628 +1,956 @@ -# -*- coding: utf-8 -*- +import asyncio +import concurrent.futures +import datetime -"""Main module.""" - -import gc +# import gc +import logging +import os import pathlib import random -import shelve import shutil -from datetime import datetime -from multiprocessing import Process, Queue -from queue import Empty +import tempfile +import threading +import time +import traceback +from logging.handlers import SysLogHandler from subprocess import PIPE, TimeoutExpired # skipcq: BAN-B404 -from urllib.parse import urljoin, urlparse +from urllib.parse import urljoin from uuid import uuid4 import ffmpeg -from boltons.urlutils import find_all_links -from mutagen.id3 import ID3 +import pkg_resources +import prometheus_client +import requests +import sdnotify +from fake_useragent import UserAgent + +# from boltons.urlutils import find_all_links +from mutagen.id3 import ID3, ID3v1SaveOptions from mutagen.mp3 import EasyMP3 as MP3 -from prometheus_client import Summary -from telegram import Chat, ChatAction, ChatMember, InlineKeyboardButton, InlineKeyboardMarkup, Message, MessageEntity, Update -from telegram.error import BadRequest, ChatMigrated, NetworkError, TelegramError, TimedOut, Unauthorized -from telegram.ext import CallbackContext, CallbackQueryHandler, CommandHandler, Filters, MessageHandler, Updater -from telegram.ext.dispatcher import run_async +from telegram import Bot, Chat, ChatMember, InlineKeyboardButton, InlineKeyboardMarkup, MessageEntity, Update +from telegram.constants import ChatAction +from telegram.error import BadRequest, ChatMigrated, Forbidden, NetworkError, TelegramError, TimedOut +from telegram.ext import Application, 
ApplicationBuilder, CallbackQueryHandler, CommandHandler, ContextTypes, MessageHandler, PicklePersistence, filters +from telegram.request import HTTPXRequest + +# from telegram_handler import TelegramHandler + +# Support different old versions just in case: +# https://github.com/yt-dlp/yt-dlp/wiki/Forks +try: + import yt_dlp as ydl +except ImportError: + try: + import youtube_dl as ydl + except ImportError: + import youtube_dlc as ydl + +from boltons.urlutils import URL +from plumbum import ProcessExecutionError, local + +TG_BOT_TOKEN = os.environ["TG_BOT_TOKEN"] +TG_BOT_API = os.getenv("TG_BOT_API", "https://api.telegram.org") +# https://github.com/python-telegram-bot/python-telegram-bot/wiki/Local-Bot-API-Server +# https://github.com/tdlib/telegram-bot-api#usage +TG_BOT_API_LOCAL_MODE = False +if "TG_BOT_API_LOCAL_MODE" in os.environ: + TG_BOT_API_LOCAL_MODE = bool(int(os.getenv("TG_BOT_API_LOCAL_MODE", "0"))) +elif "127.0.0.1" in TG_BOT_API or "localhost" in TG_BOT_API: + TG_BOT_API_LOCAL_MODE = True +HTTP_VERSION = "2" +if TG_BOT_API_LOCAL_MODE: + HTTP_VERSION = "1.1" +TG_BOT_OWNER_CHAT_ID = int(os.getenv("TG_BOT_OWNER_CHAT_ID", "0")) + +CHAT_STORAGE = os.path.expanduser(os.getenv("CHAT_STORAGE", "/tmp/scdlbot.pickle")) +DL_DIR = os.path.expanduser(os.getenv("DL_DIR", "/tmp/scdlbot")) +BIN_PATH = os.getenv("BIN_PATH", "") +scdl_bin = local[os.path.join(BIN_PATH, "scdl")] +bcdl_bin = local[os.path.join(BIN_PATH, "bandcamp-dl")] +WORKERS = int(os.getenv("WORKERS", 2)) +EXECUTOR = concurrent.futures.ProcessPoolExecutor(max_workers=WORKERS) +DL_TIMEOUT = int(os.getenv("DL_TIMEOUT", 300)) +CHECK_URL_TIMEOUT = int(os.getenv("CHECK_URL_TIMEOUT", 30)) +# Timeouts: https://www.python-httpx.org/advanced/ +COMMON_CONNECTION_TIMEOUT = int(os.getenv("COMMON_CONNECTION_TIMEOUT", 10)) +MAX_TG_FILE_SIZE = int(os.getenv("MAX_TG_FILE_SIZE", "45_000_000")) +MAX_CONVERT_FILE_SIZE = int(os.getenv("MAX_CONVERT_FILE_SIZE", "80_000_000")) +NO_FLOOD_CHAT_IDS = list(map(int, os.getenv("NO_FLOOD_CHAT_IDS", "0").split(","))) +COOKIES_FILE = os.getenv("COOKIES_FILE", None) +PROXIES = [] +if "PROXIES" in os.environ: + PROXIES = [None if x == "direct" else x for x in os.getenv("PROXIES").split(",")] +SOURCE_IPS = [] +if "SOURCE_IPS" in os.environ: + SOURCE_IPS = os.getenv("SOURCE_IPS").split(",") +BLACKLIST_TELEGRAM_DOMAINS = [ + "telegram.org", + "telegram.me", + "t.me", + "telegram.dog", + "telegra.ph", + "te.legra.ph", + "graph.org", + "tdesktop.com", + "desktop.telegram.org", + "telesco.pe", + "contest.com", + "contest.dev", +] +WHITELIST_DOMAINS = {} +if "WHITELIST_DOMAINS" in os.environ: + WHITELIST_DOMAINS = set(x for x in os.getenv("WHITELIST_DOMAINS").split(",")) +BLACKLIST_DOMAINS = {} +if "BLACKLIST_DOMAINS" in os.environ: + BLACKLIST_DOMAINS = set(x for x in os.getenv("BLACKLIST_DOMAINS").split(",")) +WHITELIST_CHATS = [] +if "WHITELIST_CHATS" in os.environ: + try: + WHITELIST_CHATS = set(int(x) for x in os.getenv("WHITELIST_CHATS").split(",")) + except ValueError: + raise ValueError("Your whitelisted chats list does not contain valid integers.") +BLACKLIST_CHATS = [] +if "BLACKLIST_CHATS" in os.environ: + try: + BLACKLIST_CHATS = set(int(x) for x in os.getenv("BLACKLIST_CHATS").split(",")) + except ValueError: + raise ValueError("Your blacklisted chats list does not contain valid integers.") -from scdlbot.utils import * +# Webhook: +WEBHOOK_ENABLE = bool(int(os.getenv("WEBHOOK_ENABLE", "0"))) +WEBHOOK_HOST = os.getenv("HOST", "127.0.0.1") +WEBHOOK_PORT = int(os.getenv("PORT", "5000")) 
+WEBHOOK_APP_URL_ROOT = os.getenv("WEBHOOK_APP_URL_ROOT", "") +WEBHOOK_APP_URL_PATH = os.getenv("WEBHOOK_APP_URL_PATH", TG_BOT_TOKEN.replace(":", "")) +WEBHOOK_CERT_FILE = os.getenv("WEBHOOK_CERT_FILE", None) +WEBHOOK_KEY_FILE = os.getenv("WEBHOOK_KEY_FILE", None) +WEBHOOK_SECRET_TOKEN = os.getenv("WEBHOOK_SECRET_TOKEN", None) +# Prometheus metrics: +METRICS_HOST = os.getenv("METRICS_HOST", "127.0.0.1") +METRICS_PORT = int(os.getenv("METRICS_PORT", "8000")) +REGISTRY = prometheus_client.CollectorRegistry() +EXECUTOR_TASKS_REMAINING = prometheus_client.Gauge( + "executor_tasks_remaining", + "Value: executor_tasks_remaining", + registry=REGISTRY, +) +BOT_REQUESTS = prometheus_client.Counter( + "bot_requests_total", + "Value: bot_requests_total", + labelnames=["type", "chat_type", "mode"], + registry=REGISTRY, +) + +# Logging: +logging_handlers = [] +LOGLEVEL = os.getenv("LOGLEVEL", "INFO").upper() +HOSTNAME = os.getenv("HOSTNAME", "scdlbot-host") + +console_formatter = logging.Formatter("[%(name)s] %(levelname)s: %(message)s") +console_handler = logging.StreamHandler() +console_handler.setFormatter(console_formatter) +console_handler.setLevel(LOGLEVEL) +logging_handlers.append(console_handler) + +SYSLOG_ADDRESS = os.getenv("SYSLOG_ADDRESS", None) +if SYSLOG_ADDRESS: + syslog_formatter = logging.Formatter("%(asctime)s " + HOSTNAME + " %(name)s: %(message)s", datefmt="%b %d %H:%M:%S") + syslog_host, syslog_udp_port = SYSLOG_ADDRESS.split(":") + syslog_handler = SysLogHandler(address=(syslog_host, int(syslog_udp_port))) + syslog_handler.setFormatter(syslog_formatter) + syslog_handler.setLevel(LOGLEVEL) + logging_handlers.append(syslog_handler) + +# telegram_handler = TelegramHandler(token=TG_BOT_TOKEN, chat_id=str(TG_BOT_OWNER_CHAT_ID)) +# telegram_handler.setLevel(logging.WARNING) +# logging_handlers.append(telegram_handler) + +logging.basicConfig( + format="%(asctime)s - %(name)s - %(levelname)s - %(message)s", + datefmt="%Y-%m-%d %H:%M:%S", + level=LOGLEVEL, + handlers=logging_handlers, +) logger = logging.getLogger(__name__) -REQUEST_TIME = Summary("request_processing_seconds", "Time spent processing request") - - -class ScdlBot: - def __init__( - self, - tg_bot_token, - tg_bot_api="https://api.telegram.org", - proxies=None, - store_chat_id=None, - no_flood_chat_ids=None, - alert_chat_ids=None, - dl_dir="/tmp/scdlbot", - dl_timeout=300, - max_tg_file_size=45_000_000, - max_convert_file_size=80_000_000, - chat_storage_file="/tmp/scdlbotdata", - app_url=None, - serve_audio=False, - cookies_file=None, - source_ips=None, - workers=4, - ): - self.SITES = { - "sc": "soundcloud", - "scapi": "api.soundcloud", - "bc": "bandcamp", - "yt": "youtu", - } - self.APP_URL = app_url - self.DL_TIMEOUT = dl_timeout - self.TG_BOT_API = tg_bot_api - self.MAX_TG_FILE_SIZE = max_tg_file_size - self.MAX_CONVERT_FILE_SIZE = max_convert_file_size - self.SERVE_AUDIO = serve_audio - if self.SERVE_AUDIO: - self.MAX_TG_FILE_SIZE = 19_000_000 - self.HELP_TEXT = get_response_text("help.tg.md") - self.SETTINGS_TEXT = get_response_text("settings.tg.md") - self.DL_TIMEOUT_TEXT = get_response_text("dl_timeout.txt").format(self.DL_TIMEOUT // 60) - self.WAIT_BIT_TEXT = [get_response_text("wait_bit.txt"), get_response_text("wait_beat.txt"), get_response_text("wait_beet.txt")] - self.NO_AUDIO_TEXT = get_response_text("no_audio.txt") - self.NO_URLS_TEXT = get_response_text("no_urls.txt") - self.OLD_MSG_TEXT = get_response_text("old_msg.txt") - self.REGION_RESTRICTION_TEXT = get_response_text("region_restriction.txt") - 
self.DIRECT_RESTRICTION_TEXT = get_response_text("direct_restriction.txt") - self.LIVE_RESTRICTION_TEXT = get_response_text("live_restriction.txt") - # self.chat_storage = {} - self.chat_storage = shelve.open(chat_storage_file, writeback=True) - for chat_id in no_flood_chat_ids: - self.init_chat(chat_id=chat_id, chat_type=Chat.PRIVATE if chat_id > 0 else Chat.SUPERGROUP, flood="no") - self.ALERT_CHAT_IDS = set(alert_chat_ids) if alert_chat_ids else set() - self.STORE_CHAT_ID = store_chat_id - self.DL_DIR = dl_dir - self.COOKIES_DOWNLOAD_FILE = "/tmp/scdlbot_cookies.txt" - self.proxies = proxies - self.source_ips = source_ips - # https://yandex.com/support/music-app-ios/search-and-listen/listening-abroad.html - self.cookies_file = cookies_file - self.workers = workers - - # if sc_auth_token: - # config = configparser.ConfigParser() - # config['scdl'] = {} - # config['scdl']['path'] = self.DL_DIR - # config['scdl']['auth_token'] = sc_auth_token - # config_dir = os.path.join(os.path.expanduser('~'), '.config', 'scdl') - # config_path = os.path.join(config_dir, 'scdl.cfg') - # os.makedirs(config_dir, exist_ok=True) - # with open(config_path, 'w') as config_file: - # config.write(config_file) - - self.updater = Updater(token=tg_bot_token, base_url=f"{self.TG_BOT_API}/bot", use_context=True, base_file_url=f"{self.TG_BOT_API}/file/bot", workers=self.workers) - dispatcher = self.updater.dispatcher - - start_command_handler = CommandHandler("start", self.help_command_callback) - dispatcher.add_handler(start_command_handler) - help_command_handler = CommandHandler("help", self.help_command_callback) - dispatcher.add_handler(help_command_handler) - settings_command_handler = CommandHandler("settings", self.settings_command_callback) - dispatcher.add_handler(settings_command_handler) - - dl_command_handler = CommandHandler("dl", self.common_command_callback, filters=~Filters.update.edited_message & ~Filters.forwarded) - dispatcher.add_handler(dl_command_handler) - link_command_handler = CommandHandler("link", self.common_command_callback, filters=~Filters.update.edited_message & ~Filters.forwarded) - dispatcher.add_handler(link_command_handler) - message_with_links_handler = MessageHandler( - ~Filters.update.edited_message - & ~Filters.command - & ( - (Filters.text & (Filters.entity(MessageEntity.URL) | Filters.entity(MessageEntity.TEXT_LINK))) - | (Filters.caption & (Filters.caption_entity(MessageEntity.URL) | Filters.caption_entity(MessageEntity.TEXT_LINK))) - ), - self.common_command_callback, - ) - dispatcher.add_handler(message_with_links_handler) +# Systemd watchdog monitoring: +SYSTEMD_NOTIFIER = sdnotify.SystemdNotifier() - button_query_handler = CallbackQueryHandler(self.button_query_callback) - dispatcher.add_handler(button_query_handler) +# TODO randomize User-Agent +# UA = UserAgent() +# UA.update() - unknown_handler = MessageHandler(Filters.command, self.unknown_command_callback) - dispatcher.add_handler(unknown_handler) - blacklist_whitelist_handler = MessageHandler(Filters.status_update.new_chat_members, self.blacklist_whitelist) - dispatcher.add_handler(blacklist_whitelist_handler) +# Text constants from resources: +def get_response_text(file_name): + # https://stackoverflow.com/a/20885799/2490759 + path = "/".join(("texts", file_name)) + return pkg_resources.resource_string(__name__, path).decode("UTF-8") - dispatcher.add_error_handler(self.error_callback) - self.bot_username = self.updater.bot.get_me().username - self.RANT_TEXT_PRIVATE = "Read /help to learn how to use me" - 
self.RANT_TEXT_PUBLIC = "[Start me in PM to read help and learn how to use me](t.me/{}?start=1)".format(self.bot_username) +HELP_TEXT = get_response_text("help.tg.md") +SETTINGS_TEXT = get_response_text("settings.tg.md") +DL_TIMEOUT_TEXT = get_response_text("dl_timeout.txt").format(DL_TIMEOUT // 60) +WAIT_BIT_TEXT = [get_response_text("wait_bit.txt"), get_response_text("wait_beat.txt"), get_response_text("wait_beet.txt")] +NO_URLS_TEXT = get_response_text("no_urls.txt") +FAILED_TEXT = get_response_text("failed.txt") +REGION_RESTRICTION_TEXT = get_response_text("region_restriction.txt") +DIRECT_RESTRICTION_TEXT = get_response_text("direct_restriction.txt") +LIVE_RESTRICTION_TEXT = get_response_text("live_restriction.txt") +OLD_MSG_TEXT = get_response_text("old_msg.txt") +# RANT_TEXT_PRIVATE = "Read /help to learn how to use me" +# RANT_TEXT_PUBLIC = f"[Start me in PM to read help and learn how to use me](t.me/{TG_BOT_USERNAME}?start=1)" - def start(self, use_webhook=False, webhook_host="127.0.0.1", webhook_port=None, cert_file=None, cert_key_file=None, url_path="scdlbot"): - if use_webhook: - self.updater.start_webhook(listen=webhook_host, port=webhook_port, url_path=url_path) - # cert=cert_file if cert_file else None, - # key=cert_key_file if cert_key_file else None, - # webhook_url=urljoin(app_url, url_path)) - self.updater.bot.set_webhook(url=urljoin(self.APP_URL, url_path), certificate=open(cert_file, "rb") if cert_file else None) - else: - self.updater.start_polling() - logger.warning("Bot started") - self.updater.idle() +# Known and supported site domains: +# support soundcloud.com and soundcloud.app.goo.gl links: +DOMAIN_SC = "soundcloud" +DOMAIN_SC_API = "api.soundcloud" +DOMAIN_BC = "bandcamp.com" +# support both youtube.com and youtu.be links: +DOMAIN_YT = "youtu" +DOMAIN_YT_BE = "youtu.be" +DOMAIN_TT = "tiktok.com" +DOMAIN_IG = "instagram.com" +DOMAIN_TW = "twitter.com" +DOMAINS = [DOMAIN_SC, DOMAIN_SC_API, DOMAIN_BC, DOMAIN_YT, DOMAIN_YT_BE, DOMAIN_TT, DOMAIN_IG, DOMAIN_TW] + + +# TODO get rid of these dumb exceptions: +class FileNotSupportedError(Exception): + def __init__(self, file_format): + self.file_format = file_format + + +class FileTooLargeError(Exception): + def __init__(self, file_size): + self.file_size = file_size - def unknown_command_callback(self, update: Update, context: CallbackContext): + +class FileSplittedPartiallyError(Exception): + def __init__(self, file_parts): + self.file_parts = file_parts + + +class FileNotConvertedError(Exception): + def __init__(self): pass - # bot.send_message(chat_id=update.message.chat_id, text="Unknown command") - def error_callback(self, update: Update, context: CallbackContext): # skipcq: PYL-R0201 - try: - raise context.error - except Unauthorized: - # remove update.message.chat_id from conversation list - logger.debug("Update {} caused Unauthorized error: {}".format(update, context.error)) - except BadRequest: - # handle malformed requests - read more below! 
- logger.debug("Update {} caused BadRequest error: {}".format(update, context.error)) - except TimedOut: - # handle slow connection problems - logger.debug("Update {} caused TimedOut error: {}".format(update, context.error)) - except NetworkError: - # handle other connection problems - logger.debug("Update {} caused NetworkError: {}".format(update, context.error)) - except ChatMigrated as e: - # the chat_id of a group has changed, use e.new_chat_id instead - logger.debug("Update {} caused ChatMigrated error: {}".format(update, context.error)) - except TelegramError: - # handle all other telegram related errors - logger.debug("Update {} caused TelegramError: {}".format(update, context.error)) - - def init_chat(self, message=None, chat_id=None, chat_type=None, flood="yes"): - if message: - chat_id = str(message.chat_id) - chat_type = message.chat.type - else: - chat_id = str(chat_id) - if chat_id not in self.chat_storage: - self.chat_storage[chat_id] = {} - if "settings" not in self.chat_storage[chat_id]: - self.chat_storage[chat_id]["settings"] = {} - if "mode" not in self.chat_storage[chat_id]["settings"]: - if chat_type == Chat.PRIVATE: - self.chat_storage[chat_id]["settings"]["mode"] = "dl" - else: - self.chat_storage[chat_id]["settings"]["mode"] = "ask" - if "flood" not in self.chat_storage[chat_id]["settings"]: - self.chat_storage[chat_id]["settings"]["flood"] = flood - if "rant_msg_ids" not in self.chat_storage[chat_id]["settings"]: - self.chat_storage[chat_id]["settings"]["rant_msg_ids"] = [] - self.chat_storage.sync() - # logger.debug("Current chat_storage: %r", self.chat_storage) - - def cleanup_chat(self, chat_id): - chat_msgs = self.chat_storage[str(chat_id)].copy() - for msg_id in chat_msgs: - if msg_id != "settings": - timedelta = datetime.now().replace(tzinfo=None) - self.chat_storage[str(chat_id)][msg_id]["message"].date.replace(tzinfo=None) - if timedelta.days > 0: - self.chat_storage[str(chat_id)].pop(msg_id) - self.chat_storage.sync() - - def rant_and_cleanup(self, bot, chat_id, rant_text, reply_to_message_id=None): - rant_msg = bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, text=rant_text, parse_mode="Markdown", disable_web_page_preview=True) - flood = self.chat_storage[str(chat_id)]["settings"]["flood"] - if flood == "no": - rant_msgs = self.chat_storage[str(chat_id)]["settings"]["rant_msg_ids"].copy() - for rant_msg_id in rant_msgs: - try: - bot.delete_message(chat_id=chat_id, message_id=rant_msg_id) - except: - pass - self.chat_storage[str(chat_id)]["settings"]["rant_msg_ids"].remove(rant_msg_id) - self.chat_storage[str(chat_id)]["settings"]["rant_msg_ids"].append(rant_msg.message_id) - self.chat_storage.sync() - - def help_command_callback(self, update: Update, context: CallbackContext): - if update.channel_post: - message = update.channel_post - elif update.message: - message = update.message - self.init_chat(message) - event_name = "help" - entities = message.parse_entities(types=[MessageEntity.BOT_COMMAND]) - for entity_value in entities.values(): - event_name = entity_value.replace("/", "").replace("@{}".format(self.bot_username), "") + +class FileSentPartiallyError(Exception): + def __init__(self, sent_audio_ids): + self.sent_audio_ids = sent_audio_ids + + +def get_random_wait_text(): + return random.choice(WAIT_BIT_TEXT) + + +def get_link_text(urls): + link_text = "" + for i, url in enumerate(urls): + link_text += "[Source Link #{}]({}) | `{}`\n".format(str(i + 1), url, URL(url).host) + # TODO long link message split in many + 
direct_urls = urls[url].splitlines()[:3] + for idx, direct_url in enumerate(direct_urls): + if direct_url.startswith("http"): + content_type = "" + if "googlevideo" in direct_url: + if "audio" in direct_url: + content_type = "Audio" + else: + content_type = "Video" + link_text += "β€’ {} #{} [Direct Link]({})\n".format(content_type, str(idx + 1), direct_url) + link_text += "\n*Note:* Final download URLs are only guaranteed to work on the same machine/IP where extracted" + return link_text + + +def get_settings_inline_keyboard(chat_data): + mode = chat_data["settings"]["mode"] + flood = chat_data["settings"]["flood"] + allow_unknown_sites = chat_data["settings"]["allow_unknown_sites"] + emoji_radio_selected = "🟒" + emoji_radio_unselected = "🟑" + emoji_toggle_enabled = "βœ…" + emoji_toggle_disabled = "❌" + emoji_close = "❌" + button_dl = InlineKeyboardButton(text=" ".join([emoji_radio_selected if mode == "dl" else emoji_radio_unselected, "Download"]), callback_data=" ".join(["settings", "dl"])) + button_link = InlineKeyboardButton(text=" ".join([emoji_radio_selected if mode == "link" else emoji_radio_unselected, "Links"]), callback_data=" ".join(["settings", "link"])) + button_ask = InlineKeyboardButton(text=" ".join([emoji_radio_selected if mode == "ask" else emoji_radio_unselected, "Ask"]), callback_data=" ".join(["settings", "ask"])) + button_flood = InlineKeyboardButton(text=" ".join([emoji_toggle_enabled if flood else emoji_toggle_disabled, "Captions"]), callback_data=" ".join(["settings", "flood"])) + button_allow_unknown_sites = InlineKeyboardButton( + text=" ".join([emoji_toggle_enabled if allow_unknown_sites else emoji_toggle_disabled, "Unknown sites"]), callback_data=" ".join(["settings", "allow_unknown_sites"]) + ) + button_close = InlineKeyboardButton(text=" ".join([emoji_close, "Close settings"]), callback_data=" ".join(["settings", "close"])) + inline_keyboard = InlineKeyboardMarkup([[button_dl, button_link, button_ask], [button_allow_unknown_sites, button_flood], [button_close]]) + return inline_keyboard + + +def chat_allowed(chat_id): + if WHITELIST_CHATS: + if chat_id not in WHITELIST_CHATS: + return False + if BLACKLIST_CHATS: + if chat_id in BLACKLIST_CHATS: + return False + return True + + +def url_valid_and_allowed(url, allow_unknown_sites=False): + host = url.host + if host in BLACKLIST_TELEGRAM_DOMAINS: + return False + if WHITELIST_DOMAINS: + if host not in WHITELIST_DOMAINS: + return False + if BLACKLIST_DOMAINS: + if host in BLACKLIST_DOMAINS: + return False + if allow_unknown_sites: + return True + if any((site in host for site in DOMAINS)): + return True + else: + return False + + +async def start_help_commands_callback(update: Update, context: ContextTypes.DEFAULT_TYPE): + message = None + if update.channel_post: + message = update.channel_post + elif update.message: + message = update.message + chat_id = update.effective_chat.id + chat_type = update.effective_chat.type + command_name = "help" + # Determine the original command: + entities = message.parse_entities(types=[MessageEntity.BOT_COMMAND]) + for entity_value in entities.values(): + command_name = entity_value.replace("/", "").replace(f"@{context.bot.username}", "") + break + logger.debug(command_name) + BOT_REQUESTS.labels(type=command_name, chat_type=chat_type, mode="None").inc() + await context.bot.send_message(chat_id=chat_id, text=HELP_TEXT, parse_mode="Markdown", disable_web_page_preview=True) + + +async def settings_command_callback(update: Update, context: ContextTypes.DEFAULT_TYPE): + 
command_name = "settings" + chat_id = update.effective_chat.id + chat_type = update.effective_chat.type + logger.debug(command_name) + BOT_REQUESTS.labels(type=command_name, chat_type=chat_type, mode="None").inc() + init_chat_data( + chat_data=context.chat_data, + mode=("dl" if chat_type == Chat.PRIVATE else "ask"), + flood=(chat_id not in NO_FLOOD_CHAT_IDS), + ) + await context.bot.send_message(chat_id=chat_id, parse_mode="Markdown", reply_markup=get_settings_inline_keyboard(context.chat_data), text=SETTINGS_TEXT) + + +async def dl_link_commands_and_messages_callback(update: Update, context: ContextTypes.DEFAULT_TYPE): + message = None + if update.channel_post: + message = update.channel_post + elif update.message: + message = update.message + chat_id = update.effective_chat.id + chat_type = update.effective_chat.type + if not chat_allowed(chat_id): + await context.bot.send_message(chat_id=chat_id, text="This command isn't allowed in this chat.") + return + init_chat_data( + chat_data=context.chat_data, + mode=("dl" if chat_type == Chat.PRIVATE else "ask"), + flood=(chat_id not in NO_FLOOD_CHAT_IDS), + ) + # Determine the original command: + command_entities = message.parse_entities(types=[MessageEntity.BOT_COMMAND]) + allow_unknown_sites = context.chat_data["settings"]["allow_unknown_sites"] + mode = context.chat_data["settings"]["mode"] + command_passed = False + action = None + if command_entities: + command_passed = True + # Try to determine action from command: + action = None + for entity_value in command_entities.values(): + action = entity_value.replace("/", "").replace("@{}".format(context.bot.username), "") break - log_and_track(event_name, message) - chat_id = message.chat_id - chat_type = message.chat.type - reply_to_message_id = message.message_id - flood = self.chat_storage[str(chat_id)]["settings"]["flood"] - if chat_type != Chat.PRIVATE and flood == "no": - self.rant_and_cleanup(context.bot, chat_id, self.RANT_TEXT_PUBLIC, reply_to_message_id=reply_to_message_id) - else: - context.bot.send_message(chat_id=chat_id, text=self.HELP_TEXT, parse_mode="Markdown", disable_web_page_preview=True) - - def get_wait_text(self): - return random.choice(self.WAIT_BIT_TEXT) - - def get_settings_inline_keyboard(self, chat_id): - mode = self.chat_storage[str(chat_id)]["settings"]["mode"] - flood = self.chat_storage[str(chat_id)]["settings"]["flood"] - emoji_yes = "βœ…" - emoji_no = "❌" - button_dl = InlineKeyboardButton(text=" ".join([emoji_yes if mode == "dl" else emoji_no, "Download"]), callback_data=" ".join(["settings", "dl"])) - button_link = InlineKeyboardButton(text=" ".join([emoji_yes if mode == "link" else emoji_no, "Links"]), callback_data=" ".join(["settings", "link"])) - button_ask = InlineKeyboardButton(text=" ".join([emoji_yes if mode == "ask" else emoji_no, "Ask"]), callback_data=" ".join(["settings", "ask"])) - button_flood = InlineKeyboardButton(text=" ".join([emoji_yes if flood == "yes" else emoji_no, "Captions"]), callback_data=" ".join(["settings", "flood"])) - button_close = InlineKeyboardButton(text=" ".join([emoji_no, "Close settings"]), callback_data=" ".join(["settings", "close"])) - inline_keyboard = InlineKeyboardMarkup([[button_dl, button_link, button_ask], [button_flood, button_close]]) - return inline_keyboard - - def settings_command_callback(self, update: Update, context: CallbackContext): - if update.channel_post: - message = update.channel_post - elif update.message: - message = update.message - self.init_chat(message) - log_and_track("settings") - chat_id 
= message.chat_id - context.bot.send_message(chat_id=chat_id, parse_mode="Markdown", reply_markup=self.get_settings_inline_keyboard(chat_id), text=self.SETTINGS_TEXT) - - def common_command_callback(self, update: Update, context: CallbackContext): - if update.channel_post: - message = update.channel_post - elif update.message: - message = update.message - self.init_chat(message) - chat_id = message.chat_id - if not self.is_chat_allowed(chat_id): - context.bot.send_message(chat_id=chat_id, text="This command isn't allowed in this chat.") - return - chat_type = message.chat.type - reply_to_message_id = message.message_id - command_entities = message.parse_entities(types=[MessageEntity.BOT_COMMAND]) - command_passed = False - if not command_entities: - command_passed = False - # if no command then it is just a message and use default mode - mode = self.chat_storage[str(chat_id)]["settings"]["mode"] - else: - command_passed = True - # try to determine mode from command - mode = None - for entity_value in command_entities.values(): - mode = entity_value.replace("/", "").replace("@{}".format(self.bot_username), "") - break - if not mode: - mode = "dl" - if command_passed and not context.args: - rant_text = self.RANT_TEXT_PRIVATE if chat_type == Chat.PRIVATE else self.RANT_TEXT_PUBLIC - rant_text += "\nYou can simply send message with links (to download) OR command as `/{} `.".format(mode) - self.rant_and_cleanup(context.bot, chat_id, rant_text, reply_to_message_id=reply_to_message_id) - return - event_name = ("{}_cmd".format(mode)) if command_passed else ("{}_msg".format(mode)) - log_and_track(event_name, message) - - apologize = False - # apologize and send TYPING: always in PM, only when it's command in non-PM - if chat_type == Chat.PRIVATE or command_passed: - apologize = True - source_ip = None - proxy = None - if self.source_ips: - source_ip = random.choice(self.source_ips) - if self.proxies: - proxy = random.choice(self.proxies) - self.prepare_urls( - message=message, mode=mode, source_ip=source_ip, proxy=proxy, apologize=apologize, chat_id=chat_id, reply_to_message_id=reply_to_message_id, bot=context.bot + # If no command then it is just a message and use message action from settings: + if not action: + action = mode + if action == "silent": + return + if command_passed and not context.args: + # TODO rant for empty commands? 
+ # rant_text = RANT_TEXT_PRIVATE if chat_type == Chat.PRIVATE else RANT_TEXT_PUBLIC + # rant_text += "\nYou can simply send message with links (to download) OR command as `/{} `.".format(mode) + # rant_and_cleanup(context.bot, chat_id, rant_text, reply_to_message_id=reply_to_message_id) + return + command_name = f"{action}_cmd" if command_passed else f"{action}_msg" + logger.debug(command_name) + BOT_REQUESTS.labels(type=command_name, chat_type=chat_type, mode=mode).inc() + apologize = False + # Apologize for fails: always in PM; only when it was explicit command in non-PM: + if chat_type == Chat.PRIVATE or command_passed: + apologize = True + reply_to_message_id = message.message_id + source_ip = None + if SOURCE_IPS: + source_ip = random.choice(SOURCE_IPS) + proxy = None + if PROXIES: + proxy = random.choice(PROXIES) + wait_message_id = None + if action in ["dl", "link"]: + await context.bot.send_chat_action(chat_id=chat_id, action=ChatAction.TYPING) + wait_message = await context.bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, parse_mode="Markdown", text=f"_{get_random_wait_text()}_") + wait_message_id = wait_message.message_id + + urls_dict = {} + + # Get our main running asyncio loop: + loop_main = asyncio.get_event_loop() + + # a) Run heavy task blocking the main running asyncio loop. + # Needs to have timeout signals in function, but they are bad. + # urls_dict = get_direct_urls_dict(message, action, proxy, source_ip, allow_unknown_sites) + + # b) Run heavy task in separate process or thread, but without blocking the main running asyncio loop. + # Function will continue working till the end: https://stackoverflow.com/a/34457515/2490759 + # You may add timeout signals in function, but they are bad. + # If ThreadPoolExecutor - no signals. + # We monitor EXECUTOR process pool task queue, so we use it. 
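A standalone reduction of the executor pattern these comments describe (POOL, probe_url and handle are placeholder names for illustration, not part of this patch): the blocking work is handed to a process pool via run_in_executor, and only the await is bounded by asyncio.wait_for, so a timed-out task is abandoned by the caller while the worker process keeps running.

import asyncio
import concurrent.futures
import time

POOL = concurrent.futures.ProcessPoolExecutor(max_workers=2)


def probe_url(url):
    # Stand-in for the heavy, blocking URL check; runs in a worker process.
    time.sleep(5)
    return {url: "http"}


async def handle(url):
    loop = asyncio.get_running_loop()
    try:
        # Bound only the await; on timeout the pool task itself is not cancelled.
        return await asyncio.wait_for(loop.run_in_executor(POOL, probe_url, url), timeout=30)
    except asyncio.TimeoutError:
        return {}


if __name__ == "__main__":
    print(asyncio.run(handle("https://example.com")))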
+ # FIXME maybe don't use wait_for here because it includes pool queue waiting + + # pool = concurrent.futures.ThreadPoolExecutor() + try: + urls_dict = await asyncio.wait_for( + loop_main.run_in_executor(EXECUTOR, get_direct_urls_dict, message, action, proxy, source_ip, allow_unknown_sites), + timeout=CHECK_URL_TIMEOUT * 10, ) + except asyncio.TimeoutError: + logger.debug("get_direct_urls_dict took too much time and was dropped (but still running)") + except Exception: + logger.debug("get_direct_urls_dict failed for some unhandled reason") + # pool.shutdown(wait=False, cancel_futures=True) - def button_query_callback(self, update: Update, context: CallbackContext): - btn_msg = update.callback_query.message - self.init_chat(btn_msg) - user_id = update.callback_query.from_user.id - btn_msg_id = btn_msg.message_id - chat = btn_msg.chat - chat_id = chat.id - chat_type = chat.type - orig_msg_id, action = update.callback_query.data.split() - if not self.is_chat_allowed(chat_id): - update.callback_query.answer(text="This command isn't allowed in this chat.") - return - if orig_msg_id == "settings": - if chat_type != Chat.PRIVATE: - chat_member_status = chat.get_member(user_id).status - if chat_member_status not in [ChatMember.ADMINISTRATOR, ChatMember.CREATOR] and user_id not in self.ALERT_CHAT_IDS: - log_and_track("settings_fail") - update.callback_query.answer(text="You're not chat admin") - return - log_and_track("settings_{}".format(action), btn_msg) - if action == "close": - context.bot.delete_message(chat_id, btn_msg_id) - else: - setting_changed = False - if action in ["dl", "link", "ask"]: - current_setting = self.chat_storage[str(chat_id)]["settings"]["mode"] - if action != current_setting: - setting_changed = True - self.chat_storage[str(chat_id)]["settings"]["mode"] = action - elif action in ["flood"]: - current_setting = self.chat_storage[str(chat_id)]["settings"]["flood"] - setting_changed = True - self.chat_storage[str(chat_id)]["settings"][action] = "no" if current_setting == "yes" else "yes" - if setting_changed: - self.chat_storage.sync() - update.callback_query.answer(text="Settings changed") - update.callback_query.edit_message_reply_markup(parse_mode="Markdown", reply_markup=self.get_settings_inline_keyboard(chat_id)) - else: - update.callback_query.answer(text="Settings not changed") - - elif orig_msg_id in self.chat_storage[str(chat_id)]: - msg_from_storage = self.chat_storage[str(chat_id)].pop(orig_msg_id) - orig_msg = msg_from_storage["message"] - urls = msg_from_storage["urls"] - source_ip = msg_from_storage["source_ip"] - proxy = msg_from_storage["proxy"] - log_and_track("{}_msg".format(action), orig_msg) - if action == "dl": - update.callback_query.answer(text=self.get_wait_text()) - wait_message = update.callback_query.edit_message_text(parse_mode="Markdown", text=get_italic(self.get_wait_text())) - for url in urls: - self.download_url_and_send( - context.bot, url, urls[url], chat_id=chat_id, reply_to_message_id=orig_msg_id, wait_message_id=wait_message.message_id, source_ip=source_ip, proxy=proxy - ) - elif action == "link": - context.bot.send_message(chat_id=chat_id, reply_to_message_id=orig_msg_id, parse_mode="Markdown", disable_web_page_preview=True, text=get_link_text(urls)) - context.bot.delete_message(chat_id=chat_id, message_id=btn_msg_id) - elif action == "nodl": - context.bot.delete_message(chat_id=chat_id, message_id=btn_msg_id) + logger.debug(f"prepare_urls: urls dict: {urls_dict}") + urls_values = " ".join(urls_dict.values()) + + # Continue only if any 
good direct url status exist (or if we deal with trusted urls): + if action == "dl": + if not urls_dict: + if apologize: + await context.bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, text=NO_URLS_TEXT, parse_mode="Markdown") + await context.bot.delete_message(chat_id=chat_id, message_id=wait_message_id) else: - update.callback_query.answer(text=self.OLD_MSG_TEXT) - context.bot.delete_message(chat_id=chat_id, message_id=btn_msg_id) - - @REQUEST_TIME.time() - @run_async - def prepare_urls(self, message, mode=None, source_ip=None, proxy=None, apologize=None, chat_id=None, reply_to_message_id=None, bot=None): - direct_urls = False - if mode == "link": - direct_urls = True - - if apologize: - bot.send_chat_action(chat_id=chat_id, action=ChatAction.TYPING) - - if isinstance(message, Message): - urls = [] - url_entities = message.parse_entities(types=[MessageEntity.URL]) - url_caption_entities = message.parse_caption_entities(types=[MessageEntity.URL]) - url_entities.update(url_caption_entities) - for entity in url_entities: - url_str = url_entities[entity] - if self.url_valid(url_str): - logger.debug("Entity URL Parsed: %s", url_str) - if "://" not in url_str: - url_str = "http://{}".format(url_str) - urls.append(URL(url_str)) - else: - logger.debug("Entry URL not valid or blacklisted: %s", url_str) - text_link_entities = message.parse_entities(types=[MessageEntity.TEXT_LINK]) - text_link_caption_entities = message.parse_caption_entities(types=[MessageEntity.TEXT_LINK]) - text_link_entities.update(text_link_caption_entities) - for entity in text_link_entities: - url_str = entity.url - if self.url_valid(url_str): - logger.debug("Entity Text Link Parsed: %s", url_str) - urls.append(URL(url_str)) + await context.bot.send_chat_action(chat_id=chat_id, action=ChatAction.TYPING) + for url in urls_dict: + direct_urls_status = urls_dict[url] + if direct_urls_status in ["failed", "restrict_direct", "restrict_region", "restrict_live", "timeout"]: + if direct_urls_status == "failed": + await context.bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, text=FAILED_TEXT, parse_mode="Markdown") + elif direct_urls_status == "timeout": + await context.bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, text=DL_TIMEOUT_TEXT, parse_mode="Markdown") + elif direct_urls_status == "restrict_direct": + await context.bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, text=DIRECT_RESTRICTION_TEXT, parse_mode="Markdown") + elif direct_urls_status == "restrict_region": + await context.bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, text=REGION_RESTRICTION_TEXT, parse_mode="Markdown") + elif direct_urls_status == "restrict_live": + await context.bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, text=LIVE_RESTRICTION_TEXT, parse_mode="Markdown") else: - logger.debug("Entry URL not valid or blacklisted: %s", url_str) + kwargs = { + "bot_options": { + "token": context.bot.token, + "base_url": context.bot.base_url.split("/bot")[0] + "/bot", + "base_file_url": context.bot.base_file_url.split("/file/bot")[0] + "/file/bot", + "local_mode": context.bot.local_mode, + }, + "chat_id": chat_id, + "url": url, + "flood": context.chat_data["settings"]["flood"], + "reply_to_message_id": reply_to_message_id, + "wait_message_id": wait_message_id, + "cookies_file": COOKIES_FILE, + "source_ip": source_ip, + "proxy": proxy, + } + await context.bot.send_chat_action(chat_id=chat_id, 
action=ChatAction.RECORD_VOICE) + # Run heavy task in separate process, "fire and forget": + EXECUTOR.submit(download_url_and_send, **kwargs) + + elif action == "link": + if "http" not in urls_values: + if apologize: + await context.bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, text=NO_URLS_TEXT, parse_mode="Markdown") else: - all_links = find_all_links(message, default_scheme="http") - urls = [link for link in all_links if self.url_valid(link)] - logger.debug(urls) - - urls_dict = {} - for url_item in urls: - # unshorten soundcloud.app.goo.gl and other links, but not tiktok or instagram or youtube: - if "tiktok" in url_item.host or "instagr" in url_item.host or self.SITES["yt"] in url_item.host: - url = url_item + await context.bot.send_chat_action(chat_id=chat_id, action=ChatAction.TYPING) + await context.bot.send_message( + chat_id=chat_id, reply_to_message_id=reply_to_message_id, parse_mode="Markdown", disable_web_page_preview=True, text=get_link_text(urls_dict) + ) + await context.bot.delete_message(chat_id=chat_id, message_id=wait_message_id) + elif action == "ask": + if "http" not in urls_values: + if apologize: + await context.bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, text=NO_URLS_TEXT, parse_mode="Markdown") + else: + url_message_id = str(reply_to_message_id) + context.chat_data[url_message_id] = {"urls": urls_dict, "source_ip": source_ip, "proxy": proxy} + question = "🎢 links found, what to do?" + button_dl = InlineKeyboardButton(text="⬇️ Download", callback_data=" ".join([url_message_id, "dl"])) + button_link = InlineKeyboardButton(text="πŸ”—οΈ Get links", callback_data=" ".join([url_message_id, "link"])) + button_cancel = InlineKeyboardButton(text="❌", callback_data=" ".join([url_message_id, "cancel"])) + inline_keyboard = InlineKeyboardMarkup([[button_dl, button_link, button_cancel]]) + await context.bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, reply_markup=inline_keyboard, text=question) + + +async def button_press_callback(update: Update, context: ContextTypes.DEFAULT_TYPE): + button_message = update.callback_query.message + button_message_id = button_message.message_id + user_id = update.callback_query.from_user.id + chat = update.effective_chat + chat_id = update.effective_chat.id + chat_type = update.effective_chat.type + # get message id and action from button data: + # TODO create separate callbacks by callback query data pattern + url_message_id, button_action = update.callback_query.data.split() + if not chat_allowed(chat_id): + await update.callback_query.answer(text="This command isn't allowed in this chat.") + return + if url_message_id == "settings": + # button on settings message: + if chat_type != Chat.PRIVATE: + chat_member = await chat.get_member(user_id) + # logger.debug(chat_member.status) + if chat_member.status not in [ChatMember.OWNER, ChatMember.ADMINISTRATOR] and user_id != TG_BOT_OWNER_CHAT_ID: + logger.debug("settings_fail") + await update.callback_query.answer(text="You're not chat admin.") + return + command_name = f"settings_{button_action}" + logger.debug(command_name) + BOT_REQUESTS.labels(type=command_name, chat_type=chat_type, mode="None").inc() + if button_action == "close": + await context.bot.delete_message(chat_id, button_message_id) + else: + setting_changed = False + if button_action in ["dl", "link", "ask"]: + # Radio buttons: + current_setting = context.chat_data["settings"]["mode"] + if button_action != current_setting: + setting_changed = True 
+ context.chat_data["settings"]["mode"] = button_action + elif button_action in ["flood", "allow_unknown_sites"]: + # Toggles: + # TODO support multiple settings windows + current_setting = context.chat_data["settings"][button_action] + context.chat_data["settings"][button_action] = not current_setting + setting_changed = True + if setting_changed: + await update.callback_query.answer(text="Settings changed") + await update.callback_query.edit_message_reply_markup(reply_markup=get_settings_inline_keyboard(context.chat_data)) else: - try: - url = URL( - requests.head( - url_item, - allow_redirects=True, - timeout=5, - proxies=dict(http=proxy, https=proxy), - headers={"User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:105.0) Gecko/20100101 Firefox/105.0"}, - ).url - ) - except: - url = url_item - url_text = url.to_text(True) - # FIXME crutch: - url_text = url_text.replace("m.soundcloud.com", "soundcloud.com") - url_parts_num = len([part for part in url.path_parts if part]) - try: - if ( - # SoundCloud: tracks, sets and widget pages, no /you/ pages # TODO private sets are 5 - (self.SITES["sc"] in url.host and (2 <= url_parts_num <= 4 or self.SITES["scapi"] in url_text) and (not "you" in url.path_parts)) - or - # Bandcamp: tracks and albums - (self.SITES["bc"] in url.host and (2 <= url_parts_num <= 2)) - or - # YouTube: videos and playlists - (self.SITES["yt"] in url.host and ("youtu.be" in url.host or "watch" in url.path or "playlist" in url.path)) - ): - if direct_urls or self.SITES["yt"] in url.host: - urls_dict[url_text] = get_direct_urls(url_text, self.cookies_file, self.COOKIES_DOWNLOAD_FILE, source_ip, proxy) - else: - urls_dict[url_text] = "http" - elif not any((site in url.host for site in self.SITES.values())): - urls_dict[url_text] = get_direct_urls(url_text, self.cookies_file, self.COOKIES_DOWNLOAD_FILE, source_ip, proxy) - except ProcessExecutionError: - logger.debug("youtube-dl get-url failed: %s", url_text) - except URLError as exc: - urls_dict[url_text] = exc.status - - logger.debug(urls_dict) - if not urls_dict and apologize: - bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, text=self.NO_URLS_TEXT, parse_mode="Markdown") - return - - if mode == "dl": - wait_message = bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, parse_mode="Markdown", text=get_italic(self.get_wait_text())) + await update.callback_query.answer(text="Settings not changed") + + elif url_message_id in context.chat_data: + # mode is ask, we got data from button on asking message. 
+ # if it asked, then we were in prepare_urls: + url_message_data = context.chat_data.pop(url_message_id) + urls_dict = url_message_data["urls"] + command_name = f"{button_action}_msg" + logger.debug(command_name) + BOT_REQUESTS.labels(type=command_name, chat_type=chat_type, mode="ask").inc() + if button_action == "dl": + await update.callback_query.answer(text=get_random_wait_text()) + wait_message = await update.callback_query.edit_message_text(parse_mode="Markdown", text=f"_{get_random_wait_text()}_") for url in urls_dict: - self.download_url_and_send( - bot, url, urls_dict[url], chat_id=chat_id, reply_to_message_id=reply_to_message_id, wait_message_id=wait_message.message_id, source_ip=source_ip, proxy=proxy + kwargs = { + "bot_options": { + "token": context.bot.token, + "base_url": context.bot.base_url.split("/bot")[0] + "/bot", + "base_file_url": context.bot.base_file_url.split("/file/bot")[0] + "/file/bot", + "local_mode": context.bot.local_mode, + }, + "chat_id": chat_id, + "url": url, + "flood": context.chat_data["settings"]["flood"], + "reply_to_message_id": url_message_id, + "wait_message_id": wait_message.message_id, + "cookies_file": COOKIES_FILE, + "source_ip": url_message_data["source_ip"], + "proxy": url_message_data["proxy"], + } + await context.bot.send_chat_action(chat_id=chat_id, action=ChatAction.RECORD_VOICE) + # Run heavy task in separate process, "fire and forget": + EXECUTOR.submit(download_url_and_send, **kwargs) + + elif button_action == "link": + await context.bot.send_message(chat_id=chat_id, reply_to_message_id=url_message_id, parse_mode="Markdown", disable_web_page_preview=True, text=get_link_text(urls_dict)) + await context.bot.delete_message(chat_id=chat_id, message_id=button_message_id) + elif button_action == "cancel": + await context.bot.delete_message(chat_id=chat_id, message_id=button_message_id) + else: + await update.callback_query.answer(text=OLD_MSG_TEXT) + await context.bot.delete_message(chat_id=chat_id, message_id=button_message_id) + + +async def blacklist_whitelist_callback(update: Update, context: ContextTypes.DEFAULT_TYPE): + chat_id = update.effective_chat.id + if not chat_allowed(chat_id): + await context.bot.leave_chat(chat_id) + + +async def unknown_command_callback(update: Update, context: ContextTypes.DEFAULT_TYPE): + return + + +async def error_callback(update: Update, context: ContextTypes.DEFAULT_TYPE): # skipcq: PYL-R0201 + try: + raise context.error + except Forbidden: + # remove update.message.chat_id from conversation list + logger.debug(f"Update {update} caused Forbidden error: {context.error}") + except BadRequest: + # handle malformed requests - read more below! 
+ logger.debug(f"Update {update} caused BadRequest error: {context.error}") + except TimedOut: + # handle slow connection problems + logger.debug(f"Update {update} caused TimedOut error: {context.error}") + except NetworkError: + # handle other connection problems + logger.debug(f"Update {update} caused NetworkError error: {context.error}") + except ChatMigrated as e: + # the chat_id of a group has changed, use e.new_chat_id instead + logger.debug(f"Update {update} caused ChatMigrated error: {context.error}") + except TelegramError: + # handle all other telegram related errors + logger.debug(f"Update {update} caused TelegramError error: {context.error}") + + +def init_chat_data(chat_data, mode="dl", flood=True): + if "settings" not in chat_data: + chat_data["settings"] = {} + if "mode" not in chat_data["settings"]: + chat_data["settings"]["mode"] = mode + if "flood" not in chat_data["settings"]: + chat_data["settings"]["flood"] = flood + if "allow_unknown_sites" not in chat_data["settings"]: + chat_data["settings"]["allow_unknown_sites"] = False + + +def get_direct_urls_dict(message, mode, proxy, source_ip, allow_unknown_sites): + # If telegram message passed: + urls = [] + url_entities = message.parse_entities(types=[MessageEntity.URL]) + url_caption_entities = message.parse_caption_entities(types=[MessageEntity.URL]) + url_entities.update(url_caption_entities) + for entity in url_entities: + url_str = url_entities[entity] + if "://" not in url_str: + url_str = "http://" + url_str + # TODO try except + url = URL(url_str) + if url_valid_and_allowed(url, allow_unknown_sites=allow_unknown_sites): + logger.info("Entity URL parsed: %s", url) + urls.append(url) + else: + logger.info("Entry URL is not valid or blacklisted: %s", url_str) + text_link_entities = message.parse_entities(types=[MessageEntity.TEXT_LINK]) + text_link_caption_entities = message.parse_caption_entities(types=[MessageEntity.TEXT_LINK]) + text_link_entities.update(text_link_caption_entities) + for entity in text_link_entities: + url = URL(entity.url) + if url_valid_and_allowed(url, allow_unknown_sites=allow_unknown_sites): + logger.info("Entity Text Link parsed: %s", url) + urls.append(url) + else: + logger.info("Entry Text Link is not valid or blacklisted: %s", url) + # If message just some text passed (not isinstance(message, Message)): + # all_links = find_all_links(message, default_scheme="http") + # urls = [link for link in all_links if url_valid_and_allowed(link)] + logger.info(f"prepare_urls: urls list: {urls}") + + urls_dict = {} + for url_item in urls: + unknown_site = not any((site in url_item.host for site in DOMAINS)) + # unshorten soundcloud.app.goo.gl and unknown sites links + # example: https://soundcloud.app.goo.gl/mBMvG + if unknown_site or DOMAIN_SC in url_item.host: + proxy_arg = None + if proxy: + proxy_arg = {"http": proxy, "https": proxy} + try: + url = URL( + requests.head( + url_item.to_text(full_quote=True), + allow_redirects=True, + timeout=2, + proxies=proxy_arg, + # headers={"User-Agent": UA.random}, + headers={"User-Agent": "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:105.0) Gecko/20100101 Firefox/105.0"}, + ).url ) - elif mode == "link": - wait_message = bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, parse_mode="Markdown", text=get_italic(self.get_wait_text())) - bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, parse_mode="Markdown", disable_web_page_preview=True, text=get_link_text(urls_dict)) - bot.delete_message(chat_id=chat_id, 
message_id=wait_message.message_id) - elif mode == "ask": - # ask only if good urls exist - if "http" in " ".join(urls_dict.values()): - orig_msg_id = str(reply_to_message_id) - self.chat_storage[str(chat_id)][orig_msg_id] = {"message": message, "urls": urls_dict, "source_ip": source_ip, "proxy": proxy} - question = "🎢 links found, what to do?" - button_dl = InlineKeyboardButton(text="βœ… Download", callback_data=" ".join([orig_msg_id, "dl"])) - button_link = InlineKeyboardButton(text="❇️ Links", callback_data=" ".join([orig_msg_id, "link"])) - button_cancel = InlineKeyboardButton(text="❎", callback_data=" ".join([orig_msg_id, "nodl"])) - inline_keyboard = InlineKeyboardMarkup([[button_dl, button_link, button_cancel]]) - bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, reply_markup=inline_keyboard, text=question) - self.cleanup_chat(chat_id) - - def url_valid(self, url): - telegram_domains = ["t.me", "telegram.org", "telegram.dog", "telegra.ph", "tdesktop.com", "telesco.pe", "graph.org", "contest.dev"] - logger.debug("Checking Url Entry: %s", url) - try: - netloc = urlparse(url).netloc - except AttributeError: - return False - if netloc in telegram_domains: - return False - return self.url_allowed(url) - - def url_allowed(self, url): - # Example export BLACKLIST_DOMS = "invidious.tube invidious.kavin.rocks invidious.himiko.cloud invidious.namazso.eu dev.viewtube.io tube.cadence.moe piped.kavin.rocks" - whitelist = set(x for x in os.environ.get("WHITELIST_DOMS", "").split()) - blacklist = set(x for x in os.environ.get("BLACKLIST_DOMS", "").split()) - netloc = urlparse(url).netloc - if whitelist: - if netloc not in whitelist: - return False - if blacklist: - if netloc in blacklist: - return False - if whitelist and blacklist: - if netloc in blacklist: - return False - return True + except: + url = url_item + else: + url = url_item + unknown_site = not any((site in url.host for site in DOMAINS)) + url_text = url.to_text(full_quote=True) + logger.debug(f"unshortened link: {url_text}") + # url_text = url_text.replace("m.soundcloud.com", "soundcloud.com") + url_parts_num = len([part for part in url.path_parts if part]) + if mode == "link" or unknown_site: + # We run it if it was explicitly requested as per "link" mode. + # We run it for links from unknown sites (if they were allowed). + # If it's known site, we need to check it more thoroughly below. 
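As an aside, the short-link expansion performed a few lines above can be reduced to this standalone sketch (resolve_redirects and the sample link are illustrative only, not names from this patch): a HEAD request with allow_redirects=True returns the final URL of the redirect chain, which is what the host and path checks in this function then operate on.

import requests


def resolve_redirects(url, proxy=None, timeout=2):
    # Follow the redirect chain with a HEAD request and return the final URL.
    proxies = {"http": proxy, "https": proxy} if proxy else None
    try:
        return requests.head(url, allow_redirects=True, timeout=timeout, proxies=proxies).url
    except requests.RequestException:
        # On network errors, fall back to the original URL.
        return url


if __name__ == "__main__":
    print(resolve_redirects("https://soundcloud.app.goo.gl/example"))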
+ urls_dict[url_text] = ydl_get_direct_urls(url_text, COOKIES_FILE, source_ip, proxy) + elif DOMAIN_SC in url.host and (2 <= url_parts_num <= 4 or DOMAIN_SC_API in url.host) and (not "you" in url.path_parts): + # SoundCloud: tracks, sets and widget pages, no /you/ pages + # TODO support private sets URLs that have 5 parts + # We know for sure these links can be downloaded, so we just skip running ydl_get_direct_urls + urls_dict[url_text] = "http" + elif DOMAIN_BC in url.host and (2 <= url_parts_num <= 2): + # Bandcamp: tracks and albums + # We know for sure these links can be downloaded, so we just skip running ydl_get_direct_urls + urls_dict[url_text] = "http" + elif DOMAIN_TT in url.host: + # TikTok: videos + # We know for sure these links can be downloaded, so we just skip running ydl_get_direct_urls + urls_dict[url_text] = "http" + elif DOMAIN_TW in url.host: + # Twitter: videos + # We know for sure these links can be downloaded, so we just skip running ydl_get_direct_urls + urls_dict[url_text] = "http" + elif DOMAIN_IG in url.host: + # Instagram: videos, reels + # TODO We run it for checking Instagram ban to avoid fake asking: + urls_dict[url_text] = ydl_get_direct_urls(url_text, COOKIES_FILE, source_ip, proxy) + elif DOMAIN_YT in url.host and (DOMAIN_YT_BE in url.host or "watch" in url.path or "playlist" in url.path): + # YouTube: videos and playlists + # We still run it for checking YouTube region restriction to avoid fake asking: + urls_dict[url_text] = ydl_get_direct_urls(url_text, COOKIES_FILE, source_ip, proxy) + return urls_dict + - @REQUEST_TIME.time() - @run_async - def download_url_and_send(self, bot, url, direct_urls, chat_id, reply_to_message_id=None, wait_message_id=None, source_ip=None, proxy=None): - bot.send_chat_action(chat_id=chat_id, action=ChatAction.RECORD_AUDIO) - download_dir = os.path.join(self.DL_DIR, str(uuid4())) - shutil.rmtree(download_dir, ignore_errors=True) - os.makedirs(download_dir) - - status = 0 - if direct_urls == "direct": - status = -3 - elif direct_urls == "country": - status = -4 - elif direct_urls == "live": - status = -5 - elif direct_urls == "timeout": - status = -6 +def ydl_get_direct_urls(url, cookies_file=None, source_ip=None, proxy=None): + # TODO transform into unified ydl function and deduplicate + logger.debug("Entering: ydl_get_direct_urls: %s", url) + status = "" + cmd_name = "ydl_get_direct_urls" + ydl_opts = { + "format": "bestaudio/best", + "noplaylist": True, + "skip_download": True, + # "forceprint": {"before_dl":} + } + if proxy: + ydl_opts["proxy"] = proxy + if source_ip: + ydl_opts["source_address"] = source_ip + cookies_download_file = None + if cookies_file: + cookies_download_file = tempfile.NamedTemporaryFile() + cookies_download_file_path = pathlib.Path(cookies_download_file.name) + if cookies_file.startswith("http"): + # URL for downloading cookie file: + try: + r = requests.get(cookies_file, allow_redirects=True, timeout=5) + with open(cookies_download_file_path, "wb") as cfile: + cfile.write(r.content) + ydl_opts["cookiefile"] = str(cookies_download_file_path) + except: + logger.debug("download_url_and_send could not download cookies file") + pass + elif cookies_file.startswith("firefox:"): + # TODO better handling of env var + ydl_opts["cookiesfrombrowser"] = ("firefox", cookies_file.split(":")[1], None, None) else: - if (self.SITES["sc"] in url and self.SITES["scapi"] not in url) or (self.SITES["bc"] in url): - cmd_name = "scdl" - cmd_args = [] - cmd = None - cmd_input = None - if self.SITES["sc"] in url and 
self.SITES["scapi"] not in url: - cmd = scdl_bin - cmd_name = str(cmd) - cmd_args = ( - "-l", - url, # URL of track/playlist/user - "-c", # Continue if a music already exist - "--path", - download_dir, # Download the music to a custom path - "--onlymp3", # Download only the mp3 file even if the track is Downloadable - "--addtofile", # Add the artist name to the filename if it isn't in the filename already - "--addtimestamp", - # Adds the timestamp of the creation of the track to the title (useful to sort chronologically) - "--no-playlist-folder", - # Download playlist tracks into directory, instead of making a playlist subfolder - "--extract-artist", # Set artist tag from title instead of username - ) - cmd_input = None - elif self.SITES["bc"] in url: - cmd = bandcamp_dl_bin - cmd_name = str(cmd) - cmd_args = ( - "--base-dir", - download_dir, # Base location of which all files are downloaded - "--template", - "%{track} - %{artist} - %{title} [%{album}]", # Output filename template - "--overwrite", # Overwrite tracks that already exist - "--group", # Use album/track Label as iTunes grouping - # "--embed-art", # Embed album art (if available) - "--no-slugify", # Disable slugification of track, album, and artist names - url, # URL of album/track - ) - cmd_input = "yes" + # cookie file local path: + shutil.copyfile(cookies_file, cookies_download_file_path) + ydl_opts["cookiefile"] = str(cookies_download_file_path) - logger.info("%s starts: %s", cmd_name, url) - env = None - if proxy: - env = {"http_proxy": proxy, "https_proxy": proxy} - cmd_proc = cmd[cmd_args].popen(env=env, stdin=PIPE, stdout=PIPE, stderr=PIPE, universal_newlines=True) - try: - cmd_stdout, cmd_stderr = cmd_proc.communicate(input=cmd_input, timeout=self.DL_TIMEOUT) - cmd_retcode = cmd_proc.returncode - # TODO listed are common scdl problems for one track with 0 retcode, all its output is always in stderr: - if cmd_retcode or (any(err in cmd_stderr for err in ["Error resolving url", "is not streamable", "Failed to get item"]) and ".mp3" not in cmd_stderr): - raise ProcessExecutionError(cmd_args, cmd_retcode, cmd_stdout, cmd_stderr) - logger.info("%s succeeded: %s", cmd_name, url) - status = 1 - except TimeoutExpired: - cmd_proc.kill() - logger.info("%s took too much time and dropped: %s", cmd_name, url) - status = -1 - except ProcessExecutionError: - logger.exception("%s failed: %s", cmd_name, url) - - if status == 0: - cmd = youtube_dl_func - cmd_name = "youtube_dl_func" - # TODO: set different ydl_opts for different sites - host = urlparse(url).hostname - ydl_opts = {} - if host == "tiktok.com" or host.endswith(".tiktok.com"): - ydl_opts = { - "outtmpl": os.path.join(download_dir, "tiktok.%(ext)s"), - "videoformat": "mp4", - } - elif "instagr" in host: - ydl_opts = { - "outtmpl": os.path.join(download_dir, "inst.%(ext)s"), - "videoformat": "mp4", - "postprocessors": [ - { - "key": "FFmpegVideoConvertor", - "preferedformat": "mp4", - } - ], + # FIXME apply CHECK_URL_TIMEOUT by using a process (as we did before). 
Maybe use https://github.com/noxdafox/pebble + logger.debug("%s starts: %s", cmd_name, url) + try: + info_dict = ydl.YoutubeDL(ydl_opts).extract_info(url, download=False) + # TODO actualize checks, fix for youtube playlists + if "url" in info_dict: + direct_url = info_dict["url"] + elif "entries" in info_dict: + direct_url = "\n".join([x["url"] for x in info_dict["entries"] if "url" in x]) + else: + raise Exception() + if "yt_live_broadcast" in direct_url: + status = "restrict_live" + elif "returning it as such" in direct_url: + status = "restrict_direct" + elif "proxy server" in direct_url: + status = "restrict_region" + # end actualize checks + else: + status = direct_url + logger.debug("%s succeeded: %s", cmd_name, url) + except Exception: + logger.debug("%s failed: %s", cmd_name, url) + logger.debug(traceback.format_exc()) + status = "failed" + if cookies_file: + cookies_download_file.close() + + return status + + +def download_url_and_send( + bot_options, + chat_id, + url, + flood=False, + reply_to_message_id=None, + wait_message_id=None, + cookies_file=None, + source_ip=None, + proxy=None, +): + logger.debug("Entering: download_url_and_send") + # loop_main = asyncio.get_event_loop() + + # https://github.com/python-telegram-bot/python-telegram-bot/wiki/Concurrency#applicationconcurrent_updates + # https://docs.python-telegram-bot.org/en/v20.1/telegram.ext.applicationbuilder.html#telegram.ext.ApplicationBuilder + # https://github.com/python-telegram-bot/python-telegram-bot/issues/3509 + # We use concurrent_updates with limit instead of unlimited create_task + # send_audio has connection troubles when running async. + # Works bad on my computer, but good on server with local API. + # But still we run it in another process. + + # We can only submit sync function to process executor (because it forks and there is no point). + # We must not pass here, because they get serialized on fork and context/bot cannot be serialized pickled. + # https://docs.python-telegram-bot.org/en/v20.1/telegram.bot.html + # But we want to run async bot functions from framework here. 
+ # We can't use loop_main.run_in_executor(None) because it doesn't give the result + # We can't use loop_main.run_until_complete because it is already running in our forked process + # So we run additional loop in additional thread and use it + loop_additional = asyncio.new_event_loop() + thread_additional = threading.Thread(target=loop_additional.run_forever, name="Async Runner", daemon=True) + + def run_async(coro): + if not thread_additional.is_alive(): + thread_additional.start() + future = asyncio.run_coroutine_threadsafe(coro, loop_additional) + return future.result() + + bot = Bot( + token=bot_options["token"], + base_url=bot_options["base_url"], + base_file_url=bot_options["base_file_url"], + local_mode=bot_options["local_mode"], + request=HTTPXRequest(http_version=HTTP_VERSION), + get_updates_request=HTTPXRequest(http_version=HTTP_VERSION), + ) + run_async(bot.initialize()) + logger.debug(bot.token) + download_dir = os.path.join(DL_DIR, str(uuid4())) + shutil.rmtree(download_dir, ignore_errors=True) + os.makedirs(download_dir) + url_obj = URL(url) + host = url_obj.host + download_video = False + status = "initial" + cmd = None + cmd_name = "" + cmd_args = () + cmd_input = None + if (DOMAIN_SC in host and DOMAIN_SC_API not in host) or (DOMAIN_BC in host): + # If link is sc/bc, we try scdl/bcdl first: + if DOMAIN_SC in host and DOMAIN_SC_API not in host: + cmd = scdl_bin + cmd_name = str(cmd) + cmd_args = ( + "-l", + url, # URL of track/playlist/user + "-c", # Continue if a music already exist + "--path", + download_dir, # Download the music to a custom path + "--onlymp3", # Download only the mp3 file even if the track is Downloadable + "--addtofile", # Add the artist name to the filename if it isn't in the filename already + "--addtimestamp", + # Adds the timestamp of the creation of the track to the title (useful to sort chronologically) + "--no-playlist-folder", + # Download playlist tracks into directory, instead of making a playlist subfolder + "--extract-artist", # Set artist tag from title instead of username + ) + cmd_input = None + elif DOMAIN_BC in host: + cmd = bcdl_bin + cmd_name = str(cmd) + cmd_args = ( + "--base-dir", + download_dir, # Base location of which all files are downloaded + "--template", + "%{track} - %{artist} - %{title} [%{album}]", # Output filename template + "--overwrite", # Overwrite tracks that already exist + "--group", # Use album/track Label as iTunes grouping + # "--embed-art", # Embed album art (if available) + "--no-slugify", # Disable slugification of track, album, and artist names + url, # URL of album/track + ) + cmd_input = "yes" + + env = None + if proxy: + env = {"http_proxy": proxy, "https_proxy": proxy} + logger.debug("%s starts: %s", cmd_name, url) + cmd_proc = cmd[cmd_args].popen(env=env, stdin=PIPE, stdout=PIPE, stderr=PIPE, universal_newlines=True) + try: + cmd_stdout, cmd_stderr = cmd_proc.communicate(input=cmd_input, timeout=DL_TIMEOUT) + cmd_retcode = cmd_proc.returncode + # listed are common scdl problems for one track with 0 retcode, all its output is always in stderr: + if cmd_retcode or (any(err in cmd_stderr for err in ["Error resolving url", "is not streamable", "Failed to get item"]) and ".mp3" not in cmd_stderr): + raise ProcessExecutionError(cmd_args, cmd_retcode, cmd_stdout, cmd_stderr) + logger.debug("%s succeeded: %s", cmd_name, url) + status = "success" + except TimeoutExpired: + cmd_proc.kill() + logger.debug("%s took too much time and dropped: %s", cmd_name, url) + except ProcessExecutionError: + logger.debug("%s 
failed: %s", cmd_name, url) + logger.debug(traceback.format_exc()) + + if status == "initial": + # If link is not sc/bc or scdl/bcdl just failed, we use ydl + cmd_name = "ydl_download" + # https://github.com/yt-dlp/yt-dlp/blob/master/yt_dlp/YoutubeDL.py#L159 + # https://github.com/yt-dlp/yt-dlp/blob/master/yt_dlp/utils.py#L3414 + ydl_opts = { + # https://github.com/yt-dlp/yt-dlp#output-template + # Default outtmpl is "%(title)s [%(id)s].%(ext)s" + # Take first 16 symbols of title: + "outtmpl": os.path.join(download_dir, "%(title).16s [%(id)s].%(ext)s"), + "restrictfilenames": True, + "windowsfilenames": True, + # "trim_file_name": 32, + } + if DOMAIN_TT in host: + download_video = True + ydl_opts["videoformat"] = "mp4" + elif DOMAIN_TW in host: + download_video = True + ydl_opts["videoformat"] = "mp4" + elif DOMAIN_IG in host: + download_video = True + ydl_opts.update( + { + "videoformat": "webm", + # "postprocessors": [ + # { + # "key": "FFmpegVideoConvertor", + # "preferedformat": "mp4", + # } + # ], } - else: - ydl_opts = { - "outtmpl": os.path.join(download_dir, "%(title)s.%(ext)s"), - # default: %(autonumber)s - %(title)s-%(id)s.%(ext)s + ) + else: + ydl_opts.update( + { "format": "bestaudio/best", "postprocessors": [ { @@ -633,320 +961,423 @@ def download_url_and_send(self, bot, url, direct_urls, chat_id, reply_to_message { "key": "FFmpegMetadata", }, - # {'key': 'EmbedThumbnail'}, + # {"key": "EmbedThumbnail"}, ], "noplaylist": True, } - if proxy: - ydl_opts["proxy"] = proxy - if source_ip: - ydl_opts["source_address"] = source_ip - # https://github.com/ytdl-org/youtube-dl/blob/master/youtube_dl/YoutubeDL.py#L210 - if self.cookies_file: - if "http" in self.cookies_file: - ydl_opts["cookiefile"] = self.COOKIES_DOWNLOAD_FILE - else: - ydl_opts["cookiefile"] = self.cookies_file - queue = Queue() - cmd_args = ( - url, - ydl_opts, - queue, ) - logger.info("%s starts: %s", cmd_name, url) - cmd_proc = Process(target=cmd, args=cmd_args) - cmd_proc.start() - try: - cmd_retcode, cmd_stderr = queue.get(block=True, timeout=self.DL_TIMEOUT) - cmd_stdout = "" - cmd_proc.join() - if cmd_retcode: - raise ProcessExecutionError(cmd_args, cmd_retcode, cmd_stdout, cmd_stderr) - # raise cmd_status # TODO: pass and re-raise original Exception? 
- logger.info("%s succeeded: %s", cmd_name, url) - status = 1 - except Empty: - cmd_proc.join(1) - if cmd_proc.is_alive(): - cmd_proc.terminate() - logger.info("%s took too much time and dropped: %s", cmd_name, url) - status = -1 - except ProcessExecutionError: - logger.exception("%s failed: %s", cmd_name, url) - status = -2 - gc.collect() - - if status in [-1, -6]: - bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, text=self.DL_TIMEOUT_TEXT, parse_mode="Markdown") - elif status == -2: - bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, text=self.NO_AUDIO_TEXT, parse_mode="Markdown") - elif status == -3: - bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, text=self.DIRECT_RESTRICTION_TEXT, parse_mode="Markdown") - elif status == -4: - bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, text=self.REGION_RESTRICTION_TEXT, parse_mode="Markdown") - elif status == -5: - bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, text=self.LIVE_RESTRICTION_TEXT, parse_mode="Markdown") - elif status == 1: - file_list = [] - for d, dirs, files in os.walk(download_dir): - for file in files: - file_list.append(os.path.join(d, file)) - if not file_list: - logger.info("No files in dir: %s", download_dir) - bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, text="*Sorry*, I couldn't download any files from provided links", parse_mode="Markdown") + if proxy: + ydl_opts["proxy"] = proxy + if source_ip: + ydl_opts["source_address"] = source_ip + cookies_download_file = None + if cookies_file: + cookies_download_file = tempfile.NamedTemporaryFile() + cookies_download_file_path = pathlib.Path(cookies_download_file.name) + if cookies_file.startswith("http"): + # URL for downloading cookie file: + try: + r = requests.get(cookies_file, allow_redirects=True, timeout=5) + with open(cookies_download_file_path, "wb") as cfile: + cfile.write(r.content) + ydl_opts["cookiefile"] = str(cookies_download_file_path) + except: + logger.debug("download_url_and_send could not download cookies file") + pass + elif cookies_file.startswith("firefox:"): + # TODO better handling of env var + ydl_opts["cookiesfrombrowser"] = ("firefox", cookies_file.split(":")[1], None, None) else: - for file in sorted(file_list): - file_name = os.path.split(file)[-1] - file_parts = [] - try: - file_root, file_ext = os.path.splitext(file) - file_format = file_ext.replace(".", "").lower() - file_size = os.path.getsize(file) - if file_format not in ["mp3", "m4a", "mp4"]: - raise FileNotSupportedError(file_format) - if file_size > self.MAX_CONVERT_FILE_SIZE: + # cookie file local path: + shutil.copyfile(cookies_file, cookies_download_file_path) + ydl_opts["cookiefile"] = str(cookies_download_file_path) + + # FIXME apply DL_TIMEOUT by using a process (as we did before). 
Maybe use https://github.com/noxdafox/pebble + logger.debug("%s starts: %s", cmd_name, url) + try: + # TODO check result + info_dict = ydl.YoutubeDL(ydl_opts).download([url]) + logger.debug("%s succeeded: %s", cmd_name, url) + status = "success" + except Exception as exc: + print(exc) + logger.debug("%s failed: %s", cmd_name, url) + # logger.debug(traceback.format_exc()) + status = "failed" + if cookies_file: + cookies_download_file.close() + # gc.collect() + + if status == "failed": + run_async(bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, text=FAILED_TEXT, parse_mode="Markdown")) + elif status == "timeout": + run_async(bot.send_message(chat_id=chat_id, reply_to_message_id=reply_to_message_id, text=DL_TIMEOUT_TEXT, parse_mode="Markdown")) + elif status == "success": + file_list = [] + for d, dirs, files in os.walk(download_dir): + for file in files: + file_list.append(os.path.join(d, file)) + if not file_list: + logger.debug("No files in dir: %s", download_dir) + run_async( + bot.send_message( + chat_id=chat_id, reply_to_message_id=reply_to_message_id, text="*Sorry*, I couldn't download any files from some of the provided links", parse_mode="Markdown" + ) + ) + else: + for file in sorted(file_list): + file_name = os.path.split(file)[-1] + file_parts = [] + try: + file_root, file_ext = os.path.splitext(file) + file_format = file_ext.replace(".", "").lower() + file_size = os.path.getsize(file) + if file_format not in ["mp3", "m4a", "mp4", "webm"]: + raise FileNotSupportedError(file_format) + # We don't convert videos (from tiktok or instagram or twitter): + if file_format in ["m4a", "mp4", "webm"] and not download_video: + if file_size > MAX_CONVERT_FILE_SIZE: raise FileTooLargeError(file_size) - # FIXME tiktok.mp4 is for tiktok, inst.mp4 for instagram - if file_format not in ["mp3"] and not ("tiktok." in file or "inst." 
in file): - logger.info("Converting: %s", file) - try: - file_converted = file.replace(file_ext, ".mp3") + logger.debug("Converting: %s", file) + try: + file_converted = file.replace(file_ext, ".mp3") + ffinput = ffmpeg.input(file) + # audio_bitrate="320k" + ffmpeg.output(ffinput, file_converted, vn=None).run() + file = file_converted + file_root, file_ext = os.path.splitext(file) + file_format = file_ext.replace(".", "").lower() + file_size = os.path.getsize(file) + except Exception: + raise FileNotConvertedError + + file_parts = [] + if file_size <= MAX_TG_FILE_SIZE: + file_parts.append(file) + else: + logger.debug("Splitting: %s", file) + id3 = None + try: + id3 = ID3(file, translate=False) + except: + pass + + parts_number = file_size // MAX_TG_FILE_SIZE + 1 + + # https://github.com/c0decracker/video-splitter + # https://superuser.com/a/1354956/464797 + try: + # file_duration = float(ffmpeg.probe(file)['format']['duration']) + part_size = file_size // parts_number + cur_position = 0 + for i in range(parts_number): + file_part = file.replace(file_ext, ".part{}{}".format(str(i + 1), file_ext)) ffinput = ffmpeg.input(file) - # audio_bitrate="320k" - ffmpeg.output(ffinput, file_converted, vn=None).run() - file = file_converted - file_root, file_ext = os.path.splitext(file) - file_format = file_ext.replace(".", "").lower() - file_size = os.path.getsize(file) - except Exception: - # TODO exceptions - raise FileNotConvertedError - - file_parts = [] - if file_size <= self.MAX_TG_FILE_SIZE: - file_parts.append(file) - else: - logger.info("Splitting: %s", file) - id3 = None - try: - id3 = ID3(file, translate=False) - except: - pass - - parts_number = file_size // self.MAX_TG_FILE_SIZE + 1 - - # https://github.com/c0decracker/video-splitter - # https://superuser.com/a/1354956/464797 - try: - # file_duration = float(ffmpeg.probe(file)['format']['duration']) - part_size = file_size // parts_number - cur_position = 0 - for i in range(parts_number): - file_part = file.replace(file_ext, ".part{}{}".format(str(i + 1), file_ext)) - ffinput = ffmpeg.input(file) - if i == (parts_number - 1): - ffmpeg.output(ffinput, file_part, codec="copy", vn=None, ss=cur_position).run() - else: - ffmpeg.output(ffinput, file_part, codec="copy", vn=None, ss=cur_position, fs=part_size).run() - part_duration = float(ffmpeg.probe(file_part)["format"]["duration"]) - cur_position += part_duration - if id3: - try: - id3.save(file_part, v1=2, v2_version=4) - except: - pass - file_parts.append(file_part) - except Exception: - # TODO exceptions - raise FileSplittedPartiallyError(file_parts) - - except FileNotSupportedError as exc: - if not (exc.file_format in ["m3u", "jpg", "jpeg", "png", "finished", "tmp"]): - logger.warning("Unsupported file format: %s", file_name) + if i == (parts_number - 1): + ffmpeg.output(ffinput, file_part, codec="copy", vn=None, ss=cur_position).run() + else: + ffmpeg.output(ffinput, file_part, codec="copy", vn=None, ss=cur_position, fs=part_size).run() + part_duration = float(ffmpeg.probe(file_part)["format"]["duration"]) + cur_position += part_duration + if id3: + try: + id3.save(file_part, v1=ID3v1SaveOptions.CREATE, v2_version=4) + except: + pass + file_parts.append(file_part) + except Exception: + raise FileSplittedPartiallyError(file_parts) + + except FileNotSupportedError as exc: + # If format is not some extra garbage from downloaders: + if not (exc.file_format in ["m3u", "jpg", "jpeg", "png", "finished", "tmp"]): + logger.debug("Unsupported file format: %s", file_name) + run_async( 
bot.send_message( chat_id=chat_id, reply_to_message_id=reply_to_message_id, text="*Sorry*, downloaded file `{}` is in format I could not yet convert or send".format(file_name), parse_mode="Markdown", ) - except FileTooLargeError as exc: - logger.info("Large file for convert: %s", file_name) + ) + except FileTooLargeError as exc: + logger.debug("Large file for convert: %s", file_name) + run_async( bot.send_message( chat_id=chat_id, reply_to_message_id=reply_to_message_id, text="*Sorry*, downloaded file `{}` is `{}` MB and it is larger than I could convert (`{} MB`)".format( - file_name, exc.file_size // 1000000, self.MAX_CONVERT_FILE_SIZE // 1000000 + file_name, exc.file_size // 1000000, MAX_CONVERT_FILE_SIZE // 1000000 ), parse_mode="Markdown", ) - except FileSplittedPartiallyError as exc: - file_parts = exc.file_parts - logger.exception("Splitting failed: %s", file_name) + ) + except FileSplittedPartiallyError as exc: + file_parts = exc.file_parts + logger.debug("Splitting failed: %s", file_name) + run_async( bot.send_message( chat_id=chat_id, reply_to_message_id=reply_to_message_id, text="*Sorry*, not enough memory to convert file `{}`..".format(file_name), parse_mode="Markdown", ) - except FileNotConvertedError as exc: - logger.exception("Splitting failed: %s", file_name) + ) + except FileNotConvertedError as exc: + logger.debug("Splitting failed: %s", file_name) + run_async( bot.send_message( chat_id=chat_id, reply_to_message_id=reply_to_message_id, text="*Sorry*, not enough memory to convert file `{}`..".format(file_name), parse_mode="Markdown", ) - try: - caption = None - flood = self.chat_storage[str(chat_id)]["settings"]["flood"] - if flood == "yes": - addition = "" - url_obj = URL(url) - if self.SITES["yt"] in url_obj.host: - source = "YouTube" - file_root, file_ext = os.path.splitext(file_name) - file_title = file_root.replace(file_ext, "") - addition = ": " + file_title - elif self.SITES["sc"] in url_obj.host: - source = "SoundCloud" - elif self.SITES["bc"] in url_obj.host: - source = "Bandcamp" - else: - source = url_obj.host.replace(".com", "").replace("www.", "").replace("m.", "") - # if "youtu.be" in url_obj.host: - # url = url.replace("http://", "").replace("https://", "") - # else: - # url = shorten_url(url) - caption = "@{} _got it from_ [{}]({}){}".format(self.bot_username.replace("_", "\_"), source, url, addition.replace("_", "\_")) - # logger.info(caption) - reply_to_message_id_send = reply_to_message_id if flood == "yes" else None - sent_audio_ids = [] - for index, file_part in enumerate(file_parts): - path = pathlib.Path(file_part) - file_name = os.path.split(file_part)[-1] - # file_name = translit(file_name, 'ru', reversed=True) - logger.info("Sending: %s", file_name) - bot.send_chat_action(chat_id=chat_id, action=ChatAction.UPLOAD_AUDIO) - caption_part = None - if len(file_parts) > 1: - caption_part = "Part {} of {}".format(str(index + 1), str(len(file_parts))) - if caption: - if caption_part: - caption_full = caption_part + " | " + caption + ) + caption = None + reply_to_message_id_send = None + if flood: + addition = "" + if DOMAIN_YT in host: + source = "YouTube" + file_root, file_ext = os.path.splitext(file_name) + file_title = file_root.replace(file_ext, "") + addition = ": " + file_title + elif DOMAIN_SC in host: + source = "SoundCloud" + elif DOMAIN_BC in host: + source = "Bandcamp" + else: + source = url_obj.host.replace(".com", "").replace("www.", "").replace("m.", "") + # TODO fix youtube id in [] + caption = "@{} _got it from_ 
[{}]({}){}".format(bot.username.replace("_", "\_"), source, url, addition.replace("_", "\_")) + # logger.debug(caption) + reply_to_message_id_send = reply_to_message_id + sent_audio_ids = [] + for index, file_part in enumerate(file_parts): + path = pathlib.Path(file_part) + file_name = os.path.split(file_part)[-1] + # file_name = translit(file_name, 'ru', reversed=True) + logger.debug("Sending: %s", file_name) + run_async(bot.send_chat_action(chat_id=chat_id, action=ChatAction.UPLOAD_VOICE)) + caption_part = None + if len(file_parts) > 1: + caption_part = "Part {} of {}".format(str(index + 1), str(len(file_parts))) + if caption: + if caption_part: + caption_full = caption_part + " | " + caption + else: + caption_full = caption + else: + if caption_part: + caption_full = caption_part + else: + caption_full = "" + # caption_full = textwrap.shorten(caption_full, width=190, placeholder="..") + retries = 3 + for i in range(retries): + try: + logger.debug(f"Trying {i+1} time to send file part: {file_part}") + if file_part.endswith(".mp3"): + mp3 = MP3(file_part) + duration = round(mp3.info.length) + performer = None + title = None + try: + performer = ", ".join(mp3["artist"]) + title = ", ".join(mp3["title"]) + except: + pass + if TG_BOT_API_LOCAL_MODE: + audio = path.absolute().as_uri() + logger.debug(audio) else: - caption_full = caption + audio = open(file_part, "rb") + audio_msg = run_async( + bot.send_audio( + chat_id=chat_id, + reply_to_message_id=reply_to_message_id_send, + audio=audio, + duration=duration, + performer=performer, + title=title, + caption=caption_full, + parse_mode="Markdown", + read_timeout=COMMON_CONNECTION_TIMEOUT, + write_timeout=COMMON_CONNECTION_TIMEOUT, + connect_timeout=COMMON_CONNECTION_TIMEOUT, + pool_timeout=COMMON_CONNECTION_TIMEOUT, + ), + ) + sent_audio_ids.append(audio_msg.audio.file_id) + logger.debug("Sending audio succeeded: %s", file_name) + break + elif download_video: + video = open(file_part, "rb") + duration = int(float(ffmpeg.probe(file_part)["format"]["duration"])) + videostream = next(item for item in ffmpeg.probe(file_part)["streams"] if item["codec_type"] == "video") + width = int(videostream["width"]) + height = int(videostream["height"]) + video_msg = run_async( + bot.send_video( + chat_id=chat_id, + reply_to_message_id=reply_to_message_id_send, + video=video, + supports_streaming=True, + duration=duration, + width=width, + height=height, + caption=caption_full, + parse_mode="Markdown", + read_timeout=COMMON_CONNECTION_TIMEOUT, + write_timeout=COMMON_CONNECTION_TIMEOUT, + connect_timeout=COMMON_CONNECTION_TIMEOUT, + pool_timeout=COMMON_CONNECTION_TIMEOUT, + ), + ) + sent_audio_ids.append(video_msg.video.file_id) + logger.debug("Sending video succeeded: %s", file_name) + break + except TelegramError: + print(traceback.format_exc()) + if i == retries - 1: + logger.debug("Sending failed because of TelegramError: %s", file_name) else: - if caption_part: - caption_full = caption_part - else: - caption_full = "" - # caption_full = textwrap.shorten(caption_full, width=190, placeholder="..") - for i in range(3): - try: - if file_part.endswith(".mp3"): - mp3 = MP3(file_part) - duration = round(mp3.info.length) - performer = None - title = None - try: - performer = ", ".join(mp3["artist"]) - title = ", ".join(mp3["title"]) - except: - pass - if "127.0.0.1" in self.TG_BOT_API: - audio = path.absolute().as_uri() - logger.debug(audio) - elif self.SERVE_AUDIO: - audio = str(urljoin(self.APP_URL, str(path.relative_to(self.DL_DIR)))) - logger.debug(audio) - 
else: - audio = open(file_part, "rb") - if i > 0: - # maybe: Reply message not found - reply_to_message_id_send = None - audio_msg = bot.send_audio( - chat_id=chat_id, - reply_to_message_id=reply_to_message_id_send, - audio=audio, - duration=duration, - performer=performer, - title=title, - caption=caption_full, - parse_mode="Markdown", - ) - sent_audio_ids.append(audio_msg.audio.file_id) - logger.info("Sending succeeded: %s", file_name) - break - elif "tiktok." in file_part or "inst." in file_part: - video = open(file_part, "rb") - duration = float(ffmpeg.probe(file_part)["format"]["duration"]) - videostream = next(item for item in ffmpeg.probe(file_part)["streams"] if item["codec_type"] == "video") - width = int(videostream["width"]) - height = int(videostream["height"]) - video_msg = bot.send_video( - chat_id=chat_id, - reply_to_message_id=reply_to_message_id_send, - video=video, - supports_streaming=True, - duration=duration, - width=width, - height=height, - caption=caption_full, - parse_mode="Markdown", - ) - sent_audio_ids.append(video_msg.video.file_id) - logger.info("Sending succeeded: %s", file_name) - break - except TelegramError: - if i == 2: - logger.exception("Sending failed because of TelegramError: %s", file_name) - if len(sent_audio_ids) != len(file_parts): - raise FileSentPartiallyError(sent_audio_ids) - - except FileSentPartiallyError as exc: - sent_audio_ids = exc.sent_audio_ids + time.sleep(5) + if len(sent_audio_ids) != len(file_parts): + run_async( bot.send_message( chat_id=chat_id, reply_to_message_id=reply_to_message_id, text="*Sorry*, could not send file `{}` or some of it's parts..".format(file_name), parse_mode="Markdown", ) - logger.warning("Sending some parts failed: %s", file_name) - - if not self.SERVE_AUDIO: - shutil.rmtree(download_dir, ignore_errors=True) - if wait_message_id: # TODO: delete only once - try: - bot.delete_message(chat_id=chat_id, message_id=wait_message_id) - except: - pass + ) + logger.debug("Sending some parts failed: %s", file_name) - @run_async - def blacklist_whitelist(self, update: Update, context: CallbackContext): - if update.channel_post: - message = update.channel_post - elif update.message: - message = update.message - chat_id = message.chat_id - if not self.is_chat_allowed(chat_id): - context.bot.leave_chat(chat_id) - - def is_chat_allowed(self, chat_id): - try: - whitelist = set(int(x) for x in os.environ.get("WHITELIST_CHATS", "").split()) - except ValueError: - raise ValueError("Your whitelisted chats does not contain valid integers.") + shutil.rmtree(download_dir, ignore_errors=True) + if wait_message_id: try: - blacklist = set(int(x) for x in os.environ.get("BLACKLIST_CHATS", "").split()) - except ValueError: - raise ValueError("Your blacklisted chats does not contain valid integers.") - if whitelist: - if chat_id not in whitelist: - return False - if blacklist: - if chat_id in blacklist: - return False - if whitelist and blacklist: - if chat_id in blacklist: - return False - return True + run_async( + bot.delete_message( + chat_id=chat_id, + message_id=wait_message_id, + ), + ) + except: + pass + run_async(bot.shutdown()) + + +async def post_shutdown(application: Application) -> None: + EXECUTOR.shutdown(wait=False, cancel_futures=True) + + +async def post_init(application: Application) -> None: + SYSTEMD_NOTIFIER.notify("READY=1") + SYSTEMD_NOTIFIER.notify(f"STATUS=Application initialized") + + +async def callback_watchdog(context: ContextTypes.DEFAULT_TYPE): + SYSTEMD_NOTIFIER.notify("WATCHDOG=1") + 
SYSTEMD_NOTIFIER.notify(f"STATUS=Watchdog was sent {datetime.datetime.now()}") + + +async def callback_monitor(context: ContextTypes.DEFAULT_TYPE): + logger.debug(f"EXECUTOR pending work items: {len(EXECUTOR._pending_work_items)} tasks remain") + EXECUTOR_TASKS_REMAINING.set(len(EXECUTOR._pending_work_items)) + + +def main(): + # Start exposing Prometheus/OpenMetrics metrics: + prometheus_client.start_http_server(METRICS_PORT, addr=METRICS_HOST, registry=REGISTRY) + + # Maybe we can use token again if we will buy SoundCloud Go+ + # https://github.com/flyingrub/scdl/issues/429 + # if sc_auth_token: + # config = configparser.ConfigParser() + # config['scdl'] = {} + # config['scdl']['path'] = DL_DIR + # config['scdl']['auth_token'] = sc_auth_token + # config_dir = os.path.join(os.path.expanduser('~'), '.config', 'scdl') + # config_path = os.path.join(config_dir, 'scdl.cfg') + # os.makedirs(config_dir, exist_ok=True) + # with open(config_path, 'w') as config_file: + # config.write(config_file) + + persistence = PicklePersistence(filepath=CHAT_STORAGE) + application = ( + ApplicationBuilder() + .token(TG_BOT_TOKEN) + .local_mode(TG_BOT_API_LOCAL_MODE) + # https://github.com/python-telegram-bot/python-telegram-bot/issues/3556 + .http_version(HTTP_VERSION) + .get_updates_http_version(HTTP_VERSION) + .base_url(f"{TG_BOT_API}/bot") + .base_file_url(f"{TG_BOT_API}/file/bot") + .persistence(persistence) + .post_init(post_init) + .post_shutdown(post_shutdown) + .concurrent_updates(256) + .connection_pool_size(512) + .connect_timeout(COMMON_CONNECTION_TIMEOUT) + .read_timeout(COMMON_CONNECTION_TIMEOUT) + .write_timeout(COMMON_CONNECTION_TIMEOUT) + .pool_timeout(COMMON_CONNECTION_TIMEOUT) + .build() + ) + + blacklist_whitelist_handler = MessageHandler(filters.StatusUpdate.NEW_CHAT_MEMBERS, blacklist_whitelist_callback) + start_command_handler = CommandHandler("start", start_help_commands_callback) + help_command_handler = CommandHandler("help", start_help_commands_callback) + settings_command_handler = CommandHandler("settings", settings_command_callback) + dl_command_handler = CommandHandler("dl", dl_link_commands_and_messages_callback, filters=~filters.UpdateType.EDITED_MESSAGE & ~filters.FORWARDED) + link_command_handler = CommandHandler("link", dl_link_commands_and_messages_callback, filters=~filters.UpdateType.EDITED_MESSAGE & ~filters.FORWARDED) + message_with_links_handler = MessageHandler( + ~filters.UpdateType.EDITED_MESSAGE + & ~filters.COMMAND + & ( + (filters.TEXT & (filters.Entity(MessageEntity.URL) | filters.Entity(MessageEntity.TEXT_LINK))) + | (filters.CAPTION & (filters.CaptionEntity(MessageEntity.URL) | filters.CaptionEntity(MessageEntity.TEXT_LINK))) + ), + dl_link_commands_and_messages_callback, + ) + button_query_handler = CallbackQueryHandler(button_press_callback) + unknown_handler = MessageHandler(filters.COMMAND, unknown_command_callback) + + application.add_handler(blacklist_whitelist_handler) + application.add_handler(start_command_handler) + application.add_handler(help_command_handler) + application.add_handler(settings_command_handler) + application.add_handler(dl_command_handler) + application.add_handler(link_command_handler) + application.add_handler(message_with_links_handler) + application.add_handler(button_query_handler) + application.add_handler(unknown_handler) + application.add_error_handler(error_callback) + + job_queue = application.job_queue + job_watchdog = job_queue.run_repeating(callback_watchdog, interval=60, first=10) + job_monitor = 
job_queue.run_repeating(callback_monitor, interval=5, first=5)
+
+    if WEBHOOK_ENABLE:
+        application.run_webhook(
+            drop_pending_updates=True,
+            listen=WEBHOOK_HOST,
+            port=WEBHOOK_PORT,
+            url_path=WEBHOOK_APP_URL_PATH,
+            webhook_url=urljoin(WEBHOOK_APP_URL_ROOT, WEBHOOK_APP_URL_PATH),
+            secret_token=WEBHOOK_SECRET_TOKEN,
+            max_connections=1024,
+            cert=WEBHOOK_CERT_FILE,
+            key=WEBHOOK_KEY_FILE,
+        )
+    else:
+        # TODO await it somehow
+        application.bot.delete_webhook()
+        application.run_polling(
+            drop_pending_updates=True,
+        )
+
+
+if __name__ == "__main__":
+    main()
diff --git a/scdlbot/texts/failed.txt b/scdlbot/texts/failed.txt
new file mode 100644
index 000000000..a4ebe9176
--- /dev/null
+++ b/scdlbot/texts/failed.txt
@@ -0,0 +1 @@
+*Sorry*, something went wrong. A team of highly trained 🐝🐝🐝 has been dispatched to deal with this situation, but I recommend double-checking that your link is still valid and supported; see /help
diff --git a/scdlbot/texts/no_audio.txt b/scdlbot/texts/no_audio.txt
deleted file mode 100644
index dc45b7d2e..000000000
--- a/scdlbot/texts/no_audio.txt
+++ /dev/null
@@ -1 +0,0 @@
-*Sorry*, something went _wrong_. A team of highly trained 🐝🐝🐝 has been dispatched to deal with this situation, but I recommend you to double-check if your link is alive and supported, see /help
diff --git a/scdlbot/texts/no_urls.txt b/scdlbot/texts/no_urls.txt
index 9a3e514a0..024f6b564 100644
--- a/scdlbot/texts/no_urls.txt
+++ b/scdlbot/texts/no_urls.txt
@@ -1,4 +1,3 @@
-*Sorry*, no supported links found in your request.
-Or, if it was YouTube link, YouTube just didn't let me download from it too repeatedly..
-Just wait and try again later, or try my test version bot: @scdltestbot or some another bot like @utubebot.
+*Sorry*, no supported links were found in your request, or the website just didn't let me download from it.
+Just wait and try again later.
 For more information see /help
diff --git a/scdlbot/texts/settings.tg.md b/scdlbot/texts/settings.tg.md
index 5d343f4b6..72ad77b8d 100644
--- a/scdlbot/texts/settings.tg.md
+++ b/scdlbot/texts/settings.tg.md
@@ -1,3 +1,6 @@
-Select working mode for messages *without command*.
-Sane defaults are *Download* for *PM* and *Ask* for groups.
-Toggle *Captions* off: send audios *without source captions* and *not as replies*.
+Select the working mode for messages with links but *without a command specified*.
+Sane defaults are *Download* for *private chats* and *Ask* for group chats.
+
+Enable *Captions* to send audios *with source captions* and *as replies*.
+
+Enable *Unknown sites* to allow checking unknown sites (resets after a day).
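For reference, the Prometheus wiring used in main() and callback_monitor above reduces to the following minimal, self-contained sketch; the metric name, port and address here are illustrative and not taken from this patch:

    import random
    import time

    from prometheus_client import CollectorRegistry, Gauge, start_http_server

    registry = CollectorRegistry()  # a dedicated registry, like REGISTRY above
    tasks_remaining = Gauge(
        "executor_tasks_remaining",  # illustrative metric name
        "Pending work items in the process pool",
        registry=registry,
    )

    # Expose /metrics over HTTP, as main() does with METRICS_HOST/METRICS_PORT:
    start_http_server(8000, addr="127.0.0.1", registry=registry)

    while True:
        # In the bot this value comes from EXECUTOR._pending_work_items; faked here.
        tasks_remaining.set(random.randint(0, 5))
        time.sleep(5)
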
diff --git a/scdlbot/utils.py b/scdlbot/utils.py deleted file mode 100644 index 168545d3a..000000000 --- a/scdlbot/utils.py +++ /dev/null @@ -1,162 +0,0 @@ -import logging -import os - -import pkg_resources -import requests - -try: - import yt_dlp as youtube_dl - - youtube_dl_bin_name = "yt-dlp" -except: - try: - import youtube_dl - - youtube_dl_bin_name = "youtube-dl" - except: - import youtube_dlc as youtube_dl - - youtube_dl_bin_name = "youtube-dlc" - -from boltons.urlutils import URL -from plumbum import ProcessExecutionError, ProcessTimedOut, local - -from scdlbot.exceptions import * - -# from requests.exceptions import Timeout, RequestException, SSLError - -bin_path = os.getenv("BIN_PATH", "") -scdl_bin = local[os.path.join(bin_path, "scdl")] -bandcamp_dl_bin = local[os.path.join(bin_path, "bandcamp-dl")] -youtube_dl_bin = local[os.path.join(bin_path, youtube_dl_bin_name)] - -BOTAN_TRACK_URL = "https://api.botan.io/track" - -logger = logging.getLogger(__name__) - - -def get_response_text(file_name): - # https://stackoverflow.com/a/20885799/2490759 - path = "/".join(("texts", file_name)) - return pkg_resources.resource_string(__name__, path).decode("UTF-8") - - -def get_direct_urls(url, cookies_file=None, cookies_download_file=None, source_ip=None, proxy=None): - logger.debug("Entered get_direct_urls") - youtube_dl_args = [] - - # https://github.com/ytdl-org/youtube-dl#how-do-i-pass-cookies-to-youtube-dl - if cookies_file: - if "http" in cookies_file: - try: - r = requests.get(cookies_file, allow_redirects=True, timeout=5) - open(cookies_download_file, "wb").write(r.content) - youtube_dl_args.extend(["--cookies", cookies_download_file]) - except: - pass - else: - youtube_dl_args.extend(["--cookies", cookies_file]) - - if source_ip: - youtube_dl_args.extend(["--source-address", source_ip]) - - if proxy: - youtube_dl_args.extend(["--proxy", proxy]) - - youtube_dl_args.extend(["--get-url", url]) - try: - ret_code, std_out, std_err = youtube_dl_bin[youtube_dl_args].run(timeout=60) - except ProcessTimedOut as exc: - raise URLTimeoutError - except ProcessExecutionError as exc: - # TODO: look at case: one page has multiple videos, some available, some not - if "returning it as such" in exc.stderr: - raise URLDirectError - if "proxy server" in exc.stderr: - raise URLCountryError - raise exc - if "yt_live_broadcast" in std_out: - raise URLLiveError - return std_out - - -def get_italic(text): - return "_{}_".format(text) - - -def youtube_dl_func(url, ydl_opts, queue=None): - ydl = youtube_dl.YoutubeDL(ydl_opts) - try: - ydl.download([url]) - except Exception as exc: - ydl_status = 1, str(exc) - # ydl_status = exc #TODO: pass and re-raise original Exception - else: - ydl_status = 0, "OK" - if queue: - queue.put(ydl_status) - else: - return ydl_status - - -# def botan_track(token, message, event_name): -# try: -# # uid = message.chat_id -# uid = message.from_user.id -# except AttributeError: -# logger.warning('Botan no chat_id in message') -# return False -# num_retries = 2 -# ssl_verify = True -# for i in range(num_retries): -# try: -# r = requests.post( -# BOTAN_TRACK_URL, -# params={"token": token, "uid": uid, "name": event_name}, -# data=message.to_json(), -# verify=ssl_verify, -# timeout=2, -# ) -# return r.json() -# except Timeout: -# logger.exception("Botan timeout on event: %s", event_name) -# except SSLError: -# ssl_verify = False -# except (Exception, RequestException, ValueError): -# # catastrophic error -# logger.exception("Botan πŸ™€astrophic error on event: %s", event_name) -# 
return False - - -def shorten_url(url): - try: - return requests.get("https://clck.ru/--?url=" + url).text.replace("https://", "") - except: - return url - - -def log_and_track(event_name, message=None): - logger.info("Event: %s", event_name) - if message: - pass - # if self.botan_token: - # return botan_track(self.botan_token, message, event_name) - - -def get_link_text(urls): - link_text = "" - for i, url in enumerate(urls): - link_text += "[Source Link #{}]({}) | `{}`\n".format(str(i + 1), url, URL(url).host) - direct_urls = urls[url].splitlines() - for direct_url in direct_urls: - if "http" in direct_url: - content_type = "" - if "googlevideo" in direct_url: - if "audio" in direct_url: - content_type = "Audio" - else: - content_type = "Video" - # direct_url = shorten_url(direct_url) - link_text += "β€’ {} [Direct Link]({})\n".format(content_type, direct_url) - link_text += "\n*Note:* Final download URLs are only guaranteed to work on the same machine/IP where extracted" - return link_text diff --git a/setup.cfg b/setup.cfg index 2d82b6d26..8247674f9 100644 --- a/setup.cfg +++ b/setup.cfg @@ -44,4 +44,4 @@ per-file-ignores = # doc8 configuration: https://pypi.org/project/doc8/ ignore-path = docs/_build max-line-length = 80 -sphinx = True +sphinx = true diff --git a/tmpreaper.conf.sample b/tmpreaper.conf.sample index 8ae184f9a..7a6f5708b 100644 --- a/tmpreaper.conf.sample +++ b/tmpreaper.conf.sample @@ -1,6 +1,6 @@ # sudo apt install tmpreaper -# sudo mv /etc/cron.daily/tmpreaper /etc/cron.hourly/ -# sudo mv tmpreaper.conf.sample /etc/tmpreaper.conf +# sudo mv /etc/cron.daily/tmpreaper /etc/cron.hourly/tmpreaper +# sudo cp tmpreaper.conf.sample /etc/tmpreaper.conf SHOWWARNING=false TMPREAPER_TIME=1h
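
As a footnote to the file-splitting logic added in scdlbot.py above: the size-bounded splitting it performs reduces to the following minimal ffmpeg-python sketch. File names and the size limit are illustrative; the patch itself uses MAX_TG_FILE_SIZE and the downloaded file path:

    import os

    import ffmpeg

    src = "input.mp3"           # illustrative source file
    max_part_size = 45_000_000  # bytes, mirrors MAX_TG_FILE_SIZE

    file_size = os.path.getsize(src)
    parts_number = file_size // max_part_size + 1
    part_size = file_size // parts_number

    cur_position = 0.0
    parts = []
    for i in range(parts_number):
        part = "input.part{}.mp3".format(i + 1)
        stream = ffmpeg.input(src)
        if i == parts_number - 1:
            # Last part: copy everything from the current offset to the end.
            ffmpeg.output(stream, part, codec="copy", vn=None, ss=cur_position).run()
        else:
            # Bound the output by file size (fs) and copy streams without re-encoding.
            ffmpeg.output(stream, part, codec="copy", vn=None, ss=cur_position, fs=part_size).run()
        # Probe the part just written to know where the next one should start.
        cur_position += float(ffmpeg.probe(part)["format"]["duration"])
        parts.append(part)

    print(parts)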