diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..42db774 --- /dev/null +++ b/.gitignore @@ -0,0 +1,38 @@ +# general files +*.swp + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] + +# Distribution / packaging +build/ +develop-eggs/ +dist/ +eggs/ +.eggs/ +sdist/ +wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST +pip-wheel-metadata + +# Unit test +.tox/ +.cache +.pytest_cache/ + +# pyenv +.python-version + +# Environments +.env +.venv + +# mypy +.mypy_cache/ + +# editors +.vscode diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..721b755 --- /dev/null +++ b/LICENSE @@ -0,0 +1,21 @@ +MIT License + +Copyright (c) 2021 Preferred Networks, Inc. + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. diff --git a/MANIFEST.in b/MANIFEST.in new file mode 100644 index 0000000..e608d00 --- /dev/null +++ b/MANIFEST.in @@ -0,0 +1,4 @@ +include LICENSE +include README.md + +recursive-include pysen *.py py.typed diff --git a/Pipfile b/Pipfile new file mode 100644 index 0000000..9819fa4 --- /dev/null +++ b/Pipfile @@ -0,0 +1,17 @@ +[[source]] +name = "pypi" +url = "https://pypi.org/simple" +verify_ssl = true + +[packages] +pysen = {editable = true,extras = ["lint"],path = "."} +pytest = ">=5.4.0,<6.0.0" +tox = ">=3.15.0,<4.0.0" +black = "==20.8b1" +flake8 = "==3.8.4" +flake8-bugbear = "==20.11.1" +isort = "==5.1.4" +mypy = "==0.782" + +[requires] +python_version = "3.7" diff --git a/Pipfile.lock b/Pipfile.lock new file mode 100644 index 0000000..0de8570 --- /dev/null +++ b/Pipfile.lock @@ -0,0 +1,401 @@ +{ + "_meta": { + "hash": { + "sha256": "d9e0d68ae6bfa08bba124230c7b74844d6234de7144db67f1c82624136ae83fa" + }, + "pipfile-spec": 6, + "requires": { + "python_version": "3.7" + }, + "sources": [ + { + "name": "pypi", + "url": "https://pypi.org/simple", + "verify_ssl": true + } + ] + }, + "default": { + "appdirs": { + "hashes": [ + "sha256:7d5d0167b2b1ba821647616af46a749d1c653740dd0d2415100fe26e27afdf41", + "sha256:a841dacd6b99318a741b166adb07e19ee71a274450e68237b4650ca1055ab128" + ], + "version": "==1.4.4" + }, + "attrs": { + "hashes": [ + "sha256:31b2eced602aa8423c2aea9c76a724617ed67cf9513173fd3a4f03e3a929c7e6", + "sha256:832aa3cde19744e49938b91fea06d69ecb9e649c93ba974535d08ad92164f700" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==20.3.0" + }, + "black": { + "hashes": [ + "sha256:1c02557aa099101b9d21496f8a914e9ed2222ef70336404eeeac8edba836fbea", + 
"sha256:70b62ef1527c950db59062cda342ea224d772abdf6adc58b86a45421bab20a6b" + ], + "index": "pypi", + "version": "==20.8b1" + }, + "click": { + "hashes": [ + "sha256:d2b5255c7c6349bc1bd1e59e08cd12acbbd63ce649f2588755783aa94dfb6b1a", + "sha256:dacca89f4bfadd5de3d7489b7c8a566eee0d3676333fbb50030263894c38c0dc" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==7.1.2" + }, + "colorlog": { + "hashes": [ + "sha256:0a9dcdba6cab68e8a768448b418a858d73c52b37b6e8dea2568296faece393bd", + "sha256:18d05b616438a75762d7d214b9ec3b05d274466c9f3ddd92807e755840c88251" + ], + "version": "==4.7.2" + }, + "dacite": { + "hashes": [ + "sha256:4331535f7aabb505c732fa4c3c094313fc0a1d5ea19907bf4726a7819a68b93f", + "sha256:d48125ed0a0352d3de9f493bf980038088f45f3f9d7498f090b50a847daaa6df" + ], + "markers": "python_version >= '3.6'", + "version": "==1.6.0" + }, + "distlib": { + "hashes": [ + "sha256:8c09de2c67b3e7deef7184574fc060ab8a793e7adbb183d942c389c8b13c52fb", + "sha256:edf6116872c863e1aa9d5bb7cb5e05a022c519a4594dc703843343a9ddd9bff1" + ], + "version": "==0.3.1" + }, + "filelock": { + "hashes": [ + "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59", + "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836" + ], + "version": "==3.0.12" + }, + "flake8": { + "hashes": [ + "sha256:749dbbd6bfd0cf1318af27bf97a14e28e5ff548ef8e5b1566ccfb25a11e7c839", + "sha256:aadae8761ec651813c24be05c6f7b4680857ef6afaae4651a4eccaef97ce6c3b" + ], + "index": "pypi", + "version": "==3.8.4" + }, + "flake8-bugbear": { + "hashes": [ + "sha256:528020129fea2dea33a466b9d64ab650aa3e5f9ffc788b70ea4bc6cf18283538", + "sha256:f35b8135ece7a014bc0aee5b5d485334ac30a6da48494998cc1fabf7ec70d703" + ], + "index": "pypi", + "version": "==20.11.1" + }, + "gitdb": { + "hashes": [ + "sha256:91f36bfb1ab7949b3b40e23736db18231bf7593edada2ba5c3a174a7b23657ac", + "sha256:c9e1f2d0db7ddb9a704c2a0217be31214e91a4fe1dea1efad19ae42ba0c285c9" + ], + "markers": "python_version >= '3.4'", + "version": "==4.0.5" + }, + "gitpython": { + "hashes": [ + "sha256:42dbefd8d9e2576c496ed0059f3103dcef7125b9ce16f9d5f9c834aed44a1dac", + "sha256:867ec3dfb126aac0f8296b19fb63b8c4a399f32b4b6fafe84c4b10af5fa9f7b5" + ], + "markers": "python_version >= '3.4'", + "version": "==3.1.12" + }, + "importlib-metadata": { + "hashes": [ + "sha256:ace61d5fc652dc280e7b6b4ff732a9c2d40db2c0f92bc6cb74e07b73d53a1771", + "sha256:fa5daa4477a7414ae34e95942e4dd07f62adf589143c875c133c1e53c4eff38d" + ], + "markers": "python_version < '3.8'", + "version": "==3.4.0" + }, + "isort": { + "hashes": [ + "sha256:145072eedc4927cc9c1f9478f2d83b2fc1e6469df4129c02ef4e8c742207a46c", + "sha256:ae3007f72a2e9da36febd3454d8be4b175d6ca17eb765841d5fe3d038aede79d" + ], + "index": "pypi", + "version": "==5.1.4" + }, + "pysen": { + "editable": true, + "extras": [ + "lint" + ], + "path": "." 
+ }, + "mccabe": { + "hashes": [ + "sha256:ab8a6258860da4b6677da4bd2fe5dc2c659cff31b3ee4f7f5d64e79735b80d42", + "sha256:dd8d182285a0fe56bace7f45b5e7d1a6ebcbf524e8f3bd87eb0f125271b8831f" + ], + "version": "==0.6.1" + }, + "more-itertools": { + "hashes": [ + "sha256:8e1a2a43b2f2727425f2b5839587ae37093f19153dc26c0927d1048ff6557330", + "sha256:b3a9005928e5bed54076e6e549c792b306fddfe72b2d1d22dd63d42d5d3899cf" + ], + "markers": "python_version >= '3.5'", + "version": "==8.6.0" + }, + "mypy": { + "hashes": [ + "sha256:2c6cde8aa3426c1682d35190b59b71f661237d74b053822ea3d748e2c9578a7c", + "sha256:3fdda71c067d3ddfb21da4b80e2686b71e9e5c72cca65fa216d207a358827f86", + "sha256:5dd13ff1f2a97f94540fd37a49e5d255950ebcdf446fb597463a40d0df3fac8b", + "sha256:6731603dfe0ce4352c555c6284c6db0dc935b685e9ce2e4cf220abe1e14386fd", + "sha256:6bb93479caa6619d21d6e7160c552c1193f6952f0668cdda2f851156e85186fc", + "sha256:81c7908b94239c4010e16642c9102bfc958ab14e36048fa77d0be3289dda76ea", + "sha256:9c7a9a7ceb2871ba4bac1cf7217a7dd9ccd44c27c2950edbc6dc08530f32ad4e", + "sha256:a4a2cbcfc4cbf45cd126f531dedda8485671545b43107ded25ce952aac6fb308", + "sha256:b7fbfabdbcc78c4f6fc4712544b9b0d6bf171069c6e0e3cb82440dd10ced3406", + "sha256:c05b9e4fb1d8a41d41dec8786c94f3b95d3c5f528298d769eb8e73d293abc48d", + "sha256:d7df6eddb6054d21ca4d3c6249cae5578cb4602951fd2b6ee2f5510ffb098707", + "sha256:e0b61738ab504e656d1fe4ff0c0601387a5489ca122d55390ade31f9ca0e252d", + "sha256:eff7d4a85e9eea55afa34888dfeaccde99e7520b51f867ac28a48492c0b1130c", + "sha256:f05644db6779387ccdb468cc47a44b4356fc2ffa9287135d05b70a98dc83b89a" + ], + "index": "pypi", + "version": "==0.782" + }, + "mypy-extensions": { + "hashes": [ + "sha256:090fedd75945a69ae91ce1303b5824f428daf5a028d2f6ab8a299250a846f15d", + "sha256:2d82818f5bb3e369420cb3c4060a7970edba416647068eb4c5343488a6c604a8" + ], + "version": "==0.4.3" + }, + "packaging": { + "hashes": [ + "sha256:24e0da08660a87484d1602c30bb4902d74816b6985b93de36926f5bc95741858", + "sha256:78598185a7008a470d64526a8059de9aaa449238f280fc9eb6b13ba6c4109093" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==20.8" + }, + "pathspec": { + "hashes": [ + "sha256:86379d6b86d75816baba717e64b1a3a3469deb93bb76d613c9ce79edc5cb68fd", + "sha256:aa0cb481c4041bf52ffa7b0d8fa6cd3e88a2ca4879c533c9153882ee2556790d" + ], + "version": "==0.8.1" + }, + "pluggy": { + "hashes": [ + "sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0", + "sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.13.1" + }, + "py": { + "hashes": [ + "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3", + "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.10.0" + }, + "pycodestyle": { + "hashes": [ + "sha256:2295e7b2f6b5bd100585ebcb1f616591b652db8a741695b3d8f5d28bdc934367", + "sha256:c58a7d2815e0e8d7972bf1803331fb0152f867bd89adf8a01dfd55085434192e" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.6.0" + }, + "pyflakes": { + "hashes": [ + "sha256:0d94e0e05a19e57a99444b6ddcf9a6eb2e5c68d3ca1e98e90707af8152c90a92", + "sha256:35b2d75ee967ea93b55750aa9edbbf72813e06a66ba54438df2cfac9e3c27fc8" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 
3.3'", + "version": "==2.2.0" + }, + "pyparsing": { + "hashes": [ + "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1", + "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b" + ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==2.4.7" + }, + "pytest": { + "hashes": [ + "sha256:5c0db86b698e8f170ba4582a492248919255fcd4c79b1ee64ace34301fb589a1", + "sha256:7979331bfcba207414f5e1263b5a0f8f521d0f457318836a7355531ed1a4c7d8" + ], + "index": "pypi", + "version": "==5.4.3" + }, + "regex": { + "hashes": [ + "sha256:02951b7dacb123d8ea6da44fe45ddd084aa6777d4b2454fa0da61d569c6fa538", + "sha256:0d08e71e70c0237883d0bef12cad5145b84c3705e9c6a588b2a9c7080e5af2a4", + "sha256:1862a9d9194fae76a7aaf0150d5f2a8ec1da89e8b55890b1786b8f88a0f619dc", + "sha256:1ab79fcb02b930de09c76d024d279686ec5d532eb814fd0ed1e0051eb8bd2daa", + "sha256:1fa7ee9c2a0e30405e21031d07d7ba8617bc590d391adfc2b7f1e8b99f46f444", + "sha256:262c6825b309e6485ec2493ffc7e62a13cf13fb2a8b6d212f72bd53ad34118f1", + "sha256:2a11a3e90bd9901d70a5b31d7dd85114755a581a5da3fc996abfefa48aee78af", + "sha256:2c99e97d388cd0a8d30f7c514d67887d8021541b875baf09791a3baad48bb4f8", + "sha256:3128e30d83f2e70b0bed9b2a34e92707d0877e460b402faca908c6667092ada9", + "sha256:38c8fd190db64f513fe4e1baa59fed086ae71fa45083b6936b52d34df8f86a88", + "sha256:3bddc701bdd1efa0d5264d2649588cbfda549b2899dc8d50417e47a82e1387ba", + "sha256:4902e6aa086cbb224241adbc2f06235927d5cdacffb2425c73e6570e8d862364", + "sha256:49cae022fa13f09be91b2c880e58e14b6da5d10639ed45ca69b85faf039f7a4e", + "sha256:56e01daca75eae420bce184edd8bb341c8eebb19dd3bce7266332258f9fb9dd7", + "sha256:5862975b45d451b6db51c2e654990c1820523a5b07100fc6903e9c86575202a0", + "sha256:6a8ce43923c518c24a2579fda49f093f1397dad5d18346211e46f134fc624e31", + "sha256:6c54ce4b5d61a7129bad5c5dc279e222afd00e721bf92f9ef09e4fae28755683", + "sha256:6e4b08c6f8daca7d8f07c8d24e4331ae7953333dbd09c648ed6ebd24db5a10ee", + "sha256:717881211f46de3ab130b58ec0908267961fadc06e44f974466d1887f865bd5b", + "sha256:749078d1eb89484db5f34b4012092ad14b327944ee7f1c4f74d6279a6e4d1884", + "sha256:7913bd25f4ab274ba37bc97ad0e21c31004224ccb02765ad984eef43e04acc6c", + "sha256:7a25fcbeae08f96a754b45bdc050e1fb94b95cab046bf56b016c25e9ab127b3e", + "sha256:83d6b356e116ca119db8e7c6fc2983289d87b27b3fac238cfe5dca529d884562", + "sha256:8b882a78c320478b12ff024e81dc7d43c1462aa4a3341c754ee65d857a521f85", + "sha256:8f6a2229e8ad946e36815f2a03386bb8353d4bde368fdf8ca5f0cb97264d3b5c", + "sha256:9801c4c1d9ae6a70aeb2128e5b4b68c45d4f0af0d1535500884d644fa9b768c6", + "sha256:a15f64ae3a027b64496a71ab1f722355e570c3fac5ba2801cafce846bf5af01d", + "sha256:a3d748383762e56337c39ab35c6ed4deb88df5326f97a38946ddd19028ecce6b", + "sha256:a63f1a07932c9686d2d416fb295ec2c01ab246e89b4d58e5fa468089cab44b70", + "sha256:b2b1a5ddae3677d89b686e5c625fc5547c6e492bd755b520de5332773a8af06b", + "sha256:b2f4007bff007c96a173e24dcda236e5e83bde4358a557f9ccf5e014439eae4b", + "sha256:baf378ba6151f6e272824b86a774326f692bc2ef4cc5ce8d5bc76e38c813a55f", + "sha256:bafb01b4688833e099d79e7efd23f99172f501a15c44f21ea2118681473fdba0", + "sha256:bba349276b126947b014e50ab3316c027cac1495992f10e5682dc677b3dfa0c5", + "sha256:c084582d4215593f2f1d28b65d2a2f3aceff8342aa85afd7be23a9cad74a0de5", + "sha256:d1ebb090a426db66dd80df8ca85adc4abfcbad8a7c2e9a5ec7513ede522e0a8f", + "sha256:d2d8ce12b7c12c87e41123997ebaf1a5767a5be3ec545f64675388970f415e2e", + "sha256:e32f5f3d1b1c663af7f9c4c1e72e6ffe9a78c03a31e149259f531e0fed826512", + 
"sha256:e3faaf10a0d1e8e23a9b51d1900b72e1635c2d5b0e1bea1c18022486a8e2e52d", + "sha256:f7d29a6fc4760300f86ae329e3b6ca28ea9c20823df123a2ea8693e967b29917", + "sha256:f8f295db00ef5f8bae530fc39af0b40486ca6068733fb860b42115052206466f" + ], + "version": "==2020.11.13" + }, + "six": { + "hashes": [ + "sha256:30639c035cdb23534cd4aa2dd52c3bf48f06e5f4a941509c8bafd8ce11080259", + "sha256:8b74bedcbbbaca38ff6d7491d76f2b06b3592611af620f8426e82dddb04a5ced" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==1.15.0" + }, + "smmap": { + "hashes": [ + "sha256:7bfcf367828031dc893530a29cb35eb8c8f2d7c8f2d0989354d75d24c8573714", + "sha256:84c2751ef3072d4f6b2785ec7ee40244c6f45eb934d9e543e2c51f1bd3d54c50" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==3.0.5" + }, + "toml": { + "hashes": [ + "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b", + "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f" + ], + "markers": "python_version >= '2.6' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==0.10.2" + }, + "tomlkit": { + "hashes": [ + "sha256:6babbd33b17d5c9691896b0e68159215a9387ebfa938aa3ac42f4a4beeb2b831", + "sha256:ac57f29693fab3e309ea789252fcce3061e19110085aa31af5446ca749325618" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3, 3.4'", + "version": "==0.7.0" + }, + "tox": { + "hashes": [ + "sha256:0aa777ee466f2ef18e6f58428c793c32378779e0a321dbb8934848bc3e78998c", + "sha256:f501808381c01c6d7827c2f17328be59c0a715046e94605ddca15fb91e65827d" + ], + "index": "pypi", + "version": "==3.21.2" + }, + "typed-ast": { + "hashes": [ + "sha256:07d49388d5bf7e863f7fa2f124b1b1d89d8aa0e2f7812faff0a5658c01c59aa1", + "sha256:14bf1522cdee369e8f5581238edac09150c765ec1cb33615855889cf33dcb92d", + "sha256:240296b27397e4e37874abb1df2a608a92df85cf3e2a04d0d4d61055c8305ba6", + "sha256:36d829b31ab67d6fcb30e185ec996e1f72b892255a745d3a82138c97d21ed1cd", + "sha256:37f48d46d733d57cc70fd5f30572d11ab8ed92da6e6b28e024e4a3edfb456e37", + "sha256:4c790331247081ea7c632a76d5b2a265e6d325ecd3179d06e9cf8d46d90dd151", + "sha256:5dcfc2e264bd8a1db8b11a892bd1647154ce03eeba94b461effe68790d8b8e07", + "sha256:7147e2a76c75f0f64c4319886e7639e490fee87c9d25cb1d4faef1d8cf83a440", + "sha256:7703620125e4fb79b64aa52427ec192822e9f45d37d4b6625ab37ef403e1df70", + "sha256:8368f83e93c7156ccd40e49a783a6a6850ca25b556c0fa0240ed0f659d2fe496", + "sha256:84aa6223d71012c68d577c83f4e7db50d11d6b1399a9c779046d75e24bed74ea", + "sha256:85f95aa97a35bdb2f2f7d10ec5bbdac0aeb9dafdaf88e17492da0504de2e6400", + "sha256:8db0e856712f79c45956da0c9a40ca4246abc3485ae0d7ecc86a20f5e4c09abc", + "sha256:9044ef2df88d7f33692ae3f18d3be63dec69c4fb1b5a4a9ac950f9b4ba571606", + "sha256:963c80b583b0661918718b095e02303d8078950b26cc00b5e5ea9ababe0de1fc", + "sha256:987f15737aba2ab5f3928c617ccf1ce412e2e321c77ab16ca5a293e7bbffd581", + "sha256:9ec45db0c766f196ae629e509f059ff05fc3148f9ffd28f3cfe75d4afb485412", + "sha256:9fc0b3cb5d1720e7141d103cf4819aea239f7d136acf9ee4a69b047b7986175a", + "sha256:a2c927c49f2029291fbabd673d51a2180038f8cd5a5b2f290f78c4516be48be2", + "sha256:a38878a223bdd37c9709d07cd357bb79f4c760b29210e14ad0fb395294583787", + "sha256:b4fcdcfa302538f70929eb7b392f536a237cbe2ed9cba88e3bf5027b39f5f77f", + "sha256:c0c74e5579af4b977c8b932f40a5464764b2f86681327410aa028a22d2f54937", + "sha256:c1c876fd795b36126f773db9cbb393f19808edd2637e00fd6caba0e25f2c7b64", + 
"sha256:c9aadc4924d4b5799112837b226160428524a9a45f830e0d0f184b19e4090487", + "sha256:cc7b98bf58167b7f2db91a4327da24fb93368838eb84a44c472283778fc2446b", + "sha256:cf54cfa843f297991b7388c281cb3855d911137223c6b6d2dd82a47ae5125a41", + "sha256:d003156bb6a59cda9050e983441b7fa2487f7800d76bdc065566b7d728b4581a", + "sha256:d175297e9533d8d37437abc14e8a83cbc68af93cc9c1c59c2c292ec59a0697a3", + "sha256:d746a437cdbca200622385305aedd9aef68e8a645e385cc483bdc5e488f07166", + "sha256:e683e409e5c45d5c9082dc1daf13f6374300806240719f95dc783d1fc942af10" + ], + "version": "==1.4.2" + }, + "typing-extensions": { + "hashes": [ + "sha256:7cb407020f00f7bfc3cb3e7881628838e69d8f3fcab2f64742a5e76b2f841918", + "sha256:99d4073b617d30288f569d3f13d2bd7548c3a7e4c8de87db09a9d29bb3a4a60c", + "sha256:dafc7639cde7f1b6e1acc0f457842a83e722ccca8eef5270af2d74792619a89f" + ], + "markers": "python_version < '3.8'", + "version": "==3.7.4.3" + }, + "unidiff": { + "hashes": [ + "sha256:90c5214e9a357ff4b2fee19d91e77706638e3e00592a732d9405ea4e93da981f", + "sha256:e1dd956a492ccc4351e24931b2f2d29c79e3be17a99dd8f14e95324321d93a88" + ], + "version": "==0.6.0" + }, + "virtualenv": { + "hashes": [ + "sha256:219ee956e38b08e32d5639289aaa5bd190cfbe7dafcb8fa65407fca08e808f9c", + "sha256:227a8fed626f2f20a6cdb0870054989f82dd27b2560a911935ba905a2a5e0034" + ], + "markers": "python_version >= '2.7' and python_version not in '3.0, 3.1, 3.2, 3.3'", + "version": "==20.4.0" + }, + "wcwidth": { + "hashes": [ + "sha256:beb4802a9cebb9144e99086eff703a642a13d6a0052920003a230f3294bbe784", + "sha256:c4d647b99872929fdb7bdcaa4fbe7f01413ed3d98077df798530e5b04f116c83" + ], + "version": "==0.2.5" + }, + "zipp": { + "hashes": [ + "sha256:102c24ef8f171fd729d46599845e95c7ab894a4cf45f5de11a44cc7444fb1108", + "sha256:ed5eee1974372595f9e416cc7bbeeb12335201d8081ca8a0743c954d4446e5cb" + ], + "markers": "python_version >= '3.6'", + "version": "==3.4.0" + } + }, + "develop": {} +} diff --git a/README.md b/README.md new file mode 100644 index 0000000..35c19d3 --- /dev/null +++ b/README.md @@ -0,0 +1,220 @@ +# pysen + +![](https://github.com/pfnet/pysen/blob/main/assets/imgs/pysen.gif?raw=true) + +## What is pysen? + +pysen aims to provide a unified platform to configure and run day-to-day development tools. +We envision the following scenarios in the future: + +- You open any project and `pysen run lint`, `pysen run format` will check and format the entire codebase +- Standardized coding styles are setup with a few lines in a single `pyproject.toml` file + +pysen centralizes the code and knowledge related to development tools that teams have accumulated, most notably for python linters. +You can make tasks that can be executed from both `setup.py` and our command-line tool. +We currently provide tasks that manage setting files for the following tools: + +- linters + - flake8 + - isort + - mypy + - black +- utilities + - (planned) protoc + +## What isn't pysen? + +* pysen is not a linting tool per se. Rather, `pysen run lint` orchestrates multiple python linting tools by automatically setting up their configurations from a more abstract setting for pysen. +* pysen does not manage your depedencies and packages. We recommend using package managers such as [pipenv](https://github.com/pypa/pipenv) or [poetry](https://python-poetry.org/) to lock your dependecy versions, **including the versions for the linting tools that pysen coordinates** (i.e., isort, mypy, flake8, black). 
The supported versions for these tools can be found in the `extra_requires/lint` section in pysen's [setup.py](https://github.com/pfnet/pysen/blob/main/setup.py). You should **not** rely on `pip install pysen[lint]` to control the versions of your linting tools. +* pysen is not limited to linting purposes or python. See the [plugin section](README.md#create-a-plugin-to-customize-pysen) for details. + +## Install + +### PyPI + +```sh +pip install "pysen[lint]" +``` + + +### Other installation examples + +```sh +# pipenv +pipenv install --dev "pysen[lint]==0.9.0" +# poetry +poetry add -D pysen==0.9.0 -E lint +``` + + +## Quickstart: Set up linters using pysen + +Put the following pysen configuration to `pyproject.toml` of your python package: +```toml +[tool.pysen] +version = "0.9" + +[tool.pysen.lint] +enable_black = true +enable_flake8 = true +enable_isort = true +enable_mypy = true +mypy_preset = "strict" +line_length = 88 +py_version = "py37" +[[tool.pysen.lint.mypy_targets]] + paths = ["."] +``` + +then, execute the following command: +```sh +$ pysen run lint +$ pysen run format # corrects errors with compatible commands (black, isort) +``` + +That's it! +pysen, or more accurately pysen tasks that support the specified linters, generate setting files for black, isort, mypy, and flake8 +and run them with the appropriate configuration. +For more details about the configuration items that you can write in `pyproject.toml`, please refer to `pysen/pyproject_model.py`. + +You can also add custom setup commands to your Python package by adding the following lines to its `setup.py`: +```py +import pysen +setup = pysen.setup_from_pyproject(__file__) +``` + +```sh +$ python setup.py lint +``` + +We also provide a Python interface for customizing our configuration and extending pysen. +For more details, please refer to the following two examples: +- Example configuration from Python: `examples/advanced_example/config.py` +- Example plugin for pysen: `examples/plugin_example/plugin.py` + +## How it works: Settings file directory + +Under the hood, whenever you run pysen, it generates the setting files as ephemeral temporary files to be used by linters. +You may want to keep those setting files on your disk, e.g. when you want to use them for your editor. +If that is the case, run the following command to generate the setting files to your directory of choice: + +```sh +$ pysen generate [out_dir] +``` + +You can specify the settings directory that pysen uses when you `pysen run`. +To do so add the following section to your `pyproject.toml`: + +```toml +[tool.pysen-cli] +settings_dir = "path/to/generate/settings" +``` + +When you specify a directory that already contains some configurations, pysen merges the contents. The resulting behavior may differ from when you don't specify `settings_dir`. + +Also keep in mind that this option is honored only when you use pysen through its CLI. When using pre-commit or setuptools you need to specify `settings_dir` as arguments. + +## Tips: IDE / Text editor integration + +### vim + +You can add errors that pysen reports to your quickfix window by: + +``` +:cex system("pysen run_files lint --error-format gnu ".expand('%:p')) +``` + +Another way is to set pysen to `makeprg`: + +``` +set makeprg=pysen\ run_files\ --error-format\ gnu\ lint\ % +``` + +Then running `:make` will populate your quickfix window with errors. 
+This also works with [`vim-dispatch`](https://github.com/tpope/vim-dispatch) as long as you invoke `:Make` instead of `:Dispatch` (for [this reason](https://github.com/tpope/vim-dispatch/issues/41#issuecomment-20555488)).
+
+The result will look like the following:
+
+![pysen-vim](https://github.com/pfnet/pysen/blob/main/assets/imgs/pysen_vim.gif?raw=true)
+
+### VSCode
+
+Refer to the [example task setting](/assets/vscode/tasks.json).
+Running the task will populate your "PROBLEMS" window like so:
+
+![pysen-vscode](https://github.com/pfnet/pysen/blob/main/assets/imgs/pysen_vscode.jpg?raw=true)
+
+Note that this may report duplicate errors if you have configured linters like `flake8` directly through your VSCode Python extension.
+We do not currently recommend watching for file changes to trigger the task in large projects, since `pysen` checks all files and may consume a considerable amount of time.
+
+## Configure pysen
+
+We provide two methods to write configuration for pysen.
+
+One is the `[tool.pysen.lint]` section in `pyproject.toml`.
+It is the simplest way to configure pysen, but the available settings are limited.
+
+The other method is to write a Python script that configures pysen directly.
+If you want to customize the configuration files that pysen generates, the command-line arguments that pysen takes, or any other action that pysen performs, we recommend this method.
+For more examples, please refer to `pysen/examples`.
+
+### pyproject.toml configuration model
+
+Please refer to `pysen/pyproject_model.py` for the latest model.
+
+Here is an example of a basic configuration:
+```toml
+[tool.pysen]
+version = "0.9"
+
+[tool.pysen.lint]
+enable_black = true
+enable_flake8 = true
+enable_isort = true
+enable_mypy = true
+mypy_preset = "strict"
+line_length = 88
+py_version = "py37"
+isort_known_third_party = ["numpy"]
+isort_known_first_party = ["pysen"]
+mypy_ignore_packages = ["pysen.generated.*"]
+mypy_path = ["stubs"]
+[[tool.pysen.lint.mypy_targets]]
+  paths = [".", "tests/"]
+
+[tool.pysen.lint.source]
+  includes = ["."]
+  include_globs = ["**/*.template"]
+  excludes = ["third_party/"]
+  exclude_globs = ["**/*_grpc.py"]
+
+[tool.pysen.lint.mypy_modules."pysen.scripts"]
+  preset = "entry"
+
+[tool.pysen.lint.mypy_modules."numpy"]
+  ignore_errors = true
+```
+
+### Create a plugin to customize pysen
+
+We provide a plugin interface for customizing our tool support, setting file management, setup commands, and so on.
+For more details, please refer to `pysen/examples/plugin_example`.
+
+## Development
+
+`pipenv` is required for managing our development environment.
+```sh
+# setup your environment
+$ pipenv sync
+# activate the environment
+$ pipenv shell
+```
+
+- Update dependencies in `Pipfile.lock`
+```sh
+$ pipenv lock --pre
+```
+- Run all tests
+```sh
+$ pipenv run tox
+```
diff --git a/assets/imgs/pysen.gif b/assets/imgs/pysen.gif
new file mode 100644
index 0000000..f592285
Binary files /dev/null and b/assets/imgs/pysen.gif differ
diff --git a/assets/imgs/pysen_vim.gif b/assets/imgs/pysen_vim.gif
new file mode 100644
index 0000000..10cce9b
Binary files /dev/null and b/assets/imgs/pysen_vim.gif differ
diff --git a/assets/imgs/pysen_vscode.jpg b/assets/imgs/pysen_vscode.jpg
new file mode 100644
index 0000000..14ee5f1
Binary files /dev/null and b/assets/imgs/pysen_vscode.jpg differ
diff --git a/examples/advanced_example/README.md b/examples/advanced_example/README.md
new file mode 100644
index 0000000..c6f9546
--- /dev/null
+++ b/examples/advanced_example/README.md
@@ -0,0 +1,16 @@
+## Description
+
+This is an advanced example that demonstrates the custom builder feature of `pysen`.
+This example configures linter settings for pysen without using `tool.pysen.lint` of `pyproject.toml`.
+Please see `pyproject.toml` and `lint.py` to learn how to define and use a custom builder.
+
+```sh
+$ pysen list
+available targets:
+ * lint
+    - mypy
+    - flake8
+
+$ pysen run lint
+...
+```
diff --git a/examples/advanced_example/advanced_example/__init__.py b/examples/advanced_example/advanced_example/__init__.py
new file mode 100644
index 0000000..a53c73e
--- /dev/null
+++ b/examples/advanced_example/advanced_example/__init__.py
@@ -0,0 +1,15 @@
+from typing import Any, Callable, Optional, Sequence, Set, Tuple
+
+
+def foo(
+    a: Any,
+    b: Callable[[], Tuple[int, int, str]],
+    c: Set[str],
+    d: Optional[Sequence[int]] = None,
+    e: Any = None,
+) -> None:
+    pass
+
+
+print("Hello world")
+foo(a=1, b=lambda: (1, 2, "hoge"), c=set(), d=None, e=None)
diff --git a/examples/advanced_example/lint.py b/examples/advanced_example/lint.py
new file mode 100644
index 0000000..ec51964
--- /dev/null
+++ b/examples/advanced_example/lint.py
@@ -0,0 +1,32 @@
+import pathlib
+from typing import Optional, Sequence
+
+import pysen
+from pysen import Source
+from pysen.component import ComponentBase
+from pysen.manifest import Manifest, ManifestBase
+
+
+def build(
+    components: Sequence[ComponentBase], src_path: Optional[pathlib.Path]
+) -> ManifestBase:
+    source = Source([".", "tests", "tools"])
+    mypy = pysen.Mypy(
+        setting=pysen.MypySetting.strict(),
+        module_settings={  # per module setting
+            "example_advanced_package.cmds.*": pysen.MypySetting.entry(),
+            "example_advanced_package.tools.*": pysen.MypySetting(
+                disallow_any_generics=False,
+            ),
+        },
+        mypy_targets=[
+            pysen.MypyTarget([pathlib.Path("."), pathlib.Path("tests")]),
+            pysen.MypyTarget([pathlib.Path("tools")]),
+        ],
+    )
+    flake8_setting = pysen.Flake8Setting(
+        ignore=["W", "E"], select=["B", "C", "F"], max_complexity=10
+    )
+    flake8 = pysen.Flake8(setting=flake8_setting, source=source)
+
+    return Manifest([mypy, flake8])
diff --git a/examples/advanced_example/pyproject.toml b/examples/advanced_example/pyproject.toml
new file mode 100644
index 0000000..1996a98
--- /dev/null
+++ b/examples/advanced_example/pyproject.toml
@@ -0,0 +1,8 @@
+[tool.pysen]
+builder = "lint.py"
+
+[tool.pysen.lint]
+enable_mypy = true
+
+[[tool.pysen.lint.mypy_targets]]
+  paths = ["."]
diff --git a/examples/advanced_example/setup.py b/examples/advanced_example/setup.py
new file mode 100644
index 0000000..9aa39e5
--- /dev/null
+++ b/examples/advanced_example/setup.py
@@ -0,0 +1,5 @@
+from setuptools import setup
+
+setup(
+    name="example-advanced-package", version="0.0.0", packages=[],
+)
diff --git a/examples/advanced_example/tests/test.py b/examples/advanced_example/tests/test.py
new file mode 100644
index 0000000..e69de29
diff --git a/examples/advanced_example/tools/tool.py b/examples/advanced_example/tools/tool.py
new file mode 100644
index 0000000..e69de29
diff --git a/examples/plugin_example/README.md b/examples/plugin_example/README.md
new file mode 100644
index 0000000..ed58231
--- /dev/null
+++ b/examples/plugin_example/README.md
@@ -0,0 +1,45 @@
+## Description
+
+This is an advanced example that demonstrates our plugin system.
+
+The overview of our plugin system:
+```
++-----------+ 1.  n. +-----------+ n.   1. +----------+
+| pyproject | -----> | component | ------> | manifest |
++-----------+ plugin +-----------+ builder +----------+
+```
+
+This example shows
+
+- How to implement a custom plugin and use it from pyproject.toml
+- How to implement a custom builder and use it from pyproject.toml
+
+In this example, we implemented `ShellPlugin`, which executes a given command when a given target is run.
+We register the plugin with pysen in the `tool.pysen.plugin` section of `pyproject.toml`,
+then define configurations for the plugin in `tool.pysen.plugin.shell` and `tool.pysen.plugin.pwd`.
+
+Please see `pyproject.toml`, `plugin.py`, and `builder.py` to learn how to define and use a custom plugin and builder.
+
+```sh
+$ pysen list
+available targets:
+ * lint
+    - flake8
+    - check ls
+ * hook
+    - check ls
+    - check pwd
+
+$ pysen --ignore-lint list
+available targets:
+ * lint
+    - check ls
+ * hook
+    - check ls
+    - check pwd
+
+$ pysen run lint
+...
+$ pysen run hook
+...
+```
diff --git a/examples/plugin_example/builder.py b/examples/plugin_example/builder.py
new file mode 100644
index 0000000..e6e2010
--- /dev/null
+++ b/examples/plugin_example/builder.py
@@ -0,0 +1,58 @@
+import argparse
+import pathlib
+from typing import Dict, List, Optional, Sequence
+
+from pysen import dumper
+from pysen.component import ComponentBase, LintComponentBase
+from pysen.manifest import (
+    ComponentName,
+    ManifestBase,
+    ParserType,
+    TargetName,
+    TargetType,
+    export_settings,
+    get_target,
+    get_targets,
+)
+from pysen.runner_options import PathContext, RunOptions
+
+
+class CustomManifest(ManifestBase):
+    def __init__(self, components: Sequence[ComponentBase]) -> None:
+        self._components = components
+
+    def configure_parser(self, parser: ParserType) -> None:
+        super().configure_parser(parser)
+
+        parser.add_argument(
+            "--ignore-lint",
+            action="store_true",
+            help="set True to ignore lint components",
+        )
+
+    def _get_components(self, args: argparse.Namespace) -> Sequence[ComponentBase]:
+        components = self._components
+        if args.ignore_lint:
+            components = [c for c in components if not isinstance(c, LintComponentBase)]
+        return components
+
+    def export_settings(self, paths: PathContext, args: argparse.Namespace) -> None:
+        export_settings(paths, self._components, dumper.dump)
+
+    def get_targets(self, args: argparse.Namespace) -> Dict[str, List[ComponentName]]:
+        return get_targets(self._get_components(args))
+
+    def get_target(
+        self,
+        target: TargetName,
+        paths: PathContext,
+        options: RunOptions,
+        args: argparse.Namespace,
+    ) -> TargetType:
+        return get_target(target, self._get_components(args), paths, options)
+
+
+def build(
+    components: Sequence[ComponentBase], src_path: Optional[pathlib.Path]
+) -> ManifestBase:
+    return
CustomManifest(components) diff --git a/examples/plugin_example/plugin.py b/examples/plugin_example/plugin.py new file mode 100644 index 0000000..38a784d --- /dev/null +++ b/examples/plugin_example/plugin.py @@ -0,0 +1,89 @@ +import dataclasses +import pathlib +import subprocess +from typing import DefaultDict, List, Sequence + +import dacite + +from pysen.command import CommandBase +from pysen.component import ComponentBase, RunOptions +from pysen.path import change_dir +from pysen.plugin import PluginBase +from pysen.pyproject_model import Config, PluginConfig +from pysen.reporter import Reporter +from pysen.runner_options import PathContext +from pysen.setting import SettingFile + + +class ShellCommand(CommandBase): + def __init__(self, name: str, base_dir: pathlib.Path, cmd: Sequence[str]) -> None: + self._name = name + self._base_dir = base_dir + self._cmd = cmd + + @property + def name(self) -> str: + return self._name + + def __call__(self, reporter: Reporter) -> int: + with change_dir(self._base_dir): + try: + ret = subprocess.run(self._cmd) + reporter.logger.info(f"{self._cmd} returns {ret.returncode}") + return ret.returncode + except BaseException as e: + reporter.logger.info( + f"an error occured while executing: {self._cmd}\n{e}" + ) + return 255 + + +class ShellComponent(ComponentBase): + def __init__(self, name: str, cmd: Sequence[str], targets: Sequence[str]) -> None: + self._name = name + self._cmd = cmd + self._targets = targets + + @property + def name(self) -> str: + return self._name + + def export_settings( + self, paths: PathContext, files: DefaultDict[str, SettingFile], + ) -> None: + print(f"Called export_settings at {self._name}: do nothing") + + @property + def targets(self) -> Sequence[str]: + return self._targets + + def create_command( + self, target: str, paths: PathContext, options: RunOptions + ) -> CommandBase: + assert target in self._targets + return ShellCommand(self._name, paths.base_dir, self._cmd) + + +@dataclasses.dataclass +class ShellPluginConfig: + name: str + command: List[str] + targets: List[str] + + +class ShellPlugin(PluginBase): + def load( + self, file_path: pathlib.Path, config_data: PluginConfig, root: Config + ) -> Sequence[ComponentBase]: + assert ( + config_data.config is not None + ), f"{config_data.location}.config must be not None" + config = dacite.from_dict( + ShellPluginConfig, config_data.config, dacite.Config(strict=True) + ) + return [ShellComponent(config.name, config.command, config.targets)] + + +# NOTE(igarashi): This is the entry point of a plugin method +def plugin() -> PluginBase: + return ShellPlugin() diff --git a/examples/plugin_example/pyproject.toml b/examples/plugin_example/pyproject.toml new file mode 100644 index 0000000..00b45d0 --- /dev/null +++ b/examples/plugin_example/pyproject.toml @@ -0,0 +1,25 @@ +[tool.pysen] +builder = "./builder.py" + +[tool.pysen.lint] +enable_flake8 = true +enable_mypy = true + +[[tool.pysen.lint.mypy_targets]] + paths = ["."] + +[tool.pysen.plugin.shell] +script = "plugin.py" + +[tool.pysen.plugin.shell.config] + name = "check ls" + command = ["ls"] + targets = ["lint", "hook"] + +[tool.pysen.plugin.pwd] +script = "plugin.py" + +[tool.pysen.plugin.pwd.config] + name = "check pwd" + command = ["pwd"] + targets = ["hook"] diff --git a/examples/plugin_example/setup.py b/examples/plugin_example/setup.py new file mode 100644 index 0000000..9aa39e5 --- /dev/null +++ b/examples/plugin_example/setup.py @@ -0,0 +1,5 @@ +from setuptools import setup + +setup( + 
name="example-advanced-package", version="0.0.0", packages=[], +) diff --git a/examples/simple_package/README.md b/examples/simple_package/README.md new file mode 100644 index 0000000..95032fa --- /dev/null +++ b/examples/simple_package/README.md @@ -0,0 +1,14 @@ +## Description + +This is a simple example to show you how to configure `pysen` using `pyproject.toml`. +Please see `pyproject.toml` for more details. + +You can check whether `simple_package/__init__.py` satisfies the format rules by: +```sh +$ pysen run lint +``` + +You can also correct errors by: +```sh +$ pysen run format +``` diff --git a/examples/simple_package/pyproject.toml b/examples/simple_package/pyproject.toml new file mode 100644 index 0000000..533b96b --- /dev/null +++ b/examples/simple_package/pyproject.toml @@ -0,0 +1,9 @@ +[tool.pysen.lint] +enable_black = true +enable_mypy = true +mypy_preset = "strict" +line_length = 100 +py_version = "py37" + +[[tool.pysen.lint.mypy_targets]] + paths = ["."] diff --git a/examples/simple_package/setup.py b/examples/simple_package/setup.py new file mode 100644 index 0000000..e2c51c5 --- /dev/null +++ b/examples/simple_package/setup.py @@ -0,0 +1,7 @@ +from setuptools import setup + +setup( + name="example-simple-package", + version="0.0.0", + packages=[], +) diff --git a/examples/simple_package/simple_package/__init__.py b/examples/simple_package/simple_package/__init__.py new file mode 100644 index 0000000..a53c73e --- /dev/null +++ b/examples/simple_package/simple_package/__init__.py @@ -0,0 +1,15 @@ +from typing import Any, Callable, Optional, Sequence, Set, Tuple + + +def foo( + a: Any, + b: Callable[[], Tuple[int, int, str]], + c: Set[str], + d: Optional[Sequence[int]] = None, + e: Any = None, +) -> None: + pass + + +print("Hello world") +foo(a=1, b=lambda: (1, 2, "hoge"), c=set(), d=None, e=None) diff --git a/examples/sync_cmdclass_pyproject/README.md b/examples/sync_cmdclass_pyproject/README.md new file mode 100644 index 0000000..d942c30 --- /dev/null +++ b/examples/sync_cmdclass_pyproject/README.md @@ -0,0 +1,11 @@ +## Description + +This is an example to show you how to add cmdclasses to setup.py from `pyproject.toml`. +Please see `pyproject.toml` and `setup.py` for more details. 
+ +```sh +$ pysen run lint +$ pysen run format +$ python setup.py lint +$ python setup.py format +``` diff --git a/examples/sync_cmdclass_pyproject/pyproject.toml b/examples/sync_cmdclass_pyproject/pyproject.toml new file mode 100644 index 0000000..533b96b --- /dev/null +++ b/examples/sync_cmdclass_pyproject/pyproject.toml @@ -0,0 +1,9 @@ +[tool.pysen.lint] +enable_black = true +enable_mypy = true +mypy_preset = "strict" +line_length = 100 +py_version = "py37" + +[[tool.pysen.lint.mypy_targets]] + paths = ["."] diff --git a/examples/sync_cmdclass_pyproject/setup.py b/examples/sync_cmdclass_pyproject/setup.py new file mode 100644 index 0000000..386d3a4 --- /dev/null +++ b/examples/sync_cmdclass_pyproject/setup.py @@ -0,0 +1,15 @@ +try: + import pysen + + setup = pysen.setup_from_pyproject(__file__) +except ImportError: + import setuptools + + setup = setuptools.setup + + +setup( + name="example-sync-cmdclass-pyproject", + version="0.0.0", + packages=[], +) diff --git a/examples/sync_cmdclass_pyproject/sync_cmdclass_pyproject/__init__.py b/examples/sync_cmdclass_pyproject/sync_cmdclass_pyproject/__init__.py new file mode 100644 index 0000000..a53c73e --- /dev/null +++ b/examples/sync_cmdclass_pyproject/sync_cmdclass_pyproject/__init__.py @@ -0,0 +1,15 @@ +from typing import Any, Callable, Optional, Sequence, Set, Tuple + + +def foo( + a: Any, + b: Callable[[], Tuple[int, int, str]], + c: Set[str], + d: Optional[Sequence[int]] = None, + e: Any = None, +) -> None: + pass + + +print("Hello world") +foo(a=1, b=lambda: (1, 2, "hoge"), c=set(), d=None, e=None) diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..92524a9 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,38 @@ +[tool.pysen-cli] +settings_dir = "." + +[tool.pysen] +version = "0.9" + +[tool.pysen.lint] +enable_black = true +enable_flake8 = true +enable_isort = true +enable_mypy = true +mypy_preset = "strict" +py_version = "py37" +isort_known_first_party = ["fakes", "pysen"] +[[tool.pysen.lint.mypy_targets]] + paths = [".", "tests/"] + +[tool.pysen.lint.source] + excludes = ["build/", "dist/", "examples/", ".tox"] + +[tool.pytest.ini_options] +markers = ["examples"] + +[tool.black] # automatically generated by pysen +line-length = 88 +target-version = ["py37"] + +[tool.isort] # automatically generated by pysen +default_section = "THIRDPARTY" +ensure_newline_before_comments = true +force_grid_wrap = 0 +force_single_line = false +include_trailing_comma = true +known_first_party = ["fakes", "pysen"] +line_length = 88 +multi_line_output = 3 +use_parentheses = true + diff --git a/pysen/__init__.py b/pysen/__init__.py new file mode 100644 index 0000000..ed476d1 --- /dev/null +++ b/pysen/__init__.py @@ -0,0 +1,86 @@ +import logging +import pathlib +from typing import Any, List, Optional, Sequence + +import pysen.pyproject # NOQA + +from ._version import __version__ # NOQA +from .black import Black, BlackSetting # NOQA +from .command import CommandBase # NOQA +from .component import ComponentBase # NOQA +from .factory import ConfigureLintOptions, configure_lint # NOQA +from .flake8 import Flake8, Flake8Setting # NOQA +from .isort import Isort, IsortSetting # NOQA +from .lint_command import SingleFileFormatCommandBase, SingleFileLintCommandBase # NOQA +from .logging_utils import setup_logger # NOQA +from .manifest import Manifest, ManifestBase # NOQA +from .manifest_builder import build as build_manifest # NOQA +from .mypy import Mypy, MypyPreset, MypySetting, MypyTarget # NOQA +from .path import PathLikeType # NOQA 
+from .plugin import PluginBase, PluginConfig # NOQA +from .py_version import PythonVersion # NOQA +from .pyproject import load_manifest # NOQA +from .pyproject_model import Config # NOQA +from .reporter import ReporterFactory # NOQA +from .runner import Runner # NOQA +from .runner_options import RunOptions # NOQA +from .source import Source # NOQA + +_logger = logging.getLogger(__name__) + + +try: + from .setuptools import generate_setting_files # NOQA isort:skip + from .setuptools import setup # NOQA isort:skip + from .setuptools import setup_from_pyproject # NOQA isort:skip +except ImportError: + _logger.warning("[pysen.setuptools] failed to import setuptools") + + +def run( + base_dir: pathlib.Path, + target_name: str, + manifest_args: Optional[Sequence[str]] = None, + reporter_factory: Optional[ReporterFactory] = None, + *, + settings_dir: Optional[pathlib.Path] = None, + options: Optional[RunOptions] = None, + pyproject: Optional[pathlib.Path] = None, + components: Optional[Sequence[ComponentBase]] = None, + manifest: Optional[ManifestBase] = None, +) -> bool: + func_args: List[Any] = [x is not None for x in (pyproject, components, manifest)] + if func_args.count(True) != 1: + raise ValueError( + "only one of pyproject, components, and manifest must be specified" + ) + + target: ManifestBase + if manifest is not None: + target = manifest + elif components is not None: + target = build_manifest(list(components)) + else: + assert pyproject is not None + target = load_manifest(pyproject) + + manifest_args = manifest_args or [] + reporter_factory = reporter_factory or ReporterFactory() + + assert manifest_args is not None + assert reporter_factory is not None + + runner = Runner(target) + parsed_args = runner.parse_manifest_arguments(manifest_args) + options = options or RunOptions() + runner.run( + target_name, + base_dir, + parsed_args, + reporter_factory, + options, + files=None, + settings_dir=settings_dir, + ) + + return not reporter_factory.has_error() diff --git a/pysen/_version.py b/pysen/_version.py new file mode 100644 index 0000000..3e2f46a --- /dev/null +++ b/pysen/_version.py @@ -0,0 +1 @@ +__version__ = "0.9.0" diff --git a/pysen/black.py b/pysen/black.py new file mode 100644 index 0000000..109aa5b --- /dev/null +++ b/pysen/black.py @@ -0,0 +1,102 @@ +import pathlib +from typing import DefaultDict, Optional, Sequence + +from pysen.ext import black_wrapper +from pysen.ext.black_wrapper import BlackSetting + +from .command import CommandBase +from .component import LintComponentBase +from .lint_command import LintCommandBase +from .path import resolve_path +from .reporter import Reporter +from .runner_options import PathContext, RunOptions +from .setting import SettingFile +from .source import PythonFileFilter, Source + +_SettingFileName = "pyproject.toml" + + +class BlackCommand(LintCommandBase): + def __init__( + self, + name: str, + paths: PathContext, + source: Source, + inplace_edit: bool, + ) -> None: + super().__init__(paths.base_dir, source) + self._name = name + self._setting_path = resolve_path(paths.settings_dir, _SettingFileName) + self._inplace_edit = inplace_edit + + @property + def name(self) -> str: + return self._name + + @property + def has_side_effects(self) -> bool: + return self._inplace_edit + + def __call__(self, reporter: Reporter) -> int: + sources = self._get_sources(reporter, PythonFileFilter) + reporter.logger.info(f"Checking {len(sources)} files") + return black_wrapper.run( + reporter, self.base_dir, self._setting_path, sources, 
self._inplace_edit + ) + + def run_files(self, reporter: Reporter, files: Sequence[pathlib.Path]) -> int: + covered_files = self._get_covered_files(reporter, files, PythonFileFilter) + + if len(covered_files) == 0: + return 0 + + return black_wrapper.run( + reporter, + self.base_dir, + self._setting_path, + covered_files, + self._inplace_edit, + ) + + +class Black(LintComponentBase): + def __init__( + self, + name: str = "black", + setting: Optional[BlackSetting] = None, + source: Optional[Source] = None, + ) -> None: + super().__init__(name, source) + + self._setting = setting or BlackSetting.default() + + @property + def setting(self) -> BlackSetting: + return self._setting + + @setting.setter + def setting(self, value: BlackSetting) -> None: + self._setting = value + + def export_settings( + self, + paths: PathContext, + files: DefaultDict[str, SettingFile], + ) -> None: + setting_file = files[_SettingFileName] + section, setting = self._setting.export() + setting_file.set_section(section, setting) + + @property + def targets(self) -> Sequence[str]: + return ["lint", "format"] + + def create_command( + self, target: str, paths: PathContext, options: RunOptions + ) -> CommandBase: + if target == "lint": + return BlackCommand(self.name, paths, self.source, False) + elif target == "format": + return BlackCommand(self.name, paths, self.source, True) + + raise AssertionError(f"unknown {target}") diff --git a/pysen/cli.py b/pysen/cli.py new file mode 100644 index 0000000..72bc101 --- /dev/null +++ b/pysen/cli.py @@ -0,0 +1,364 @@ +import argparse +import dataclasses +import enum +import logging +import pathlib +import sys +from typing import Optional, Sequence, Tuple + +from . import __version__, cli_config, exceptions +from .cli_config import CliConfig +from .diagnostic import DiagnosticFormatter, FLCMFormatter +from .logging_utils import setup_logger +from .manifest import ManifestBase +from .path import wrap_path +from .pyproject import find_pyproject, load_manifest +from .reporter import ReporterFactory +from .runner import Runner +from .runner_options import RunOptions + +CLI_DESCRIPTION = "pysen CLI" + + +_ErrorFormat = { + "gnu": FLCMFormatter, +} + + +@dataclasses.dataclass +class _SetupOptions: + error_formatter: Optional[DiagnosticFormatter] + options: RunOptions + loglevel: int + process_output: bool + + +@enum.unique +class LogLevel(enum.Enum): + debug = logging.DEBUG + info = logging.INFO + warning = logging.WARNING + error = logging.ERROR + + +@enum.unique +class ProcessOutputMode(enum.Enum): + auto = enum.auto() + show = enum.auto() + hide = enum.auto() + + +def _get_loglevel(user_specification: Optional[str], default: int) -> int: + if user_specification is None: + return default + else: + ret: int = LogLevel[user_specification].value + return ret + + +def _is_process_output_requested(user_specification: str, default: bool) -> bool: + user_specification_mode = ProcessOutputMode[user_specification] + return user_specification_mode == ProcessOutputMode.show or ( + user_specification_mode == ProcessOutputMode.auto and default + ) + + +def _use_pretty_logging() -> bool: + return sys.stderr.isatty() + + +def _show_version() -> None: + print(__version__) + + +def _setup_run( + base_dir: pathlib.Path, + args: argparse.Namespace, + config: Optional[CliConfig], +) -> _SetupOptions: + error_formatter: Optional[DiagnosticFormatter] = None + default_loglevel = logging.INFO + if args.error_format is not None: + error_formatter = _ErrorFormat[args.error_format] + default_loglevel = 
logging.WARNING + + # setup logger again with new configuration + loglevel = _get_loglevel(args.loglevel, default_loglevel) + setup_logger(loglevel, pretty=_use_pretty_logging()) + process_output = _is_process_output_requested( + args.process_output, error_formatter is None + ) + + options = RunOptions( + require_diagnostics=error_formatter is not None, no_parallel=args.no_parallel + ) + return _SetupOptions(error_formatter, options, loglevel, process_output) + + +def _run_target( + target_name: str, + runner: Runner, + base_dir: pathlib.Path, + args: argparse.Namespace, + files: Optional[Sequence[pathlib.Path]], + setup_options: _SetupOptions, + config: Optional[CliConfig], +) -> None: + settings_dir: Optional[pathlib.Path] = None + if config is not None: + settings_dir = config.settings_dir + + reporter_factory = ReporterFactory( + pretty=_use_pretty_logging(), + process_output=setup_options.process_output, + loglevel=setup_options.loglevel, + ) + try: + runner.run( + target_name, + base_dir, + args, + reporter_factory, + setup_options.options, + settings_dir=settings_dir, + files=files, + ) + except exceptions.CommandNotFoundError: + sys.stderr.write(f"target: {target_name} not found\n") + sys.exit(1) + + error_exit = reporter_factory.has_error() + + if setup_options.error_formatter is None: + print("\n ** execution summary **") + print(reporter_factory.format_summary()) + if error_exit: + sys.stderr.write(f"{target_name} finished with error(s)\n") + print(reporter_factory.format_error_summary()) + else: + ret = reporter_factory.format_diagnostic_summary(setup_options.error_formatter) + if reporter_factory.has_error(): + print(ret) + print(reporter_factory.format_error_summary(), file=sys.stderr) + else: + print("No errors found") + + if error_exit: + sys.exit(1) + + +def _start_run( + base_dir: pathlib.Path, + runner: Runner, + config: Optional[CliConfig], + args: argparse.Namespace, +) -> None: + target_names = args.targets + setup_options = _setup_run(base_dir, args, config) + for target_name in target_names: + _run_target( + target_name=target_name, + runner=runner, + base_dir=base_dir, + args=args, + files=None, + setup_options=setup_options, + config=config, + ) + + +def _start_run_files( + base_dir: pathlib.Path, + runner: Runner, + config: Optional[CliConfig], + args: argparse.Namespace, +) -> None: + target_name = args.target + files = [pathlib.Path(p).resolve() for p in args.files] + for p in files: + if not p.exists(): + raise FileNotFoundError(f"{p} does not exist") + + setup_options = _setup_run(base_dir, args, config) + _run_target( + target_name=target_name, + runner=runner, + base_dir=base_dir, + args=args, + files=files, + setup_options=setup_options, + config=config, + ) + + +def _start_generate( + base_dir: pathlib.Path, + runner: Runner, + config: Optional[CliConfig], + args: argparse.Namespace, +) -> None: + # NOTE(igarashi): args.export_dir must be resolved by cwd(), not base_dir + # since it is a cli argument + settings_dir: pathlib.Path = pathlib.Path(args.export_dir).resolve() + settings_dir.mkdir(parents=True, exist_ok=True) + runner.export_settings(base_dir, settings_dir, args) + + +def _start_list( + base_dir: pathlib.Path, + runner: Runner, + config: Optional[CliConfig], + args: argparse.Namespace, +) -> None: + targets = runner.get_targets(args) + print("available targets:") + for name, target in targets.items(): + print(f" * {name}") + for c in target: + print(f" - {c}") + + +def _setup_manifest_parser() -> argparse.ArgumentParser: + parser = 
argparse.ArgumentParser(description=CLI_DESCRIPTION, add_help=False) + parser.add_argument( + "--config", + type=str, + help="Path for pyproject.toml", + default=None, + ) + parser.add_argument( + "--version", + action="store_true", + help="Show pysen version and exit", + ) + parser.add_argument( + "--loglevel", + type=str, + help="Set loglevel", + choices=list(LogLevel.__members__), + ) + parser.add_argument( + "--process-output", + type=str, + help="Process output control", + default="auto", + dest="process_output", + choices=list(ProcessOutputMode.__members__), + ) + parser.add_argument( + "-s", + action="store_const", + help="Shortcut for --process-output=show", + const="show", + dest="process_output", + ) + return parser + + +def _parse_manifest_options() -> Tuple[ManifestBase, Optional[CliConfig], pathlib.Path]: + # NOTE(igarashi): show detailed help to the user when a configuration is available + # In this method, we + # - try to load an available pyproject.toml file + # - show the help when `--help` is set and pysen cannot load any config + # - `--help` is handled by `main` parser in `cli()` method in order to show detailed help + # when a config is successfully loaded + parser = _setup_manifest_parser() + args, unknown = parser.parse_known_args() + + if args.version: + _show_version() + sys.exit(0) + + setup_logger( + _get_loglevel(args.loglevel, logging.INFO), pretty=_use_pretty_logging() + ) + + path: Optional[pathlib.Path] = None + if args.config is not None: + path = wrap_path(args.config) + try: + pyproject_path = find_pyproject(path) + except FileNotFoundError as e: + if "--help" in unknown or "-h" in unknown: + parser.print_help() + sys.exit(0) + + sys.stderr.write(f"{e}\n") + sys.exit(1) + + try: + manifest = load_manifest(pyproject_path) + config = cli_config.parse(pyproject_path) + except exceptions.PysenError as e: + sys.stderr.write(f"error occured while loading {pyproject_path}: {e}\n") + sys.exit(1) + + base_dir = pyproject_path.parent + return manifest, config, base_dir + + +def cli() -> None: + manifest, config, base_dir = _parse_manifest_options() + runner = Runner(manifest) + + root_parser = _setup_manifest_parser() + manifest_parser = argparse.ArgumentParser( + description=CLI_DESCRIPTION, + parents=[root_parser], + add_help=False, + ) + runner.setup_manifest_argparse( + manifest_parser.add_argument_group("manifest options") + ) + manifest_args, _ = manifest_parser.parse_known_args() + + action_parser = argparse.ArgumentParser(parents=[manifest_parser], add_help=True) + subparsers = action_parser.add_subparsers() + run_parser = subparsers.add_parser("run", help="run target") + run_parser.add_argument( + "targets", + type=str, + help="target to run", + choices=runner.get_targets(manifest_args), + nargs="+", + ) + run_parser.add_argument( + "--error-format", type=str, choices=_ErrorFormat.keys(), default=None + ) + run_parser.add_argument("--no-parallel", action="store_true") + run_parser.set_defaults(func=_start_run) + + run_files_parser = subparsers.add_parser( + "run_files", help="run target with a specified file" + ) + run_files_parser.add_argument( + "target", + type=str, + help="target to run", + choices=runner.get_targets(manifest_args), + ) + run_files_parser.add_argument("files", type=str, help="target file", nargs="+") + run_files_parser.add_argument( + "--error-format", type=str, choices=_ErrorFormat.keys(), default=None + ) + run_files_parser.add_argument("--no-parallel", action="store_true") + run_files_parser.set_defaults(func=_start_run_files) + + 
generate_parser = subparsers.add_parser("generate", help="generate setting files") + generate_parser.add_argument( + "export_dir", type=str, help="target directory to export" + ) + generate_parser.set_defaults(func=_start_generate) + + list_parser = subparsers.add_parser( + "list", help="list available targets in manifest" + ) + list_parser.set_defaults(func=_start_list) + + action_args = action_parser.parse_args() + # for python 3.6 support, we cannot use add_subparsers(required=True) + if "func" not in action_args: + action_parser.print_help() + sys.exit(1) + + action_args.func(base_dir, runner, config, action_args) diff --git a/pysen/cli_config.py b/pysen/cli_config.py new file mode 100644 index 0000000..b2f69e7 --- /dev/null +++ b/pysen/cli_config.py @@ -0,0 +1,59 @@ +import dataclasses +import pathlib +from typing import Any, Dict, Optional + +import dacite +import tomlkit + +from .exceptions import InvalidConfigurationError +from .pyproject_model import _workaround_tomlkit_unmarshal + + +@dataclasses.dataclass +class CliConfig: + settings_dir: Optional[pathlib.Path] = None + + +def _expand_path(base_dir: pathlib.Path, s: Any) -> pathlib.Path: + if isinstance(s, pathlib.Path): + return s + elif isinstance(s, str): + return base_dir / s + else: + raise dacite.WrongTypeError(pathlib.Path, s) + + +def _load_cli_section(path: pathlib.Path) -> Optional[Dict[str, Any]]: + with path.open("r") as f: + pyproject = tomlkit.loads(f.read()) + + if "tool" not in pyproject or "pysen-cli" not in pyproject["tool"]: + return None + + section = pyproject["tool"]["pysen-cli"] + + data = _workaround_tomlkit_unmarshal(section) + assert isinstance(data, dict) + return data + + +def _parse_dict(data: Dict[str, Any], base_dir: pathlib.Path) -> CliConfig: + dacite_config = dacite.Config( + type_hooks={pathlib.Path: lambda x: _expand_path(base_dir, x)}, + strict=True, + ) + + try: + config = dacite.from_dict(CliConfig, data, dacite_config) + assert isinstance(config, CliConfig) + return config + except dacite.DaciteError as e: + raise InvalidConfigurationError(f"invalid configuration: {e}") from None + + +def parse(path: pathlib.Path) -> Optional[CliConfig]: + base_dir = path.resolve().parent + section = _load_cli_section(path) + if section is None: + return None + return _parse_dict(section, base_dir) diff --git a/pysen/command.py b/pysen/command.py new file mode 100644 index 0000000..79bac7d --- /dev/null +++ b/pysen/command.py @@ -0,0 +1,48 @@ +import pathlib +import subprocess +from abc import ABC, abstractmethod +from typing import Sequence + +from .exceptions import CommandNotFoundError, RunTargetFileNotSupported +from .reporter import Reporter + + +class CommandBase(ABC): + @property + @abstractmethod + def name(self) -> str: + ... + + @abstractmethod + def __call__(self, reporter: Reporter) -> int: + ... + + @property + def has_side_effects(self) -> bool: + return True + + def run(self, reporter: Reporter) -> int: + return self.__call__(reporter) + + def run_files(self, reporter: Reporter, files: Sequence[pathlib.Path]) -> int: + raise RunTargetFileNotSupported(self.name) + + +def check_command_installed(*validation_command: str) -> None: + err = CommandNotFoundError( + f"The command `{' '.join(validation_command)}` failed." + " Make sure it is installed." + ) + try: + retval = subprocess.call( + validation_command, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + ) + except FileNotFoundError: + # This will be raised when self.validation_command[0] does not exist. 
+ raise err + if retval == 127: + # In some cases (e.g. pyenv), FileNotFoundError is not raised. + # Instead, we look at the return code to tell if the command could not be found. + raise err diff --git a/pysen/component.py b/pysen/component.py new file mode 100644 index 0000000..8e151c5 --- /dev/null +++ b/pysen/component.py @@ -0,0 +1,50 @@ +from abc import ABC, abstractmethod +from typing import DefaultDict, Optional, Sequence + +from .command import CommandBase +from .runner_options import PathContext, RunOptions +from .setting import SettingFile +from .source import Source +from .types import ComponentName, TargetName + + +class ComponentBase(ABC): + @property + def name(self) -> Optional[ComponentName]: + return None + + def export_settings( + self, + paths: PathContext, + files: DefaultDict[str, SettingFile], + ) -> None: + pass + + @property + @abstractmethod + def targets(self) -> Sequence[TargetName]: + ... + + @abstractmethod + def create_command( + self, target: str, paths: PathContext, options: RunOptions + ) -> CommandBase: + ... + + +class LintComponentBase(ComponentBase): + def __init__(self, name: str, source: Optional[Source] = None) -> None: + self._name = name + self._source: Source = source or Source(includes=["."]) + + @property + def name(self) -> ComponentName: + return self._name + + @property + def source(self) -> Source: + return self._source + + @source.setter + def source(self, value: Source) -> None: + self._source = value diff --git a/pysen/diagnostic.py b/pysen/diagnostic.py new file mode 100644 index 0000000..09cb267 --- /dev/null +++ b/pysen/diagnostic.py @@ -0,0 +1,69 @@ +import dataclasses +from abc import ABC, abstractmethod +from pathlib import Path +from typing import Optional + + +@dataclasses.dataclass +class Diagnostic: + file_path: Path + start_line: Optional[int] = None + end_line: Optional[int] = None + start_column: Optional[int] = None + message: Optional[str] = None + diff: Optional[str] = None + + def __post_init__(self) -> None: + assert self.file_path.is_absolute() + if self.message is None and self.diff is None: + raise ValueError("either message or diff must be filled") + + +class DiagnosticFormatter(ABC): + @abstractmethod + def format(self, diagnostic: Diagnostic, command_name: str) -> str: + ... 
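A brief usage sketch, not part of this diff: a custom formatter only needs to implement format(). The GitHubAnnotationFormatter name below is hypothetical; only the Diagnostic and DiagnosticFormatter definitions above are assumed.

from pathlib import Path

from pysen.diagnostic import Diagnostic, DiagnosticFormatter


class GitHubAnnotationFormatter(DiagnosticFormatter):
    # Hypothetical formatter emitting GitHub Actions "::error" annotations.
    def format(self, diagnostic: Diagnostic, command_name: str) -> str:
        line = diagnostic.start_line or 1
        col = diagnostic.start_column or 1
        message = diagnostic.message or diagnostic.diff or ""
        return (
            f"::error file={diagnostic.file_path},line={line},col={col}::"
            f"[{command_name}] {message}"
        )


diag = Diagnostic(
    file_path=Path("/tmp/example.py"),  # Diagnostic requires an absolute path
    start_line=3,
    start_column=1,
    message="E302 expected 2 blank lines, got 1",
)
print(GitHubAnnotationFormatter().format(diag, "flake8"))
# -> ::error file=/tmp/example.py,line=3,col=1::[flake8] E302 expected 2 blank lines, got 1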
+ + +def _format_diagnostic_position(diagnostic: Diagnostic) -> str: + file_path = diagnostic.file_path.resolve() + line = diagnostic.start_line or 1 + col = diagnostic.start_column or 1 + return f"{file_path}:{line}:{col}" + + +class _FLCMFormatter(DiagnosticFormatter): + """ + FLCM: File, Line, Column, Message + also see: https://www.gnu.org/prep/standards/html_node/Errors.html + """ + + def format(self, diagnostic: Diagnostic, command_name: str) -> str: + position = _format_diagnostic_position(diagnostic) + message: str + if diagnostic.message: + message = diagnostic.message.replace("\n", "\\n") + elif diagnostic.diff: + message = diagnostic.diff.replace("\n", "\\n") + else: + raise AssertionError() + + return f"{position}:{command_name}: {message}" + + +class _PrettyFormatter(DiagnosticFormatter): + def format(self, diagnostic: Diagnostic, command_name: str) -> str: + position = _format_diagnostic_position(diagnostic) + message: str + if diagnostic.message: + message = diagnostic.message + elif diagnostic.diff: + message = f"\n{diagnostic.diff}" + else: + raise AssertionError() + + return f"[{command_name}] {position}:{message}" + + +FLCMFormatter = _FLCMFormatter() +PrettyFormatter = _PrettyFormatter() diff --git a/pysen/dist_version.py b/pysen/dist_version.py new file mode 100644 index 0000000..f815c07 --- /dev/null +++ b/pysen/dist_version.py @@ -0,0 +1,27 @@ +import logging +from typing import Optional + +import pkg_resources + +from pysen.exceptions import DistributionNotFound +from pysen.py_version import VersionRepresentation + +_logger = logging.getLogger(__name__) + + +def _get_distro(name: str) -> Optional[pkg_resources.Distribution]: + try: + return pkg_resources.get_distribution(name) + except pkg_resources.DistributionNotFound: + _logger.debug(f"distribution {name} not found", exc_info=True) + return None + + +def get_version(name: str) -> VersionRepresentation: + distro = _get_distro(name) + if distro is None: + raise DistributionNotFound( + f"Expected {name} to be installed but pkg_resources could not find it.\n" + f'Hint: Did you install "{name}" in the same Python environment as pysen?' 
+ ) + return VersionRepresentation.from_str(distro.version) diff --git a/pysen/dumper.py b/pysen/dumper.py new file mode 100644 index 0000000..86e8ad9 --- /dev/null +++ b/pysen/dumper.py @@ -0,0 +1,77 @@ +import configparser +import pathlib +from collections import OrderedDict +from typing import Any, Dict, Optional + +import tomlkit +import tomlkit.items +from tomlkit.toml_document import TOMLDocument + +from .setting import SettingFile +from .types import PRIMITIVE_TYPES, SEQUENCE_TYPES + + +def dump_toml(path: pathlib.Path, setting: SettingFile) -> None: + document: TOMLDocument = tomlkit.document() + if path.exists(): + with path.open("r") as f: + document = tomlkit.loads(f.read()) + + # NOTE(igarashi): TOMLDocument inherists Dict + entry = setting.entries() + for p, d in sorted(entry): + updated = SettingFile.update_by_entry(document, p, d) + if isinstance(updated, tomlkit.items.Table): + updated.comment("automatically generated by pysen") + + with path.open("w") as f: + buf = tomlkit.dumps(document) + f.write(buf) + + +def _repr_cfg(data: Any) -> Optional[str]: + if data is None: + return None + elif isinstance(data, PRIMITIVE_TYPES): + return str(data) + elif isinstance(data, SEQUENCE_TYPES): + items = sorted(_repr_cfg(x) for x in data) + return ",".join(x for x in items if x is not None) + else: + raise RuntimeError(f"{type(data)} is not supported in cfg") + + +def dump_cfg(path: pathlib.Path, setting: SettingFile) -> None: + flatten: Dict[str, Dict[str, Any]] = {} + for p, d in setting.entries(): + if len(p) > 1: + raise RuntimeError( + "configparser cannot handle a section whose depth is more than 1" + ) + flatten[p[0]] = d + + config = configparser.ConfigParser(allow_no_value=True) + if path.exists(): + with path.open("r") as f: + config.read_file(f) + + for p, d in sorted(flatten.items()): + config[p] = OrderedDict() + config.set(p, "# automatically generated by pysen", None) + config[p].update( + ((k, _repr_cfg(v)) for k, v in sorted(d.items())) # type: ignore[misc] + ) + + with path.open("w") as f: + config.write(f) + + +def dump(base_dir: pathlib.Path, fname: str, data: SettingFile) -> None: + target_path = base_dir / fname + ext = target_path.suffix + if ext == ".toml": + return dump_toml(target_path, data) + elif ext == ".cfg" or ext == ".ini": + return dump_cfg(target_path, data) + else: + raise RuntimeError(f"unknown extension: {ext}") diff --git a/pysen/error_lines.py b/pysen/error_lines.py new file mode 100644 index 0000000..7228ce7 --- /dev/null +++ b/pysen/error_lines.py @@ -0,0 +1,99 @@ +import logging +import re +from pathlib import Path +from typing import Callable, Iterable, Optional + +import unidiff + +from pysen.diagnostic import Diagnostic +from pysen.exceptions import UnexpectedErrorFormat + +FilePathParserType = Callable[[str], Path] +_logger = logging.getLogger(__name__) + + +def _warn_parse_error(errors: str, logger: Optional[logging.Logger]) -> None: + logger = logger or _logger + logger.warning( + "The following error(s) could not be parsed, so pysen won't format it (them).\n" + f"{errors}\n" + f"If you think this is a bug please report it to the maintainers." + ) + + +def parse_error_lines( + errors: str, logger: Optional[logging.Logger] = None +) -> Iterable[Diagnostic]: + """ + Compatible with flake8, mypy + """ + number = r"(?:0|[1-9]\d*)" + _file_path = r"^(?P.*?)" + _line = fr":(?P{number})" + _column = fr"(:(?P{number}))?" 
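    # NOTE (illustrative, not from the original diff): the m.group("file_path"),
    # m.group("line"), m.group("column") and m.group("message") calls below rely on
    # *named* groups, so the pattern fragments here presumably take a shape like:
    #   _file_path = r"^(?P<file_path>.*?)"
    #   _line = fr":(?P<line>{number})"
    #   _column = fr"(:(?P<column>{number}))?"
    #   _message = r": (?P<message>.*$)"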
+ _message = r": (?P.*$)" + pattern = _file_path + _line + _column + _message + invalid_lines = [] + for el in errors.splitlines(): + m = re.match(pattern, el) + if m is None: + invalid_lines.append(el) + continue + line = int(m.group("line")) + if m.group("column") is None: + column = None + else: + column = int(m.group("column")) + yield Diagnostic( + start_line=line, + end_line=line, + start_column=column, + message=m.group("message").lstrip(" ").rstrip("\n"), + file_path=Path(m.group("file_path")), + ) + if invalid_lines: + _warn_parse_error("\n".join(invalid_lines), logger) + + +def parse_error_diffs( + errors: str, + file_path_parser: FilePathParserType, + logger: Optional[logging.Logger] = None, +) -> Iterable[Diagnostic]: + """ + Compatible with isort, black + """ + + def _is_changed(line: unidiff.patch.Line) -> bool: + return not line.is_context + + try: + patches = unidiff.PatchSet(errors) + except unidiff.errors.UnidiffParseError: + _warn_parse_error(errors, logger) + return + for patch in patches: + for hunk in patch: + source_changes = list(filter(_is_changed, hunk.source_lines())) + if source_changes: + start_line = source_changes[0].source_line_no + end_line = source_changes[-1].source_line_no + else: + target_changes = list(filter(_is_changed, hunk.target_lines())) + assert target_changes, "expected either source or target line number" + start_line = target_changes[0].target_line_no + end_line = target_changes[-1].target_line_no + + try: + file_path = file_path_parser(patch.source_file) + except UnexpectedErrorFormat: + _warn_parse_error(patch, logger) + continue + + yield Diagnostic( + start_line=start_line, + end_line=end_line, + start_column=1, + file_path=file_path, + diff="".join(map(str, filter(_is_changed, hunk))), + ) diff --git a/pysen/exceptions.py b/pysen/exceptions.py new file mode 100644 index 0000000..814f8cd --- /dev/null +++ b/pysen/exceptions.py @@ -0,0 +1,52 @@ +import pathlib + + +class PysenError(Exception): + pass + + +class CommandNotFoundError(PysenError): + pass + + +class PysenSectionNotFoundError(PysenError): + pass + + +class InvalidCommandNameError(PysenError): + def __init__(self, name: str) -> None: + super().__init__(f"invalid command name: {name}") + + +class InvalidConfigurationError(PysenError): + pass + + +class InvalidPluginError(PysenError): + def __init__(self, module_path: str, error: str) -> None: + super().__init__(f"invalid plugin: {module_path}, {error}") + + +class InvalidManifestBuilderError(PysenError): + def __init__(self, path: pathlib.Path, error: str) -> None: + super().__init__(f"invalid manifest builder: {path}, {error}") + + +class UnexpectedErrorFormat(PysenError): + pass + + +class IncompatibleVersionError(PysenError): + pass + + +class DistributionNotFound(PysenError): + pass + + +class InvalidComponentName(PysenError): + pass + + +class RunTargetFileNotSupported(PysenError): + pass diff --git a/pysen/ext/__init__.py b/pysen/ext/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/pysen/ext/black_wrapper.py b/pysen/ext/black_wrapper.py new file mode 100644 index 0000000..52854d1 --- /dev/null +++ b/pysen/ext/black_wrapper.py @@ -0,0 +1,95 @@ +import dataclasses +import functools +import pathlib +from typing import Any, Dict, Iterable, List, Optional, Tuple + +from pysen import process_utils +from pysen.command import check_command_installed +from pysen.dist_version import get_version +from pysen.error_lines import parse_error_diffs +from pysen.exceptions import IncompatibleVersionError +from pysen.path 
import change_dir +from pysen.py_version import PythonVersion, VersionRepresentation +from pysen.reporter import Reporter +from pysen.setting import SettingBase, to_dash_case + + +@dataclasses.dataclass +class BlackSetting(SettingBase): + line_length: int = 88 + target_version: List[PythonVersion] = dataclasses.field(default_factory=list) + + @staticmethod + def default( + # NOTE(igarashi) safe to use as an argument since it is immutable + py_version: Optional[PythonVersion] = None, + ) -> "BlackSetting": + py_version = py_version or PythonVersion(3, 7) + return BlackSetting(target_version=[py_version]) + + def export(self) -> Tuple[List[str], Dict[str, Any]]: + section_name = ["tool", "black"] + + # TODO(igarashi): refactor these flaky code + # `dataclass.asdict()` converts `PythonVersion` object into an undesired dict like + # `{"major": ...}` as it is defined by a dataclass. + # `SettingBase.asdict` takes `type_hooks` argument, but it doesn't work as expected + # because the hook is called after `dataclass.asdict()` is called. + # In order to dump PythonVersion objects into a desired representation, + # the following code ignores the field when calling `asdict` and replaces it + # with the desired dump after we call `asdict()` + entries = self.asdict( + ignore_fields=["target_version"], naming_rule=to_dash_case + ) + entries["target-version"] = [] + for v in self.target_version: + entries["target-version"].append(v.short_representation) + return section_name, entries + + +def _parse_file_path(file_path: str) -> pathlib.Path: + return pathlib.Path(file_path.split(" ")[0]) + + +@functools.lru_cache(1) +def _check_black_version() -> None: + version = get_version("black") + compatible_versions = [ + VersionRepresentation(19, 10), + VersionRepresentation(20, 8), + ] + + if all(not v.is_compatible(version) for v in compatible_versions): + raise IncompatibleVersionError( + "pysen only supports black versions: " + f"{{{', '.join(v.version for v in compatible_versions)}}}. " + f"version {version} is not supported." 
+ ) + + +def run( + reporter: Reporter, + base_dir: pathlib.Path, + setting_path: pathlib.Path, + sources: Iterable[pathlib.Path], + inplace_edit: bool, +) -> int: + check_command_installed("black", "--version") + _check_black_version() + + targets = [str(d) for d in sources] + if len(targets) == 0: + return 0 + + cmd = ( + ["black", "--config", str(setting_path)] + + (["--diff", "--check"] if not inplace_edit else []) + + targets + ) + with change_dir(base_dir): + ret, stdout, _ = process_utils.run(cmd, reporter) + + diagnostics = parse_error_diffs(stdout, _parse_file_path, logger=reporter.logger) + reporter.report_diagnostics(list(diagnostics)) + + return ret diff --git a/pysen/ext/flake8_wrapper.py b/pysen/ext/flake8_wrapper.py new file mode 100644 index 0000000..ebeedb8 --- /dev/null +++ b/pysen/ext/flake8_wrapper.py @@ -0,0 +1,120 @@ +import copy +import dataclasses +import functools +import pathlib +from typing import Any, Dict, Iterable, List, Optional, Sequence, Tuple + +from pysen import process_utils +from pysen.command import check_command_installed +from pysen.dist_version import get_version +from pysen.error_lines import parse_error_lines +from pysen.exceptions import IncompatibleVersionError +from pysen.path import change_dir +from pysen.reporter import Reporter +from pysen.setting import SettingBase, to_dash_case + +_SettingFileName = "setup.cfg" + + +def _contains(target: Sequence[str], item: str) -> bool: + code_category = item[0].upper() + + for x in target: + if len(x) == 1: + if x.upper() == code_category: + return True + else: + if x == item: + return True + + return False + + +@dataclasses.dataclass +class Flake8Setting(SettingBase): + max_line_length: int = 88 + select: Optional[List[str]] = None + ignore: Optional[List[str]] = None + enable_extensions: Optional[List[str]] = None + max_complexity: Optional[int] = None + + _comments: List[str] = dataclasses.field(default_factory=list) + + @staticmethod + def default() -> "Flake8Setting": + return Flake8Setting( + select=["B", "C", "E", "F", "W", "B950"], + ).to_black_compatible() + + def to_black_compatible(self) -> "Flake8Setting": + new = copy.deepcopy(self) + if new.ignore is None: + new.ignore = [] + + if not _contains(new.ignore, "E203"): + new.ignore.append("E203") + new._comments.append("# E203: black treats : as a binary operator") + + if not _contains(new.ignore, "E231"): + new.ignore.append("E231") + new._comments.append("# E231: black doesn't put a space after ,") + + if not _contains(new.ignore, "E501"): + new.ignore.append("E501") + new._comments.append( + "# E501: black may exceed the line-length to follow other style rules" + ) + + W503_or_504_enabled = _contains(new.ignore, "W503") or _contains( + new.ignore, "W504" + ) + if not W503_or_504_enabled: + new.ignore.append("W503") + new._comments.append( + "# W503 or W504: either one needs to be disabled to select W error codes" + ) + + return new + + def export(self) -> Tuple[Sequence[str], Dict[str, Any]]: + section_name = "flake8" + entries = self.asdict( + omit_none=True, naming_rule=to_dash_case, ignore_fields=["_comments"] + ) + for c in self._comments: + assert c.startswith("#") and c not in entries + entries[c] = None + + return [section_name], entries + + +@functools.lru_cache(1) +def _check_flake8_version() -> None: + version = get_version("flake8") + if version.major != 3 or version.minor < 7: + raise IncompatibleVersionError( + "pysen only supports flake8 version >=3.7, <4. " + f"version {version} is not supported." 
+ ) + + +def run( + reporter: Reporter, + base_dir: pathlib.Path, + setting_path: pathlib.Path, + sources: Iterable[pathlib.Path], +) -> int: + check_command_installed("flake8", "--version") + _check_flake8_version() + targets = [str(d) for d in sources] + if len(targets) == 0: + return 0 + + cmd = ["flake8", "--config", str(setting_path)] + targets + with change_dir(base_dir): + ret, stdout, _ = process_utils.run(cmd, reporter) + + diagnostics = parse_error_lines(stdout, logger=reporter.logger) + reporter.report_diagnostics(list(diagnostics)) + + return ret diff --git a/pysen/ext/isort_wrapper.py b/pysen/ext/isort_wrapper.py new file mode 100644 index 0000000..4b32410 --- /dev/null +++ b/pysen/ext/isort_wrapper.py @@ -0,0 +1,144 @@ +import copy +import dataclasses +import enum +import functools +import pathlib +from typing import Any, Dict, Iterable, List, Optional, Set, Tuple + +from pysen import process_utils +from pysen.command import check_command_installed +from pysen.dist_version import get_version +from pysen.error_lines import parse_error_diffs +from pysen.exceptions import IncompatibleVersionError, UnexpectedErrorFormat +from pysen.path import change_dir +from pysen.py_version import VersionRepresentation +from pysen.reporter import Reporter +from pysen.setting import SettingBase + +_SettingFileName = "pyproject.toml" + + +class IsortSectionName(enum.Enum): + FUTURE = "FUTURE" + STDLIB = "STDLIB" + THIRDPARTY = "THIRDPARTY" + FIRSTPARTY = "FIRSTPARTY" + LOCALFOLDER = "LOCALFOLDER" + + +@functools.lru_cache(1) +def _get_isort_version() -> VersionRepresentation: + version = get_version("isort") + if version.major not in [4, 5]: + raise IncompatibleVersionError( + "pysen only supports isort versions 4 and 5. " + f"version {version} is not supported." + ) + + return version + + +def _check_version_compatibility( + ensure_newline_before_comments: Optional[bool], + version: VersionRepresentation, +) -> None: + if version.major == 4 and ensure_newline_before_comments is not None: + raise IncompatibleVersionError( + "isort option `ensure_newline_before_comments`" + f"is not supported in your isort version {version}" + ) + + +@dataclasses.dataclass +class IsortSetting(SettingBase): + force_grid_wrap: int = 0 + force_single_line: bool = False + include_trailing_comma: bool = True + known_first_party: Optional[Set[str]] = None + known_third_party: Optional[Set[str]] = None + line_length: int = 88 + multi_line_output: int = 3 + default_section: Optional[IsortSectionName] = None + sections: Optional[List[IsortSectionName]] = None + use_parentheses: bool = True + ensure_newline_before_comments: Optional[bool] = None + + @staticmethod + def default() -> "IsortSetting": + return IsortSetting() + + def to_black_compatible(self) -> "IsortSetting": + # NOTE(igarashi) + # multi_line_output: black uses 3 (Vertical Hanging Indent) + # include_trailing_comma: black appends trailing comma + # force_grid_wrap: the property means isort grid-wrap the statement regardless + # of line length if the number of `from` imports is greater than + # the property. black doesn't grid wrap the statemenet if it + # doesn't exceed the line length. 
+ # use_parentheses: use parenthesis for line continuation instead of `\` + new = copy.deepcopy(self) + + new.multi_line_output = 3 + new.include_trailing_comma = True + new.force_grid_wrap = 0 + new.use_parentheses = True + + # See issue #277 + isort_version = _get_isort_version().major + if isort_version >= 5: + new.ensure_newline_before_comments = True + return new + + def export(self) -> Tuple[List[str], Dict[str, Any]]: + section_name = ["tool", "isort"] + + _check_version_compatibility( + self.ensure_newline_before_comments, + _get_isort_version(), + ) + entries = self.asdict( + omit_none=True, type_hooks={IsortSectionName: lambda x: x.value} + ) + return section_name, entries + + +def _parse_file_path(file_path: str) -> pathlib.Path: + ret = file_path.split(" ")[0] + before_suffix = ":before" + after_suffix = ":after" + if ret.endswith(before_suffix): + return pathlib.Path(ret.rsplit(before_suffix, 1)[0]) + elif ret.endswith(after_suffix): + return pathlib.Path(ret.rsplit(after_suffix, 1)[0]) + else: + raise UnexpectedErrorFormat(file_path) + + +def run( + reporter: Reporter, + base_dir: pathlib.Path, + setting_path: pathlib.Path, + sources: Iterable[pathlib.Path], + inplace_edit: bool, +) -> int: + check_command_installed("isort", "--version") + version = _get_isort_version() + + targets = [str(d) for d in sources] + if len(targets) == 0: + return 0 + + cmd = ["isort", "--settings-path", str(setting_path)] + if version.major == 4: + cmd.append("--recursive") + if not inplace_edit: + cmd += ["--diff", "--check-only"] + cmd += targets + + with change_dir(base_dir): + ret, stdout, _ = process_utils.run(cmd, reporter) + + diagnostics = parse_error_diffs(stdout, _parse_file_path, logger=reporter.logger) + reporter.report_diagnostics(list(diagnostics)) + + return ret diff --git a/pysen/ext/mypy_wrapper.py b/pysen/ext/mypy_wrapper.py new file mode 100644 index 0000000..9c44cc0 --- /dev/null +++ b/pysen/ext/mypy_wrapper.py @@ -0,0 +1,223 @@ +import dataclasses +import enum +import functools +import pathlib +from typing import Any, Dict, List, Optional, Sequence, Tuple + +from pysen import process_utils +from pysen.command import check_command_installed +from pysen.dist_version import get_version +from pysen.error_lines import parse_error_lines +from pysen.exceptions import IncompatibleVersionError +from pysen.path import PathLikeType, change_dir, get_relative_path, resolve_path +from pysen.py_version import PythonVersion +from pysen.reporter import Reporter +from pysen.setting import SettingBase + +_IgnoreFields: List[str] = ["_pysen_convert_abspath"] + + +class MypyFollowImports(enum.Enum): + NORMAL = "normal" + SILENT = "silent" + SKIP = "skip" + ERROR = "error" + + +@dataclasses.dataclass +class MypyPlugin: + script: Optional[pathlib.Path] = None + function: Optional[str] = None + + def __post_init__(self) -> None: + if self.script is None and self.function is None: + raise ValueError("either script or function must be specified") + + if self.script is not None and self.function is not None: + raise ValueError("cannot specify both script and function") + + def as_config(self, relative_from: Optional[pathlib.Path] = None) -> str: + if self.function is not None: + return self.function + + assert self.script is not None + if relative_from is not None: + return get_relative_path(self.script, relative_from) + else: + return str(self.script) + + +@dataclasses.dataclass +class MypySetting(SettingBase): + python_version: Optional[PythonVersion] = None + + check_untyped_defs: Optional[bool] = 
None + disallow_any_decorated: Optional[bool] = None + disallow_any_generics: Optional[bool] = None + disallow_any_unimported: Optional[bool] = None + disallow_incomplete_defs: Optional[bool] = None + disallow_subclassing_any: Optional[bool] = None + disallow_untyped_calls: Optional[bool] = None + disallow_untyped_decorators: Optional[bool] = None + disallow_untyped_defs: Optional[bool] = None + follow_imports: Optional[MypyFollowImports] = None + ignore_errors: Optional[bool] = None + ignore_missing_imports: Optional[bool] = None + mypy_path: Optional[List[PathLikeType]] = None + no_implicit_optional: Optional[bool] = None + pretty: Optional[bool] = None + show_error_codes: Optional[bool] = None + strict_equality: Optional[bool] = None + strict_optional: Optional[bool] = None + warn_redundant_casts: Optional[bool] = None + warn_return_any: Optional[bool] = None + warn_unreachable: Optional[bool] = None + warn_unused_configs: Optional[bool] = None + warn_unused_ignores: Optional[bool] = None + plugins: Optional[List[MypyPlugin]] = None + + # configuration for export settings + _pysen_convert_abspath: bool = False + + @staticmethod + def very_strict(**kwargs: Any) -> "MypySetting": + updates = { + "check_untyped_defs": True, + "disallow_any_decorated": True, + "disallow_any_generics": True, + "disallow_any_unimported": True, + "disallow_incomplete_defs": True, + "disallow_subclassing_any": True, + "disallow_untyped_calls": True, + "disallow_untyped_decorators": True, + "disallow_untyped_defs": True, + "ignore_errors": False, + "ignore_missing_imports": False, + "no_implicit_optional": True, + "show_error_codes": True, + "strict_equality": True, + "strict_optional": True, + "warn_redundant_casts": True, + "warn_return_any": True, + "warn_unreachable": True, + "warn_unused_configs": True, + "warn_unused_ignores": True, + } + updates.update(kwargs) + return MypySetting(**updates) # type: ignore + + @staticmethod + def strict(**kwargs: Any) -> "MypySetting": + updates = { + "disallow_any_decorated": False, + "disallow_any_unimported": False, + "disallow_untyped_decorators": False, + "ignore_missing_imports": True, + } + updates.update(kwargs) + setting = MypySetting.very_strict(**updates) + return setting + + @staticmethod + def entry(**kwargs: Any) -> "MypySetting": + updates = { + "disallow_untyped_calls": False, + "disallow_untyped_defs": False, + "warn_return_any": False, + } + updates.update(kwargs) + setting = MypySetting.strict(**updates) + return setting + + def export( + self, base_dir: pathlib.Path, target_module: Optional[str] = None + ) -> Tuple[Sequence[str], Dict[str, Any]]: + section_name = "mypy" + if target_module is not None: + section_name += f"-{target_module}" + + # TODO(igarashi): refactor these flaky code, see: black.py + entries = self.asdict( + _IgnoreFields + + ["python_version", "mypy_path", "plugins", "follow_imports"], + omit_none=True, + ) + if self.python_version is not None: + entries["python_version"] = self.python_version.version + + if self.mypy_path is not None: + mypy_path: List[str] = [] + for p in self.mypy_path: + path: str + if self._pysen_convert_abspath: + path = get_relative_path(p, base_dir) + else: + path = str(p) + + mypy_path.append(path) + + entries["mypy_path"] = mypy_path + + if self.plugins is not None: + relative_from: Optional[pathlib.Path] = None + if self._pysen_convert_abspath: + relative_from = base_dir + + entries["plugins"] = [p.as_config(relative_from) for p in self.plugins] + + if self.follow_imports is not None: + 
entries["follow_imports"] = self.follow_imports.value + + return [section_name], entries + + +@dataclasses.dataclass +class MypyTarget: + paths: List[pathlib.Path] + + +@functools.lru_cache(1) +def _check_mypy_version() -> None: + version = get_version("mypy") + if version.major != 0 or version.minor < 770: + raise IncompatibleVersionError( + f"pysen only supports mypy version >=0.770, <1. " + f"version {version} is not supported." + ) + + +def run( + reporter: Reporter, + base_dir: pathlib.Path, + setting_path: pathlib.Path, + target: MypyTarget, + require_diagnostics: bool, +) -> int: + check_command_installed("mypy", "--version") + _check_mypy_version() + + target_paths = [str(resolve_path(base_dir, x)) for x in target.paths] + if len(target_paths) == 0: + return 0 + + extra_options: List[str] = ["--show-absolute-path"] + if require_diagnostics: + extra_options += [ + "--no-color-output", + "--show-column-numbers", + "--no-error-summary", + ] + else: + extra_options += [ + "--pretty", + ] + + cmd = ["mypy"] + extra_options + ["--config-file", str(setting_path)] + target_paths + with change_dir(base_dir): + ret, stdout, _ = process_utils.run(cmd, reporter) + + if require_diagnostics: + diagnostics = parse_error_lines(stdout, logger=reporter.logger) + reporter.report_diagnostics(list(diagnostics)) + + return ret diff --git a/pysen/factory.py b/pysen/factory.py new file mode 100644 index 0000000..f8de1e5 --- /dev/null +++ b/pysen/factory.py @@ -0,0 +1,131 @@ +import dataclasses +import pathlib +from typing import Dict, List, Optional + +from .black import Black, BlackSetting +from .component import ComponentBase +from .flake8 import Flake8, Flake8Setting +from .isort import Isort, IsortSectionName, IsortSetting +from .mypy import ( + Mypy, + MypyFollowImports, + MypyPlugin, + MypyPreset, + MypySetting, + MypyTarget, +) +from .py_version import PythonVersion +from .source import Source + + +@dataclasses.dataclass +class MypyModuleOption: + preset: Optional[MypyPreset] = None + ignore_errors: bool = False + follow_imports: Optional[MypyFollowImports] = None + + def __post_init__(self) -> None: + if self.preset is not None and self.ignore_errors: + raise ValueError("cannot specify both preset and ignore_errors") + + def get_setting(self) -> MypySetting: + if self.ignore_errors: + return MypySetting(ignore_errors=True, follow_imports=self.follow_imports) + + preset: MypyPreset + if self.preset is not None: + preset = self.preset + else: + preset = MypyPreset.STRICT + + return preset.get_setting(follow_imports=self.follow_imports) + + +@dataclasses.dataclass +class ConfigureLintOptions: + enable_black: Optional[bool] = None + enable_flake8: Optional[bool] = None + enable_isort: Optional[bool] = None + enable_mypy: Optional[bool] = None + mypy_preset: Optional[MypyPreset] = None + mypy_modules: Optional[Dict[str, MypyModuleOption]] = None + source: Optional[Source] = None + line_length: Optional[int] = None + py_version: Optional[PythonVersion] = None + isort_known_third_party: Optional[List[str]] = None + isort_known_first_party: Optional[List[str]] = None + isort_default_section: Optional[IsortSectionName] = None + mypy_path: Optional[List[pathlib.Path]] = None + mypy_plugins: Optional[List[MypyPlugin]] = None + mypy_targets: Optional[List[MypyTarget]] = None + + +def configure_lint(options: ConfigureLintOptions) -> List[ComponentBase]: + components: List[ComponentBase] = [] + + python_version: PythonVersion + if options.py_version is not None: + python_version = options.py_version + else: + 
python_version = PythonVersion(3, 7) + + line_length = options.line_length or 88 + + # NOTE: `isort` may format code in a way that violates `black` rules + # Apply `isort` after `black` to avoid such violation + if options.enable_isort: + isort_setting = IsortSetting.default() + isort_setting.line_length = line_length + isort_setting.default_section = ( + options.isort_default_section or IsortSectionName.THIRDPARTY + ) + if options.isort_known_third_party is not None: + isort_setting.known_third_party = set(options.isort_known_third_party) + if options.isort_known_first_party is not None: + isort_setting.known_first_party = set(options.isort_known_first_party) + + if options.enable_black: + isort_setting = isort_setting.to_black_compatible() + + isort = Isort(setting=isort_setting, source=options.source) + components.append(isort) + + if options.enable_black: + black_setting = BlackSetting.default(python_version) + black_setting.line_length = line_length + black = Black(setting=black_setting, source=options.source) + components.append(black) + + if options.enable_flake8: + flake8_setting = Flake8Setting.default() + flake8_setting.max_line_length = line_length + if options.enable_black: + flake8_setting = flake8_setting.to_black_compatible() + + flake8 = Flake8(setting=flake8_setting, source=options.source) + components.append(flake8) + + if options.enable_mypy: + if options.mypy_preset is not None: + mypy_setting = options.mypy_preset.get_setting() + else: + mypy_setting = MypySetting.strict() + mypy_setting.python_version = python_version + if options.mypy_path is not None: + mypy_setting.mypy_path = list(options.mypy_path) + if options.mypy_plugins is not None: + mypy_setting.plugins = list(options.mypy_plugins) + + mypy_module_settings: Dict[str, MypySetting] = {} + if options.mypy_modules is not None: + for module_name, module_option in options.mypy_modules.items(): + mypy_module_settings[module_name] = module_option.get_setting() + + mypy = Mypy( + setting=mypy_setting, + module_settings=mypy_module_settings, + mypy_targets=options.mypy_targets, + ) + components.append(mypy) + + return components diff --git a/pysen/flake8.py b/pysen/flake8.py new file mode 100644 index 0000000..dc077ab --- /dev/null +++ b/pysen/flake8.py @@ -0,0 +1,84 @@ +import pathlib +from typing import DefaultDict, Optional, Sequence + +from pysen.ext import flake8_wrapper +from pysen.ext.flake8_wrapper import Flake8Setting + +from .command import CommandBase +from .component import LintComponentBase +from .lint_command import LintCommandBase +from .path import resolve_path +from .reporter import Reporter +from .runner_options import PathContext, RunOptions +from .setting import SettingFile +from .source import PythonFileFilter, Source + +_SettingFileName = "setup.cfg" + + +class Flake8Command(LintCommandBase): + def __init__(self, name: str, paths: PathContext, source: Source) -> None: + super().__init__(paths.base_dir, source) + self._name = name + self._setting_path = resolve_path(paths.settings_dir, _SettingFileName) + + @property + def name(self) -> str: + return self._name + + @property + def has_side_effects(self) -> bool: + return False + + def __call__(self, reporter: Reporter) -> int: + sources = self._get_sources(reporter, PythonFileFilter) + reporter.logger.info(f"Checking {len(sources)} files") + return flake8_wrapper.run(reporter, self.base_dir, self._setting_path, sources) + + def run_files(self, reporter: Reporter, files: Sequence[pathlib.Path]) -> int: + covered_files = 
self._get_covered_files(reporter, files, PythonFileFilter) + + if len(covered_files) == 0: + return 0 + + return flake8_wrapper.run(reporter, self.base_dir, self._setting_path, files) + + +class Flake8(LintComponentBase): + def __init__( + self, + name: str = "flake8", + setting: Optional[Flake8Setting] = None, + source: Optional[Source] = None, + ) -> None: + super().__init__(name, source) + self._setting = setting or Flake8Setting.default() + + @property + def setting(self) -> Flake8Setting: + return self._setting + + @setting.setter + def setting(self, value: Flake8Setting) -> None: + self._setting = value + + def export_settings( + self, + paths: PathContext, + files: DefaultDict[str, SettingFile], + ) -> None: + setting_file = files[_SettingFileName] + section, setting = self._setting.export() + setting_file.set_section(section, setting) + + @property + def targets(self) -> Sequence[str]: + return ["lint"] + + def create_command( + self, target: str, paths: PathContext, options: RunOptions + ) -> CommandBase: + if target == "lint": + return Flake8Command(self.name, paths, self.source) + + raise AssertionError(f"unknown {target}") diff --git a/pysen/git_utils.py b/pysen/git_utils.py new file mode 100644 index 0000000..5440828 --- /dev/null +++ b/pysen/git_utils.py @@ -0,0 +1,99 @@ +import functools +import logging +import os +import pathlib +import threading +from typing import Sequence, Tuple, cast + +_logger = logging.getLogger(__name__) +_lock = threading.Lock() + +try: + import git + + _git_available = True +except ImportError: + _git_available = False + _logger.warning("[pysen.git_utils] git is not available") + + +class GitRepositoryNotFoundError(Exception): + pass + + +def _check_git_enabled() -> bool: + if not _git_available: + return False + + if os.environ.get("PYSEN_IGNORE_GIT", "0") != "0": + return False + + return True + + +def check_git_available(target_dir: pathlib.Path) -> bool: + with _lock: + try: + if not _check_git_enabled(): + return False + + with git.Repo(target_dir, search_parent_directories=True): + return True + except git.InvalidGitRepositoryError: + return False + + +def _list_indexed_files(target_dir: pathlib.Path) -> Sequence[pathlib.Path]: + if not _check_git_enabled(): + return [] + + # Ensure abs_target_dir ends with / + # We avoid pathlib.Path because the loop calling predicate is performance critical. 
+ abs_target_dir = os.path.join(str(target_dir.resolve()), "") + + def predicate(item: Tuple[int, git.Blob]) -> bool: + blob = item[1] + ret: bool = blob.abspath.startswith(abs_target_dir) + return ret + + try: + with git.Repo(target_dir, search_parent_directories=True) as repo: + deleted_files = set( + diff.a_blob.abspath + for diff in repo.index.diff(None) + if diff.change_type == "D" + ) + blobs = set(blob.abspath for _, blob in repo.index.iter_blobs(predicate)) + return [pathlib.Path(abspath) for abspath in blobs - deleted_files] + except git.InvalidGitRepositoryError: + raise GitRepositoryNotFoundError() from None + + +@functools.lru_cache(8) +def list_indexed_files(target_dir: pathlib.Path) -> Sequence[pathlib.Path]: + with _lock: + return _list_indexed_files(target_dir) + + +def _check_tracked(path: pathlib.Path) -> bool: + if not _check_git_enabled(): + return False + + # TODO(igarashi) use git command directly for better performance + abspath = str(path.expanduser().resolve()) + + def predicate(item: Tuple[int, git.Blob]) -> bool: + blob = item[1] + return cast(bool, blob.abspath == abspath) + + try: + with git.Repo(path, search_parent_directories=True) as repo: + items = list(repo.index.iter_blobs(predicate)) + return len(items) > 0 + except git.InvalidGitRepositoryError: + raise GitRepositoryNotFoundError() from None + + +def check_tracked(path: pathlib.Path) -> bool: + with _lock: + return _check_tracked(path) diff --git a/pysen/isort.py b/pysen/isort.py new file mode 100644 index 0000000..36a080b --- /dev/null +++ b/pysen/isort.py @@ -0,0 +1,101 @@ +import pathlib +from typing import DefaultDict, Optional, Sequence + +from .command import CommandBase +from .component import LintComponentBase +from .ext import isort_wrapper +from .ext.isort_wrapper import IsortSectionName # NOQA: F401 +from .ext.isort_wrapper import IsortSetting +from .lint_command import LintCommandBase +from .path import resolve_path +from .reporter import Reporter +from .runner_options import PathContext, RunOptions +from .setting import SettingFile +from .source import PythonFileFilter, Source + +_SettingFileName = "pyproject.toml" + + +class IsortCommand(LintCommandBase): + def __init__( + self, + name: str, + paths: PathContext, + source: Source, + inplace_edit: bool, + ) -> None: + super().__init__(paths.base_dir, source) + self._name = name + self._setting_path = resolve_path(paths.settings_dir, _SettingFileName) + self._inplace_edit = inplace_edit + + @property + def name(self) -> str: + return self._name + + @property + def has_side_effects(self) -> bool: + return self._inplace_edit + + def __call__(self, reporter: Reporter) -> int: + sources = self._get_sources(reporter, PythonFileFilter) + reporter.logger.info(f"Checking {len(sources)} files") + return isort_wrapper.run( + reporter, self.base_dir, self._setting_path, sources, self._inplace_edit + ) + + def run_files(self, reporter: Reporter, files: Sequence[pathlib.Path]) -> int: + covered_files = self._get_covered_files(reporter, files, PythonFileFilter) + + if len(covered_files) == 0: + return 0 + + return isort_wrapper.run( + reporter, + self.base_dir, + self._setting_path, + files, + self._inplace_edit, + ) + + +class Isort(LintComponentBase): + def __init__( + self, + name: str = "isort", + setting: Optional[IsortSetting] = None, + source: Optional[Source] = None, + ) -> None: + super().__init__(name, source) + self._setting = setting or IsortSetting.default() + + @property + def setting(self) -> IsortSetting: + return self._setting + + 
@setting.setter + def setting(self, value: IsortSetting) -> None: + self._setting = value + + def export_settings( + self, + paths: PathContext, + files: DefaultDict[str, SettingFile], + ) -> None: + setting_file = files[_SettingFileName] + section, setting = self._setting.export() + setting_file.set_section(section, setting) + + @property + def targets(self) -> Sequence[str]: + return ["lint", "format"] + + def create_command( + self, target: str, paths: PathContext, options: RunOptions + ) -> CommandBase: + if target == "lint": + return IsortCommand(self.name, paths, self.source, False) + elif target == "format": + return IsortCommand(self.name, paths, self.source, True) + + raise AssertionError(f"unknown {target}") diff --git a/pysen/lint_command.py b/pysen/lint_command.py new file mode 100644 index 0000000..b629dbf --- /dev/null +++ b/pysen/lint_command.py @@ -0,0 +1,134 @@ +import difflib +import logging +import pathlib +from abc import abstractmethod +from typing import Iterable, List, Optional, Sequence, Set + +from . import git_utils +from .command import CommandBase +from .error_lines import parse_error_diffs +from .reporter import Reporter +from .source import FilePredicateType, Source + + +class LintCommandBase(CommandBase): + def __init__(self, base_dir: pathlib.Path, source: Source) -> None: + self._base_dir = base_dir + self._source = source + + def _get_sources( + self, reporter: Reporter, filter_predicate: FilePredicateType + ) -> Set[pathlib.Path]: + return self.source.resolve_files( + self.base_dir, + filter_predicate, + self.git_enabled(), + reporter, + ) + + def _get_covered_files( + self, + reporter: Reporter, + files: Sequence[pathlib.Path], + filter_predicate: FilePredicateType, + ) -> List[pathlib.Path]: + sources = self._get_sources(reporter, filter_predicate) + covered: List[pathlib.Path] = [] + + for f in files: + if f in sources: + covered.append(f) + else: + reporter.logger.info(f"Skipping {f} for {self.name}") + + return covered + + @property + def base_dir(self) -> pathlib.Path: + return self._base_dir + + def git_enabled(self) -> bool: + return git_utils.check_git_available(self.base_dir) + + @property + def source(self) -> Source: + return self._source + + +class SingleFileLintCommandBase(LintCommandBase): + def _run(self, reporter: Reporter, file_paths: Iterable[pathlib.Path]) -> int: + # NOTE(igarashi): create a list to evaluate check() for all file paths + if all([self.check(file_path, reporter) for file_path in file_paths]): + return 0 + else: + return 1 + + def __call__(self, reporter: Reporter) -> int: + file_paths = self._get_sources(reporter, self.filter) + reporter.logger.info(f"Checking {len(file_paths)} files") + return self._run(reporter, file_paths) + + def run_files(self, reporter: Reporter, files: Sequence[pathlib.Path]) -> int: + covered_files = self._get_covered_files(reporter, files, self.filter) + return self._run(reporter, covered_files) + + @abstractmethod + def filter(self, file_path: pathlib.Path) -> bool: + ... + + @abstractmethod + def check(self, file_path: pathlib.Path, reporter: Reporter) -> bool: + ... 
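A minimal sketch, not part of this diff: a concrete SingleFileLintCommandBase subclass only has to provide name, filter() and check(); source resolution, per-file iteration and run_files() handling come from the base classes above. The TrailingWhitespaceCommand name is hypothetical.

import pathlib

from pysen.lint_command import SingleFileLintCommandBase
from pysen.reporter import Reporter


class TrailingWhitespaceCommand(SingleFileLintCommandBase):
    @property
    def name(self) -> str:
        return "trailing-whitespace"

    def filter(self, file_path: pathlib.Path) -> bool:
        # Restrict the command to Python sources.
        return file_path.suffix == ".py"

    def check(self, file_path: pathlib.Path, reporter: Reporter) -> bool:
        ok = True
        with file_path.open() as f:
            for lineno, line in enumerate(f, start=1):
                if line.rstrip("\n") != line.rstrip():
                    reporter.logger.warning(
                        f"{file_path}:{lineno}: trailing whitespace"
                    )
                    ok = False
        return ok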
+ + +class SingleFileFormatCommandBase(SingleFileLintCommandBase): + def __init__( + self, base_dir: pathlib.Path, source: Source, inplace_edit: bool + ) -> None: + super().__init__(base_dir, source) + self._inplace_edit = inplace_edit + + @property + def inplace_edit(self) -> bool: + return self._inplace_edit + + def check(self, file_path: pathlib.Path, reporter: Reporter) -> bool: + formatted = self.format(file_path, reporter) + if formatted is None: + return False + + if self._inplace_edit: + with file_path.open(mode="w") as f: + f.write(formatted) + return True + + else: + with file_path.open() as f: + original = f.readlines() + diff = "".join( + difflib.unified_diff( + original, + formatted.splitlines(True), + fromfile=str(file_path), + tofile=str(file_path), + ) + ) + if len(diff) == 0: + return True + else: + reporter.process_output.log(logging.INFO, diff) + diagnostics = parse_error_diffs( + diff, lambda _: file_path, logger=reporter.logger + ) + reporter.report_diagnostics(list(diagnostics)) + return False + + @abstractmethod + def format(self, file_path: pathlib.Path, reporter: Reporter) -> Optional[str]: + """Returns formatted content without modifying the original file. + Note: + If a file cannot be formatted due to its content (e.g. invalid syntax), + this method should return `None`. + In this case, `SingleFileFormatCommandBase` continues to check other files. + """ + ... diff --git a/pysen/logging_utils.py b/pysen/logging_utils.py new file mode 100644 index 0000000..dffcd08 --- /dev/null +++ b/pysen/logging_utils.py @@ -0,0 +1,171 @@ +import collections +import contextlib +import dataclasses +import logging +import logging.handlers +from typing import DefaultDict, Iterator, List, Optional, cast + +import colorlog +from colorlog import ColoredFormatter + +_PROCESS_STDOUT_LEVEL = 11 +_PROCESS_STDOUT = "STDOUT" +_PROCESS_STDERR_LEVEL = 12 +_PROCESS_STDERR = "STDERR" + +logging.addLevelName(_PROCESS_STDOUT_LEVEL, _PROCESS_STDOUT) +logging.addLevelName(_PROCESS_STDERR_LEVEL, _PROCESS_STDERR) + + +def _concat_logging_path(lhs: str, rhs: str) -> str: + return f"{lhs}.{rhs}" + + +PYSEN_LOGGER_PREFIX = "__PYSEN__" +REPORTER_LOGGER_PREFIX = _concat_logging_path(PYSEN_LOGGER_PREFIX, "reporter") +PROCESS_OUTPUT_LOGGER_PREFIX = _concat_logging_path(PYSEN_LOGGER_PREFIX, "proc") + +pysen_root_logger = logging.getLogger(PYSEN_LOGGER_PREFIX) +pysen_root_logger.propagate = False + +reporter_root_logger = logging.getLogger(REPORTER_LOGGER_PREFIX) +reporter_root_logger.setLevel(logging.CRITICAL) + +process_output_root_logger = logging.getLogger(PROCESS_OUTPUT_LOGGER_PREFIX) +process_output_root_logger.setLevel(logging.INFO) +# NOTE(igarashi): Set NullHandler() so that process_output_root_logger does not call +# the lastResort logger when propagate = False. 
+# See: https://github.com/python/cpython/blob/c3dd7e45cc5d36bbe2295c2840faabb5c75d83e4/Lib/logging/__init__.py#L1672-L1679 # NOQA +process_output_root_logger.addHandler(logging.NullHandler()) +process_output_root_logger.propagate = False + +_logging_output_colors = { + **colorlog.default_log_colors, + **{_PROCESS_STDOUT: "", _PROCESS_STDERR: "yellow"}, +} + + +def get_reporter_logger(name: str) -> logging.Logger: + return logging.getLogger(_concat_logging_path(REPORTER_LOGGER_PREFIX, name)) + + +def get_process_output_logger(name: str) -> logging.Logger: + return logging.getLogger(_concat_logging_path(PROCESS_OUTPUT_LOGGER_PREFIX, name)) + + +NamedRecords = DefaultDict[str, List[logging.LogRecord]] + + +class _GroupedMemoryHandler(logging.handlers.MemoryHandler): + def __init__(self, target: logging.Handler, capacity: int = 1024) -> None: + super().__init__(capacity, target=target) + self._named_records: NamedRecords = collections.defaultdict(list) + + def emit(self, record: logging.LogRecord) -> None: + self._named_records[record.name.split(".")[-1]].append(record) + + def shouldFlush(self, record: logging.LogRecord) -> bool: + return False + + def setFormatter(self, fmt: logging.Formatter) -> None: + self.target: Optional[logging.Handler] + assert self.target is not None + self.target.setFormatter(fmt) + + def flush(self) -> None: + names = sorted(self._named_records.keys()) + for name in names: + for record in self._named_records[name]: + super().emit(record) + super().flush() + self._named_records = collections.defaultdict(list) + + +def setup_logger(loglevel: int, pretty: bool = True) -> None: + root_logger = logging.getLogger("pysen") + root_logger.setLevel(loglevel) + root_logger.handlers.clear() + + handler = logging.StreamHandler() + handler.setFormatter(_general_formatter(pretty)) + root_logger.addHandler(handler) + + +class _LoggingUnit: + def __init__( + self, loglevel: int, is_grouped: bool, pretty: bool, is_process_enabled: bool + ) -> None: + self._loglevel = loglevel + self._is_grouped = is_grouped + self._pretty = pretty + self._is_process_enabled = is_process_enabled + self._handler = self._create_handler() + + def setup(self) -> None: + self._handler.setLevel(logging.DEBUG) + self._handler.setFormatter(self._create_formatter()) + pysen_root_logger.addHandler(self._handler) + reporter_root_logger.setLevel(self._loglevel) + if self._is_process_enabled: + process_output_root_logger.propagate = True + + def finalize(self) -> None: + self._handler.flush() + pysen_root_logger.removeHandler(self._handler) + process_output_root_logger.propagate = False + + def _create_handler(self) -> logging.Handler: + if self._is_grouped: + return _GroupedMemoryHandler(target=logging.StreamHandler()) + else: + return logging.StreamHandler() + + def _create_formatter(self) -> logging.Formatter: + if self._pretty: + ret: logging.Formatter = _CustomColoredFormatter( + "%(log_color)s%(message)s", + log_colors=_logging_output_colors, + ) + return ret + else: + return logging.Formatter("%(message)s") + + +def _general_formatter(pretty: bool) -> logging.Formatter: + if pretty: + ret: logging.Formatter = ColoredFormatter("%(log_color)s%(message)s") + return ret + else: + return logging.Formatter("%(message)s") + + +def _get_process_output_level_name(loglevel: int) -> str: + if loglevel <= logging.INFO: + return _PROCESS_STDOUT + else: + return _PROCESS_STDERR + + +class _CustomColoredFormatter(colorlog.ColoredFormatter): # type: ignore + def format(self, record: logging.LogRecord) -> str: + if 
record.name.startswith(PROCESS_OUTPUT_LOGGER_PREFIX): + record.levelname = _get_process_output_level_name(record.levelno) + + return cast(str, super().format(record)) + + +@dataclasses.dataclass(frozen=True) +class CommandLoggingOptions: + is_grouped: bool + pretty: bool + process_output: bool + + @contextlib.contextmanager + def start_logging(self, loglevel: int) -> Iterator[None]: + unit = _LoggingUnit(loglevel, self.is_grouped, self.pretty, self.process_output) + unit.setup() + + try: + yield + finally: + unit.finalize() diff --git a/pysen/manifest.py b/pysen/manifest.py new file mode 100644 index 0000000..b13925b --- /dev/null +++ b/pysen/manifest.py @@ -0,0 +1,213 @@ +import argparse +import collections +import dataclasses +import pathlib +from abc import ABC, abstractmethod +from typing import Callable, DefaultDict, Dict, Iterable, List, Optional, Sequence + +from .command import CommandBase +from .component import ComponentBase +from .dumper import dump +from .exceptions import InvalidComponentName +from .runner_options import PathContext, RunOptions +from .setting import SettingFile +from .types import ComponentName, TargetName + +DumpHandlerType = Callable[ + [pathlib.Path, str, SettingFile], + None, +] + +ParserType = argparse._ActionsContainer +TargetType = List[CommandBase] + + +def get_targets( + components: Sequence[ComponentBase], +) -> Dict[TargetName, List[ComponentName]]: + result: DefaultDict[str, List[ComponentName]] = collections.defaultdict(list) + for c in components: + targets = c.targets + for t in targets: + result[t].append(c.name or "(no name)") + + return dict(result) + + +def get_target( + target: TargetName, + components: Sequence[ComponentBase], + paths: PathContext, + options: RunOptions, +) -> TargetType: + result: TargetType = [] + for c in components: + targets = c.targets + if target in targets: + result.append(c.create_command(target, paths, options)) + + return result + + +def export_settings( + paths: PathContext, + components: Sequence[ComponentBase], + dump_handler: DumpHandlerType, +) -> None: + files: DefaultDict[str, SettingFile] = collections.defaultdict(SettingFile) + for c in components: + c.export_settings(paths, files) + + for fname, setting in files.items(): + try: + dump_handler( + paths.settings_dir, + fname, + setting, + ) + except Exception as err: + raise RuntimeError(f"got an unexpected error while creating {fname}: {err}") + + +class ManifestBase(ABC): + def configure_parser(self, parser: ParserType) -> None: + pass + + @abstractmethod + def export_settings(self, paths: PathContext, args: argparse.Namespace) -> None: + ... + + @abstractmethod + def get_targets( + self, args: argparse.Namespace + ) -> Dict[TargetName, List[ComponentName]]: + ... + + @abstractmethod + def get_target( + self, + target: TargetName, + paths: PathContext, + options: RunOptions, + args: argparse.Namespace, + ) -> TargetType: + ... 
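A short usage sketch, not part of this diff: get_targets() simply aggregates the target names advertised by each component, which is what the CLI uses to populate the run/run_files choices. Only configure_lint() and the components introduced earlier in this diff are assumed; the printed shape is approximate.

from pysen.factory import ConfigureLintOptions, configure_lint
from pysen.manifest import get_targets

components = configure_lint(
    ConfigureLintOptions(enable_isort=True, enable_flake8=True)
)
print(get_targets(components))
# roughly: {"lint": ["isort", "flake8"], "format": ["isort"]}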
+ + +@dataclasses.dataclass(frozen=True) +class ManifestOptions: + enabled: Optional[List[str]] + disabled: Optional[List[str]] + + @staticmethod + def configure_parser(parser: ParserType) -> None: + group = parser.add_mutually_exclusive_group() + group.add_argument( + "--enable", action="append", help="component names to enable" + ) + group.add_argument( + "--disable", action="append", help="component names to disable" + ) + + @classmethod + def from_parsed_arguments(cls, args: argparse.Namespace) -> "ManifestOptions": + return cls(enabled=args.enable, disabled=args.disable) + + +def _verify_names( + components: List[ComponentBase], names: List[str], option: str +) -> None: + diff = set(names).difference(component.name for component in components) + if diff: + diff_str = ",".join(sorted(diff)) + raise InvalidComponentName( + f"The following component(s) in option {option} were not found: {diff_str}" + ) + + +class Manifest(ManifestBase): + def __init__( + self, + components: Optional[Sequence[ComponentBase]] = None, + dump_handler: Optional[DumpHandlerType] = None, + ) -> None: + self._components: List[ComponentBase] = [] + if components is not None: + self._components = list(components) + + self._dump_handler: DumpHandlerType = dump_handler or dump + + def configure_parser(self, parser: ParserType) -> None: + ManifestOptions.configure_parser(parser) + + @property + def components(self) -> List[ComponentBase]: + return self._components + + def get_component(self, name: str) -> ComponentBase: + for c in self._components: + if c.name == name: + return c + else: + raise KeyError(name) + + def _filter_components(self, args: argparse.Namespace) -> Iterable[ComponentBase]: + options = ManifestOptions.from_parsed_arguments(args) + enable = options.enabled + disable = options.disabled + + if enable is None and disable is None: + yield from self._components + return + + predicate: Callable[[ComponentBase], bool] + if enable is not None: + _verify_names(self._components, enable, "--enable") + + def enable_predicate(x: ComponentBase) -> bool: + assert enable is not None + if x.name is None: + return False + return x.name in enable + + predicate = enable_predicate + elif disable is not None: + _verify_names(self._components, disable, "--disable") + + def disable_predicate(x: ComponentBase) -> bool: + assert disable is not None + if x.name is None: + return True + return x.name not in disable + + predicate = disable_predicate + else: + # NOTE(igarashi): `enable` and `disable` are in a mutually exclusive group + raise AssertionError() + + for x in self._components: + if predicate(x): + yield x + + def export_settings(self, paths: PathContext, args: argparse.Namespace) -> None: + # NOTE(igarashi): tomlkit doesn't keep the comments in the original file. + # It will cause unnecessary changes when pysen doesn't export all configurations + # since some comments like "automatically generated by pysen" would be partially removed + # Therefore, ignore given args (`--disable` and `--enable`) as a workaround. 
+ export_settings(paths, self._components, self._dump_handler) + + def get_targets( + self, args: argparse.Namespace + ) -> Dict[TargetName, List[ComponentName]]: + components = list(self._filter_components(args)) + return get_targets(components) + + def get_target( + self, + target: TargetName, + paths: PathContext, + options: RunOptions, + args: argparse.Namespace, + ) -> TargetType: + components = list(self._filter_components(args)) + return get_target(target, components, paths, options) diff --git a/pysen/manifest_builder.py b/pysen/manifest_builder.py new file mode 100644 index 0000000..9bbf6f1 --- /dev/null +++ b/pysen/manifest_builder.py @@ -0,0 +1,45 @@ +import pathlib +from typing import Optional, Sequence + +from . import py_module +from .component import ComponentBase +from .exceptions import InvalidManifestBuilderError +from .manifest import Manifest, ManifestBase + + +def _build_external( + path: pathlib.Path, + components: Sequence[ComponentBase], + src_path: Optional[pathlib.Path], +) -> ManifestBase: + module = py_module.load(path, "builder") + entry_point = getattr(module, "build", None) + if entry_point is None or not callable(entry_point): + raise InvalidManifestBuilderError( + path, "external builder must have `build` method" + ) + + ret = entry_point(components, src_path) + if not isinstance(ret, ManifestBase): + raise InvalidManifestBuilderError( + path, "`build` must return an instance of ManifestBase" + ) + + return ret + + +def _build( + components: Sequence[ComponentBase], src_path: Optional[pathlib.Path] +) -> ManifestBase: + return Manifest(components) + + +def build( + components: Sequence[ComponentBase], + src_path: Optional[pathlib.Path] = None, + external_builder: Optional[pathlib.Path] = None, +) -> ManifestBase: + if external_builder is not None: + return _build_external(external_builder, components, src_path) + + return _build(components, src_path) diff --git a/pysen/mypy.py b/pysen/mypy.py new file mode 100644 index 0000000..76a474b --- /dev/null +++ b/pysen/mypy.py @@ -0,0 +1,202 @@ +import itertools +import pathlib +from enum import Enum +from typing import Any, Callable, DefaultDict, Dict, List, Mapping, Optional, Sequence + +from .command import CommandBase +from .component import ComponentBase +from .ext import mypy_wrapper +from .ext.mypy_wrapper import ( # NOQA + MypyFollowImports, + MypyPlugin, + MypySetting, + MypyTarget, +) +from .path import is_covered, resolve_path +from .reporter import Reporter +from .runner_options import PathContext, RunOptions +from .setting import SettingFile + +_SettingFileName = "setup.cfg" + + +def _get_differences_from_base( + entries: Dict[str, Any], base_entries: Dict[str, Any] +) -> Dict[str, Any]: + # NOTE(igarashi): do not use `dict(entries.items() - base_entries.items())` + # since entries may contain an unhashable type like lists. + added = entries.keys() - base_entries.keys() + duplicated = entries.keys() & base_entries.keys() + + diff: Dict[str, Any] = {} + + for key in added: + diff[key] = entries[key] + + for key in duplicated: + if entries[key] == base_entries[key]: + continue + + diff[key] = entries[key] + + # NOTE(igarashi): Since there is no way to obtain a default value for each removed item, + # we cannot put them to diff. + # TODO(igarashi): Implement field attribute to each MypySetting option so that + # we can get a default value. 
Uncomment the following code: + # _IgnoreDifferenceFields = {"_pysen_convert_abspath", "python_version", "mypy_path"} + # removed = base_entries.keys() - entries.keys() - _IgnoreDifferenceFields + + return diff + + +class MypyPreset(Enum): + VERY_STRICT = (MypySetting.very_strict,) + STRICT = (MypySetting.strict,) + ENTRY = (MypySetting.entry,) + + def __init__(self, factory: Callable[..., MypySetting]) -> None: + self._factory = factory + + def get_setting(self, **kwargs: Any) -> MypySetting: + return self._factory(**kwargs) + + +class MypyCommand(CommandBase): + def __init__( + self, + name: str, + paths: PathContext, + mypy_targets: Sequence[MypyTarget], + require_diagnostics: bool, + ) -> None: + self._name = name + self._base_dir = paths.base_dir + self._mypy_targets: List[MypyTarget] = list(mypy_targets) + + self._setting_path = resolve_path(paths.settings_dir, _SettingFileName) + self._require_diagnostics = require_diagnostics + + @property + def name(self) -> str: + return self._name + + @property + def has_side_effects(self) -> bool: + return False + + @property + def base_dir(self) -> pathlib.Path: + return self._base_dir + + @property + def setting_path(self) -> pathlib.Path: + return self._setting_path + + def __call__(self, reporter: Reporter) -> int: + exit_code: int = 0 + num_targets = len(self._mypy_targets) + + if num_targets == 0: + reporter.logger.error( + "No mypy targets specified. " + "You must specify at least one entry in `tools.pysen.lint.mypy_targets`." + ) + return 2 + + for idx, target in enumerate(self._mypy_targets): + reporter.logger.info( + f"[{idx+1}/{num_targets}] Checking {len(target.paths)} entries" + ) + ret = mypy_wrapper.run( + reporter, + self.base_dir, + self.setting_path, + target, + self._require_diagnostics, + ) + if ret != 0: + exit_code = ret + + return exit_code + + def run_files(self, reporter: Reporter, files: Sequence[pathlib.Path]) -> int: + sources = list( + itertools.chain.from_iterable(target.paths for target in self._mypy_targets) + ) + covered_files: List[pathlib.Path] = [] + for f in files: + if is_covered(f, sources): + covered_files.append(f) + else: + reporter.logger.info(f"Skipping {f} for {self._name}") + + if len(covered_files) == 0: + return 0 + + return mypy_wrapper.run( + reporter, + self.base_dir, + self._setting_path, + MypyTarget(covered_files), + self._require_diagnostics, + ) + + +class Mypy(ComponentBase): + def __init__( + self, + name: str = "mypy", + mypy_targets: Optional[Sequence[MypyTarget]] = None, + setting: Optional[MypySetting] = None, + module_settings: Optional[Mapping[str, MypySetting]] = None, + ) -> None: + self._name = name + self._mypy_targets = list(mypy_targets or []) + self._setting: MypySetting = setting or MypySetting() + self._module_settings: Dict[str, MypySetting] = dict(module_settings or {}) + + @property + def name(self) -> str: + return self._name + + @property + def setting(self) -> MypySetting: + return self._setting + + @property + def module_settings(self) -> Dict[str, MypySetting]: + return self._module_settings + + @property + def mypy_targets(self) -> List[MypyTarget]: + return self._mypy_targets + + def export_settings( + self, + paths: PathContext, + files: DefaultDict[str, SettingFile], + ) -> None: + setting_file = files[_SettingFileName] + global_section, global_setting = self._setting.export(paths.base_dir) + setting_file.set_section(global_section, global_setting) + + for module_name, setting in self._module_settings.items(): + section, module_setting = setting.export( + 
paths.base_dir, target_module=module_name + ) + module_setting = _get_differences_from_base(module_setting, global_setting) + setting_file.set_section(section, module_setting) + + @property + def targets(self) -> Sequence[str]: + return ["lint"] + + def create_command( + self, target: str, paths: PathContext, options: RunOptions + ) -> CommandBase: + if target == "lint": + return MypyCommand( + self.name, paths, self.mypy_targets, options.require_diagnostics + ) + + raise AssertionError(f"unknown {target}") diff --git a/pysen/path.py b/pysen/path.py new file mode 100644 index 0000000..f61e3b5 --- /dev/null +++ b/pysen/path.py @@ -0,0 +1,50 @@ +import contextlib +import os +import pathlib +from typing import Iterable, Iterator, Union + +PathLikeType = Union[pathlib.Path, str] + + +def wrap_path(s: PathLikeType) -> pathlib.Path: + if isinstance(s, pathlib.Path): + return s + return pathlib.Path(s) + + +def resolve_path(base_dir: pathlib.Path, path: PathLikeType) -> pathlib.Path: + return (base_dir / path).expanduser().resolve() + + +def get_relative_path(path: PathLikeType, base_dir: pathlib.Path) -> str: + pth = wrap_path(path) + if not pth.is_absolute(): + return str(pth) + + return os.path.relpath(pth, base_dir) + + +@contextlib.contextmanager +def change_dir(dst: pathlib.Path) -> Iterator[None]: + old = pathlib.Path.cwd() + try: + os.chdir(dst) + yield + finally: + os.chdir(old) + + +def is_covered(path: pathlib.Path, sources: Iterable[pathlib.Path]) -> bool: + """ + Checks if `path` is contained in any of the subdirectories in sources. + See the test cases for details. + """ + path = path.resolve() + abs_path = [source.resolve() for source in sources] + return any(c in abs_path for c in list(path.parents) + [path]) + + +def is_contained(parent: pathlib.Path, child: pathlib.Path) -> bool: + if not parent.is_absolute() or not child.is_absolute(): + raise ValueError("Argument 'parent' and 'child' must be absolute") + return str(child).startswith(str(parent)) diff --git a/pysen/plugin.py b/pysen/plugin.py new file mode 100644 index 0000000..f397ba9 --- /dev/null +++ b/pysen/plugin.py @@ -0,0 +1,14 @@ +import pathlib +from abc import ABC, abstractmethod +from typing import Sequence + +from .component import ComponentBase +from .pyproject_model import Config, PluginConfig + + +class PluginBase(ABC): + @abstractmethod + def load( + self, file_path: pathlib.Path, config: PluginConfig, root: Config + ) -> Sequence[ComponentBase]: + ... diff --git a/pysen/plugin_loader.py b/pysen/plugin_loader.py new file mode 100644 index 0000000..153441f --- /dev/null +++ b/pysen/plugin_loader.py @@ -0,0 +1,68 @@ +import importlib +import pathlib +from types import ModuleType +from typing import Optional + +from . 
import py_module +from .exceptions import InvalidPluginError +from .plugin import PluginBase +from .py_module import _parse_entry_point + + +def _load(module: ModuleType, func_name: str) -> PluginBase: + entry_point = getattr(module, func_name, None) + if entry_point is None or not callable(entry_point): + raise InvalidPluginError( + module.__file__, f"expected to have `{func_name}` method: {module.__file__}" + ) + + try: + ret = entry_point() + except Exception as e: + raise RuntimeError( + f"an error occurred while loading {module.__file__}::{func_name}", + ) from e + + if not isinstance(ret, PluginBase): + raise InvalidPluginError( + module.__file__, f"`{func_name}` must return an instance of PluginBase" + ) + + return ret + + +def load_from_file(path: pathlib.Path) -> PluginBase: + module = py_module.load(path, "plugin") + return _load(module, "plugin") + + +def load_from_module(entry_point: str) -> PluginBase: + parsed = _parse_entry_point(entry_point) + if parsed is None: + raise ValueError(f"invalid entry_point: {entry_point}") + + module_name, func_name = parsed + module = importlib.import_module(module_name) + return _load(module, func_name) + + +def load_plugin( + function: Optional[str] = None, script: Optional[pathlib.Path] = None +) -> PluginBase: + if function is None and script is None: + raise TypeError("must specify either function or script") + + if function is not None and script is not None: + raise TypeError("only one of function or script must be specified") + + if function is not None: + try: + return load_from_module(function) + except BaseException: + if script is not None: + pass # fallback + else: + raise + + assert script is not None + return load_from_file(script) diff --git a/pysen/process_utils.py b/pysen/process_utils.py new file mode 100644 index 0000000..7b79fed --- /dev/null +++ b/pysen/process_utils.py @@ -0,0 +1,53 @@ +import contextlib +import logging +import subprocess +from concurrent.futures import ThreadPoolExecutor +from typing import IO, List, Sequence, Tuple + +from .reporter import Reporter + + +def _read_stream(stream: IO[bytes], reporter: Reporter, loglevel: int) -> str: + ret: List[str] = [] + for s in stream: + line = s.decode("utf-8") + ret.append(line) + reporter.process_output.log(loglevel, line.rstrip("\n")) + + return "".join(ret) + + +def run( + cmd: Sequence[str], + reporter: Reporter, + stdout_loglevel: int = logging.INFO, + stderr_loglevel: int = logging.WARNING, +) -> Tuple[int, str, str]: + returncode: int = -1 + stdout: str = "" + stderr: str = "" + + with contextlib.ExitStack() as stack: + reporter.report_command(" ".join(cmd)) + proc = stack.enter_context( + subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE) + ) + try: + pool = stack.enter_context(ThreadPoolExecutor(max_workers=2)) + stdout_task = pool.submit( + _read_stream, proc.stdout, reporter, stdout_loglevel + ) + stderr_task = pool.submit( + _read_stream, proc.stderr, reporter, stderr_loglevel + ) + + proc.wait() + + stdout = stdout_task.result() + stderr = stderr_task.result() + returncode = proc.returncode + except Exception: + proc.kill() + raise + + return returncode, stdout, stderr diff --git a/pysen/py.typed b/pysen/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/pysen/py_module.py b/pysen/py_module.py new file mode 100644 index 0000000..4fa0bbc --- /dev/null +++ b/pysen/py_module.py @@ -0,0 +1,51 @@ +import importlib.machinery +import itertools +import pathlib +import re +import sys +import types +from types import ModuleType
+from typing import Optional, Tuple + +_module_counter = itertools.count() + + +ID_REGEX = r"[_A-Za-z][_a-zA-Z0-9]*" +DOTTED_ID_REGEX = rf"{ID_REGEX}(\.{ID_REGEX})*" +ENTRY_POINT_REGEX = rf"^(?P<module>{DOTTED_ID_REGEX})::(?P<func>{ID_REGEX})$" + +entry_point = re.compile(ENTRY_POINT_REGEX) + + +def _parse_entry_point(expr: str) -> Optional[Tuple[str, str]]: + matches = list(entry_point.finditer(expr)) + if len(matches) != 1: + return None + + match = matches[0] + module = match.group("module") + func = match.group("func") + assert module is not None + assert func is not None + return module, func + + +def load(path: pathlib.Path, module_name_prefix: str) -> ModuleType: + if not path.exists() or not path.is_file(): + raise FileNotFoundError(path) + + # NOTE(igarashi): add unique number to module_name to avoid module name conflict + # NOTE(igarashi): since next(itertools.count) doesn't release the GIL, it is an atomic operation + counter = next(_module_counter) + module_name = f"pysen._modules.{module_name_prefix}_{counter}" + + loader = importlib.machinery.SourceFileLoader(module_name, str(path)) + module = types.ModuleType(loader.name) + # NOTE(igarashi): typing raises an error if the module is not included in sys.modules. + # To allow loaded modules to use dacite, register them to `sys.modules`. + sys.modules[module_name] = module + + module.__file__ = str(path) + loader.exec_module(module) + + return module diff --git a/pysen/py_version.py b/pysen/py_version.py new file mode 100644 index 0000000..f807bda --- /dev/null +++ b/pysen/py_version.py @@ -0,0 +1,103 @@ +import dataclasses +import re +from typing import Optional + + +def _coerce_type(s: Optional[str]) -> Optional[int]: + if s is None: + return None + else: + return int(s) + + +@dataclasses.dataclass(frozen=True) +class VersionRepresentation: + major: int + minor: int + patch: Optional[int] = None + pre_release: Optional[str] = None + + @property + def version(self) -> str: + s = f"{self.major}.{self.minor}" + if self.patch is not None: + s += f".{self.patch}" + + return s + + @classmethod + def from_str(cls, s: str) -> "VersionRepresentation": + number = r"(?:0|[1-9]\d*)" + major = fr"^(?P<major>{number})" + minor = fr"\.(?P<minor>{number})" + patch = fr"(\.(?P<patch>{number}))?" + pre_release = fr"(?P<pre_release>(a|b|rc){number})?$" + pattern = major + minor + patch + pre_release + m = re.match(pattern, s) + + if m is not None: + return cls( + major=int(m.group("major")), + minor=int(m.group("minor")), + patch=_coerce_type(m.group("patch")), + pre_release=m.group("pre_release"), + ) + raise ValueError("Invalid version format. See PEP 440 for details.") + + def __str__(self) -> str: + major = self.major + minor = self.minor + if self.patch is None: + patch = "" + else: + # notice the preceding dot (.)
+ patch = f".{self.patch}" + pre_release = self.pre_release or "" + return f"{major}.{minor}{patch}{pre_release}" + + def __eq__(self, other: object) -> bool: + if isinstance(other, VersionRepresentation): + # we might compare None and [int, str] + return all( + ( + self.major == other.major, + self.minor == other.minor, + self.patch == other.patch, + self.pre_release == other.pre_release, + ) + ) + else: + raise NotImplementedError + + def is_compatible(self, other: "VersionRepresentation") -> bool: + return self.major == other.major and self.minor == other.minor + + +@dataclasses.dataclass(frozen=True) +class PythonVersion(VersionRepresentation): + @property + def full_representation(self) -> str: + return f"Python{self.version}" + + @property + def short_representation(self) -> str: + return f"py{self.major}{self.minor}" + + @staticmethod + def parse_short_representation(value: str) -> "PythonVersion": + try: + return _PythonVersions[value.upper()] + except KeyError: + raise KeyError( + f"invalid value: {value}, must be one of {_PythonVersions.keys()}" + ) from None + + +# NOTE(igarashi): PythonVersion class is immutable +_PythonVersions = { + "PY27": PythonVersion(2, 7), + "PY36": PythonVersion(3, 6), + "PY37": PythonVersion(3, 7), + "PY38": PythonVersion(3, 8), + "PY39": PythonVersion(3, 9), +} diff --git a/pysen/pyproject.py b/pysen/pyproject.py new file mode 100644 index 0000000..67c1f20 --- /dev/null +++ b/pysen/pyproject.py @@ -0,0 +1,148 @@ +import logging +import pathlib +from typing import Callable, List, Optional, Sequence, Set, TypeVar + +import tomlkit + +from .component import ComponentBase +from .exceptions import InvalidConfigurationError +from .factory import configure_lint +from .manifest import ManifestBase +from .manifest_builder import build +from .mypy import Mypy +from .plugin_loader import load_plugin +from .pyproject_model import Config, LintConfig, has_tool_section, parse + +_logger = logging.getLogger(__name__) +TConfig = TypeVar("TConfig") + + +def resolve_inheritance( + path: pathlib.Path, + selector: Callable[[pathlib.Path, Config], TConfig], + base_selector: Callable[[pathlib.Path, TConfig], Optional[pathlib.Path]], + updater: Callable[[TConfig, TConfig], TConfig], + visited: Optional[Set[pathlib.Path]] = None, +) -> TConfig: + visited = visited or set() + if path in visited: + raise InvalidConfigurationError( + f"Circular dependency detected. {path} was visited more than once." + ) + visited.add(path) + + config = parse(path) + if config is None: + raise InvalidConfigurationError( + f"invalid base config: {path} doesn't exists or invalid file" + ) + + section = selector(path, config) + base_path = base_selector(path, section) + + if base_path is None: + return section + + base_section = resolve_inheritance( + base_path, selector, base_selector, updater, visited + ) + return updater(base_section, section) + + +def resolve_lint_config_inheritance(config: LintConfig) -> LintConfig: + def selector(path: pathlib.Path, root: Config) -> LintConfig: + if root.lint is None: + raise InvalidConfigurationError( + f"detected {path} doesn't have [tool.pysen.lint] section." 
+ ) + + return root.lint + + def base_selector(path: pathlib.Path, c: LintConfig) -> Optional[pathlib.Path]: + return c.base + + def updater(lhs: LintConfig, rhs: LintConfig) -> LintConfig: + lhs.update(rhs) + return lhs + + if config.base is None: + return config + + base = resolve_inheritance(config.base, selector, base_selector, updater) + base.update(config) + return base + + +def load_lint_components(config: LintConfig) -> Sequence[ComponentBase]: + config = resolve_lint_config_inheritance(config) + components = configure_lint(config) + + # NOTE(igarashi): set convert_abspath=True to create a relative path from a absolute path + if config.enable_mypy: + mypy = next(c for c in components if isinstance(c, Mypy)) + mypy.setting._pysen_convert_abspath = True + + for _, s in mypy.module_settings.items(): + s._pysen_convert_abspath = True + + return components + + +def load_manifest(path: pathlib.Path) -> ManifestBase: + config = parse(path) + + external_builder: Optional[pathlib.Path] = config.builder + components: List[ComponentBase] = [] + + if config.lint is not None: + components.extend(load_lint_components(config.lint)) + + if config.plugin is not None: + for p in config.plugin: + plugin = load_plugin(function=p.function, script=p.script) + components.extend(plugin.load(path, p, config)) + + return build(components, path, external_builder) + + +def _find_recursive() -> Optional[pathlib.Path]: + current = pathlib.Path.cwd().resolve() + + while True: + path = current / "pyproject.toml" + if path.exists() and path.is_file(): + pyproject = tomlkit.loads(path.read_text()) + if has_tool_section("jiro", pyproject) or has_tool_section( + "pysen", pyproject + ): + _logger.debug(f"successfully found pyproject.toml: {path}") + return path + + _logger.debug(f"found a file, but pysen.tool doesn't exist: {path}") + + # reached root + if current.parent == current: + return None + + current = current.parent + + +def find_pyproject(path: Optional[pathlib.Path] = None) -> pathlib.Path: + if path is not None: + path = path.resolve() + if not path.exists(): + raise FileNotFoundError(f"File not found: {path}") + elif not path.is_file(): + raise FileNotFoundError(f"{path} is not a file") + + return path + else: + p = _find_recursive() + if p is None: + raise FileNotFoundError( + "Could not find a pyproject.toml file " + "containing a [tool.pysen] section " + "in this or any of its parent directories. \n" + "The `--loglevel debug option` may help." 
+ ) + return p diff --git a/pysen/pyproject_model.py b/pysen/pyproject_model.py new file mode 100644 index 0000000..814c7f0 --- /dev/null +++ b/pysen/pyproject_model.py @@ -0,0 +1,423 @@ +import dataclasses +import logging +import pathlib +from typing import Any, Dict, List, Optional + +import dacite +import tomlkit + +from ._version import __version__ +from .exceptions import InvalidConfigurationError, PysenSectionNotFoundError +from .factory import ConfigureLintOptions, MypyModuleOption +from .isort import IsortSectionName +from .mypy import MypyFollowImports, MypyPreset, MypyTarget +from .py_version import PythonVersion, VersionRepresentation +from .source import Source + +_logger = logging.getLogger(__name__) + + +@dataclasses.dataclass +class LintConfig(ConfigureLintOptions): + base: Optional[pathlib.Path] = None + + # * Alias Fields * + # config to create mypy_modules instances + mypy_ignore_packages: Optional[List[str]] = None + + # * Deprecated Fields * + mypy_target_dirs: Optional[List[pathlib.Path]] = None + + def update(self, rhs: "LintConfig") -> None: + # NOTE(igarashi): do not overwrite base by rhs.base as it is required to refer + fields = dataclasses.fields(self) + for field in fields: + field_name = field.name + if field_name == "base": + continue + + # NOTE(igarashi): handle None in rhs as "unset" state + value = getattr(rhs, field_name) + if value is not None: + setattr(self, field_name, value) + + +@dataclasses.dataclass +class PluginConfig: + # NOTE(igarashi): `location` is set from _parse_plugin_configs, not from pyproject.toml + location: str = "" + function: Optional[str] = None + script: Optional[pathlib.Path] = None + config: Optional[Dict[str, Any]] = None + + +@dataclasses.dataclass +class Config: + version: Optional[VersionRepresentation] = None + lint: Optional[LintConfig] = None + builder: Optional[pathlib.Path] = None + plugin: Optional[List[PluginConfig]] = None + + +PLUGIN_PATH_ROOT = "tool.pysen.plugin" + + +def _parse_plugin_configs(base_dir: pathlib.Path, data: Any) -> List[PluginConfig]: + if data is None: + return [] + + if not isinstance(data, dict): + raise dacite.WrongTypeError(dict, data, PLUGIN_PATH_ROOT) + + result: List[PluginConfig] = [] + + for key, value in data.items(): + path = f"{PLUGIN_PATH_ROOT}.{key}" + + if not isinstance(value, dict): + raise dacite.WrongTypeError(dict, value, path) + + # NOTE(igarashi): ensure that `location` is unspecified in pyproject.toml + if "location" in value: + raise dacite.DaciteError(f"unknown field: {path}.location") + + config = dacite.from_dict( + PluginConfig, + value, + config=dacite.Config( + type_hooks={pathlib.Path: lambda x: _expand_path(base_dir, x)}, + strict=True, + ), + ) + assert isinstance(config, PluginConfig) + config.location = path + + if config.function is None and config.script is None: + raise dacite.DaciteError( + "must specify either function or script for plugin field" + ) + + if config.function is not None and config.script is not None: + raise dacite.DaciteError("only one of function or script must be specified") + + result.append(config) + + return result + + +def _parse_mypy_follow_imports(s: Any) -> MypyFollowImports: + if not isinstance(s, str): + raise dacite.WrongTypeError(MypyFollowImports, s) + + try: + return MypyFollowImports[s.upper()] + except KeyError: + raise dacite.DaciteError(f"invalid follow_imports value: {s}") from None + + +def _parse_mypy_preset(s: Any) -> MypyPreset: + if not isinstance(s, str): + raise dacite.WrongTypeError(MypyPreset, s) + + try: + return
MypyPreset[s.upper()] + except KeyError: + raise dacite.DaciteError(f"invalid mypy_preset value: {s}") from None + + +def _parse_isort_section_name(s: Any) -> IsortSectionName: + if not isinstance(s, str): + raise dacite.WrongTypeError(MypyPreset, s) + + try: + return IsortSectionName[s.upper()] + except KeyError: + raise dacite.DaciteError(f"invalid default_section value: {s}") from None + + +def _parse_python_version(s: Any) -> PythonVersion: + if not isinstance(s, str): + raise dacite.WrongTypeError(str, s) + + try: + return PythonVersion.parse_short_representation(s) + except KeyError as e: + raise dacite.DaciteError(str(e)) + + +def _expand_path(base_dir: pathlib.Path, s: Any) -> pathlib.Path: + if isinstance(s, pathlib.Path): + return s + elif isinstance(s, str): + return base_dir / s + else: + raise dacite.WrongTypeError(pathlib.Path, s) + + +def _parse_source(base_dir: pathlib.Path, d: Any) -> Source: + if isinstance(d, list): + return Source(includes=[_expand_path(base_dir, x) for x in d]) + elif isinstance(d, dict): + + @dataclasses.dataclass + class _SourceConfig: + includes: Optional[List[pathlib.Path]] = None + include_globs: Optional[List[str]] = None + excludes: Optional[List[pathlib.Path]] = None + exclude_globs: Optional[List[str]] = None + + config = dacite.from_dict( + _SourceConfig, + d, + config=dacite.Config( + type_hooks={pathlib.Path: lambda x: _expand_path(base_dir, x)}, + strict=True, + ), + ) + source = Source(includes=config.includes, excludes=config.excludes) + if config.include_globs is not None: + for i in config.include_globs: + source.add_include(i, glob=True, base_dir=base_dir) + + if config.exclude_globs is not None: + for e in config.exclude_globs: + source.add_exclude(e, glob=True, base_dir=base_dir) + + if len(source.includes) == 0: + source.add_include(".", base_dir=base_dir) + + return source + + else: + raise dacite.DaciteError(f"invalid source value: {d}") from None + + +def _parse_mypy_target(base_dir: pathlib.Path, d: Any) -> MypyTarget: + if not isinstance(d, dict): + raise dacite.WrongTypeError(dict, d, "tool.pysen.lint.mypy_targets") + + target = dacite.from_dict( + MypyTarget, + d, + config=dacite.Config( + type_hooks={pathlib.Path: lambda x: _expand_path(base_dir, x)}, + strict=True, + ), + ) + assert isinstance(target, MypyTarget) + if len(target.paths) == 0: + raise dacite.DaciteError( + "invalid mypy_target: each target must have one or more paths" + ) + + return target + + +def _parse_mypy_targets(base_dir: pathlib.Path, config: Any) -> List[MypyTarget]: + if not isinstance(config, list): + raise dacite.WrongTypeError(List[MypyTarget], config) + + if not all(isinstance(x, dict) for x in config): + raise dacite.DaciteError("tool.pysen.lint.mypy_targets must be a list of dicts") + + return [_parse_mypy_target(base_dir, x) for x in config] + + +def _parse_mypy_modules(config: Any) -> Dict[str, MypyModuleOption]: + if not isinstance(config, dict): + raise dacite.WrongTypeError(Dict[str, MypyModuleOption], config) + + mypy_modules: Dict[str, MypyModuleOption] = {} + + for target_module, option_dict in config.items(): + if not isinstance(target_module, str): + raise dacite.WrongTypeError( + str, target_module, "tool.pysen.lint.mypy_modules" + ) + + if not isinstance(option_dict, dict): + raise dacite.WrongTypeError( + MypyModuleOption, + option_dict, + f'tool.pysen.lint.mypy_modules."{target_module}"', + ) + + try: + module_option = dacite.from_dict( + MypyModuleOption, + option_dict, + config=dacite.Config( + strict=True, + type_hooks={ + 
MypyPreset: _parse_mypy_preset, + MypyFollowImports: _parse_mypy_follow_imports, + }, + ), + ) + assert isinstance(module_option, MypyModuleOption) + mypy_modules[target_module] = module_option + except ValueError as e: + raise dacite.DaciteError(f"invalid mypy_module: {target_module}, {e}") + + return mypy_modules + + +def _parse_dict(data: Dict[str, Any], base_dir: pathlib.Path) -> Config: + dacite_config = dacite.Config( + type_hooks={ + MypyPreset: _parse_mypy_preset, + IsortSectionName: _parse_isort_section_name, + pathlib.Path: lambda x: _expand_path(base_dir, x), + Source: lambda x: _parse_source(base_dir, x), + List[PluginConfig]: lambda x: _parse_plugin_configs(base_dir, x), + VersionRepresentation: VersionRepresentation.from_str, + PythonVersion: _parse_python_version, + List[MypyTarget]: lambda x: _parse_mypy_targets(base_dir, x), + Dict[str, MypyModuleOption]: _parse_mypy_modules, + }, + strict=True, + ) + + try: + config = dacite.from_dict(Config, data, dacite_config) + assert isinstance(config, Config) + return config + except dacite.DaciteError as e: + raise InvalidConfigurationError(f"invalid configuration: {e}") from None + + +def _workaround_tomlkit_unmarshal(data: Any) -> Any: + if data is None or isinstance(data, tomlkit.items.Null): + return None + elif isinstance(data, dict): + # tomlkit.items.Dict, tomlkit.container.Container + ret: Dict[str, Any] = {} + for k, v in data.items(): + k = _workaround_tomlkit_unmarshal(k) + v = _workaround_tomlkit_unmarshal(v) + ret[k] = v + + return ret + elif isinstance(data, list): + # tomlkit.items.Array + return list([_workaround_tomlkit_unmarshal(v) for v in data]) + elif isinstance(data, tomlkit.items.Bool): + return bool(data) + elif isinstance(data, tomlkit.items.Float): + return float(data) + elif isinstance(data, tomlkit.items.Integer): + return int(data) + elif isinstance(data, tomlkit.items.String): + return str(data) + elif isinstance( + data, (tomlkit.items.DateTime, tomlkit.items.Date, tomlkit.items.Time) + ): + raise NotImplementedError(f"tomlkit type: {type(data)}") + + return data + + +def has_tool_section( + tool_name: str, + pyproject: tomlkit.toml_document.TOMLDocument, +) -> bool: + return "tool" in pyproject and tool_name in pyproject["tool"] + + +def _load_pysen_section(path: pathlib.Path) -> Dict[str, Any]: + if not path.exists(): + raise FileNotFoundError(path) + + with path.open("r") as f: + pyproject = tomlkit.loads(f.read()) + + if has_tool_section("pysen", pyproject): + section = pyproject["tool"]["pysen"] + elif has_tool_section("jiro", pyproject): + _logger.warning( + "jiro section under pyproject.toml is deprecated. Use pysen instead." 
+ ) + section = pyproject["tool"]["jiro"] + else: + raise PysenSectionNotFoundError(str(path)) + + data = _workaround_tomlkit_unmarshal(section) + assert isinstance(data, dict) + return data + + +def _migrate_alias_fields(config: Config) -> None: + if config.lint is not None: + lint_config = config.lint + + if lint_config.mypy_ignore_packages is not None: + mypy_modules: Dict[str, MypyModuleOption] = lint_config.mypy_modules or {} + + for m in lint_config.mypy_ignore_packages: + if m in mypy_modules: + raise dacite.DaciteError( + f"{m} is configured in both mypy_ignore_packages and mypy_modules" + ) + + mypy_modules[m] = MypyModuleOption( + ignore_errors=True, follow_imports=MypyFollowImports.SKIP + ) + + lint_config.mypy_ignore_packages = None + lint_config.mypy_modules = mypy_modules + + +def _migrate_deprecated_fields(path: pathlib.Path, config: Config) -> None: + if config.lint is not None: + if config.lint.mypy_target_dirs is not None: + _logger.warning( + "tool.pysen.mypy_target_dirs is deprecated since 0.6.0, " + f"Use tool.pysen.mypy_targets instead (File: {path})" + ) + config.lint.mypy_targets = [MypyTarget(config.lint.mypy_target_dirs)] + config.lint.mypy_target_dirs = None + + +def _load_version(data: Dict[str, Any]) -> Optional[VersionRepresentation]: + version = data.get("version") + if version is None: + return None + + try: + return VersionRepresentation.from_str(version) + except ValueError as e: + raise InvalidConfigurationError(e) from None + + +def _check_version( + file_path: pathlib.Path, + config_version: Optional[VersionRepresentation], + actual_version: VersionRepresentation, +) -> None: + if config_version is None: + _logger.warning( + "Consider specifying 'version' under [tool.pysen] section in your pyproject.toml " + "to check compliance against the version of the installed pysen. " + f"(File: {file_path})" + ) + elif not config_version.is_compatible(actual_version): + _logger.warning( + f"pyproject.toml specifies version {config_version}, " + f"but the pysen you are using is version {actual_version}, " + "which might not be compatible. " + f"(File: {file_path})" + ) + + +def parse(path: pathlib.Path) -> Config: + path = path.resolve() + base_dir = path.parent + + section = _load_pysen_section(path) + version = _load_version(section) + _check_version(path, version, VersionRepresentation.from_str(__version__)) + + config = _parse_dict(section, base_dir) + _migrate_alias_fields(config) + _migrate_deprecated_fields(path, config) + return config diff --git a/pysen/reporter.py b/pysen/reporter.py new file mode 100644 index 0000000..569a61a --- /dev/null +++ b/pysen/reporter.py @@ -0,0 +1,161 @@ +import contextlib +import io +import logging +import threading +import time +from typing import Any, Iterator, List, Optional, Sequence + +from .diagnostic import Diagnostic, DiagnosticFormatter +from .logging_utils import ( + CommandLoggingOptions, + get_process_output_logger, + get_reporter_logger, +) + +_COMMAND_REPR_MAX_LENGTH = 150 +_OMIT_REPR = "..." 
+ + +def _truncate_command_sequence(cmds: str) -> str: + if len(cmds) <= _COMMAND_REPR_MAX_LENGTH: + return cmds + + prefix_length = _COMMAND_REPR_MAX_LENGTH - len(_OMIT_REPR) + assert prefix_length > 0 + prefix = cmds[:prefix_length] + return f"{prefix}{_OMIT_REPR}" + + +class Reporter: + def __init__(self, name: str) -> None: + self._name = name + self._success: Optional[bool] = None + self._exit_code: Optional[int] = None + + self._commands: List[str] = [] + self._diagnostics: List[Diagnostic] = [] + self._started: Optional[float] = None + self._ended: Optional[float] = None + + self._logger = get_reporter_logger(name) + self._process_output = get_process_output_logger(name) + + @property + def name(self) -> str: + return self._name + + @property + def success(self) -> bool: + assert self._success is not None + return self._success + + @property + def exit_code(self) -> int: + assert self._exit_code is not None + return self._exit_code + + @property + def commands(self) -> List[str]: + assert self._commands is not None + return self._commands + + @property + def elapsed_time(self) -> float: + assert self._started is not None + assert self._ended is not None + return self._ended - self._started + + @property + def diagnostics(self) -> List[Diagnostic]: + return self._diagnostics + + def __enter__(self) -> "Reporter": + self._started = time.time() + self.logger.info(f"Running: {self.name}") + return self + + def __exit__(self, exc_type: Any, exc: Any, tb: Any) -> None: + self._ended = time.time() + + def report_diagnostics(self, diagnostics: Sequence[Diagnostic]) -> None: + self._diagnostics.extend(diagnostics) + + def report_command(self, cmd: str) -> None: + self._logger.debug(f"> {_truncate_command_sequence(cmd)}") + self._commands.append(cmd) + + @property + def logger(self) -> logging.Logger: + return self._logger + + @property + def process_output(self) -> logging.Logger: + return self._process_output + + def set_result(self, success: bool, exit_code: Optional[int] = None) -> None: + self._success = success + self._exit_code = exit_code + + +class ReporterFactory: + def __init__( + self, + pretty: bool = True, + process_output: bool = True, + loglevel: int = logging.INFO, + ) -> None: + self._reporters: List[Reporter] = [] + self._lock = threading.Lock() + self._pretty = pretty + self._process_output = process_output + self._loglevel = loglevel + + def create(self, name: str) -> Reporter: + r = Reporter(name) + with self._lock: + self._reporters.append(r) + return r + + @contextlib.contextmanager + def logging_handlers(self, is_grouped: bool) -> Iterator[None]: + clo = CommandLoggingOptions(is_grouped, self._pretty, self._process_output) + with clo.start_logging(self._loglevel): + yield + + def has_error(self) -> bool: + return not all([r.success for r in self._reporters]) + + @property + def reporters(self) -> List[Reporter]: + return self._reporters + + def format_summary(self) -> str: + with io.StringIO() as buf: + for r in self._reporters: + status_msg = "Failed" + if r.success: + status_msg = "OK" + + buf.write( + "{} .......... 
{} ({:.2f} sec)\n".format( + r.name, status_msg, r.elapsed_time + ) + ) + + return buf.getvalue() + + def format_error_summary(self) -> str: + with io.StringIO() as buf: + buf.write("Errored:\n") + for r in self._reporters: + if not r.success: + buf.write(" - {}\n".format(r.name)) + return buf.getvalue() + + def format_diagnostic_summary(self, formatter: DiagnosticFormatter) -> str: + ret: List[str] = [] + for r in self._reporters: + for d in r.diagnostics: + ret.append(formatter.format(d, r.name)) + + return "\n".join(ret) diff --git a/pysen/runner.py b/pysen/runner.py new file mode 100644 index 0000000..f84c0bc --- /dev/null +++ b/pysen/runner.py @@ -0,0 +1,143 @@ +import argparse +import concurrent.futures +import contextlib +import logging +import pathlib +import tempfile +from typing import Dict, List, Optional, Sequence + +from . import path +from .command import CommandBase +from .exceptions import ( + CommandNotFoundError, + InvalidCommandNameError, + RunTargetFileNotSupported, +) +from .manifest import ManifestBase, ParserType, TargetType +from .reporter import ReporterFactory +from .runner_options import PathContext, RunOptions +from .types import ComponentName, TargetName + +_logger = logging.getLogger(__name__) + + +def _verify_command_name(command: CommandBase) -> None: + if ":" in command.name: + raise InvalidCommandNameError(command.name) + + +def _has_side_effects(target: TargetType) -> bool: + return any(cmd.has_side_effects for cmd in target) + + +def run_target( + target: TargetType, + reporters: ReporterFactory, + options: RunOptions, + files: Optional[Sequence[pathlib.Path]] = None, +) -> None: + def run_cmd(cmd: CommandBase) -> bool: + _verify_command_name(cmd) + with reporters.create(cmd.name) as r: + exit_code: int + try: + if files is not None: + exit_code = cmd.run_files(reporter=r, files=files) + else: + exit_code = cmd.run(reporter=r) + except CommandNotFoundError: + exit_code = 127 + r.logger.exception("command not found") + except RunTargetFileNotSupported: + exit_code = 0 + r.logger.info(f"{cmd.name} does not support target file execution") + except KeyboardInterrupt: + exit_code = 130 + r.logger.exception("interrupted") + return False + except BaseException: + exit_code = -1 + r.logger.exception("unexpected exception") + finally: + r.set_result(exit_code == 0, exit_code) + return True + + if options.no_parallel: + is_grouped = False + else: + is_grouped = not _has_side_effects(target) + + with reporters.logging_handlers(is_grouped=is_grouped): + if is_grouped: + # TODO: control the maximum number of concurrent threads + _logger.info("Running commands concurrently...") + with concurrent.futures.ThreadPoolExecutor() as executor: + executor.map(run_cmd, target) + _logger.info("... 
concurrent execution done") + else: + _logger.info("Running commands") + for cmd in target: + if not run_cmd(cmd): + break + + +class Runner: + def __init__(self, manifest: ManifestBase) -> None: + self._manifest = manifest + + def setup_manifest_argparse(self, parser: ParserType) -> None: + self._manifest.configure_parser(parser) + + def parse_manifest_arguments(self, args: Sequence[str]) -> argparse.Namespace: + parser = argparse.ArgumentParser("manifest options") + self.setup_manifest_argparse(parser) + return parser.parse_args(args) + + def export_settings( + self, + base_dir: pathlib.Path, + settings_dir: pathlib.Path, + args: argparse.Namespace, + ) -> None: + self._manifest.export_settings(PathContext(base_dir, settings_dir), args) + + def get_targets(self, args: argparse.Namespace) -> Dict[str, List[ComponentName]]: + return self._manifest.get_targets(args) + + def _get_target( + self, + target_name: TargetName, + paths: PathContext, + options: RunOptions, + args: argparse.Namespace, + ) -> TargetType: + # NOTE(igarashi): make sure if target_name exists before calling get_target(target_name) + targets = self.get_targets(args) + if target_name not in targets: + raise CommandNotFoundError(f"target: {target_name} not found") + + return self._manifest.get_target(target_name, paths, options, args) + + def run( + self, + target_name: str, + base_dir: pathlib.Path, + manifest_args: argparse.Namespace, + reporters: ReporterFactory, + options: RunOptions, + settings_dir: Optional[pathlib.Path] = None, + files: Optional[Sequence[pathlib.Path]] = None, + ) -> None: + with contextlib.ExitStack() as stack: + base_dir = base_dir.resolve() + if settings_dir is not None: + settings_dir = path.resolve_path(base_dir, settings_dir) + settings_dir.mkdir(parents=True, exist_ok=True) + else: + tempdir = stack.enter_context(tempfile.TemporaryDirectory()) + settings_dir = pathlib.Path(tempdir) + + paths = PathContext(base_dir, settings_dir) + self.export_settings(paths.base_dir, paths.settings_dir, manifest_args) + target = self._get_target(target_name, paths, options, manifest_args) + run_target(target, reporters, options, files) diff --git a/pysen/runner_options.py b/pysen/runner_options.py new file mode 100644 index 0000000..864dda6 --- /dev/null +++ b/pysen/runner_options.py @@ -0,0 +1,14 @@ +import dataclasses +import pathlib + + +@dataclasses.dataclass(frozen=True) +class PathContext: + base_dir: pathlib.Path + settings_dir: pathlib.Path + + +@dataclasses.dataclass(frozen=True) +class RunOptions: + require_diagnostics: bool = True + no_parallel: bool = False diff --git a/pysen/setting.py b/pysen/setting.py new file mode 100644 index 0000000..5556f76 --- /dev/null +++ b/pysen/setting.py @@ -0,0 +1,216 @@ +import dataclasses +import pathlib +from collections import OrderedDict +from typing import ( + Any, + Callable, + Dict, + List, + Optional, + Sequence, + Set, + Tuple, + Type, + Union, + cast, +) + +from .types import MAPPING_TYPES, PRIMITIVE_TYPES, SEQUENCE_TYPES + +TypeHookCallback = Callable[[Any], Any] + + +def to_dash_case(s: str) -> str: + # convert field name to dash-case + # Example: snake_case to snake-case + return s.replace("_", "-") + + +@dataclasses.dataclass +class SettingBase: + def asdict( + self, + ignore_fields: Optional[Sequence[str]] = None, + omit_none: bool = False, + naming_rule: Optional[Callable[[str], str]] = None, + renames: Optional[Dict[str, str]] = None, + type_hooks: Optional[Dict[Type[Any], TypeHookCallback]] = None, + ) -> Dict[str, Any]: + entries = 
dataclasses.asdict(self) + if ignore_fields is not None: + for ignore in ignore_fields: + assert ignore in entries + entries.pop(ignore) + + if omit_none: + empty = [k for k, v in entries.items() if v is None] + for k in empty: + entries.pop(k) + + if renames is not None: + for src, to in renames.items(): + if src in entries: + value = entries.pop(src) + entries[to] = value + + field_name_converter: Callable[[str], str] = lambda s: s + if naming_rule is not None: + field_name_converter = naming_rule + + type_conversion: Dict[Type[Any], TypeHookCallback] = { + pathlib.Path: lambda x: str(x), + } + if type_hooks is not None: + type_conversion.update(type_hooks) + + def convert_types(data: Any) -> Any: + if data is None: + return None + + data_type = type(data) + conversion = type_conversion.get(data_type, None) + if conversion is not None: + return conversion(data) + + if isinstance(data, PRIMITIVE_TYPES): + return data + elif isinstance(data, SEQUENCE_TYPES): + return list(sorted([convert_types(x) for x in data])) + elif isinstance(data, MAPPING_TYPES): + # NOTE(igarashi): key must be a str + return OrderedDict( + (field_name_converter(k), convert_types(v)) + for k, v in sorted(data.items()) + ) + else: + raise RuntimeError( + f"cannot handle type: {data_type}, consider adding type_hooks" + ) + + return cast(Dict[str, Any], convert_types(entries)) + + +SectionPathType = Sequence[str] +SectionDataType = Dict[str, Any] + + +def _sort_object(data: Any) -> Any: + if data is None: + return None + + if isinstance(data, PRIMITIVE_TYPES): + return data + elif isinstance(data, SEQUENCE_TYPES): + return list(sorted(data)) + elif isinstance(data, MAPPING_TYPES): + return OrderedDict((k, _sort_object(v)) for k, v in sorted(data.items())) + else: + raise RuntimeError(f"cannot handle type: {type(data)}") + + +def _create_dict(paths: SectionPathType) -> Any: + element: Dict[str, Any] = {} + for path in paths[::-1]: + element = {path: element} + return element + + +def _traverse_toml( + section_path: SectionPathType, toml: Dict[str, Any], create: bool +) -> Any: + element = toml + current_path: List[str] = [] + for idx, path in enumerate(section_path): + if path in element: + element = element[path] + elif create: + # create remaining items and set create=False so that we don't create nodes anymore + element[path] = _create_dict(section_path[idx + 1 :]) + create = False + + # re-create the reference from toml root + element = _traverse_toml(section_path[: idx + 1], toml, False) + else: + raise ValueError(f"Key {path} does not exist") + + current_path.append(path) + if not isinstance(element, dict): + raise KeyError(f"invalid section, {current_path} exists") + + return element + + +class SettingFile: + def __init__(self) -> None: + self._entries: List[Tuple[SectionPathType, SectionDataType]] = [] + self._structure: Set[Tuple[str, ...]] = set() + + def entries(self) -> List[Tuple[SectionPathType, Dict[str, Any]]]: + return self._entries + + @staticmethod + def update_by_entry( + dst: Dict[str, Any], + section_path: SectionPathType, + section_data: SectionDataType, + replace: bool = True, + ) -> Dict[str, Any]: + data: Dict[str, Any] = _traverse_toml(section_path, dst, True) + parent = _traverse_toml(section_path[:-1], dst, False) + + if replace: + # NOTE(igarashi): tomlkit.TOMLDocument doesn't clear its dictionary by `clear()`, + # so we set a new dict from its parent + parent[section_path[-1]] = {} + data = _traverse_toml(section_path, dst, False) + + # NOTE(igarashi): don't use dict.update so that the 
items are added + # in alphabetical order. + for key, value in sorted(section_data.items()): + data[key] = _sort_object(value) + + return data + + def as_dict(self) -> Dict[str, Any]: + root: Dict[str, Any] = {} + for entry in self._entries: + SettingFile.update_by_entry(root, entry[0], entry[1]) + + return root + + def __eq__(self, other: Any) -> bool: + if isinstance(other, dict): + return other == self.as_dict() + + return id(other) == id(self) + + def get_section( + self, + path: Union[str, SectionPathType], + default: Optional[SectionDataType] = None, + ) -> SectionDataType: + target: Tuple[str, ...] + if isinstance(path, str): + target = (path,) + else: + target = tuple(path) + + for p, d in self._entries: + if p == target: + return d + + if default is not None: + return default + + raise KeyError(f"section: {path} not found") + + def set_section(self, path: SectionPathType, data: SectionDataType) -> None: + assert isinstance(path, (tuple, list)) + subpath: List[str] = [] + for p in path: + if tuple(subpath) in self._structure: + raise KeyError(f"subpath: {subpath} already exists") + + subpath.append(p) + + self._entries.append((tuple(path), data)) diff --git a/pysen/setuptools.py b/pysen/setuptools.py new file mode 100644 index 0000000..6852dbe --- /dev/null +++ b/pysen/setuptools.py @@ -0,0 +1,194 @@ +import argparse +import functools +import pathlib +import sys +from typing import Any, Callable, Dict, List, Optional, Sequence, Tuple, Type, Union + +import setuptools + +from .manifest import ManifestBase +from .path import PathLikeType, wrap_path +from .pyproject import find_pyproject, load_manifest +from .reporter import ReporterFactory +from .runner import Runner +from .runner_options import RunOptions + +ManifestLikeType = Union[str, pathlib.Path, ManifestBase] +CommandClassType = Type[setuptools.Command] +UNDEFINED = object() + +_PREDEFINED_COMMAND_NAMES = [ + "build_ext", + "build_py", + "develop", + "install", + "test", +] + + +def _get_setuptool_command(name: str) -> CommandClassType: + if name in _PREDEFINED_COMMAND_NAMES: + try: + import importlib + + module = importlib.import_module("setuptools.command.{}".format(name)) + klass = getattr(module, name) + if ( + klass is not None + and isinstance(klass, type) + and issubclass(klass, setuptools.Command) + ): + return klass + except BaseException: + pass # failover + + return setuptools.Command # type: ignore + + +def _get_setuptool_user_options( + klass: CommandClassType, +) -> List[Tuple[str, Optional[str], str]]: + try: + base_user_options = getattr(klass, "user_options", None) + if base_user_options is not None and isinstance( + base_user_options, (list, tuple) + ): + return list(base_user_options) + except BaseException: + pass # failover + + return [] + + +def _create_setuptool_command( + name: str, + runner: Runner, + base_dir: pathlib.Path, + settings_dir: Optional[pathlib.Path], + manifest_args: argparse.Namespace, +) -> CommandClassType: + base_class = _get_setuptool_command(name) + base_user_options = _get_setuptool_user_options(base_class) + + setup_options = base_user_options + + class Cmd(base_class): # type: ignore[valid-type,misc] # NOQA: F821 + user_options = setup_options + + def __invoke_super(self, func: Callable[[], None]) -> None: + if base_class is not setuptools.Command: + func() + + def initialize_options(self) -> None: + self.__invoke_super(super().initialize_options) + + def finalize_options(self) -> None: + self.__invoke_super(super().finalize_options) + + def run(self) -> None: + reporters = 
ReporterFactory() + options = RunOptions() + runner.run( + name, + base_dir, + manifest_args, + reporters, + options, + settings_dir=settings_dir, + files=None, + ) + print("\n ** execution summary **") + print(reporters.format_summary()) + if reporters.has_error(): + sys.stderr.write(f"{name} finished with error(s)\n") + print(reporters.format_error_summary()) + sys.exit(1) + + self.__invoke_super(super().run) + + Cmd.__name__ = name + return Cmd + + +class SetupPyWrapper: + def __init__(self, cmds: Dict[str, CommandClassType]) -> None: + self._cmds = cmds + + @property + def cmdclass(self) -> Dict[str, CommandClassType]: + return self._cmds + + @functools.wraps(setuptools.setup) + def __call__(self, **kwargs: Any) -> None: + cmdclass = kwargs.pop("cmdclass", {}) + assert isinstance(cmdclass, dict) + configured_cmds = self.cmdclass + # NOTE(igarashi): raise Exception if the key is duplicated + for key in cmdclass.keys(): + if key in configured_cmds: + raise RuntimeError(f"cmdclass: {key} is duplicated") + + cmdclass.update(configured_cmds) + kwargs.update({"cmdclass": cmdclass}) + + setuptools.setup(**kwargs) + + +def _setup( + package_dir: pathlib.Path, + settings_dir: Optional[pathlib.Path], + manifest: ManifestBase, + args: Sequence[str], +) -> SetupPyWrapper: + runner = Runner(manifest) + parsed = runner.parse_manifest_arguments(args) + targets = runner.get_targets(parsed) + setup_commands = { + name: _create_setuptool_command(name, runner, package_dir, settings_dir, parsed) + for name in targets + } + + return SetupPyWrapper(setup_commands) + + +def setup_from_pyproject( + fpath: str, + path: Optional[PathLikeType] = None, + manifest_args: Optional[Sequence[str]] = None, + settings_dir: Optional[pathlib.Path] = None, +) -> SetupPyWrapper: + package_dir = pathlib.Path(fpath).resolve().parent + wrapped: pathlib.Path = package_dir / "pyproject.toml" + if path is not None: + wrapped = wrap_path(path) + + args = manifest_args or [] + pyproject = find_pyproject(wrapped) + manifest = load_manifest(pyproject) + + return _setup(package_dir, settings_dir, manifest, args) + + +def setup( + fpath: str, + manifest: ManifestBase, + manifest_args: Optional[Sequence[str]] = None, + settings_dir: Optional[pathlib.Path] = None, +) -> SetupPyWrapper: + package_dir = pathlib.Path(fpath).resolve().parent + args = manifest_args or [] + + return _setup(package_dir, settings_dir, manifest, args) + + +def generate_setting_files( + fpath: str, + manifest: ManifestBase, + export_dir: pathlib.Path, + manifest_args: Optional[Sequence[str]] = None, +) -> None: + package_dir = pathlib.Path(fpath).resolve().parent + args = manifest_args or [] + runner = Runner(manifest) + parsed = runner.parse_manifest_arguments(args) + runner.export_settings(package_dir, export_dir, parsed) diff --git a/pysen/source.py b/pysen/source.py new file mode 100644 index 0000000..8f9eeac --- /dev/null +++ b/pysen/source.py @@ -0,0 +1,215 @@ +import dataclasses +import pathlib +from typing import Callable, Dict, Iterable, List, Optional, Sequence, Set + +from .git_utils import GitRepositoryNotFoundError, check_tracked, list_indexed_files +from .path import PathLikeType, is_contained, resolve_path +from .reporter import Reporter + +FilePredicateType = Callable[[pathlib.Path], bool] + + +def extension_filter(accept_extensions: Set[str]) -> FilePredicateType: + def impl(path: pathlib.Path) -> bool: + return path.suffix in accept_extensions + + return impl + + +PythonFileFilter = extension_filter({".py", ".pyi"}) + + +@dataclasses.dataclass 
+class SourceEntrySetting: + glob: bool + base_dir: Optional[pathlib.Path] = None + + +def _resolve( + base_dir: pathlib.Path, p: PathLikeType, setting: SourceEntrySetting +) -> List[pathlib.Path]: + if isinstance(p, pathlib.Path) and setting.glob: + raise RuntimeError("cannot use pathlib.Path when glob=True") + + if setting.base_dir is not None: + base_dir = setting.base_dir + base_dir = base_dir.resolve() + + if not setting.glob: + return [resolve_path(base_dir, p)] + + assert isinstance(p, str) + # NOTE(igarashi): ensure that p is an instance of str + # otherwise, pathlib.glob raises an error when sys.intern() is called + p = str(p) + + # NOTE(igarashi): pathlib.Path.glob(".") raises IndexError in Python 3.7 (might be a bug?) + # To avoid the bug, add a condition to handle the case for the workaround. + if p == ".": + return [resolve_path(base_dir, ".")] + + # run base_dir.glob only if isinstance(p, str) and setting.glob + return [resolve_path(base_dir, g) for g in base_dir.glob(p)] + + +class Source: + def __init__( + self, + includes: Optional[Sequence[PathLikeType]] = None, + excludes: Optional[Sequence[PathLikeType]] = None, + include_globs: Optional[Sequence[PathLikeType]] = None, + exclude_globs: Optional[Sequence[PathLikeType]] = None, + ) -> None: + self._includes: Dict[PathLikeType, SourceEntrySetting] = {} + self._excludes: Dict[PathLikeType, SourceEntrySetting] = {} + + if includes is not None: + for i in includes: + self.add_include(i, glob=False) + + if include_globs is not None: + for i in include_globs: + self.add_include(i, glob=True) + + if excludes is not None: + for e in excludes: + self.add_exclude(e, glob=False) + + if exclude_globs is not None: + for e in exclude_globs: + self.add_exclude(e, glob=True) + + def add_include( + self, + entry: PathLikeType, + *, + glob: bool = False, + base_dir: Optional[pathlib.Path] = None, + ) -> None: + self._includes[entry] = SourceEntrySetting(glob=glob, base_dir=base_dir) + + def add_exclude( + self, + entry: PathLikeType, + *, + glob: bool = False, + base_dir: Optional[pathlib.Path] = None, + ) -> None: + self._excludes[entry] = SourceEntrySetting(glob=glob, base_dir=base_dir) + + def remove_include(self, entry: PathLikeType) -> None: + self._includes.pop(entry) + + def remove_exclude(self, entry: PathLikeType) -> None: + self._excludes.pop(entry) + + @property + def includes(self) -> Dict[PathLikeType, SourceEntrySetting]: + return self._includes + + @property + def excludes(self) -> Dict[PathLikeType, SourceEntrySetting]: + return self._excludes + + def copy(self) -> "Source": + new = Source() + for path, setting in self.includes.items(): + new.add_include(path, glob=setting.glob, base_dir=setting.base_dir) + + for path, setting in self.excludes.items(): + new.add_exclude(path, glob=setting.glob, base_dir=setting.base_dir) + + return new + + def iter_include_entries(self, base_dir: pathlib.Path) -> Iterable[pathlib.Path]: + for include, setting in self._includes.items(): + yield from iter(_resolve(base_dir, include, setting)) + + def iter_exclude_entries(self, base_dir: pathlib.Path) -> Iterable[pathlib.Path]: + for exclude, setting in self._excludes.items(): + yield from iter(_resolve(base_dir, exclude, setting)) + + def _resolve_include_files( + self, + base_dir: pathlib.Path, + filter_predicate: FilePredicateType, + use_git: bool, + reporter: Optional[Reporter] = None, + ) -> Set[pathlib.Path]: + includes = self.iter_include_entries(base_dir) + + included_files: Set[pathlib.Path] = set() + + for include in includes: + if 
not include.exists(): + continue + if include.is_file(): + # NOTE(igarashi): include this file anyway even though it is + # not a .py file (e.g., script/command) + if use_git: + try: + if not check_tracked(include): + continue + except GitRepositoryNotFoundError: + if reporter is not None: + reporter.logger.warning( + f"{include} is outside repository. ignored." + ) + + included_files.add(include) + else: + if use_git: + try: + included_files.update( + x + for x in list_indexed_files(include) + if filter_predicate(x) + ) + except GitRepositoryNotFoundError: + if reporter is not None: + reporter.logger.warning( + f"{include} is outside repository. ignored." + ) + else: + included_files.update( + x for x in include.glob("**/*") if filter_predicate(x) + ) + + return included_files + + def resolve_files( + self, + base_dir: pathlib.Path, + filter_predicate: Optional[FilePredicateType], + use_git: bool = True, + reporter: Optional[Reporter] = None, + ) -> Set[pathlib.Path]: + """Returns a set of target files. + Note: + `filter_predicate` is used to filter files only when a directory is expanded by + the system. If a user explicitly specifies the path as included, it will be added + to the returned set. + + Examples: + >>> s = Source(includes=["foo/", "bar/doc.md"]) + >>> s.resolve_files(base_dir, lambda x: x.suffix == ".py") + [PosixPath("foo/main.py"), PosixPath("foo/module.py"), PosixPath("bar/doc.md")] + """ + + def _default(x: pathlib.Path) -> bool: + return True + + filter_predicate = filter_predicate or _default + + assert base_dir.is_absolute() + + included = self._resolve_include_files( + base_dir, filter_predicate, use_git, reporter + ) + + exclude_entries = list(self.iter_exclude_entries(base_dir)) + + def exclude_filter(path: pathlib.Path) -> bool: + return not any(is_contained(exclude, path) for exclude in exclude_entries) + + return set(filter(exclude_filter, included)) diff --git a/pysen/types.py b/pysen/types.py new file mode 100644 index 0000000..d693f1c --- /dev/null +++ b/pysen/types.py @@ -0,0 +1,6 @@ +PRIMITIVE_TYPES = (int, str, float, bool) +SEQUENCE_TYPES = (tuple, set, list) +MAPPING_TYPES = dict + +ComponentName = str +TargetName = str diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..e7edb5e --- /dev/null +++ b/setup.cfg @@ -0,0 +1,34 @@ +[flake8] +# automatically generated by pysen +# e203: black treats : as a binary operator +# e231: black doesn't put a space after , +# e501: black may exceed the line-length to follow other style rules +# w503 or w504: either one needs to be disabled to select w error codes +ignore = E203,E231,E501,W503 +max-line-length = 88 +select = B,B950,C,E,F,W + +[mypy] +# automatically generated by pysen +check_untyped_defs = True +disallow_any_decorated = False +disallow_any_generics = True +disallow_any_unimported = False +disallow_incomplete_defs = True +disallow_subclassing_any = True +disallow_untyped_calls = True +disallow_untyped_decorators = False +disallow_untyped_defs = True +ignore_errors = False +ignore_missing_imports = True +no_implicit_optional = True +python_version = 3.7 +show_error_codes = True +strict_equality = True +strict_optional = True +warn_redundant_casts = True +warn_return_any = True +warn_unreachable = True +warn_unused_configs = True +warn_unused_ignores = True + diff --git a/setup.py b/setup.py new file mode 100644 index 0000000..f6e390e --- /dev/null +++ b/setup.py @@ -0,0 +1,54 @@ +import pathlib + +from setuptools import find_packages, setup + +BASE_DIR = 
pathlib.Path(__file__).resolve().parent +exec((BASE_DIR / "pysen/_version.py").read_text()) + + +setup( + name="pysen", + version=__version__, # type: ignore[name-defined] # NOQA: F821 + packages=find_packages(), + description=( + "Python linting made easy. " + "Also a casual yet honorific way to address individuals " + "who have entered an organization prior to you." + ), + long_description=open("README.md").read(), + long_description_content_type="text/markdown", + author="Yuki Igarashi, Toru Ogawa, Ryo Miyajima", + author_email="igarashi@preferred.jp, ogawa@preferred.jp, ryo@preferred.jp", + url="https://github.com/pfnet/pysen", + license="MIT License", + classifiers=[ + "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", + "Programming Language :: Python :: 3.8", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python", + "License :: OSI Approved :: MIT License", + "Operating System :: POSIX", + "Operating System :: MacOS", + "Operating System :: Unix", + ], + install_requires=[ + "GitPython>=3.0.0,<4.0.0", + "colorlog>=4.0.0,<5.0.0", + "dacite>=1.1.0,<2.0.0", + "dataclasses>=0.6,<1.0;python_version<'3.7'", + "tomlkit>=0.5.11,<1.0.0", + "unidiff>=0.6.0,<1.0.0", + ], + extras_require={ + "lint": [ + "black>=19.10b0,<=20.8", + "flake8-bugbear", # flake8 doesn't have a dependency for bugbear plugin + "flake8>=3.7,<4", + "isort>=4.3,<5.2.0", + "mypy>=0.770,<0.800", + ], + }, + package_data={"pysen": ["py.typed"]}, + entry_points={"console_scripts": ["pysen=pysen.cli:cli"]}, +) diff --git a/tests/example_tests/__init__.py b/tests/example_tests/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/example_tests/conftest.py b/tests/example_tests/conftest.py new file mode 100644 index 0000000..c758f69 --- /dev/null +++ b/tests/example_tests/conftest.py @@ -0,0 +1,12 @@ +import pathlib + +import pytest + +BASE_DIR = pathlib.Path(__file__).resolve().parent + + +@pytest.fixture +def example_dir() -> pathlib.Path: + ret = BASE_DIR.parents[1] / "examples" + assert ret.exists() + return ret diff --git a/tests/example_tests/test_advanced_example.py b/tests/example_tests/test_advanced_example.py new file mode 100644 index 0000000..734b013 --- /dev/null +++ b/tests/example_tests/test_advanced_example.py @@ -0,0 +1,16 @@ +import pathlib +import subprocess + +import pytest + +from pysen.path import change_dir + +TARGET_EXAMPLE = "advanced_example" + + +@pytest.mark.examples +def test_cli_run(example_dir: pathlib.Path) -> None: + target = example_dir / TARGET_EXAMPLE + with change_dir(target): + subprocess.run(["pysen", "run", "lint"], check=True) + subprocess.run(["pysen", "run", "--error-format", "gnu", "lint"], check=True) diff --git a/tests/example_tests/test_plugin_example.py b/tests/example_tests/test_plugin_example.py new file mode 100644 index 0000000..68f9375 --- /dev/null +++ b/tests/example_tests/test_plugin_example.py @@ -0,0 +1,19 @@ +import pathlib +import subprocess + +import pytest + +from pysen.path import change_dir + +TARGET_EXAMPLE = "plugin_example" + + +@pytest.mark.examples +def test_cli_run(example_dir: pathlib.Path) -> None: + target = example_dir / TARGET_EXAMPLE + with change_dir(target): + subprocess.run(["pysen", "run", "lint"], check=True) + subprocess.run(["pysen", "run", "hook"]) + subprocess.run(["pysen", "--ignore-lint", "run", "lint"], check=True) + subprocess.run(["pysen", "--ignore-lint", "run", "hook"], check=True) + subprocess.run(["pysen", "run", "--error-format", "gnu", "lint"], check=True) 
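For illustration, a downstream project's setup.py could consume the SetupPyWrapper returned by setup_from_pyproject roughly as in the sketch below; the import path and the package metadata are assumptions for the sketch, not part of this change (the real, tested usage lives in examples/sync_cmdclass_pyproject, exercised by test_sync_cmdclass_pyproject.py).

# sketch of a consumer setup.py; pysen.setup is an assumed module path,
# adjust it to wherever setup_from_pyproject is actually exported
from pysen.setup import setup_from_pyproject

# setup_from_pyproject reads the pyproject.toml next to this file and returns a
# SetupPyWrapper; calling it forwards the kwargs to setuptools.setup with the
# pysen cmdclass merged in, so `python setup.py lint` becomes available.
setup = setup_from_pyproject(__file__)

setup(
    name="my_package",        # illustrative metadata, not part of this diff
    version="0.1.0",
    packages=["my_package"],
)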
diff --git a/tests/example_tests/test_simple_package.py b/tests/example_tests/test_simple_package.py new file mode 100644 index 0000000..ee31fe3 --- /dev/null +++ b/tests/example_tests/test_simple_package.py @@ -0,0 +1,17 @@ +import pathlib +import subprocess + +import pytest + +from pysen.path import change_dir + +TARGET_EXAMPLE = "simple_package" + + +@pytest.mark.examples +def test_cli_run(example_dir: pathlib.Path) -> None: + target = example_dir / TARGET_EXAMPLE + with change_dir(target): + subprocess.run(["pysen", "run", "lint"], check=True) + subprocess.run(["pysen", "run", "format"], check=True) + subprocess.run(["pysen", "run", "--error-format", "gnu", "lint"], check=True) diff --git a/tests/example_tests/test_sync_cmdclass_pyproject.py b/tests/example_tests/test_sync_cmdclass_pyproject.py new file mode 100644 index 0000000..f758d82 --- /dev/null +++ b/tests/example_tests/test_sync_cmdclass_pyproject.py @@ -0,0 +1,36 @@ +import pathlib +import subprocess + +import pytest +from setuptools import sandbox + +from pysen.path import change_dir + +TARGET_EXAMPLE = "sync_cmdclass_pyproject" + + +@pytest.mark.examples +def test_cli_run(example_dir: pathlib.Path) -> None: + target = example_dir / TARGET_EXAMPLE + with change_dir(target): + subprocess.run(["pysen", "run", "lint"], check=True) + subprocess.run(["pysen", "run", "format"], check=True) + subprocess.run(["pysen", "run", "--error-format", "gnu", "lint"], check=True) + + +@pytest.mark.examples +def test_setuptools_cli(example_dir: pathlib.Path) -> None: + target = example_dir / TARGET_EXAMPLE + setup_py = target / "setup.py" + assert setup_py.exists() + + subprocess.run(["python", str(setup_py), "lint"], check=True) + + +@pytest.mark.xfail +def test_setuptools_sandbox(example_dir: pathlib.Path) -> None: + target = example_dir / TARGET_EXAMPLE + setup_py = target / "setup.py" + assert setup_py.exists() + + sandbox.run_setup(str(setup_py), ["lint"]) diff --git a/tests/fakes/__init__.py b/tests/fakes/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/tests/fakes/component.py b/tests/fakes/component.py new file mode 100644 index 0000000..c5b5b59 --- /dev/null +++ b/tests/fakes/component.py @@ -0,0 +1,101 @@ +import enum +import pathlib +from typing import DefaultDict, Dict, List, Optional, Sequence, Tuple + +from pysen import ComponentBase +from pysen.command import CommandBase +from pysen.diagnostic import Diagnostic +from pysen.reporter import Reporter +from pysen.runner_options import PathContext, RunOptions +from pysen.setting import SettingFile + + +class Operation(enum.Enum): + ADD = "+" + MUL = "*" + + +class FakeCommand(CommandBase): + def __init__( + self, coef: int, op: Operation, ref: List[float], options: RunOptions + ) -> None: + self.coef = coef + self.op = op + self.ref = ref + self.options = options + assert len(ref) == 1 + + @property + def name(self) -> str: + return f"{self.op.value} {self.coef}" + + def __call__(self, reporter: Reporter) -> int: + value = self.ref[0] + coef = float(self.coef) + + if self.op == Operation.ADD: + value += coef + elif self.op == Operation.MUL: + value *= coef + else: + raise AssertionError(f"invalid op: {self.op}") + + self.ref[0] = value + + if value >= 0.0: + return 0 + else: + if self.options.require_diagnostics: + reporter.report_diagnostics( + [Diagnostic(pathlib.Path(".").resolve(), message="")] + ) + return 1 + + +class FakeComponent(ComponentBase): + def __init__( + self, + name: str, + ops: Dict[str, Tuple[int, Operation]], + expected_base_dir: 
Optional[pathlib.Path], + expected_settings_dir: Optional[pathlib.Path], + ref: List[float], + ) -> None: + self._name = name + self._ops = ops + self._expected_base_dir = expected_base_dir + self._expected_settings_dir = expected_settings_dir + self._ref = ref + assert len(ref) == 1 + + @property + def name(self) -> str: + return self._name + + def export_settings( + self, + paths: PathContext, + files: DefaultDict[str, SettingFile], + ) -> None: + if self._expected_base_dir is not None: + assert paths.base_dir == self._expected_base_dir + if self._expected_settings_dir is not None: + assert paths.settings_dir == self._expected_settings_dir + for name, op in self._ops.items(): + fname = f"{name}.yaml" + setting_file = files[fname] + setting_file.set_section((self.name,), {"coef": op[0], "op": op[1].value}) + + @property + def targets(self) -> Sequence[str]: + return list(self._ops.keys()) + + def create_command( + self, target: str, paths: PathContext, options: RunOptions + ) -> CommandBase: + if self._expected_base_dir is not None: + assert paths.base_dir == self._expected_base_dir + if self._expected_settings_dir is not None: + assert paths.settings_dir == self._expected_settings_dir + op = self._ops[target] + return FakeCommand(op[0], op[1], self._ref, options) diff --git a/tests/fakes/configs/base.toml b/tests/fakes/configs/base.toml new file mode 100644 index 0000000..d37b7dd --- /dev/null +++ b/tests/fakes/configs/base.toml @@ -0,0 +1,28 @@ +[tool.pysen.lint] +base = "base2.toml" +enable_isort = true +line_length = 88 +py_version = "py37" +isort_known_third_party = ["fuga", "piyo"] +isort_known_first_party = ["foo"] + +[tool.pysen.plugin.ok] +function = "foo::bar" + +[tool.pysen.plugin.ok.config] + base = "base2.toml" + message = "hello ok from base" + +[tool.pysen.plugin.error-nosection] +function = "foo::bar" + +[tool.pysen.plugin.error-nosection.config] + base = "base2.toml" + message = "hello error from base" + +[tool.pysen.plugin.error-nofile] +function = "foo::bar" + +[tool.pysen.plugin.error-nofile.config] + base = "base99.toml" + message = "hello error from base" diff --git a/tests/fakes/configs/base2.toml b/tests/fakes/configs/base2.toml new file mode 100644 index 0000000..0eafd9b --- /dev/null +++ b/tests/fakes/configs/base2.toml @@ -0,0 +1,13 @@ +[tool.pysen] +builder = "good_builder.py" + +[tool.pysen.lint] +enable_black = true +line_length = 80 +isort_known_third_party = ["hoge"] + +[tool.pysen.plugin.ok] +function = "foo::bar" + +[tool.pysen.plugin.ok.config] +message = "hello ok from base2" diff --git a/tests/fakes/configs/base3.toml b/tests/fakes/configs/base3.toml new file mode 100644 index 0000000..bbcb5e2 --- /dev/null +++ b/tests/fakes/configs/base3.toml @@ -0,0 +1,6 @@ +[tool.pysen.plugin.ok] +function = "foo::bar" + +[tool.pysen.plugin.ok.config] + base = "base4.toml" + message = "this message is not a message" diff --git a/tests/fakes/configs/base4.toml b/tests/fakes/configs/base4.toml new file mode 100644 index 0000000..324f0e8 --- /dev/null +++ b/tests/fakes/configs/base4.toml @@ -0,0 +1,6 @@ +[tool.pysen.plugin.ok] +function = "foo::bar" + +[tool.pysen.plugin.ok.config] + base = "base3.toml" + message = "this message is not a message" diff --git a/tests/fakes/configs/builder.toml b/tests/fakes/configs/builder.toml new file mode 100644 index 0000000..8d2671b --- /dev/null +++ b/tests/fakes/configs/builder.toml @@ -0,0 +1,2 @@ +[tool.pysen] +builder = "good_builder.py" diff --git a/tests/fakes/configs/empty.toml b/tests/fakes/configs/empty.toml new file mode 
100644 index 0000000..fdf4404 --- /dev/null +++ b/tests/fakes/configs/empty.toml @@ -0,0 +1 @@ +[tool.pysen] diff --git a/tests/fakes/configs/error_builder.py b/tests/fakes/configs/error_builder.py new file mode 100644 index 0000000..fe37d2a --- /dev/null +++ b/tests/fakes/configs/error_builder.py @@ -0,0 +1,21 @@ +import pathlib +from typing import Optional, Sequence + +from pysen import ComponentBase, Manifest, ManifestBase, Source, factory + +# raise error while loading +raise BufferError() + + +def build( + components: Sequence[ComponentBase], src_path: Optional[pathlib.Path] +) -> ManifestBase: + assert src_path is not None + components = factory.configure_lint( + factory.ConfigureLintOptions( + enable_flake8=True, + enable_isort=True, + source=Source(includes=[src_path.resolve().parent]), + ) + ) + return Manifest(components) diff --git a/tests/fakes/configs/example.toml b/tests/fakes/configs/example.toml new file mode 100644 index 0000000..e627490 --- /dev/null +++ b/tests/fakes/configs/example.toml @@ -0,0 +1,49 @@ +[tool.pysen-cli] +settings_dir = "hoge" + +[tool.pysen] + +[tool.pysen.lint] +enable_black = true +enable_flake8 = false +enable_isort = true +enable_mypy = true +mypy_preset = "strict" +line_length = 88 +py_version = "py37" +isort_known_first_party = ["alpha"] +isort_known_third_party = ["beta", "gamma"] +mypy_ignore_packages = ["pysen.stubs", "pysen.proto"] +mypy_path = ["pysen-stubs"] +isort_default_section = "thirdparty" + +[tool.pysen.lint.source] + includes = [".", "hoge"] + include_globs = ["**/*.template"] + excludes = ["fuga"] + exclude_globs = ["foo/*_pb2.py"] + +[[tool.pysen.lint.mypy_plugins]] + script = "./sugoi/plugin" + +[[tool.pysen.lint.mypy_plugins]] + function = "sugoi_plugin:entry" + +[tool.pysen.lint.mypy_modules."apple"] + preset = "entry" + follow_imports = "silent" + +[tool.pysen.lint.mypy_modules."banana"] + ignore_errors = true + +[tool.pysen.plugin.hoge] +function = "fakes.plugin::create" + +[tool.pysen.plugin.hoge.config] + enable_c2 = true + +[tool.pysen.plugin.foo] +function = "fakes.plugin::create" + +[tool.pysen.plugin.foo.config] +enable_c1 = true diff --git a/tests/fakes/configs/good_builder.py b/tests/fakes/configs/good_builder.py new file mode 100644 index 0000000..6cae696 --- /dev/null +++ b/tests/fakes/configs/good_builder.py @@ -0,0 +1,27 @@ +import pathlib +from typing import Optional, Sequence + +from pysen import ComponentBase, Manifest, ManifestBase, Source, factory +from pysen.black import Black + + +def build( + components: Sequence[ComponentBase], src_path: Optional[pathlib.Path] +) -> ManifestBase: + black = next((x for x in components if isinstance(x, Black)), None) + source: Source + + if black is not None: + source = black.source + else: + src_path = src_path or pathlib.Path.cwd() + source = Source(includes=[src_path.resolve().parent]) + + components = factory.configure_lint( + factory.ConfigureLintOptions( + enable_flake8=True, + enable_isort=True, + source=source, + ) + ) + return Manifest(components) diff --git a/tests/fakes/configs/invalid_interface_builder.py b/tests/fakes/configs/invalid_interface_builder.py new file mode 100644 index 0000000..a7da1a9 --- /dev/null +++ b/tests/fakes/configs/invalid_interface_builder.py @@ -0,0 +1,18 @@ +import pathlib +from typing import Optional, Sequence + +from pysen import ComponentBase, Manifest, ManifestBase, Source, factory + + +def build2( + components: Sequence[ComponentBase], src_path: Optional[pathlib.Path] +) -> ManifestBase: + assert src_path is not None + components = 
factory.configure_lint( + factory.ConfigureLintOptions( + enable_flake8=True, + enable_isort=True, + source=Source(includes=[src_path.resolve().parent]), + ) + ) + return Manifest(components) diff --git a/tests/fakes/configs/invalid_return_builder.py b/tests/fakes/configs/invalid_return_builder.py new file mode 100644 index 0000000..e2ed26e --- /dev/null +++ b/tests/fakes/configs/invalid_return_builder.py @@ -0,0 +1,8 @@ +import pathlib +from typing import Optional, Sequence + +from pysen import ComponentBase + + +def build(components: Sequence[ComponentBase], src_path: Optional[pathlib.Path]) -> int: + return 42 diff --git a/tests/fakes/configs/non_pysen_config.toml b/tests/fakes/configs/non_pysen_config.toml new file mode 100644 index 0000000..4a5abf3 --- /dev/null +++ b/tests/fakes/configs/non_pysen_config.toml @@ -0,0 +1,3 @@ +[tool.nonpysen] +line-length = 100 +target-version = ["py37"] diff --git a/tests/fakes/configs/plugin.toml b/tests/fakes/configs/plugin.toml new file mode 100644 index 0000000..66711bd --- /dev/null +++ b/tests/fakes/configs/plugin.toml @@ -0,0 +1,12 @@ +[tool.pysen] + +[tool.pysen.plugin.hoge] +script = "../plugin.py" + +[tool.pysen.plugin.fuga] +function = "fakes.plugins::create" + +[tool.pysen.plugin.fuga.config] + message = "hello" + value = 10.0 + flag = false diff --git a/tests/fakes/configs/simple_source.toml b/tests/fakes/configs/simple_source.toml new file mode 100644 index 0000000..c3e33f3 --- /dev/null +++ b/tests/fakes/configs/simple_source.toml @@ -0,0 +1,6 @@ +[tool.pysen.lint] +enable_mypy = true +mypy_preset = "entry" +line_length = 80 +py_version = "py27" +source = [".", "hoge", "piyo"] diff --git a/tests/fakes/manifest.py b/tests/fakes/manifest.py new file mode 100644 index 0000000..eb16b83 --- /dev/null +++ b/tests/fakes/manifest.py @@ -0,0 +1,78 @@ +import argparse +import pathlib +from typing import Dict, List, Mapping, Optional, Sequence + +from pysen.command import CommandBase +from pysen.manifest import ( + ComponentName, + ManifestBase, + ParserType, + TargetName, + TargetType, +) +from pysen.runner_options import PathContext, RunOptions + + +class FakeManifest(ManifestBase): + def __init__( + self, + expected_base_dir: pathlib.Path, + expected_settings_dir: Optional[pathlib.Path], + num_required: bool, + items: Mapping[str, Sequence[CommandBase]], + special_item: Sequence[CommandBase], + ) -> None: + self._expected_base_dir = expected_base_dir + self._expected_settings_dir = expected_settings_dir + self._num_required = num_required + self._items = {k: list(v) for k, v in items.items()} + self._special_item = list(special_item) + # for test assertions + self._latest_args: Optional[argparse.Namespace] = None + + def configure_parser(self, parser: ParserType) -> None: + parser.add_argument("--special", action="store_true") + parser.add_argument("--num", type=int, default=0, required=self._num_required) + + def _targets(self, args: argparse.Namespace) -> Dict[str, TargetType]: + ret: Dict[str, TargetType] = self._items.copy() + if args.special: + ret["special"] = self._special_item + + length = args.num + if length > 0: + for k in ret.keys(): + v = ret[k][:length] + ret[k] = v + + return ret + + def export_settings(self, paths: PathContext, args: argparse.Namespace) -> None: + assert paths.base_dir == self._expected_base_dir + if self._expected_settings_dir is not None: + assert paths.settings_dir == self._expected_settings_dir + self._latest_args = args + + def get_targets(self, args: argparse.Namespace) -> Dict[str, List[ComponentName]]: + 
targets = self._targets(args) + return {name: [x.name for x in value] for name, value in targets.items()} + + def get_target( + self, + target: TargetName, + paths: PathContext, + options: RunOptions, + args: argparse.Namespace, + ) -> TargetType: + assert paths.base_dir == self._expected_base_dir + if self._expected_settings_dir is not None: + assert paths.settings_dir == self._expected_settings_dir + return self._targets(args)[target] + + @property + def latest_args(self) -> Optional[argparse.Namespace]: + return self._latest_args + + @property + def clear_latest_args(self) -> None: + self._latest_args = None diff --git a/tests/fakes/plugin.py b/tests/fakes/plugin.py new file mode 100644 index 0000000..263d29b --- /dev/null +++ b/tests/fakes/plugin.py @@ -0,0 +1,65 @@ +import dataclasses +import pathlib +from typing import List, Sequence + +import dacite + +from fakes.component import FakeComponent, Operation +from pysen.component import ComponentBase +from pysen.plugin import PluginBase +from pysen.pyproject_model import Config, PluginConfig + + +@dataclasses.dataclass +class FakePluginConfig: + enable_c1: bool = False + enable_c2: bool = False + + +class FakePlugin(PluginBase): + def __init__(self, name: str) -> None: + self._name = name + + def load( + self, file_path: pathlib.Path, config_data: PluginConfig, root: Config + ) -> Sequence[ComponentBase]: + config = FakePluginConfig() + if config_data.config is not None: + config = dacite.from_dict( + FakePluginConfig, config_data.config, dacite.Config(strict=True) + ) + + r = [0.0] + components: List[ComponentBase] = [] + + if config.enable_c1: + components.append( + FakeComponent( + f"{self._name}_component1", + {"op1": (2, Operation.MUL), "op2": (10, Operation.ADD)}, + None, + None, + r, + ) + ) + + if config.enable_c2: + components.append( + FakeComponent( + f"{self._name}_component2", + {"op1": (3, Operation.MUL), "op3": (-1, Operation.MUL)}, + None, + None, + r, + ) + ) + + return components + + +def create() -> PluginBase: + return FakePlugin("create") + + +def plugin() -> PluginBase: + return FakePlugin("plugin") diff --git a/tests/fakes/py.typed b/tests/fakes/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/tests/fakes/pyproject.toml b/tests/fakes/pyproject.toml new file mode 100644 index 0000000..dedfd11 --- /dev/null +++ b/tests/fakes/pyproject.toml @@ -0,0 +1,3 @@ +[tool.nonpysen] +# this pyproject.toml is used to check if pysen cli finds a pyproject that has [tool.pysen]. +# See test_pyproject.py for more details. 
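For context on the plugin interface exercised by fakes/plugin.py above, a minimal user-defined plugin could look like the sketch below; the module name my_plugin and the plugin key are illustrative assumptions, and a real plugin would return actual ComponentBase instances built from its config.

# my_plugin.py -- hypothetical module, referenced from pyproject.toml as:
#   [tool.pysen.plugin.example]
#   function = "my_plugin::plugin"
import pathlib
from typing import Sequence

from pysen.component import ComponentBase
from pysen.plugin import PluginBase
from pysen.pyproject_model import Config, PluginConfig


class ExamplePlugin(PluginBase):
    def load(
        self, file_path: pathlib.Path, config_data: PluginConfig, root: Config
    ) -> Sequence[ComponentBase]:
        # A real plugin would build components from config_data.config
        # (see FakePlugin above); an empty sequence keeps the sketch minimal.
        return []


def plugin() -> PluginBase:
    return ExamplePlugin()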
diff --git a/tests/test__version.py b/tests/test__version.py new file mode 100644 index 0000000..a97c57b --- /dev/null +++ b/tests/test__version.py @@ -0,0 +1,7 @@ +from pysen._version import __version__ +from pysen.py_version import VersionRepresentation + + +def test__version() -> None: + # version string MUST be in a format the VersionRepresentation understands + VersionRepresentation.from_str(__version__) diff --git a/tests/test_black.py b/tests/test_black.py new file mode 100644 index 0000000..7744852 --- /dev/null +++ b/tests/test_black.py @@ -0,0 +1,10 @@ +from pathlib import Path + +from pysen.ext.black_wrapper import _parse_file_path + + +def test__parse_file_path() -> None: + black_format = ( + "path/test_error_line_parser.py 2020-06-01 07:19:58.515112 +0000" + ) + assert _parse_file_path(black_format) == Path("path/test_error_line_parser.py") diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 0000000..bb8287a --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,11 @@ +from unittest import mock + +from pysen.cli import _use_pretty_logging + + +def test__use_pretty_logging() -> None: + with mock.patch("sys.stderr.isatty", return_value=True): + assert _use_pretty_logging() + + with mock.patch("sys.stderr.isatty", return_value=False): + assert not _use_pretty_logging() diff --git a/tests/test_cli_config.py b/tests/test_cli_config.py new file mode 100644 index 0000000..ab4ae93 --- /dev/null +++ b/tests/test_cli_config.py @@ -0,0 +1,19 @@ +import pathlib + +from pysen.cli_config import parse + +BASE_DIR = pathlib.Path(__file__).resolve().parent +CONFIG_DIR = BASE_DIR / "fakes/configs" + + +def test_example() -> None: + config = parse(CONFIG_DIR / "example.toml") + assert config is not None + assert config.settings_dir is not None + assert config.settings_dir == CONFIG_DIR / "hoge" + assert config.settings_dir.is_absolute() + + +def test_parse() -> None: + assert parse(CONFIG_DIR / "example.toml") is not None + assert parse(CONFIG_DIR / "simple_source.toml") is None diff --git a/tests/test_command.py b/tests/test_command.py new file mode 100644 index 0000000..cd08eab --- /dev/null +++ b/tests/test_command.py @@ -0,0 +1,27 @@ +import pathlib +import subprocess +import tempfile +import unittest.mock +from typing import Iterator, List + +import pytest + +from pysen.command import check_command_installed +from pysen.exceptions import CommandNotFoundError + + +@pytest.fixture +def invalid_command() -> Iterator[List[str]]: + with tempfile.TemporaryDirectory() as td: + yield [str(pathlib.Path(td) / "this_command_does_not_exist")] + + +def test_check_command_installed(invalid_command: List[str]) -> None: + check_command_installed("echo", "a") + + with pytest.raises(CommandNotFoundError): + check_command_installed(*invalid_command) + with unittest.mock.patch("subprocess.call", return_value=127): + with pytest.raises(CommandNotFoundError): + assert subprocess.call("hoge") == 127 + check_command_installed("hoge") diff --git a/tests/test_diagnostic.py b/tests/test_diagnostic.py new file mode 100644 index 0000000..f0c09cb --- /dev/null +++ b/tests/test_diagnostic.py @@ -0,0 +1,52 @@ +from pathlib import Path + +import pytest + +from pysen.diagnostic import Diagnostic, FLCMFormatter, _format_diagnostic_position + + +def test_diagnostic_post_init() -> None: + path = Path("/path/to/file") + Diagnostic(file_path=path, message="error") + Diagnostic(file_path=path, diff="diff") + with pytest.raises(ValueError): + Diagnostic(file_path=path) + + +def test_diagnostic_formatter() -> 
None: + path = Path("/path/to/file") + formatter = FLCMFormatter + err = formatter.format( + Diagnostic( + start_line=10, + end_line=12, + start_column=3, + file_path=path, + message="line1\nline2", + ), + "my_command", + ) + assert err == f"{path.resolve()}:10:3:my_command: line1\\nline2" + + err = formatter.format( + Diagnostic( + start_line=10, + end_line=12, + start_column=3, + file_path=path, + diff="-line1\n+line2\n", + ), + "my_command2", + ) + assert err == f"{path.resolve()}:10:3:my_command2: -line1\\n+line2\\n" + + +def test__format_diagnostic_position() -> None: + path = Path("/path/to/file") + position = _format_diagnostic_position( + Diagnostic(start_line=10, end_line=12, start_column=3, file_path=path, diff="") + ) + assert position == f"{path.resolve()}:10:3" + + position = _format_diagnostic_position(Diagnostic(file_path=path, diff="")) + assert position == f"{path.resolve()}:1:1" diff --git a/tests/test_dumper.py b/tests/test_dumper.py new file mode 100644 index 0000000..ba3e74b --- /dev/null +++ b/tests/test_dumper.py @@ -0,0 +1,126 @@ +import pathlib +import re +import tempfile +from typing import Iterator + +import pytest +import tomlkit + +from pysen import dumper +from pysen.setting import SettingFile + +MULTIPLE_SPACES = re.compile(r"\s+") + + +def normalize_spaces(s: str) -> str: + return MULTIPLE_SPACES.sub(" ", s) + + +@pytest.fixture +def test_data() -> SettingFile: + foo = { + "hoge": True, + "fuga": ["c", "A", "B", "d"], + "piyo": "description", + } + bar = { + "c": "yes", + "a": 1.0, + "b": 12345, + } + + s = SettingFile() + s.set_section(["foo"], foo) + s.set_section(["bar"], bar) + return s + + +@pytest.fixture +def test_nested_data() -> SettingFile: + foo = { + "piyo": "description", + "hoge": True, + "fuga": ["c", "A", "B", "d"], + "hogehoge": {"z": ["1", "3", "2"], "x": "Hello", "y": 42}, + } + bar = { + "c": "yes", + "a": 1.0, + "b": 12345, + "d": {"d-2": "hoge", "d-1": {"d-1-b": True, "d-1-a": False, "d-1-c": True}}, + } + + s = SettingFile() + s.set_section(["tool", "foo"], foo) + s.set_section(["tool", "bar"], bar) + return s + + +@pytest.fixture +def temp_file() -> Iterator[pathlib.Path]: + with tempfile.NamedTemporaryFile() as tmp: + yield pathlib.Path(tmp.name) + + +def _assert_toml(path: pathlib.Path, expected: SettingFile) -> None: + with path.open("r") as f: + data = dict(tomlkit.loads(f.read())) + + assert data == expected.as_dict() + + +def test_dump_toml(temp_file: pathlib.Path, test_nested_data: SettingFile) -> None: + dumper.dump_toml(temp_file, test_nested_data) + expected = """[tool] +[tool.bar] # automatically generated by pysen +a = 1.0 +b = 12345 +c = "yes" +[tool.bar.d] +d-2 = "hoge" +[tool.bar.d.d-1] +d-1-a = false +d-1-b = true +d-1-c = true +[tool.foo] # automatically generated by pysen +fuga = ["A", "B", "c", "d"] +hoge = true +piyo = "description" +[tool.foo.hogehoge] + x = "Hello" + y = 42 + z = ["1", "2", "3"] +""" + + with temp_file.open("r") as f: + actual = f.read() + assert normalize_spaces(actual) == normalize_spaces(expected) + + +def test_dump_cfg(temp_file: pathlib.Path, test_data: SettingFile) -> None: + dumper.dump_cfg(temp_file, test_data) + expected = """[bar] +# automatically generated by pysen +a = 1.0 +b = 12345 +c = yes +[foo] +# automatically generated by pysen +fuga = A,B,c,d +hoge = True +piyo = description +""" + with temp_file.open("r") as f: + actual = f.read() + + assert normalize_spaces(actual) == normalize_spaces(expected) + + +def test_dump(test_data: SettingFile) -> None: + with 
tempfile.TemporaryDirectory() as tmp: + d = pathlib.Path(tmp) + dumper.dump(d, "hoge.toml", test_data) + dumper.dump(d, "hoge.cfg", test_data) + + with pytest.raises(RuntimeError): + dumper.dump(d, "hoge.bin", test_data) diff --git a/tests/test_error_lines.py b/tests/test_error_lines.py new file mode 100644 index 0000000..65ad8ce --- /dev/null +++ b/tests/test_error_lines.py @@ -0,0 +1,145 @@ +from pathlib import Path +from unittest import mock + +from pysen.error_lines import parse_error_diffs, parse_error_lines +from pysen.ext.black_wrapper import _parse_file_path + +std_err1 = "/path/to/file1.py:70:5: error: Missing return statement [return]\n" +std_err2 = "/path/to/file2.py:71:6: error: Missing return statement [return]\n" +std_err = "".join([std_err1, std_err2]) +mypy_invalid_format = ( + "/home/user/pysen/__init__.py:72: error: unused 'type: ignore' comment\n" +) + + +diff_err1 = """--- /tmp/tmp.py 2020-05-29 13:45:10.907383 +0000 ++++ /tmp/tmp.py 2020-05-29 13:45:12.563367 +0000 +@@ -1,7 +1,8 @@ + class Hoge: +- a=3 ++ a = 3 ++ + + for a in range(3): + print(a) + + for b in range(3): +@@ -17,6 +18,7 @@ + print(a) + + for b in range(3): + print(b) + +-answer_to_everything=42 ++answer_to_everything = 42 ++ +""" # NOQA + +diff_err2 = """--- /home/user/pysen/pysen/cli.py 2020-06-10 05:31:01.167304 +0000 ++++ /home/user/pysen/pysen/cli.py 2020-06-10 05:35:33.973467 +0000 +@@ -147,10 +147,11 @@ + ) + parser.add_argument( + "--version", action="store_true", help="Show pysen version and exit", + ) + import math ++ + parser.add_argument( + "--loglevel", type=str, help="Set loglevel", choices=list(LogLevel.__members__), + ) + return parser + +""" # NOQA + +diff_err3 = """--- /home/user/pysen/pysen/cli.py 2020-06-10 05:31:01.167304 +0000 ++++ /home/user/pysen/pysen/cli.py 2020-06-10 05:35:33.973467 +0000 +@@ -147,10 +147,9 @@ + ) + parser.add_argument( + "--version", action="store_true", help="Show pysen version and exit", + ) + import math +- + parser.add_argument( + "--loglevel", type=str, help="Set loglevel", choices=list(LogLevel.__members__), + ) + return parser + +""" # NOQA + + +def test_standard_parser() -> None: + err1, err2 = parse_error_lines(std_err) + + assert err1.file_path == Path("/path/to/file1.py") + assert err1.start_line == 70 + assert err1.end_line == 70 + assert err1.start_column == 5 + assert err1.message == "error: Missing return statement [return]" + + assert err2.file_path == Path("/path/to/file2.py") + assert err2.start_line == 71 + assert err2.end_line == 71 + assert err2.start_column == 6 + assert err2.message == "error: Missing return statement [return]" + + err3 = list(parse_error_lines(mypy_invalid_format))[0] + + assert err3.file_path == Path("/home/user/pysen/__init__.py") + assert err3.start_line == 72 + assert err3.end_line == 72 + assert err3.start_column is None + assert err3.message == "error: unused 'type: ignore' comment" + + with mock.patch("pysen.error_lines._warn_parse_error") as warn: + error = "invalid_format\n" + list(parse_error_lines(error)) + warn.assert_called_with(error.rstrip("\n"), None) + + logger = mock.Mock() + with mock.patch("pysen.error_lines._warn_parse_error") as warn: + error = "invalid_format\n" + list(parse_error_lines(error, logger)) + warn.assert_called_with(error.rstrip("\n"), logger) + + +def test_diff_parser() -> None: + err1, err2 = parse_error_diffs(diff_err1, _parse_file_path) + + assert err1.start_line == 2 + assert err1.end_line == 2 + assert err1.start_column == 1 + assert err1.file_path == Path("/tmp/tmp.py") + assert 
err1.diff == "- a=3\n+ a = 3\n+\n" + + assert err2.start_line == 22 + assert err2.end_line == 22 + assert err2.start_column == 1 + assert err2.file_path == Path("/tmp/tmp.py") + assert err2.diff == "-answer_to_everything=42\n+answer_to_everything = 42\n+\n" + with mock.patch("pysen.error_lines._warn_parse_error") as warn: + diff = "---/tmp/tmp.py\n+++/tmp/tmp.py\n@@ -1,7 +1,8 @@\ninvalid\n" + list(parse_error_diffs(diff, _parse_file_path)) + warn.assert_called_with(diff, None) + + logger = mock.Mock() + with mock.patch("pysen.error_lines._warn_parse_error") as warn: + diff = "---/tmp/tmp.py\n+++/tmp/tmp.py\n@@ -1,7 +1,8 @@\ninvalid\n" + list(parse_error_diffs(diff, _parse_file_path, logger)) + warn.assert_called_with(diff, logger) + + # has only target diff + errors = list(parse_error_diffs(diff_err2, _parse_file_path)) + assert len(errors) == 1 + err = errors[0] + assert err.start_line == 152 + assert err.end_line == 152 + assert err.diff == "+\n" + + # has only source diff + errors = list(parse_error_diffs(diff_err3, _parse_file_path)) + assert len(errors) == 1 + err = errors[0] + assert err.start_line == 152 + assert err.end_line == 152 + assert err.diff == "-\n\n" diff --git a/tests/test_factory.py b/tests/test_factory.py new file mode 100644 index 0000000..1f36b5a --- /dev/null +++ b/tests/test_factory.py @@ -0,0 +1,13 @@ +from pysen.black import Black +from pysen.factory import ConfigureLintOptions, configure_lint +from pysen.source import Source + + +def test_configure_lint_default_source() -> None: + components = configure_lint(ConfigureLintOptions(enable_black=True, source=None)) + assert len(components) == 1 + black = components[0] + assert isinstance(black, Black) + source = black.source + assert isinstance(source, Source) + assert source.includes.keys() == {"."} diff --git a/tests/test_flake8.py b/tests/test_flake8.py new file mode 100644 index 0000000..5fbef19 --- /dev/null +++ b/tests/test_flake8.py @@ -0,0 +1,14 @@ +from typing import Any, Dict + +from pysen.ext.flake8_wrapper import Flake8Setting + + +def test_flake8_setting_comment() -> None: + flake8 = Flake8Setting().to_black_compatible() + assert len(flake8._comments) > 0 + section_name, section = flake8.export() + assert section_name == ["flake8"] + comments: Dict[str, Any] = {k: v for k, v in section.items() if k.startswith("#")} + assert len(comments) > 0 + # check that each comment entry doesn't have a value + assert all(x is None for x in comments.values()) diff --git a/tests/test_git_utils.py b/tests/test_git_utils.py new file mode 100644 index 0000000..3a55813 --- /dev/null +++ b/tests/test_git_utils.py @@ -0,0 +1,114 @@ +import pathlib +import tempfile +from typing import Sequence + +import git +import pytest +from _pytest.monkeypatch import MonkeyPatch + +from pysen import git_utils +from pysen.git_utils import GitRepositoryNotFoundError + +BASE_DIR = pathlib.Path(__file__).resolve().parent + + +def test_check_git_available() -> None: + with tempfile.TemporaryDirectory() as d: + tempdir = pathlib.Path(d) + assert git_utils._check_git_enabled() + assert not git_utils.check_git_available(tempdir) + + git.Repo.init(tempdir) + assert git_utils._check_git_enabled() + assert git_utils.check_git_available(tempdir) + + +def test_skip_git_check(monkeypatch: MonkeyPatch) -> None: + monkeypatch.setenv("PYSEN_IGNORE_GIT", "1") + with tempfile.TemporaryDirectory() as d: + tempdir = pathlib.Path(d) + assert not git_utils._check_git_enabled() + assert not git_utils.check_git_available(tempdir) + + git.Repo.init(tempdir) + 
assert not git_utils._check_git_enabled() + assert not git_utils.check_git_available(tempdir) + + +def test_dont_skip_git_check(monkeypatch: MonkeyPatch) -> None: + monkeypatch.setenv("PYSEN_IGNORE_GIT", "0") + with tempfile.TemporaryDirectory() as d: + tempdir = pathlib.Path(d) + assert not git_utils.check_git_available(tempdir) + + git.Repo.init(tempdir) + assert git_utils.check_git_available(tempdir) + + +def test_list_indexed_files() -> None: + def list_indexed_files(target_dir: pathlib.Path) -> Sequence[pathlib.Path]: + ret = git_utils.list_indexed_files(target_dir) + git_utils.list_indexed_files.cache_clear() + return ret + + with tempfile.TemporaryDirectory() as d: + tempdir = pathlib.Path(d).resolve() + + with pytest.raises(GitRepositoryNotFoundError): + list(list_indexed_files(tempdir)) + + repo = git.Repo.init(tempdir) + + assert list(list_indexed_files(tempdir)) == [] + + test_file = tempdir / "hoge" + test_file.touch() + + assert list(list_indexed_files(tempdir)) == [] + repo.index.add([str(test_file)]) + + assert list(list_indexed_files(tempdir)) == [test_file] + + test_dir = tempdir / "foo" + test_dir2 = tempdir / "foo_2" + + test_dir.mkdir() + test_dir2.mkdir() + + test_file2 = test_dir / "a" + test_file2.touch() + + test_file3 = test_dir2 / "b" + test_file3.touch() + + repo.index.add([str(test_file2), str(test_file3)]) + assert set(list_indexed_files(tempdir)) == { + test_file, + test_file2, + test_file3, + } + assert set(list_indexed_files(tempdir / "foo")) == {test_file2} + assert set(list_indexed_files(tempdir / "foo_2")) == {test_file3} + + assert set(list_indexed_files(tempdir)) == {test_file, test_file2, test_file3} + # a file is removed without being staged + test_file3.unlink() + assert set(list_indexed_files(tempdir)) == {test_file, test_file2} + + +def test_check_tracked() -> None: + with tempfile.TemporaryDirectory() as d: + tempdir = pathlib.Path(d) + + test_file = tempdir / "hoge" + test_file.touch() + + with pytest.raises(GitRepositoryNotFoundError): + git_utils.check_tracked(test_file) + + repo = git.Repo.init(tempdir) + assert not git_utils.check_tracked(test_file) + + repo.index.add([str(test_file)]) + + assert git_utils.check_tracked(test_file) diff --git a/tests/test_init.py b/tests/test_init.py new file mode 100644 index 0000000..627c52c --- /dev/null +++ b/tests/test_init.py @@ -0,0 +1,107 @@ +import argparse +import pathlib +import tempfile +import unittest.mock +from typing import Optional, Sequence + +import pytest + +import pysen +from pysen import ConfigureLintOptions, configure_lint +from pysen.exceptions import CommandNotFoundError +from pysen.manifest import Manifest, TargetType +from pysen.reporter import ReporterFactory +from pysen.runner_options import RunOptions + +BASE_DIR = pathlib.Path(__file__).resolve().parent + + +def test_load_manifest() -> None: + manifest = pysen.load_manifest(BASE_DIR / "fakes/configs/example.toml") + assert manifest is not None + + with pytest.raises(FileNotFoundError): + pysen.load_manifest(BASE_DIR / "no_such_file.toml") + + +def test_build_manifest() -> None: + # NOTE(igarashi): since build_manifest is just a reference for pysen.build_manifest.build, + # we just check if the function does not raise an error in this test. 
+ manifest = pysen.build_manifest( + [], external_builder=BASE_DIR / "fakes/configs/good_builder.py" + ) + assert manifest is not None + + +def test_run() -> None: + with unittest.mock.patch( + "pysen.runner.Runner.export_settings" + ) as mock_export, unittest.mock.patch("pysen.runner.run_target") as mock_run: + assert pysen.run( + BASE_DIR, "lint", pyproject=BASE_DIR / "fakes/configs/example.toml" + ) + mock_export.assert_called() + + # check if settings_dir is correctly handled + mock_export.reset_mock() + with tempfile.TemporaryDirectory() as d: + td = pathlib.Path(d) + assert pysen.run( + BASE_DIR, + "lint", + pyproject=BASE_DIR / "fakes/configs/example.toml", + settings_dir=td, + ) + mock_export.assert_called_once_with( + BASE_DIR, td, argparse.Namespace(disable=None, enable=None) + ) + + with pytest.raises(CommandNotFoundError): + assert pysen.run( + BASE_DIR, "lint2", pyproject=BASE_DIR / "fakes/configs/example.toml" + ) + + components = configure_lint(ConfigureLintOptions(enable_black=True)) + assert pysen.run(BASE_DIR, "lint", components=components) + with pytest.raises(CommandNotFoundError): + assert pysen.run(BASE_DIR, "lint2", components=components) + + manifest = Manifest(components) + assert pysen.run(BASE_DIR, "lint", manifest=manifest) + with pytest.raises(CommandNotFoundError): + assert pysen.run(BASE_DIR, "lint2", manifest=manifest) + + # TODO(igarashi): Add test to check run() handles both args and manifest_args + + with pytest.raises(FileNotFoundError): + pysen.run(BASE_DIR, "lint", pyproject=BASE_DIR / "no_such_file.toml") + + with pytest.raises(ValueError): + pysen.run(BASE_DIR, "lint") + + with pytest.raises(ValueError): + pysen.run( + BASE_DIR, + "lint", + pyproject=BASE_DIR / "fakes/configs/example.toml", + manifest=manifest, + ) + + # NOTE(igarashi): Check that run() returns False when the command reports an error + + def side_effect( + target: TargetType, + reporters: ReporterFactory, + options: RunOptions, + files: Optional[Sequence[pathlib.Path]], + ) -> None: + with reporters.create("hoge") as r: + r.set_result(False, 128) + + mock_run.side_effect = side_effect + + assert not pysen.run( + BASE_DIR, "lint", pyproject=BASE_DIR / "fakes/configs/example.toml" + ) + assert not pysen.run(BASE_DIR, "lint", components=components) + assert not pysen.run(BASE_DIR, "lint", manifest=manifest) diff --git a/tests/test_isort.py b/tests/test_isort.py new file mode 100644 index 0000000..64c9e99 --- /dev/null +++ b/tests/test_isort.py @@ -0,0 +1,121 @@ +from pathlib import Path +from unittest import mock + +import pkg_resources +import pytest + +from pysen.exceptions import ( + DistributionNotFound, + IncompatibleVersionError, + UnexpectedErrorFormat, +) +from pysen.ext.isort_wrapper import ( + IsortSectionName, + IsortSetting, + _check_version_compatibility, + _get_isort_version, + _parse_file_path, +) +from pysen.py_version import VersionRepresentation + + +def test_export() -> None: + setting = IsortSetting( + known_third_party={"alpha", "beta"}, + default_section=IsortSectionName.THIRDPARTY, + sections=[IsortSectionName.FUTURE, IsortSectionName.STDLIB], + force_single_line=True, + line_length=80, + ) + + name, section = setting.export() + assert name == ["tool", "isort"] + + assert section == { + "known_third_party": ["alpha", "beta"], + "default_section": "THIRDPARTY", + "sections": ["FUTURE", "STDLIB"], + "line_length": 80, + "force_single_line": True, + # NOTE(igarashi): the following values are emitted by default + "force_grid_wrap": 0, + "include_trailing_comma": True, 
+ "multi_line_output": 3, + "use_parentheses": True, + } + + +def test_to_black_compatible() -> None: + setting = IsortSetting( + force_single_line=False, + include_trailing_comma=False, + multi_line_output=1, + ensure_newline_before_comments=False, + force_grid_wrap=1, + use_parentheses=False, + ) + with mock.patch( + "pysen.ext.isort_wrapper._get_isort_version", + return_value=VersionRepresentation(5, 0, 0), + ): + black_compat = setting.to_black_compatible() + assert black_compat.multi_line_output == 3 + assert black_compat.include_trailing_comma + assert black_compat.force_grid_wrap == 0 + assert black_compat.use_parentheses + assert black_compat.ensure_newline_before_comments + + +def test__check_version_compatibility() -> None: + with pytest.raises(IncompatibleVersionError): + _check_version_compatibility(True, VersionRepresentation(4, 0)) + with pytest.raises(IncompatibleVersionError): + _check_version_compatibility(False, VersionRepresentation(4, 0)) + _check_version_compatibility(None, VersionRepresentation(4, 0)) + _check_version_compatibility(True, VersionRepresentation(5, 0)) + _check_version_compatibility(False, VersionRepresentation(5, 0)) + _check_version_compatibility(None, VersionRepresentation(5, 0)) + + +def test__parse_file_path() -> None: + isort_format_before = ( + "/path/to/error_line_parser.py:before 2020-06-01 16:18:40.123155" + ) + isort_format_after = ( + "/path/to/error_line_parser.py:after 2020-06-01 16:18:40.123155" + ) + isort_format_invalid = ( + "/path/to/error_line_parser.py 2020-06-01 16:18:40.123155" + ) + + assert _parse_file_path(isort_format_before) == Path( + "/path/to/error_line_parser.py" + ) + assert _parse_file_path(isort_format_after) == Path("/path/to/error_line_parser.py") + with pytest.raises(UnexpectedErrorFormat): + _parse_file_path(isort_format_invalid) + + +def test__get_isort_version() -> None: + def get_version() -> VersionRepresentation: + _get_isort_version.cache_clear() + return _get_isort_version() + + distro = "pkg_resources.get_distribution" + # pass case + with mock.patch(distro, return_value=mock.Mock(version="4.3.21")): + assert get_version() == VersionRepresentation(4, 3, 21) + with mock.patch(distro, return_value=mock.Mock(version="5.1.2")): + assert get_version() == VersionRepresentation(5, 1, 2) + # incompatible version + with pytest.raises(IncompatibleVersionError) as e: + with mock.patch(distro, return_value=mock.Mock(version="3.0.0")): + get_version() + assert "version 3.0.0 is not supported" in str(e) + # isort cannot be imported + with pytest.raises(DistributionNotFound) as e: + with mock.patch( + distro, side_effect=pkg_resources.DistributionNotFound("req", "requires") + ): + get_version() + assert "Expected isort to be installed" in str(e) diff --git a/tests/test_lint_command.py b/tests/test_lint_command.py new file mode 100644 index 0000000..b7284c4 --- /dev/null +++ b/tests/test_lint_command.py @@ -0,0 +1,194 @@ +import logging +import pathlib +from tempfile import TemporaryDirectory +from typing import List, Optional, Set +from unittest import mock + +from pysen.diagnostic import Diagnostic +from pysen.lint_command import ( + LintCommandBase, + SingleFileFormatCommandBase, + SingleFileLintCommandBase, +) +from pysen.reporter import Reporter +from pysen.source import FilePredicateType, Source + +BASE_DIR = pathlib.Path(__file__).resolve().parent + + +class FakeLintCommand(LintCommandBase): + @property + def name(self) -> str: + return "fake" + + def __call__(self, reporter: Reporter) -> int: + pass + + +class 
FakeHandler(logging.Handler): + def __init__(self) -> None: + super().__init__() + self._messages: List[str] = [] + + @property + def messages(self) -> List[str]: + return self._messages + + def emit(self, record: logging.LogRecord) -> None: + self._messages.append(record.msg) + + +class FakeSource(Source): + def resolve_files( + self, + base_dir: pathlib.Path, + filter_predicate: Optional[FilePredicateType], + use_git: bool = True, + reporter: Optional[Reporter] = None, + ) -> Set[pathlib.Path]: + assert filter_predicate is not None + + files = filter( + filter_predicate, map(pathlib.Path, ["foo.py", "bar.pyi", "baz.txt"]) + ) + return {base_dir / f for f in files} + + +class FakeSingleFileLintCommand(SingleFileLintCommandBase): + @property + def name(self) -> str: + return "fake" + + def filter(self, file_path: pathlib.Path) -> bool: + return file_path.suffix in {".py", ".pyi"} + + def check(self, file_path: pathlib.Path, reporter: Reporter) -> bool: + pass + + +class FakeSingleFileFormatCommand(SingleFileFormatCommandBase): + @property + def name(self) -> str: + return "fake" + + def filter(self, file_path: pathlib.Path) -> bool: + return file_path.suffix in {".py", ".pyi"} + + def format(self, file_path: pathlib.Path, reporter: Reporter) -> Optional[str]: + pass + + +def test_lint_command_base() -> None: + source = Source() + command = FakeLintCommand(BASE_DIR, source) + + assert command.name == "fake" + assert command.base_dir == BASE_DIR + assert command.source == source + + with mock.patch("pysen.git_utils.check_git_available", return_value=True): + assert command.git_enabled() + + with mock.patch("pysen.git_utils.check_git_available", return_value=False): + assert not command.git_enabled() + + +def test_single_file_lint_command_base() -> None: + with TemporaryDirectory() as t: + base_dir = pathlib.Path(t) + + for file_path in {"foo.py", "bar.pyi", "baz.txt"}: + (base_dir / file_path).touch() + + command = FakeSingleFileLintCommand(base_dir, FakeSource()) + with mock.patch.object(command, "check", return_value=True) as check: + reporter = Reporter("fake") + handler = FakeHandler() + reporter.process_output.addHandler(handler) + assert command(reporter) == 0 + assert check.call_count == 2 + assert len(handler.messages) == 0 + assert len(reporter.diagnostics) == 0 + + with mock.patch.object(command, "check") as check: + check.side_effect = [True, False] + reporter = Reporter("fake") + handler = FakeHandler() + reporter.process_output.addHandler(handler) + assert command(reporter) == 1 + assert check.call_count == 2 + assert len(handler.messages) == 0 + assert len(reporter.diagnostics) == 0 + + +def test_single_file_format_command_base() -> None: + with TemporaryDirectory() as t: + base_dir = pathlib.Path(t) + + for file_path in {"foo.py", "bar.pyi", "baz.txt"}: + (base_dir / file_path).touch() + + command = FakeSingleFileFormatCommand( + base_dir, FakeSource(), inplace_edit=False + ) + with mock.patch.object(command, "format", return_value="") as format_method: + reporter = Reporter("fake") + handler = FakeHandler() + reporter.process_output.addHandler(handler) + assert command(reporter) == 0 + assert format_method.call_count == 2 + assert len(handler.messages) == 0 + assert len(reporter.diagnostics) == 0 + + with mock.patch.object(command, "format", return_value="diff") as format_method: + reporter = Reporter("fake") + handler = FakeHandler() + reporter.process_output.addHandler(handler) + assert command(reporter) == 1 + assert format_method.call_count == 2 + assert 
len(handler.messages) == 2 + assert len(reporter.diagnostics) == 2 + for file_path in {"foo.py", "bar.pyi"}: + assert ( + f"--- {base_dir / file_path}\n" + f"+++ {base_dir / file_path}\n" + "@@ -0,0 +1 @@\n" + "+diff" + ) in handler.messages + assert ( + Diagnostic( + start_line=1, + end_line=1, + start_column=1, + file_path=base_dir / file_path, + diff="+diff", + ) + in reporter.diagnostics + ) + + command = FakeSingleFileFormatCommand(base_dir, FakeSource(), inplace_edit=True) + with mock.patch.object(command, "format", return_value=None) as format_method: + reporter = Reporter("fake") + handler = FakeHandler() + reporter.process_output.addHandler(handler) + assert command(reporter) == 1 + assert format_method.call_count == 2 + assert len(handler.messages) == 0 + assert len(reporter.diagnostics) == 0 + with (base_dir / "foo.py").open() as f: + assert f.read() == "" + with (base_dir / "bar.pyi").open() as f: + assert f.read() == "" + + with mock.patch.object(command, "format", return_value="diff") as format_method: + reporter = Reporter("fake") + handler = FakeHandler() + reporter.process_output.addHandler(handler) + assert command(reporter) == 0 + assert format_method.call_count == 2 + assert len(handler.messages) == 0 + assert len(reporter.diagnostics) == 0 + with (base_dir / "foo.py").open() as f: + assert f.read() == "diff" + with (base_dir / "bar.pyi").open() as f: + assert f.read() == "diff" diff --git a/tests/test_logging_utils.py b/tests/test_logging_utils.py new file mode 100644 index 0000000..ea1754f --- /dev/null +++ b/tests/test_logging_utils.py @@ -0,0 +1,153 @@ +import dataclasses +import logging +from typing import Iterator, List, Tuple +from unittest import mock + +import colorlog +import pytest + +from pysen import logging_utils + +SetupLoggerArgsType = Tuple[logging.Logger, int, logging.Handler] + + +@pytest.fixture +def mock_setup_logger() -> Iterator[List[SetupLoggerArgsType]]: + args: List[SetupLoggerArgsType] = [] + + def record(logger: logging.Logger, level: int, handler: logging.Handler) -> None: + args.append((logger, level, handler)) + + with mock.patch("pysen.logging_utils._setup_logger", side_effect=record): + yield args + + +class TargetHandler(logging.Handler): + def __init__(self) -> None: + self.calls: List[str] = [] + super().__init__() + + def emit(self, record: logging.LogRecord) -> None: + self.calls.append(record.msg) + + +def test_grouped_memory_handler() -> None: + t_handler = TargetHandler() + g_handler = logging_utils._GroupedMemoryHandler(target=t_handler) + + root_logger = logging.getLogger("test_logger") + isort_logger = logging.getLogger("test_logger.isort") + black_logger = logging.getLogger("test_logger.black") + root_logger.addHandler(g_handler) + + root_logger.error("one") + isort_logger.error("isort1") + black_logger.error("black1") + root_logger.error("two") + isort_logger.error("isort2") + black_logger.error("black2") + + g_handler.flush() + + assert t_handler.calls == ["black1", "black2", "isort1", "isort2", "one", "two"] + + formatter = logging.Formatter() + g_handler.setFormatter(formatter) + assert t_handler.formatter == formatter + + +def test__get_process_output_level_name() -> None: + assert ( + logging_utils._get_process_output_level_name(logging.INFO) + == logging_utils._PROCESS_STDOUT + ) + assert ( + logging_utils._get_process_output_level_name(logging.ERROR) + == logging_utils._PROCESS_STDERR + ) + + +@dataclasses.dataclass(frozen=True) +class ReporterLoggingUnitCondition: + is_grouped: bool + pretty: bool + + +def 
get_handler(logging_unit: logging_utils._LoggingUnit) -> logging.Handler: + logging_unit.setup() + handler = logging_utils.pysen_root_logger.handlers[0] + # finalize to remove handler for ensuing tests + logging_unit.finalize() + return handler + + +class TestReporterLoggingUnit: + def test_setup(self) -> None: + def _get_handler( + loglevel: int, is_grouped: bool, pretty: bool + ) -> logging.Handler: + return get_handler( + logging_utils._LoggingUnit( + loglevel, is_grouped, pretty, is_process_enabled=True + ) + ) + + level = logging.INFO + handler1 = _get_handler(level, True, True) + assert isinstance(handler1, logging_utils._GroupedMemoryHandler) + assert isinstance(handler1.target, logging.StreamHandler) + assert isinstance(handler1.target.formatter, colorlog.ColoredFormatter) + assert ( + handler1.target.formatter.log_colors == logging_utils._logging_output_colors + ) + + handler2 = _get_handler(level, True, False) + assert isinstance(handler2, logging_utils._GroupedMemoryHandler) + assert isinstance(handler2.target, logging.StreamHandler) + assert handler2.target.formatter is not None + assert handler2.target.formatter._fmt == "%(message)s" + + handler3 = _get_handler(level, False, True) + assert isinstance(handler3, logging.StreamHandler) + assert isinstance(handler3.formatter, colorlog.ColoredFormatter) + assert handler3.formatter.log_colors == logging_utils._logging_output_colors + + handler4 = _get_handler(level, False, False) + assert isinstance(handler4, logging.StreamHandler) + assert handler4.formatter is not None + assert handler4.formatter._fmt == "%(message)s" + + def test_setup_finalize(self) -> None: + level = logging.DEBUG + is_grouped = True + pretty = True + is_enabled = True + + root_logger = logging_utils.pysen_root_logger + unit = logging_utils._LoggingUnit(level, is_grouped, pretty, is_enabled) + + unit.setup() + assert logging_utils.reporter_root_logger.level == level + assert logging_utils.process_output_root_logger.level == logging.INFO + + assert len(root_logger.handlers) == 1 + handler = root_logger.handlers[0] + assert isinstance(handler, logging_utils._GroupedMemoryHandler) + assert isinstance(handler.target, logging.StreamHandler) + assert handler.level == level + + reporter_logger = logging_utils.get_reporter_logger("hoge") + process_logger = logging_utils.get_process_output_logger("hoge") + process2_logger = logging_utils.get_process_output_logger("fuga") + + reporter_logger.info("foo") + assert len(handler._named_records) == 1 + process_logger.info("bar") + assert len(handler._named_records) == 1 + process2_logger.info("baz") + assert len(handler._named_records) == 2 + + unit.finalize() + # the buffer should be empty because we flushed + assert len(handler._named_records) == 0 + assert len(root_logger.handlers) == 0 diff --git a/tests/test_manifest.py b/tests/test_manifest.py new file mode 100644 index 0000000..8c71218 --- /dev/null +++ b/tests/test_manifest.py @@ -0,0 +1,184 @@ +import argparse +import pathlib +from typing import Any, Callable, Dict, List, Optional, Sequence + +import pytest + +from fakes.component import FakeComponent, Operation +from pysen import dumper +from pysen.component import ComponentBase +from pysen.exceptions import InvalidComponentName +from pysen.manifest import Manifest, export_settings, get_target, get_targets +from pysen.runner_options import PathContext, RunOptions +from pysen.setting import SettingFile + +FixtureType = Callable[..., Sequence[ComponentBase]] + + +@pytest.fixture +def fake_components() -> FixtureType: + 
def create( + base_dir: Optional[pathlib.Path], + settings_dir: Optional[pathlib.Path], + ref: Optional[List[float]] = None, + ) -> Sequence[ComponentBase]: + r = ref or [1.0] + return ( + FakeComponent( + "node1", + {"op1": (2, Operation.MUL), "op2": (10, Operation.ADD)}, + base_dir, + settings_dir, + r, + ), + FakeComponent( + "node2", + {"op1": (3, Operation.MUL), "op3": (-1, Operation.MUL)}, + base_dir, + settings_dir, + r, + ), + ) + + return create + + +def test_components(fake_components: FixtureType) -> None: + components = fake_components(pathlib.Path(), pathlib.Path()) + c = Manifest(components) + assert c.components == list(components) + + +def test_default_dump_handler() -> None: + m = Manifest() + assert m._dump_handler == dumper.dump + + +def test_export_settings(fake_components: FixtureType) -> None: + base_dir = pathlib.Path("/foo/bar") + settings_dir = pathlib.Path("/settings") + paths = PathContext(base_dir, settings_dir) + components = fake_components(base_dir, settings_dir) + + dumped: Dict[str, Dict[str, Any]] = {} + + def dump(s_dir: pathlib.Path, fname: str, data: SettingFile) -> None: + assert s_dir == settings_dir + dumped[fname] = data.as_dict() + + export_settings(paths, components, dump) + assert len(dumped.keys()) == 3 + expected = { + "op1.yaml": {"node1": {"coef": 2, "op": "*"}, "node2": {"coef": 3, "op": "*"}}, + "op2.yaml": {"node1": {"coef": 10, "op": "+"}}, + "op3.yaml": {"node2": {"coef": -1, "op": "*"}}, + } + assert expected == dumped + dumped.clear() + + m = Manifest(components, dump_handler=dump) + m.export_settings(paths, argparse.Namespace(enable=None, disable=None)) + assert len(dumped.keys()) == 3 + expected = { + "op1.yaml": {"node1": {"coef": 2, "op": "*"}, "node2": {"coef": 3, "op": "*"}}, + "op2.yaml": {"node1": {"coef": 10, "op": "+"}}, + "op3.yaml": {"node2": {"coef": -1, "op": "*"}}, + } + assert expected == dumped + + dumped.clear() + # NOTE(igarashi): export_settings intentionally ignores enable and disable arguments. 
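+    # (the filters only affect which commands run; the settings files exported below always cover every component)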
+ # See manifest.py for more details + m.export_settings(paths, argparse.Namespace(enable=["node1"], disable=None)) + assert len(dumped.keys()) == 3 + expected = { + "op1.yaml": {"node1": {"coef": 2, "op": "*"}, "node2": {"coef": 3, "op": "*"}}, + "op2.yaml": {"node1": {"coef": 10, "op": "+"}}, + "op3.yaml": {"node2": {"coef": -1, "op": "*"}}, + } + assert expected == dumped + + +def test_get_targets(fake_components: FixtureType) -> None: + components = fake_components(None, None) + + targets = get_targets(components) + assert targets == {"op1": ["node1", "node2"], "op2": ["node1"], "op3": ["node2"]} + + m = Manifest(components) + targets = m.get_targets(argparse.Namespace(enable=None, disable=None)) + assert targets == {"op1": ["node1", "node2"], "op2": ["node1"], "op3": ["node2"]} + + targets = m.get_targets(argparse.Namespace(enable=None, disable=["node1"])) + assert targets == {"op1": ["node2"], "op3": ["node2"]} + + targets = m.get_targets(argparse.Namespace(enable=["node1"], disable=None)) + assert targets == {"op1": ["node1"], "op2": ["node1"]} + + with pytest.raises(InvalidComponentName) as e: + m.get_targets(argparse.Namespace(enable=["noexist", "ditto"], disable=None)) + assert ( + "The following component(s) in option --enable were not found: ditto,noexist" + in str(e) + ) + with pytest.raises(InvalidComponentName) as e: + m.get_targets(argparse.Namespace(disable=["ditto"], enable=None)) + assert ( + "The following component(s) in option --disable were not found: ditto" in str(e) + ) + + +def test_get_target(fake_components: FixtureType) -> None: + base_dir = pathlib.Path("/foo/bar") + settings_dir = pathlib.Path("/settings") + paths = PathContext(base_dir, settings_dir) + options = RunOptions() + components = fake_components(base_dir, settings_dir) + + target = get_target("op1", components, paths, options) + assert len(target) == 2 + assert {target[0].name, target[1].name} == {"* 2", "* 3"} + + assert get_target("op99", components, paths, options) == [] + + m = Manifest(components) + + # expected to be same as get_target("op1", component, base_dir) + target = m.get_target( + "op1", paths, options, argparse.Namespace(enable=None, disable=None) + ) + assert len(target) == 2 + assert {target[0].name, target[1].name} == {"* 2", "* 3"} + + target = m.get_target( + "op2", paths, options, argparse.Namespace(enable=None, disable=None) + ) + assert len(target) == 1 + assert {target[0].name} == {"+ 10"} + + assert ( + m.get_target( + "op99", paths, options, argparse.Namespace(enable=None, disable=None) + ) + == [] + ) + + # check if components are filtered by disable option + target = m.get_target( + "op1", paths, options, argparse.Namespace(enable=None, disable=["node1"]) + ) + assert len(target) == 1 + assert {target[0].name} == {"* 3"} + + assert ( + m.get_target( + "op2", paths, options, argparse.Namespace(enable=None, disable=["node1"]) + ) + == [] + ) + + target = m.get_target( + "op3", paths, options, argparse.Namespace(enable=None, disable=["node1"]) + ) + assert len(target) == 1 + assert target[0].name == "* -1" diff --git a/tests/test_manifest_builder.py b/tests/test_manifest_builder.py new file mode 100644 index 0000000..a34bddf --- /dev/null +++ b/tests/test_manifest_builder.py @@ -0,0 +1,98 @@ +import pathlib +import tempfile +from typing import List + +import pytest + +from pysen import ManifestBase +from pysen.black import Black +from pysen.component import ComponentBase +from pysen.exceptions import InvalidManifestBuilderError +from pysen.flake8 import Flake8 +from 
pysen.manifest import Manifest +from pysen.manifest_builder import _build, _build_external, build +from pysen.source import Source + +CURRENT_FILE = pathlib.Path(__file__).resolve() +BASE_DIR = CURRENT_FILE.parent + + +def get_source_from_good_builder(manifest: ManifestBase) -> Source: + assert isinstance(manifest, Manifest) + flake8 = manifest.get_component("flake8") + assert isinstance(flake8, Flake8) + return flake8.source + + +def test__build_external() -> None: + manifest = _build_external( + BASE_DIR / "fakes/configs/good_builder.py", [], CURRENT_FILE + ) + assert get_source_from_good_builder(manifest).includes.keys() == {BASE_DIR} + + test_source = Source(includes=["/hoge/fuga"]) + manifest = _build_external( + BASE_DIR / "fakes/configs/good_builder.py", + [Black(source=test_source)], + CURRENT_FILE, + ) + assert get_source_from_good_builder(manifest) == test_source + + with tempfile.TemporaryDirectory() as d: + tempdir = pathlib.Path(d) + manifest = _build_external( + BASE_DIR / "fakes/configs/good_builder.py", [], tempdir / "hoge" + ) + assert get_source_from_good_builder(manifest).includes.keys() == {tempdir} + + with pytest.raises(InvalidManifestBuilderError) as ex: + _build_external( + BASE_DIR / "fakes/configs/invalid_interface_builder.py", [], CURRENT_FILE + ) + + assert "external builder must have" in str(ex.value) + + with pytest.raises(InvalidManifestBuilderError) as ex: + _build_external( + BASE_DIR / "fakes/configs/invalid_return_builder.py", [], CURRENT_FILE + ) + + assert "instance of ManifestBase" in str(ex.value) + + +def test__build() -> None: + manifest = _build([], CURRENT_FILE) + assert isinstance(manifest, Manifest) + assert len(manifest.components) == 0 + + black = Black() + manifest = _build([black], CURRENT_FILE) + assert isinstance(manifest, Manifest) + assert manifest.components == [black] + + +def test_build() -> None: + test_source = Source(includes=["/hoge/fuga"]) + test_components: List[ComponentBase] = [Black(source=test_source)] + + manifest = build(test_components, CURRENT_FILE) + assert isinstance(manifest, Manifest) + assert manifest.components == test_components + + manifest = build( + [], + CURRENT_FILE, + external_builder=BASE_DIR / "fakes/configs/good_builder.py", + ) + assert isinstance(manifest, Manifest) + assert len(manifest.components) == 2 + assert get_source_from_good_builder(manifest).includes.keys() == {BASE_DIR} + + manifest = build( + test_components, + CURRENT_FILE, + external_builder=BASE_DIR / "fakes/configs/good_builder.py", + ) + assert isinstance(manifest, Manifest) + assert len(manifest.components) == 2 + assert get_source_from_good_builder(manifest) == test_source diff --git a/tests/test_mypy.py b/tests/test_mypy.py new file mode 100644 index 0000000..9158a3d --- /dev/null +++ b/tests/test_mypy.py @@ -0,0 +1,155 @@ +import collections +import pathlib +from typing import DefaultDict, Iterator +from unittest import mock + +import pytest + +from pysen import mypy +from pysen.mypy import _get_differences_from_base +from pysen.reporter import Reporter +from pysen.runner_options import PathContext, RunOptions +from pysen.setting import SettingFile + +BASE_DIR = pathlib.Path(__file__).resolve().parent + + +@pytest.fixture +def reporter() -> Iterator[Reporter]: + r = Reporter("") + with r: + yield r + + +def test__get_differences_from_base() -> None: + A = {"A": "a", "B": "b", "C": "c", "X": ["1", "2", "3"], "Y": ["a", "b"]} + B = {"A": "a", "C": "c2", "D": "d", "X": ["1", "2", "3"], "Y": ["a", "c"]} + + assert 
_get_differences_from_base(A, B) == {"B": "b", "C": "c", "Y": ["a", "b"]} + assert _get_differences_from_base(B, A) == {"C": "c2", "D": "d", "Y": ["a", "c"]} + + +def test_mypy_setting() -> None: + s = mypy.MypySetting.very_strict() + assert s == mypy.MypySetting.very_strict() + assert s != mypy.MypySetting.strict() + + section, settings = s.export(BASE_DIR) + assert section == ["mypy"] + assert settings["check_untyped_defs"] + assert "target_module" not in settings + + s.check_untyped_defs = False + section, settings = s.export(BASE_DIR) + assert not settings["check_untyped_defs"] + + section, settings = s.export(BASE_DIR, target_module="hoge.fuga.*") + assert section == ["mypy-hoge.fuga.*"] + + s.mypy_path = [ + "/opt/pysen/stubs", + "stubs2", + pathlib.Path("/usr/pysen/stubs3"), + pathlib.Path("stub4"), + ] + section, settings = s.export(pathlib.Path("/opt/pysen/package/python")) + assert settings["mypy_path"] == [ + "/opt/pysen/stubs", + "stubs2", + "/usr/pysen/stubs3", + "stub4", + ] + + # This option is set by pyproject loader + s._pysen_convert_abspath = True + section, settings = s.export(pathlib.Path("/opt/pysen/package/python")) + assert settings["mypy_path"] == [ + "../../stubs", + "stubs2", + "../../../../usr/pysen/stubs3", + "stub4", + ] + + +def test_settings() -> None: + m = mypy.Mypy( + setting=mypy.MypySetting.very_strict(), + module_settings={"hoge.fuga": mypy.MypySetting.strict()}, + ) + assert m.setting == mypy.MypySetting.very_strict() + assert m.module_settings == {"hoge.fuga": mypy.MypySetting.strict()} + + m = mypy.Mypy() + assert m.setting == mypy.MypySetting() + assert m.module_settings == {} + + +def test_commands(reporter: Reporter) -> None: + m = mypy.Mypy( + mypy_targets=[mypy.MypyTarget([pathlib.Path("/bar"), pathlib.Path("baz")])] + ) + expected_cmds = [ + "mypy", + "--show-absolute-path", + "--no-color-output", + "--show-column-numbers", + "--no-error-summary", + "--config-file", + "/setting/setup.cfg", + "/bar", + "/foo/baz", + ] + cmd = m.create_command( + "lint", + PathContext(pathlib.Path("/foo"), pathlib.Path("/setting")), + RunOptions(), + ) + + with mock.patch("os.chdir", return_value=None): + with mock.patch("pysen.process_utils.run", return_value=(0, "", "")) as patch: + assert cmd(reporter=reporter) == 0 + patch.assert_called_with(expected_cmds, reporter) + + +def test_export_settings() -> None: + m = mypy.Mypy( + setting=mypy.MypySetting( + mypy_path=["hoge"], + plugins=[mypy.MypyPlugin(script=BASE_DIR / pathlib.Path("foo/bar"))], + disallow_any_decorated=False, + ignore_missing_imports=False, + warn_redundant_casts=True, + follow_imports=mypy.MypyFollowImports.ERROR, + _pysen_convert_abspath=True, + ), + module_settings={ + "foo.*": mypy.MypySetting(disallow_any_decorated=True), # duplicated + "bar.baz": mypy.MypySetting( + ignore_missing_imports=True, # duplicated + disallow_any_decorated=False, # same (not emitted in exported settings) + disallow_any_unimported=False, # new + ), + }, + ) + files: DefaultDict[str, SettingFile] = collections.defaultdict(SettingFile) + m.export_settings(PathContext(BASE_DIR, BASE_DIR), files) + + assert files.keys() == {"setup.cfg"} + setting_file = files["setup.cfg"] + expected = { + "mypy": { + "disallow_any_decorated": False, + "follow_imports": "error", + "ignore_missing_imports": False, + "mypy_path": ["hoge"], + "warn_redundant_casts": True, + "plugins": ["foo/bar"], + }, + "mypy-foo.*": {"disallow_any_decorated": True}, + "mypy-bar.baz": { + "disallow_any_unimported": False, + "ignore_missing_imports": 
True, + }, + } + + assert setting_file.as_dict() == expected diff --git a/tests/test_mypy_wrapper.py b/tests/test_mypy_wrapper.py new file mode 100644 index 0000000..f5378b0 --- /dev/null +++ b/tests/test_mypy_wrapper.py @@ -0,0 +1,41 @@ +import pathlib + +import pytest + +from pysen.ext.mypy_wrapper import MypyPlugin, MypyTarget, run +from pysen.reporter import Reporter + +BASE_DIR = pathlib.Path(__file__).resolve().parent + + +def test_run_zero_source() -> None: + reporter = Reporter("mypy") + assert run(reporter, BASE_DIR, BASE_DIR, MypyTarget([]), True) == 0 + + +def test_mypy_plugin() -> None: + script_plugin = MypyPlugin(script=pathlib.Path("/foo/bar/baz")) + script_plugin2 = MypyPlugin(script=pathlib.Path("./bar/baz")) + function_plugin = MypyPlugin(function="module_x") + function_plugin2 = MypyPlugin(function="module_x:entry") + + with pytest.raises(ValueError): + MypyPlugin() + + with pytest.raises(ValueError): + MypyPlugin(script=pathlib.Path("."), function="module_y") + + assert script_plugin.as_config() == "/foo/bar/baz" + assert script_plugin2.as_config() == "bar/baz" + assert function_plugin.as_config() == "module_x" + assert function_plugin2.as_config() == "module_x:entry" + + base_dir = pathlib.Path("/foo") + assert function_plugin.as_config(base_dir) == "module_x" + assert function_plugin2.as_config(base_dir) == "module_x:entry" + assert script_plugin.as_config(base_dir) == "bar/baz" + assert script_plugin2.as_config(base_dir) == "bar/baz" + + base_dir = pathlib.Path("/hoge") + assert script_plugin.as_config(base_dir) == "../foo/bar/baz" + assert script_plugin2.as_config(base_dir) == "bar/baz" diff --git a/tests/test_path.py b/tests/test_path.py new file mode 100644 index 0000000..c41e478 --- /dev/null +++ b/tests/test_path.py @@ -0,0 +1,65 @@ +import pathlib + +from pysen.path import change_dir, get_relative_path, is_contained, is_covered + +BASE_DIR = pathlib.Path(__file__).resolve().parent + + +def test_get_relative_path() -> None: + base_dir = pathlib.Path("/opt/pysen/python/packages") + assert get_relative_path("foo/A", base_dir) == "foo/A" + assert get_relative_path(pathlib.Path("foo/A"), base_dir) == "foo/A" + + assert get_relative_path("../foo/B", base_dir) == "../foo/B" + assert get_relative_path(pathlib.Path("../foo/B"), base_dir) == "../foo/B" + + assert get_relative_path("/opt/pysen/C", base_dir) == "../../C" + assert get_relative_path(pathlib.Path("/opt/pysen/C"), base_dir) == "../../C" + + assert get_relative_path("/opt/pysen/python2/D", base_dir) == "../../python2/D" + assert ( + get_relative_path("/opt/pysen/python/packages/configs/E", base_dir) + == "configs/E" + ) + assert ( + get_relative_path("/home/user/.config/F", base_dir) + == "../../../../home/user/.config/F" + ) + + +def test_change_dir() -> None: + current = pathlib.Path.cwd() + + with change_dir(BASE_DIR / "fakes"): + assert pathlib.Path.cwd() == BASE_DIR / "fakes" + + assert pathlib.Path.cwd() == current + + +def test_is_covered() -> None: + hoge = pathlib.Path("hoge") + bar = pathlib.Path("bar") + assert is_covered(hoge, [hoge]) + assert is_covered(hoge, [hoge, bar]) + assert is_covered(hoge / "foo", [hoge]) + assert is_covered(hoge / "foo" / "bar", [hoge]) + + assert not is_covered(hoge, []) + assert not is_covered(hoge, [bar]) + assert not is_covered(hoge, [hoge / "subdir"]) + + +def test_is_contained() -> None: + foo = pathlib.Path("foo").resolve() + bar = pathlib.Path("bar") + baz = pathlib.Path("baz") + ufoo = pathlib.Path("~/foo").resolve() + ufooo = 
pathlib.Path("~/foo").expanduser().resolve() + + assert not is_contained(foo / bar, foo) + assert not is_contained(foo / bar, foo / baz / bar) + assert is_contained(foo, foo / bar) + assert is_contained(foo / bar, foo / bar / baz) + + # user expansion not supported + assert not is_contained(ufoo, ufooo) diff --git a/tests/test_plugin_loader.py b/tests/test_plugin_loader.py new file mode 100644 index 0000000..d64f7a5 --- /dev/null +++ b/tests/test_plugin_loader.py @@ -0,0 +1,76 @@ +import pathlib + +import pytest + +from pysen.plugin import PluginBase +from pysen.plugin_loader import load_from_file, load_from_module, load_plugin +from pysen.pyproject_model import Config, PluginConfig + +CURRENT_FILE = pathlib.Path(__file__).resolve() +BASE_DIR = CURRENT_FILE.parent + + +def test_load_from_file() -> None: + root = Config() + config = PluginConfig( + location="tool.pysen.plugin.hoge", script=BASE_DIR / "fakes/plugin.py" + ) + + plugin = load_from_file(BASE_DIR / "fakes/plugin.py") + assert isinstance(plugin, PluginBase) + components = plugin.load(CURRENT_FILE, config, root) + assert len(components) == 0 + + config.config = {"enable_c1": True} + components = plugin.load(CURRENT_FILE, config, root) + assert len(components) == 1 + assert components[0].name == "plugin_component1" + + +def test_load_from_module() -> None: + root = Config() + config = PluginConfig( + location="tool.pysen.plugin.hoge", function="fakes.plugin::plugin" + ) + + plugin = load_from_module("fakes.plugin::plugin") + assert isinstance(plugin, PluginBase) + components = plugin.load(CURRENT_FILE, config, root) + assert len(components) == 0 + + plugin = load_from_module("fakes.plugin::create") + assert isinstance(plugin, PluginBase) + components = plugin.load(CURRENT_FILE, config, root) + assert len(components) == 0 + + config.config = {"enable_c2": True} + components = plugin.load(CURRENT_FILE, config, root) + assert len(components) == 1 + assert components[0].name == "create_component2" + + +def test_load_plugin() -> None: + def assert_plugin_entry_point(plugin: PluginBase, entry_point: str) -> None: + root = Config() + config = PluginConfig( + location="tool.pysen.plugin.hoge", config={"enable_c1": True} + ) + components = plugin.load(CURRENT_FILE, config, root) + assert len(components) == 1 + assert components[0].name == f"{entry_point}_component1" + + plugin = load_plugin(function="fakes.plugin::create") + assert isinstance(plugin, PluginBase) + assert_plugin_entry_point(plugin, "create") + + plugin = load_plugin(script=BASE_DIR / "fakes/plugin.py") + assert isinstance(plugin, PluginBase) + assert_plugin_entry_point(plugin, "plugin") + + with pytest.raises(TypeError): + load_plugin( + function="fakes.plugin::unknown", script=BASE_DIR / "fakes/plugin.py" + ) + + with pytest.raises(TypeError): + load_plugin() diff --git a/tests/test_process_utils.py b/tests/test_process_utils.py new file mode 100644 index 0000000..fcd8fe0 --- /dev/null +++ b/tests/test_process_utils.py @@ -0,0 +1,160 @@ +import io +import logging +import os +import pathlib +import tempfile +from typing import List + +import pytest + +from pysen.process_utils import _read_stream, run +from pysen.reporter import Reporter + +SAMPLE_DATA = """BytesIO example string. + +Motivation-Driven +Learn or Die +Proud, but Humble +Boldly do what no one has done before""" + +SAMPLE_SCRIPT = """#!/bin/bash +# This string is used to check if trailing new lines are kept. 
+ +echo "Start" +echo "StartError" > /dev/stderr + +# Test sleep +sleep 0.1 + +for x in {0..99} +do + echo "out${x}" + echo "err${x}" > /dev/stderr +done + +echo "End" +echo "EndError" > /dev/stderr + +""" + + +class FakeHandler(logging.Handler): + def __init__(self) -> None: + super().__init__() + self._messages: List[str] = [] + + @property + def messages(self) -> List[str]: + return self._messages + + def emit(self, record: logging.LogRecord) -> None: + self._messages.append(record.msg) + + +class HandlerException(Exception): + pass + + +class FailingHandler(logging.Handler): + def emit(self, record: logging.LogRecord) -> None: + raise HandlerException("somebody screwded up") + + +@pytest.mark.parametrize( + "sample_str", + [SAMPLE_DATA, SAMPLE_SCRIPT], + ids=["sample", "sample_with_trailing_newlines"], +) +def test__read_stream(sample_str: str) -> None: + # NOTE(igarashi): Since BytesIO does not inherit RawIOBase but BufferedIOBase, + # we cannot instantiate BufferedReader from bytes directly. + # We use FileIOBase instead. + with tempfile.TemporaryDirectory() as td: + temp_dir = pathlib.Path(td) + temp_path = temp_dir / "file" + temp_path.write_text(sample_str) + + reporter = Reporter("foo") + handler = FakeHandler() + reporter.process_output.setLevel(logging.INFO) + reporter.process_output.handlers.clear() + reporter.process_output.addHandler(handler) + + ret = _read_stream(io.BytesIO(temp_path.read_bytes()), reporter, logging.INFO) + expected = sample_str + assert ret == expected + assert handler.messages == expected.splitlines() + + handler.messages.clear() + ret = _read_stream(io.BytesIO(temp_path.read_bytes()), reporter, logging.DEBUG) + + assert ret == expected + assert handler.messages == [] + + +def test_run() -> None: + assert os.getenv("LANG", "C") == "C", "Did you run pytest through tox?" 
+ with tempfile.TemporaryDirectory() as td: + temp_dir = pathlib.Path(td) + temp_path = temp_dir / "file" + temp_path.touch() + + reporter = Reporter("foo") + handler = FakeHandler() + reporter.process_output.setLevel(logging.INFO) + reporter.process_output.handlers.clear() + reporter.process_output.addHandler(handler) + + ret, stdout, stderr = run(["ls", str(temp_path)], reporter) + assert ret == 0 + assert "file" in stdout + assert stderr == "" + assert len(handler.messages) > 0 + + handler.messages.clear() + ret, stdout, stderr = run( + ["ls", str(temp_path)], reporter, stdout_loglevel=logging.NOTSET + ) + assert ret == 0 + assert "file" in stdout + assert stderr == "" + assert len(handler.messages) == 0 + + handler.messages.clear() + ret, stdout, stderr = run(["ls", str(temp_dir / "invalid")], reporter) + assert ret != 0 + assert stdout == "" + assert "No such file or directory" in stderr + assert len(handler.messages) > 0 + + handler.messages.clear() + ret, stdout, stderr = run( + ["ls", str(temp_dir / "invalid")], reporter, stderr_loglevel=logging.DEBUG + ) + assert ret != 0 + assert stdout == "" + assert "No such file or directory" in stderr + assert len(handler.messages) == 0 + + # check if run method can handle large output and errors + temp_path.write_text(SAMPLE_SCRIPT) + + ret, stdout, stderr = run(["bash", str(temp_path)], reporter) + + stdout_lines = stdout.splitlines() + stderr_lines = stderr.splitlines() + + assert len(stdout_lines) == 102 + assert len(stderr_lines) == 102 + + assert stdout_lines[0] == "Start" and stderr_lines[0] == "StartError" + assert stdout_lines[-1] == "End" and stderr_lines[-1] == "EndError" + + for x in range(100): + assert stdout_lines[x + 1] == f"out{x}" + assert stderr_lines[x + 1] == f"err{x}" + + # exceptions encountered in sub-threads shall be raised + reporter.process_output.addHandler(FailingHandler()) + with pytest.raises(HandlerException): + ret, stdout, stderr = run(["echo", "mashimashi"], reporter) diff --git a/tests/test_py_module.py b/tests/test_py_module.py new file mode 100644 index 0000000..b2c94cd --- /dev/null +++ b/tests/test_py_module.py @@ -0,0 +1,56 @@ +import pathlib +import sys + +import pytest + +from pysen.py_module import _parse_entry_point, load + +CURRENT_FILE = pathlib.Path(__file__).resolve() +BASE_DIR = CURRENT_FILE.parent + + +def test__parse_entry_point() -> None: + assert _parse_entry_point("hoge") is None + assert _parse_entry_point("hoge.fuga") is None + assert _parse_entry_point("hoge.fuga::piyo") == ("hoge.fuga", "piyo") + assert _parse_entry_point("hoge::piyo") == ("hoge", "piyo") + assert _parse_entry_point("::piyo") is None + assert _parse_entry_point("1::piyo") is None + assert _parse_entry_point("hoge::2") is None + assert _parse_entry_point("_hoge1._fuga2_::_piyo3_") == ( + "_hoge1._fuga2_", + "_piyo3_", + ) + assert _parse_entry_point("hoge::piyo\nfuga") is None + + +def test_load() -> None: + with pytest.raises(FileNotFoundError): + load(BASE_DIR / "hoge", "builder") + + with pytest.raises(FileNotFoundError): + load(BASE_DIR, "builder") + + module = load(BASE_DIR / "fakes/configs/good_builder.py", "foo") + assert module is not None + assert getattr(module, "build") is not None # NOQA: B009 + assert module.__name__.startswith("pysen._modules.foo_") + assert sys.modules[module.__name__] + + module = load(BASE_DIR / "fakes/configs/invalid_interface_builder.py", "bar") + assert module is not None + assert getattr(module, "build2") is not None # NOQA: B009 + assert 
module.__name__.startswith("pysen._modules.bar_") + assert sys.modules[module.__name__] + + module2 = load(BASE_DIR / "fakes/configs/invalid_return_builder.py", "foo") + assert module2 is not None + assert getattr(module2, "build") is not None # NOQA: B009 + assert sys.modules[module2.__name__] + assert module2.__name__ != module.__name__ + + # NOTE(igarashi): py_module.load currently raises the error that a loading module + # raises as is. It might be better to wrap the exception with our custom error type + # so that the one can know the error is from a module. + with pytest.raises(BufferError): + load(BASE_DIR / "fakes/configs/error_builder.py", "builder") diff --git a/tests/test_py_version.py b/tests/test_py_version.py new file mode 100644 index 0000000..40788a4 --- /dev/null +++ b/tests/test_py_version.py @@ -0,0 +1,84 @@ +import pytest + +from pysen.py_version import PythonVersion, VersionRepresentation + + +def test_python_version() -> None: + py36 = PythonVersion(3, 6) + + assert py36 == PythonVersion(3, 6) + assert py36 != PythonVersion(3, 7) + + assert py36.version == "3.6" + assert py36.full_representation == "Python3.6" + assert py36.short_representation == "py36" + + py368 = PythonVersion(3, 6, 8) + + assert py368 == PythonVersion(3, 6, 8) + assert py368 != PythonVersion(3, 6, 9) + assert py368 != py36 + + assert py368.version == "3.6.8" + assert py368.full_representation == "Python3.6.8" + assert py368.short_representation == "py36" + + +def test_version_ops() -> None: + assert VersionRepresentation(3, 6) == VersionRepresentation(3, 6, None, None) + with pytest.raises(NotImplementedError): + assert VersionRepresentation(3, 6) == "3" + + +def test_is_compatible() -> None: + assert VersionRepresentation(0, 5).is_compatible(VersionRepresentation(0, 5)) + assert VersionRepresentation(0, 5).is_compatible(VersionRepresentation(0, 5, 7)) + assert VersionRepresentation(0, 5).is_compatible(VersionRepresentation(0, 5, 1)) + assert VersionRepresentation(0, 5, 5).is_compatible(VersionRepresentation(0, 5, 4)) + assert VersionRepresentation(0, 5, 5).is_compatible(VersionRepresentation(0, 5, 6)) + assert VersionRepresentation(3, 6).is_compatible(VersionRepresentation(3, 6)) + assert not VersionRepresentation(3, 6).is_compatible(VersionRepresentation(3, 5)) + assert not VersionRepresentation(3, 6, 8).is_compatible( + VersionRepresentation(3, 5, 32) + ) + assert not VersionRepresentation(2, 6).is_compatible(VersionRepresentation(3, 0)) + assert not VersionRepresentation(4, 6).is_compatible(VersionRepresentation(3, 0)) + + +def test_version_from_str() -> None: + def check_version(s: str, expected: VersionRepresentation) -> None: + actual = VersionRepresentation.from_str(s) + assert actual == expected + assert s == str(expected) + + cases = { + ("0.601", VersionRepresentation(0, 601)), + ("3.0.8", VersionRepresentation(3, 0, 8)), + ("3.6.8a1", VersionRepresentation(3, 6, 8, "a1")), + ("3.6a1", VersionRepresentation(3, 6, None, "a1")), + ("3.6b0", VersionRepresentation(3, 6, None, "b0")), + ("3.6rc993", VersionRepresentation(3, 6, None, "rc993")), + } + for case in cases: + check_version(*case) + + with pytest.raises(ValueError): + # prelease phase must be either a, b, or rc + VersionRepresentation.from_str("3.6.8alpha1") + with pytest.raises(ValueError): + # MUST have a pre-release number + VersionRepresentation.from_str("3.6.8a") + with pytest.raises(ValueError): + # MUST NOT start with zero followed by another number + VersionRepresentation.from_str("03.1") + with pytest.raises(ValueError): + 
# MUST NOT start with zero followed by another number + VersionRepresentation.from_str("00.1") + with pytest.raises(ValueError): + # MUST have minor + VersionRepresentation.from_str("3") + with pytest.raises(ValueError): + # too many dots + VersionRepresentation.from_str("3.0.100.1") + with pytest.raises(ValueError): + VersionRepresentation.from_str("3.") diff --git a/tests/test_pyproject.py b/tests/test_pyproject.py new file mode 100644 index 0000000..51b9fe6 --- /dev/null +++ b/tests/test_pyproject.py @@ -0,0 +1,222 @@ +import dataclasses +import pathlib +from typing import Callable, Optional + +import dacite +import pytest + +from pysen import pyproject +from pysen.black import Black +from pysen.exceptions import InvalidConfigurationError +from pysen.isort import Isort, IsortSectionName +from pysen.manifest import Manifest +from pysen.mypy import Mypy, MypyFollowImports, MypyPreset +from pysen.path import change_dir +from pysen.pyproject_model import Config, LintConfig +from pysen.source import Source + +FILE_PATH = pathlib.Path(__file__).resolve() +BASE_DIR = FILE_PATH.parent +ROOT_DIR = BASE_DIR.parent + +PYPROJECT_PATH = ROOT_DIR / "pyproject.toml" +EXAMPLE_DIR = ROOT_DIR / "examples/simple_package" +EXAMPLE_PYPROJECT_PATH = EXAMPLE_DIR / "pyproject.toml" + +assert PYPROJECT_PATH.exists() +assert EXAMPLE_DIR.exists() +assert EXAMPLE_PYPROJECT_PATH.exists() + + +def test_find_pyproject() -> None: + with change_dir(BASE_DIR): + assert pyproject.find_pyproject() == PYPROJECT_PATH + + with change_dir(BASE_DIR / "fakes/configs"): + assert pyproject.find_pyproject() == PYPROJECT_PATH + + with change_dir(EXAMPLE_DIR): + assert pyproject.find_pyproject() == EXAMPLE_PYPROJECT_PATH + + with pytest.raises(FileNotFoundError): + pyproject.find_pyproject(BASE_DIR) + + with pytest.raises(FileNotFoundError): + pyproject.find_pyproject(BASE_DIR / "no_such_file") + + assert pyproject.find_pyproject(FILE_PATH) == FILE_PATH + + +def test_load_manifest() -> None: + manifest = pyproject.load_manifest(BASE_DIR / "fakes/configs/example.toml") + assert manifest is not None + assert isinstance(manifest, Manifest) + components = manifest.components + assert {x.name for x in components} == { + "black", + "isort", + "mypy", + "create_component1", + "create_component2", + } + mypy = manifest.get_component("mypy") + assert isinstance(mypy, Mypy) + assert mypy.setting.mypy_path == [BASE_DIR / "fakes/configs/pysen-stubs"] + + assert mypy.setting.plugins is not None + assert len(mypy.setting.plugins) == 2 + assert mypy.setting.plugins[0].script == BASE_DIR / "fakes/configs" / pathlib.Path( + "./sugoi/plugin" + ) + assert mypy.setting.plugins[1].function == "sugoi_plugin:entry" + + assert mypy.setting._pysen_convert_abspath + + module_settings = mypy.module_settings + assert len(module_settings) == 4 + assert module_settings.keys() == {"pysen.stubs", "pysen.proto", "apple", "banana"} + assert module_settings["pysen.stubs"].ignore_errors + assert module_settings["pysen.proto"].ignore_errors + assert not module_settings["apple"].ignore_errors + assert module_settings["banana"].ignore_errors + + entry_setting = MypyPreset.ENTRY.get_setting( + follow_imports=MypyFollowImports.SILENT + ) + entry_setting._pysen_convert_abspath = True + assert module_settings["apple"] == entry_setting + + isort = manifest.get_component("isort") + assert isinstance(isort, Isort) + assert isort.setting.default_section == IsortSectionName.THIRDPARTY + + manifest = pyproject.load_manifest(BASE_DIR / "fakes/configs/simple_source.toml") + assert 
manifest is not None + assert isinstance(manifest, Manifest) + components = manifest.components + assert len(components) == 1 + assert {x.name for x in components} == {"mypy"} + + manifest = pyproject.load_manifest(BASE_DIR / "fakes/configs/builder.toml") + assert manifest is not None + assert isinstance(manifest, Manifest) + components = manifest.components + assert len(components) == 2 + assert {x.name for x in components} == {"flake8", "isort"} + + +def test_resolve_lint_config_inheritance() -> None: + source = Source(includes=[BASE_DIR]) + success = LintConfig( + base=pathlib.Path(BASE_DIR / "fakes/configs/base.toml"), + isort_known_first_party=["alpha"], + source=source, + ) + config = pyproject.resolve_lint_config_inheritance(success) + assert config.isort_known_third_party == ["fuga", "piyo"] # override by base.toml + assert config.isort_known_first_party == ["alpha"] # override by us + assert config.line_length == 88 # override by base.toml + + failure_empty = LintConfig( + base=pathlib.Path(BASE_DIR / "fakes/configs/empty.toml"), + isort_known_first_party=["alpha"], + source=source, + ) + with pytest.raises(InvalidConfigurationError) as ex: + pyproject.resolve_lint_config_inheritance(failure_empty) + assert "doesn't have [tool.pysen.lint] section." in str(ex.value) + + +def test_load_lint_components() -> None: + source = Source(includes=[BASE_DIR]) + success = LintConfig( + base=pathlib.Path(BASE_DIR / "fakes/configs/base.toml"), + isort_known_first_party=["alpha"], + source=source, + ) + components = pyproject.load_lint_components(success) + assert len(components) == 2 + isort = next(x for x in components if isinstance(x, Isort)) # from base.toml + black = next(x for x in components if isinstance(x, Black)) # from base2.toml + assert isort is not None + assert black is not None + assert isort.setting.known_third_party == {"fuga", "piyo"} # override by base.toml + assert isort.setting.known_first_party == {"alpha"} # override by us + assert black.setting.line_length == 88 # override by base.toml + + failure_empty = LintConfig( + base=pathlib.Path(BASE_DIR / "fakes/configs/empty.toml"), + isort_known_first_party=["alpha"], + source=source, + ) + with pytest.raises(InvalidConfigurationError) as ex: + pyproject.load_lint_components(failure_empty) + assert "doesn't have [tool.pysen.lint] section." 
in str(ex.value) + + +def test_resolve_inheritance() -> None: + @dataclasses.dataclass + class _Model: + message: str + base: Optional[str] = None + + def selector(name: str) -> Callable[[pathlib.Path, Config], _Model]: + section_path = f"tool.pysen.plugin.{name}" + + def impl(path: pathlib.Path, root: Config) -> _Model: + config: Optional[_Model] = None + if root.plugin is not None: + target = next( + (x for x in root.plugin if x.location == section_path), None + ) + if target is not None and target.config is not None: + config = dacite.from_dict( + _Model, target.config, dacite.Config(strict=True) + ) + assert isinstance(config, _Model) + + if config is None: + raise RuntimeError(f"doesn't have [{section_path}] section") + + return config + + return impl + + def base_selector(path: pathlib.Path, config: _Model) -> Optional[pathlib.Path]: + if config.base is None: + return None + return BASE_DIR / "fakes/configs" / config.base + + def updater(lhs: _Model, rhs: _Model) -> _Model: + lhs.message = f"{lhs.message} / {rhs.message}" + return lhs + + config = pyproject.resolve_inheritance( + BASE_DIR / "fakes/configs/base.toml", selector("ok"), base_selector, updater + ) + assert config.message == "hello ok from base2 / hello ok from base" + + with pytest.raises(RuntimeError) as ex: + pyproject.resolve_inheritance( + BASE_DIR / "fakes/configs/base.toml", + selector("error-nosection"), + base_selector, + updater, + ) + + assert "doesn't have [tool.pysen.plugin.error-nosection]" in str(ex.value) + + with pytest.raises(FileNotFoundError): + pyproject.resolve_inheritance( + BASE_DIR / "fakes/configs/base.toml", + selector("error-nofile"), + base_selector, + updater, + ) + + base3 = BASE_DIR / "fakes/configs/base3.toml" + with pytest.raises(InvalidConfigurationError) as e: + pyproject.resolve_inheritance(base3, selector("ok"), base_selector, updater) + assert f"Circular dependency detected. {base3} was visited more than once." 
in str( + e + ) diff --git a/tests/test_pyproject_model.py b/tests/test_pyproject_model.py new file mode 100644 index 0000000..2a4475b --- /dev/null +++ b/tests/test_pyproject_model.py @@ -0,0 +1,430 @@ +import logging +import pathlib +from typing import Any, List + +import dacite +import pytest +import tomlkit +from _pytest.logging import LogCaptureFixture + +from pysen.factory import MypyModuleOption +from pysen.isort import IsortSectionName +from pysen.mypy import MypyFollowImports, MypyPreset +from pysen.py_version import PythonVersion, VersionRepresentation +from pysen.pyproject_model import ( + Config, + InvalidConfigurationError, + LintConfig, + _load_version, + _migrate_alias_fields, + _migrate_deprecated_fields, + _parse_mypy_modules, + _parse_mypy_target, + _parse_mypy_targets, + _parse_plugin_configs, + _parse_python_version, + _parse_source, + has_tool_section, + parse, +) +from pysen.source import SourceEntrySetting + +BASE_DIR = pathlib.Path(__file__).resolve().parent +CONFIG_DIR = BASE_DIR / "fakes/configs" + + +def test__parse_plugin_configs() -> None: + data_x = { + "script": "hoge.py", + } + data_y = { + "function": "foo.bar::baz", + "config": {"message": "hello"}, + } + data = { + "X": data_x, + "Y": data_y, + } + + configs = _parse_plugin_configs(BASE_DIR, data) + + assert len(configs) == 2 + assert configs[0].location == "tool.pysen.plugin.X" + assert configs[1].location == "tool.pysen.plugin.Y" + assert configs[0].script == BASE_DIR / "hoge.py" + assert configs[1].script is None + assert configs[0].function is None + assert configs[1].function == "foo.bar::baz" + + assert configs[0].config is None + assert configs[1].config == { + "message": "hello", + } + + data_invalid_path = { + "location": "donot set this section from pyproject", + "script": "hoge.py", + } + with pytest.raises(dacite.DaciteError) as ex: + _parse_plugin_configs(BASE_DIR, {"X": data_invalid_path}) + assert "unknown filed" in str(ex.value) + + data_invalid_no_plugin = { + "config": {"message": "hello"}, + } + with pytest.raises(dacite.DaciteError) as ex: + _parse_plugin_configs(BASE_DIR, {"X": data_invalid_no_plugin}) + assert "must specify" in str(ex.value) + + data_invalid_both = { + "function": "foo.bar::baz", + "script": "hoge.py", + "config": {"message": "hello"}, + } + with pytest.raises(dacite.DaciteError) as ex: + _parse_plugin_configs(BASE_DIR, {"X": data_invalid_both}) + assert "only one of" in str(ex.value) + + with pytest.raises(dacite.WrongTypeError): + _parse_plugin_configs(BASE_DIR, []) + + +def test__parse_source() -> None: + source = _parse_source(BASE_DIR, ["foo", "bar/baz"]) + assert len(source.includes) == 2 + assert source.includes.keys() == {BASE_DIR / "foo", BASE_DIR / "bar/baz"} + assert all(not x.glob for x in source.includes.values()) + + source = _parse_source( + BASE_DIR, + { + "includes": ["foo"], + "include_globs": ["bar/*.template"], + "excludes": ["hoge"], + "exclude_globs": ["fuga/*_grpc.py"], + }, + ) + includes = source.includes + assert len(includes) == 2 + assert includes.keys() == {BASE_DIR / "foo", "bar/*.template"} + assert not includes[BASE_DIR / "foo"].glob + assert includes["bar/*.template"].glob + assert includes["bar/*.template"].base_dir == BASE_DIR + + excludes = source.excludes + assert len(excludes) == 2 + assert excludes.keys() == {BASE_DIR / "hoge", "fuga/*_grpc.py"} + assert not excludes[BASE_DIR / "hoge"].glob + assert excludes["fuga/*_grpc.py"].glob + assert excludes["fuga/*_grpc.py"].base_dir == BASE_DIR + + source = _parse_source( + BASE_DIR, + 
{"excludes": ["hoge"], "exclude_globs": ["fuga/*_grpc.py"]}, + ) + + includes = source.includes + assert len(includes) == 1 + assert includes.keys() == {"."} + assert not includes["."].glob + assert includes["."].base_dir == BASE_DIR + + excludes = source.excludes + assert len(excludes) == 2 + assert excludes.keys() == {BASE_DIR / "hoge", "fuga/*_grpc.py"} + + with pytest.raises(dacite.DaciteError): + _parse_source(BASE_DIR, 1.0) + + +def test__parse_python_version() -> None: + assert _parse_python_version("py37") == PythonVersion(3, 7) + assert _parse_python_version("PY38") == PythonVersion(3, 8) + with pytest.raises(dacite.DaciteError) as ex: + _parse_python_version("PY999") + + assert "one of" in str(ex.value) # ensure that we suggest some options + + with pytest.raises(dacite.WrongTypeError): + _parse_python_version(37) + + +def test__parse_mypy_target() -> None: + base_dir = pathlib.Path("/foo") + + with pytest.raises(dacite.DaciteError): + _parse_mypy_target(base_dir, "a") + + with pytest.raises(dacite.DaciteError): + _parse_mypy_target(base_dir, {"paths": []}) + + target = _parse_mypy_target(base_dir, {"paths": ["a", "b", "/d"]}) + assert target.paths == [base_dir / "a", base_dir / "b", pathlib.Path("/d")] + + +def test__parse_mypy_targets() -> None: + base_dir = pathlib.Path("/foo") + + with pytest.raises(dacite.DaciteError) as e: + _parse_mypy_targets(base_dir, ["a", "b", "/c"]) + + base_dir = pathlib.Path("/foo") + test_data: List[Any] = [{"paths": ["x", "/y", "z"]}, {"paths": ["a", "b"]}] + targets = _parse_mypy_targets(base_dir, test_data) + assert len(targets) == 2 + assert targets[0].paths == [base_dir / "x", pathlib.Path("/y"), base_dir / "z"] + assert targets[1].paths == [base_dir / "a", base_dir / "b"] + + test_data.append("x") + with pytest.raises(dacite.DaciteError) as e: + _parse_mypy_targets(base_dir, test_data) + + assert "tool.pysen.lint.mypy_targets must be a list of dicts" in str(e.value) + + +def test__parse_mypy_modules() -> None: + with pytest.raises(dacite.WrongTypeError): + _parse_mypy_modules("x") + + assert _parse_mypy_modules({}) == {} + + modules = _parse_mypy_modules({"a": {}, "b": {}}) + assert modules.keys() == {"a", "b"} + assert not modules["a"].ignore_errors + assert not modules["b"].ignore_errors + assert modules["a"].preset is None + assert modules["b"].preset is None + + with pytest.raises(dacite.WrongTypeError): + _parse_mypy_modules({"a": "b"}) + + with pytest.raises(dacite.WrongTypeError): + _parse_mypy_modules({1: {}}) + + modules = _parse_mypy_modules( + {"a": {"preset": "entry"}, "b.c": {"ignore_errors": True}} + ) + assert modules.keys() == {"a", "b.c"} + assert not modules["a"].ignore_errors + assert modules["b.c"].ignore_errors + assert modules["a"].preset == MypyPreset.ENTRY + assert modules["b.c"].preset is None + + with pytest.raises(dacite.DaciteError) as e: + _parse_mypy_modules({"a.b.c": {"preset": "entry", "ignore_errors": True}}) + + assert "a.b.c" in str(e.value) + + +def test__migrate_alias_fields() -> None: + config = Config(lint=None) + _migrate_alias_fields(config) + assert config.lint is None + + config = Config(lint=LintConfig(mypy_ignore_packages=["X", "Y.Z.*"])) + _migrate_alias_fields(config) + assert config.lint is not None + assert config.lint.mypy_ignore_packages is None + assert config.lint.mypy_modules is not None + assert config.lint.mypy_modules.keys() == {"X", "Y.Z.*"} + assert config.lint.mypy_modules["X"].ignore_errors + assert config.lint.mypy_modules["Y.Z.*"].ignore_errors + + config = Config( + 
lint=LintConfig( + mypy_ignore_packages=["X"], mypy_modules={"X": MypyModuleOption()} + ) + ) + with pytest.raises(dacite.DaciteError) as e: + _migrate_alias_fields(config) + + assert "X is configured in both mypy_ignore_packages and mypy_modules" in str( + e.value + ) + + +def test__migrate_deprecated_fields(caplog: LogCaptureFixture) -> None: + base_dir = pathlib.Path("/foo") + setting_path = pathlib.Path("/foo/pyproject.toml") + + caplog.clear() + config = Config(lint=None) + _migrate_deprecated_fields(setting_path, config) + assert config.lint is None + assert caplog.records == [] + + config = Config( + lint=LintConfig( + mypy_target_dirs=[base_dir / "a", base_dir / "b", base_dir / "c"] + ), + ) + _migrate_deprecated_fields(setting_path, config) + assert config.lint is not None + assert config.lint.mypy_target_dirs is None + assert config.lint.mypy_targets is not None and len(config.lint.mypy_targets) == 1 + assert config.lint.mypy_targets[0].paths == [ + base_dir / "a", + base_dir / "b", + base_dir / "c", + ] + assert len(caplog.records) == 1 + record = caplog.records[0] + assert record.levelno == logging.WARNING + assert "tool.pysen.mypy_target_dirs is deprecated" in record.message + assert str(setting_path) in record.message + + +def test_example() -> None: + config = parse(CONFIG_DIR / "example.toml") + assert config is not None + + assert config.builder is None + assert config.lint is not None + lint = config.lint + assert lint.enable_black + assert not lint.enable_flake8 + assert lint.enable_isort + assert lint.enable_mypy + assert lint.line_length == 88 + assert isinstance(lint.line_length, int) + assert lint.py_version == PythonVersion(3, 7) + assert lint.isort_known_first_party == ["alpha"] + assert lint.isort_known_third_party == ["beta", "gamma"] + assert lint.isort_default_section == IsortSectionName.THIRDPARTY + assert lint.mypy_modules is not None + assert lint.mypy_modules.keys() == {"pysen.stubs", "pysen.proto", "apple", "banana"} + assert lint.mypy_modules["pysen.stubs"].ignore_errors + assert lint.mypy_modules["pysen.proto"].ignore_errors + assert not lint.mypy_modules["apple"].ignore_errors + assert lint.mypy_modules["banana"].ignore_errors + + assert lint.mypy_modules["pysen.stubs"].follow_imports == MypyFollowImports.SKIP + assert lint.mypy_modules["pysen.proto"].follow_imports == MypyFollowImports.SKIP + assert lint.mypy_modules["apple"].follow_imports == MypyFollowImports.SILENT + assert lint.mypy_modules["banana"].follow_imports is None + + assert lint.mypy_modules["apple"].preset == MypyPreset.ENTRY + assert lint.mypy_path == [CONFIG_DIR / "pysen-stubs"] + + assert lint.mypy_plugins is not None + assert len(lint.mypy_plugins) == 2 + assert lint.mypy_plugins[0].script == CONFIG_DIR / pathlib.Path("./sugoi/plugin") + assert lint.mypy_plugins[1].function == "sugoi_plugin:entry" + + assert lint.source is not None + source = lint.source + assert source.includes == { + CONFIG_DIR: SourceEntrySetting(glob=False), + CONFIG_DIR / "hoge": SourceEntrySetting(glob=False), + "**/*.template": SourceEntrySetting(glob=True, base_dir=CONFIG_DIR), + } + assert source.excludes == { + CONFIG_DIR / "fuga": SourceEntrySetting(glob=False), + "foo/*_pb2.py": SourceEntrySetting(glob=True, base_dir=CONFIG_DIR), + } + + +def test_simple_source() -> None: + config = parse(CONFIG_DIR / "simple_source.toml") + assert config is not None + + assert config.builder is None + assert config.lint is not None + lint = config.lint + assert lint.enable_mypy + assert lint.line_length == 80 + assert 
lint.py_version == PythonVersion(2, 7) + assert lint.source is not None + source = lint.source + assert source.includes == { + CONFIG_DIR: SourceEntrySetting(glob=False), + CONFIG_DIR / "hoge": SourceEntrySetting(glob=False), + CONFIG_DIR / "piyo": SourceEntrySetting(glob=False), + } + assert source.excludes == {} + + +def test_builder() -> None: + config = parse(CONFIG_DIR / "builder.toml") + assert config is not None + + assert config.lint is None + assert config.builder is not None + # check if builder is already resolved (is an abspath) + assert config.builder.is_absolute() + assert config.builder == CONFIG_DIR / "good_builder.py" + + +def test_plugin() -> None: + config = parse(CONFIG_DIR / "plugin.toml") + assert config is not None + + assert config.lint is None + assert config.plugin is not None + assert len(config.plugin) == 2 + assert config.plugin[0].location == "tool.pysen.plugin.hoge" + assert config.plugin[0].script == CONFIG_DIR / "../plugin.py" + assert config.plugin[0].function is None + assert config.plugin[0].config is None + + assert config.plugin[1].location == "tool.pysen.plugin.fuga" + assert config.plugin[1].script is None + assert config.plugin[1].function == "fakes.plugins::create" + assert config.plugin[1].config == { + "message": "hello", + "value": 10.0, + "flag": False, + } + + +def test_lint_config_update() -> None: + lhs = LintConfig( + base=pathlib.Path("hoge"), + enable_black=True, + enable_isort=True, + line_length=80, + isort_known_first_party=["hoge", "fuga"], + isort_known_third_party=["piyo"], + ) + rhs = LintConfig( + base=pathlib.Path("fuga"), + enable_black=False, + enable_flake8=True, + isort_known_first_party=["foo"], + py_version=PythonVersion(3, 8), + ) + + lhs.update(rhs) + + assert lhs.base == pathlib.Path("hoge") + assert not lhs.enable_black + assert lhs.enable_flake8 + assert lhs.enable_isort + assert not lhs.enable_mypy + assert lhs.line_length == 80 + assert lhs.isort_known_first_party == ["foo"] + assert lhs.isort_known_third_party == ["piyo"] + assert lhs.py_version == PythonVersion(3, 8) + + +def test_has_tool_section() -> None: + def _check_example_toml(tool_name: str, filename: str) -> bool: + path = CONFIG_DIR / filename + pyproject = tomlkit.loads(path.read_text()) + return has_tool_section(tool_name, pyproject) + + assert not _check_example_toml("pysen", "non_pysen_config.toml") + assert not _check_example_toml("jiro", "non_pysen_config.toml") + assert _check_example_toml("pysen", "base.toml") + assert _check_example_toml("pysen", "base2.toml") + assert _check_example_toml("pysen", "empty.toml") + assert _check_example_toml("pysen", "example.toml") + assert _check_example_toml("pysen", "builder.toml") + assert _check_example_toml("pysen", "simple_source.toml") + + +def test___load_version() -> None: + assert _load_version({}) is None + assert _load_version({"version": "0.1.2a1"}) == VersionRepresentation(0, 1, 2, "a1") + with pytest.raises(InvalidConfigurationError): + _load_version({"version": "none"}) diff --git a/tests/test_reporter.py b/tests/test_reporter.py new file mode 100644 index 0000000..2bde8df --- /dev/null +++ b/tests/test_reporter.py @@ -0,0 +1,106 @@ +import pathlib +from unittest import mock + +import pytest + +from pysen.diagnostic import Diagnostic, FLCMFormatter +from pysen.reporter import ( + _COMMAND_REPR_MAX_LENGTH, + _OMIT_REPR, + Reporter, + ReporterFactory, + _truncate_command_sequence, +) + +BASE_DIR = pathlib.Path(__file__).resolve().parent + + +def test_reporter() -> None: + r = Reporter("fuga") + + 
with pytest.raises(AssertionError): + r.success + + with pytest.raises(AssertionError): + r.elapsed_time + + with mock.patch("time.time") as m: + m.return_value = 100.0 + with r: + assert r.name == "fuga" + r.report_command("command --flag") + r.set_result(True, 0) + + m.return_value = 123.4 + + assert r.elapsed_time == pytest.approx(23.4) + assert r.success + + r = Reporter("piyo") + with r: + assert r.name == "piyo" + r.set_result(False, 128) + + assert not r.success + + +def test_reporter_report_diagnostics() -> None: + r = Reporter("foo") + d1 = Diagnostic(pathlib.Path("hoge").resolve(), 1, 2, 3, message="hoge") + d2 = Diagnostic(pathlib.Path("fuga").resolve(), 4, 5, 6, message="fuga") + d3 = Diagnostic(pathlib.Path("piyo").resolve(), 7, 8, 9, message="piyo") + + with r: + r.report_diagnostics([d1]) + assert r.diagnostics == [d1] + + r.report_diagnostics([d2, d3]) + assert r.diagnostics == [d1, d2, d3] + + +def test_reporter_factory() -> None: + factory = ReporterFactory() + assert len(factory.reporters) == 0 + + with factory.create("foo") as r: + r.set_result(True, 0) + r.report_diagnostics( + [Diagnostic(BASE_DIR / "hoge.py", 1, 2, 3, message="error")] + ) + + assert len(factory.reporters) == 1 + assert not factory.has_error() + out = factory.format_summary() + assert "foo" in out + + with factory.create("bar") as r: + r.set_result(False, 128) + + assert len(factory.reporters) == 2 + assert factory.has_error() + out = factory.format_summary() + assert "foo" in out and "bar" in out + + err_summary = factory.format_error_summary() + assert "\n - bar\n" in err_summary and "foo" not in err_summary + + out = factory.format_diagnostic_summary(FLCMFormatter) + assert f"{BASE_DIR / 'hoge.py'}:1:3:foo: error" in out + + +def test__truncate_command_sequence() -> None: + assert _truncate_command_sequence("abcde") == "abcde" + + str_a = "a" * _COMMAND_REPR_MAX_LENGTH + assert _truncate_command_sequence(str_a) == str_a + + def assert_truncated(original: str, formatted: str) -> None: + assert original != formatted + assert formatted.endswith(_OMIT_REPR) + assert len(formatted) == _COMMAND_REPR_MAX_LENGTH + + str_b = str_a + "a" + assert_truncated(str_b, _truncate_command_sequence(str_b)) + + str_c = str_a + "a" * 100 + assert_truncated(str_c, _truncate_command_sequence(str_c)) diff --git a/tests/test_runner.py b/tests/test_runner.py new file mode 100644 index 0000000..59651c4 --- /dev/null +++ b/tests/test_runner.py @@ -0,0 +1,423 @@ +import math +import pathlib +import threading +import unittest.mock +from typing import Any, Callable, List, Optional, cast + +import pytest +from _pytest.capture import CaptureFixture + +from fakes.component import FakeCommand, FakeComponent, Operation +from fakes.manifest import FakeManifest +from pysen.command import CommandBase +from pysen.exceptions import CommandNotFoundError, InvalidCommandNameError +from pysen.manifest import Manifest, ManifestBase +from pysen.reporter import Reporter, ReporterFactory +from pysen.runner import Runner, _has_side_effects, _verify_command_name +from pysen.runner_options import PathContext, RunOptions + +FAKE_PATH = pathlib.Path(__file__) +FixtureType = Callable[..., ManifestBase] + + +@pytest.fixture +def fake_manifest() -> FixtureType: + def create( + base_dir: pathlib.Path, + settings_dir: Optional[pathlib.Path], + ref: Optional[List[float]] = None, + dumped: Optional[threading.Event] = None, + ) -> ManifestBase: + r = ref or [0.0] + d = dumped or threading.Event() + + node1 = FakeComponent( + "node1", + {"op1": (2, 
Operation.MUL), "op2": (10, Operation.ADD)}, + base_dir, + settings_dir, + r, + ) + node2 = FakeComponent( + "node2", + {"op1": (3, Operation.MUL), "op3": (-1, Operation.MUL)}, + base_dir, + settings_dir, + r, + ) + + def dump(*args: Any) -> None: + d.set() + + return Manifest([node1, node2], dump_handler=dump) + + return create + + +@pytest.fixture +def fake_manifest_with_options() -> FixtureType: + def create( + base_dir: pathlib.Path, + settings_dir: Optional[pathlib.Path], + num_required: bool, + ref: Optional[List[float]] = None, + ) -> ManifestBase: + r = ref or [0.0] + options = RunOptions() + + items = { + "op1": [ + FakeCommand(2, Operation.ADD, r, options), + FakeCommand(10, Operation.MUL, r, options), + FakeCommand(-1, Operation.ADD, r, options), + ] + } + special_item = [ + FakeCommand(3, Operation.ADD, r, options), + FakeCommand(2, Operation.MUL, r, options), + ] + + ret = FakeManifest(base_dir, settings_dir, num_required, items, special_item) + return cast(ManifestBase, ret) + + return create + + +def test_export_settings(fake_manifest: FixtureType) -> None: + base_dir = pathlib.Path("/foo") + settings_dir = pathlib.Path("/bar") + dumped = threading.Event() + manifest = fake_manifest(base_dir, settings_dir, dumped=dumped) + runner = Runner(manifest) + assert not dumped.is_set() + args = runner.parse_manifest_arguments([]) + runner.export_settings(base_dir, settings_dir, args) + assert dumped.is_set() + + +def test_get_targets(fake_manifest: FixtureType) -> None: + paths = PathContext(pathlib.Path("/foo"), pathlib.Path("/bar")) + manifest = fake_manifest(paths.base_dir, paths.settings_dir) + runner = Runner(manifest) + args = runner.parse_manifest_arguments([]) + targets = runner.get_targets(args) + assert len(targets) == 3 + assert targets.keys() == {"op1", "op2", "op3"} + assert len(targets["op1"]) == 2 + assert len(targets["op2"]) == 1 + assert len(targets["op3"]) == 1 + + +def test_get_target(fake_manifest: FixtureType) -> None: + paths = PathContext(pathlib.Path("/foo"), pathlib.Path("/bar")) + options = RunOptions() + manifest = fake_manifest(paths.base_dir, paths.settings_dir) + runner = Runner(manifest) + args = runner.parse_manifest_arguments([]) + assert len(runner._get_target("op1", paths, options, args)) == 2 + assert len(runner._get_target("op2", paths, options, args)) == 1 + assert len(runner._get_target("op3", paths, options, args)) == 1 + with pytest.raises(CommandNotFoundError): + runner._get_target("op4", paths, options, args) + + +def test_parse_manifest_arguments( + fake_manifest_with_options: FixtureType, capsys: CaptureFixture +) -> None: + ref = [1.0] + manifest = fake_manifest_with_options( + pathlib.Path("/foo"), None, num_required=False, ref=ref + ) + runner = Runner(manifest) + + parsed = runner.parse_manifest_arguments([]) + assert parsed.num == 0 + assert not parsed.special + + parsed = runner.parse_manifest_arguments(["--special"]) + assert parsed.num == 0 + assert parsed.special + + parsed = runner.parse_manifest_arguments(["--special", "--num", "5"]) + assert parsed.num == 5 + assert parsed.special + + capsys.readouterr() + with pytest.raises(SystemExit): + runner.parse_manifest_arguments(["--hoge"]) + out = capsys.readouterr() + assert "error: unrecognized arguments" in out.err + + manifest = fake_manifest_with_options( + pathlib.Path("/foo"), None, num_required=True, ref=ref + ) + runner = Runner(manifest) + + capsys.readouterr() + with pytest.raises(SystemExit): + runner.parse_manifest_arguments([]) + out = capsys.readouterr() + assert "the 
following arguments are required" in out.err + + parsed = runner.parse_manifest_arguments(["--num", "2"]) + assert parsed.num == 2 + assert not parsed.special + + +def test_run_with_settings_dir(fake_manifest: FixtureType) -> None: + base_dir = pathlib.Path("/foo") + settings_dir = pathlib.Path("/settings") + options = RunOptions() + ref = [1.0] + manifest = fake_manifest(base_dir, settings_dir, ref) + runner = Runner(manifest) + reporters = ReporterFactory() + + manifest_args = runner.parse_manifest_arguments([]) + with unittest.mock.patch("pathlib.Path.mkdir") as mock: + runner.run( + "op1", + base_dir, + manifest_args, + reporters, + options, + settings_dir=settings_dir, + files=None, + ) + mock.assert_called() + assert math.isclose(ref[0], 6.0) + assert not reporters.has_error() + + +def test_run(fake_manifest: FixtureType) -> None: + base_dir = pathlib.Path("/foo") + ref = [1.0] + manifest = fake_manifest(base_dir, None, ref) + runner = Runner(manifest) + reporters = ReporterFactory() + options = RunOptions() + + manifest_args = runner.parse_manifest_arguments([]) + runner.run( + "op1", + base_dir, + manifest_args=manifest_args, + reporters=reporters, + options=options, + files=None, + ) + assert math.isclose(ref[0], 6.0) + assert not reporters.has_error() + + ref[0] = 1.0 + runner.run( + "op2", + base_dir, + manifest_args=manifest_args, + reporters=reporters, + options=options, + files=None, + ) + assert math.isclose(ref[0], 11.0) + assert not reporters.has_error() + + ref[0] = 1.0 + runner.run( + "op3", + base_dir, + manifest_args=manifest_args, + reporters=reporters, + options=options, + files=None, + ) + assert math.isclose(ref[0], -1.0) + assert reporters.has_error() + reporter = next(r for r in reporters._reporters if r.name == "* -1") + assert len(reporter.diagnostics) == 1 + + # check if options is correctly handled in Command through runner.run + reporters = ReporterFactory() + options = RunOptions(require_diagnostics=False) + ref[0] = 1.0 + runner.run( + "op3", + base_dir, + manifest_args=manifest_args, + reporters=reporters, + options=options, + files=None, + ) + assert math.isclose(ref[0], -1.0) + assert reporters.has_error() + reporter = next(r for r in reporters._reporters if r.name == "* -1") + assert len(reporter.diagnostics) == 0 + + reporters = ReporterFactory() + + with pytest.raises(CommandNotFoundError): + runner.run( + "op4", + base_dir, + manifest_args=manifest_args, + reporters=reporters, + options=options, + files=None, + ) + + ref[0] = 1.0 + manifest_args = runner.parse_manifest_arguments(["--enable", "node1"]) + runner.run( + "op1", + base_dir, + manifest_args=manifest_args, + reporters=reporters, + options=options, + files=None, + ) + assert math.isclose(ref[0], 2.0) + + +def test_run_manifest_args(fake_manifest_with_options: FixtureType) -> None: + base_dir = pathlib.Path("/foo") + ref = [1.0] + manifest = fake_manifest_with_options(base_dir, None, num_required=False, ref=ref) + runner = Runner(manifest) + reporters = ReporterFactory() + options = RunOptions() + + parsed = runner.parse_manifest_arguments([]) + runner.run( + "op1", + base_dir, + manifest_args=parsed, + reporters=reporters, + options=options, + files=None, + ) + assert math.isclose(ref[0], 29.0) + assert not reporters.has_error() + + ref[0] = 1.0 + parsed = runner.parse_manifest_arguments([]) + with pytest.raises(CommandNotFoundError): + runner.run( + "special", + base_dir, + manifest_args=parsed, + reporters=reporters, + options=options, + files=None, + ) + + ref[0] = 1.0 + parsed = 
runner.parse_manifest_arguments(["--special"]) + runner.run( + "special", + base_dir, + manifest_args=parsed, + reporters=reporters, + options=options, + files=None, + ) + assert math.isclose(ref[0], 8.0) + assert not reporters.has_error() + + ref[0] = 1.0 + parsed = runner.parse_manifest_arguments(["--num", "1", "--special"]) + runner.run( + "special", + base_dir, + manifest_args=parsed, + reporters=reporters, + options=options, + files=None, + ) + assert math.isclose(ref[0], 4.0) + assert not reporters.has_error() + + manifest = fake_manifest_with_options(base_dir, None, num_required=True, ref=ref) + runner = Runner(manifest) + + ref[0] = 1.0 + parsed = runner.parse_manifest_arguments(["--num", "1"]) + runner.run( + "op1", + base_dir, + manifest_args=parsed, + reporters=reporters, + options=options, + files=None, + ) + assert math.isclose(ref[0], 3.0) + assert not reporters.has_error() + + ref[0] = 1.0 + parsed = runner.parse_manifest_arguments(["--num", "2"]) + runner.run( + "op1", + base_dir, + manifest_args=parsed, + reporters=reporters, + options=options, + files=None, + ) + assert math.isclose(ref[0], 30.0) + assert not reporters.has_error() + + +class ValidCommand(CommandBase): + @property + def name(self) -> str: + return "valid" + + def __call__(self, reporter: Reporter) -> int: + return 0 + + +class InvalidCommand(CommandBase): + @property + def name(self) -> str: + return "my:py" + + def __call__(self, reporter: Reporter) -> int: + return 0 + + +def test__verify_command_name() -> None: + _verify_command_name(ValidCommand()) + with pytest.raises(InvalidCommandNameError): + _verify_command_name(InvalidCommand()) + + +class MockCommand(CommandBase): + @property + def name(self) -> str: + "mock" + + def __call__(self, reporter: Reporter) -> int: + return 0 + + @property + def has_side_effects(self) -> bool: + return True + + +class PurelyFunctionalCommand(CommandBase): + @property + def name(self) -> str: + "preferred_command" + + def __call__(self, reporter: Reporter) -> int: + return 0 + + @property + def has_side_effects(self) -> bool: + return False + + +def test__has_side_effects() -> None: + pfc = PurelyFunctionalCommand() + mc = MockCommand() + assert _has_side_effects([pfc, pfc, pfc, mc]) + assert _has_side_effects([mc, mc, mc, mc]) + assert not _has_side_effects([pfc, pfc, pfc, pfc]) diff --git a/tests/test_setting.py b/tests/test_setting.py new file mode 100644 index 0000000..9d1c3e0 --- /dev/null +++ b/tests/test_setting.py @@ -0,0 +1,33 @@ +import pytest +import tomlkit + +from pysen.setting import _create_dict, _traverse_toml + + +def test__create_dict() -> None: + assert _create_dict([]) == {} + assert _create_dict(["foo"]) == {"foo": {}} + assert _create_dict(["foo", "bar"]) == {"foo": {"bar": {}}} + assert _create_dict(["foo", "bar", "baz"]) == {"foo": {"bar": {"baz": {}}}} + + +def test__traverse_toml() -> None: + unordered_document = """ + [tool.poetry] + hoo=1 + [build-system] + foo=42 + [tool.pysen-cli] + bar=43 + """ + document = tomlkit.loads(unordered_document) + tool = document["tool"] + # OutOfOrderTableProxy is problematic as it is a reference and cannot be traversed. 
+ assert isinstance(tool, tomlkit.container.OutOfOrderTableProxy) + tool["hoge"] = {} + with pytest.raises(tomlkit.exceptions.NonExistentKey): + tool["hoge"] + with pytest.raises(ValueError): + _traverse_toml(("tool", "hoge", "answer"), document, False) + _traverse_toml(("tool", "hoge", "answer"), document, True) + assert document["tool"]["hoge"]["answer"] == {} diff --git a/tests/test_setuptools.py b/tests/test_setuptools.py new file mode 100644 index 0000000..895eb8c --- /dev/null +++ b/tests/test_setuptools.py @@ -0,0 +1,64 @@ +import pathlib +from unittest import mock + +import setuptools +import setuptools.command.build_py + +from pysen.setuptools import ( + _PREDEFINED_COMMAND_NAMES, + _create_setuptool_command, + _get_setuptool_command, + _get_setuptool_user_options, +) + +FAKE_PATH = pathlib.Path(__file__).resolve().parent + + +def test__get_setuptool_command() -> None: + build_py = _get_setuptool_command("build_py") + assert build_py is setuptools.command.build_py.build_py + + abstract_init_opt = setuptools.Command.initialize_options + abstract_final_opt = setuptools.Command.finalize_options + abstract_run = setuptools.Command.run + + for name in _PREDEFINED_COMMAND_NAMES: + cmd = _get_setuptool_command(name) + assert cmd is not None + assert issubclass(cmd, setuptools.Command) + assert cmd is not setuptools.Command + + # NOTE(igarashi): assert that all the predefined methods don't have any references to + # the abstract method of setuptools.Command. + assert cmd.initialize_options is not abstract_init_opt + assert cmd.finalize_options is not abstract_final_opt + assert cmd.run is not abstract_run + + cmd = _get_setuptool_command("foo") + assert cmd is not None + assert issubclass(cmd, setuptools.Command) + assert cmd is setuptools.Command + + +def test__get_setuptool_user_options() -> None: + build_py_options = _get_setuptool_user_options(setuptools.command.build_py.build_py) + assert build_py_options is not None + assert len(build_py_options) > 0 + + command_options = _get_setuptool_user_options(setuptools.Command) + assert command_options is not None + assert command_options == [] + + +def test__create_setuptool_command_inheritance() -> None: + build_py = _create_setuptool_command( + "build_py", mock.Mock(), FAKE_PATH, None, mock.Mock() + ) + assert build_py is not None + assert issubclass(build_py, setuptools.command.build_py.build_py) + assert build_py is not setuptools.command.build_py.build_py + + foo = _create_setuptool_command("foo", mock.Mock(), FAKE_PATH, None, mock.Mock()) + assert foo is not None + assert not issubclass(foo, setuptools.command.build_py.build_py) + assert issubclass(foo, setuptools.Command) diff --git a/tests/test_source.py b/tests/test_source.py new file mode 100644 index 0000000..fee79f3 --- /dev/null +++ b/tests/test_source.py @@ -0,0 +1,366 @@ +import contextlib +import pathlib +import tempfile +from typing import Iterator, Set + +import git +import pytest +import tomlkit + +from pysen.path import change_dir +from pysen.source import ( + PythonFileFilter, + Source, + SourceEntrySetting, + _resolve, + extension_filter, +) + +BASE_DIR = pathlib.Path(__file__).resolve().parent + + +def test__resolve() -> None: + current_file = pathlib.Path(__file__) + setting = SourceEntrySetting(glob=False) + assert _resolve(BASE_DIR, current_file.name, setting) == [current_file.absolute()] + assert _resolve(BASE_DIR, current_file.absolute(), setting) == [ + current_file.absolute() + ] + assert _resolve(BASE_DIR, str(current_file.name), setting) == [ + 
current_file.absolute() + ] + assert _resolve(BASE_DIR, str(current_file.absolute()), setting) == [ + current_file.absolute() + ] + + override_base = pathlib.Path("/opt/pysen") + setting = SourceEntrySetting(glob=False, base_dir=override_base) + assert _resolve(BASE_DIR, current_file.name, setting) == [ + override_base / current_file.name + ] + assert _resolve(BASE_DIR, current_file.absolute(), setting) == [ + current_file.absolute() + ] + assert _resolve(BASE_DIR, str(current_file.name), setting) == [ + override_base / current_file.name + ] + assert _resolve(BASE_DIR, str(current_file.absolute()), setting) == [ + current_file.absolute() + ] + + setting = SourceEntrySetting(glob=True) + + with pytest.raises(RuntimeError): + _resolve(BASE_DIR, pathlib.Path("**/*"), setting) + + resolved = _resolve(BASE_DIR, "**/*.py", setting) + assert len(resolved) > 1 + assert all(str(x).startswith(str(BASE_DIR)) and x.suffix == ".py" for x in resolved) + + # NOTE(igarashi): check that the glob base_dir is overridden by the setting + setting = SourceEntrySetting(glob=True, base_dir=BASE_DIR / "fakes") + resolved_with_base = _resolve(BASE_DIR, "**/*.py", setting) + assert len(resolved_with_base) > 1 + assert len(resolved) > len(resolved_with_base) + assert all( + str(x).startswith(str(BASE_DIR / "fakes")) and x.suffix == ".py" + for x in resolved_with_base + ) + + # test that _resolve doesn't raise an error even when we specify an invalid pattern + resolved = _resolve(BASE_DIR, "hoge!%&#'/+*hoge", setting) + assert len(resolved) == 0 + + # Test that _resolve doesn't raise an error even when we use a string-like object. + # This case is intended to check that _resolve converts a string-like object + # to the Python string object since pathlib.glob(x) now raises an error + # if x is not a Python string object due to sys.intern(x).
+ str_like = tomlkit.item("build") + assert isinstance(str_like, str) + assert type(str_like) is not str + resolved = _resolve(BASE_DIR, str_like, setting) + assert len(resolved) == 0 + + +def test_extension_filter() -> None: + python_file = pathlib.Path("python.py") + cython_file = pathlib.Path("cython.pyx") + stub_file = pathlib.Path("stub.pyi") + markdown_file = pathlib.Path("markdown.md") + text_file = pathlib.Path("text.txt") + c_file = pathlib.Path("source.c") + cpp_file = pathlib.Path("source.cpp") + header_file = pathlib.Path("header.h") + + source = [ + python_file, + cython_file, + stub_file, + markdown_file, + text_file, + c_file, + cpp_file, + header_file, + ] + + assert [x for x in source if PythonFileFilter(x)] == [python_file, stub_file] + + predicate = extension_filter({".cpp", ".c", ".h"}) + assert [x for x in source if predicate(x)] == [c_file, cpp_file, header_file] + + +def test_add_remove() -> None: + source = Source(includes=["hoge", "fuga"], excludes=["piyo"]) + + with pytest.raises(KeyError): + source.remove_include("foo") + with pytest.raises(KeyError): + source.remove_exclude("bar") + + source.add_include("foo") + source.add_include("foo") + source.remove_include("foo") + with pytest.raises(KeyError): + source.remove_include("foo") + + source.add_exclude("bar") + source.add_exclude("bar") + source.remove_exclude("bar") + with pytest.raises(KeyError): + source.remove_exclude("bar") + + source.remove_include("hoge") + source.remove_include("fuga") + source.remove_exclude("piyo") + + +def touch_file(path: pathlib.Path) -> None: + path.parent.mkdir(parents=True, exist_ok=True) + path.touch() + + +@contextlib.contextmanager +def create_git_repository( + tracked_files: Set[str], untracked_files: Set[str] +) -> Iterator[pathlib.Path]: + files = tracked_files.union(untracked_files) + assert len(files) == len(tracked_files) + len( + untracked_files + ) # assert intersection is empty + + with tempfile.TemporaryDirectory() as d: + base_dir = pathlib.Path(d) + with change_dir(base_dir): + repo = git.Repo.init() + for f in files: + touch_file(pathlib.Path(f)) + + repo.index.add(list(tracked_files)) + yield base_dir + + +def test_resolve_files_root_dot() -> None: + tracked_files = { + "A/0.md", + "A/1.py", + "A/stubs/0.pyi", + "A/stubs/1.md", + "A/stubs/2.md", + "0.py", + "1.md", + } + untracked_files = { + "A/2.md", + "A/3.py", + "A/stubs/3.pyi", + "A/stubs/4.md", + "A/stubs/5.md", + "2.py", + "3.md", + } + + source = Source() + source.add_include(".") + + with create_git_repository(tracked_files, untracked_files) as base_dir: + expected_tracked = { + base_dir / "A/1.py", + base_dir / "A/stubs/0.pyi", + base_dir / "0.py", + } + expected_untracked = { + base_dir / "A/3.py", + base_dir / "A/stubs/3.pyi", + base_dir / "2.py", + } + assert ( + source.resolve_files(base_dir, PythonFileFilter, use_git=True) + == expected_tracked + ) + assert source.resolve_files( + base_dir, PythonFileFilter, use_git=False + ) == expected_tracked.union(expected_untracked) + + +def test_resolve_files_include_file() -> None: + tracked_files = { + "A/0.md", + "A/1.py", + "0.py", + "1.md", + } + untracked_files = { + "A/2.md", + "A/3.py", + "2.py", + "3.md", + } + + source = Source() + source.add_include("A") + + with create_git_repository(tracked_files, untracked_files) as base_dir: + expected_tracked = { + base_dir / "A/1.py", + } + expected_untracked = { + base_dir / "A/3.py", + } + assert ( + source.resolve_files(base_dir, PythonFileFilter, use_git=True) + == expected_tracked + ) + assert 
source.resolve_files( + base_dir, PythonFileFilter, use_git=False + ) == expected_tracked.union(expected_untracked) + + source.add_include("1.md") + source.add_include("3.md") + + expected_tracked.add(base_dir / "1.md") + expected_untracked.add(base_dir / "3.md") + + assert ( + source.resolve_files(base_dir, PythonFileFilter, use_git=True) + == expected_tracked + ) + assert source.resolve_files( + base_dir, PythonFileFilter, use_git=False + ) == expected_tracked.union(expected_untracked) + + source.add_include("**/*.md", glob=True) + + expected_tracked.add(base_dir / "A/0.md") + expected_untracked.add(base_dir / "A/2.md") + + assert ( + source.resolve_files(base_dir, PythonFileFilter, use_git=True) + == expected_tracked + ) + assert source.resolve_files( + base_dir, PythonFileFilter, use_git=False + ) == expected_tracked.union(expected_untracked) + + +def test_resolve_files_exclude() -> None: + tracked_files = { + "A/1.py", + "A/third_party/1.py", + "A/third_party/nested/1.py", + "A/third_party/2.md", + "0.py", + "1.md", + } + untracked_files = { + "A/2.py", + "A/third_party/3.py", + "A/third_party/nested/2.py", + "A/third_party/4.md", + "2.py", + "3.md", + } + + source = Source() + source.add_include(".") + + with create_git_repository(tracked_files, untracked_files) as base_dir: + expected_tracked = { + base_dir / "A/1.py", + base_dir / "A/third_party/1.py", + base_dir / "A/third_party/nested/1.py", + base_dir / "0.py", + } + expected_untracked = { + base_dir / "A/2.py", + base_dir / "A/third_party/3.py", + base_dir / "A/third_party/nested/2.py", + base_dir / "2.py", + } + assert ( + source.resolve_files(base_dir, PythonFileFilter, use_git=True) + == expected_tracked + ) + assert source.resolve_files( + base_dir, PythonFileFilter, use_git=False + ) == expected_tracked.union(expected_untracked) + + source.add_exclude("0.py") + source.add_exclude("2.py") + + expected_tracked.remove(base_dir / "0.py") + expected_untracked.remove(base_dir / "2.py") + + assert ( + source.resolve_files(base_dir, PythonFileFilter, use_git=True) + == expected_tracked + ) + assert source.resolve_files( + base_dir, PythonFileFilter, use_git=False + ) == expected_tracked.union(expected_untracked) + + source.add_exclude("A/third_party/**/*.py", glob=True) + + expected_tracked = { + base_dir / "A/1.py", + } + expected_untracked = { + base_dir / "A/2.py", + } + assert ( + source.resolve_files(base_dir, PythonFileFilter, use_git=True) + == expected_tracked + ) + assert source.resolve_files( + base_dir, PythonFileFilter, use_git=False + ) == expected_tracked.union(expected_untracked) + + +def test_copy() -> None: + source = Source() + source.add_include("hoge", glob=True, base_dir=pathlib.Path("/hoge")) + source.add_include("fuga", glob=False) + source.add_exclude("piyo", glob=True, base_dir=pathlib.Path("/fuga")) + + copied = source.copy() + + assert source.includes == copied.includes + assert source.includes is not copied.includes + assert source.excludes == copied.excludes + assert source.excludes is not copied.excludes + + source.add_include("foo", glob=True) + copied.add_exclude("bar", glob=False) + + assert source.includes.keys() == {"hoge", "fuga", "foo"} + assert copied.includes.keys() == {"hoge", "fuga"} + assert copied.includes == { + "hoge": SourceEntrySetting(glob=True, base_dir=pathlib.Path("/hoge")), + "fuga": SourceEntrySetting(glob=False), + } + + assert source.excludes.keys() == {"piyo"} + assert copied.excludes.keys() == {"piyo", "bar"} + assert copied.excludes == { + "piyo": 
SourceEntrySetting(glob=True, base_dir=pathlib.Path("/fuga")), + "bar": SourceEntrySetting(glob=False), + } diff --git a/tox.ini b/tox.ini new file mode 100644 index 0000000..0ba95ec --- /dev/null +++ b/tox.ini @@ -0,0 +1,33 @@ +[tox] +envlist = py{36,37}-dacite{110,120,150}-isort43-black20, py37-dacite150-isort{43,50,51}-black{19,20}, py{38,39}-dacite150-isort51-black20, development + +[testenv] +deps = + pytest >=5.4.0,<6.0.0 + dacite110: dacite>=1.1.0,<1.2.0 + dacite120: dacite>=1.2.0,<1.3.0 + dacite150: dacite>=1.5.0,<2.0.0 + isort43: isort>=4.3.0,<5.0.0 + isort50: isort>=5.0.0,<5.1.0 + isort51: isort>=5.1.0,<5.2.0 + black19: black==19.10b0 + black20: black==20.8b1 + flake8==3.8.3 + flake8-bugbear==20.1.4 + mypy==0.782 + +extras = lint +commands = + pytest -m "not examples" + +setenv = + LANG = C + +[testenv:development] +basepython = python3.7 +deps = + pipenv==2020.11.15 +commands = + pipenv sync + pipenv run pysen run lint + pipenv run pytest