From a7a4e49cbcab29f2e0c63a938c4f1c574d46bb20 Mon Sep 17 00:00:00 2001 From: Ryan Reed Date: Tue, 5 Sep 2023 21:18:45 -0400 Subject: [PATCH 01/16] Updating flake8 precommit location --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index d692225..776d3aa 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -3,7 +3,7 @@ repos: rev: "22.6.0" hooks: - id: black -- repo: https://gitlab.com/pycqa/flake8 +- repo: https://github.com/pycqa/flake8 rev: "3.8.4" hooks: - id: flake8 -- 2.30.1 From 2a6c16673938aad7522c45cfef546affba0ac2fc Mon Sep 17 00:00:00 2001 From: Ryan Reed Date: Tue, 5 Sep 2023 21:19:21 -0400 Subject: [PATCH 02/16] Removing poetry.lock from gitignore --- .gitignore | 1 - 1 file changed, 1 deletion(-) diff --git a/.gitignore b/.gitignore index 4779690..86553d5 100644 --- a/.gitignore +++ b/.gitignore @@ -27,7 +27,6 @@ wheels/ .installed.cfg *.egg MANIFEST -poetry.lock # PyInstaller # Usually these files are written by a python script from a template -- 2.30.1 From 2919cbd9cccbbbc80269d32649d069fd1b083530 Mon Sep 17 00:00:00 2001 From: Ryan Reed Date: Tue, 5 Sep 2023 21:20:59 -0400 Subject: [PATCH 03/16] Updating dependencies --- poetry.lock | 675 +++++++++++++++++++++++++++++++++++++++++++++++++ pyproject.toml | 6 +- 2 files changed, 679 insertions(+), 2 deletions(-) create mode 100644 poetry.lock diff --git a/poetry.lock b/poetry.lock new file mode 100644 index 0000000..345be78 --- /dev/null +++ b/poetry.lock @@ -0,0 +1,675 @@ +# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. + +[[package]] +name = "black" +version = "23.3.0" +description = "The uncompromising code formatter." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"}, + {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"}, + {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"}, + {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"}, + {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"}, + {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"}, + {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"}, + {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"}, + {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"}, + {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"}, + {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"}, + {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"}, + {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"}, + {file = 
"black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"}, + {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"}, + {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"}, + {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"}, + {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"}, + {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"}, + {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"}, + {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"}, + {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"}, + {file = "black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"}, + {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"}, + {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"}, +] + +[package.dependencies] +click = ">=8.0.0" +mypy-extensions = ">=0.4.3" +packaging = ">=22.0" +pathspec = ">=0.9.0" +platformdirs = ">=2" +tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} +typed-ast = {version = ">=1.4.2", markers = "python_version < \"3.8\" and 
implementation_name == \"cpython\""} +typing-extensions = {version = ">=3.10.0.0", markers = "python_version < \"3.10\""} + +[package.extras] +colorama = ["colorama (>=0.4.3)"] +d = ["aiohttp (>=3.7.4)"] +jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] +uvloop = ["uvloop (>=0.15.2)"] + +[[package]] +name = "cfgv" +version = "3.3.1" +description = "Validate configuration and produce human readable error messages." +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "cfgv-3.3.1-py2.py3-none-any.whl", hash = "sha256:c6a0883f3917a037485059700b9e75da2464e6c27051014ad85ba6aaa5884426"}, + {file = "cfgv-3.3.1.tar.gz", hash = "sha256:f5a830efb9ce7a445376bb66ec94c638a9787422f96264c98edc6bdeed8ab736"}, +] + +[[package]] +name = "click" +version = "8.1.7" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +files = [ + {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, + {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." 
+optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "coverage" +version = "7.2.7" +description = "Code coverage measurement for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, + {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, + {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, + {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, + {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, + {file = 
"coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, + {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, + {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, + {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, + {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, + {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, + {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, + {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, + {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, + {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, + {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, + {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, + {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = 
"sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, + {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, + {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, + {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, + {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, + {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, + {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, + {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = 
"sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, + {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, + {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, + {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, + {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, + {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, + {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, + {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = "sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, + {file = "coverage-7.2.7.tar.gz", hash = 
"sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, +] + +[package.dependencies] +tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} + +[package.extras] +toml = ["tomli"] + +[[package]] +name = "distlib" +version = "0.3.7" +description = "Distribution utilities" +optional = false +python-versions = "*" +files = [ + {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, + {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.1.3" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +files = [ + {file = "exceptiongroup-1.1.3-py3-none-any.whl", hash = "sha256:343280667a4585d195ca1cf9cef84a4e178c4b6cf2274caef9859782b567d5e3"}, + {file = "exceptiongroup-1.1.3.tar.gz", hash = "sha256:097acd85d473d75af5bb98e41b61ff7fe35efe6675e4f9370ec6ec5126d160e9"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "filelock" +version = "3.12.2" +description = "A platform independent file lock." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, + {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, +] + +[package.extras] +docs = ["furo (>=2023.5.20)", "sphinx (>=7.0.1)", "sphinx-autodoc-typehints (>=1.23,!=1.23.4)"] +testing = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "diff-cover (>=7.5)", "pytest (>=7.3.1)", "pytest-cov (>=4.1)", "pytest-mock (>=3.10)", "pytest-timeout (>=2.1)"] + +[[package]] +name = "flake8" +version = "5.0.4" +description = "the modular source code checker: pep8 pyflakes and co" +optional = false +python-versions = ">=3.6.1" +files = [ + {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, + {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=1.1.0,<4.3", markers = "python_version < \"3.8\""} +mccabe = ">=0.7.0,<0.8.0" +pycodestyle = ">=2.9.0,<2.10.0" +pyflakes = ">=2.5.0,<2.6.0" + +[[package]] +name = "identify" +version = "2.5.24" +description = "File identification library for Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "identify-2.5.24-py2.py3-none-any.whl", hash = "sha256:986dbfb38b1140e763e413e6feb44cd731faf72d1909543178aa79b0e258265d"}, + {file = "identify-2.5.24.tar.gz", hash = "sha256:0aac67d5b4812498056d28a9a512a483f5085cc28640b02b258a59dac34301d4"}, +] + +[package.extras] +license = ["ukkonen"] + +[[package]] +name = "importlib-metadata" +version = "4.2.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.6" +files = [ + {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, 
+ {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, +] + +[package.dependencies] +typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} +zipp = ">=0.5" + +[package.extras] +docs = ["jaraco.packaging (>=8.2)", "rst.linker (>=1.9)", "sphinx"] +testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pep517", "pyfakefs", "pytest (>=4.6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-flake8", "pytest-mypy"] + +[[package]] +name = "iniconfig" +version = "2.0.0" +description = "brain-dead simple config-ini parsing" +optional = false +python-versions = ">=3.7" +files = [ + {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, + {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, +] + +[[package]] +name = "mccabe" +version = "0.7.0" +description = "McCabe checker, plugin for flake8" +optional = false +python-versions = ">=3.6" +files = [ + {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, + {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, +] + +[[package]] +name = "mypy-extensions" +version = "1.0.0" +description = "Type system extensions for programs checked with the mypy type checker." 
+optional = false +python-versions = ">=3.5" +files = [ + {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, + {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, +] + +[[package]] +name = "nodeenv" +version = "1.8.0" +description = "Node.js virtual environment builder" +optional = false +python-versions = ">=2.7,!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*" +files = [ + {file = "nodeenv-1.8.0-py2.py3-none-any.whl", hash = "sha256:df865724bb3c3adc86b3876fa209771517b0cfe596beff01a92700e0e8be4cec"}, + {file = "nodeenv-1.8.0.tar.gz", hash = "sha256:d51e0c37e64fbf47d017feac3145cdbb58836d7eee8c6f6d3b6880c5456227d2"}, +] + +[package.dependencies] +setuptools = "*" + +[[package]] +name = "packaging" +version = "23.1" +description = "Core utilities for Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, + {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, +] + +[[package]] +name = "pathspec" +version = "0.11.2" +description = "Utility library for gitignore style pattern matching of file paths." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, + {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, +] + +[[package]] +name = "platformdirs" +version = "2.6.2" +description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"}, + {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.4", markers = "python_version < \"3.8\""} + +[package.extras] +docs = ["furo (>=2022.12.7)", "proselint (>=0.13)", "sphinx (>=5.3)", "sphinx-autodoc-typehints (>=1.19.5)"] +test = ["appdirs (==1.4.4)", "covdefaults (>=2.2.2)", "pytest (>=7.2)", "pytest-cov (>=4)", "pytest-mock (>=3.10)"] + +[[package]] +name = "pluggy" +version = "1.2.0" +description = "plugin and hook calling mechanisms for python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, + {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, +] + +[package.dependencies] +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} + +[package.extras] +dev = ["pre-commit", "tox"] +testing = ["pytest", "pytest-benchmark"] + +[[package]] +name = "pre-commit" +version = "2.21.0" +description = "A framework for managing and maintaining multi-language pre-commit hooks." 
+optional = false +python-versions = ">=3.7" +files = [ + {file = "pre_commit-2.21.0-py2.py3-none-any.whl", hash = "sha256:e2f91727039fc39a92f58a588a25b87f936de6567eed4f0e673e0507edc75bad"}, + {file = "pre_commit-2.21.0.tar.gz", hash = "sha256:31ef31af7e474a8d8995027fefdfcf509b5c913ff31f2015b4ec4beb26a6f658"}, +] + +[package.dependencies] +cfgv = ">=2.0.0" +identify = ">=1.0.0" +importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} +nodeenv = ">=0.11.1" +pyyaml = ">=5.1" +virtualenv = ">=20.10.0" + +[[package]] +name = "pycodestyle" +version = "2.9.1" +description = "Python style guide checker" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, + {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, +] + +[[package]] +name = "pyflakes" +version = "2.5.0" +description = "passive checker of Python programs" +optional = false +python-versions = ">=3.6" +files = [ + {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, + {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, +] + +[[package]] +name = "pytest" +version = "7.4.1" +description = "pytest: simple powerful testing with Python" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-7.4.1-py3-none-any.whl", hash = "sha256:460c9a59b14e27c602eb5ece2e47bec99dc5fc5f6513cf924a7d03a578991b1f"}, + {file = "pytest-7.4.1.tar.gz", hash = "sha256:2f2301e797521b23e4d2585a0a3d7b5e50fdddaaf7e7d6773ea26ddb17c213ab"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "sys_platform == \"win32\""} +exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} +importlib-metadata = {version = ">=0.12", markers = 
"python_version < \"3.8\""} +iniconfig = "*" +packaging = "*" +pluggy = ">=0.12,<2.0" +tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} + +[package.extras] +testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] + +[[package]] +name = "pytest-cov" +version = "4.1.0" +description = "Pytest plugin for measuring coverage." +optional = false +python-versions = ">=3.7" +files = [ + {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, + {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, +] + +[package.dependencies] +coverage = {version = ">=5.2.1", extras = ["toml"]} +pytest = ">=4.6" + +[package.extras] +testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] + +[[package]] +name = "pytest-sugar" +version = "0.9.7" +description = "pytest-sugar is a plugin for pytest that changes the default look and feel of pytest (e.g. progressbar, show tests that fail instantly)." 
+optional = false +python-versions = "*" +files = [ + {file = "pytest-sugar-0.9.7.tar.gz", hash = "sha256:f1e74c1abfa55f7241cf7088032b6e378566f16b938f3f08905e2cf4494edd46"}, + {file = "pytest_sugar-0.9.7-py2.py3-none-any.whl", hash = "sha256:8cb5a4e5f8bbcd834622b0235db9e50432f4cbd71fef55b467fe44e43701e062"}, +] + +[package.dependencies] +packaging = ">=21.3" +pytest = ">=6.2.0" +termcolor = ">=2.1.0" + +[package.extras] +dev = ["black", "flake8", "pre-commit"] + +[[package]] +name = "python-dotenv" +version = "0.21.1" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.7" +files = [ + {file = "python-dotenv-0.21.1.tar.gz", hash = "sha256:1c93de8f636cde3ce377292818d0e440b6e45a82f215c3744979151fa8151c49"}, + {file = "python_dotenv-0.21.1-py3-none-any.whl", hash = "sha256:41e12e0318bebc859fcc4d97d4db8d20ad21721a6aa5047dd59f090391cb549a"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "pyyaml" +version = "6.0.1" +description = "YAML parser and emitter for Python" +optional = false +python-versions = ">=3.6" +files = [ + {file = "PyYAML-6.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d858aa552c999bc8a8d57426ed01e40bef403cd8ccdd0fc5f6f04a00414cac2a"}, + {file = "PyYAML-6.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fd66fc5d0da6d9815ba2cebeb4205f95818ff4b79c3ebe268e75d961704af52f"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:69b023b2b4daa7548bcfbd4aa3da05b3a74b772db9e23b982788168117739938"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:81e0b275a9ecc9c0c0c07b4b90ba548307583c125f54d5b6946cfee6360c733d"}, + {file = "PyYAML-6.0.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ba336e390cd8e4d1739f42dfe9bb83a3cc2e80f567d8805e11b46f4a943f5515"}, + {file = "PyYAML-6.0.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:326c013efe8048858a6d312ddd31d56e468118ad4cdeda36c719bf5bb6192290"}, + {file = "PyYAML-6.0.1-cp310-cp310-win32.whl", hash = "sha256:bd4af7373a854424dabd882decdc5579653d7868b8fb26dc7d0e99f823aa5924"}, + {file = "PyYAML-6.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:fd1592b3fdf65fff2ad0004b5e363300ef59ced41c2e6b3a99d4089fa8c5435d"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:6965a7bc3cf88e5a1c3bd2e0b5c22f8d677dc88a455344035f03399034eb3007"}, + {file = "PyYAML-6.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f003ed9ad21d6a4713f0a9b5a7a0a79e08dd0f221aff4525a2be4c346ee60aab"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:42f8152b8dbc4fe7d96729ec2b99c7097d656dc1213a3229ca5383f973a5ed6d"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:062582fca9fabdd2c8b54a3ef1c978d786e0f6b3a1510e0ac93ef59e0ddae2bc"}, + {file = "PyYAML-6.0.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d2b04aac4d386b172d5b9692e2d2da8de7bfb6c387fa4f801fbf6fb2e6ba4673"}, + {file = "PyYAML-6.0.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e7d73685e87afe9f3b36c799222440d6cf362062f78be1013661b00c5c6f678b"}, + {file = "PyYAML-6.0.1-cp311-cp311-win32.whl", hash = "sha256:1635fd110e8d85d55237ab316b5b011de701ea0f29d07611174a1b42f1444741"}, + {file = "PyYAML-6.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:bf07ee2fef7014951eeb99f56f39c9bb4af143d8aa3c21b1677805985307da34"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:855fb52b0dc35af121542a76b9a84f8d1cd886ea97c84703eaa6d88e37a2ad28"}, + {file = "PyYAML-6.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:40df9b996c2b73138957fe23a16a4f0ba614f4c0efce1e9406a184b6d07fa3a9"}, + {file = "PyYAML-6.0.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c22bec3fbe2524cde73d7ada88f6566758a8f7227bfbf93a408a9d86bcc12a0"}, 
+ {file = "PyYAML-6.0.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8d4e9c88387b0f5c7d5f281e55304de64cf7f9c0021a3525bd3b1c542da3b0e4"}, + {file = "PyYAML-6.0.1-cp312-cp312-win32.whl", hash = "sha256:d483d2cdf104e7c9fa60c544d92981f12ad66a457afae824d146093b8c294c54"}, + {file = "PyYAML-6.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:0d3304d8c0adc42be59c5f8a4d9e3d7379e6955ad754aa9d6ab7a398b59dd1df"}, + {file = "PyYAML-6.0.1-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:50550eb667afee136e9a77d6dc71ae76a44df8b3e51e41b77f6de2932bfe0f47"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1fe35611261b29bd1de0070f0b2f47cb6ff71fa6595c077e42bd0c419fa27b98"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:704219a11b772aea0d8ecd7058d0082713c3562b4e271b849ad7dc4a5c90c13c"}, + {file = "PyYAML-6.0.1-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afd7e57eddb1a54f0f1a974bc4391af8bcce0b444685d936840f125cf046d5bd"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win32.whl", hash = "sha256:fca0e3a251908a499833aa292323f32437106001d436eca0e6e7833256674585"}, + {file = "PyYAML-6.0.1-cp36-cp36m-win_amd64.whl", hash = "sha256:f22ac1c3cac4dbc50079e965eba2c1058622631e526bd9afd45fedd49ba781fa"}, + {file = "PyYAML-6.0.1-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:b1275ad35a5d18c62a7220633c913e1b42d44b46ee12554e5fd39c70a243d6a3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18aeb1bf9a78867dc38b259769503436b7c72f7a1f1f4c93ff9a17de54319b27"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:596106435fa6ad000c2991a98fa58eeb8656ef2325d7e158344fb33864ed87e3"}, + {file = "PyYAML-6.0.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:baa90d3f661d43131ca170712d903e6295d1f7a0f595074f151c0aed377c9b9c"}, + {file = 
"PyYAML-6.0.1-cp37-cp37m-win32.whl", hash = "sha256:9046c58c4395dff28dd494285c82ba00b546adfc7ef001486fbf0324bc174fba"}, + {file = "PyYAML-6.0.1-cp37-cp37m-win_amd64.whl", hash = "sha256:4fb147e7a67ef577a588a0e2c17b6db51dda102c71de36f8549b6816a96e1867"}, + {file = "PyYAML-6.0.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:1d4c7e777c441b20e32f52bd377e0c409713e8bb1386e1099c2415f26e479595"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a0cd17c15d3bb3fa06978b4e8958dcdc6e0174ccea823003a106c7d4d7899ac5"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28c119d996beec18c05208a8bd78cbe4007878c6dd15091efb73a30e90539696"}, + {file = "PyYAML-6.0.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e07cbde391ba96ab58e532ff4803f79c4129397514e1413a7dc761ccd755735"}, + {file = "PyYAML-6.0.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:49a183be227561de579b4a36efbb21b3eab9651dd81b1858589f796549873dd6"}, + {file = "PyYAML-6.0.1-cp38-cp38-win32.whl", hash = "sha256:184c5108a2aca3c5b3d3bf9395d50893a7ab82a38004c8f61c258d4428e80206"}, + {file = "PyYAML-6.0.1-cp38-cp38-win_amd64.whl", hash = "sha256:1e2722cc9fbb45d9b87631ac70924c11d3a401b2d7f410cc0e3bbf249f2dca62"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9eb6caa9a297fc2c2fb8862bc5370d0303ddba53ba97e71f08023b6cd73d16a8"}, + {file = "PyYAML-6.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:c8098ddcc2a85b61647b2590f825f3db38891662cfc2fc776415143f599bb859"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5773183b6446b2c99bb77e77595dd486303b4faab2b086e7b17bc6bef28865f6"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b786eecbdf8499b9ca1d697215862083bd6d2a99965554781d0d8d1ad31e13a0"}, + {file = "PyYAML-6.0.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:bc1bf2925a1ecd43da378f4db9e4f799775d6367bdb94671027b73b393a7c42c"}, + {file = "PyYAML-6.0.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:04ac92ad1925b2cff1db0cfebffb6ffc43457495c9b3c39d3fcae417d7125dc5"}, + {file = "PyYAML-6.0.1-cp39-cp39-win32.whl", hash = "sha256:faca3bdcf85b2fc05d06ff3fbc1f83e1391b3e724afa3feba7d13eeab355484c"}, + {file = "PyYAML-6.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:510c9deebc5c0225e8c96813043e62b680ba2f9c50a08d3724c7f28a747d1486"}, + {file = "PyYAML-6.0.1.tar.gz", hash = "sha256:bfdf460b1736c775f2ba9f6a92bca30bc2095067b8a9d77876d1fad6cc3b4a43"}, +] + +[[package]] +name = "setuptools" +version = "68.0.0" +description = "Easily download, build, install, upgrade, and uninstall Python packages" +optional = false +python-versions = ">=3.7" +files = [ + {file = "setuptools-68.0.0-py3-none-any.whl", hash = "sha256:11e52c67415a381d10d6b462ced9cfb97066179f0e871399e006c4ab101fc85f"}, + {file = "setuptools-68.0.0.tar.gz", hash = "sha256:baf1fdb41c6da4cd2eae722e135500da913332ab3f2f5c7d33af9b492acb5235"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-hoverxref (<2)", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (==0.8.3)", "sphinx-reredirects", "sphinxcontrib-towncrier"] +testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pip-run (>=8.8)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] +testing-integration = ["build[virtualenv]", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", 
"virtualenv (>=13.0.0)", "wheel"] + +[[package]] +name = "termcolor" +version = "2.3.0" +description = "ANSI color formatting for output in terminal" +optional = false +python-versions = ">=3.7" +files = [ + {file = "termcolor-2.3.0-py3-none-any.whl", hash = "sha256:3afb05607b89aed0ffe25202399ee0867ad4d3cb4180d98aaf8eefa6a5f7d475"}, + {file = "termcolor-2.3.0.tar.gz", hash = "sha256:b5b08f68937f138fe92f6c089b99f1e2da0ae56c52b78bf7075fd95420fd9a5a"}, +] + +[package.extras] +tests = ["pytest", "pytest-cov"] + +[[package]] +name = "tomli" +version = "2.0.1" +description = "A lil' TOML parser" +optional = false +python-versions = ">=3.7" +files = [ + {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, + {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, +] + +[[package]] +name = "typed-ast" +version = "1.5.5" +description = "a fork of Python 2 and 3 ast modules with type comment support" +optional = false +python-versions = ">=3.6" +files = [ + {file = "typed_ast-1.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bc1efe0ce3ffb74784e06460f01a223ac1f6ab31c6bc0376a21184bf5aabe3b"}, + {file = "typed_ast-1.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f7a8c46a8b333f71abd61d7ab9255440d4a588f34a21f126bbfc95f6049e686"}, + {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597fc66b4162f959ee6a96b978c0435bd63791e31e4f410622d19f1686d5e769"}, + {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d41b7a686ce653e06c2609075d397ebd5b969d821b9797d029fccd71fdec8e04"}, + {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5fe83a9a44c4ce67c796a1b466c270c1272e176603d5e06f6afbc101a572859d"}, + {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = 
"sha256:d5c0c112a74c0e5db2c75882a0adf3133adedcdbfd8cf7c9d6ed77365ab90a1d"}, + {file = "typed_ast-1.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:e1a976ed4cc2d71bb073e1b2a250892a6e968ff02aa14c1f40eba4f365ffec02"}, + {file = "typed_ast-1.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c631da9710271cb67b08bd3f3813b7af7f4c69c319b75475436fcab8c3d21bee"}, + {file = "typed_ast-1.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b445c2abfecab89a932b20bd8261488d574591173d07827c1eda32c457358b18"}, + {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc95ffaaab2be3b25eb938779e43f513e0e538a84dd14a5d844b8f2932593d88"}, + {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61443214d9b4c660dcf4b5307f15c12cb30bdfe9588ce6158f4a005baeb167b2"}, + {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6eb936d107e4d474940469e8ec5b380c9b329b5f08b78282d46baeebd3692dc9"}, + {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e48bf27022897577d8479eaed64701ecaf0467182448bd95759883300ca818c8"}, + {file = "typed_ast-1.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:83509f9324011c9a39faaef0922c6f720f9623afe3fe220b6d0b15638247206b"}, + {file = "typed_ast-1.5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:44f214394fc1af23ca6d4e9e744804d890045d1643dd7e8229951e0ef39429b5"}, + {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:118c1ce46ce58fda78503eae14b7664163aa735b620b64b5b725453696f2a35c"}, + {file = "typed_ast-1.5.5-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:be4919b808efa61101456e87f2d4c75b228f4e52618621c77f1ddcaae15904fa"}, + {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_aarch64.whl", hash = "sha256:fc2b8c4e1bc5cd96c1a823a885e6b158f8451cf6f5530e1829390b4d27d0807f"}, + {file = "typed_ast-1.5.5-cp36-cp36m-musllinux_1_1_x86_64.whl", hash = 
"sha256:16f7313e0a08c7de57f2998c85e2a69a642e97cb32f87eb65fbfe88381a5e44d"}, + {file = "typed_ast-1.5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:2b946ef8c04f77230489f75b4b5a4a6f24c078be4aed241cfabe9cbf4156e7e5"}, + {file = "typed_ast-1.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2188bc33d85951ea4ddad55d2b35598b2709d122c11c75cffd529fbc9965508e"}, + {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0635900d16ae133cab3b26c607586131269f88266954eb04ec31535c9a12ef1e"}, + {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57bfc3cf35a0f2fdf0a88a3044aafaec1d2f24d8ae8cd87c4f58d615fb5b6311"}, + {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:fe58ef6a764de7b4b36edfc8592641f56e69b7163bba9f9c8089838ee596bfb2"}, + {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d09d930c2d1d621f717bb217bf1fe2584616febb5138d9b3e8cdd26506c3f6d4"}, + {file = "typed_ast-1.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:d40c10326893ecab8a80a53039164a224984339b2c32a6baf55ecbd5b1df6431"}, + {file = "typed_ast-1.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fd946abf3c31fb50eee07451a6aedbfff912fcd13cf357363f5b4e834cc5e71a"}, + {file = "typed_ast-1.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ed4a1a42df8a3dfb6b40c3d2de109e935949f2f66b19703eafade03173f8f437"}, + {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:045f9930a1550d9352464e5149710d56a2aed23a2ffe78946478f7b5416f1ede"}, + {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:381eed9c95484ceef5ced626355fdc0765ab51d8553fec08661dce654a935db4"}, + {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bfd39a41c0ef6f31684daff53befddae608f9daf6957140228a08e51f312d7e6"}, + {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = 
"sha256:8c524eb3024edcc04e288db9541fe1f438f82d281e591c548903d5b77ad1ddd4"}, + {file = "typed_ast-1.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:7f58fabdde8dcbe764cef5e1a7fcb440f2463c1bbbec1cf2a86ca7bc1f95184b"}, + {file = "typed_ast-1.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:042eb665ff6bf020dd2243307d11ed626306b82812aba21836096d229fdc6a10"}, + {file = "typed_ast-1.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:622e4a006472b05cf6ef7f9f2636edc51bda670b7bbffa18d26b255269d3d814"}, + {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1efebbbf4604ad1283e963e8915daa240cb4bf5067053cf2f0baadc4d4fb51b8"}, + {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0aefdd66f1784c58f65b502b6cf8b121544680456d1cebbd300c2c813899274"}, + {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:48074261a842acf825af1968cd912f6f21357316080ebaca5f19abbb11690c8a"}, + {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:429ae404f69dc94b9361bb62291885894b7c6fb4640d561179548c849f8492ba"}, + {file = "typed_ast-1.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:335f22ccb244da2b5c296e6f96b06ee9bed46526db0de38d2f0e5a6597b81155"}, + {file = "typed_ast-1.5.5.tar.gz", hash = "sha256:94282f7a354f36ef5dbce0ef3467ebf6a258e370ab33d5b40c249fa996e590dd"}, +] + +[[package]] +name = "typing-extensions" +version = "4.7.1" +description = "Backported and Experimental Type Hints for Python 3.7+" +optional = false +python-versions = ">=3.7" +files = [ + {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, + {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, +] + +[[package]] +name = "virtualenv" +version = "20.16.2" +description = "Virtual Python Environment builder" +optional = false +python-versions = 
">=3.6" +files = [ + {file = "virtualenv-20.16.2-py2.py3-none-any.whl", hash = "sha256:635b272a8e2f77cb051946f46c60a54ace3cb5e25568228bd6b57fc70eca9ff3"}, + {file = "virtualenv-20.16.2.tar.gz", hash = "sha256:0ef5be6d07181946891f5abc8047fda8bc2f0b4b9bf222c64e6e8963baee76db"}, +] + +[package.dependencies] +distlib = ">=0.3.1,<1" +filelock = ">=3.2,<4" +importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} +platformdirs = ">=2,<3" + +[package.extras] +docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=21.3)"] +testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "packaging (>=20.0)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)"] + +[[package]] +name = "zipp" +version = "3.15.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.7" +files = [ + {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, + {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, +] + +[package.extras] +docs = ["furo", "jaraco.packaging (>=9)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +testing = ["big-O", "flake8 (<5)", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=1.3)", "pytest-flake8", "pytest-mypy (>=0.9.1)"] + +[metadata] +lock-version = "2.0" +python-versions = "^3.7" +content-hash = "94cb00f578fd77fc60ed2c971ccca28c00d9470362ac4bb858055c494e46e693" diff --git a/pyproject.toml b/pyproject.toml index 84845e9..455c406 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -10,8 +10,10 @@ readme = "README.md" python = "^3.7" 
[tool.poetry.dev-dependencies] -black = "==22.6" -flake8 = "==3.8.4" +# black = "==22.6" +# flake8 = "==3.8.4" +black = "*" +flake8 = "*" pre-commit = "*" pytest = "*" pytest-cov = "*" -- 2.30.1 From 058aeceaeec9f0d6997b0d26335e5c248d0ebc44 Mon Sep 17 00:00:00 2001 From: Ryan Reed Date: Tue, 5 Sep 2023 21:25:38 -0400 Subject: [PATCH 04/16] Minor formatting --- .pre-commit-config.yaml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 776d3aa..b55c8dd 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -2,7 +2,7 @@ repos: - repo: https://github.com/psf/black rev: "22.6.0" hooks: - - id: black + - id: black - repo: https://github.com/pycqa/flake8 rev: "3.8.4" hooks: -- 2.30.1 From 56292725d9afa0dbb0eb855931df195a3f0ba789 Mon Sep 17 00:00:00 2001 From: Ryan Reed Date: Tue, 5 Sep 2023 21:35:27 -0400 Subject: [PATCH 05/16] Initial rewrite to utilize central file --- src/transpose/__init__.py | 4 +- src/transpose/console.py | 155 ++++++++++++++------- src/transpose/transpose.py | 267 ++++++++++++++++++++++++++----------- src/transpose/utils.py | 60 --------- 4 files changed, 301 insertions(+), 185 deletions(-) diff --git a/src/transpose/__init__.py b/src/transpose/__init__.py index c8d96b7..6526dc8 100644 --- a/src/transpose/__init__.py +++ b/src/transpose/__init__.py @@ -5,8 +5,8 @@ from importlib.metadata import version from .logger import create_logger DEFAULT_XDG_PATH = os.environ.get("XDG_DATA_HOME", f"{os.environ['HOME']}/.local/share") -DEFAULT_CACHE_FILENAME = ".transpose.json" -DEFAULT_STORE_PATH = f"{DEFAULT_XDG_PATH}/transpose" +STORE_PATH = f"{DEFAULT_XDG_PATH}/transpose" +DEFAULT_STORE_PATH = os.environ.get("TRANSPOSE_STORE_PATH", STORE_PATH) version = version("transpose") diff --git a/src/transpose/console.py b/src/transpose/console.py index 9976cd9..20259c9 100644 --- a/src/transpose/console.py +++ b/src/transpose/console.py @@ -1,47 +1,63 @@ import argparse -import os 
-from transpose import Transpose, version, DEFAULT_STORE_PATH, DEFAULT_CACHE_FILENAME +from transpose import Transpose, version, DEFAULT_STORE_PATH +from .exceptions import TransposeError def entry_point() -> None: args = parse_arguments() + config_path = f"{args.store_path}/transpose.json" + + try: + run(args, config_path) + except TransposeError as e: + print(f"Tranpose Error: {e}") - t = Transpose( - target_path=args.target_path, - cache_filename=args.cache_filename, - ) + +def run(args, config_path) -> None: + t = Transpose(config_path) if args.action == "apply": - t.apply() - elif args.action == "create": - t.create(stored_path=args.stored_path) + t.apply(args.name, force=args.force) elif args.action == "restore": - t.restore() + t.restore(args.name, force=args.force) elif args.action == "store": - t.store(store_path=args.store_path, name=args.name) + t.store(args.name, args.target_path) + elif args.action == "config": + if args.config_action == "add": + t.config.add(args.name, args.path) + t.config.save(config_path) + elif args.config_action == "get": + print(t.config.get(args.name)) + elif args.config_action == "list": + for name in t.config.entries: + print(f"\t{name:<30} -> {t.config.entries[name].path}") + elif args.config_action == "remove": + t.config.remove(args.name) + t.config.save(config_path) + elif args.config_action == "update": + t.config.update(args.name, args.path) + t.config.save(config_path) def parse_arguments(args=None): - cache_filename = os.environ.get("TRANSPOSE_CACHE_FILENAME", DEFAULT_CACHE_FILENAME) - store_path = os.environ.get("TRANSPOSE_STORE_PATH", DEFAULT_STORE_PATH) - base_parser = argparse.ArgumentParser(add_help=False) - base_parser.add_argument( - "--cache-filename", - dest="cache_filename", - nargs="?", - default=cache_filename, - help="The name of the cache file added to the target directory (default: %(default)s)", - ) parser = argparse.ArgumentParser( parents=[base_parser], description=""" - Move and symlink a path for 
easier management + Move and symlink a path for easy, central management """, ) parser.add_argument("--version", action="version", version=f"Transpose {version}") + parser.add_argument( + "-s", + "--store-path", + dest="store_path", + nargs="?", + default=DEFAULT_STORE_PATH, + help="The location to store the moved entities (default: %(default)s)", + ) subparsers = parser.add_subparsers( help="Transpose Action", dest="action", required=True @@ -49,27 +65,14 @@ def parse_arguments(args=None): apply_parser = subparsers.add_parser( "apply", - help="Recreate the symlink from the cache file (useful after moving store loction)", + help="Recreate the symlink for an entity (useful after moving store locations)", parents=[base_parser], ) apply_parser.add_argument( - "target_path", - help="The path to the directory to locate the cache file", - ) - - create_parser = subparsers.add_parser( - "create", - help="Create the cache file from an already stored path. Only creates the cache file.", - parents=[base_parser], - ) - create_parser.add_argument( - "target_path", - help="The path to the directory that should by a symlink", - ) - create_parser.add_argument( - "stored_path", - help="The path that is currently stored (the target of the symlink)", + "name", + help="The name of the stored entity to apply", ) + apply_parser.add_argument("--force", dest="force", action="store_true") restore_parser = subparsers.add_parser( "restore", @@ -77,9 +80,10 @@ def parse_arguments(args=None): parents=[base_parser], ) restore_parser.add_argument( - "target_path", - help="The path to the directory to restore", + "name", + help="The name of the stored entity to restore", ) + restore_parser.add_argument("--force", dest="force", action="store_true") store_parser = subparsers.add_parser( "store", @@ -96,13 +100,68 @@ def parse_arguments(args=None): default=None, help="The name of the directory that will be created in the store path (default: target_path)", ) - store_parser.add_argument( - "-s", - 
"--store-path", - dest="store_path", - nargs="?", - default=store_path, - help="The path to where the targets should be stored (default: %(default)s)", + + config_parser = subparsers.add_parser( + "config", + help="Modify the transpose config file without any filesystem changes", + parents=[base_parser], + ) + config_subparsers = config_parser.add_subparsers( + help="Transpose Config Action", dest="config_action", required=True + ) + + config_add_parser = config_subparsers.add_parser( + "add", + help="Add an entry manually to the tranpose config", + parents=[base_parser], + ) + config_add_parser.add_argument( + "name", + help="The name of the entry in the store path", + ) + config_add_parser.add_argument( + "path", + help="The path of the directory that should be symlinked to the store", + ) + + config_get_parser = config_subparsers.add_parser( + "get", + help="Retrieve the settings of a specific entity, such as the path", + parents=[base_parser], + ) + config_get_parser.add_argument( + "name", + help="The name of the entry in the store path", + ) + + config_subparsers.add_parser( + "list", + help="List the names of all entities in the transpose config", + parents=[base_parser], + ) + + config_remove_parser = config_subparsers.add_parser( + "remove", + help="Remove an entry from the config", + parents=[base_parser], + ) + config_remove_parser.add_argument( + "name", + help="The name of the entry in the store path", + ) + + config_update_parser = config_subparsers.add_parser( + "update", + help="Update an entry of the transpose config", + parents=[base_parser], + ) + config_update_parser.add_argument( + "name", + help="The name of the entry in the store path", + ) + config_update_parser.add_argument( + "path", + help="The path of the directory that should be symlinked to the store", ) return parser.parse_args(args) diff --git a/src/transpose/transpose.py b/src/transpose/transpose.py index c359c66..15c06dc 100644 --- a/src/transpose/transpose.py +++ 
b/src/transpose/transpose.py @@ -1,111 +1,228 @@ -import pathlib +from dataclasses import asdict, dataclass, field +from pathlib import Path +# from typing import Self + +import json + +from . import version as transpose_version from .exceptions import TransposeError -from .utils import check_path, create_cache, get_cache, move, remove, symlink +from .utils import move, remove, symlink -class Transpose: - def __init__( - self, - target_path: str, - cache_filename: str = None, - ) -> None: - self.target_path = pathlib.Path(target_path) +@dataclass +class TransposeEntry: + name: str + path: str - if not cache_filename: - cache_filename = ".transpose.json" - self.cache_filename = cache_filename - self.cache_path = pathlib.Path(self.target_path).joinpath(cache_filename) - def apply(self) -> None: +@dataclass +class TransposeConfig: + entries: dict = field(default_factory=dict) + version: str = field(default=transpose_version) + + def add(self, name: str, path: str) -> None: """ - Recreate the symlink from an existing cache file + Add a new entry to the entries + + Args: + name: The name of the entry (must not exist) + path: The path where the entry originally exists + + Returns: + None """ - if not self.cache_path.exists(): - raise TransposeError( - f"Cache file does not exist indicating target is not managed by Transpose: {self.cache_path}" - ) + if self.entries.get(name): + raise TransposeError(f"'{name}' already exists") + + self.entries[name] = TransposeEntry(name=name, path=path) - cache = get_cache(self.cache_path) - original_path = pathlib.Path(cache["original_path"]).expanduser() + def get(self, name: str) -> TransposeEntry: + """ + Get an entry by the name - if original_path.is_symlink(): - remove(original_path) + Args: + name: The name of the entry (must exist) - symlink(target_path=self.cache_path.parent, symlink_path=original_path) + Returns: + TransposeEntry + """ + try: + return self.entries[name] + except KeyError: + raise TransposeError(f"'{name}' does 
not exist in Transpose config entries") - def create(self, stored_path: str) -> None: + def remove(self, name: str) -> None: """ - Create the cache file from the target directory and stored directory + Remove an entry by name - This is useful if a path is already stored somewhere else but the cache file is missing + Args: + name: The name of the entry (must exist) - Ideally, the target should be a symlink or not exist so a restore or apply can function + Returns: + None """ - stored_path = pathlib.Path(stored_path) - if not stored_path.exists(): - raise TransposeError(f"Stored path does not exist: {stored_path}") + try: + del self.entries[name] + except KeyError: + raise TransposeError(f"'{name}' does not exist in Transpose config entries") - self.cache_path = stored_path.joinpath(self.cache_filename) + def update(self, name: str, path: str) -> None: + """ + Update an entry by name - create_cache( - cache_path=self.cache_path, - original_path=self.target_path, - ) + Args: + name: The name of the entry (must exist) + path: The path where the entry originally exists - def restore(self) -> None: + Returns: + None """ - Restores a previously Transpose managed directory to it's previous location. 
+ try: + self.entries[name].path = path + except KeyError: + raise TransposeError(f"'{name}' does not exist in Transpose config entries") + + @staticmethod + def load(config_path: str): # -> Self: + in_config = json.load(open(config_path, "r")) + config = TransposeConfig() + try: + for name in in_config["entries"]: + config.add(name, in_config["entries"][name]["path"]) + except (KeyError, TypeError) as e: + raise TransposeError(f"Unrecognized Transpose config file format: {e}") + + return config + + def save(self, config_path: str) -> None: """ - if not self.cache_path.exists(): - raise TransposeError( - f"Cache file does not exist indicating target is not managed by Transpose: {self.cache_path}" - ) - if not self.target_path.exists(): - raise TransposeError(f"Target path does not exist: {self.target_path}") + Save the Config to a location in JSON format - cache = get_cache(self.cache_path) - original_path = pathlib.Path(cache["original_path"]).expanduser() + Args: + path: The path to save the json file - if original_path.is_symlink(): - remove(original_path) - elif original_path.exists(): - raise TransposeError( - f"Original path in cache file already exists: {original_path}" - ) + Returns: + None + """ + config_path = Path(config_path) + config_path.parent.mkdir(parents=True, exist_ok=True) + + with open(str(config_path), "w") as f: + json.dump(self.to_dict(), f) + + def to_dict(self) -> dict: + return asdict(self) - try: - move(source=self.target_path, destination=original_path) - except FileNotFoundError: - raise TransposeError( - f"Original path, {original_path}, does not exist. 
Use '-f' to create the path" - ) - new_cache_path = pathlib.Path(original_path).joinpath(self.cache_filename) - remove(new_cache_path) +class Transpose: + config: TransposeConfig + config_path: Path + store_path: Path + + def __init__(self, config_path: str) -> None: + self.config = TransposeConfig.load(config_path) + self.config_path = Path(config_path) + self.store_path = self.config_path.parent + + if not self.store_path.exists(): + self.store_path.mkdir(parents=True) - def store(self, store_path: str, name: str = None) -> None: + def apply(self, name: str, force: bool = False) -> None: """ - Moves a directory to a central location and creates a symlink to the old path. + Create/recreate the symlink to an existing entry + + Args: + name: The name of the entry (must exist) + force: If enabled and path already exists, move the path to '{path}-bak' + + Returns: + None """ - if name is None: - name = self.target_path.name + if not self.config.entries.get(name): + raise TransposeError(f"Entry does not exist: '{name}'") + + if self.config.entries[name].path.exists(): + if self.config.entries[name].path.is_symlink(): + remove(self.config.entries[name].path) + elif force: # Backup the existing path, just in case + move( + self.config.entries[name].path, + self.config.entries[name].path.joinpath("-bak"), + ) + else: + raise TransposeError( + f"Entry path already exists, cannot restore (force required): '{self.config.entries[name].path}'" + ) + + symlink( + target_path=self.store_path.joinpath(name), + symlink_path=self.config.entries[name].path, + ) - new_location = pathlib.Path(store_path).joinpath(name) + def restore(self, name: str, force: bool = False) -> None: + """ + Remove the symlink and move the stored entry back to it's original path + + Args: + name: The name of the entry (must exist) + force: If enabled and path already exists, move the path to '{path}-bak' + + Returns: + None + """ + if not self.config.entries.get(name): + raise TransposeError(f"Could not 
locate entry by name: '{name}'") + + if self.config.entries[name].path.exists(): + if self.config.entries[name].path.is_symlink(): + remove(self.config.entries[name].path) + elif force: # Backup the existing path, just in case + move( + self.config.entries[name].path, + self.config.entries[name].path.joinpath("-bak"), + ) + else: + raise TransposeError( + f"Entry path already exists, cannot restore (force required): '{self.config.entries[name].path}'" + ) + + move(self.store_path.joinpath(name), self.config.entries[name].path) + + self.config.remove(name) + self.config.save(self.config_path) + + def store(self, name: str, source_path: str) -> None: + """ + Move the source path to the store path, create a symlink, and update the config + + Args: + name: The name of the entry (must exist) + source_path: The directory or file to be stored - if not check_path(path=self.target_path): + Returns: + None + """ + if self.config.entries.get(name): raise TransposeError( - f"Target path, {self.target_path}, does not exist. Cannot continue." + f"Entry already exists: '{name}' ({self.config.entries[name].path})" ) - if check_path(path=new_location): + + storage_path = self.store_path.joinpath(name) + if storage_path.exists(): + raise TransposeError(f"Store path already exists: '{storage_path}'") + + source_path = self.config.entries[name].path + if not source_path.exists(): + raise TransposeError(f"Source path does not exist: '{source_path}'") + + if not source_path.is_dir() and not source_path.is_file(): raise TransposeError( - f"Store path, {new_location}, already exists. Cannot continue." 
+ f"Source path must be a directory or file: '{source_path}'" ) - create_cache( - cache_path=self.cache_path, - original_path=self.target_path, - ) + move(source=source_path, destination=storage_path) + symlink(target_path=storage_path, symlink_path=source_path) - move(source=self.target_path, destination=new_location) - symlink(target_path=new_location, symlink_path=self.target_path) + self.config.add(name, source_path) + self.config.save(self.config_path) diff --git a/src/transpose/utils.py b/src/transpose/utils.py index ef7735a..be7a115 100644 --- a/src/transpose/utils.py +++ b/src/transpose/utils.py @@ -1,66 +1,6 @@ -import json import shutil from pathlib import Path -from typing import Dict - -from . import version - - -def check_path(path: Path, is_symlink: bool = False) -> bool: - """ - Checks whether a path exists and is a directory (doesn't support single files) - - Args: - path: The location to the path being verified - is_symlink: Should this path be a symlink? - - Returns: - bool - """ - if is_symlink and not path.is_symlink(): - return False - if not is_symlink and path.is_symlink(): - return False - if not path.exists(): - return False - if not path.is_dir(): - return False - - return True - - -def create_cache(cache_path: Path, original_path: Path) -> None: - """ - Create a cache file for transpose settings in the stored directory - - Args: - cache_path: Path to store the cache file - original_path: Path where the stored directory originated - - Returns: - None - """ - template = { - "version": version, - "original_path": str(original_path.absolute()).replace(str(Path.home()), "~"), - } - - with open(str(cache_path), "w") as f: - json.dump(template, f) - - -def get_cache(cache_path: Path) -> Dict: - """ - Read a JSON cache file - - Args: - cache_path: Path to the Transpose cache file - - Returns: - dict: Cache file contents - """ - return json.load(open(cache_path, "r")) def move(source: Path, destination: Path) -> None: -- 2.30.1 From 
959554282a380a7f70dff5691196db6265232e61 Mon Sep 17 00:00:00 2001 From: Ryan Reed Date: Tue, 5 Sep 2023 21:54:26 -0400 Subject: [PATCH 06/16] Cleaning up entry references --- src/transpose/transpose.py | 34 +++++++++++++++------------------- 1 file changed, 15 insertions(+), 19 deletions(-) diff --git a/src/transpose/transpose.py b/src/transpose/transpose.py index 15c06dc..8fc4e7d 100644 --- a/src/transpose/transpose.py +++ b/src/transpose/transpose.py @@ -142,22 +142,20 @@ class Transpose: if not self.config.entries.get(name): raise TransposeError(f"Entry does not exist: '{name}'") - if self.config.entries[name].path.exists(): - if self.config.entries[name].path.is_symlink(): - remove(self.config.entries[name].path) + entry = self.config.entries[name] + if entry.path.exists(): + if entry.path.is_symlink(): + remove(entry.path) elif force: # Backup the existing path, just in case - move( - self.config.entries[name].path, - self.config.entries[name].path.joinpath("-bak"), - ) + move(entry.path, entry.path.joinpath("-bak")) else: raise TransposeError( - f"Entry path already exists, cannot restore (force required): '{self.config.entries[name].path}'" + f"Entry path already exists, cannot restore (force required): '{entry.path}'" ) symlink( target_path=self.store_path.joinpath(name), - symlink_path=self.config.entries[name].path, + symlink_path=entry.path, ) def restore(self, name: str, force: bool = False) -> None: @@ -174,20 +172,18 @@ class Transpose: if not self.config.entries.get(name): raise TransposeError(f"Could not locate entry by name: '{name}'") - if self.config.entries[name].path.exists(): - if self.config.entries[name].path.is_symlink(): - remove(self.config.entries[name].path) + entry = self.config.entries[name] + if entry.path.exists(): + if entry.path.is_symlink(): + remove(entry.path) elif force: # Backup the existing path, just in case - move( - self.config.entries[name].path, - self.config.entries[name].path.joinpath("-bak"), - ) + move(entry.path, 
entry.path.joinpath("-bak")) else: raise TransposeError( - f"Entry path already exists, cannot restore (force required): '{self.config.entries[name].path}'" + f"Entry path already exists, cannot restore (force required): '{entry.path}'" ) - move(self.store_path.joinpath(name), self.config.entries[name].path) + move(self.store_path.joinpath(name), entry.path) self.config.remove(name) self.config.save(self.config_path) @@ -205,7 +201,7 @@ class Transpose: """ if self.config.entries.get(name): raise TransposeError( - f"Entry already exists: '{name}' ({self.config.entries[name].path})" + f"Entry already exists: {name} -> {self.config.entries[name].path}" ) storage_path = self.store_path.joinpath(name) -- 2.30.1 From 7c53cf705e471c1a90851ae2eaaf58d217c5db6c Mon Sep 17 00:00:00 2001 From: Ryan Reed Date: Tue, 5 Sep 2023 21:54:41 -0400 Subject: [PATCH 07/16] Correcting source_path location on store --- src/transpose/transpose.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/transpose/transpose.py b/src/transpose/transpose.py index 8fc4e7d..c1e02b3 100644 --- a/src/transpose/transpose.py +++ b/src/transpose/transpose.py @@ -208,7 +208,7 @@ class Transpose: if storage_path.exists(): raise TransposeError(f"Store path already exists: '{storage_path}'") - source_path = self.config.entries[name].path + source_path = Path(source_path) if not source_path.exists(): raise TransposeError(f"Source path does not exist: '{source_path}'") -- 2.30.1 From f6ffe77f37ec065393af83de6b4663b6af8324db Mon Sep 17 00:00:00 2001 From: Ryan Reed Date: Wed, 6 Sep 2023 16:59:31 -0400 Subject: [PATCH 08/16] Tests: Updating console --- tests/test_console.py | 82 ++++++++++++++++++++++++++++--------------- 1 file changed, 54 insertions(+), 28 deletions(-) diff --git a/tests/test_console.py b/tests/test_console.py index a6fb113..4071c08 100644 --- a/tests/test_console.py +++ b/tests/test_console.py @@ -4,53 +4,77 @@ from transpose.console import parse_arguments def 
test_parse_arguments(): - # Missing required argument - action - with pytest.raises(SystemExit): + with pytest.raises(SystemExit): # Missing required args: action parse_arguments() args = parse_arguments( [ - "store", - "--cache-filename", - "test-cache-file.json", "--store-path", "/mnt/store", - "MyTarget", + "store", "/tmp/some/path", + "MyTarget", ] ) - assert args.cache_filename == "test-cache-file.json" assert args.store_path == "/mnt/store" def test_parse_arguments_apply(): - # Missing required argument - target_path - with pytest.raises(SystemExit): + with pytest.raises(SystemExit): # Missing required args: name args = parse_arguments(["apply"]) - args = parse_arguments(["apply", "/tmp/some/path"]) + args = parse_arguments(["apply", "SomeName"]) assert args.action == "apply" - assert args.target_path == "/tmp/some/path" + assert args.name == "SomeName" + assert args.force is False + + args = parse_arguments(["apply", "SomeName", "--force"]) + assert args.force is True + + +def test_parse_arguments_config(): + with pytest.raises(SystemExit): # Missing required args: config_action + parse_arguments(["config"]) + +def test_parse_arguments_config_add(): + with pytest.raises(SystemExit): # Missing required args: name, path + args = parse_arguments(["config", "add"]) -def test_parse_arguments_create(): - # Missing required argument - target_path store_path - with pytest.raises(SystemExit): - args = parse_arguments(["create"]) + with pytest.raises(SystemExit): # Missing required args: path + args = parse_arguments(["config", "add", "SomeName"]) - # Missing required argument - stored_path - with pytest.raises(SystemExit): - args = parse_arguments(["create", "/tmp/target_path"]) + args = parse_arguments(["config", "add", "SomeName", "/var/tmp/something"]) + assert args.config_action == "add" + assert args.name == "SomeName" + assert args.path == "/var/tmp/something" - args = parse_arguments(["create", "/tmp/target_path", "/tmp/stored_path"]) - assert args.action == 
"create" - assert args.target_path == "/tmp/target_path" - assert args.stored_path == "/tmp/stored_path" + +def test_parse_arguments_config_get(): + with pytest.raises(SystemExit): # Missing required args: name + args = parse_arguments(["config", "get"]) + + args = parse_arguments(["config", "get", "SomeName"]) + assert args.config_action == "get" + assert args.name == "SomeName" + + +def test_parse_arguments_config_list(): + args = parse_arguments(["config", "list"]) + assert args.config_action == "list" + + +def test_parse_arguments_config_remove(): + with pytest.raises(SystemExit): # Missing required args: name + args = parse_arguments(["config", "remove"]) + + args = parse_arguments(["config", "remove", "SomeName"]) + assert args.config_action == "remove" + assert args.name == "SomeName" def test_parse_arguments_store(): - # Missing required argument - target_path - with pytest.raises(SystemExit): + with pytest.raises(SystemExit): # Missing required args: target_path args = parse_arguments(["store"]) args = parse_arguments(["store", "/tmp/some/path"]) @@ -63,10 +87,12 @@ def test_parse_arguments_store(): def test_parse_arguments_restore(): - # Missing required argument - target_path - with pytest.raises(SystemExit): + with pytest.raises(SystemExit): # Missing required args: name args = parse_arguments(["restore"]) - args = parse_arguments(["restore", "/tmp/some/path"]) + args = parse_arguments(["restore", "SomeName"]) assert args.action == "restore" - assert args.target_path == "/tmp/some/path" + assert args.name == "SomeName" + + args = parse_arguments(["restore", "SomeName", "--force"]) + assert args.force is True -- 2.30.1 From d68fcd5146a5feb4f71a15d62e84cb53d2bb6868 Mon Sep 17 00:00:00 2001 From: Ryan Reed Date: Wed, 6 Sep 2023 18:38:27 -0400 Subject: [PATCH 09/16] Tests: Updating Utils --- tests/test_utils.py | 90 +++++++++--------------------------- tests/utils.py | 109 +++++++++++++++++++++----------------------- 2 files changed, 73 insertions(+), 126 
deletions(-) diff --git a/tests/test_utils.py b/tests/test_utils.py index 62a1c46..2eb5f02 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -1,87 +1,41 @@ import json import pathlib -from transpose import version, DEFAULT_CACHE_FILENAME -from transpose.utils import check_path, create_cache, get_cache, move, remove, symlink +from transpose import version +from transpose.utils import move, remove, symlink -from .utils import CACHE_FILE_CONTENTS, STORE_DIR, SYMLINK_DIR, TARGET_DIR, setup_store - - -@setup_store() -def test_check_path(): - existing_dir = pathlib.Path(TARGET_DIR) - nonexisting_dir = pathlib.Path("nonexistent") - symlink_dir = pathlib.Path(SYMLINK_DIR) - - cache_path = pathlib.Path(TARGET_DIR).joinpath(DEFAULT_CACHE_FILENAME) - - assert check_path(existing_dir) is True - assert check_path(nonexisting_dir) is False - assert check_path(symlink_dir, is_symlink=True) is True - assert check_path(symlink_dir) is False - assert check_path(existing_dir, is_symlink=True) is False - assert check_path(cache_path) is False - - -@setup_store() -def test_cache_create(): - cache_file = "test_cache_file.json" - - cache_path = pathlib.Path(TARGET_DIR).joinpath(cache_file) - original_path = pathlib.Path("/tmp/some/random/path") - - create_cache(cache_path=cache_path, original_path=original_path) - - cache = json.load(open(cache_path, "r")) - - assert cache_path.exists() - assert cache["original_path"] == str(original_path) - assert cache["version"] == version - - -@setup_store() -def test_cache_get(): - cache_path = pathlib.Path(TARGET_DIR).joinpath(DEFAULT_CACHE_FILENAME) - cache = get_cache(cache_path) - - assert cache["version"] == CACHE_FILE_CONTENTS["version"] - assert cache["original_path"] == CACHE_FILE_CONTENTS["original_path"] +from .utils import ( + TARGET_PATH, + ENTRY_STORE_PATH, + STORE_PATH, + SYMLINK_TEST_PATH, + setup_store, +) @setup_store() def test_file_move(): - source_path = pathlib.Path(TARGET_DIR) - destination_path = 
pathlib.Path(STORE_DIR) - - move(source=source_path.absolute(), destination=destination_path.absolute()) - assert not source_path.exists() - assert destination_path.exists() + destination = STORE_PATH.joinpath("test_move") + move(source=TARGET_PATH.absolute(), destination=destination.absolute()) + assert not TARGET_PATH.exists() + assert destination.exists() @setup_store() def test_file_remove(): - cache_path = pathlib.Path(TARGET_DIR).joinpath(DEFAULT_CACHE_FILENAME) - symlink_filepath = pathlib.Path(TARGET_DIR).joinpath(SYMLINK_DIR) - target_filepath = pathlib.Path(TARGET_DIR) + SYMLINK_TEST_PATH.symlink_to(ENTRY_STORE_PATH) + remove(path=TARGET_PATH) + remove(path=SYMLINK_TEST_PATH) - remove(path=cache_path) - remove(path=symlink_filepath) - remove(path=target_filepath) - - assert not cache_path.exists() # Should be able to remove files - assert not symlink_filepath.exists() # Should be able to remove symlinks - assert target_filepath.exists() # Should not be able to remove directories + assert TARGET_PATH.exists() # Should not be able to remove directories + assert not ENTRY_STORE_PATH.exists() # Should be able to remove symlinks @setup_store() def test_file_symlink(): - symlink_name = "test_link" - symlink_filepath = pathlib.Path(symlink_name) - target_filepath = pathlib.Path(TARGET_DIR) - - symlink(target_path=target_filepath, symlink_path=symlink_filepath) + symlink(target_path=TARGET_PATH, symlink_path=SYMLINK_TEST_PATH) - assert target_filepath.exists() - assert symlink_filepath.is_symlink() - assert symlink_filepath.readlink() == target_filepath.resolve() + assert TARGET_PATH.exists() + assert SYMLINK_TEST_PATH.is_symlink() + assert SYMLINK_TEST_PATH.readlink() == TARGET_PATH.resolve() diff --git a/tests/utils.py b/tests/utils.py index 898d900..b515558 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,101 +1,94 @@ -import os import json -import pathlib from contextlib import contextmanager +from pathlib import Path +from shutil import rmtree from 
tempfile import TemporaryDirectory -from transpose import DEFAULT_CACHE_FILENAME, version +from transpose import version -STORE_DIR = "store" -STORED_DIR = "my_app" # Directory already in storage -SYMLINK_DIR = "symlink_test" -TARGET_DIR = "source" +ENTRY_NAME = "MyName" +TESTS_PATH = Path("tests-temp") +STORE_PATH = TESTS_PATH.joinpath("store") +TARGET_PATH = TESTS_PATH.joinpath("source") +SYMLINK_TEST_PATH = TESTS_PATH.joinpath("symlink_test") -CACHE_FILE_CONTENTS = {"version": version, "original_path": TARGET_DIR} +ENTRY_STORE_PATH = STORE_PATH.joinpath(ENTRY_NAME) +TRANSPOSE_CONFIG_PATH = STORE_PATH.joinpath("transpose.json") + +TRANSPOSE_CONFIG = { + "version": version, + "entries": {ENTRY_NAME: {"name": ENTRY_NAME, "path": str(TARGET_PATH)}}, +} @contextmanager def setup_apply(): """ Create the following directory structure: - temp/ + tests-temp/ + ├── store/ + │ └── transpose.json ├── target/ - │ └── .transpose.json # contains {"version": version, "original_path": "source/"} └── symlink_test/ -> source/ """ - old_dir = os.getcwd() - with TemporaryDirectory("tests-temp") as td: - try: - os.chdir(td) - - os.mkdir(STORE_DIR) - os.symlink(STORE_DIR, SYMLINK_DIR) + try: + with TemporaryDirectory(str(TESTS_PATH)): + STORE_PATH.mkdir(parents=True, exist_ok=True) + TARGET_PATH.mkdir(parents=True, exist_ok=True) + SYMLINK_TEST_PATH.symlink_to(TARGET_PATH.resolve()) - target_cache_path = pathlib.Path(STORE_DIR).joinpath(DEFAULT_CACHE_FILENAME) - with open(str(target_cache_path), "w") as f: - json.dump(CACHE_FILE_CONTENTS, f) + with open(str(TRANSPOSE_CONFIG_PATH), "w") as f: + json.dump(TRANSPOSE_CONFIG, f) yield - finally: - os.chdir(old_dir) + finally: + # This shouldn't be necessary but is for some reason + rmtree(TESTS_PATH) @contextmanager def setup_restore(): """ Create the following directory structure: - temp/ - ├── source/ + tests-temp/ └── store/ - └── my_app/ - └── .transpose.json # contains {"version": version, "original_path": "source/"} + ├── MyName/ + 
└── transpose.json """ - old_dir = os.getcwd() - with TemporaryDirectory("tests-temp") as td: - try: - os.chdir(td) + try: + with TemporaryDirectory(str(TESTS_PATH)): + ENTRY_STORE_PATH.mkdir(parents=True, exist_ok=True) + TARGET_PATH.mkdir(parents=True, exist_ok=True) - os.mkdir(TARGET_DIR) - os.mkdir(STORE_DIR) - os.mkdir(f"{STORE_DIR}/{STORED_DIR}") - - target_cache_path = pathlib.Path(f"{STORE_DIR}/{STORED_DIR}").joinpath( - DEFAULT_CACHE_FILENAME - ) - with open(str(target_cache_path), "w") as f: - json.dump(CACHE_FILE_CONTENTS, f) + with open(str(TRANSPOSE_CONFIG_PATH), "w") as f: + json.dump(TRANSPOSE_CONFIG, f) yield - finally: - os.chdir(old_dir) + finally: + # This shouldn't be necessary but is for some reason + rmtree(TESTS_PATH) @contextmanager def setup_store(): """ Create the following directory structure: - temp/ + tests-temp/ ├── source/ - │ └── .transpose.json # contains {"version": version, "original_path": "source/"} └── store/ + └── transpose.json """ - old_dir = os.getcwd() - with TemporaryDirectory("tests-temp") as td: - try: - os.chdir(td) - - os.mkdir(TARGET_DIR) - os.mkdir(STORE_DIR) - os.symlink(TARGET_DIR, SYMLINK_DIR) + try: + with TemporaryDirectory(str(TESTS_PATH)): + TARGET_PATH.mkdir(parents=True, exist_ok=True) + STORE_PATH.mkdir(parents=True, exist_ok=True) - target_cache_path = pathlib.Path(TARGET_DIR).joinpath( - DEFAULT_CACHE_FILENAME - ) - with open(str(target_cache_path), "w") as f: - json.dump(CACHE_FILE_CONTENTS, f) + with open(str(TRANSPOSE_CONFIG_PATH), "w") as f: + json.dump(TRANSPOSE_CONFIG, f) yield - finally: - os.chdir(old_dir) + finally: + # This shouldn't be necessary but is for some reason + rmtree(TESTS_PATH) -- 2.30.1 From 31006b9763087cfbcceeef1b3dbb5a14156aede6 Mon Sep 17 00:00:00 2001 From: Ryan Reed Date: Wed, 6 Sep 2023 20:48:21 -0400 Subject: [PATCH 10/16] Cleaning up paths and error messages --- src/transpose/__init__.py | 2 +- src/transpose/transpose.py | 37 ++++++++++++++++--------------------- 2 files 
changed, 17 insertions(+), 22 deletions(-) diff --git a/src/transpose/__init__.py b/src/transpose/__init__.py index 6526dc8..3e467f4 100644 --- a/src/transpose/__init__.py +++ b/src/transpose/__init__.py @@ -12,4 +12,4 @@ version = version("transpose") logger = create_logger(__package__) -from .transpose import Transpose # noqa: E402 +from .transpose import Transpose, TransposeConfig, TransposeEntry # noqa: E402 diff --git a/src/transpose/transpose.py b/src/transpose/transpose.py index c1e02b3..3fea08b 100644 --- a/src/transpose/transpose.py +++ b/src/transpose/transpose.py @@ -35,7 +35,7 @@ class TransposeConfig: if self.entries.get(name): raise TransposeError(f"'{name}' already exists") - self.entries[name] = TransposeEntry(name=name, path=path) + self.entries[name] = TransposeEntry(name=name, path=str(path)) def get(self, name: str) -> TransposeEntry: """ @@ -140,22 +140,22 @@ class Transpose: None """ if not self.config.entries.get(name): - raise TransposeError(f"Entry does not exist: '{name}'") + raise TransposeError(f"Entry does not exist: '{name}'") - entry = self.config.entries[name] - if entry.path.exists(): - if entry.path.is_symlink(): - remove(entry.path) + entry_path = Path(self.config.entries[name].path) + if entry_path.exists(): + if entry_path.is_symlink(): + remove(entry_path) elif force: # Backup the existing path, just in case - move(entry.path, entry.path.joinpath("-bak")) + move(entry_path, entry_path.with_suffix(".backup")) else: raise TransposeError( - f"Entry path already exists, cannot restore (force required): '{entry.path}'" + f"Entry path already exists, cannot apply (force required): '{entry_path}'" ) symlink( target_path=self.store_path.joinpath(name), - symlink_path=entry.path, + symlink_path=entry_path, ) def restore(self, name: str, force: bool = False) -> None: @@ -172,18 +172,18 @@ class Transpose: if not self.config.entries.get(name): raise TransposeError(f"Could not locate entry by name: '{name}'") - entry = 
self.config.entries[name] - if entry.path.exists(): - if entry.path.is_symlink(): - remove(entry.path) + entry_path = Path(self.config.entries[name].path) + if entry_path.exists(): + if entry_path.is_symlink(): + remove(entry_path) elif force: # Backup the existing path, just in case - move(entry.path, entry.path.joinpath("-bak")) + move(entry_path, entry_path.with_suffix(".backup")) else: raise TransposeError( - f"Entry path already exists, cannot restore (force required): '{entry.path}'" + f"Entry path already exists, cannot restore (force required): '{entry_path}'" ) - move(self.store_path.joinpath(name), entry.path) + move(self.store_path.joinpath(name), entry_path) self.config.remove(name) self.config.save(self.config_path) @@ -212,11 +212,6 @@ class Transpose: if not source_path.exists(): raise TransposeError(f"Source path does not exist: '{source_path}'") - if not source_path.is_dir() and not source_path.is_file(): - raise TransposeError( - f"Source path must be a directory or file: '{source_path}'" - ) - move(source=source_path, destination=storage_path) symlink(target_path=storage_path, symlink_path=source_path) -- 2.30.1 From 67448a36e80da09347a156d666232fafdf959485 Mon Sep 17 00:00:00 2001 From: Ryan Reed Date: Wed, 6 Sep 2023 20:49:32 -0400 Subject: [PATCH 11/16] Tests: Updating Tranpose tests --- tests/test_transpose.py | 194 +++++++++++++++++++++------------------- tests/utils.py | 15 ++-- 2 files changed, 109 insertions(+), 100 deletions(-) diff --git a/tests/test_transpose.py b/tests/test_transpose.py index 67239c4..4bfd6a9 100644 --- a/tests/test_transpose.py +++ b/tests/test_transpose.py @@ -2,139 +2,147 @@ import json import pathlib import pytest -from transpose import Transpose +from transpose import Transpose, TransposeConfig, TransposeEntry from transpose.exceptions import TransposeError from .utils import ( - STORE_DIR, - STORED_DIR, - TARGET_DIR, + ENTRY_NAME, + ENTRY_STORE_PATH, + STORE_PATH, + TARGET_PATH, + TRANSPOSE_CONFIG_PATH, 
setup_restore, setup_store, setup_apply, ) +@setup_store() def test_init(): - t = Transpose(target_path=TARGET_DIR) - assert t.cache_filename == ".transpose.json" - assert t.cache_path == pathlib.Path(TARGET_DIR).joinpath(".transpose.json") - - t = Transpose(target_path=TARGET_DIR, cache_filename=".transpose.txt") - assert t.cache_filename == ".transpose.txt" - assert t.cache_path == pathlib.Path(TARGET_DIR).joinpath(".transpose.txt") + t = Transpose(config_path=TRANSPOSE_CONFIG_PATH) + assert t.config.entries.get(ENTRY_NAME) + assert t.config_path == TRANSPOSE_CONFIG_PATH + assert t.store_path == TRANSPOSE_CONFIG_PATH.parent @setup_apply() def test_apply(): - store_path = pathlib.Path(STORE_DIR) - target_path = pathlib.Path(TARGET_DIR) + t = Transpose(config_path=TRANSPOSE_CONFIG_PATH) + + # Success + t.apply(ENTRY_NAME) + assert TARGET_PATH.is_symlink() + assert ENTRY_STORE_PATH.is_dir() - t = Transpose(target_path=STORE_DIR) + with pytest.raises(TransposeError, match="Entry does not exist"): + t.apply("BadName") - with open(t.cache_path, "r") as f: - cache = json.load(f) + # Will remove the symlink created above and reapply + # TODO: Check symlink path + t.apply(ENTRY_NAME) + assert TARGET_PATH.is_symlink() + assert ENTRY_STORE_PATH.is_dir() - # Test cache doesn't exist - t.cache_path.unlink() - with pytest.raises(TransposeError): - t.apply() + # Target already exists, force not set + TARGET_PATH.unlink() + TARGET_PATH.mkdir() + with pytest.raises(TransposeError, match="Entry path already exists"): + t.apply(ENTRY_NAME) - with open(t.cache_path, "w") as f: - json.dump(cache, f) + # Target already exists, force set (Create backup of original path) + t.apply(ENTRY_NAME, force=True) + backup_path = TARGET_PATH.with_suffix(".backup") - pathlib.Path(cache["original_path"]).symlink_to("bad/path") + assert backup_path.is_dir() + assert TARGET_PATH.is_symlink() + assert ENTRY_STORE_PATH.is_dir() + + +@setup_restore() +def test_restore(): + t = 
Transpose(config_path=TRANSPOSE_CONFIG_PATH) # Success - t.apply() + t.restore(ENTRY_NAME) + assert TARGET_PATH.is_dir() + assert not TARGET_PATH.is_symlink() + assert not ENTRY_STORE_PATH.exists() - assert store_path.is_dir() and not store_path.is_symlink() - assert target_path.is_dir() and target_path.is_symlink() + with pytest.raises(TransposeError, match="Could not locate entry by name"): + t.restore("BadName") @setup_restore() -def test_create(): - target_path = pathlib.Path(TARGET_DIR) - stored_path = pathlib.Path(STORE_DIR).joinpath(STORED_DIR) +def test_restore_path_conflicts(): + t = Transpose(config_path=TRANSPOSE_CONFIG_PATH) - t = Transpose(target_path=str(target_path)) + # Target already exists, force not set + TARGET_PATH.mkdir() + with pytest.raises(TransposeError, match="Entry path already exists"): + t.restore(ENTRY_NAME) - # Missing stored path - stored_path.rename("tmp") - with pytest.raises(TransposeError): - t.create(stored_path=stored_path) - pathlib.Path("tmp").rename(stored_path) + t.restore(ENTRY_NAME, force=True) + backup_path = TARGET_PATH.with_suffix(".backup") - cache_path = stored_path.joinpath(t.cache_filename) + assert backup_path.is_dir() + assert TARGET_PATH.is_dir() + assert not TARGET_PATH.is_symlink() + assert not ENTRY_STORE_PATH.exists() + assert not t.config.entries.get(ENTRY_NAME) - # Successful Create - t.create(stored_path=stored_path) - assert t.cache_path == cache_path - assert cache_path.exists() - with open(t.cache_path, "r") as f: - cache = json.load(f) +@setup_store() +def test_store(): + t = Transpose(config_path=TRANSPOSE_CONFIG_PATH) - assert cache["original_path"] == str(target_path.absolute()) + # Success + t.store("TestEntry", TARGET_PATH) + assert TARGET_PATH.is_symlink() + assert STORE_PATH.joinpath("TestEntry").is_dir() + assert t.config.entries["TestEntry"].path == str(TARGET_PATH) @setup_store() -def test_store(): - t = Transpose(target_path=TARGET_DIR) - t.store(store_path=STORE_DIR) +def 
test_store_conflicts(): + t = Transpose(config_path=TRANSPOSE_CONFIG_PATH) + + with pytest.raises(TransposeError, match="Entry already exists"): + t.store(ENTRY_NAME, TARGET_PATH) - target_path = pathlib.Path(TARGET_DIR) - store_path = pathlib.Path(STORE_DIR).joinpath(target_path.name) + with pytest.raises(TransposeError, match="Source path does not exist"): + t.store("TestEntry", "UnknownPath/") - # Successful Store - assert store_path.is_dir() and not store_path.is_symlink() - assert target_path.is_dir() and target_path.is_symlink() - assert t.cache_path.is_file() + STORE_PATH.joinpath("TestEntry").mkdir() + with pytest.raises(TransposeError, match="Store path already exists"): + t.store("TestEntry", TARGET_PATH) + STORE_PATH.joinpath("TestEntry").rmdir() @setup_store() -def test_store_named(): - t = Transpose(target_path=TARGET_DIR) - t.store(store_path=STORE_DIR, name="TestStore") +def test_config_add(): + pass # TODO - target_path = pathlib.Path(TARGET_DIR) - store_path = pathlib.Path(STORE_DIR).joinpath("TestStore") - # Successful Store - assert store_path.is_dir() and not store_path.is_symlink() - assert target_path.is_dir() and target_path.is_symlink() - assert t.cache_path.is_file() +@setup_store() +def test_config_get(): + pass # TODO -@setup_restore() -def test_restore(): - target_path = pathlib.Path(TARGET_DIR) - stored_path = pathlib.Path(STORE_DIR).joinpath(STORED_DIR) - - t = Transpose(target_path=str(stored_path)) - - # Missing Cache File - cache = t.cache_path.read_text() - t.cache_path.unlink() - with pytest.raises(TransposeError): - t.restore() - t.cache_path.write_text(cache) - cache = json.loads(cache) - - # Missing Target Path (original path) - t.target_path.rename("newpath") - with pytest.raises(TransposeError): - t.restore() - pathlib.Path("newpath").rename(t.target_path) - - # Original Path is a symlink - Should be removed and successfully restore - original_path = pathlib.Path(cache["original_path"]) - original_path.rename("newpath") - 
original_path.symlink_to("newpath") - - # Successful - t.restore() - - assert target_path.is_dir() and not target_path.is_symlink() - assert not stored_path.exists() - assert not t.cache_path.exists() +@setup_store() +def test_config_list(): + pass # TODO + + +@setup_store() +def test_config_remove(): + pass # TODO + + +@setup_store() +def test_config_save(): + pass # TODO + + +@setup_store() +def test_config_load(): + pass # TODO diff --git a/tests/utils.py b/tests/utils.py index b515558..2f44f61 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -29,14 +29,14 @@ def setup_apply(): Create the following directory structure: tests-temp/ ├── store/ - │ └── transpose.json - ├── target/ + │ ├── transpose.json + │ └── MyName/ └── symlink_test/ -> source/ """ try: with TemporaryDirectory(str(TESTS_PATH)): STORE_PATH.mkdir(parents=True, exist_ok=True) - TARGET_PATH.mkdir(parents=True, exist_ok=True) + ENTRY_STORE_PATH.mkdir(parents=True, exist_ok=True) SYMLINK_TEST_PATH.symlink_to(TARGET_PATH.resolve()) with open(str(TRANSPOSE_CONFIG_PATH), "w") as f: @@ -53,14 +53,15 @@ def setup_restore(): """ Create the following directory structure: tests-temp/ - └── store/ - ├── MyName/ - └── transpose.json + ├── store/ + │ ├── MyName/ + │ └── transpose.json + └── symlink_test -> store/MyName """ try: with TemporaryDirectory(str(TESTS_PATH)): ENTRY_STORE_PATH.mkdir(parents=True, exist_ok=True) - TARGET_PATH.mkdir(parents=True, exist_ok=True) + SYMLINK_TEST_PATH.symlink_to(TARGET_PATH) with open(str(TRANSPOSE_CONFIG_PATH), "w") as f: json.dump(TRANSPOSE_CONFIG, f) -- 2.30.1 From f80c62f0c8e068e9d73918d84bb72e1c1f31b17f Mon Sep 17 00:00:00 2001 From: Ryan Reed Date: Wed, 6 Sep 2023 21:14:14 -0400 Subject: [PATCH 12/16] Tests: Updating TranposeConfig tests --- tests/test_transpose.py | 61 +++++++++++++++++++++++++++++++++++------ 1 file changed, 53 insertions(+), 8 deletions(-) diff --git a/tests/test_transpose.py b/tests/test_transpose.py index 4bfd6a9..f4f97eb 100644 --- 
a/tests/test_transpose.py +++ b/tests/test_transpose.py @@ -10,6 +10,7 @@ from .utils import ( ENTRY_STORE_PATH, STORE_PATH, TARGET_PATH, + TRANSPOSE_CONFIG, TRANSPOSE_CONFIG_PATH, setup_restore, setup_store, @@ -120,29 +121,73 @@ def test_store_conflicts(): @setup_store() def test_config_add(): - pass # TODO + config = TransposeConfig.load(TRANSPOSE_CONFIG_PATH) + + with pytest.raises(TransposeError, match=f"'{ENTRY_NAME}' already exists"): + config.add(ENTRY_NAME, TARGET_PATH) + + config.add("NewEntry", TARGET_PATH) + assert config.entries.get("NewEntry") + assert config.entries["NewEntry"].path == str(TARGET_PATH) @setup_store() def test_config_get(): - pass # TODO + config = TransposeConfig.load(TRANSPOSE_CONFIG_PATH) + with pytest.raises( + TransposeError, match="does not exist in Transpose config entries" + ): + config.get("UnknownEntry") -@setup_store() -def test_config_list(): - pass # TODO + assert config.get(ENTRY_NAME).path == str(TARGET_PATH) @setup_store() def test_config_remove(): - pass # TODO + config = TransposeConfig.load(TRANSPOSE_CONFIG_PATH) + + with pytest.raises( + TransposeError, match="does not exist in Transpose config entries" + ): + config.remove("UnknownEntry") + + config.remove(ENTRY_NAME) + assert not config.entries.get(ENTRY_NAME) + + +@setup_store() +def test_config_update(): + config = TransposeConfig.load(TRANSPOSE_CONFIG_PATH) + + with pytest.raises( + TransposeError, match="does not exist in Transpose config entries" + ): + config.update("UnknownEntry", "/some/new/path") + + config.update(ENTRY_NAME, "/some/new/path") + assert config.entries[ENTRY_NAME].path == "/some/new/path" @setup_store() def test_config_save(): - pass # TODO + config = TransposeConfig.load(TRANSPOSE_CONFIG_PATH) + config.save(STORE_PATH.joinpath("test.json")) + + with open(STORE_PATH.joinpath("test.json"), "r") as f: + saved_config = json.load(f) + + assert ( + config.entries[ENTRY_NAME].path == saved_config["entries"][ENTRY_NAME]["path"] + ) @setup_store() 
def test_config_load(): - pass # TODO + config = TransposeConfig.load(TRANSPOSE_CONFIG_PATH) + + assert config.entries.get(ENTRY_NAME) + assert ( + config.entries[ENTRY_NAME].path + == TRANSPOSE_CONFIG["entries"][ENTRY_NAME]["path"] + ) -- 2.30.1 From ec36f7ca5e9abb15639b7f2c6c659871d46ba071 Mon Sep 17 00:00:00 2001 From: Ryan Reed Date: Wed, 6 Sep 2023 21:21:07 -0400 Subject: [PATCH 13/16] Updating version and documentation --- README.md | 52 ++++++++++++++++++++++++++++++++++++++------------ pyproject.toml | 4 ++-- 2 files changed, 42 insertions(+), 14 deletions(-) diff --git a/README.md b/README.md index 3a0cf7f..b440583 100644 --- a/README.md +++ b/README.md @@ -14,6 +14,8 @@ A tool for moving and symlinking directories to a central location * [Storing a Directory](#storing-a-directory) * [Restoring a Stored Directory](#restoring-a-stored-directory) * [Applying a Previously Transpose Managed Directory](#applying-a-previously-transpose-managed-directory) + * [Modifying Tranpose Config Directly](#modifying-tranpose-config-directly) +* [Development](#development) @@ -44,18 +46,19 @@ pip install . 
## Quick Reference ``` -transpose store ~/.config/zsh # Move ~/.config/zsh -> ~/.local/share/transpose/zsh, create symlink, create cache -transpose restore ~/.local/share/transpose/zsh # Remove symlink, move ~/.local/share/transpose/zsh -> ~/.config/zsh, remove cache -transpose apply ~/.local/share/transpose/zsh # Recreate symlink in cache location -transpose create ~/.config/zsh ~/.local/share/transpose/zsh # Recreate cache file +transpose store ~/.config/zsh # Move ~/.config/zsh -> ~/.local/share/transpose/zsh, create symlink, create cache +transpose restore zsh # Remove symlink, move ~/.local/share/transpose/zsh_config -> ~/.config/zsh, remove cache +transpose apply zsh # Recreate symlink in store path (useful after moving Store Path location) -transpose store -s /mnt/backups ~/.config/zsh zsh_config # Move ~/.config/zsh -> /mnt/backups/zsh_config, create symlink, create cache -transpose restore --cache-filename .mycache.json /mnt/backups/zsh_config # Use /mnt/backup/.zsh_config.json for restoring a stored directory +transpose store -s /mnt/backups ~/.config/zsh zsh_config # Move ~/.config/zsh -> /mnt/backups/zsh_config, create symlink ``` ## Usage +See `transpose --help` for more information on each command + + ### Storing a Directory Storing a directory will: @@ -76,7 +79,6 @@ The above will (assuming using all the defaults): Note: The name on the end (`My Documents` above), can be ommitted. The stored name will use the target name (e.g. `Documents` above) - ### Restoring a Stored Directory Restoring a directory will: @@ -85,21 +87,47 @@ Restoring a directory will: 2. Move the stored directory to the `original_path` ``` -transpose restore "/home/user/.local/share/transpose/My Documents" +transpose restore Game1 ``` The above will (assuming all the defaults): -1. Remove the symlink at `/home/user/Documents` (from cache file) -2. Move `$XDG_DATA_HOME/transpose/My Documents` to `/home/user/Documents` +1. 
Remove the symlink at `/home/user/Documents/games/MyGame` (from settings file) +2. Move `$XDG_DATA_HOME/transpose/Game1` to `/home/user/Documents/games/MyGame` ### Applying a Previously Transpose Managed Directory -This will recreate the symlink based on the cache file within the directory. +This will recreate the symlink based on the config file within the directory. This is most useful when moving the stored directory. ``` -transpose apple "/home/user/.local/share/transpose/My Documents" +transpose apply "Game1" +``` + + +### Modifying Tranpose Config Directly + +It's possible to modify the tranpose configuration file, `STORE_PATH/transpose.json`, using the console: + +``` +transpose config add "NewEntry" "/path/to/location" +transpose config get "NewEntry" +transpose config list +transpose config remove "NewEntry" +``` + + +## Development + +``` +poetry install +poetry add --dev black +poetry update # Only to update to latest versions, update poetry.lock + +poetry run python src/transpose/console.py +poetry run pytest --cov=transpose --cov-report html tests + +poetry shell ``` diff --git a/pyproject.toml b/pyproject.toml index 455c406..b776761 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,7 @@ [tool.poetry] name = "transpose" -version = "1.1.0" -description = "Move and symlink a path" +version = "2.0.0" +description = "Move and symlink a path to a central location" authors = ["Ryan Reed"] license = "GPLv3" readme = "README.md" -- 2.30.1 From e8b989541ea2dbb5281bd00d5c06ac83fd2e01c6 Mon Sep 17 00:00:00 2001 From: Ryan Reed Date: Wed, 6 Sep 2023 21:55:46 -0400 Subject: [PATCH 14/16] Adding an upgrade script for convenience --- scripts/upgrade-2.0.py | 28 ++++++++++++++++++++++++++++ 1 file changed, 28 insertions(+) create mode 100644 scripts/upgrade-2.0.py diff --git a/scripts/upgrade-2.0.py b/scripts/upgrade-2.0.py new file mode 100644 index 0000000..7225e6c --- /dev/null +++ b/scripts/upgrade-2.0.py @@ -0,0 +1,28 @@ +""" +Loops through STORE_PATH, 
looking for */.transpose files, create new transpose config file + + Note: This does not remove the v1 */.transpose files, just in case. Must be done if desired. +""" +from pathlib import Path + +import json + +from transpose import TransposeConfig, DEFAULT_STORE_PATH + + +def main() -> None: + store_path = Path(DEFAULT_STORE_PATH) + + config = TransposeConfig() + + entries = store_path.glob("*/*.transpose.json") + for entry in entries: + with open(entry, "r") as f: + d = json.load(f) + config.add(Path(entry).parent.parts[-1], d["original_path"]) + + config.save(store_path.joinpath("transpose.json")) + + +if __name__ == "__main__": + main() -- 2.30.1 From 3588f648686fb348341a9f646ae54f24ed7814ec Mon Sep 17 00:00:00 2001 From: Ryan Reed Date: Wed, 6 Sep 2023 22:38:25 -0400 Subject: [PATCH 15/16] Minor comment update --- scripts/upgrade-2.0.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/upgrade-2.0.py b/scripts/upgrade-2.0.py index 7225e6c..45f06d2 100644 --- a/scripts/upgrade-2.0.py +++ b/scripts/upgrade-2.0.py @@ -1,7 +1,7 @@ """ Loops through STORE_PATH, looking for */.transpose files, create new transpose config file - Note: This does not remove the v1 */.transpose files, just in case. Must be done if desired. + Note: This does not remove the v1 */.transpose files, just in case. Must be done manually if desired. 
""" from pathlib import Path -- 2.30.1 From 89b7ad362553c49fa44c094de2f540aa3032afbf Mon Sep 17 00:00:00 2001 From: Ryan Reed Date: Thu, 7 Sep 2023 18:20:17 -0400 Subject: [PATCH 16/16] Minor comment updates --- src/transpose/transpose.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/transpose/transpose.py b/src/transpose/transpose.py index 3fea08b..eec06ce 100644 --- a/src/transpose/transpose.py +++ b/src/transpose/transpose.py @@ -146,7 +146,7 @@ class Transpose: if entry_path.exists(): if entry_path.is_symlink(): remove(entry_path) - elif force: # Backup the existing path, just in case + elif force: # Backup the existing path move(entry_path, entry_path.with_suffix(".backup")) else: raise TransposeError( @@ -176,7 +176,7 @@ class Transpose: if entry_path.exists(): if entry_path.is_symlink(): remove(entry_path) - elif force: # Backup the existing path, just in case + elif force: # Backup the existing path move(entry_path, entry_path.with_suffix(".backup")) else: raise TransposeError( @@ -193,7 +193,7 @@ class Transpose: Move the source path to the store path, create a symlink, and update the config Args: - name: The name of the entry (must exist) + name: The name of the entry source_path: The directory or file to be stored Returns: -- 2.30.1