aiofiles-23.2.1/.github/FUNDING.yml

---
tidelift: "pypi/aiofiles"
github: Tinche

aiofiles-23.2.1/.github/SECURITY.md

## Security contact information

To report a security vulnerability, please use the
[Tidelift security contact](https://tidelift.com/security).
Tidelift will coordinate the fix and disclosure.

aiofiles-23.2.1/.github/workflows/main.yml

---
name: CI

on:
  push:
    branches: ["main"]
  pull_request:
    branches: ["main"]
  workflow_dispatch:

jobs:
  tests:
    name: "Python ${{ matrix.python-version }}"
    runs-on: ${{ matrix.os }}

    strategy:
      matrix:
        os: [ubuntu-latest, windows-latest]
        python-version: ["3.7", "3.8", "3.9", "3.10", "3.11", "3.12", "pypy-3.9"]

    steps:
      - uses: "actions/checkout@v3"
      - uses: "actions/setup-python@v4"
        with:
          python-version: "${{ matrix.python-version }}"
          allow-prereleases: true

      - name: "Install dependencies"
        run: |
          python -VV
          python -m site
          python -m pip install --upgrade pip wheel pdm
          python -m pip install --upgrade tox tox-gh-actions

      - name: "Run tox targets for ${{ matrix.python-version }}"
        run: "python -m tox"

      - name: "Upload coverage data"
        uses: "actions/upload-artifact@v3"
        with:
          name: "coverage-data"
          path: ".coverage.*"
          if-no-files-found: "ignore"
        if: runner.os == 'Linux'

  coverage:
    name: "Combine & check coverage."
    needs: "tests"
    runs-on: "ubuntu-latest"

    steps:
      - uses: "actions/checkout@v3"
      - uses: "actions/setup-python@v4"
        with:
          cache: "pip"
          python-version: "3.11"

      - run: "python -Im pip install --upgrade coverage[toml]"

      - uses: "actions/download-artifact@v3"
        with:
          name: "coverage-data"

      - name: "Combine coverage"
        run: |
          python -Im coverage combine
          python -Im coverage html --skip-covered --skip-empty
          python -Im coverage json

          # Report and write to summary.
          python -Im coverage report | sed 's/^/    /' >> $GITHUB_STEP_SUMMARY

          export TOTAL=$(python -c "import json;print(json.load(open('coverage.json'))['totals']['percent_covered_display'])")
          echo "total=$TOTAL" >> $GITHUB_ENV

      - name: "Upload HTML report."
        uses: "actions/upload-artifact@v3"
        with:
          name: "html-report"
          path: "htmlcov"

      - name: "Make badge"
        if: github.ref == 'refs/heads/main'
        uses: "schneegans/dynamic-badges-action@v1.4.0"
        with:
          # GIST_TOKEN is a GitHub personal access token with scope "gist".
          auth: ${{ secrets.GIST_TOKEN }}
          gistID: 882f02e3df32136c847ba90d2688f06e
          filename: covbadge.json
          label: Coverage
          message: ${{ env.total }}%
          minColorRange: 50
          maxColorRange: 90
          valColorRange: ${{ env.total }}

  package:
    name: "Build & verify package"
    runs-on: "ubuntu-latest"

    steps:
      - uses: "actions/checkout@v3"
      - uses: "actions/setup-python@v4"
        with:
          python-version: "3.x"

      - name: "Install PDM and twine"
        run: "python -m pip install pdm twine check-wheel-contents"
      - name: "Build package"
        run: "pdm build"
      - name: "List result"
        run: "ls -l dist"
      - name: "Check wheel contents"
        run: "check-wheel-contents dist/*.whl"
      - name: "Check long_description"
        run: "python -m twine check dist/*"

aiofiles-23.2.1/.gitignore

# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]

# C extensions
*.so

# Distribution / packaging
.Python
env/
pyvenv/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
*.egg-info/
.installed.cfg
*.egg
.venv*

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*,cover

# Translations
*.mo
*.pot

# Django stuff:
*.log

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# IDEA IDE files
.idea/
*.iml
.pytest_cache

# VScode
.vscode/

# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
.pdm-python
.pdm-build/

# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/

aiofiles-23.2.1/LICENSE

Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity.

"You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License.

"Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files.

"Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. 
In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "{}" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
aiofiles-23.2.1/MANIFEST.in000066400000000000000000000000201446472733600151400ustar00rootroot00000000000000include LICENSE aiofiles-23.2.1/Makefile000066400000000000000000000001731446472733600150530ustar00rootroot00000000000000.PHONY: test lint test: pdm run pytest -x --ff tests lint: pdm run flake8 src tests && pdm run black --check src tests aiofiles-23.2.1/NOTICE000066400000000000000000000000671446472733600143210ustar00rootroot00000000000000Asyncio support for files Copyright 2016 Tin Tvrtkovic aiofiles-23.2.1/README.md000066400000000000000000000205711446472733600146760ustar00rootroot00000000000000# aiofiles: file support for asyncio [![PyPI](https://img.shields.io/pypi/v/aiofiles.svg)](https://pypi.python.org/pypi/aiofiles) [![Build](https://github.com/Tinche/aiofiles/workflows/CI/badge.svg)](https://github.com/Tinche/aiofiles/actions) [![Coverage](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/Tinche/882f02e3df32136c847ba90d2688f06e/raw/covbadge.json)](https://github.com/Tinche/aiofiles/actions/workflows/main.yml) [![Supported Python versions](https://img.shields.io/pypi/pyversions/aiofiles.svg)](https://github.com/Tinche/aiofiles) [![Black](https://img.shields.io/badge/code%20style-black-000000.svg)](https://github.com/psf/black) **aiofiles** is an Apache2 licensed library, written in Python, for handling local disk files in asyncio applications. Ordinary local file IO is blocking, and cannot easily and portably be made asynchronous. This means doing file IO may interfere with asyncio applications, which shouldn't block the executing thread. aiofiles helps with this by introducing asynchronous versions of files that support delegating operations to a separate thread pool. ```python async with aiofiles.open('filename', mode='r') as f: contents = await f.read() print(contents) 'My file contents' ``` Asynchronous iteration is also supported. ```python async with aiofiles.open('filename') as f: async for line in f: ... ``` Asynchronous interface to tempfile module. ```python async with aiofiles.tempfile.TemporaryFile('wb') as f: await f.write(b'Hello, World!') ``` ## Features - a file API very similar to Python's standard, blocking API - support for buffered and unbuffered binary files, and buffered text files - support for `async`/`await` ([PEP 492](https://peps.python.org/pep-0492/)) constructs - async interface to tempfile module ## Installation To install aiofiles, simply: ```bash $ pip install aiofiles ``` ## Usage Files are opened using the `aiofiles.open()` coroutine, which in addition to mirroring the builtin `open` accepts optional `loop` and `executor` arguments. If `loop` is absent, the default loop will be used, as per the set asyncio policy. If `executor` is not specified, the default event loop executor will be used. In case of success, an asynchronous file object is returned with an API identical to an ordinary file, except the following methods are coroutines and delegate to an executor: - `close` - `flush` - `isatty` - `read` - `readall` - `read1` - `readinto` - `readline` - `readlines` - `seek` - `seekable` - `tell` - `truncate` - `writable` - `write` - `writelines` In case of failure, one of the usual exceptions will be raised. `aiofiles.stdin`, `aiofiles.stdout`, `aiofiles.stderr`, `aiofiles.stdin_bytes`, `aiofiles.stdout_bytes`, and `aiofiles.stderr_bytes` provide async access to `sys.stdin`, `sys.stdout`, `sys.stderr`, and their corresponding `.buffer` properties. 
The `aiofiles.os` module contains executor-enabled coroutine versions of several useful `os` functions that deal with files:

- `stat`
- `statvfs`
- `sendfile`
- `rename`
- `renames`
- `replace`
- `remove`
- `unlink`
- `mkdir`
- `makedirs`
- `rmdir`
- `removedirs`
- `link`
- `symlink`
- `readlink`
- `listdir`
- `scandir`
- `access`
- `path.exists`
- `path.isfile`
- `path.isdir`
- `path.islink`
- `path.ismount`
- `path.getsize`
- `path.getatime`
- `path.getctime`
- `path.samefile`
- `path.sameopenfile`
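These wrappers are awaited like any other coroutine. A minimal sketch combining a few of them, using placeholder file and directory names:

```python
import asyncio

import aiofiles
import aiofiles.os


async def main():
    # "workdir" and "notes.txt" are illustrative names only.
    await aiofiles.os.makedirs("workdir", exist_ok=True)

    async with aiofiles.open("workdir/notes.txt", mode="w") as f:
        await f.write("hello\n")

    if await aiofiles.os.path.exists("workdir/notes.txt"):
        stat_result = await aiofiles.os.stat("workdir/notes.txt")
        print("size:", stat_result.st_size)

    # Clean up: remove the file, then the now-empty directory.
    await aiofiles.os.remove("workdir/notes.txt")
    await aiofiles.os.rmdir("workdir")


asyncio.run(main())
```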
### Tempfile

**aiofiles.tempfile** implements the following interfaces:

- TemporaryFile
- NamedTemporaryFile
- SpooledTemporaryFile
- TemporaryDirectory

Results are returned wrapped in a context manager, allowing use with `async with` and `async for`.

```python
async with aiofiles.tempfile.NamedTemporaryFile('wb+') as f:
    await f.write(b'Line1\n Line2')
    await f.seek(0)
    async for line in f:
        print(line)

async with aiofiles.tempfile.TemporaryDirectory() as d:
    filename = os.path.join(d, "file.ext")
```

### Writing tests for aiofiles

Real file IO can be mocked by patching `aiofiles.threadpool.sync_open` as desired. The return type also needs to be registered with the `aiofiles.threadpool.wrap` dispatcher:

```python
aiofiles.threadpool.wrap.register(mock.MagicMock)(
    lambda *args, **kwargs: threadpool.AsyncBufferedIOBase(*args, **kwargs)
)


async def test_stuff():
    data = 'data'
    mock_file = mock.MagicMock()

    with mock.patch('aiofiles.threadpool.sync_open', return_value=mock_file) as mock_open:
        async with aiofiles.open('filename', 'w') as f:
            await f.write(data)

        mock_file.write.assert_called_once_with(data)
```

### History

#### 23.2.1 (2023-08-09)

- Import `os.statvfs` conditionally to fix importing on non-UNIX systems.
  [#171](https://github.com/Tinche/aiofiles/issues/171) [#172](https://github.com/Tinche/aiofiles/pull/172)

#### 23.2.0 (2023-08-09)

- aiofiles is now tested on Python 3.12 too.
  [#166](https://github.com/Tinche/aiofiles/issues/166) [#168](https://github.com/Tinche/aiofiles/pull/168)
- On Python 3.12, `aiofiles.tempfile.NamedTemporaryFile` now accepts a `delete_on_close` argument, just like the stdlib version.
- On Python 3.12, `aiofiles.tempfile.NamedTemporaryFile` no longer exposes a `delete` attribute, just like the stdlib version.
- Added `aiofiles.os.statvfs` and `aiofiles.os.path.ismount`. [#162](https://github.com/Tinche/aiofiles/pull/162)
- Use [PDM](https://pdm.fming.dev/latest/) instead of Poetry. [#169](https://github.com/Tinche/aiofiles/pull/169)

#### 23.1.0 (2023-02-09)

- Added `aiofiles.os.access`. [#146](https://github.com/Tinche/aiofiles/pull/146)
- Removed `aiofiles.tempfile.temptypes.AsyncSpooledTemporaryFile.softspace`. [#151](https://github.com/Tinche/aiofiles/pull/151)
- Added `aiofiles.stdin`, `aiofiles.stdin_bytes`, and other stdio streams. [#154](https://github.com/Tinche/aiofiles/pull/154)
- Transition to `asyncio.get_running_loop` (vs `asyncio.get_event_loop`) internally.

#### 22.1.0 (2022-09-04)

- Added `aiofiles.os.path.islink`. [#126](https://github.com/Tinche/aiofiles/pull/126)
- Added `aiofiles.os.readlink`. [#125](https://github.com/Tinche/aiofiles/pull/125)
- Added `aiofiles.os.symlink`. [#124](https://github.com/Tinche/aiofiles/pull/124)
- Added `aiofiles.os.unlink`. [#123](https://github.com/Tinche/aiofiles/pull/123)
- Added `aiofiles.os.link`. [#121](https://github.com/Tinche/aiofiles/pull/121)
- Added `aiofiles.os.renames`. [#120](https://github.com/Tinche/aiofiles/pull/120)
- Added `aiofiles.os.{listdir, scandir}`. [#143](https://github.com/Tinche/aiofiles/pull/143)
- Switched to CalVer.
- Dropped Python 3.6 support. If you require it, use version 0.8.0.
- aiofiles is now tested on Python 3.11.

#### 0.8.0 (2021-11-27)

- aiofiles is now tested on Python 3.10.
- Added `aiofiles.os.replace`. [#107](https://github.com/Tinche/aiofiles/pull/107)
- Added `aiofiles.os.{makedirs, removedirs}`.
- Added `aiofiles.os.path.{exists, isfile, isdir, getsize, getatime, getctime, samefile, sameopenfile}`. [#63](https://github.com/Tinche/aiofiles/pull/63)
- Added `suffix`, `prefix`, `dir` args to `aiofiles.tempfile.TemporaryDirectory`. [#116](https://github.com/Tinche/aiofiles/pull/116)

#### 0.7.0 (2021-05-17)

- Added the `aiofiles.tempfile` module for async temporary files. [#56](https://github.com/Tinche/aiofiles/pull/56)
- Switched to Poetry and GitHub actions.
- Dropped 3.5 support.

#### 0.6.0 (2020-10-27)

- `aiofiles` is now tested on ppc64le.
- Added `name` and `mode` properties to async file objects. [#82](https://github.com/Tinche/aiofiles/pull/82)
- Fixed a DeprecationWarning internally. [#75](https://github.com/Tinche/aiofiles/pull/75)
- Python 3.9 support and tests.

#### 0.5.0 (2020-04-12)

- Python 3.8 support. Code base modernization (using `async/await` instead of `asyncio.coroutine`/`yield from`).
- Added `aiofiles.os.remove`, `aiofiles.os.rename`, `aiofiles.os.mkdir`, `aiofiles.os.rmdir`. [#62](https://github.com/Tinche/aiofiles/pull/62)

#### 0.4.0 (2018-08-11)

- Python 3.7 support.
- Removed Python 3.3/3.4 support. If you use these versions, stick to aiofiles 0.3.x.

#### 0.3.2 (2017-09-23)

- The LICENSE is now included in the sdist. [#31](https://github.com/Tinche/aiofiles/pull/31)

#### 0.3.1 (2017-03-10)

- Introduced a changelog.
- `aiofiles.os.sendfile` will now work if the standard `os` module contains a `sendfile` function.

### Contributing

Contributions are very welcome. Tests can be run with `tox`, please ensure the coverage at least stays the same before you submit a pull request.

aiofiles-23.2.1/pdm.lock

# This file is @generated by PDM.
# It is not intended for manual editing.

[metadata]
groups = ["default", "lint", "test"]
cross_platform = true
static_urls = false
lock_version = "4.3"
content_hash = "sha256:252ff80ac0b18f02691c9ddf3493abfdf79ab6af3dfb2487ce04e5e1f3f676cc"

[[package]]
name = "black"
version = "23.3.0"
requires_python = ">=3.7"
summary = "The uncompromising code formatter."
dependencies = [ "click>=8.0.0", "mypy-extensions>=0.4.3", "packaging>=22.0", "pathspec>=0.9.0", "platformdirs>=2", "tomli>=1.1.0; python_version < \"3.11\"", "typed-ast>=1.4.2; python_version < \"3.8\" and implementation_name == \"cpython\"", "typing-extensions>=3.10.0.0; python_version < \"3.10\"", ] files = [ {file = "black-23.3.0-cp310-cp310-macosx_10_16_arm64.whl", hash = "sha256:0945e13506be58bf7db93ee5853243eb368ace1c08a24c65ce108986eac65915"}, {file = "black-23.3.0-cp310-cp310-macosx_10_16_universal2.whl", hash = "sha256:67de8d0c209eb5b330cce2469503de11bca4085880d62f1628bd9972cc3366b9"}, {file = "black-23.3.0-cp310-cp310-macosx_10_16_x86_64.whl", hash = "sha256:7c3eb7cea23904399866c55826b31c1f55bbcd3890ce22ff70466b907b6775c2"}, {file = "black-23.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:32daa9783106c28815d05b724238e30718f34155653d4d6e125dc7daec8e260c"}, {file = "black-23.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:35d1381d7a22cc5b2be2f72c7dfdae4072a3336060635718cc7e1ede24221d6c"}, {file = "black-23.3.0-cp311-cp311-macosx_10_16_arm64.whl", hash = "sha256:a8a968125d0a6a404842fa1bf0b349a568634f856aa08ffaff40ae0dfa52e7c6"}, {file = "black-23.3.0-cp311-cp311-macosx_10_16_universal2.whl", hash = "sha256:c7ab5790333c448903c4b721b59c0d80b11fe5e9803d8703e84dcb8da56fec1b"}, {file = "black-23.3.0-cp311-cp311-macosx_10_16_x86_64.whl", hash = "sha256:a6f6886c9869d4daae2d1715ce34a19bbc4b95006d20ed785ca00fa03cba312d"}, {file = "black-23.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f3c333ea1dd6771b2d3777482429864f8e258899f6ff05826c3a4fcc5ce3f70"}, {file = "black-23.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:11c410f71b876f961d1de77b9699ad19f939094c3a677323f43d7a29855fe326"}, {file = "black-23.3.0-cp37-cp37m-macosx_10_16_x86_64.whl", hash = "sha256:1d06691f1eb8de91cd1b322f21e3bfc9efe0c7ca1f0e1eb1db44ea367dff656b"}, {file = "black-23.3.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:50cb33cac881766a5cd9913e10ff75b1e8eb71babf4c7104f2e9c52da1fb7de2"}, {file = "black-23.3.0-cp37-cp37m-win_amd64.whl", hash = "sha256:e114420bf26b90d4b9daa597351337762b63039752bdf72bf361364c1aa05925"}, {file = "black-23.3.0-cp38-cp38-macosx_10_16_arm64.whl", hash = "sha256:48f9d345675bb7fbc3dd85821b12487e1b9a75242028adad0333ce36ed2a6d27"}, {file = "black-23.3.0-cp38-cp38-macosx_10_16_universal2.whl", hash = "sha256:714290490c18fb0126baa0fca0a54ee795f7502b44177e1ce7624ba1c00f2331"}, {file = "black-23.3.0-cp38-cp38-macosx_10_16_x86_64.whl", hash = "sha256:064101748afa12ad2291c2b91c960be28b817c0c7eaa35bec09cc63aa56493c5"}, {file = "black-23.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:562bd3a70495facf56814293149e51aa1be9931567474993c7942ff7d3533961"}, {file = "black-23.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:e198cf27888ad6f4ff331ca1c48ffc038848ea9f031a3b40ba36aced7e22f2c8"}, {file = "black-23.3.0-cp39-cp39-macosx_10_16_arm64.whl", hash = "sha256:3238f2aacf827d18d26db07524e44741233ae09a584273aa059066d644ca7b30"}, {file = "black-23.3.0-cp39-cp39-macosx_10_16_universal2.whl", hash = "sha256:f0bd2f4a58d6666500542b26354978218a9babcdc972722f4bf90779524515f3"}, {file = "black-23.3.0-cp39-cp39-macosx_10_16_x86_64.whl", hash = "sha256:92c543f6854c28a3c7f39f4d9b7694f9a6eb9d3c5e2ece488c327b6e7ea9b266"}, {file = "black-23.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a150542a204124ed00683f0db1f5cf1c2aaaa9cc3495b7a3b5976fb136090ab"}, {file = 
"black-23.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:6b39abdfb402002b8a7d030ccc85cf5afff64ee90fa4c5aebc531e3ad0175ddb"}, {file = "black-23.3.0-py3-none-any.whl", hash = "sha256:ec751418022185b0c1bb7d7736e6933d40bbb14c14a0abcf9123d1b159f98dd4"}, {file = "black-23.3.0.tar.gz", hash = "sha256:1c7b8d606e728a41ea1ccbd7264677e494e87cf630e399262ced92d4a8dac940"}, ] [[package]] name = "click" version = "8.1.6" requires_python = ">=3.7" summary = "Composable command line interface toolkit" dependencies = [ "colorama; platform_system == \"Windows\"", "importlib-metadata; python_version < \"3.8\"", ] files = [ {file = "click-8.1.6-py3-none-any.whl", hash = "sha256:fa244bb30b3b5ee2cae3da8f55c9e5e0c0e86093306301fb418eb9dc40fbded5"}, {file = "click-8.1.6.tar.gz", hash = "sha256:48ee849951919527a045bfe3bf7baa8a959c423134e1a5b98c05c20ba75a1cbd"}, ] [[package]] name = "colorama" version = "0.4.6" requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" summary = "Cross-platform colored terminal text." files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] [[package]] name = "coverage" version = "7.2.7" requires_python = ">=3.7" summary = "Code coverage measurement for Python" files = [ {file = "coverage-7.2.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d39b5b4f2a66ccae8b7263ac3c8170994b65266797fb96cbbfd3fb5b23921db8"}, {file = "coverage-7.2.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6d040ef7c9859bb11dfeb056ff5b3872436e3b5e401817d87a31e1750b9ae2fb"}, {file = "coverage-7.2.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba90a9563ba44a72fda2e85302c3abc71c5589cea608ca16c22b9804262aaeb6"}, {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7d9405291c6928619403db1d10bd07888888ec1abcbd9748fdaa971d7d661b2"}, {file = "coverage-7.2.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:31563e97dae5598556600466ad9beea39fb04e0229e61c12eaa206e0aa202063"}, {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:ebba1cd308ef115925421d3e6a586e655ca5a77b5bf41e02eb0e4562a111f2d1"}, {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:cb017fd1b2603ef59e374ba2063f593abe0fc45f2ad9abdde5b4d83bd922a353"}, {file = "coverage-7.2.7-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d62a5c7dad11015c66fbb9d881bc4caa5b12f16292f857842d9d1871595f4495"}, {file = "coverage-7.2.7-cp310-cp310-win32.whl", hash = "sha256:ee57190f24fba796e36bb6d3aa8a8783c643d8fa9760c89f7a98ab5455fbf818"}, {file = "coverage-7.2.7-cp310-cp310-win_amd64.whl", hash = "sha256:f75f7168ab25dd93110c8a8117a22450c19976afbc44234cbf71481094c1b850"}, {file = "coverage-7.2.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:06a9a2be0b5b576c3f18f1a241f0473575c4a26021b52b2a85263a00f034d51f"}, {file = "coverage-7.2.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5baa06420f837184130752b7c5ea0808762083bf3487b5038d68b012e5937dbe"}, {file = "coverage-7.2.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdec9e8cbf13a5bf63290fc6013d216a4c7232efb51548594ca3631a7f13c3a3"}, {file = 
"coverage-7.2.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:52edc1a60c0d34afa421c9c37078817b2e67a392cab17d97283b64c5833f427f"}, {file = "coverage-7.2.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:63426706118b7f5cf6bb6c895dc215d8a418d5952544042c8a2d9fe87fcf09cb"}, {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:afb17f84d56068a7c29f5fa37bfd38d5aba69e3304af08ee94da8ed5b0865833"}, {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:48c19d2159d433ccc99e729ceae7d5293fbffa0bdb94952d3579983d1c8c9d97"}, {file = "coverage-7.2.7-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:0e1f928eaf5469c11e886fe0885ad2bf1ec606434e79842a879277895a50942a"}, {file = "coverage-7.2.7-cp311-cp311-win32.whl", hash = "sha256:33d6d3ea29d5b3a1a632b3c4e4f4ecae24ef170b0b9ee493883f2df10039959a"}, {file = "coverage-7.2.7-cp311-cp311-win_amd64.whl", hash = "sha256:5b7540161790b2f28143191f5f8ec02fb132660ff175b7747b95dcb77ac26562"}, {file = "coverage-7.2.7-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:f2f67fe12b22cd130d34d0ef79206061bfb5eda52feb6ce0dba0644e20a03cf4"}, {file = "coverage-7.2.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a342242fe22407f3c17f4b499276a02b01e80f861f1682ad1d95b04018e0c0d4"}, {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:171717c7cb6b453aebac9a2ef603699da237f341b38eebfee9be75d27dc38e01"}, {file = "coverage-7.2.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:49969a9f7ffa086d973d91cec8d2e31080436ef0fb4a359cae927e742abfaaa6"}, {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b46517c02ccd08092f4fa99f24c3b83d8f92f739b4657b0f146246a0ca6a831d"}, {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:a3d33a6b3eae87ceaefa91ffdc130b5e8536182cd6dfdbfc1aa56b46ff8c86de"}, {file = "coverage-7.2.7-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:976b9c42fb2a43ebf304fa7d4a310e5f16cc99992f33eced91ef6f908bd8f33d"}, {file = "coverage-7.2.7-cp312-cp312-win32.whl", hash = "sha256:8de8bb0e5ad103888d65abef8bca41ab93721647590a3f740100cd65c3b00511"}, {file = "coverage-7.2.7-cp312-cp312-win_amd64.whl", hash = "sha256:9e31cb64d7de6b6f09702bb27c02d1904b3aebfca610c12772452c4e6c21a0d3"}, {file = "coverage-7.2.7-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:58c2ccc2f00ecb51253cbe5d8d7122a34590fac9646a960d1430d5b15321d95f"}, {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d22656368f0e6189e24722214ed8d66b8022db19d182927b9a248a2a8a2f67eb"}, {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a895fcc7b15c3fc72beb43cdcbdf0ddb7d2ebc959edac9cef390b0d14f39f8a9"}, {file = "coverage-7.2.7-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e84606b74eb7de6ff581a7915e2dab7a28a0517fbe1c9239eb227e1354064dcd"}, {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:0a5f9e1dbd7fbe30196578ca36f3fba75376fb99888c395c5880b355e2875f8a"}, {file = "coverage-7.2.7-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:419bfd2caae268623dd469eff96d510a920c90928b60f2073d79f8fe2bbc5959"}, {file = 
"coverage-7.2.7-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:2aee274c46590717f38ae5e4650988d1af340fe06167546cc32fe2f58ed05b02"}, {file = "coverage-7.2.7-cp37-cp37m-win32.whl", hash = "sha256:61b9a528fb348373c433e8966535074b802c7a5d7f23c4f421e6c6e2f1697a6f"}, {file = "coverage-7.2.7-cp37-cp37m-win_amd64.whl", hash = "sha256:b1c546aca0ca4d028901d825015dc8e4d56aac4b541877690eb76490f1dc8ed0"}, {file = "coverage-7.2.7-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:54b896376ab563bd38453cecb813c295cf347cf5906e8b41d340b0321a5433e5"}, {file = "coverage-7.2.7-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:3d376df58cc111dc8e21e3b6e24606b5bb5dee6024f46a5abca99124b2229ef5"}, {file = "coverage-7.2.7-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5e330fc79bd7207e46c7d7fd2bb4af2963f5f635703925543a70b99574b0fea9"}, {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e9d683426464e4a252bf70c3498756055016f99ddaec3774bf368e76bbe02b6"}, {file = "coverage-7.2.7-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d13c64ee2d33eccf7437961b6ea7ad8673e2be040b4f7fd4fd4d4d28d9ccb1e"}, {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:b7aa5f8a41217360e600da646004f878250a0d6738bcdc11a0a39928d7dc2050"}, {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:8fa03bce9bfbeeef9f3b160a8bed39a221d82308b4152b27d82d8daa7041fee5"}, {file = "coverage-7.2.7-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:245167dd26180ab4c91d5e1496a30be4cd721a5cf2abf52974f965f10f11419f"}, {file = "coverage-7.2.7-cp38-cp38-win32.whl", hash = "sha256:d2c2db7fd82e9b72937969bceac4d6ca89660db0a0967614ce2481e81a0b771e"}, {file = "coverage-7.2.7-cp38-cp38-win_amd64.whl", hash = "sha256:2e07b54284e381531c87f785f613b833569c14ecacdcb85d56b25c4622c16c3c"}, {file = "coverage-7.2.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:537891ae8ce59ef63d0123f7ac9e2ae0fc8b72c7ccbe5296fec45fd68967b6c9"}, {file = "coverage-7.2.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:06fb182e69f33f6cd1d39a6c597294cff3143554b64b9825d1dc69d18cc2fff2"}, {file = "coverage-7.2.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:201e7389591af40950a6480bd9edfa8ed04346ff80002cec1a66cac4549c1ad7"}, {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f6951407391b639504e3b3be51b7ba5f3528adbf1a8ac3302b687ecababf929e"}, {file = "coverage-7.2.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f48351d66575f535669306aa7d6d6f71bc43372473b54a832222803eb956fd1"}, {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b29019c76039dc3c0fd815c41392a044ce555d9bcdd38b0fb60fb4cd8e475ba9"}, {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:81c13a1fc7468c40f13420732805a4c38a105d89848b7c10af65a90beff25250"}, {file = "coverage-7.2.7-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:975d70ab7e3c80a3fe86001d8751f6778905ec723f5b110aed1e450da9d4b7f2"}, {file = "coverage-7.2.7-cp39-cp39-win32.whl", hash = "sha256:7ee7d9d4822c8acc74a5e26c50604dff824710bc8de424904c0982e25c39c6cb"}, {file = "coverage-7.2.7-cp39-cp39-win_amd64.whl", hash = "sha256:eb393e5ebc85245347950143969b241d08b52b88a3dc39479822e073a1a8eb27"}, {file = "coverage-7.2.7-pp37.pp38.pp39-none-any.whl", hash = 
"sha256:b7b4c971f05e6ae490fef852c218b0e79d4e52f79ef0c8475566584a8fb3e01d"}, {file = "coverage-7.2.7.tar.gz", hash = "sha256:924d94291ca674905fe9481f12294eb11f2d3d3fd1adb20314ba89e94f44ed59"}, ] [[package]] name = "distlib" version = "0.3.7" summary = "Distribution utilities" files = [ {file = "distlib-0.3.7-py2.py3-none-any.whl", hash = "sha256:2e24928bc811348f0feb63014e97aaae3037f2cf48712d51ae61df7fd6075057"}, {file = "distlib-0.3.7.tar.gz", hash = "sha256:9dafe54b34a028eafd95039d5e5d4851a13734540f1331060d31c9916e7147a8"}, ] [[package]] name = "exceptiongroup" version = "1.1.2" requires_python = ">=3.7" summary = "Backport of PEP 654 (exception groups)" files = [ {file = "exceptiongroup-1.1.2-py3-none-any.whl", hash = "sha256:e346e69d186172ca7cf029c8c1d16235aa0e04035e5750b4b95039e65204328f"}, {file = "exceptiongroup-1.1.2.tar.gz", hash = "sha256:12c3e887d6485d16943a309616de20ae5582633e0a2eda17f4e10fd61c1e8af5"}, ] [[package]] name = "filelock" version = "3.12.2" requires_python = ">=3.7" summary = "A platform independent file lock." files = [ {file = "filelock-3.12.2-py3-none-any.whl", hash = "sha256:cbb791cdea2a72f23da6ac5b5269ab0a0d161e9ef0100e653b69049a7706d1ec"}, {file = "filelock-3.12.2.tar.gz", hash = "sha256:002740518d8aa59a26b0c76e10fb8c6e15eae825d34b6fdf670333fd7b938d81"}, ] [[package]] name = "flake8" version = "5.0.4" requires_python = ">=3.6.1" summary = "the modular source code checker: pep8 pyflakes and co" dependencies = [ "importlib-metadata<4.3,>=1.1.0; python_version < \"3.8\"", "mccabe<0.8.0,>=0.7.0", "pycodestyle<2.10.0,>=2.9.0", "pyflakes<2.6.0,>=2.5.0", ] files = [ {file = "flake8-5.0.4-py2.py3-none-any.whl", hash = "sha256:7a1cf6b73744f5806ab95e526f6f0d8c01c66d7bbe349562d22dfca20610b248"}, {file = "flake8-5.0.4.tar.gz", hash = "sha256:6fbe320aad8d6b95cec8b8e47bc933004678dc63095be98528b7bdd2a9f510db"}, ] [[package]] name = "importlib-metadata" version = "4.2.0" requires_python = ">=3.6" summary = "Read metadata from Python packages" dependencies = [ "typing-extensions>=3.6.4; python_version < \"3.8\"", "zipp>=0.5", ] files = [ {file = "importlib_metadata-4.2.0-py3-none-any.whl", hash = "sha256:057e92c15bc8d9e8109738a48db0ccb31b4d9d5cfbee5a8670879a30be66304b"}, {file = "importlib_metadata-4.2.0.tar.gz", hash = "sha256:b7e52a1f8dec14a75ea73e0891f3060099ca1d8e6a462a4dff11c3e119ea1b31"}, ] [[package]] name = "iniconfig" version = "2.0.0" requires_python = ">=3.7" summary = "brain-dead simple config-ini parsing" files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] [[package]] name = "isort" version = "5.11.5" requires_python = ">=3.7.0" summary = "A Python utility / library to sort Python imports." 
files = [ {file = "isort-5.11.5-py3-none-any.whl", hash = "sha256:ba1d72fb2595a01c7895a5128f9585a5cc4b6d395f1c8d514989b9a7eb2a8746"}, {file = "isort-5.11.5.tar.gz", hash = "sha256:6be1f76a507cb2ecf16c7cf14a37e41609ca082330be4e3436a18ef74add55db"}, ] [[package]] name = "mccabe" version = "0.7.0" requires_python = ">=3.6" summary = "McCabe checker, plugin for flake8" files = [ {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, ] [[package]] name = "mypy-extensions" version = "1.0.0" requires_python = ">=3.5" summary = "Type system extensions for programs checked with the mypy type checker." files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] name = "packaging" version = "23.1" requires_python = ">=3.7" summary = "Core utilities for Python packages" files = [ {file = "packaging-23.1-py3-none-any.whl", hash = "sha256:994793af429502c4ea2ebf6bf664629d07c1a9fe974af92966e4b8d2df7edc61"}, {file = "packaging-23.1.tar.gz", hash = "sha256:a392980d2b6cffa644431898be54b0045151319d1e7ec34f0cfed48767dd334f"}, ] [[package]] name = "pathspec" version = "0.11.2" requires_python = ">=3.7" summary = "Utility library for gitignore style pattern matching of file paths." files = [ {file = "pathspec-0.11.2-py3-none-any.whl", hash = "sha256:1d6ed233af05e679efb96b1851550ea95bbb64b7c490b0f5aa52996c11e92a20"}, {file = "pathspec-0.11.2.tar.gz", hash = "sha256:e0d8d0ac2f12da61956eb2306b69f9469b42f4deb0f3cb6ed47b9cce9996ced3"}, ] [[package]] name = "platformdirs" version = "2.6.2" requires_python = ">=3.7" summary = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
dependencies = [ "typing-extensions>=4.4; python_version < \"3.8\"", ] files = [ {file = "platformdirs-2.6.2-py3-none-any.whl", hash = "sha256:83c8f6d04389165de7c9b6f0c682439697887bca0aa2f1c87ef1826be3584490"}, {file = "platformdirs-2.6.2.tar.gz", hash = "sha256:e1fea1fe471b9ff8332e229df3cb7de4f53eeea4998d3b6bfff542115e998bd2"}, ] [[package]] name = "pluggy" version = "1.2.0" requires_python = ">=3.7" summary = "plugin and hook calling mechanisms for python" dependencies = [ "importlib-metadata>=0.12; python_version < \"3.8\"", ] files = [ {file = "pluggy-1.2.0-py3-none-any.whl", hash = "sha256:c2fd55a7d7a3863cba1a013e4e2414658b1d07b6bc57b3919e0c63c9abb99849"}, {file = "pluggy-1.2.0.tar.gz", hash = "sha256:d12f0c4b579b15f5e054301bb226ee85eeeba08ffec228092f8defbaa3a4c4b3"}, ] [[package]] name = "py" version = "1.11.0" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" summary = "library with cross-python path, ini-parsing, io, code, log facilities" files = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] [[package]] name = "pycodestyle" version = "2.9.1" requires_python = ">=3.6" summary = "Python style guide checker" files = [ {file = "pycodestyle-2.9.1-py2.py3-none-any.whl", hash = "sha256:d1735fc58b418fd7c5f658d28d943854f8a849b01a5d0a1e6f3f3fdd0166804b"}, {file = "pycodestyle-2.9.1.tar.gz", hash = "sha256:2c9607871d58c76354b697b42f5d57e1ada7d261c261efac224b664affdc5785"}, ] [[package]] name = "pyflakes" version = "2.5.0" requires_python = ">=3.6" summary = "passive checker of Python programs" files = [ {file = "pyflakes-2.5.0-py2.py3-none-any.whl", hash = "sha256:4579f67d887f804e67edb544428f264b7b24f435b263c4614f384135cea553d2"}, {file = "pyflakes-2.5.0.tar.gz", hash = "sha256:491feb020dca48ccc562a8c0cbe8df07ee13078df59813b83959cbdada312ea3"}, ] [[package]] name = "pytest" version = "7.4.0" requires_python = ">=3.7" summary = "pytest: simple powerful testing with Python" dependencies = [ "colorama; sys_platform == \"win32\"", "exceptiongroup>=1.0.0rc8; python_version < \"3.11\"", "importlib-metadata>=0.12; python_version < \"3.8\"", "iniconfig", "packaging", "pluggy<2.0,>=0.12", "tomli>=1.0.0; python_version < \"3.11\"", ] files = [ {file = "pytest-7.4.0-py3-none-any.whl", hash = "sha256:78bf16451a2eb8c7a2ea98e32dc119fd2aa758f1d5d66dbf0a59d69a3969df32"}, {file = "pytest-7.4.0.tar.gz", hash = "sha256:b4bf8c45bd59934ed84001ad51e11b4ee40d40a1229d2c79f9c592b0a3f6bd8a"}, ] [[package]] name = "pytest-asyncio" version = "0.21.1" requires_python = ">=3.7" summary = "Pytest support for asyncio" dependencies = [ "pytest>=7.0.0", "typing-extensions>=3.7.2; python_version < \"3.8\"", ] files = [ {file = "pytest-asyncio-0.21.1.tar.gz", hash = "sha256:40a7eae6dded22c7b604986855ea48400ab15b069ae38116e8c01238e9eeb64d"}, {file = "pytest_asyncio-0.21.1-py3-none-any.whl", hash = "sha256:8666c1c8ac02631d7c51ba282e0c69a8a452b211ffedf2599099845da5c5c37b"}, ] [[package]] name = "six" version = "1.16.0" requires_python = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" summary = "Python 2 and 3 compatibility utilities" files = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] [[package]] name = "tomli" version = "2.0.1" 
requires_python = ">=3.7" summary = "A lil' TOML parser" files = [ {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, ] [[package]] name = "tox" version = "3.28.0" requires_python = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" summary = "tox is a generic virtualenv management and test command line tool" dependencies = [ "colorama>=0.4.1; platform_system == \"Windows\"", "filelock>=3.0.0", "importlib-metadata>=0.12; python_version < \"3.8\"", "packaging>=14", "pluggy>=0.12.0", "py>=1.4.17", "six>=1.14.0", "tomli>=2.0.1; python_version >= \"3.7\" and python_version < \"3.11\"", "virtualenv!=20.0.0,!=20.0.1,!=20.0.2,!=20.0.3,!=20.0.4,!=20.0.5,!=20.0.6,!=20.0.7,>=16.0.0", ] files = [ {file = "tox-3.28.0-py2.py3-none-any.whl", hash = "sha256:57b5ab7e8bb3074edc3c0c0b4b192a4f3799d3723b2c5b76f1fa9f2d40316eea"}, {file = "tox-3.28.0.tar.gz", hash = "sha256:d0d28f3fe6d6d7195c27f8b054c3e99d5451952b54abdae673b71609a581f640"}, ] [[package]] name = "typed-ast" version = "1.5.5" requires_python = ">=3.6" summary = "a fork of Python 2 and 3 ast modules with type comment support" files = [ {file = "typed_ast-1.5.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4bc1efe0ce3ffb74784e06460f01a223ac1f6ab31c6bc0376a21184bf5aabe3b"}, {file = "typed_ast-1.5.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5f7a8c46a8b333f71abd61d7ab9255440d4a588f34a21f126bbfc95f6049e686"}, {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:597fc66b4162f959ee6a96b978c0435bd63791e31e4f410622d19f1686d5e769"}, {file = "typed_ast-1.5.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d41b7a686ce653e06c2609075d397ebd5b969d821b9797d029fccd71fdec8e04"}, {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:5fe83a9a44c4ce67c796a1b466c270c1272e176603d5e06f6afbc101a572859d"}, {file = "typed_ast-1.5.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:d5c0c112a74c0e5db2c75882a0adf3133adedcdbfd8cf7c9d6ed77365ab90a1d"}, {file = "typed_ast-1.5.5-cp310-cp310-win_amd64.whl", hash = "sha256:e1a976ed4cc2d71bb073e1b2a250892a6e968ff02aa14c1f40eba4f365ffec02"}, {file = "typed_ast-1.5.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c631da9710271cb67b08bd3f3813b7af7f4c69c319b75475436fcab8c3d21bee"}, {file = "typed_ast-1.5.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:b445c2abfecab89a932b20bd8261488d574591173d07827c1eda32c457358b18"}, {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cc95ffaaab2be3b25eb938779e43f513e0e538a84dd14a5d844b8f2932593d88"}, {file = "typed_ast-1.5.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:61443214d9b4c660dcf4b5307f15c12cb30bdfe9588ce6158f4a005baeb167b2"}, {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:6eb936d107e4d474940469e8ec5b380c9b329b5f08b78282d46baeebd3692dc9"}, {file = "typed_ast-1.5.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e48bf27022897577d8479eaed64701ecaf0467182448bd95759883300ca818c8"}, {file = "typed_ast-1.5.5-cp311-cp311-win_amd64.whl", hash = "sha256:83509f9324011c9a39faaef0922c6f720f9623afe3fe220b6d0b15638247206b"}, {file = "typed_ast-1.5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:2188bc33d85951ea4ddad55d2b35598b2709d122c11c75cffd529fbc9965508e"}, {file = 
"typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0635900d16ae133cab3b26c607586131269f88266954eb04ec31535c9a12ef1e"}, {file = "typed_ast-1.5.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:57bfc3cf35a0f2fdf0a88a3044aafaec1d2f24d8ae8cd87c4f58d615fb5b6311"}, {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:fe58ef6a764de7b4b36edfc8592641f56e69b7163bba9f9c8089838ee596bfb2"}, {file = "typed_ast-1.5.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:d09d930c2d1d621f717bb217bf1fe2584616febb5138d9b3e8cdd26506c3f6d4"}, {file = "typed_ast-1.5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:d40c10326893ecab8a80a53039164a224984339b2c32a6baf55ecbd5b1df6431"}, {file = "typed_ast-1.5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:fd946abf3c31fb50eee07451a6aedbfff912fcd13cf357363f5b4e834cc5e71a"}, {file = "typed_ast-1.5.5-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ed4a1a42df8a3dfb6b40c3d2de109e935949f2f66b19703eafade03173f8f437"}, {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:045f9930a1550d9352464e5149710d56a2aed23a2ffe78946478f7b5416f1ede"}, {file = "typed_ast-1.5.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:381eed9c95484ceef5ced626355fdc0765ab51d8553fec08661dce654a935db4"}, {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:bfd39a41c0ef6f31684daff53befddae608f9daf6957140228a08e51f312d7e6"}, {file = "typed_ast-1.5.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8c524eb3024edcc04e288db9541fe1f438f82d281e591c548903d5b77ad1ddd4"}, {file = "typed_ast-1.5.5-cp38-cp38-win_amd64.whl", hash = "sha256:7f58fabdde8dcbe764cef5e1a7fcb440f2463c1bbbec1cf2a86ca7bc1f95184b"}, {file = "typed_ast-1.5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:042eb665ff6bf020dd2243307d11ed626306b82812aba21836096d229fdc6a10"}, {file = "typed_ast-1.5.5-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:622e4a006472b05cf6ef7f9f2636edc51bda670b7bbffa18d26b255269d3d814"}, {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1efebbbf4604ad1283e963e8915daa240cb4bf5067053cf2f0baadc4d4fb51b8"}, {file = "typed_ast-1.5.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0aefdd66f1784c58f65b502b6cf8b121544680456d1cebbd300c2c813899274"}, {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:48074261a842acf825af1968cd912f6f21357316080ebaca5f19abbb11690c8a"}, {file = "typed_ast-1.5.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:429ae404f69dc94b9361bb62291885894b7c6fb4640d561179548c849f8492ba"}, {file = "typed_ast-1.5.5-cp39-cp39-win_amd64.whl", hash = "sha256:335f22ccb244da2b5c296e6f96b06ee9bed46526db0de38d2f0e5a6597b81155"}, {file = "typed_ast-1.5.5.tar.gz", hash = "sha256:94282f7a354f36ef5dbce0ef3467ebf6a258e370ab33d5b40c249fa996e590dd"}, ] [[package]] name = "typing-extensions" version = "4.7.1" requires_python = ">=3.7" summary = "Backported and Experimental Type Hints for Python 3.7+" files = [ {file = "typing_extensions-4.7.1-py3-none-any.whl", hash = "sha256:440d5dd3af93b060174bf433bccd69b0babc3b15b1a8dca43789fd7f61514b36"}, {file = "typing_extensions-4.7.1.tar.gz", hash = "sha256:b75ddc264f0ba5615db7ba217daeb99701ad295353c45f9e95963337ceeeffb2"}, ] [[package]] name = "virtualenv" version = "20.16.2" requires_python = ">=3.6" summary = "Virtual Python Environment builder" dependencies = [ "distlib<1,>=0.3.1", 
"filelock<4,>=3.2", "importlib-metadata>=0.12; python_version < \"3.8\"", "platformdirs<3,>=2", ] files = [ {file = "virtualenv-20.16.2-py2.py3-none-any.whl", hash = "sha256:635b272a8e2f77cb051946f46c60a54ace3cb5e25568228bd6b57fc70eca9ff3"}, {file = "virtualenv-20.16.2.tar.gz", hash = "sha256:0ef5be6d07181946891f5abc8047fda8bc2f0b4b9bf222c64e6e8963baee76db"}, ] [[package]] name = "zipp" version = "3.15.0" requires_python = ">=3.7" summary = "Backport of pathlib-compatible object wrapper for zip files" files = [ {file = "zipp-3.15.0-py3-none-any.whl", hash = "sha256:48904fc76a60e542af151aded95726c1a5c34ed43ab4134b597665c86d7ad556"}, {file = "zipp-3.15.0.tar.gz", hash = "sha256:112929ad649da941c23de50f356a2b5570c954b65150642bccdd66bf194d224b"}, ] aiofiles-23.2.1/pyproject.toml000066400000000000000000000031241446472733600163260ustar00rootroot00000000000000[project] name = "aiofiles" version = "23.2.1" description = "File support for asyncio." authors = [ {name = "Tin Tvrtkovic", email = "tinchester@gmail.com"}, ] dependencies = [] requires-python = ">=3.7" readme = "README.md" license = {text = "Apache-2.0"} classifiers = [ "Development Status :: 5 - Production/Stable", "License :: OSI Approved :: Apache Software License", "Operating System :: OS Independent", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Framework :: AsyncIO", ] [project.urls] Changelog = "https://github.com/Tinche/aiofiles#history" "Bug Tracker" = "https://github.com/Tinche/aiofiles/issues" repository = "https://github.com/Tinche/aiofiles" [tool.pdm.dev-dependencies] test = [ "pytest>=7.2.0", "pytest-asyncio>=0.19.0", "coverage>=6.4.4", "tox>=3.25.1", ] lint = [ "black>=22.8.0", "flake8>=5.0.4", "isort==5.11.5", ] [build-system] requires = ["hatchling"] build-backend = "hatchling.build" [tool.coverage.run] parallel = true source_pkgs = ["aiofiles"] [tool.coverage.paths] source = [ "src", ".tox/*/lib/python*/site-packages", ".tox/pypy*/site-packages", ] [tool.black] skip-magic-trailing-comma = true [tool.isort] profile = "black" known_first_party = ["aiofiles"] aiofiles-23.2.1/src/000077500000000000000000000000001446472733600142015ustar00rootroot00000000000000aiofiles-23.2.1/src/aiofiles/000077500000000000000000000000001446472733600157745ustar00rootroot00000000000000aiofiles-23.2.1/src/aiofiles/__init__.py000066400000000000000000000005301446472733600201030ustar00rootroot00000000000000"""Utilities for asyncio-friendly file handling.""" from .threadpool import ( open, stdin, stdout, stderr, stdin_bytes, stdout_bytes, stderr_bytes, ) from . 
import tempfile __all__ = [ "open", "tempfile", "stdin", "stdout", "stderr", "stdin_bytes", "stdout_bytes", "stderr_bytes", ] aiofiles-23.2.1/src/aiofiles/base.py000066400000000000000000000051741446472733600172670ustar00rootroot00000000000000"""Various base classes.""" from types import coroutine from collections.abc import Coroutine from asyncio import get_running_loop class AsyncBase: def __init__(self, file, loop, executor): self._file = file self._executor = executor self._ref_loop = loop @property def _loop(self): return self._ref_loop or get_running_loop() def __aiter__(self): """We are our own iterator.""" return self def __repr__(self): return super().__repr__() + " wrapping " + repr(self._file) async def __anext__(self): """Simulate normal file iteration.""" line = await self.readline() if line: return line else: raise StopAsyncIteration class AsyncIndirectBase(AsyncBase): def __init__(self, name, loop, executor, indirect): self._indirect = indirect self._name = name super().__init__(None, loop, executor) @property def _file(self): return self._indirect() @_file.setter def _file(self, v): pass # discard writes class _ContextManager(Coroutine): __slots__ = ("_coro", "_obj") def __init__(self, coro): self._coro = coro self._obj = None def send(self, value): return self._coro.send(value) def throw(self, typ, val=None, tb=None): if val is None: return self._coro.throw(typ) elif tb is None: return self._coro.throw(typ, val) else: return self._coro.throw(typ, val, tb) def close(self): return self._coro.close() @property def gi_frame(self): return self._coro.gi_frame @property def gi_running(self): return self._coro.gi_running @property def gi_code(self): return self._coro.gi_code def __next__(self): return self.send(None) @coroutine def __iter__(self): resp = yield from self._coro return resp def __await__(self): resp = yield from self._coro return resp async def __anext__(self): resp = await self._coro return resp async def __aenter__(self): self._obj = await self._coro return self._obj async def __aexit__(self, exc_type, exc, tb): self._obj.close() self._obj = None class AiofilesContextManager(_ContextManager): """An adjusted async context manager for aiofiles.""" async def __aexit__(self, exc_type, exc_val, exc_tb): await get_running_loop().run_in_executor( None, self._obj._file.__exit__, exc_type, exc_val, exc_tb ) self._obj = None aiofiles-23.2.1/src/aiofiles/os.py000066400000000000000000000017121446472733600167700ustar00rootroot00000000000000"""Async executor versions of file functions from the os module.""" import os from . 
import ospath as path from .ospath import wrap __all__ = [ "path", "stat", "statvfs", "rename", "renames", "replace", "remove", "unlink", "mkdir", "makedirs", "rmdir", "removedirs", "link", "symlink", "readlink", "listdir", "scandir", "access", "sendfile", "wrap", ] stat = wrap(os.stat) rename = wrap(os.rename) renames = wrap(os.renames) replace = wrap(os.replace) remove = wrap(os.remove) unlink = wrap(os.unlink) mkdir = wrap(os.mkdir) makedirs = wrap(os.makedirs) rmdir = wrap(os.rmdir) removedirs = wrap(os.removedirs) link = wrap(os.link) symlink = wrap(os.symlink) readlink = wrap(os.readlink) listdir = wrap(os.listdir) scandir = wrap(os.scandir) access = wrap(os.access) if hasattr(os, "sendfile"): sendfile = wrap(os.sendfile) if hasattr(os, "statvfs"): statvfs = wrap(os.statvfs) aiofiles-23.2.1/src/aiofiles/ospath.py000066400000000000000000000013741446472733600176510ustar00rootroot00000000000000"""Async executor versions of file functions from the os.path module.""" import asyncio from functools import partial, wraps from os import path def wrap(func): @wraps(func) async def run(*args, loop=None, executor=None, **kwargs): if loop is None: loop = asyncio.get_running_loop() pfunc = partial(func, *args, **kwargs) return await loop.run_in_executor(executor, pfunc) return run exists = wrap(path.exists) isfile = wrap(path.isfile) isdir = wrap(path.isdir) islink = wrap(path.islink) ismount = wrap(path.ismount) getsize = wrap(path.getsize) getmtime = wrap(path.getmtime) getatime = wrap(path.getatime) getctime = wrap(path.getctime) samefile = wrap(path.samefile) sameopenfile = wrap(path.sameopenfile) aiofiles-23.2.1/src/aiofiles/tempfile/000077500000000000000000000000001446472733600176015ustar00rootroot00000000000000aiofiles-23.2.1/src/aiofiles/tempfile/__init__.py000066400000000000000000000240041446472733600217120ustar00rootroot00000000000000import asyncio from functools import partial, singledispatch from io import BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOBase from tempfile import NamedTemporaryFile as syncNamedTemporaryFile from tempfile import SpooledTemporaryFile as syncSpooledTemporaryFile from tempfile import TemporaryDirectory as syncTemporaryDirectory from tempfile import TemporaryFile as syncTemporaryFile from tempfile import _TemporaryFileWrapper as syncTemporaryFileWrapper from ..base import AiofilesContextManager from ..threadpool.binary import AsyncBufferedIOBase, AsyncBufferedReader, AsyncFileIO from ..threadpool.text import AsyncTextIOWrapper from .temptypes import AsyncSpooledTemporaryFile, AsyncTemporaryDirectory import sys __all__ = [ "NamedTemporaryFile", "TemporaryFile", "SpooledTemporaryFile", "TemporaryDirectory", ] # ================================================================ # Public methods for async open and return of temp file/directory # objects with async interface # ================================================================ if sys.version_info >= (3, 12): def NamedTemporaryFile( mode="w+b", buffering=-1, encoding=None, newline=None, suffix=None, prefix=None, dir=None, delete=True, delete_on_close=True, loop=None, executor=None, ): """Async open a named temporary file""" return AiofilesContextManager( _temporary_file( named=True, mode=mode, buffering=buffering, encoding=encoding, newline=newline, suffix=suffix, prefix=prefix, dir=dir, delete=delete, delete_on_close=delete_on_close, loop=loop, executor=executor, ) ) else: def NamedTemporaryFile( mode="w+b", buffering=-1, encoding=None, newline=None, suffix=None, prefix=None, dir=None, 
delete=True, loop=None, executor=None, ): """Async open a named temporary file""" return AiofilesContextManager( _temporary_file( named=True, mode=mode, buffering=buffering, encoding=encoding, newline=newline, suffix=suffix, prefix=prefix, dir=dir, delete=delete, loop=loop, executor=executor, ) ) def TemporaryFile( mode="w+b", buffering=-1, encoding=None, newline=None, suffix=None, prefix=None, dir=None, loop=None, executor=None, ): """Async open an unnamed temporary file""" return AiofilesContextManager( _temporary_file( named=False, mode=mode, buffering=buffering, encoding=encoding, newline=newline, suffix=suffix, prefix=prefix, dir=dir, loop=loop, executor=executor, ) ) def SpooledTemporaryFile( max_size=0, mode="w+b", buffering=-1, encoding=None, newline=None, suffix=None, prefix=None, dir=None, loop=None, executor=None, ): """Async open a spooled temporary file""" return AiofilesContextManager( _spooled_temporary_file( max_size=max_size, mode=mode, buffering=buffering, encoding=encoding, newline=newline, suffix=suffix, prefix=prefix, dir=dir, loop=loop, executor=executor, ) ) def TemporaryDirectory(suffix=None, prefix=None, dir=None, loop=None, executor=None): """Async open a temporary directory""" return AiofilesContextManagerTempDir( _temporary_directory( suffix=suffix, prefix=prefix, dir=dir, loop=loop, executor=executor ) ) # ========================================================= # Internal coroutines to open new temp files/directories # ========================================================= if sys.version_info >= (3, 12): async def _temporary_file( named=True, mode="w+b", buffering=-1, encoding=None, newline=None, suffix=None, prefix=None, dir=None, delete=True, delete_on_close=True, loop=None, executor=None, max_size=0, ): """Async method to open a temporary file with async interface""" if loop is None: loop = asyncio.get_running_loop() if named: cb = partial( syncNamedTemporaryFile, mode=mode, buffering=buffering, encoding=encoding, newline=newline, suffix=suffix, prefix=prefix, dir=dir, delete=delete, delete_on_close=delete_on_close, ) else: cb = partial( syncTemporaryFile, mode=mode, buffering=buffering, encoding=encoding, newline=newline, suffix=suffix, prefix=prefix, dir=dir, ) f = await loop.run_in_executor(executor, cb) # Wrap based on type of underlying IO object if type(f) is syncTemporaryFileWrapper: # _TemporaryFileWrapper was used (named files) result = wrap(f.file, f, loop=loop, executor=executor) result._closer = f._closer return result else: # IO object was returned directly without wrapper return wrap(f, f, loop=loop, executor=executor) else: async def _temporary_file( named=True, mode="w+b", buffering=-1, encoding=None, newline=None, suffix=None, prefix=None, dir=None, delete=True, loop=None, executor=None, max_size=0, ): """Async method to open a temporary file with async interface""" if loop is None: loop = asyncio.get_running_loop() if named: cb = partial( syncNamedTemporaryFile, mode=mode, buffering=buffering, encoding=encoding, newline=newline, suffix=suffix, prefix=prefix, dir=dir, delete=delete, ) else: cb = partial( syncTemporaryFile, mode=mode, buffering=buffering, encoding=encoding, newline=newline, suffix=suffix, prefix=prefix, dir=dir, ) f = await loop.run_in_executor(executor, cb) # Wrap based on type of underlying IO object if type(f) is syncTemporaryFileWrapper: # _TemporaryFileWrapper was used (named files) result = wrap(f.file, f, loop=loop, executor=executor) # add delete property result.delete = f.delete return result else: # IO object 
was returned directly without wrapper return wrap(f, f, loop=loop, executor=executor) async def _spooled_temporary_file( max_size=0, mode="w+b", buffering=-1, encoding=None, newline=None, suffix=None, prefix=None, dir=None, loop=None, executor=None, ): """Open a spooled temporary file with async interface""" if loop is None: loop = asyncio.get_running_loop() cb = partial( syncSpooledTemporaryFile, max_size=max_size, mode=mode, buffering=buffering, encoding=encoding, newline=newline, suffix=suffix, prefix=prefix, dir=dir, ) f = await loop.run_in_executor(executor, cb) # Single interface provided by SpooledTemporaryFile for all modes return AsyncSpooledTemporaryFile(f, loop=loop, executor=executor) async def _temporary_directory( suffix=None, prefix=None, dir=None, loop=None, executor=None ): """Async method to open a temporary directory with async interface""" if loop is None: loop = asyncio.get_running_loop() cb = partial(syncTemporaryDirectory, suffix, prefix, dir) f = await loop.run_in_executor(executor, cb) return AsyncTemporaryDirectory(f, loop=loop, executor=executor) class AiofilesContextManagerTempDir(AiofilesContextManager): """With returns the directory location, not the object (matching sync lib)""" async def __aenter__(self): self._obj = await self._coro return self._obj.name @singledispatch def wrap(base_io_obj, file, *, loop=None, executor=None): """Wrap the object with interface based on type of underlying IO""" raise TypeError("Unsupported IO type: {}".format(base_io_obj)) @wrap.register(TextIOBase) def _(base_io_obj, file, *, loop=None, executor=None): return AsyncTextIOWrapper(file, loop=loop, executor=executor) @wrap.register(BufferedWriter) def _(base_io_obj, file, *, loop=None, executor=None): return AsyncBufferedIOBase(file, loop=loop, executor=executor) @wrap.register(BufferedReader) @wrap.register(BufferedRandom) def _(base_io_obj, file, *, loop=None, executor=None): return AsyncBufferedReader(file, loop=loop, executor=executor) @wrap.register(FileIO) def _(base_io_obj, file, *, loop=None, executor=None): return AsyncFileIO(file, loop=loop, executor=executor) aiofiles-23.2.1/src/aiofiles/tempfile/temptypes.py000066400000000000000000000040551446472733600222110ustar00rootroot00000000000000"""Async wrappers for spooled temp files and temp directory objects""" from functools import partial from ..base import AsyncBase from ..threadpool.utils import ( cond_delegate_to_executor, delegate_to_executor, proxy_property_directly, ) @delegate_to_executor("fileno", "rollover") @cond_delegate_to_executor( "close", "flush", "isatty", "read", "readline", "readlines", "seek", "tell", "truncate", ) @proxy_property_directly("closed", "encoding", "mode", "name", "newlines") class AsyncSpooledTemporaryFile(AsyncBase): """Async wrapper for SpooledTemporaryFile class""" async def _check(self): if self._file._rolled: return max_size = self._file._max_size if max_size and self._file.tell() > max_size: await self.rollover() async def write(self, s): """Implementation to anticipate rollover""" if self._file._rolled: cb = partial(self._file.write, s) return await self._loop.run_in_executor(self._executor, cb) else: file = self._file._file # reference underlying base IO object rv = file.write(s) await self._check() return rv async def writelines(self, iterable): """Implementation to anticipate rollover""" if self._file._rolled: cb = partial(self._file.writelines, iterable) return await self._loop.run_in_executor(self._executor, cb) else: file = self._file._file # reference underlying base IO 
object rv = file.writelines(iterable) await self._check() return rv @delegate_to_executor("cleanup") @proxy_property_directly("name") class AsyncTemporaryDirectory: """Async wrapper for TemporaryDirectory class""" def __init__(self, file, loop, executor): self._file = file self._loop = loop self._executor = executor async def close(self): await self.cleanup() aiofiles-23.2.1/src/aiofiles/threadpool/000077500000000000000000000000001446472733600201355ustar00rootroot00000000000000aiofiles-23.2.1/src/aiofiles/threadpool/__init__.py000066400000000000000000000061431446472733600222520ustar00rootroot00000000000000"""Handle files using a thread pool executor.""" import asyncio import sys from functools import partial, singledispatch from io import ( BufferedIOBase, BufferedRandom, BufferedReader, BufferedWriter, FileIO, TextIOBase, ) from types import coroutine from ..base import AiofilesContextManager from .binary import ( AsyncBufferedIOBase, AsyncBufferedReader, AsyncFileIO, AsyncIndirectBufferedIOBase, ) from .text import AsyncTextIndirectIOWrapper, AsyncTextIOWrapper sync_open = open __all__ = ( "open", "stdin", "stdout", "stderr", "stdin_bytes", "stdout_bytes", "stderr_bytes", ) def open( file, mode="r", buffering=-1, encoding=None, errors=None, newline=None, closefd=True, opener=None, *, loop=None, executor=None, ): return AiofilesContextManager( _open( file, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, closefd=closefd, opener=opener, loop=loop, executor=executor, ) ) @coroutine def _open( file, mode="r", buffering=-1, encoding=None, errors=None, newline=None, closefd=True, opener=None, *, loop=None, executor=None, ): """Open an asyncio file.""" if loop is None: loop = asyncio.get_running_loop() cb = partial( sync_open, file, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, closefd=closefd, opener=opener, ) f = yield from loop.run_in_executor(executor, cb) return wrap(f, loop=loop, executor=executor) @singledispatch def wrap(file, *, loop=None, executor=None): raise TypeError("Unsupported io type: {}.".format(file)) @wrap.register(TextIOBase) def _(file, *, loop=None, executor=None): return AsyncTextIOWrapper(file, loop=loop, executor=executor) @wrap.register(BufferedWriter) @wrap.register(BufferedIOBase) def _(file, *, loop=None, executor=None): return AsyncBufferedIOBase(file, loop=loop, executor=executor) @wrap.register(BufferedReader) @wrap.register(BufferedRandom) def _(file, *, loop=None, executor=None): return AsyncBufferedReader(file, loop=loop, executor=executor) @wrap.register(FileIO) def _(file, *, loop=None, executor=None): return AsyncFileIO(file, loop=loop, executor=executor) stdin = AsyncTextIndirectIOWrapper("sys.stdin", None, None, indirect=lambda: sys.stdin) stdout = AsyncTextIndirectIOWrapper( "sys.stdout", None, None, indirect=lambda: sys.stdout ) stderr = AsyncTextIndirectIOWrapper( "sys.stderr", None, None, indirect=lambda: sys.stderr ) stdin_bytes = AsyncIndirectBufferedIOBase( "sys.stdin.buffer", None, None, indirect=lambda: sys.stdin.buffer ) stdout_bytes = AsyncIndirectBufferedIOBase( "sys.stdout.buffer", None, None, indirect=lambda: sys.stdout.buffer ) stderr_bytes = AsyncIndirectBufferedIOBase( "sys.stderr.buffer", None, None, indirect=lambda: sys.stderr.buffer ) aiofiles-23.2.1/src/aiofiles/threadpool/binary.py000066400000000000000000000044131446472733600217750ustar00rootroot00000000000000from ..base import AsyncBase, AsyncIndirectBase from .utils import delegate_to_executor, 
proxy_method_directly, proxy_property_directly @delegate_to_executor( "close", "flush", "isatty", "read", "read1", "readinto", "readline", "readlines", "seek", "seekable", "tell", "truncate", "writable", "write", "writelines", ) @proxy_method_directly("detach", "fileno", "readable") @proxy_property_directly("closed", "raw", "name", "mode") class AsyncBufferedIOBase(AsyncBase): """The asyncio executor version of io.BufferedWriter and BufferedIOBase.""" @delegate_to_executor("peek") class AsyncBufferedReader(AsyncBufferedIOBase): """The asyncio executor version of io.BufferedReader and Random.""" @delegate_to_executor( "close", "flush", "isatty", "read", "readall", "readinto", "readline", "readlines", "seek", "seekable", "tell", "truncate", "writable", "write", "writelines", ) @proxy_method_directly("fileno", "readable") @proxy_property_directly("closed", "name", "mode") class AsyncFileIO(AsyncBase): """The asyncio executor version of io.FileIO.""" @delegate_to_executor( "close", "flush", "isatty", "read", "read1", "readinto", "readline", "readlines", "seek", "seekable", "tell", "truncate", "writable", "write", "writelines", ) @proxy_method_directly("detach", "fileno", "readable") @proxy_property_directly("closed", "raw", "name", "mode") class AsyncIndirectBufferedIOBase(AsyncIndirectBase): """The indirect asyncio executor version of io.BufferedWriter and BufferedIOBase.""" @delegate_to_executor("peek") class AsyncIndirectBufferedReader(AsyncIndirectBufferedIOBase): """The indirect asyncio executor version of io.BufferedReader and Random.""" @delegate_to_executor( "close", "flush", "isatty", "read", "readall", "readinto", "readline", "readlines", "seek", "seekable", "tell", "truncate", "writable", "write", "writelines", ) @proxy_method_directly("fileno", "readable") @proxy_property_directly("closed", "name", "mode") class AsyncIndirectFileIO(AsyncIndirectBase): """The indirect asyncio executor version of io.FileIO.""" aiofiles-23.2.1/src/aiofiles/threadpool/text.py000066400000000000000000000023071446472733600214750ustar00rootroot00000000000000from ..base import AsyncBase, AsyncIndirectBase from .utils import delegate_to_executor, proxy_method_directly, proxy_property_directly @delegate_to_executor( "close", "flush", "isatty", "read", "readable", "readline", "readlines", "seek", "seekable", "tell", "truncate", "write", "writable", "writelines", ) @proxy_method_directly("detach", "fileno", "readable") @proxy_property_directly( "buffer", "closed", "encoding", "errors", "line_buffering", "newlines", "name", "mode", ) class AsyncTextIOWrapper(AsyncBase): """The asyncio executor version of io.TextIOWrapper.""" @delegate_to_executor( "close", "flush", "isatty", "read", "readable", "readline", "readlines", "seek", "seekable", "tell", "truncate", "write", "writable", "writelines", ) @proxy_method_directly("detach", "fileno", "readable") @proxy_property_directly( "buffer", "closed", "encoding", "errors", "line_buffering", "newlines", "name", "mode", ) class AsyncTextIndirectIOWrapper(AsyncIndirectBase): """The indirect asyncio executor version of io.TextIOWrapper.""" aiofiles-23.2.1/src/aiofiles/threadpool/utils.py000066400000000000000000000035141446472733600216520ustar00rootroot00000000000000import functools def delegate_to_executor(*attrs): def cls_builder(cls): for attr_name in attrs: setattr(cls, attr_name, _make_delegate_method(attr_name)) return cls return cls_builder def proxy_method_directly(*attrs): def cls_builder(cls): for attr_name in attrs: setattr(cls, attr_name, 
_make_proxy_method(attr_name)) return cls return cls_builder def proxy_property_directly(*attrs): def cls_builder(cls): for attr_name in attrs: setattr(cls, attr_name, _make_proxy_property(attr_name)) return cls return cls_builder def cond_delegate_to_executor(*attrs): def cls_builder(cls): for attr_name in attrs: setattr(cls, attr_name, _make_cond_delegate_method(attr_name)) return cls return cls_builder def _make_delegate_method(attr_name): async def method(self, *args, **kwargs): cb = functools.partial(getattr(self._file, attr_name), *args, **kwargs) return await self._loop.run_in_executor(self._executor, cb) return method def _make_proxy_method(attr_name): def method(self, *args, **kwargs): return getattr(self._file, attr_name)(*args, **kwargs) return method def _make_proxy_property(attr_name): def proxy_property(self): return getattr(self._file, attr_name) return property(proxy_property) def _make_cond_delegate_method(attr_name): """For spooled temp files, delegate only if rolled to file object""" async def method(self, *args, **kwargs): if self._file._rolled: cb = functools.partial(getattr(self._file, attr_name), *args, **kwargs) return await self._loop.run_in_executor(self._executor, cb) else: return getattr(self._file, attr_name)(*args, **kwargs) return method aiofiles-23.2.1/tests/000077500000000000000000000000001446472733600145545ustar00rootroot00000000000000aiofiles-23.2.1/tests/resources/000077500000000000000000000000001446472733600165665ustar00rootroot00000000000000aiofiles-23.2.1/tests/resources/multiline_file.txt000066400000000000000000000000331446472733600223240ustar00rootroot00000000000000line 1 line 2 line 3 line 4aiofiles-23.2.1/tests/resources/test_file1.txt000066400000000000000000000000121446472733600213570ustar00rootroot000000000000000123456789aiofiles-23.2.1/tests/test_os.py000066400000000000000000000407071446472733600166160ustar00rootroot00000000000000"""Tests for asyncio's os module.""" import asyncio import os import platform from os import stat from os.path import dirname, exists, isdir, join from pathlib import Path import pytest import aiofiles.os @pytest.mark.asyncio async def test_stat(): """Test the stat call.""" filename = join(dirname(__file__), "resources", "test_file1.txt") stat_res = await aiofiles.os.stat(filename) assert stat_res.st_size == 10 @pytest.mark.skipif(platform.system() == "Windows", reason="No statvfs on Windows") @pytest.mark.asyncio async def test_statvfs(): """Test the statvfs call.""" statvfs_res = await aiofiles.os.statvfs("/") assert statvfs_res.f_bsize == os.statvfs("/").f_bsize @pytest.mark.asyncio async def test_remove(): """Test the remove call.""" filename = join(dirname(__file__), "resources", "test_file2.txt") with open(filename, "w") as f: f.write("Test file for remove call") assert exists(filename) await aiofiles.os.remove(filename) assert exists(filename) is False @pytest.mark.asyncio async def test_unlink(): """Test the unlink call.""" filename = join(dirname(__file__), "resources", "test_file2.txt") with open(filename, "w") as f: f.write("Test file for unlink call") assert exists(filename) await aiofiles.os.unlink(filename) assert exists(filename) is False @pytest.mark.asyncio async def test_mkdir_and_rmdir(): """Test the mkdir and rmdir call.""" directory = join(dirname(__file__), "resources", "test_dir") await aiofiles.os.mkdir(directory) assert isdir(directory) await aiofiles.os.rmdir(directory) assert exists(directory) is False @pytest.mark.asyncio async def test_rename(): """Test the rename call.""" old_filename = 
join(dirname(__file__), "resources", "test_file1.txt") new_filename = join(dirname(__file__), "resources", "test_file2.txt") await aiofiles.os.rename(old_filename, new_filename) assert exists(old_filename) is False and exists(new_filename) await aiofiles.os.rename(new_filename, old_filename) assert exists(old_filename) and exists(new_filename) is False @pytest.mark.asyncio async def test_renames(): """Test the renames call.""" old_filename = join(dirname(__file__), "resources", "test_file1.txt") new_filename = join( dirname(__file__), "resources", "subdirectory", "test_file2.txt" ) await aiofiles.os.renames(old_filename, new_filename) assert exists(old_filename) is False and exists(new_filename) await aiofiles.os.renames(new_filename, old_filename) assert ( exists(old_filename) and exists(new_filename) is False and exists(dirname(new_filename)) is False ) @pytest.mark.asyncio async def test_replace(): """Test the replace call.""" old_filename = join(dirname(__file__), "resources", "test_file1.txt") new_filename = join(dirname(__file__), "resources", "test_file2.txt") await aiofiles.os.replace(old_filename, new_filename) assert exists(old_filename) is False and exists(new_filename) await aiofiles.os.replace(new_filename, old_filename) assert exists(old_filename) and exists(new_filename) is False with open(new_filename, "w") as f: f.write("Test file") assert exists(old_filename) and exists(new_filename) await aiofiles.os.replace(old_filename, new_filename) assert exists(old_filename) is False and exists(new_filename) await aiofiles.os.replace(new_filename, old_filename) assert exists(old_filename) and exists(new_filename) is False @pytest.mark.skipif( "2.4" < platform.release() < "2.6.33", reason="sendfile() syscall doesn't allow file->file", ) @pytest.mark.skipif( platform.system() in ("Darwin", "Windows"), reason="sendfile() doesn't work on mac and Win", ) @pytest.mark.asyncio async def test_sendfile_file(tmpdir): """Test the sendfile functionality, file-to-file.""" filename = join(dirname(__file__), "resources", "test_file1.txt") tmp_filename = tmpdir.join("tmp.bin") with open(filename) as f: contents = f.read() input_file = await aiofiles.open(filename) output_file = await aiofiles.open(str(tmp_filename), mode="w+") size = (await aiofiles.os.stat(filename)).st_size input_fd = input_file.fileno() output_fd = output_file.fileno() await aiofiles.os.sendfile(output_fd, input_fd, 0, size) await output_file.seek(0) actual_contents = await output_file.read() actual_size = (await aiofiles.os.stat(str(tmp_filename))).st_size assert contents == actual_contents assert size == actual_size @pytest.mark.skipif( platform.system() in ("Windows"), reason="sendfile() doesn't work on Win" ) @pytest.mark.asyncio async def test_sendfile_socket(unused_tcp_port): """Test the sendfile functionality, file-to-socket.""" filename = join(dirname(__file__), "resources", "test_file1.txt") with open(filename, mode="rb") as f: contents = f.read() async def serve_file(_, writer): out_fd = writer.transport.get_extra_info("socket").fileno() size = (await aiofiles.os.stat(filename)).st_size in_file = await aiofiles.open(filename) try: in_fd = in_file.fileno() await aiofiles.os.sendfile(out_fd, in_fd, 0, size) finally: await in_file.close() await writer.drain() writer.close() server = await asyncio.start_server(serve_file, port=unused_tcp_port) reader, writer = await asyncio.open_connection("127.0.0.1", unused_tcp_port) actual_contents = await reader.read() writer.close() assert contents == actual_contents server.close() 
await server.wait_closed() @pytest.mark.asyncio async def test_exists(): """Test path.exists call.""" filename = join(dirname(__file__), "resources", "test_file1.txt") result = await aiofiles.os.path.exists(filename) assert result @pytest.mark.asyncio async def test_isfile(): """Test path.isfile call.""" filename = join(dirname(__file__), "resources", "test_file1.txt") result = await aiofiles.os.path.isfile(filename) assert result @pytest.mark.asyncio async def test_isdir(): """Test path.isdir call.""" filename = join(dirname(__file__), "resources") result = await aiofiles.os.path.isdir(filename) assert result @pytest.mark.asyncio async def test_islink(): """Test the path.islink call.""" src_filename = join(dirname(__file__), "resources", "test_file1.txt") dst_filename = join(dirname(__file__), "resources", "test_file2.txt") await aiofiles.os.symlink(src_filename, dst_filename) assert await aiofiles.os.path.islink(dst_filename) await aiofiles.os.remove(dst_filename) @pytest.mark.asyncio async def test_ismount(): """Test the path.ismount call.""" filename = join(dirname(__file__), "resources") assert not await aiofiles.os.path.ismount(filename) assert await aiofiles.os.path.ismount("/") @pytest.mark.asyncio async def test_getsize(): """Test path.getsize call.""" filename = join(dirname(__file__), "resources", "test_file1.txt") result = await aiofiles.os.path.getsize(filename) assert result == 10 @pytest.mark.asyncio async def test_samefile(): """Test path.samefile call.""" filename = join(dirname(__file__), "resources", "test_file1.txt") result = await aiofiles.os.path.samefile(filename, filename) assert result @pytest.mark.asyncio async def test_sameopenfile(): """Test path.samefile call.""" filename = join(dirname(__file__), "resources", "test_file1.txt") result = await aiofiles.os.path.samefile(filename, filename) assert result @pytest.mark.asyncio async def test_getmtime(): """Test path.getmtime call.""" filename = join(dirname(__file__), "resources", "test_file1.txt") result = await aiofiles.os.path.getmtime(filename) assert result @pytest.mark.asyncio async def test_getatime(): """Test path.getatime call.""" filename = join(dirname(__file__), "resources", "test_file1.txt") result = await aiofiles.os.path.getatime(filename) assert result @pytest.mark.asyncio async def test_getctime(): """Test path. 
call.""" filename = join(dirname(__file__), "resources", "test_file1.txt") result = await aiofiles.os.path.getctime(filename) assert result @pytest.mark.asyncio async def test_link(): """Test the link call.""" src_filename = join(dirname(__file__), "resources", "test_file1.txt") dst_filename = join(dirname(__file__), "resources", "test_file2.txt") initial_src_nlink = stat(src_filename).st_nlink await aiofiles.os.link(src_filename, dst_filename) assert ( exists(src_filename) and exists(dst_filename) and (stat(src_filename).st_ino == stat(dst_filename).st_ino) and (stat(src_filename).st_nlink == initial_src_nlink + 1) and (stat(dst_filename).st_nlink == 2) ) await aiofiles.os.remove(dst_filename) assert ( exists(src_filename) and exists(dst_filename) is False and (stat(src_filename).st_nlink == initial_src_nlink) ) @pytest.mark.asyncio async def test_symlink(): """Test the symlink call.""" src_filename = join(dirname(__file__), "resources", "test_file1.txt") dst_filename = join(dirname(__file__), "resources", "test_file2.txt") await aiofiles.os.symlink(src_filename, dst_filename) assert ( exists(src_filename) and exists(dst_filename) and stat(src_filename).st_ino == stat(dst_filename).st_ino ) await aiofiles.os.remove(dst_filename) assert exists(src_filename) and exists(dst_filename) is False @pytest.mark.skipif( platform.system() == "Windows", reason="Doesn't work on Win properly" ) @pytest.mark.asyncio async def test_readlink(): """Test the readlink call.""" src_filename = join(dirname(__file__), "resources", "test_file1.txt") dst_filename = join(dirname(__file__), "resources", "test_file2.txt") await aiofiles.os.symlink(src_filename, dst_filename) symlinked_path = await aiofiles.os.readlink(dst_filename) assert src_filename == symlinked_path await aiofiles.os.remove(dst_filename) @pytest.mark.asyncio async def test_listdir_empty_dir(): """Test the listdir call when the dir is empty.""" directory = join(dirname(__file__), "resources", "empty_dir") await aiofiles.os.mkdir(directory) dir_list = await aiofiles.os.listdir(directory) assert dir_list == [] await aiofiles.os.rmdir(directory) @pytest.mark.asyncio async def test_listdir_dir_with_only_one_file(): """Test the listdir call when the dir has one file.""" some_dir = join(dirname(__file__), "resources", "some_dir") some_file = join(some_dir, "some_file.txt") await aiofiles.os.mkdir(some_dir) with open(some_file, "w") as f: f.write("Test file") dir_list = await aiofiles.os.listdir(some_dir) assert "some_file.txt" in dir_list await aiofiles.os.remove(some_file) await aiofiles.os.rmdir(some_dir) @pytest.mark.asyncio async def test_listdir_dir_with_only_one_dir(): """Test the listdir call when the dir has one dir.""" some_dir = join(dirname(__file__), "resources", "some_dir") other_dir = join(some_dir, "other_dir") await aiofiles.os.mkdir(some_dir) await aiofiles.os.mkdir(other_dir) dir_list = await aiofiles.os.listdir(some_dir) assert "other_dir" in dir_list await aiofiles.os.rmdir(other_dir) await aiofiles.os.rmdir(some_dir) @pytest.mark.asyncio async def test_listdir_dir_with_multiple_files(): """Test the listdir call when the dir has multiple files.""" some_dir = join(dirname(__file__), "resources", "some_dir") some_file = join(some_dir, "some_file.txt") other_file = join(some_dir, "other_file.txt") await aiofiles.os.mkdir(some_dir) with open(some_file, "w") as f: f.write("Test file") with open(other_file, "w") as f: f.write("Test file") dir_list = await aiofiles.os.listdir(some_dir) assert "some_file.txt" in dir_list assert 
"other_file.txt" in dir_list await aiofiles.os.remove(some_file) await aiofiles.os.remove(other_file) await aiofiles.os.rmdir(some_dir) @pytest.mark.asyncio async def test_listdir_dir_with_a_file_and_a_dir(): """Test the listdir call when the dir has files and other dirs.""" some_dir = join(dirname(__file__), "resources", "some_dir") other_dir = join(some_dir, "other_dir") some_file = join(some_dir, "some_file.txt") await aiofiles.os.mkdir(some_dir) await aiofiles.os.mkdir(other_dir) with open(some_file, "w") as f: f.write("Test file") dir_list = await aiofiles.os.listdir(some_dir) assert "some_file.txt" in dir_list assert "other_dir" in dir_list await aiofiles.os.remove(some_file) await aiofiles.os.rmdir(other_dir) await aiofiles.os.rmdir(some_dir) @pytest.mark.asyncio async def test_listdir_non_existing_dir(): """Test the listdir call when the dir doesn't exist.""" some_dir = join(dirname(__file__), "resources", "some_dir") with pytest.raises(FileNotFoundError): await aiofiles.os.listdir(some_dir) @pytest.mark.asyncio async def test_scantdir_empty_dir(): """Test the scandir call when the dir is empty.""" empty_dir = join(dirname(__file__), "resources", "empty_dir") await aiofiles.os.mkdir(empty_dir) dir_iterator = await aiofiles.os.scandir(empty_dir) dir_list = [] for dir_entity in dir_iterator: dir_list.append(dir_entity) assert dir_list == [] await aiofiles.os.rmdir(empty_dir) @pytest.mark.asyncio async def test_scandir_dir_with_only_one_file(): """Test the scandir call when the dir has one file.""" some_dir = join(dirname(__file__), "resources", "some_dir") some_file = join(some_dir, "some_file.txt") await aiofiles.os.mkdir(some_dir) with open(some_file, "w") as f: f.write("Test file") dir_iterator = await aiofiles.os.scandir(some_dir) some_file_entity = next(dir_iterator) assert some_file_entity.name == "some_file.txt" await aiofiles.os.remove(some_file) await aiofiles.os.rmdir(some_dir) @pytest.mark.asyncio async def test_scandir_dir_with_only_one_dir(): """Test the scandir call when the dir has one dir.""" some_dir = join(dirname(__file__), "resources", "some_dir") other_dir = join(some_dir, "other_dir") await aiofiles.os.mkdir(some_dir) await aiofiles.os.mkdir(other_dir) dir_iterator = await aiofiles.os.scandir(some_dir) other_dir_entity = next(dir_iterator) assert other_dir_entity.name == "other_dir" await aiofiles.os.rmdir(other_dir) await aiofiles.os.rmdir(some_dir) @pytest.mark.asyncio async def test_scandir_non_existing_dir(): """Test the scandir call when the dir doesn't exist.""" some_dir = join(dirname(__file__), "resources", "some_dir") with pytest.raises(FileNotFoundError): await aiofiles.os.scandir(some_dir) @pytest.mark.skipif(platform.system() == "Windows", reason="Doesn't work on Win") @pytest.mark.asyncio async def test_access(): temp_file = Path(__file__).parent.joinpath("resources", "os_access_temp.txt") temp_dir = Path(__file__).parent.joinpath("resources", "os_access_temp") # prepare if temp_file.exists(): os.remove(temp_file) assert not temp_file.exists() temp_file.touch() if temp_dir.exists(): os.rmdir(temp_dir) assert not temp_dir.exists() os.mkdir(temp_dir) data = [ # full access [0o777, os.F_OK, True], [0o777, os.R_OK, True], [0o777, os.W_OK, True], [0o777, os.X_OK, True], # chmod -x [0o666, os.F_OK, True], [0o666, os.R_OK, True], [0o666, os.W_OK, True], [0o666, os.X_OK, False], # chmod -w [0o444, os.F_OK, True], [0o444, os.R_OK, True], [0o444, os.W_OK, False], [0o444, os.X_OK, False], # chmod -r [0o000, os.F_OK, True], [0o000, os.R_OK, False], [0o000, 
os.W_OK, False], [0o000, os.X_OK, False], ] for ch, mode, access in data: print("mode:{}, access:{}".format(mode, access)) temp_file.chmod(ch) temp_dir.chmod(ch) assert await aiofiles.os.access(temp_file, mode) == access assert await aiofiles.os.access(temp_dir, mode) == access # not exists os.remove(temp_file) os.rmdir(temp_dir) for mode in [os.F_OK, os.R_OK, os.W_OK, os.X_OK]: print("mode:{}".format(mode)) assert not await aiofiles.os.access(temp_file, mode) assert not await aiofiles.os.access(temp_dir, mode) aiofiles-23.2.1/tests/test_simple.py000066400000000000000000000035221446472733600174600ustar00rootroot00000000000000"""Simple tests verifying basic functionality.""" import asyncio from aiofiles import threadpool import pytest @pytest.mark.asyncio async def test_serve_small_bin_file_sync(event_loop, tmpdir, unused_tcp_port): """Fire up a small simple file server, and fetch a file. The file is read into memory synchronously, so this test doesn't actually test anything except the general test concept. """ # First we'll write a small file. filename = "test.bin" file_content = b"0123456789" file = tmpdir.join(filename) file.write_binary(file_content) async def serve_file(reader, writer): full_filename = str(file) with open(full_filename, "rb") as f: writer.write(f.read()) writer.close() server = await asyncio.start_server(serve_file, port=unused_tcp_port) reader, _ = await asyncio.open_connection(host="localhost", port=unused_tcp_port) payload = await reader.read() assert payload == file_content server.close() await server.wait_closed() @pytest.mark.asyncio async def test_serve_small_bin_file(event_loop, tmpdir, unused_tcp_port): """Fire up a small simple file server, and fetch a file.""" # First we'll write a small file. filename = "test.bin" file_content = b"0123456789" file = tmpdir.join(filename) file.write_binary(file_content) async def serve_file(reader, writer): full_filename = str(file) f = await threadpool.open(full_filename, mode="rb") writer.write((await f.read())) await f.close() writer.close() server = await asyncio.start_server(serve_file, port=unused_tcp_port) reader, _ = await asyncio.open_connection(host="localhost", port=unused_tcp_port) payload = await reader.read() assert payload == file_content server.close() await server.wait_closed() aiofiles-23.2.1/tests/test_stdio.py000066400000000000000000000012331446472733600173060ustar00rootroot00000000000000import pytest from aiofiles import stderr, stderr_bytes, stdin, stdin_bytes, stdout, stdout_bytes @pytest.mark.asyncio async def test_stdio(capsys): await stdout.write("hello") await stderr.write("world") out, err = capsys.readouterr() assert out == "hello" assert err == "world" with pytest.raises(OSError): await stdin.read() @pytest.mark.asyncio async def test_stdio_bytes(capsysbinary): await stdout_bytes.write(b"hello") await stderr_bytes.write(b"world") out, err = capsysbinary.readouterr() assert out == b"hello" assert err == b"world" with pytest.raises(OSError): await stdin_bytes.read() aiofiles-23.2.1/tests/test_tempfile.py000066400000000000000000000107171446472733600200000ustar00rootroot00000000000000import io import os import platform import sys import pytest from aiofiles import tempfile @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r+", "w+", "rb+", "wb+"]) async def test_temporary_file(mode): """Test temporary file.""" data = b"Hello World!\n" if "b" in mode else "Hello World!\n" async with tempfile.TemporaryFile(mode=mode) as f: for i in range(3): await f.write(data) await f.flush() await f.seek(0) 
async for line in f: assert line == data @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r+", "w+", "rb+", "wb+"]) @pytest.mark.skipif( sys.version_info >= (3, 12), reason=("3.12+ doesn't support tempfile.NamedTemporaryFile.delete"), ) async def test_named_temporary_file(mode): data = b"Hello World!" if "b" in mode else "Hello World!" filename = None async with tempfile.NamedTemporaryFile(mode=mode) as f: await f.write(data) await f.flush() await f.seek(0) assert await f.read() == data filename = f.name assert os.path.exists(filename) assert os.path.isfile(filename) assert f.delete assert not os.path.exists(filename) @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r+", "w+", "rb+", "wb+"]) @pytest.mark.skipif( sys.version_info < (3, 12), reason=("3.12+ doesn't support tempfile.NamedTemporaryFile.delete"), ) async def test_named_temporary_file_312(mode): data = b"Hello World!" if "b" in mode else "Hello World!" filename = None async with tempfile.NamedTemporaryFile(mode=mode) as f: await f.write(data) await f.flush() await f.seek(0) assert await f.read() == data filename = f.name assert os.path.exists(filename) assert os.path.isfile(filename) assert not os.path.exists(filename) @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r+", "w+", "rb+", "wb+"]) @pytest.mark.skipif( sys.version_info < (3, 12), reason=("3.12+ supports delete_on_close") ) async def test_named_temporary_delete_on_close(mode): data = b"Hello World!" if "b" in mode else "Hello World!" filename = None async with tempfile.NamedTemporaryFile(mode=mode, delete_on_close=True) as f: await f.write(data) await f.flush() await f.close() filename = f.name assert not os.path.exists(filename) async with tempfile.NamedTemporaryFile(mode=mode, delete_on_close=False) as f: await f.write(data) await f.flush() await f.close() filename = f.name assert os.path.exists(filename) assert not os.path.exists(filename) @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r+", "w+", "rb+", "wb+"]) async def test_spooled_temporary_file(mode): """Test spooled temporary file.""" data = b"Hello World!" if "b" in mode else "Hello World!" async with tempfile.SpooledTemporaryFile(max_size=len(data) + 1, mode=mode) as f: await f.write(data) await f.flush() if "b" in mode: assert type(f._file._file) is io.BytesIO await f.write(data) await f.flush() if "b" in mode: assert type(f._file._file) is not io.BytesIO await f.seek(0) assert await f.read() == data + data @pytest.mark.skipif( platform.system() == "Windows", reason="Doesn't work on Win properly" ) @pytest.mark.asyncio @pytest.mark.parametrize( "test_string, newlines", [("LF\n", "\n"), ("CRLF\r\n", "\r\n")] ) async def test_spooled_temporary_file_newlines(test_string, newlines): """ Test `newlines` property in spooled temporary file. 
issue https://github.com/Tinche/aiofiles/issues/118 """ async with tempfile.SpooledTemporaryFile(mode="w+") as f: await f.write(test_string) await f.flush() await f.seek(0) assert f.newlines is None await f.read() assert f.newlines == newlines @pytest.mark.asyncio @pytest.mark.parametrize("prefix, suffix", [("a", "b"), ("c", "d"), ("e", "f")]) async def test_temporary_directory(prefix, suffix, tmp_path): """Test temporary directory.""" dir_path = None async with tempfile.TemporaryDirectory( suffix=suffix, prefix=prefix, dir=tmp_path ) as d: dir_path = d assert os.path.exists(dir_path) assert os.path.isdir(dir_path) assert d[-1] == suffix assert d.split(os.sep)[-1][0] == prefix assert not os.path.exists(dir_path) aiofiles-23.2.1/tests/threadpool/000077500000000000000000000000001446472733600167155ustar00rootroot00000000000000aiofiles-23.2.1/tests/threadpool/test_binary.py000066400000000000000000000237661446472733600216300ustar00rootroot00000000000000"""PEP 0492/Python 3.5+ tests for binary files.""" import io from os.path import dirname, join from aiofiles.threadpool import open as aioopen import pytest @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_iteration(mode, buffering): """Test iterating over lines from a file.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") async with aioopen(filename, mode=mode, buffering=buffering) as file: # Append mode needs us to seek. await file.seek(0) counter = 1 # The old iteration pattern: while True: line = await file.readline() if not line: break assert line.strip() == b"line " + str(counter).encode() counter += 1 counter = 1 await file.seek(0) # The new iteration pattern: async for line in file: assert line.strip() == b"line " + str(counter).encode() counter += 1 assert file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_readlines(mode, buffering): """Test the readlines functionality.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") with open(filename, mode="rb") as f: expected = f.readlines() async with aioopen(str(filename), mode=mode) as file: # Append mode needs us to seek. await file.seek(0) actual = await file.readlines() assert actual == expected @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb+", "wb", "ab"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_flush(mode, buffering, tmpdir): """Test flushing to a file.""" filename = "file.bin" full_file = tmpdir.join(filename) if "r" in mode: full_file.ensure() # Read modes want it to already exist. async with aioopen(str(full_file), mode=mode, buffering=buffering) as file: await file.write(b"0") # Shouldn't flush. if buffering == -1: assert b"" == full_file.read_binary() else: assert b"0" == full_file.read_binary() await file.flush() assert b"0" == full_file.read_binary() @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb+", "wb+", "ab+"]) async def test_simple_peek(mode, tmpdir): """Test flushing to a file.""" filename = "file.bin" full_file = tmpdir.join(filename) full_file.write_binary(b"0123456789") async with aioopen(str(full_file), mode=mode) as file: if "a" in mode: await file.seek(0) # Rewind for append modes. peeked = await file.peek(1) # Technically it's OK for the peek to return less bytes than requested. 
if peeked: assert peeked.startswith(b"0") read = await file.read(1) assert peeked.startswith(read) @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_read(mode, buffering): """Just read some bytes from a test file.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") async with aioopen(filename, mode=mode, buffering=buffering) as file: await file.seek(0) # Needed for the append mode. actual = await file.read() assert b"" == (await file.read()) assert actual == open(filename, mode="rb").read() @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_staggered_read(mode, buffering): """Read bytes repeatedly.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") async with aioopen(filename, mode=mode, buffering=buffering) as file: await file.seek(0) # Needed for the append mode. actual = [] while True: byte = await file.read(1) if byte: actual.append(byte) else: break assert b"" == (await file.read()) expected = [] with open(filename, mode="rb") as f: while True: byte = f.read(1) if byte: expected.append(byte) else: break assert actual == expected @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_seek(mode, buffering, tmpdir): """Test seeking and then reading.""" filename = "bigfile.bin" content = b"0123456789" * 4 * io.DEFAULT_BUFFER_SIZE full_file = tmpdir.join(filename) full_file.write_binary(content) async with aioopen(str(full_file), mode=mode, buffering=buffering) as file: await file.seek(4) assert b"4" == (await file.read(1)) @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["wb", "rb", "rb+", "wb+", "ab", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_close_ctx_mgr_iter(mode, buffering, tmpdir): """Open a file, read a byte, and close it.""" filename = "bigfile.bin" content = b"0" * 4 * io.DEFAULT_BUFFER_SIZE full_file = tmpdir.join(filename) full_file.write_binary(content) async with aioopen(str(full_file), mode=mode, buffering=buffering) as file: assert not file.closed assert not file._file.closed assert file.closed assert file._file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["wb", "rb", "rb+", "wb+", "ab", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_close_ctx_mgr(mode, buffering, tmpdir): """Open a file, read a byte, and close it.""" filename = "bigfile.bin" content = b"0" * 4 * io.DEFAULT_BUFFER_SIZE full_file = tmpdir.join(filename) full_file.write_binary(content) file = await aioopen(str(full_file), mode=mode, buffering=buffering) assert not file.closed assert not file._file.closed await file.close() assert file.closed assert file._file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_readinto(mode, buffering): """Test the readinto functionality.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") async with aioopen(filename, mode=mode, buffering=buffering) as file: await file.seek(0) # Needed for the append mode. 
array = bytearray(4) bytes_read = await file.readinto(array) assert bytes_read == 4 assert array == open(filename, mode="rb").read(4) @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb+", "wb", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_truncate(mode, buffering, tmpdir): """Test truncating files.""" filename = "bigfile.bin" content = b"0123456789" * 4 * io.DEFAULT_BUFFER_SIZE full_file = tmpdir.join(filename) full_file.write_binary(content) async with aioopen(str(full_file), mode=mode, buffering=buffering) as file: # The append modes want us to seek first. await file.seek(0) if "w" in mode: # We've just erased the entire file. await file.write(content) await file.flush() await file.seek(0) await file.truncate() assert b"" == full_file.read_binary() @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["wb", "rb+", "wb+", "ab", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_write(mode, buffering, tmpdir): """Test writing into a file.""" filename = "bigfile.bin" content = b"0" * 4 * io.DEFAULT_BUFFER_SIZE full_file = tmpdir.join(filename) if "r" in mode: full_file.ensure() # Read modes want it to already exist. async with aioopen(str(full_file), mode=mode, buffering=buffering) as file: bytes_written = await file.write(content) assert bytes_written == len(content) assert content == full_file.read_binary() @pytest.mark.asyncio async def test_simple_detach(tmpdir): """Test detaching for buffered streams.""" filename = "file.bin" full_file = tmpdir.join(filename) full_file.write_binary(b"0123456789") with pytest.raises(ValueError): async with aioopen(str(full_file), mode="rb") as file: raw_file = file.detach() assert raw_file with pytest.raises(ValueError): await file.read() assert b"0123456789" == raw_file.read(10) @pytest.mark.asyncio async def test_simple_readall(tmpdir): """Test the readall function by reading a large file in. Only RawIOBase supports readall(). """ filename = "bigfile.bin" content = b"0" * 4 * io.DEFAULT_BUFFER_SIZE # Hopefully several reads. 
sync_file = tmpdir.join(filename) sync_file.write_binary(content) file = await aioopen(str(sync_file), mode="rb", buffering=0) actual = await file.readall() assert actual == content await file.close() assert file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_name_property(mode, buffering): """Test iterating over lines from a file.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") async with aioopen(filename, mode=mode, buffering=buffering) as file: assert file.name == filename assert file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_mode_property(mode, buffering): """Test iterating over lines from a file.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") async with aioopen(filename, mode=mode, buffering=buffering) as file: assert file.mode == mode assert file.closed aiofiles-23.2.1/tests/threadpool/test_concurrency.py000066400000000000000000000040351446472733600226620ustar00rootroot00000000000000"""Test concurrency properties of the implementation.""" from os.path import dirname from os.path import join import time import asyncio import pytest import aiofiles.threadpool @pytest.mark.asyncio async def test_slow_file(monkeypatch, unused_tcp_port): """Monkey patch open and file.read(), and assert the loop still works.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") with open(filename, mode="rb") as f: contents = f.read() def new_open(*args, **kwargs): time.sleep(1) return open(*args, **kwargs) monkeypatch.setattr(aiofiles.threadpool, "sync_open", value=new_open) async def serve_file(_, writer): file = await aiofiles.threadpool.open(filename, mode="rb") try: while True: data = await file.read(1) if not data: break writer.write(data) await writer.drain() await writer.drain() finally: writer.close() await file.close() async def return_one(_, writer): writer.write(b"1") await writer.drain() writer.close() counter = 0 async def spam_client(): nonlocal counter while True: r, w = await asyncio.open_connection("127.0.0.1", port=30001) assert (await r.read()) == b"1" counter += 1 w.close() await asyncio.sleep(0.01) file_server = await asyncio.start_server(serve_file, port=unused_tcp_port) spam_server = await asyncio.start_server(return_one, port=30001) spam_task = asyncio.ensure_future(spam_client()) reader, writer = await asyncio.open_connection("127.0.0.1", port=unused_tcp_port) actual_contents = await reader.read() writer.close() await asyncio.sleep(0) file_server.close() spam_server.close() await file_server.wait_closed() await spam_server.wait_closed() spam_task.cancel() assert actual_contents == contents assert counter > 30 aiofiles-23.2.1/tests/threadpool/test_open.py000066400000000000000000000013121446472733600212640ustar00rootroot00000000000000"""Test the open functionality.""" from aiofiles.threadpool import open as aioopen, wrap import pytest @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r", "rb"]) async def test_file_not_found(mode): filename = "non_existent" try: open(filename, mode=mode) except Exception as e: expected = e assert expected try: await aioopen(filename, mode=mode) except Exception as e: actual = e assert actual assert actual.errno == expected.errno assert str(actual) == str(expected) def test_unsupported_wrap(): """A type error should be raised when wrapping something 
unsupported.""" with pytest.raises(TypeError): wrap(int) aiofiles-23.2.1/tests/threadpool/test_text.py000066400000000000000000000174151446472733600213220ustar00rootroot00000000000000"""PEP 0492/Python 3.5+ tests for text files.""" import io from os.path import dirname, join from aiofiles.threadpool import open as aioopen import pytest @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r", "r+", "a+"]) async def test_simple_iteration(mode): """Test iterating over lines from a file.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") async with aioopen(filename, mode=mode) as file: # Append mode needs us to seek. await file.seek(0) counter = 1 # The old iteration pattern: while True: line = await file.readline() if not line: break assert line.strip() == "line " + str(counter) counter += 1 await file.seek(0) counter = 1 # The new iteration pattern: async for line in file: assert line.strip() == "line " + str(counter) counter += 1 assert file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r", "r+", "a+"]) async def test_simple_readlines(mode): """Test the readlines functionality.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") with open(filename, mode="r") as f: expected = f.readlines() async with aioopen(filename, mode=mode) as file: # Append mode needs us to seek. await file.seek(0) actual = await file.readlines() assert file.closed assert actual == expected @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r+", "w", "a"]) async def test_simple_flush(mode, tmpdir): """Test flushing to a file.""" filename = "file.bin" full_file = tmpdir.join(filename) if "r" in mode: full_file.ensure() # Read modes want it to already exist. async with aioopen(str(full_file), mode=mode) as file: await file.write("0") # Shouldn't flush. assert "" == full_file.read_text(encoding="utf8") await file.flush() assert "0" == full_file.read_text(encoding="utf8") assert file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r", "r+", "a+"]) async def test_simple_read(mode): """Just read some bytes from a test file.""" filename = join(dirname(__file__), "..", "resources", "test_file1.txt") async with aioopen(filename, mode=mode) as file: await file.seek(0) # Needed for the append mode. actual = await file.read() assert "" == (await file.read()) assert actual == open(filename, mode="r").read() assert file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["w", "a"]) async def test_simple_read_fail(mode, tmpdir): """Try reading some bytes and fail.""" filename = "bigfile.bin" content = "0123456789" * 4 * io.DEFAULT_BUFFER_SIZE full_file = tmpdir.join(filename) full_file.write(content) with pytest.raises(ValueError): async with aioopen(str(full_file), mode=mode) as file: await file.seek(0) # Needed for the append mode. await file.read() assert file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r", "r+", "a+"]) async def test_staggered_read(mode): """Read bytes repeatedly.""" filename = join(dirname(__file__), "..", "resources", "test_file1.txt") async with aioopen(filename, mode=mode) as file: await file.seek(0) # Needed for the append mode. 
        actual = []
        while True:
            char = await file.read(1)
            if char:
                actual.append(char)
            else:
                break

        assert "" == (await file.read())

        expected = []
        with open(filename, mode="r") as f:
            while True:
                char = f.read(1)
                if char:
                    expected.append(char)
                else:
                    break

    assert actual == expected
    assert file.closed


@pytest.mark.asyncio
@pytest.mark.parametrize("mode", ["r", "r+", "a+"])
async def test_simple_seek(mode, tmpdir):
    """Test seeking and then reading."""
    filename = "bigfile.bin"
    content = "0123456789" * 4 * io.DEFAULT_BUFFER_SIZE

    full_file = tmpdir.join(filename)
    full_file.write(content)

    async with aioopen(str(full_file), mode=mode) as file:
        await file.seek(4)

        assert "4" == (await file.read(1))

    assert file.closed


@pytest.mark.asyncio
@pytest.mark.parametrize("mode", ["w", "r", "r+", "w+", "a", "a+"])
async def test_simple_close(mode, tmpdir):
    """Open a file, read a byte, and close it."""
    filename = "bigfile.bin"
    content = "0" * 4 * io.DEFAULT_BUFFER_SIZE

    full_file = tmpdir.join(filename)
    full_file.write(content)

    async with aioopen(str(full_file), mode=mode) as file:
        assert not file.closed
        assert not file._file.closed

    assert file.closed
    assert file._file.closed


@pytest.mark.asyncio
@pytest.mark.parametrize("mode", ["r+", "w", "a+"])
async def test_simple_truncate(mode, tmpdir):
    """Test truncating files."""
    filename = "bigfile.bin"
    content = "0123456789" * 4 * io.DEFAULT_BUFFER_SIZE

    full_file = tmpdir.join(filename)
    full_file.write(content)

    async with aioopen(str(full_file), mode=mode) as file:
        # The append modes want us to seek first.
        await file.seek(0)

        if "w" in mode:
            # We've just erased the entire file.
            await file.write(content)
            await file.flush()
            await file.seek(0)

        await file.truncate()

    assert "" == full_file.read()


@pytest.mark.asyncio
@pytest.mark.parametrize("mode", ["w", "r+", "w+", "a", "a+"])
async def test_simple_write(mode, tmpdir):
    """Test writing into a file."""
    filename = "bigfile.bin"
    content = "0" * 4 * io.DEFAULT_BUFFER_SIZE

    full_file = tmpdir.join(filename)

    if "r" in mode:
        full_file.ensure()  # Read modes want it to already exist.

    async with aioopen(str(full_file), mode=mode) as file:
        bytes_written = await file.write(content)

    assert bytes_written == len(content)
    assert content == full_file.read()
    assert file.closed


@pytest.mark.asyncio
async def test_simple_detach(tmpdir):
    """Test detaching for buffered streams."""
    filename = "file.bin"

    full_file = tmpdir.join(filename)
    full_file.write("0123456789")

    with pytest.raises(ValueError):  # Close will error out.
        async with aioopen(str(full_file), mode="r") as file:
            raw_file = file.detach()

            assert raw_file

            with pytest.raises(ValueError):
                await file.read()

    assert b"0123456789" == raw_file.read(10)


@pytest.mark.asyncio
@pytest.mark.parametrize("mode", ["r", "r+", "a+"])
async def test_simple_iteration_ctx_mgr(mode):
    """Test iterating over lines from a file."""
    filename = join(dirname(__file__), "..", "resources", "multiline_file.txt")

    async with aioopen(filename, mode=mode) as file:
        assert not file.closed
        await file.seek(0)

        counter = 1

        async for line in file:
            assert line.strip() == "line " + str(counter)
            counter += 1

    assert file.closed


@pytest.mark.asyncio
@pytest.mark.parametrize("mode", ["r", "r+", "a+"])
async def test_name_property(mode):
    """Test the name property."""
    filename = join(dirname(__file__), "..", "resources", "multiline_file.txt")

    async with aioopen(filename, mode=mode) as file:
        assert file.name == filename

    assert file.closed


@pytest.mark.asyncio
@pytest.mark.parametrize("mode", ["r", "r+", "a+"])
async def test_mode_property(mode):
    """Test the mode property."""
    filename = join(dirname(__file__), "..", "resources", "multiline_file.txt")

    async with aioopen(filename, mode=mode) as file:
        assert file.mode == mode

    assert file.closed


aiofiles-23.2.1/tox.ini

[gh-actions]
python =
    3.7: py37
    3.8: py38
    3.9: py39
    3.10: py310
    3.11: py311, lint
    3.12: py312
    pypy-3.9: pypy3

[tox]
envlist = py37, py38, py39, py310, py311, py312, pypy3, lint
isolated_build = true
skipsdist = true

[testenv:lint]
skip_install = true
basepython = python3.11
allowlist_externals =
    make
    pdm
commands =
    pdm install -G lint
    make lint

[testenv]
allowlist_externals = pdm
setenv =
    PDM_IGNORE_SAVED_PYTHON="1"
commands =
    pdm install -G test
    coverage run -m pytest tests {posargs}
passenv = CI
package = wheel
wheel_build_env = .pkg

[flake8]
max-line-length = 88