pax_global_header00006660000000000000000000000064141504445010014510gustar00rootroot0000000000000052 comment=d010ff4d789598213334a32ec3d3f55caaab766c aiofiles-0.8.0/000077500000000000000000000000001415044450100133105ustar00rootroot00000000000000aiofiles-0.8.0/.github/000077500000000000000000000000001415044450100146505ustar00rootroot00000000000000aiofiles-0.8.0/.github/workflows/000077500000000000000000000000001415044450100167055ustar00rootroot00000000000000aiofiles-0.8.0/.github/workflows/main.yml000066400000000000000000000035771415044450100203700ustar00rootroot00000000000000--- name: CI on: push: branches: ["master"] pull_request: branches: ["master"] workflow_dispatch: jobs: tests: name: "Python ${{ matrix.python-version }}" runs-on: "ubuntu-latest" env: USING_COVERAGE: "3.6,3.7,3.8,3.9,3.10" strategy: matrix: python-version: ["3.6", "3.7", "3.8", "3.9", "3.10.0-rc.2", "pypy-3.7"] steps: - uses: "actions/checkout@v2" - uses: "actions/setup-python@v2" with: python-version: "${{ matrix.python-version }}" - name: "Install dependencies" run: | set -xe python -VV python -m site python -m pip install --upgrade pip wheel poetry==1.2.0a2 python -m pip install --upgrade coverage[toml] virtualenv tox tox-gh-actions - name: "Run tox targets for ${{ matrix.python-version }}" run: "python -m tox" # We always use a modern Python version for combining coverage to prevent # parsing errors in older versions for modern code. - uses: "actions/setup-python@v2" with: python-version: "3.9" - name: "Upload coverage to Codecov" if: "contains(env.USING_COVERAGE, matrix.python-version)" uses: "codecov/codecov-action@v1" with: fail_ci_if_error: true package: name: "Build & verify package" runs-on: "ubuntu-latest" steps: - uses: "actions/checkout@v2" - uses: "actions/setup-python@v2" with: python-version: "3.9" - name: "Install poetry and twine" run: "python -m pip install poetry twine check-wheel-contents" - name: "Build package" run: "poetry build" - name: "List result" run: "ls -l dist" - name: "Check wheel contents" run: "check-wheel-contents dist/*.whl" - name: "Check long_description" run: "python -m twine check dist/*" aiofiles-0.8.0/.gitignore000066400000000000000000000014061415044450100153010ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] # C extensions *.so # Distribution / packaging .Python env/ pyvenv/ build/ develop-eggs/ dist/ downloads/ eggs/ .eggs/ lib/ lib64/ parts/ sdist/ var/ *.egg-info/ .installed.cfg *.egg # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .coverage.* .cache nosetests.xml coverage.xml *,cover # Translations *.mo *.pot # Django stuff: *.log # Sphinx documentation docs/_build/ # PyBuilder target/ # IDEA IDE files .idea/ *.iml .pytest_cache # VScode .vscode/ aiofiles-0.8.0/LICENSE000066400000000000000000000260751415044450100143270ustar00rootroot00000000000000Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. 
"Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. "Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: (a) You must give any other recipients of the Work or Derivative Works a copy of this License; and (b) You must cause any modified files to carry prominent notices stating that You changed the files; and (c) You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and (d) If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. 
This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS APPENDIX: How to apply the Apache License to your work. To apply the Apache License to your work, attach the following boilerplate notice, with the fields enclosed by brackets "{}" replaced with your own identifying information. (Don't include the brackets!) The text should be enclosed in the appropriate comment syntax for the file format. We also recommend that a file or class name and description of purpose be included on the same "printed page" as the copyright notice for easier identification within third-party archives. Copyright {yyyy} {name of copyright owner} Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. 
aiofiles-0.8.0/MANIFEST.in000066400000000000000000000000201415044450100150360ustar00rootroot00000000000000include LICENSE aiofiles-0.8.0/Makefile000066400000000000000000000000641415044450100147500ustar00rootroot00000000000000.PHONY: test test: poetry run pytest -x --ff testsaiofiles-0.8.0/README.rst000066400000000000000000000144351415044450100150060ustar00rootroot00000000000000aiofiles: file support for asyncio ================================== .. image:: https://img.shields.io/pypi/v/aiofiles.svg :target: https://pypi.python.org/pypi/aiofiles .. image:: https://travis-ci.org/Tinche/aiofiles.svg?branch=master :target: https://travis-ci.org/Tinche/aiofiles .. image:: https://codecov.io/gh/Tinche/aiofiles/branch/master/graph/badge.svg :target: https://codecov.io/gh/Tinche/aiofiles .. image:: https://img.shields.io/pypi/pyversions/aiofiles.svg :target: https://github.com/Tinche/aiofiles :alt: Supported Python versions **aiofiles** is an Apache2 licensed library, written in Python, for handling local disk files in asyncio applications. Ordinary local file IO is blocking, and cannot easily and portably made asynchronous. This means doing file IO may interfere with asyncio applications, which shouldn't block the executing thread. aiofiles helps with this by introducing asynchronous versions of files that support delegating operations to a separate thread pool. .. code-block:: python async with aiofiles.open('filename', mode='r') as f: contents = await f.read() print(contents) 'My file contents' Asynchronous iteration is also supported. .. code-block:: python async with aiofiles.open('filename') as f: async for line in f: ... Asynchronous interface to tempfile module. .. code-block:: python async with aiofiles.tempfile.TemporaryFile('wb') as f: await f.write(b'Hello, World!') Features -------- - a file API very similar to Python's standard, blocking API - support for buffered and unbuffered binary files, and buffered text files - support for ``async``/``await`` (:PEP:`492`) constructs - async interface to tempfile module Installation ------------ To install aiofiles, simply: .. code-block:: bash $ pip install aiofiles Usage ----- Files are opened using the ``aiofiles.open()`` coroutine, which in addition to mirroring the builtin ``open`` accepts optional ``loop`` and ``executor`` arguments. If ``loop`` is absent, the default loop will be used, as per the set asyncio policy. If ``executor`` is not specified, the default event loop executor will be used. In case of success, an asynchronous file object is returned with an API identical to an ordinary file, except the following methods are coroutines and delegate to an executor: * ``close`` * ``flush`` * ``isatty`` * ``read`` * ``readall`` * ``read1`` * ``readinto`` * ``readline`` * ``readlines`` * ``seek`` * ``seekable`` * ``tell`` * ``truncate`` * ``writable`` * ``write`` * ``writelines`` In case of failure, one of the usual exceptions will be raised. 
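For example, a dedicated thread pool can be supplied via the ``executor`` argument described above. The sketch below is illustrative only (the ``io_pool`` name and worker count are assumptions, not part of the library):

.. code-block:: python

    from concurrent.futures import ThreadPoolExecutor

    import aiofiles

    # A dedicated pool keeps file IO from competing with other executor work.
    io_pool = ThreadPoolExecutor(max_workers=4)

    async def read_with_pool(path):
        # The open() coroutine delegates the blocking open and subsequent
        # reads to the supplied executor instead of the loop's default one.
        async with aiofiles.open(path, mode='r', executor=io_pool) as f:
            return await f.read()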
The ``aiofiles.os`` module contains executor-enabled coroutine versions of several useful ``os`` functions that deal with files: * ``stat`` * ``sendfile`` * ``rename`` * ``replace`` * ``remove`` * ``mkdir`` * ``makedirs`` * ``rmdir`` * ``removedirs`` * ``path.exists`` * ``path.isfile`` * ``path.isdir`` * ``path.getsize`` * ``path.getatime`` * ``path.getctime`` * ``path.samefile`` * ``path.sameopenfile`` Tempfile ~~~~~~~~ **aiofiles.tempfile** implements the following interfaces: - TemporaryFile - NamedTemporaryFile - SpooledTemporaryFile - TemporaryDirectory Results return wrapped with a context manager allowing use with async with and async for. .. code-block:: python async with aiofiles.tempfile.NamedTemporaryFile('wb+') as f: await f.write(b'Line1\n Line2') await f.seek(0) async for line in f: print(line) async with aiofiles.tempfile.TemporaryDirectory() as d: filename = os.path.join(d, "file.ext") Writing tests for aiofiles ~~~~~~~~~~~~~~~~~~~~~~~~~~ Real file IO can be mocked by patching ``aiofiles.threadpool.sync_open`` as desired. The return type also needs to be registered with the ``aiofiles.threadpool.wrap`` dispatcher: .. code-block:: python aiofiles.threadpool.wrap.register(mock.MagicMock)( lambda *args, **kwargs: threadpool.AsyncBufferedIOBase(*args, **kwargs)) async def test_stuff(): data = 'data' mock_file = mock.MagicMock() with mock.patch('aiofiles.threadpool.sync_open', return_value=mock_file) as mock_open: async with aiofiles.open('filename', 'w') as f: await f.write(data) mock_file.write.assert_called_once_with(data) History ~~~~~~~ 0.8.0 (2021-11-27) `````````````````` * aiofiles is now tested on Python 3.10. * Added ``aiofiles.os.replace``. `#107 `_ * Added ``aiofiles.os.{makedirs, removedirs}``. * Added ``aiofiles.os.path.{exists, isfile, isdir, getsize, getatime, getctime, samefile, sameopenfile}``. `#63 `_ * Added `suffix`, `prefix`, `dir` args to ``aiofiles.tempfile.TemporaryDirectory``. `#116 `_ 0.7.0 (2021-05-17) `````````````````` - Added the ``aiofiles.tempfile`` module for async temporary files. `#56 `_ - Switched to Poetry and GitHub actions. - Dropped 3.5 support. 0.6.0 (2020-10-27) `````````````````` - `aiofiles` is now tested on ppc64le. - Added `name` and `mode` properties to async file objects. `#82 `_ - Fixed a DeprecationWarning internally. `#75 `_ - Python 3.9 support and tests. 0.5.0 (2020-04-12) `````````````````` - Python 3.8 support. Code base modernization (using ``async/await`` instead of ``asyncio.coroutine``/``yield from``). - Added ``aiofiles.os.remove``, ``aiofiles.os.rename``, ``aiofiles.os.mkdir``, ``aiofiles.os.rmdir``. `#62 `_ 0.4.0 (2018-08-11) `````````````````` - Python 3.7 support. - Removed Python 3.3/3.4 support. If you use these versions, stick to aiofiles 0.3.x. 0.3.2 (2017-09-23) `````````````````` - The LICENSE is now included in the sdist. `#31 `_ 0.3.1 (2017-03-10) `````````````````` - Introduced a changelog. - ``aiofiles.os.sendfile`` will now work if the standard ``os`` module contains a ``sendfile`` function. Contributing ~~~~~~~~~~~~ Contributions are very welcome. Tests can be run with ``tox``, please ensure the coverage at least stays the same before you submit a pull request. aiofiles-0.8.0/poetry.lock000066400000000000000000000540351415044450100155130ustar00rootroot00000000000000[[package]] name = "atomicwrites" version = "1.4.0" description = "Atomic file writes." 
category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "attrs" version = "21.2.0" description = "Classes Without Boilerplate" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [package.extras] dev = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface", "furo", "sphinx", "sphinx-notfound-page", "pre-commit"] docs = ["furo", "sphinx", "zope.interface", "sphinx-notfound-page"] tests = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins", "zope.interface"] tests_no_zope = ["coverage[toml] (>=5.0.2)", "hypothesis", "pympler", "pytest (>=4.3.0)", "six", "mypy", "pytest-mypy-plugins"] [[package]] name = "backports.entry-points-selectable" version = "1.1.0" description = "Compatibility shim providing selectable entry points for older implementations" category = "dev" optional = false python-versions = ">=2.7" [package.dependencies] importlib-metadata = {version = "*", markers = "python_version < \"3.8\""} [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] testing = ["pytest (>=4.6)", "pytest-flake8", "pytest-cov", "pytest-black (>=0.3.7)", "pytest-mypy", "pytest-checkdocs (>=2.4)", "pytest-enabler (>=1.0.1)"] [[package]] name = "colorama" version = "0.4.4" description = "Cross-platform colored terminal text." category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" [[package]] name = "coverage" version = "5.5" description = "Code coverage measurement for Python" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, <4" [package.extras] toml = ["toml"] [[package]] name = "distlib" version = "0.3.2" description = "Distribution utilities" category = "dev" optional = false python-versions = "*" [[package]] name = "filelock" version = "3.0.12" description = "A platform independent file lock." 
category = "dev" optional = false python-versions = "*" [[package]] name = "importlib-metadata" version = "4.8.1" description = "Read metadata from Python packages" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] typing-extensions = {version = ">=3.6.4", markers = "python_version < \"3.8\""} zipp = ">=0.5" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] perf = ["ipython"] testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "packaging", "pep517", "pyfakefs", "flufl.flake8", "pytest-perf (>=0.9.2)", "pytest-black (>=0.3.7)", "pytest-mypy", "importlib-resources (>=1.3)"] [[package]] name = "importlib-resources" version = "5.2.2" description = "Read resources from Python packages" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] zipp = {version = ">=3.1.0", markers = "python_version < \"3.10\""} [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "pytest-black (>=0.3.7)", "pytest-mypy"] [[package]] name = "iniconfig" version = "1.1.1" description = "iniconfig: brain-dead simple config-ini parsing" category = "dev" optional = false python-versions = "*" [[package]] name = "packaging" version = "21.0" description = "Core utilities for Python packages" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] pyparsing = ">=2.0.2" [[package]] name = "platformdirs" version = "2.3.0" description = "A small Python module for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." category = "dev" optional = false python-versions = ">=3.6" [package.extras] docs = ["Sphinx (>=4)", "furo (>=2021.7.5b38)", "proselint (>=0.10.2)", "sphinx-autodoc-typehints (>=1.12)"] test = ["appdirs (==1.4.4)", "pytest (>=6)", "pytest-cov (>=2.7)", "pytest-mock (>=3.6)"] [[package]] name = "pluggy" version = "1.0.0" description = "plugin and hook calling mechanisms for python" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] name = "py" version = "1.10.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" [[package]] name = "pyparsing" version = "2.4.7" description = "Python parsing module" category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "pytest" version = "6.2.5" description = "pytest: simple powerful testing with Python" category = "dev" optional = false python-versions = ">=3.6" [package.dependencies] atomicwrites = {version = ">=1.0", markers = "sys_platform == \"win32\""} attrs = ">=19.2.0" colorama = {version = "*", markers = "sys_platform == \"win32\""} importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" py = ">=1.8.2" toml = "*" [package.extras] testing = ["argcomplete", "hypothesis (>=3.56)", "mock", "nose", "requests", "xmlschema"] [[package]] name = "pytest-asyncio" version = "0.15.1" description = "Pytest support for asyncio." 
category = "dev" optional = false python-versions = ">= 3.6" [package.dependencies] pytest = ">=5.4.0" [package.extras] testing = ["coverage", "hypothesis (>=5.7.1)"] [[package]] name = "six" version = "1.16.0" description = "Python 2 and 3 compatibility utilities" category = "dev" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "toml" version = "0.10.2" description = "Python Library for Tom's Obvious, Minimal Language" category = "dev" optional = false python-versions = ">=2.6, !=3.0.*, !=3.1.*, !=3.2.*" [[package]] name = "tox" version = "3.24.4" description = "tox is a generic virtualenv management and test command line tool" category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [package.dependencies] colorama = {version = ">=0.4.1", markers = "platform_system == \"Windows\""} filelock = ">=3.0.0" importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} packaging = ">=14" pluggy = ">=0.12.0" py = ">=1.4.17" six = ">=1.14.0" toml = ">=0.9.4" virtualenv = ">=16.0.0,<20.0.0 || >20.0.0,<20.0.1 || >20.0.1,<20.0.2 || >20.0.2,<20.0.3 || >20.0.3,<20.0.4 || >20.0.4,<20.0.5 || >20.0.5,<20.0.6 || >20.0.6,<20.0.7 || >20.0.7" [package.extras] docs = ["pygments-github-lexers (>=0.0.5)", "sphinx (>=2.0.0)", "sphinxcontrib-autoprogram (>=0.1.5)", "towncrier (>=18.5.0)"] testing = ["flaky (>=3.4.0)", "freezegun (>=0.3.11)", "psutil (>=5.6.1)", "pytest (>=4.0.0)", "pytest-cov (>=2.5.1)", "pytest-mock (>=1.10.0)", "pytest-randomly (>=1.0.0)", "pytest-xdist (>=1.22.2)", "pathlib2 (>=2.3.3)"] [[package]] name = "typing-extensions" version = "3.10.0.2" description = "Backported and Experimental Type Hints for Python 3.5+" category = "dev" optional = false python-versions = "*" [[package]] name = "virtualenv" version = "20.8.0" description = "Virtual Python Environment builder" category = "dev" optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,>=2.7" [package.dependencies] "backports.entry-points-selectable" = ">=1.0.4" distlib = ">=0.3.1,<1" filelock = ">=3.0.0,<4" importlib-metadata = {version = ">=0.12", markers = "python_version < \"3.8\""} importlib-resources = {version = ">=1.0", markers = "python_version < \"3.7\""} platformdirs = ">=2,<3" six = ">=1.9.0,<2" [package.extras] docs = ["proselint (>=0.10.2)", "sphinx (>=3)", "sphinx-argparse (>=0.2.5)", "sphinx-rtd-theme (>=0.4.3)", "towncrier (>=19.9.0rc1)"] testing = ["coverage (>=4)", "coverage-enable-subprocess (>=1)", "flaky (>=3)", "pytest (>=4)", "pytest-env (>=0.6.2)", "pytest-freezegun (>=0.4.1)", "pytest-mock (>=2)", "pytest-randomly (>=1)", "pytest-timeout (>=1)", "packaging (>=20.0)"] [[package]] name = "zipp" version = "3.5.0" description = "Backport of pathlib-compatible object wrapper for zip files" category = "dev" optional = false python-versions = ">=3.6" [package.extras] docs = ["sphinx", "jaraco.packaging (>=8.2)", "rst.linker (>=1.9)"] testing = ["pytest (>=4.6)", "pytest-checkdocs (>=2.4)", "pytest-flake8", "pytest-cov", "pytest-enabler (>=1.0.1)", "jaraco.itertools", "func-timeout", "pytest-black (>=0.3.7)", "pytest-mypy"] [metadata] lock-version = "1.1" python-versions = "^3.6" content-hash = "50939ecdc645332d6d6d2daa4ab974cf6816d17793584110983d5abf51fe527b" [metadata.files] atomicwrites = [ {file = "atomicwrites-1.4.0-py2.py3-none-any.whl", hash = "sha256:6d1784dea7c0c8d4a5172b6c620f40b6e4cbfdf96d783691f2e1302a7b88e197"}, {file = "atomicwrites-1.4.0.tar.gz", hash = 
"sha256:ae70396ad1a434f9c7046fd2dd196fc04b12f9e91ffb859164193be8b6168a7a"}, ] attrs = [ {file = "attrs-21.2.0-py2.py3-none-any.whl", hash = "sha256:149e90d6d8ac20db7a955ad60cf0e6881a3f20d37096140088356da6c716b0b1"}, {file = "attrs-21.2.0.tar.gz", hash = "sha256:ef6aaac3ca6cd92904cdd0d83f629a15f18053ec84e6432106f7a4d04ae4f5fb"}, ] "backports.entry-points-selectable" = [ {file = "backports.entry_points_selectable-1.1.0-py2.py3-none-any.whl", hash = "sha256:a6d9a871cde5e15b4c4a53e3d43ba890cc6861ec1332c9c2428c92f977192acc"}, {file = "backports.entry_points_selectable-1.1.0.tar.gz", hash = "sha256:988468260ec1c196dab6ae1149260e2f5472c9110334e5d51adcb77867361f6a"}, ] colorama = [ {file = "colorama-0.4.4-py2.py3-none-any.whl", hash = "sha256:9f47eda37229f68eee03b24b9748937c7dc3868f906e8ba69fbcbdd3bc5dc3e2"}, {file = "colorama-0.4.4.tar.gz", hash = "sha256:5941b2b48a20143d2267e95b1c2a7603ce057ee39fd88e7329b0c292aa16869b"}, ] coverage = [ {file = "coverage-5.5-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:b6d534e4b2ab35c9f93f46229363e17f63c53ad01330df9f2d6bd1187e5eaacf"}, {file = "coverage-5.5-cp27-cp27m-manylinux1_i686.whl", hash = "sha256:b7895207b4c843c76a25ab8c1e866261bcfe27bfaa20c192de5190121770672b"}, {file = "coverage-5.5-cp27-cp27m-manylinux1_x86_64.whl", hash = "sha256:c2723d347ab06e7ddad1a58b2a821218239249a9e4365eaff6649d31180c1669"}, {file = "coverage-5.5-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:900fbf7759501bc7807fd6638c947d7a831fc9fdf742dc10f02956ff7220fa90"}, {file = "coverage-5.5-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:004d1880bed2d97151facef49f08e255a20ceb6f9432df75f4eef018fdd5a78c"}, {file = "coverage-5.5-cp27-cp27m-win32.whl", hash = "sha256:06191eb60f8d8a5bc046f3799f8a07a2d7aefb9504b0209aff0b47298333302a"}, {file = "coverage-5.5-cp27-cp27m-win_amd64.whl", hash = "sha256:7501140f755b725495941b43347ba8a2777407fc7f250d4f5a7d2a1050ba8e82"}, {file = "coverage-5.5-cp27-cp27mu-manylinux1_i686.whl", hash = "sha256:372da284cfd642d8e08ef606917846fa2ee350f64994bebfbd3afb0040436905"}, {file = "coverage-5.5-cp27-cp27mu-manylinux1_x86_64.whl", hash = "sha256:8963a499849a1fc54b35b1c9f162f4108017b2e6db2c46c1bed93a72262ed083"}, {file = "coverage-5.5-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:869a64f53488f40fa5b5b9dcb9e9b2962a66a87dab37790f3fcfb5144b996ef5"}, {file = "coverage-5.5-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:4a7697d8cb0f27399b0e393c0b90f0f1e40c82023ea4d45d22bce7032a5d7b81"}, {file = "coverage-5.5-cp310-cp310-macosx_10_14_x86_64.whl", hash = "sha256:8d0a0725ad7c1a0bcd8d1b437e191107d457e2ec1084b9f190630a4fb1af78e6"}, {file = "coverage-5.5-cp310-cp310-manylinux1_x86_64.whl", hash = "sha256:51cb9476a3987c8967ebab3f0fe144819781fca264f57f89760037a2ea191cb0"}, {file = "coverage-5.5-cp310-cp310-win_amd64.whl", hash = "sha256:c0891a6a97b09c1f3e073a890514d5012eb256845c451bd48f7968ef939bf4ae"}, {file = "coverage-5.5-cp35-cp35m-macosx_10_9_x86_64.whl", hash = "sha256:3487286bc29a5aa4b93a072e9592f22254291ce96a9fbc5251f566b6b7343cdb"}, {file = "coverage-5.5-cp35-cp35m-manylinux1_i686.whl", hash = "sha256:deee1077aae10d8fa88cb02c845cfba9b62c55e1183f52f6ae6a2df6a2187160"}, {file = "coverage-5.5-cp35-cp35m-manylinux1_x86_64.whl", hash = "sha256:f11642dddbb0253cc8853254301b51390ba0081750a8ac03f20ea8103f0c56b6"}, {file = "coverage-5.5-cp35-cp35m-manylinux2010_i686.whl", hash = "sha256:6c90e11318f0d3c436a42409f2749ee1a115cd8b067d7f14c148f1ce5574d701"}, {file = "coverage-5.5-cp35-cp35m-manylinux2010_x86_64.whl", hash = 
"sha256:30c77c1dc9f253283e34c27935fded5015f7d1abe83bc7821680ac444eaf7793"}, {file = "coverage-5.5-cp35-cp35m-win32.whl", hash = "sha256:9a1ef3b66e38ef8618ce5fdc7bea3d9f45f3624e2a66295eea5e57966c85909e"}, {file = "coverage-5.5-cp35-cp35m-win_amd64.whl", hash = "sha256:972c85d205b51e30e59525694670de6a8a89691186012535f9d7dbaa230e42c3"}, {file = "coverage-5.5-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:af0e781009aaf59e25c5a678122391cb0f345ac0ec272c7961dc5455e1c40066"}, {file = "coverage-5.5-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:74d881fc777ebb11c63736622b60cb9e4aee5cace591ce274fb69e582a12a61a"}, {file = "coverage-5.5-cp36-cp36m-manylinux1_x86_64.whl", hash = "sha256:92b017ce34b68a7d67bd6d117e6d443a9bf63a2ecf8567bb3d8c6c7bc5014465"}, {file = "coverage-5.5-cp36-cp36m-manylinux2010_i686.whl", hash = "sha256:d636598c8305e1f90b439dbf4f66437de4a5e3c31fdf47ad29542478c8508bbb"}, {file = "coverage-5.5-cp36-cp36m-manylinux2010_x86_64.whl", hash = "sha256:41179b8a845742d1eb60449bdb2992196e211341818565abded11cfa90efb821"}, {file = "coverage-5.5-cp36-cp36m-win32.whl", hash = "sha256:040af6c32813fa3eae5305d53f18875bedd079960822ef8ec067a66dd8afcd45"}, {file = "coverage-5.5-cp36-cp36m-win_amd64.whl", hash = "sha256:5fec2d43a2cc6965edc0bb9e83e1e4b557f76f843a77a2496cbe719583ce8184"}, {file = "coverage-5.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:18ba8bbede96a2c3dde7b868de9dcbd55670690af0988713f0603f037848418a"}, {file = "coverage-5.5-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:2910f4d36a6a9b4214bb7038d537f015346f413a975d57ca6b43bf23d6563b53"}, {file = "coverage-5.5-cp37-cp37m-manylinux1_x86_64.whl", hash = "sha256:f0b278ce10936db1a37e6954e15a3730bea96a0997c26d7fee88e6c396c2086d"}, {file = "coverage-5.5-cp37-cp37m-manylinux2010_i686.whl", hash = "sha256:796c9c3c79747146ebd278dbe1e5c5c05dd6b10cc3bcb8389dfdf844f3ead638"}, {file = "coverage-5.5-cp37-cp37m-manylinux2010_x86_64.whl", hash = "sha256:53194af30d5bad77fcba80e23a1441c71abfb3e01192034f8246e0d8f99528f3"}, {file = "coverage-5.5-cp37-cp37m-win32.whl", hash = "sha256:184a47bbe0aa6400ed2d41d8e9ed868b8205046518c52464fde713ea06e3a74a"}, {file = "coverage-5.5-cp37-cp37m-win_amd64.whl", hash = "sha256:2949cad1c5208b8298d5686d5a85b66aae46d73eec2c3e08c817dd3513e5848a"}, {file = "coverage-5.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:217658ec7187497e3f3ebd901afdca1af062b42cfe3e0dafea4cced3983739f6"}, {file = "coverage-5.5-cp38-cp38-manylinux1_i686.whl", hash = "sha256:1aa846f56c3d49205c952d8318e76ccc2ae23303351d9270ab220004c580cfe2"}, {file = "coverage-5.5-cp38-cp38-manylinux1_x86_64.whl", hash = "sha256:24d4a7de75446be83244eabbff746d66b9240ae020ced65d060815fac3423759"}, {file = "coverage-5.5-cp38-cp38-manylinux2010_i686.whl", hash = "sha256:d1f8bf7b90ba55699b3a5e44930e93ff0189aa27186e96071fac7dd0d06a1873"}, {file = "coverage-5.5-cp38-cp38-manylinux2010_x86_64.whl", hash = "sha256:970284a88b99673ccb2e4e334cfb38a10aab7cd44f7457564d11898a74b62d0a"}, {file = "coverage-5.5-cp38-cp38-win32.whl", hash = "sha256:01d84219b5cdbfc8122223b39a954820929497a1cb1422824bb86b07b74594b6"}, {file = "coverage-5.5-cp38-cp38-win_amd64.whl", hash = "sha256:2e0d881ad471768bf6e6c2bf905d183543f10098e3b3640fc029509530091502"}, {file = "coverage-5.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:d1f9ce122f83b2305592c11d64f181b87153fc2c2bbd3bb4a3dde8303cfb1a6b"}, {file = "coverage-5.5-cp39-cp39-manylinux1_i686.whl", hash = "sha256:13c4ee887eca0f4c5a247b75398d4114c37882658300e153113dafb1d76de529"}, {file = "coverage-5.5-cp39-cp39-manylinux1_x86_64.whl", 
hash = "sha256:52596d3d0e8bdf3af43db3e9ba8dcdaac724ba7b5ca3f6358529d56f7a166f8b"}, {file = "coverage-5.5-cp39-cp39-manylinux2010_i686.whl", hash = "sha256:2cafbbb3af0733db200c9b5f798d18953b1a304d3f86a938367de1567f4b5bff"}, {file = "coverage-5.5-cp39-cp39-manylinux2010_x86_64.whl", hash = "sha256:44d654437b8ddd9eee7d1eaee28b7219bec228520ff809af170488fd2fed3e2b"}, {file = "coverage-5.5-cp39-cp39-win32.whl", hash = "sha256:d314ed732c25d29775e84a960c3c60808b682c08d86602ec2c3008e1202e3bb6"}, {file = "coverage-5.5-cp39-cp39-win_amd64.whl", hash = "sha256:13034c4409db851670bc9acd836243aeee299949bd5673e11844befcb0149f03"}, {file = "coverage-5.5-pp36-none-any.whl", hash = "sha256:f030f8873312a16414c0d8e1a1ddff2d3235655a2174e3648b4fa66b3f2f1079"}, {file = "coverage-5.5-pp37-none-any.whl", hash = "sha256:2a3859cb82dcbda1cfd3e6f71c27081d18aa251d20a17d87d26d4cd216fb0af4"}, {file = "coverage-5.5.tar.gz", hash = "sha256:ebe78fe9a0e874362175b02371bdfbee64d8edc42a044253ddf4ee7d3c15212c"}, ] distlib = [ {file = "distlib-0.3.2-py2.py3-none-any.whl", hash = "sha256:23e223426b28491b1ced97dc3bbe183027419dfc7982b4fa2f05d5f3ff10711c"}, {file = "distlib-0.3.2.zip", hash = "sha256:106fef6dc37dd8c0e2c0a60d3fca3e77460a48907f335fa28420463a6f799736"}, ] filelock = [ {file = "filelock-3.0.12-py3-none-any.whl", hash = "sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"}, {file = "filelock-3.0.12.tar.gz", hash = "sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59"}, ] importlib-metadata = [ {file = "importlib_metadata-4.8.1-py3-none-any.whl", hash = "sha256:b618b6d2d5ffa2f16add5697cf57a46c76a56229b0ed1c438322e4e95645bd15"}, {file = "importlib_metadata-4.8.1.tar.gz", hash = "sha256:f284b3e11256ad1e5d03ab86bb2ccd6f5339688ff17a4d797a0fe7df326f23b1"}, ] importlib-resources = [ {file = "importlib_resources-5.2.2-py3-none-any.whl", hash = "sha256:2480d8e07d1890056cb53c96e3de44fead9c62f2ba949b0f2e4c4345f4afa977"}, {file = "importlib_resources-5.2.2.tar.gz", hash = "sha256:a65882a4d0fe5fbf702273456ba2ce74fe44892c25e42e057aca526b702a6d4b"}, ] iniconfig = [ {file = "iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3"}, {file = "iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32"}, ] packaging = [ {file = "packaging-21.0-py3-none-any.whl", hash = "sha256:c86254f9220d55e31cc94d69bade760f0847da8000def4dfe1c6b872fd14ff14"}, {file = "packaging-21.0.tar.gz", hash = "sha256:7dc96269f53a4ccec5c0670940a4281106dd0bb343f47b7471f779df49c2fbe7"}, ] platformdirs = [ {file = "platformdirs-2.3.0-py3-none-any.whl", hash = "sha256:8003ac87717ae2c7ee1ea5a84a1a61e87f3fbd16eb5aadba194ea30a9019f648"}, {file = "platformdirs-2.3.0.tar.gz", hash = "sha256:15b056538719b1c94bdaccb29e5f81879c7f7f0f4a153f46086d155dffcd4f0f"}, ] pluggy = [ {file = "pluggy-1.0.0-py2.py3-none-any.whl", hash = "sha256:74134bbf457f031a36d68416e1509f34bd5ccc019f0bcc952c7b909d06b37bd3"}, {file = "pluggy-1.0.0.tar.gz", hash = "sha256:4224373bacce55f955a878bf9cfa763c1e360858e330072059e10bad68531159"}, ] py = [ {file = "py-1.10.0-py2.py3-none-any.whl", hash = "sha256:3b80836aa6d1feeaa108e046da6423ab8f6ceda6468545ae8d02d9d58d18818a"}, {file = "py-1.10.0.tar.gz", hash = "sha256:21b81bda15b66ef5e1a777a21c4dcd9c20ad3efd0b3f817e7a809035269e1bd3"}, ] pyparsing = [ {file = "pyparsing-2.4.7-py2.py3-none-any.whl", hash = "sha256:ef9d7589ef3c200abe66653d3f1ab1033c3c419ae9b9bdb1240a85b024efc88b"}, {file = "pyparsing-2.4.7.tar.gz", hash 
= "sha256:c203ec8783bf771a155b207279b9bccb8dea02d8f0c9e5f8ead507bc3246ecc1"}, ] pytest = [ {file = "pytest-6.2.5-py3-none-any.whl", hash = "sha256:7310f8d27bc79ced999e760ca304d69f6ba6c6649c0b60fb0e04a4a77cacc134"}, {file = "pytest-6.2.5.tar.gz", hash = "sha256:131b36680866a76e6781d13f101efb86cf674ebb9762eb70d3082b6f29889e89"}, ] pytest-asyncio = [ {file = "pytest-asyncio-0.15.1.tar.gz", hash = "sha256:2564ceb9612bbd560d19ca4b41347b54e7835c2f792c504f698e05395ed63f6f"}, {file = "pytest_asyncio-0.15.1-py3-none-any.whl", hash = "sha256:3042bcdf1c5d978f6b74d96a151c4cfb9dcece65006198389ccd7e6c60eb1eea"}, ] six = [ {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, ] toml = [ {file = "toml-0.10.2-py2.py3-none-any.whl", hash = "sha256:806143ae5bfb6a3c6e736a764057db0e6a0e05e338b5630894a5f779cabb4f9b"}, {file = "toml-0.10.2.tar.gz", hash = "sha256:b3bda1d108d5dd99f4a20d24d9c348e91c4db7ab1b749200bded2f839ccbe68f"}, ] tox = [ {file = "tox-3.24.4-py2.py3-none-any.whl", hash = "sha256:5e274227a53dc9ef856767c21867377ba395992549f02ce55eb549f9fb9a8d10"}, {file = "tox-3.24.4.tar.gz", hash = "sha256:c30b57fa2477f1fb7c36aa1d83292d5c2336cd0018119e1b1c17340e2c2708ca"}, ] typing-extensions = [ {file = "typing_extensions-3.10.0.2-py2-none-any.whl", hash = "sha256:d8226d10bc02a29bcc81df19a26e56a9647f8b0a6d4a83924139f4a8b01f17b7"}, {file = "typing_extensions-3.10.0.2-py3-none-any.whl", hash = "sha256:f1d25edafde516b146ecd0613dabcc61409817af4766fbbcfb8d1ad4ec441a34"}, {file = "typing_extensions-3.10.0.2.tar.gz", hash = "sha256:49f75d16ff11f1cd258e1b988ccff82a3ca5570217d7ad8c5f48205dd99a677e"}, ] virtualenv = [ {file = "virtualenv-20.8.0-py2.py3-none-any.whl", hash = "sha256:a4b987ec31c3c9996cf1bc865332f967fe4a0512c41b39652d6224f696e69da5"}, {file = "virtualenv-20.8.0.tar.gz", hash = "sha256:4da4ac43888e97de9cf4fdd870f48ed864bbfd133d2c46cbdec941fed4a25aef"}, ] zipp = [ {file = "zipp-3.5.0-py3-none-any.whl", hash = "sha256:957cfda87797e389580cb8b9e3870841ca991e2125350677b2ca83a0e99390a3"}, {file = "zipp-3.5.0.tar.gz", hash = "sha256:f5812b1e007e48cff63449a5e9f4e7ebea716b4111f9c4f9a645f91d579bf0c4"}, ] aiofiles-0.8.0/pyproject.toml000066400000000000000000000006631415044450100162310ustar00rootroot00000000000000[tool.poetry] name = "aiofiles" version = "0.8.0" description = "File support for asyncio." authors = ["Tin Tvrtkovic "] license = "Apache-2.0" readme = "README.rst" [tool.poetry.dependencies] python = "^3.6" [tool.poetry.dev-dependencies] pytest = "^6.2.2" pytest-asyncio = "^0.15.1" coverage = "^5.5" tox = "^3.23.0" [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" aiofiles-0.8.0/src/000077500000000000000000000000001415044450100140775ustar00rootroot00000000000000aiofiles-0.8.0/src/aiofiles/000077500000000000000000000000001415044450100156725ustar00rootroot00000000000000aiofiles-0.8.0/src/aiofiles/__init__.py000066400000000000000000000002101415044450100177740ustar00rootroot00000000000000"""Utilities for asyncio-friendly file handling.""" from .threadpool import open from . 
import tempfile __all__ = ["open", "tempfile"] aiofiles-0.8.0/src/aiofiles/base.py000066400000000000000000000041021415044450100171530ustar00rootroot00000000000000"""Various base classes.""" from types import coroutine from collections.abc import Coroutine class AsyncBase: def __init__(self, file, loop, executor): self._file = file self._loop = loop self._executor = executor def __aiter__(self): """We are our own iterator.""" return self def __repr__(self): return super().__repr__() + " wrapping " + repr(self._file) async def __anext__(self): """Simulate normal file iteration.""" line = await self.readline() if line: return line else: raise StopAsyncIteration class _ContextManager(Coroutine): __slots__ = ("_coro", "_obj") def __init__(self, coro): self._coro = coro self._obj = None def send(self, value): return self._coro.send(value) def throw(self, typ, val=None, tb=None): if val is None: return self._coro.throw(typ) elif tb is None: return self._coro.throw(typ, val) else: return self._coro.throw(typ, val, tb) def close(self): return self._coro.close() @property def gi_frame(self): return self._coro.gi_frame @property def gi_running(self): return self._coro.gi_running @property def gi_code(self): return self._coro.gi_code def __next__(self): return self.send(None) @coroutine def __iter__(self): resp = yield from self._coro return resp def __await__(self): resp = yield from self._coro return resp async def __anext__(self): resp = await self._coro return resp async def __aenter__(self): self._obj = await self._coro return self._obj async def __aexit__(self, exc_type, exc, tb): self._obj.close() self._obj = None class AiofilesContextManager(_ContextManager): """An adjusted async context manager for aiofiles.""" async def __aexit__(self, exc_type, exc_val, exc_tb): await self._obj.close() self._obj = None aiofiles-0.8.0/src/aiofiles/os.py000066400000000000000000000013171415044450100166670ustar00rootroot00000000000000"""Async executor versions of file functions from the os module.""" import asyncio from functools import partial, wraps import os def wrap(func): @wraps(func) async def run(*args, loop=None, executor=None, **kwargs): if loop is None: loop = asyncio.get_event_loop() pfunc = partial(func, *args, **kwargs) return await loop.run_in_executor(executor, pfunc) return run from . 
import ospath as path stat = wrap(os.stat) rename = wrap(os.rename) replace = wrap(os.replace) remove = wrap(os.remove) mkdir = wrap(os.mkdir) makedirs = wrap(os.makedirs) rmdir = wrap(os.rmdir) removedirs = wrap(os.removedirs) if hasattr(os, "sendfile"): sendfile = wrap(os.sendfile) aiofiles-0.8.0/src/aiofiles/ospath.py000066400000000000000000000006031415044450100175410ustar00rootroot00000000000000"""Async executor versions of file functions from the os.path module.""" from .os import wrap from os import path exists = wrap(path.exists) isfile = wrap(path.isfile) isdir = wrap(path.isdir) getsize = wrap(path.getsize) getmtime = wrap(path.getmtime) getatime = wrap(path.getatime) getctime = wrap(path.getctime) samefile = wrap(path.samefile) sameopenfile = wrap(path.sameopenfile) aiofiles-0.8.0/src/aiofiles/tempfile/000077500000000000000000000000001415044450100174775ustar00rootroot00000000000000aiofiles-0.8.0/src/aiofiles/tempfile/__init__.py000066400000000000000000000161021415044450100216100ustar00rootroot00000000000000# Imports import asyncio from tempfile import ( TemporaryFile as syncTemporaryFile, NamedTemporaryFile as syncNamedTemporaryFile, SpooledTemporaryFile as syncSpooledTemporaryFile, TemporaryDirectory as syncTemporaryDirectory, _TemporaryFileWrapper as syncTemporaryFileWrapper, ) from io import FileIO, TextIOBase, BufferedReader, BufferedWriter, BufferedRandom from functools import partial, singledispatch from ..base import AiofilesContextManager from ..threadpool.text import AsyncTextIOWrapper from ..threadpool.binary import AsyncBufferedIOBase, AsyncBufferedReader, AsyncFileIO from .temptypes import AsyncSpooledTemporaryFile, AsyncTemporaryDirectory __all__ = [ "NamedTemporaryFile", "TemporaryFile", "SpooledTemporaryFile", "TemporaryDirectory", ] # ================================================================ # Public methods for async open and return of temp file/directory # objects with async interface # ================================================================ def NamedTemporaryFile( mode="w+b", buffering=-1, encoding=None, newline=None, suffix=None, prefix=None, dir=None, delete=True, loop=None, executor=None, ): """Async open a named temporary file""" return AiofilesContextManager( _temporary_file( named=True, mode=mode, buffering=buffering, encoding=encoding, newline=newline, suffix=suffix, prefix=prefix, dir=dir, delete=delete, loop=loop, executor=executor, ) ) def TemporaryFile( mode="w+b", buffering=-1, encoding=None, newline=None, suffix=None, prefix=None, dir=None, loop=None, executor=None, ): """Async open an unnamed temporary file""" return AiofilesContextManager( _temporary_file( named=False, mode=mode, buffering=buffering, encoding=encoding, newline=newline, suffix=suffix, prefix=prefix, dir=dir, loop=loop, executor=executor, ) ) def SpooledTemporaryFile( max_size=0, mode="w+b", buffering=-1, encoding=None, newline=None, suffix=None, prefix=None, dir=None, loop=None, executor=None, ): """Async open a spooled temporary file""" return AiofilesContextManager( _spooled_temporary_file( max_size=max_size, mode=mode, buffering=buffering, encoding=encoding, newline=newline, suffix=suffix, prefix=prefix, dir=dir, loop=loop, executor=executor, ) ) def TemporaryDirectory(suffix=None, prefix=None, dir=None, loop=None, executor=None): """Async open a temporary directory""" return AiofilesContextManagerTempDir( _temporary_directory( suffix=suffix, prefix=prefix, dir=dir, loop=loop, executor=executor ) ) # ========================================================= # 
Internal coroutines to open new temp files/directories # ========================================================= async def _temporary_file( named=True, mode="w+b", buffering=-1, encoding=None, newline=None, suffix=None, prefix=None, dir=None, delete=True, loop=None, executor=None, max_size=0, ): """Async method to open a temporary file with async interface""" if loop is None: loop = asyncio.get_event_loop() if named: cb = partial( syncNamedTemporaryFile, mode=mode, buffering=buffering, encoding=encoding, newline=newline, suffix=suffix, prefix=prefix, dir=dir, delete=delete, ) else: cb = partial( syncTemporaryFile, mode=mode, buffering=buffering, encoding=encoding, newline=newline, suffix=suffix, prefix=prefix, dir=dir, ) f = await loop.run_in_executor(executor, cb) # Wrap based on type of underlying IO object if type(f) is syncTemporaryFileWrapper: # _TemporaryFileWrapper was used (named files) result = wrap(f.file, f, loop=loop, executor=executor) # add delete property result.delete = f.delete return result else: # IO object was returned directly without wrapper return wrap(f, f, loop=loop, executor=executor) async def _spooled_temporary_file( max_size=0, mode="w+b", buffering=-1, encoding=None, newline=None, suffix=None, prefix=None, dir=None, loop=None, executor=None, ): """Open a spooled temporary file with async interface""" if loop is None: loop = asyncio.get_event_loop() cb = partial( syncSpooledTemporaryFile, max_size=max_size, mode=mode, buffering=buffering, encoding=encoding, newline=newline, suffix=suffix, prefix=prefix, dir=dir, ) f = await loop.run_in_executor(executor, cb) # Single interface provided by SpooledTemporaryFile for all modes return AsyncSpooledTemporaryFile(f, loop=loop, executor=executor) async def _temporary_directory( suffix=None, prefix=None, dir=None, loop=None, executor=None ): """Async method to open a temporary directory with async interface""" if loop is None: loop = asyncio.get_event_loop() cb = partial(syncTemporaryDirectory, suffix, prefix, dir) f = await loop.run_in_executor(executor, cb) return AsyncTemporaryDirectory(f, loop=loop, executor=executor) class AiofilesContextManagerTempDir(AiofilesContextManager): """With returns the directory location, not the object (matching sync lib)""" async def __aenter__(self): self._obj = await self._coro return self._obj.name @singledispatch def wrap(base_io_obj, file, *, loop=None, executor=None): """Wrap the object with interface based on type of underlying IO""" raise TypeError("Unsupported IO type: {}".format(base_io_obj)) @wrap.register(TextIOBase) def _(base_io_obj, file, *, loop=None, executor=None): return AsyncTextIOWrapper(file, loop=loop, executor=executor) @wrap.register(BufferedWriter) def _(base_io_obj, file, *, loop=None, executor=None): return AsyncBufferedIOBase(file, loop=loop, executor=executor) @wrap.register(BufferedReader) @wrap.register(BufferedRandom) def _(base_io_obj, file, *, loop=None, executor=None): return AsyncBufferedReader(file, loop=loop, executor=executor) @wrap.register(FileIO) def _(base_io_obj, file, *, loop=None, executor=None): return AsyncFileIO(file, loop=loop, executor=executor) aiofiles-0.8.0/src/aiofiles/tempfile/temptypes.py000066400000000000000000000041711415044450100221060ustar00rootroot00000000000000"""Async wrappers for spooled temp files and temp directory objects""" # Imports import asyncio from types import coroutine from ..base import AsyncBase from ..threadpool.utils import ( delegate_to_executor, proxy_property_directly, cond_delegate_to_executor, ) from 
functools import partial @delegate_to_executor("fileno", "rollover") @cond_delegate_to_executor( "close", "flush", "isatty", "newlines", "read", "readline", "readlines", "seek", "tell", "truncate", ) @proxy_property_directly("closed", "encoding", "mode", "name", "softspace") class AsyncSpooledTemporaryFile(AsyncBase): """Async wrapper for SpooledTemporaryFile class""" async def _check(self): if self._file._rolled: return max_size = self._file._max_size if max_size and self._file.tell() > max_size: await self.rollover() async def write(self, s): """Implementation to anticipate rollover""" if self._file._rolled: cb = partial(self._file.write, s) return await self._loop.run_in_executor(self._executor, cb) else: file = self._file._file # reference underlying base IO object rv = file.write(s) await self._check() return rv async def writelines(self, iterable): """Implementation to anticipate rollover""" if self._file._rolled: cb = partial(self._file.writelines, iterable) return await self._loop.run_in_executor(self._executor, cb) else: file = self._file._file # reference underlying base IO object rv = file.writelines(iterable) await self._check() return rv @delegate_to_executor("cleanup") @proxy_property_directly("name") class AsyncTemporaryDirectory: """Async wrapper for TemporaryDirectory class""" def __init__(self, file, loop, executor): self._file = file self._loop = loop self._executor = executor async def close(self): await self.cleanup() aiofiles-0.8.0/src/aiofiles/threadpool/000077500000000000000000000000001415044450100200335ustar00rootroot00000000000000aiofiles-0.8.0/src/aiofiles/threadpool/__init__.py000066400000000000000000000043411415044450100221460ustar00rootroot00000000000000"""Handle files using a thread pool executor.""" import asyncio from types import coroutine from io import ( FileIO, TextIOBase, BufferedReader, BufferedWriter, BufferedRandom, ) from functools import partial, singledispatch from .binary import AsyncBufferedIOBase, AsyncBufferedReader, AsyncFileIO from .text import AsyncTextIOWrapper from ..base import AiofilesContextManager sync_open = open __all__ = ("open",) def open( file, mode="r", buffering=-1, encoding=None, errors=None, newline=None, closefd=True, opener=None, *, loop=None, executor=None ): return AiofilesContextManager( _open( file, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, closefd=closefd, opener=opener, loop=loop, executor=executor, ) ) @coroutine def _open( file, mode="r", buffering=-1, encoding=None, errors=None, newline=None, closefd=True, opener=None, *, loop=None, executor=None ): """Open an asyncio file.""" if loop is None: loop = asyncio.get_event_loop() cb = partial( sync_open, file, mode=mode, buffering=buffering, encoding=encoding, errors=errors, newline=newline, closefd=closefd, opener=opener, ) f = yield from loop.run_in_executor(executor, cb) return wrap(f, loop=loop, executor=executor) @singledispatch def wrap(file, *, loop=None, executor=None): raise TypeError("Unsupported io type: {}.".format(file)) @wrap.register(TextIOBase) def _(file, *, loop=None, executor=None): return AsyncTextIOWrapper(file, loop=loop, executor=executor) @wrap.register(BufferedWriter) def _(file, *, loop=None, executor=None): return AsyncBufferedIOBase(file, loop=loop, executor=executor) @wrap.register(BufferedReader) @wrap.register(BufferedRandom) def _(file, *, loop=None, executor=None): return AsyncBufferedReader(file, loop=loop, executor=executor) @wrap.register(FileIO) def _(file, *, loop=None, executor=None): 
return AsyncFileIO(file, loop, executor) aiofiles-0.8.0/src/aiofiles/threadpool/binary.py000066400000000000000000000022171415044450100216730ustar00rootroot00000000000000from ..base import AsyncBase from .utils import ( delegate_to_executor, proxy_method_directly, proxy_property_directly, ) @delegate_to_executor( "close", "flush", "isatty", "read", "read1", "readinto", "readline", "readlines", "seek", "seekable", "tell", "truncate", "writable", "write", "writelines", ) @proxy_method_directly("detach", "fileno", "readable") @proxy_property_directly("closed", "raw", "name", "mode") class AsyncBufferedIOBase(AsyncBase): """The asyncio executor version of io.BufferedWriter.""" @delegate_to_executor("peek") class AsyncBufferedReader(AsyncBufferedIOBase): """The asyncio executor version of io.BufferedReader and Random.""" @delegate_to_executor( "close", "flush", "isatty", "read", "readall", "readinto", "readline", "readlines", "seek", "seekable", "tell", "truncate", "writable", "write", "writelines", ) @proxy_method_directly("fileno", "readable") @proxy_property_directly("closed", "name", "mode") class AsyncFileIO(AsyncBase): """The asyncio executor version of io.FileIO.""" aiofiles-0.8.0/src/aiofiles/threadpool/text.py000066400000000000000000000012311415044450100213660ustar00rootroot00000000000000from ..base import AsyncBase from .utils import ( delegate_to_executor, proxy_method_directly, proxy_property_directly, ) @delegate_to_executor( "close", "flush", "isatty", "read", "readable", "readline", "readlines", "seek", "seekable", "tell", "truncate", "write", "writable", "writelines", ) @proxy_method_directly("detach", "fileno", "readable") @proxy_property_directly( "buffer", "closed", "encoding", "errors", "line_buffering", "newlines", "name", "mode", ) class AsyncTextIOWrapper(AsyncBase): """The asyncio executor version of io.TextIOWrapper.""" aiofiles-0.8.0/src/aiofiles/threadpool/utils.py000066400000000000000000000035701415044450100215520ustar00rootroot00000000000000import functools from types import coroutine def delegate_to_executor(*attrs): def cls_builder(cls): for attr_name in attrs: setattr(cls, attr_name, _make_delegate_method(attr_name)) return cls return cls_builder def proxy_method_directly(*attrs): def cls_builder(cls): for attr_name in attrs: setattr(cls, attr_name, _make_proxy_method(attr_name)) return cls return cls_builder def proxy_property_directly(*attrs): def cls_builder(cls): for attr_name in attrs: setattr(cls, attr_name, _make_proxy_property(attr_name)) return cls return cls_builder def cond_delegate_to_executor(*attrs): def cls_builder(cls): for attr_name in attrs: setattr(cls, attr_name, _make_cond_delegate_method(attr_name)) return cls return cls_builder def _make_delegate_method(attr_name): @coroutine def method(self, *args, **kwargs): cb = functools.partial(getattr(self._file, attr_name), *args, **kwargs) return (yield from self._loop.run_in_executor(self._executor, cb)) return method def _make_proxy_method(attr_name): def method(self, *args, **kwargs): return getattr(self._file, attr_name)(*args, **kwargs) return method def _make_proxy_property(attr_name): def proxy_property(self): return getattr(self._file, attr_name) return property(proxy_property) def _make_cond_delegate_method(attr_name): """For spooled temp files, delegate only if rolled to file object""" async def method(self, *args, **kwargs): if self._file._rolled: cb = functools.partial(getattr(self._file, attr_name), *args, **kwargs) return await self._loop.run_in_executor(self._executor, cb) else: return 
getattr(self._file, attr_name)(*args, **kwargs) return method aiofiles-0.8.0/test_requirements.txt000066400000000000000000000001061415044450100176300ustar00rootroot00000000000000coverage==4.5.1 pytest-asyncio==0.9.0 pytest==3.7.1 pytest-cov==2.5.1 aiofiles-0.8.0/tests/000077500000000000000000000000001415044450100144525ustar00rootroot00000000000000aiofiles-0.8.0/tests/resources/000077500000000000000000000000001415044450100164645ustar00rootroot00000000000000aiofiles-0.8.0/tests/resources/multiline_file.txt000066400000000000000000000000331415044450100222220ustar00rootroot00000000000000line 1 line 2 line 3 line 4aiofiles-0.8.0/tests/resources/test_file1.txt000066400000000000000000000000121415044450100212550ustar00rootroot000000000000000123456789aiofiles-0.8.0/tests/test_os.py000066400000000000000000000144501415044450100165100ustar00rootroot00000000000000"""Tests for asyncio's os module.""" import aiofiles.os import asyncio from os.path import join, dirname, exists, isdir import pytest import platform @pytest.mark.asyncio async def test_stat(): """Test the stat call.""" filename = join(dirname(__file__), "resources", "test_file1.txt") stat_res = await aiofiles.os.stat(filename) assert stat_res.st_size == 10 @pytest.mark.asyncio async def test_remove(): """Test the remove call.""" filename = join(dirname(__file__), "resources", "test_file2.txt") with open(filename, "w") as f: f.write("Test file for remove call") assert exists(filename) await aiofiles.os.remove(filename) assert exists(filename) is False @pytest.mark.asyncio async def test_mkdir_and_rmdir(): """Test the mkdir and rmdir call.""" directory = join(dirname(__file__), "resources", "test_dir") await aiofiles.os.mkdir(directory) assert isdir(directory) await aiofiles.os.rmdir(directory) assert exists(directory) is False @pytest.mark.asyncio async def test_rename(): """Test the rename call.""" old_filename = join(dirname(__file__), "resources", "test_file1.txt") new_filename = join(dirname(__file__), "resources", "test_file2.txt") await aiofiles.os.rename(old_filename, new_filename) assert exists(old_filename) is False and exists(new_filename) await aiofiles.os.rename(new_filename, old_filename) assert exists(old_filename) and exists(new_filename) is False @pytest.mark.asyncio async def test_replace(): """Test the replace call.""" old_filename = join(dirname(__file__), "resources", "test_file1.txt") new_filename = join(dirname(__file__), "resources", "test_file2.txt") await aiofiles.os.replace(old_filename, new_filename) assert exists(old_filename) is False and exists(new_filename) await aiofiles.os.replace(new_filename, old_filename) assert exists(old_filename) and exists(new_filename) is False with open(new_filename, "w") as f: f.write("Test file") assert exists(old_filename) and exists(new_filename) await aiofiles.os.replace(old_filename, new_filename) assert exists(old_filename) is False and exists(new_filename) await aiofiles.os.replace(new_filename, old_filename) assert exists(old_filename) and exists(new_filename) is False @pytest.mark.skipif( "2.4" < platform.release() < "2.6.33", reason="sendfile() syscall doesn't allow file->file", ) @pytest.mark.skipif( platform.system() == "Darwin", reason="sendfile() doesn't work on mac", ) @pytest.mark.asyncio async def test_sendfile_file(tmpdir): """Test the sendfile functionality, file-to-file.""" filename = join(dirname(__file__), "resources", "test_file1.txt") tmp_filename = tmpdir.join("tmp.bin") with open(filename) as f: contents = f.read() input_file = await aiofiles.open(filename) 
output_file = await aiofiles.open(str(tmp_filename), mode="w+") size = (await aiofiles.os.stat(filename)).st_size input_fd = input_file.fileno() output_fd = output_file.fileno() await aiofiles.os.sendfile(output_fd, input_fd, 0, size) await output_file.seek(0) actual_contents = await output_file.read() actual_size = (await aiofiles.os.stat(str(tmp_filename))).st_size assert contents == actual_contents assert size == actual_size @pytest.mark.asyncio async def test_sendfile_socket(unused_tcp_port): """Test the sendfile functionality, file-to-socket.""" filename = join(dirname(__file__), "resources", "test_file1.txt") with open(filename, mode="rb") as f: contents = f.read() async def serve_file(_, writer): out_fd = writer.transport.get_extra_info("socket").fileno() size = (await aiofiles.os.stat(filename)).st_size in_file = await aiofiles.open(filename) try: in_fd = in_file.fileno() await aiofiles.os.sendfile(out_fd, in_fd, 0, size) finally: await in_file.close() await writer.drain() writer.close() server = await asyncio.start_server(serve_file, port=unused_tcp_port) reader, writer = await asyncio.open_connection("127.0.0.1", unused_tcp_port) actual_contents = await reader.read() writer.close() assert contents == actual_contents server.close() await server.wait_closed() @pytest.mark.asyncio async def test_exists(): """Test path.exists call.""" filename = join(dirname(__file__), "resources", "test_file1.txt") result = await aiofiles.os.path.exists(filename) assert result @pytest.mark.asyncio async def test_isfile(): """Test path.isfile call.""" filename = join(dirname(__file__), "resources", "test_file1.txt") result = await aiofiles.os.path.isfile(filename) assert result @pytest.mark.asyncio async def test_isdir(): """Test path.isdir call.""" filename = join(dirname(__file__), "resources") result = await aiofiles.os.path.isdir(filename) assert result @pytest.mark.asyncio async def test_getsize(): """Test path.getsize call.""" filename = join(dirname(__file__), "resources", "test_file1.txt") result = await aiofiles.os.path.getsize(filename) assert result == 10 @pytest.mark.asyncio async def test_samefile(): """Test path.samefile call.""" filename = join(dirname(__file__), "resources", "test_file1.txt") result = await aiofiles.os.path.samefile(filename, filename) assert result @pytest.mark.asyncio async def test_sameopenfile(): """Test path.samefile call.""" filename = join(dirname(__file__), "resources", "test_file1.txt") result = await aiofiles.os.path.samefile(filename, filename) assert result @pytest.mark.asyncio async def test_getmtime(): """Test path.getmtime call.""" filename = join(dirname(__file__), "resources", "test_file1.txt") result = await aiofiles.os.path.getmtime(filename) assert result @pytest.mark.asyncio async def test_getatime(): """Test path.getatime call.""" filename = join(dirname(__file__), "resources", "test_file1.txt") result = await aiofiles.os.path.getatime(filename) assert result @pytest.mark.asyncio async def test_getctime(): """Test path. call.""" filename = join(dirname(__file__), "resources", "test_file1.txt") result = await aiofiles.os.path.getctime(filename) assert result aiofiles-0.8.0/tests/test_simple.py000066400000000000000000000035221415044450100173560ustar00rootroot00000000000000"""Simple tests verifying basic functionality.""" import asyncio from aiofiles import threadpool import pytest @pytest.mark.asyncio async def test_serve_small_bin_file_sync(event_loop, tmpdir, unused_tcp_port): """Fire up a small simple file server, and fetch a file. 
The file is read into memory synchronously, so this test doesn't actually test anything except the general test concept. """ # First we'll write a small file. filename = "test.bin" file_content = b"0123456789" file = tmpdir.join(filename) file.write_binary(file_content) async def serve_file(reader, writer): full_filename = str(file) with open(full_filename, "rb") as f: writer.write(f.read()) writer.close() server = await asyncio.start_server(serve_file, port=unused_tcp_port) reader, _ = await asyncio.open_connection(host="localhost", port=unused_tcp_port) payload = await reader.read() assert payload == file_content server.close() await server.wait_closed() @pytest.mark.asyncio async def test_serve_small_bin_file(event_loop, tmpdir, unused_tcp_port): """Fire up a small simple file server, and fetch a file.""" # First we'll write a small file. filename = "test.bin" file_content = b"0123456789" file = tmpdir.join(filename) file.write_binary(file_content) async def serve_file(reader, writer): full_filename = str(file) f = await threadpool.open(full_filename, mode="rb") writer.write((await f.read())) await f.close() writer.close() server = await asyncio.start_server(serve_file, port=unused_tcp_port) reader, _ = await asyncio.open_connection(host="localhost", port=unused_tcp_port) payload = await reader.read() assert payload == file_content server.close() await server.wait_closed() aiofiles-0.8.0/tests/test_tempfile.py000066400000000000000000000044131415044450100176720ustar00rootroot00000000000000import asyncio import pytest from aiofiles import tempfile import os import io @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r+", "w+", "rb+", "wb+"]) async def test_temporary_file(mode): """Test temporary file.""" data = b"Hello World!\n" if "b" in mode else "Hello World!\n" async with tempfile.TemporaryFile(mode=mode) as f: for i in range(3): await f.write(data) await f.flush() await f.seek(0) async for line in f: assert line == data @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r+", "w+", "rb+", "wb+"]) async def test_named_temporary_file(mode): """Test named temporary file.""" data = b"Hello World!" if "b" in mode else "Hello World!" filename = None async with tempfile.NamedTemporaryFile(mode=mode) as f: await f.write(data) await f.flush() await f.seek(0) assert await f.read() == data filename = f.name assert os.path.exists(filename) assert os.path.isfile(filename) assert f.delete assert not os.path.exists(filename) @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r+", "w+", "rb+", "wb+"]) async def test_spooled_temporary_file(mode): """Test spooled temporary file.""" data = b"Hello World!" if "b" in mode else "Hello World!" 
async with tempfile.SpooledTemporaryFile(max_size=len(data) + 1, mode=mode) as f: await f.write(data) await f.flush() if "b" in mode: assert type(f._file._file) is io.BytesIO await f.write(data) await f.flush() if "b" in mode: assert type(f._file._file) is not io.BytesIO await f.seek(0) assert await f.read() == data + data @pytest.mark.asyncio @pytest.mark.parametrize("prefix, suffix", [("a", "b"), ("c", "d"), ("e", "f")]) async def test_temporary_directory(prefix, suffix, tmp_path): """Test temporary directory.""" dir_path = None async with tempfile.TemporaryDirectory( suffix=suffix, prefix=prefix, dir=tmp_path ) as d: dir_path = d assert os.path.exists(dir_path) assert os.path.isdir(dir_path) assert d[-1] == suffix assert d.split(os.sep)[-1][0] == prefix assert not os.path.exists(dir_path) aiofiles-0.8.0/tests/threadpool/000077500000000000000000000000001415044450100166135ustar00rootroot00000000000000aiofiles-0.8.0/tests/threadpool/test_binary.py000066400000000000000000000237661415044450100215260ustar00rootroot00000000000000"""PEP 0492/Python 3.5+ tests for binary files.""" import io from os.path import dirname, join from aiofiles.threadpool import open as aioopen import pytest @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_iteration(mode, buffering): """Test iterating over lines from a file.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") async with aioopen(filename, mode=mode, buffering=buffering) as file: # Append mode needs us to seek. await file.seek(0) counter = 1 # The old iteration pattern: while True: line = await file.readline() if not line: break assert line.strip() == b"line " + str(counter).encode() counter += 1 counter = 1 await file.seek(0) # The new iteration pattern: async for line in file: assert line.strip() == b"line " + str(counter).encode() counter += 1 assert file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_readlines(mode, buffering): """Test the readlines functionality.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") with open(filename, mode="rb") as f: expected = f.readlines() async with aioopen(str(filename), mode=mode) as file: # Append mode needs us to seek. await file.seek(0) actual = await file.readlines() assert actual == expected @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb+", "wb", "ab"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_flush(mode, buffering, tmpdir): """Test flushing to a file.""" filename = "file.bin" full_file = tmpdir.join(filename) if "r" in mode: full_file.ensure() # Read modes want it to already exist. async with aioopen(str(full_file), mode=mode, buffering=buffering) as file: await file.write(b"0") # Shouldn't flush. if buffering == -1: assert b"" == full_file.read_binary() else: assert b"0" == full_file.read_binary() await file.flush() assert b"0" == full_file.read_binary() @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb+", "wb+", "ab+"]) async def test_simple_peek(mode, tmpdir): """Test flushing to a file.""" filename = "file.bin" full_file = tmpdir.join(filename) full_file.write_binary(b"0123456789") async with aioopen(str(full_file), mode=mode) as file: if "a" in mode: await file.seek(0) # Rewind for append modes. peeked = await file.peek(1) # Technically it's OK for the peek to return less bytes than requested. 
if peeked: assert peeked.startswith(b"0") read = await file.read(1) assert peeked.startswith(read) @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_read(mode, buffering): """Just read some bytes from a test file.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") async with aioopen(filename, mode=mode, buffering=buffering) as file: await file.seek(0) # Needed for the append mode. actual = await file.read() assert b"" == (await file.read()) assert actual == open(filename, mode="rb").read() @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_staggered_read(mode, buffering): """Read bytes repeatedly.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") async with aioopen(filename, mode=mode, buffering=buffering) as file: await file.seek(0) # Needed for the append mode. actual = [] while True: byte = await file.read(1) if byte: actual.append(byte) else: break assert b"" == (await file.read()) expected = [] with open(filename, mode="rb") as f: while True: byte = f.read(1) if byte: expected.append(byte) else: break assert actual == expected @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_seek(mode, buffering, tmpdir): """Test seeking and then reading.""" filename = "bigfile.bin" content = b"0123456789" * 4 * io.DEFAULT_BUFFER_SIZE full_file = tmpdir.join(filename) full_file.write_binary(content) async with aioopen(str(full_file), mode=mode, buffering=buffering) as file: await file.seek(4) assert b"4" == (await file.read(1)) @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["wb", "rb", "rb+", "wb+", "ab", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_close_ctx_mgr_iter(mode, buffering, tmpdir): """Open a file, read a byte, and close it.""" filename = "bigfile.bin" content = b"0" * 4 * io.DEFAULT_BUFFER_SIZE full_file = tmpdir.join(filename) full_file.write_binary(content) async with aioopen(str(full_file), mode=mode, buffering=buffering) as file: assert not file.closed assert not file._file.closed assert file.closed assert file._file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["wb", "rb", "rb+", "wb+", "ab", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_close_ctx_mgr(mode, buffering, tmpdir): """Open a file, read a byte, and close it.""" filename = "bigfile.bin" content = b"0" * 4 * io.DEFAULT_BUFFER_SIZE full_file = tmpdir.join(filename) full_file.write_binary(content) file = await aioopen(str(full_file), mode=mode, buffering=buffering) assert not file.closed assert not file._file.closed await file.close() assert file.closed assert file._file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_readinto(mode, buffering): """Test the readinto functionality.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") async with aioopen(filename, mode=mode, buffering=buffering) as file: await file.seek(0) # Needed for the append mode. 
array = bytearray(4) bytes_read = await file.readinto(array) assert bytes_read == 4 assert array == open(filename, mode="rb").read(4) @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb+", "wb", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_truncate(mode, buffering, tmpdir): """Test truncating files.""" filename = "bigfile.bin" content = b"0123456789" * 4 * io.DEFAULT_BUFFER_SIZE full_file = tmpdir.join(filename) full_file.write_binary(content) async with aioopen(str(full_file), mode=mode, buffering=buffering) as file: # The append modes want us to seek first. await file.seek(0) if "w" in mode: # We've just erased the entire file. await file.write(content) await file.flush() await file.seek(0) await file.truncate() assert b"" == full_file.read_binary() @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["wb", "rb+", "wb+", "ab", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_simple_write(mode, buffering, tmpdir): """Test writing into a file.""" filename = "bigfile.bin" content = b"0" * 4 * io.DEFAULT_BUFFER_SIZE full_file = tmpdir.join(filename) if "r" in mode: full_file.ensure() # Read modes want it to already exist. async with aioopen(str(full_file), mode=mode, buffering=buffering) as file: bytes_written = await file.write(content) assert bytes_written == len(content) assert content == full_file.read_binary() @pytest.mark.asyncio async def test_simple_detach(tmpdir): """Test detaching for buffered streams.""" filename = "file.bin" full_file = tmpdir.join(filename) full_file.write_binary(b"0123456789") with pytest.raises(ValueError): async with aioopen(str(full_file), mode="rb") as file: raw_file = file.detach() assert raw_file with pytest.raises(ValueError): await file.read() assert b"0123456789" == raw_file.read(10) @pytest.mark.asyncio async def test_simple_readall(tmpdir): """Test the readall function by reading a large file in. Only RawIOBase supports readall(). """ filename = "bigfile.bin" content = b"0" * 4 * io.DEFAULT_BUFFER_SIZE # Hopefully several reads. 
sync_file = tmpdir.join(filename) sync_file.write_binary(content) file = await aioopen(str(sync_file), mode="rb", buffering=0) actual = await file.readall() assert actual == content await file.close() assert file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_name_property(mode, buffering): """Test iterating over lines from a file.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") async with aioopen(filename, mode=mode, buffering=buffering) as file: assert file.name == filename assert file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["rb", "rb+", "ab+"]) @pytest.mark.parametrize("buffering", [-1, 0]) async def test_mode_property(mode, buffering): """Test iterating over lines from a file.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") async with aioopen(filename, mode=mode, buffering=buffering) as file: assert file.mode == mode assert file.closed aiofiles-0.8.0/tests/threadpool/test_concurrency.py000066400000000000000000000040351415044450100225600ustar00rootroot00000000000000"""Test concurrency properties of the implementation.""" from os.path import dirname from os.path import join import time import asyncio import pytest import aiofiles.threadpool @pytest.mark.asyncio async def test_slow_file(monkeypatch, unused_tcp_port): """Monkey patch open and file.read(), and assert the loop still works.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") with open(filename, mode="rb") as f: contents = f.read() def new_open(*args, **kwargs): time.sleep(1) return open(*args, **kwargs) monkeypatch.setattr(aiofiles.threadpool, "sync_open", value=new_open) async def serve_file(_, writer): file = await aiofiles.threadpool.open(filename, mode="rb") try: while True: data = await file.read(1) if not data: break writer.write(data) await writer.drain() await writer.drain() finally: writer.close() await file.close() async def return_one(_, writer): writer.write(b"1") await writer.drain() writer.close() counter = 0 async def spam_client(): nonlocal counter while True: r, w = await asyncio.open_connection("127.0.0.1", port=30001) assert (await r.read()) == b"1" counter += 1 w.close() await asyncio.sleep(0.01) file_server = await asyncio.start_server(serve_file, port=unused_tcp_port) spam_server = await asyncio.start_server(return_one, port=30001) spam_task = asyncio.ensure_future(spam_client()) reader, writer = await asyncio.open_connection("127.0.0.1", port=unused_tcp_port) actual_contents = await reader.read() writer.close() await asyncio.sleep(0) file_server.close() spam_server.close() await file_server.wait_closed() await spam_server.wait_closed() spam_task.cancel() assert actual_contents == contents assert counter > 30 aiofiles-0.8.0/tests/threadpool/test_open.py000066400000000000000000000013121415044450100211620ustar00rootroot00000000000000"""Test the open functionality.""" from aiofiles.threadpool import open as aioopen, wrap import pytest @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r", "rb"]) async def test_file_not_found(mode): filename = "non_existent" try: open(filename, mode=mode) except Exception as e: expected = e assert expected try: await aioopen(filename, mode=mode) except Exception as e: actual = e assert actual assert actual.errno == expected.errno assert str(actual) == str(expected) def test_unsupported_wrap(): """A type error should be raised when wrapping something unsupported.""" 
with pytest.raises(TypeError): wrap(int) aiofiles-0.8.0/tests/threadpool/test_text.py000066400000000000000000000174151415044450100212200ustar00rootroot00000000000000"""PEP 0492/Python 3.5+ tests for text files.""" import io from os.path import dirname, join from aiofiles.threadpool import open as aioopen import pytest @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r", "r+", "a+"]) async def test_simple_iteration(mode): """Test iterating over lines from a file.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") async with aioopen(filename, mode=mode) as file: # Append mode needs us to seek. await file.seek(0) counter = 1 # The old iteration pattern: while True: line = await file.readline() if not line: break assert line.strip() == "line " + str(counter) counter += 1 await file.seek(0) counter = 1 # The new iteration pattern: async for line in file: assert line.strip() == "line " + str(counter) counter += 1 assert file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r", "r+", "a+"]) async def test_simple_readlines(mode): """Test the readlines functionality.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") with open(filename, mode="r") as f: expected = f.readlines() async with aioopen(filename, mode=mode) as file: # Append mode needs us to seek. await file.seek(0) actual = await file.readlines() assert file.closed assert actual == expected @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r+", "w", "a"]) async def test_simple_flush(mode, tmpdir): """Test flushing to a file.""" filename = "file.bin" full_file = tmpdir.join(filename) if "r" in mode: full_file.ensure() # Read modes want it to already exist. async with aioopen(str(full_file), mode=mode) as file: await file.write("0") # Shouldn't flush. assert "" == full_file.read_text(encoding="utf8") await file.flush() assert "0" == full_file.read_text(encoding="utf8") assert file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r", "r+", "a+"]) async def test_simple_read(mode): """Just read some bytes from a test file.""" filename = join(dirname(__file__), "..", "resources", "test_file1.txt") async with aioopen(filename, mode=mode) as file: await file.seek(0) # Needed for the append mode. actual = await file.read() assert "" == (await file.read()) assert actual == open(filename, mode="r").read() assert file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["w", "a"]) async def test_simple_read_fail(mode, tmpdir): """Try reading some bytes and fail.""" filename = "bigfile.bin" content = "0123456789" * 4 * io.DEFAULT_BUFFER_SIZE full_file = tmpdir.join(filename) full_file.write(content) with pytest.raises(ValueError): async with aioopen(str(full_file), mode=mode) as file: await file.seek(0) # Needed for the append mode. await file.read() assert file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r", "r+", "a+"]) async def test_staggered_read(mode): """Read bytes repeatedly.""" filename = join(dirname(__file__), "..", "resources", "test_file1.txt") async with aioopen(filename, mode=mode) as file: await file.seek(0) # Needed for the append mode. 
actual = [] while True: char = await file.read(1) if char: actual.append(char) else: break assert "" == (await file.read()) expected = [] with open(filename, mode="r") as f: while True: char = f.read(1) if char: expected.append(char) else: break assert actual == expected assert file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r", "r+", "a+"]) async def test_simple_seek(mode, tmpdir): """Test seeking and then reading.""" filename = "bigfile.bin" content = "0123456789" * 4 * io.DEFAULT_BUFFER_SIZE full_file = tmpdir.join(filename) full_file.write(content) async with aioopen(str(full_file), mode=mode) as file: await file.seek(4) assert "4" == (await file.read(1)) assert file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["w", "r", "r+", "w+", "a", "a+"]) async def test_simple_close(mode, tmpdir): """Open a file, read a byte, and close it.""" filename = "bigfile.bin" content = "0" * 4 * io.DEFAULT_BUFFER_SIZE full_file = tmpdir.join(filename) full_file.write(content) async with aioopen(str(full_file), mode=mode) as file: assert not file.closed assert not file._file.closed assert file.closed assert file._file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r+", "w", "a+"]) async def test_simple_truncate(mode, tmpdir): """Test truncating files.""" filename = "bigfile.bin" content = "0123456789" * 4 * io.DEFAULT_BUFFER_SIZE full_file = tmpdir.join(filename) full_file.write(content) async with aioopen(str(full_file), mode=mode) as file: # The append modes want us to seek first. await file.seek(0) if "w" in mode: # We've just erased the entire file. await file.write(content) await file.flush() await file.seek(0) await file.truncate() assert "" == full_file.read() @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["w", "r+", "w+", "a", "a+"]) async def test_simple_write(mode, tmpdir): """Test writing into a file.""" filename = "bigfile.bin" content = "0" * 4 * io.DEFAULT_BUFFER_SIZE full_file = tmpdir.join(filename) if "r" in mode: full_file.ensure() # Read modes want it to already exist. async with aioopen(str(full_file), mode=mode) as file: bytes_written = await file.write(content) assert bytes_written == len(content) assert content == full_file.read() assert file.closed @pytest.mark.asyncio async def test_simple_detach(tmpdir): """Test detaching for buffered streams.""" filename = "file.bin" full_file = tmpdir.join(filename) full_file.write("0123456789") with pytest.raises(ValueError): # Close will error out. 
async with aioopen(str(full_file), mode="r") as file: raw_file = file.detach() assert raw_file with pytest.raises(ValueError): await file.read() assert b"0123456789" == raw_file.read(10) @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r", "r+", "a+"]) async def test_simple_iteration_ctx_mgr(mode): """Test iterating over lines from a file.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") async with aioopen(filename, mode=mode) as file: assert not file.closed await file.seek(0) counter = 1 async for line in file: assert line.strip() == "line " + str(counter) counter += 1 assert file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r", "r+", "a+"]) async def test_name_property(mode): """Test iterating over lines from a file.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") async with aioopen(filename, mode=mode) as file: assert file.name == filename assert file.closed @pytest.mark.asyncio @pytest.mark.parametrize("mode", ["r", "r+", "a+"]) async def test_mode_property(mode): """Test iterating over lines from a file.""" filename = join(dirname(__file__), "..", "resources", "multiline_file.txt") async with aioopen(filename, mode=mode) as file: assert file.mode == mode assert file.closed aiofiles-0.8.0/tox.ini000066400000000000000000000007731415044450100146320ustar00rootroot00000000000000[gh-actions] python = 3.6: py36 3.7: py37 3.8: py38 3.9: py39 3.10: py310 pypy-3.7: pypy3 [tox] envlist = py36, py37, py38, py39, py310, pypy3 isolated_build = True [testenv:lint] skip_install = true basepython = python3.9 extras = dev deps = flake8 black commands = flake8 src tests black --check --verbose src tests [testenv] whitelist_externals = poetry commands = poetry install -v --no-root coverage run --source aiofiles -m pytest tests passenv = CI
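The test modules above (tests/threadpool, tests/test_os.py, tests/test_tempfile.py) exercise the package's asynchronous file API. As orientation only — this sketch is an editor-added illustration, not part of the packaged sources — the snippet below strings together the same calls those tests use: aiofiles.open with async context management and async iteration, the delegated aiofiles.os functions, and aiofiles.tempfile.TemporaryFile. The file path "example.txt" and the asyncio.run entry point are assumptions made for the example.

import asyncio

import aiofiles
import aiofiles.os
from aiofiles import tempfile


async def main():
    # Write and read back a small text file without blocking the event loop.
    # "example.txt" is a placeholder path used only in this sketch.
    async with aiofiles.open("example.txt", mode="w") as f:
        await f.write("line 1\nline 2\n")

    async with aiofiles.open("example.txt", mode="r") as f:
        async for line in f:  # async iteration, as in the threadpool tests above
            print(line.strip())

    # Delegated os call, mirroring tests/test_os.py.
    stat_res = await aiofiles.os.stat("example.txt")
    print(stat_res.st_size)

    # Async temporary file, mirroring tests/test_tempfile.py.
    async with tempfile.TemporaryFile(mode="w+") as tmp:
        await tmp.write("scratch data")
        await tmp.seek(0)
        print(await tmp.read())

    await aiofiles.os.remove("example.txt")


if __name__ == "__main__":
    asyncio.run(main())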