pax_global_header00006660000000000000000000000064147025121060014510gustar00rootroot0000000000000052 comment=10644a0e07ad180c5b756aba272ee6b0dbd12df8 tinydb-4.8.2/000077500000000000000000000000001470251210600130145ustar00rootroot00000000000000tinydb-4.8.2/.coveragerc000066400000000000000000000003071470251210600151350ustar00rootroot00000000000000[run] branch = True [report] exclude_lines = pragma: no cover raise NotImplementedError.* warnings\.warn.* def __repr__ def __str__ def main() if __name__ == .__main__.: tinydb-4.8.2/.github/000077500000000000000000000000001470251210600143545ustar00rootroot00000000000000tinydb-4.8.2/.github/stale.yml000066400000000000000000000013521470251210600162100ustar00rootroot00000000000000# Number of days of inactivity before an issue becomes stale daysUntilStale: 30 # Number of days of inactivity before a stale issue is closed daysUntilClose: 7 # Issues with these labels will never be considered stale exemptLabels: - bug - pinned - contributions-welcome # Label to use when marking an issue as stale staleLabel: stale # Comment to post when marking an issue as stale. Set to `false` to disable markComment: > This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Feel free to reopen this if needed. Thank you for your contributions :heart: # Comment to post when closing a stale issue. 
Set to `false` to disable closeComment: false tinydb-4.8.2/.github/workflows/000077500000000000000000000000001470251210600164115ustar00rootroot00000000000000tinydb-4.8.2/.github/workflows/ci-workflow.yml000066400000000000000000000027671470251210600214130ustar00rootroot00000000000000name: Python CI on: push: {} pull_request: branches: [ master ] jobs: build: runs-on: ${{ matrix.os }} strategy: fail-fast: false matrix: python-version: - "3.8" - "3.9" - "3.10" - "3.11" - "3.12" - "3.13" os: [ubuntu-latest, macos-latest, windows-latest] include: - python-version: "pypy-3.9" os: ubuntu-latest - python-version: "pypy-3.10" os: ubuntu-latest steps: - uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} uses: actions/setup-python@v4 with: python-version: ${{ matrix.python-version }} allow-prereleases: true - name: Install dependencies run: | python -m pip install --upgrade pip twine pip install poetry poetry install - name: Run test suite run: | poetry run py.test -v --cov=tinydb - name: Perform type check run: | poetry run pip install pytest-mypy poetry run pytest --mypy -m mypy tinydb tests - name: Verify dist package format run: | poetry build twine check dist/* if: ${{ contains(matrix.python-version, '3.12') }} - name: Upload coverage result if: ${{ env.COVERALLS_REPO_TOKEN != 'windows-latest' }} env: COVERALLS_REPO_TOKEN: ${{ secrets.COVERALLS_REPO_TOKEN }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} run: | poetry run coveralls tinydb-4.8.2/.github/workflows/publish-workflow.yml000066400000000000000000000014551470251210600224570ustar00rootroot00000000000000name: Upload Python Package on: push: tags: - v*.*.* jobs: publish: runs-on: ubuntu-latest steps: - uses: actions/checkout@v2 - name: Set up Python uses: actions/setup-python@v2 with: python-version: '3.x' - name: Install dependencies run: | python -m pip install --upgrade pip pip install poetry poetry install - name: Publish package env: POETRY_PYPI_TOKEN_PYPI: ${{ 
secrets.POETRY_PYPI_TOKEN_PYPI }} run: | poetry publish --build - name: Create Release uses: actions/create-release@v1 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} with: tag_name: ${{ github.ref }} release_name: ${{ github.ref }} draft: false prerelease: falsetinydb-4.8.2/.gitignore000066400000000000000000000005571470251210600150130ustar00rootroot00000000000000*.py[cod] # C extensions *.so # Packages *.egg *.egg-info dist build eggs parts bin var sdist develop-eggs .installed.cfg lib lib64 __pycache__ # Installer logs pip-log.txt # Unit test / coverage reports .coverage .tox nosetests.xml .pytest_cache/ # Translations *.mo # Mr Developer .mr.developer.cfg .project .pydevproject # Pycharm .idea *.db.yml .DS_Storetinydb-4.8.2/.readthedocs.yml000066400000000000000000000002041470251210600160760ustar00rootroot00000000000000version: 2 python: version: 3.8 install: - method: pip path: . extra_requirements: - docs formats: alltinydb-4.8.2/CONTRIBUTING.rst000066400000000000000000000034541470251210600154630ustar00rootroot00000000000000Contribution Guidelines ####################### Whether reporting bugs, discussing improvements and new ideas or writing extensions: Contributions to TinyDB are welcome! Here's how to get started: 1. Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug 2. Fork `the repository `_ on GitHub, create a new branch off the `master` branch and start making your changes (known as `GitHub Flow `_) 3. Write a test which shows that the bug was fixed or that the feature works as expected 4. Send a pull request and bug the maintainer until it gets merged and published :) Philosophy of TinyDB ******************** TinyDB aims to be simple and fun to use. Therefore two key values are simplicity and elegance of interfaces and code. These values will contradict each other from time to time. In these cases , try using as little magic as possible. 
In any case don't forget documenting code that isn't clear at first glance. Code Conventions **************** In general the TinyDB source should always follow `PEP 8 `_. Exceptions are allowed in well justified and documented cases. However we make a small exception concerning docstrings: When using multiline docstrings, keep the opening and closing triple quotes on their own lines and add an empty line after it. .. code-block:: python def some_function(): """ Documentation ... """ # implementation ... Version Numbers *************** TinyDB follows the `SemVer versioning guidelines `_. This implies that backwards incompatible changes in the API will increment the major version. So think twice before making such changes. tinydb-4.8.2/LICENSE000066400000000000000000000020701470251210600140200ustar00rootroot00000000000000Copyright (C) 2013 Markus Siemens Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
tinydb-4.8.2/MANIFEST.in000066400000000000000000000000541470251210600145510ustar00rootroot00000000000000include LICENSE recursive-include tests *.pytinydb-4.8.2/README.rst000066400000000000000000000121641470251210600145070ustar00rootroot00000000000000.. image:: https://raw.githubusercontent.com/msiemens/tinydb/master/artwork/logo.png :height: 150px |Build Status| |Coverage| |Version| Quick Links *********** - `Example Code`_ - `Supported Python Versions`_ - `Documentation `_ - `Changelog `_ - `Extensions `_ - `Contributing`_ Introduction ************ TinyDB is a lightweight document oriented database optimized for your happiness :) It's written in pure Python and has no external dependencies. The target are small apps that would be blown away by a SQL-DB or an external database server. TinyDB is: - **tiny:** The current source code has 1800 lines of code (with about 40% documentation) and 1600 lines tests. - **document oriented:** Like MongoDB_, you can store any document (represented as ``dict``) in TinyDB. - **optimized for your happiness:** TinyDB is designed to be simple and fun to use by providing a simple and clean API. - **written in pure Python:** TinyDB neither needs an external server (as e.g. `PyMongo `_) nor any dependencies from PyPI. - **works on Python 3.8+ and PyPy3:** TinyDB works on all modern versions of Python and PyPy. - **powerfully extensible:** You can easily extend TinyDB by writing new storages or modify the behaviour of storages with Middlewares. - **100% test coverage:** No explanation needed. To dive straight into all the details, head over to the `TinyDB docs `_. You can also discuss everything related to TinyDB like general development, extensions or showcase your TinyDB-based projects on the `discussion forum `_. Supported Python Versions ************************* TinyDB has been tested with Python 3.8 - 3.13 and PyPy3. Project Status ************** This project is in maintenance mode. 
It has reached a mature, stable state where significant new features or architectural changes are not planned. That said, there will still be releases for bugfixes or features contributed by the community. Read more about what this means in particular `here `_. Example Code ************ .. code-block:: python >>> from tinydb import TinyDB, Query >>> db = TinyDB('/path/to/db.json') >>> db.insert({'int': 1, 'char': 'a'}) >>> db.insert({'int': 1, 'char': 'b'}) Query Language ============== .. code-block:: python >>> User = Query() >>> # Search for a field value >>> db.search(User.name == 'John') [{'name': 'John', 'age': 22}, {'name': 'John', 'age': 37}] >>> # Combine two queries with logical and >>> db.search((User.name == 'John') & (User.age <= 30)) [{'name': 'John', 'age': 22}] >>> # Combine two queries with logical or >>> db.search((User.name == 'John') | (User.name == 'Bob')) [{'name': 'John', 'age': 22}, {'name': 'John', 'age': 37}, {'name': 'Bob', 'age': 42}] >>> # Negate a query with logical not >>> db.search(~(User.name == 'John')) [{'name': 'Megan', 'age': 27}, {'name': 'Bob', 'age': 42}] >>> # Apply transformation to field with `map` >>> db.search((User.age.map(lambda x: x + x) == 44)) >>> [{'name': 'John', 'age': 22}] >>> # More possible comparisons: != < > <= >= >>> # More possible checks: where(...).matches(regex), where(...).test(your_test_func) Tables ====== .. code-block:: python >>> table = db.table('name') >>> table.insert({'value': True}) >>> table.all() [{'value': True}] Using Middlewares ================= .. code-block:: python >>> from tinydb.storages import JSONStorage >>> from tinydb.middlewares import CachingMiddleware >>> db = TinyDB('/path/to/db.json', storage=CachingMiddleware(JSONStorage)) Contributing ************ Whether reporting bugs, discussing improvements and new ideas or writing extensions: Contributions to TinyDB are welcome! Here's how to get started: 1. 
Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug 2. Fork `the repository `_ on Github, create a new branch off the `master` branch and start making your changes (known as `GitHub Flow `_) 3. Write a test which shows that the bug was fixed or that the feature works as expected 4. Send a pull request and bug the maintainer until it gets merged and published ☺ .. |Build Status| image:: https://img.shields.io/azure-devops/build/msiemens/3e5baa75-12ec-43ac-9728-89823ee8c7e2/2.svg?style=flat-square :target: https://dev.azure.com/msiemens/github/_build?definitionId=2 .. |Coverage| image:: http://img.shields.io/coveralls/msiemens/tinydb.svg?style=flat-square :target: https://coveralls.io/r/msiemens/tinydb .. |Version| image:: http://img.shields.io/pypi/v/tinydb.svg?style=flat-square :target: https://pypi.python.org/pypi/tinydb/ .. _Buzhug: http://buzhug.sourceforge.net/ .. _CodernityDB: https://github.com/perchouli/codernitydb .. _MongoDB: http://mongodb.org/ tinydb-4.8.2/artwork/000077500000000000000000000000001470251210600145055ustar00rootroot00000000000000tinydb-4.8.2/artwork/logo.png000066400000000000000000000063421470251210600161600ustar00rootroot00000000000000PNG  IHDRwb pHYs  tEXtSoftwareAdobe ImageReadyqe< oIDATx읿n"}nV+m&GG|x#m4$MAZGC4nַOu^k4CS:4E.H?B `~ hZ4_+O BH!90-71Gwksͱ|}ỶPG6>Vٚ4Rx73@.)^5>A@ z;f߿,Gܔ+2W6i6's_s{dҏV Xl'uC#`̅vdC~ɍ ƥ_vmeb3C Gف{blc7+Na < do`6?6_1D TTe"PsZ@@"dub0G4%/ ȺBkqʍ!`M :$}D(s ,KHi>@ =:@ {,)b"^wD"qcf='A $2̛Z/6"T"}`Pg[Y'DDnӏm;C*'80ĜXX.빮ozZ+zFWEleoi[!&6.Y+:l(l@o >쭹we#h~yv&<YlM\o 0jMO~vt~!P17=Ymxǖƙew/ */{#ezyGVfbfcak*0E6D+by,T)`B(p">#ٕi5?*njuO.>xڳ(/*yձg+![8g(*+0@ 74e0{\~`,MOBcZ9з}iKs,v:@ )fW׮Wt H5X6ޯh]e;Ҹ/ ]NJy!qB*jDzؐQ6 AO=90=c-qXZSZH ۲'wо&MV&=yH>k3TB_*ͱ0ǃmzFsX<U\҈/(&fDlrJ09YZk;EFCZ!"`9s^aGeӇm~m9ZB''6A OviDU2WX<CY5Eu+ޯM:|J֪54WsI,{rmDepڐkH+`H;/Ib3H-<,zPj\tE҈ 1kP/G0:045}u*gJOscd՛@zg|P&Շ 4)` SOig7OTQ꾢ʰ.Bt5jw` `fYu*v: *օ Cؕ$6gE sayiDzN`Ӈ[zJZ$`z}ub?$^55i߯"1u"s]=}`PąR5 .=| Emln[=LJ7m#`PMcqJ6* 뛞 Ʈ> guʗm0 
ow﷯)IӴE &VSFK'T!@Qj;WvE(Υx0N/% MOvfx0Źq_⼆}KTNOA0q tinydb-4.8.2/docs/000077500000000000000000000000001470251210600137445ustar00rootroot00000000000000tinydb-4.8.2/docs/.gitignore000066400000000000000000000000071470251210600157310ustar00rootroot00000000000000_build/tinydb-4.8.2/docs/Makefile000066400000000000000000000151521470251210600154100ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . 
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." 
json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/TinyDB.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/TinyDB.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/TinyDB" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/TinyDB" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." 
man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
tinydb-4.8.2/docs/_static/000077500000000000000000000000001470251210600153725ustar00rootroot00000000000000tinydb-4.8.2/docs/_static/logo.png000066400000000000000000000063421470251210600170450ustar00rootroot00000000000000PNG  IHDRwb pHYs  tEXtSoftwareAdobe ImageReadyqe< oIDATx읿n"}nV+m&GG|x#m4$MAZGC4nַOu^k4CS:4E.H?B `~ hZ4_+O BH!90-71Gwksͱ|}ỶPG6>Vٚ4Rx73@.)^5>A@ z;f߿,Gܔ+2W6i6's_s{dҏV Xl'uC#`̅vdC~ɍ ƥ_vmeb3C Gف{blc7+Na < do`6?6_1D TTe"PsZ@@"dub0G4%/ ȺBkqʍ!`M :$}D(s ,KHi>@ =:@ {,)b"^wD"qcf='A $2̛Z/6"T"}`Pg[Y'DDnӏm;C*'80ĜXX.빮ozZ+zFWEleoi[!&6.Y+:l(l@o >쭹we#h~yv&<YlM\o 0jMO~vt~!P17=Ymxǖƙew/ */{#ezyGVfbfcak*0E6D+by,T)`B(p">#ٕi5?*njuO.>xڳ(/*yձg+![8g(*+0@ 74e0{\~`,MOBcZ9з}iKs,v:@ )fW׮Wt H5X6ޯh]e;Ҹ/ ]NJy!qB*jDzؐQ6 AO=90=c-qXZSZH ۲'wо&MV&=yH>k3TB_*ͱ0ǃmzFsX<U\҈/(&fDlrJ09YZk;EFCZ!"`9s^aGeӇm~m9ZB''6A OviDU2WX<CY5Eu+ޯM:|J֪54WsI,{rmDepڐkH+`H;/Ib3H-<,zPj\tE҈ 1kP/G0:045}u*gJOscd՛@zg|P&Շ 4)` SOig7OTQ꾢ʰ.Bt5jw` `fYu*v: *օ Cؕ$6gE sayiDzN`Ӈ[zJZ$`z}ub?$^55i߯"1u"s]=}`PąR5 .=| Emln[=LJ7m#`PMcqJ6* 뛞 Ʈ> guʗm0 ow﷯)IӴE &VSFK'T!@Qj;WvE(Υx0N/% MOvfx0Źq_⼆}KTNOA0qUseful Links tinydb-4.8.2/docs/_templates/sidebarlogo.html000066400000000000000000000002061470251210600212570ustar00rootroot00000000000000 tinydb-4.8.2/docs/_themes/000077500000000000000000000000001470251210600153705ustar00rootroot00000000000000tinydb-4.8.2/docs/_themes/.gitignore000066400000000000000000000000261470251210600173560ustar00rootroot00000000000000*.pyc *.pyo .DS_Store tinydb-4.8.2/docs/_themes/LICENSE000066400000000000000000000033751470251210600164050ustar00rootroot00000000000000Copyright (c) 2010 by Armin Ronacher. Some rights reserved. Redistribution and use in source and binary forms of the theme, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 
* Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * The names of the contributors may not be used to endorse or promote products derived from this software without specific prior written permission. We kindly ask you to only use these themes in an unmodified manner just for Flask and Flask-related products, not for unrelated projects. If you like the visual style and want to use it for your own projects, please consider making some larger changes to the themes (such as changing font faces, sizes, colors or margins). THIS THEME IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS THEME, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. tinydb-4.8.2/docs/_themes/README000066400000000000000000000021051470251210600162460ustar00rootroot00000000000000Flask Sphinx Styles =================== This repository contains sphinx styles for Flask and Flask related projects. To use this style in your Sphinx documentation, follow this guide: 1. put this folder as _themes into your docs folder. Alternatively you can also use git submodules to check out the contents there. 2. 
add this to your conf.py: sys.path.append(os.path.abspath('_themes')) html_theme_path = ['_themes'] html_theme = 'flask' The following themes exist: - 'flask' - the standard flask documentation theme for large projects - 'flask_small' - small one-page theme. Intended to be used by very small addon libraries for flask. The following options exist for the flask_small theme: [options] index_logo = '' filename of a picture in _static to be used as replacement for the h1 in the index.rst file. index_logo_height = 120px height of the index logo github_fork = '' repository name on github for the "fork me" badge tinydb-4.8.2/docs/_themes/flask/000077500000000000000000000000001470251210600164705ustar00rootroot00000000000000tinydb-4.8.2/docs/_themes/flask/layout.html000066400000000000000000000015161470251210600206760ustar00rootroot00000000000000{%- extends "basic/layout.html" %} {%- block extrahead %} {{ super() }} {% if theme_touch_icon %} {% endif %} {% endblock %} {%- block relbar2 %}{% endblock %} {% block header %} {{ super() }} {% if pagename == 'index' %}
{% endif %} {% endblock %} {%- block footer %} {% if pagename == 'index' %}
{% endif %} {%- endblock %} tinydb-4.8.2/docs/_themes/flask/page.html000066400000000000000000000010061470251210600202670ustar00rootroot00000000000000{%- extends "basic/page.html" %} {% block body %} {{ super() }} {%- if prev or next and pagename != 'index' %}

{%- if prev %} « {{ prev.title }} {% if next %}|{% endif %} {%- endif %} {%- if next %} {{ next.title }} » {%- endif %}

{%- endif %} {% endblock %} tinydb-4.8.2/docs/_themes/flask/relations.html000066400000000000000000000007651470251210600213660ustar00rootroot00000000000000

Navigation

tinydb-4.8.2/docs/_themes/flask/static/000077500000000000000000000000001470251210600177575ustar00rootroot00000000000000tinydb-4.8.2/docs/_themes/flask/static/flasky.css_t000066400000000000000000000225521470251210600223130ustar00rootroot00000000000000/* * flasky.css_t * ~~~~~~~~~~~~ * * :copyright: Copyright 2010 by Armin Ronacher. * :license: Flask Design License, see LICENSE for details. */ {% set page_width = '940px' %} {% set sidebar_width = '220px' %} {% set font_family = "'Open Sans', sans-serif" %} {% set monospace_font_family = "'Source Code Pro', 'Consolas', 'Menlo', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace" %} {% set accent_color = '#2d4e84' %}{# original: #004B6B #} {% set accent_color_alternate = '#2069e1' %}{# original: #6D4100 #} @import url(http://fonts.googleapis.com/css?family=Open+Sans:400,700,400italic|Source+Code+Pro); @import url("basic.css"); /* -- page layout ----------------------------------------------------------- */ html { overflow-y: scroll; } body { font-family: {{ font_family }}; font-size: 17px; background-color: white; color: #000; margin: 0; padding: 0; } div.document { width: {{ page_width }}; margin: 30px auto 0 auto; } div.documentwrapper { float: left; width: 100%; } div.bodywrapper { margin: 0 0 0 {{ sidebar_width }}; } div.sphinxsidebar { width: {{ sidebar_width }}; } hr { border: 1px solid #B1B4B6; } div.body { background-color: #ffffff; color: #3E4349; padding: 0 30px 0 30px; } img.floatingflask { padding: 0 0 10px 10px; float: right; } div.footer { width: {{ page_width }}; margin: 20px auto 30px auto; font-size: 14px; color: #888; text-align: right; } div.footer a { color: #888; } div.related { display: none; } div.sphinxsidebar a { color: #444; text-decoration: none; border-bottom: 1px dotted #999; } div.sphinxsidebar a:hover { border-bottom: 1px solid #999; } div.sphinxsidebar { font-size: 14px; line-height: 1.5; } div.sphinxsidebarwrapper { padding: 18px 10px; } div.sphinxsidebarwrapper p.logo { 
padding: 0 0 20px 0; margin: 0; text-align: center; } div.sphinxsidebar h3, div.sphinxsidebar h4 { font-family: {{ font_family }}; color: #444; font-size: 24px; font-weight: normal; margin: 0 0 5px 0; padding: 0; } div.sphinxsidebar h4 { font-size: 20px; } div.sphinxsidebar h3 a { color: #444; } div.sphinxsidebar p.logo a, div.sphinxsidebar h3 a, div.sphinxsidebar p.logo a:hover, div.sphinxsidebar h3 a:hover { border: none; } div.sphinxsidebar p { color: #555; margin: 10px 0; } div.sphinxsidebar ul { margin: 10px 0; padding: 0; color: #000; } div.sphinxsidebar input { border: 1px solid #ccc; font-family: {{ font_family }}; font-size: 1em; } /* -- body styles ----------------------------------------------------------- */ a { color: {{ accent_color }}; text-decoration: underline; } a:hover { color: {{ accent_color_alternate }}; text-decoration: underline; } div.body h1, div.body h2, div.body h3, div.body h4, div.body h5, div.body h6 { font-family: {{ font_family }}; font-weight: normal; margin: 30px 0px 10px 0px; padding: 0; } {% if theme_index_logo %} div.indexwrapper h1 { text-indent: -999999px; background: url({{ theme_index_logo }}) no-repeat center center; height: {{ theme_index_logo_height }}; } {% endif %} div.body h1 { margin-top: 0; padding-top: 0; font-size: 240%; } div.body h2 { font-size: 180%; } div.body h3 { font-size: 150%; } div.body h4 { font-size: 130%; } div.body h5 { font-size: 100%; } div.body h6 { font-size: 100%; } a.headerlink { color: #ddd; padding: 0 4px; text-decoration: none; } a.headerlink:hover { color: #444; background: #eaeaea; } div.body p, div.body dd, div.body li { line-height: 1.4em; } div.admonition { background: #fafafa; margin: 20px -30px; padding: 10px 30px; border-top: 1px solid #ccc; border-bottom: 1px solid #ccc; } div.admonition tt.xref, div.admonition a tt { border-bottom: 1px solid #fafafa; } dd div.admonition { margin-left: -60px; padding-left: 60px; } div.admonition p.admonition-title { font-family: {{ font_family }}; 
font-weight: normal; font-size: 24px; margin: 0 0 10px 0; padding: 0; line-height: 1; } div.admonition p.last { margin-bottom: 0; } div.highlight { background-color: white; } dt:target, .highlight { background: #FAF3E8; } div.note { background-color: #eee; border: 1px solid #ccc; } div.seealso { background-color: #ffc; border: 1px solid #ff6; } div.topic { background-color: #eee; } p.admonition-title { display: inline; } p.admonition-title:after { content: ":"; } pre, tt { font-family: {{ monospace_font_family }}; font-size: 0.9em; } img.screenshot { } tt.descname, tt.descclassname { font-size: 0.95em; } tt.descname { padding-right: 0.08em; } img.screenshot { -moz-box-shadow: 2px 2px 4px #eee; -webkit-box-shadow: 2px 2px 4px #eee; box-shadow: 2px 2px 4px #eee; } table.docutils { border: 1px solid #888; -moz-box-shadow: 2px 2px 4px #eee; -webkit-box-shadow: 2px 2px 4px #eee; box-shadow: 2px 2px 4px #eee; } table.docutils td, table.docutils th { border: 1px solid #888; padding: 0.25em 0.7em; } table.field-list, table.footnote { border: none; -moz-box-shadow: none; -webkit-box-shadow: none; box-shadow: none; } table.footnote { margin: 15px 0; width: 100%; border: 1px solid #eee; background: #fdfdfd; font-size: 0.9em; } table.footnote + table.footnote { margin-top: -15px; border-top: none; } table.field-list th { padding: 0 0.8em 0 0; } table.field-list td { padding: 0; } table.footnote td.label { width: 0px; padding: 0.3em 0 0.3em 0.5em; } table.footnote td { padding: 0.3em 0.5em; } dl { margin: 0; padding: 0; } dl dd { margin-left: 30px; } blockquote { margin: 0 0 0 30px; padding: 0; } ul, ol { margin: 10px 0 10px 30px; padding: 0; } pre { background: #eee; padding: 7px 30px; margin: 15px -30px; line-height: 1.3em; } dl pre, blockquote pre, li pre { margin-left: -60px; padding-left: 60px; } dl dl pre { margin-left: -90px; padding-left: 90px; } tt { background-color: #ecf0f3; color: #222; /* padding: 1px 2px; */ } tt.xref, a tt { background-color: #FBFBFB; 
border-bottom: 1px solid white; } a.reference { text-decoration: none; border-bottom: 1px dotted {{ accent_color }}; } a.reference:hover { border-bottom: 1px solid {{ accent_color_alternate }}; } a.footnote-reference { text-decoration: none; font-size: 0.7em; vertical-align: top; border-bottom: 1px dotted {{ accent_color }}; } a.footnote-reference:hover { border-bottom: 1px solid {{ accent_color_alternate }}; } a:hover tt { background: #EEE; } @media screen and (max-width: 870px) { div.sphinxsidebar { display: none; } div.document { width: 100%; } div.documentwrapper { margin-left: 0; margin-top: 0; margin-right: 0; margin-bottom: 0; } div.bodywrapper { margin-top: 0; margin-right: 0; margin-bottom: 0; margin-left: 0; } ul { margin-left: 0; } .document { width: auto; } .footer { width: auto; } .bodywrapper { margin: 0; } .footer { width: auto; } .github { display: none; } } @media screen and (max-width: 875px) { body { margin: 0; padding: 20px 30px; } div.documentwrapper { float: none; background: white; } div.sphinxsidebar { display: block; float: none; width: 102.5%; margin: 50px -30px -20px -30px; padding: 10px 20px; background: #333; color: white; } div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p, div.sphinxsidebar h3 a { color: white; } div.sphinxsidebar a { color: #aaa; } div.sphinxsidebar p.logo { display: none; } div.document { width: 100%; margin: 0; } div.related { display: block; margin: 0; padding: 10px 0 20px 0; } div.related ul, div.related ul li { margin: 0; padding: 0; } div.footer { display: none; } div.bodywrapper { margin: 0; } div.body { min-height: 0; padding: 0; } .rtd_doc_footer { display: none; } .document { width: auto; } .footer { width: auto; } .footer { width: auto; } .github { display: none; } } /* scrollbars */ ::-webkit-scrollbar { width: 6px; height: 6px; } ::-webkit-scrollbar-button:start:decrement, ::-webkit-scrollbar-button:end:increment { display: block; height: 10px; } 
::-webkit-scrollbar-button:vertical:increment { background-color: #fff; } ::-webkit-scrollbar-track-piece { background-color: #eee; -webkit-border-radius: 3px; } ::-webkit-scrollbar-thumb:vertical { height: 50px; background-color: #ccc; -webkit-border-radius: 3px; } ::-webkit-scrollbar-thumb:horizontal { width: 50px; background-color: #ccc; -webkit-border-radius: 3px; } /* misc. */ .revsys-inline { display: none!important; } .admonition.warning { background-color: #F5CDCD; border-color: #7B1B1B; } tinydb-4.8.2/docs/_themes/flask/theme.conf000066400000000000000000000001411470251210600204350ustar00rootroot00000000000000[theme] inherit = basic stylesheet = flasky.css pygments_style = flask_theme_support.FlaskyStyle tinydb-4.8.2/docs/_themes/flask_theme_support.py000066400000000000000000000074331470251210600220270ustar00rootroot00000000000000# flasky extensions. flasky pygments style based on tango style from pygments.style import Style from pygments.token import Keyword, Name, Comment, String, Error, \ Number, Operator, Generic, Whitespace, Punctuation, Other, Literal class FlaskyStyle(Style): background_color = "#f8f8f8" default_style = "" styles = { # No corresponding class for the following: # Text: "", # class: '' Whitespace: "underline #f8f8f8", # class: 'w' Error: "#a40000 border:#ef2929", # class: 'err' Other: "#000000", # class 'x' Comment: "italic #8f5902", # class: 'c' Comment.Preproc: "noitalic", # class: 'cp' Keyword: "bold #004461", # class: 'k' Keyword.Constant: "bold #004461", # class: 'kc' Keyword.Declaration: "bold #004461", # class: 'kd' Keyword.Namespace: "bold #004461", # class: 'kn' Keyword.Pseudo: "bold #004461", # class: 'kp' Keyword.Reserved: "bold #004461", # class: 'kr' Keyword.Type: "bold #004461", # class: 'kt' Operator: "#582800", # class: 'o' Operator.Word: "bold #004461", # class: 'ow' - like keywords Punctuation: "bold #000000", # class: 'p' # because special names such as Name.Class, Name.Function, etc. 
# are not recognized as such later in the parsing, we choose them # to look the same as ordinary variables. Name: "#000000", # class: 'n' Name.Attribute: "#c4a000", # class: 'na' - to be revised Name.Builtin: "#004461", # class: 'nb' Name.Builtin.Pseudo: "#3465a4", # class: 'bp' Name.Class: "#000000", # class: 'nc' - to be revised Name.Constant: "#000000", # class: 'no' - to be revised Name.Decorator: "#888", # class: 'nd' - to be revised Name.Entity: "#ce5c00", # class: 'ni' Name.Exception: "bold #cc0000", # class: 'ne' Name.Function: "#000000", # class: 'nf' Name.Property: "#000000", # class: 'py' Name.Label: "#f57900", # class: 'nl' Name.Namespace: "#000000", # class: 'nn' - to be revised Name.Other: "#000000", # class: 'nx' Name.Tag: "bold #004461", # class: 'nt' - like a keyword Name.Variable: "#000000", # class: 'nv' - to be revised Name.Variable.Class: "#000000", # class: 'vc' - to be revised Name.Variable.Global: "#000000", # class: 'vg' - to be revised Name.Variable.Instance: "#000000", # class: 'vi' - to be revised Number: "#990000", # class: 'm' Literal: "#000000", # class: 'l' Literal.Date: "#000000", # class: 'ld' String: "#4e9a06", # class: 's' String.Backtick: "#4e9a06", # class: 'sb' String.Char: "#4e9a06", # class: 'sc' String.Doc: "italic #8f5902", # class: 'sd' - like a comment String.Double: "#4e9a06", # class: 's2' String.Escape: "#4e9a06", # class: 'se' String.Heredoc: "#4e9a06", # class: 'sh' String.Interpol: "#4e9a06", # class: 'si' String.Other: "#4e9a06", # class: 'sx' String.Regex: "#4e9a06", # class: 'sr' String.Single: "#4e9a06", # class: 's1' String.Symbol: "#4e9a06", # class: 'ss' Generic: "#000000", # class: 'g' Generic.Deleted: "#a40000", # class: 'gd' Generic.Emph: "italic #000000", # class: 'ge' Generic.Error: "#ef2929", # class: 'gr' Generic.Heading: "bold #000080", # class: 'gh' Generic.Inserted: "#00A000", # class: 'gi' Generic.Output: "#888", # class: 'go' Generic.Prompt: "#745334", # class: 'gp' Generic.Strong: "bold 
#000000", # class: 'gs' Generic.Subheading: "bold #800080", # class: 'gu' Generic.Traceback: "bold #a40000", # class: 'gt' } tinydb-4.8.2/docs/api.rst000066400000000000000000000052301470251210600152470ustar00rootroot00000000000000.. _api_docs: API Documentation ================= ``tinydb.database`` ------------------- .. autoclass:: tinydb.database.TinyDB :members: :private-members: :member-order: bysource .. _table_api: ``tinydb.table`` ---------------- .. autoclass:: tinydb.table.Table :members: :special-members: :exclude-members: __dict__, __weakref__ :member-order: bysource .. autoclass:: tinydb.table.Document :members: :special-members: :exclude-members: __dict__, __weakref__ :member-order: bysource .. py:attribute:: doc_id The document's id ``tinydb.queries`` ------------------ .. autoclass:: tinydb.queries.Query :members: :special-members: :exclude-members: __weakref__ :member-order: bysource .. autoclass:: tinydb.queries.QueryInstance :members: :special-members: :exclude-members: __weakref__ :member-order: bysource ``tinydb.operations`` --------------------- .. automodule:: tinydb.operations :members: :special-members: :exclude-members: __weakref__ :member-order: bysource ``tinydb.storage`` ------------------ .. automodule:: tinydb.storages :members: JSONStorage, MemoryStorage :special-members: :exclude-members: __weakref__ .. class:: Storage The abstract base class for all Storages. A Storage (de)serializes the current state of the database and stores it in some place (memory, file on disk, ...). .. method:: read() Read the last stored state. .. method:: write(data) Write the current state of the database to the storage. .. method:: close() Optional: Close open file handles, etc. ``tinydb.middlewares`` ---------------------- .. automodule:: tinydb.middlewares :members: CachingMiddleware :special-members: :exclude-members: __weakref__ .. class:: Middleware The base class for all Middlewares. 
Middlewares hook into the read/write process of TinyDB allowing you to extend the behaviour by adding caching, logging, ... If ``read()`` or ``write()`` are not overloaded, they will be forwarded directly to the storage instance. .. attribute:: storage :type: :class:`.Storage` Access to the underlying storage instance. .. method:: read() Read the last stored state. .. method:: write(data) Write the current state of the database to the storage. .. method:: close() Optional: Close open file handles, etc. ``tinydb.utils`` ---------------- .. autoclass:: tinydb.utils.LRUCache :members: :special-members: tinydb-4.8.2/docs/changelog.rst000066400000000000000000000540531470251210600164340ustar00rootroot00000000000000Changelog ========= Version Numbering ^^^^^^^^^^^^^^^^^ TinyDB follows the SemVer versioning guidelines. For more information, see `semver.org `_ .. note:: When new methods are added to the ``Query`` API, this may result in breaking existing code that uses the property syntax to access document fields (e.g. ``Query().some.nested.field``) where the field name is equal to the newly added query method. Thus, breaking changes may occur in feature releases even though they don't change the public API in a backwards-incompatible manner. To prevent this from happening, one can use the dict access syntax (``Query()['some']['nested']['field']``) that will not break even when new methods are added to the ``Query`` API. unreleased ^^^^^^^^^^ - *nothing yet* v4.8.2 (2024-10-12) ^^^^^^^^^^^^^^^^^^^ - Fix: Correctly update query cache when search results have changed (see `issue 560 `_). v4.8.1 (2024-10-07) ^^^^^^^^^^^^^^^^^^^ - Feature: Allow persisting empty tables (see `pull request 518 `_). - Fix: Make replacing ``doc_id`` type work properly (see `issue 545 `_). v4.8.0 (2023-06-12) ^^^^^^^^^^^^^^^^^^^ - Feature: Allow retrieve multiple documents by document ID using ``Table.get(doc_ids=[...])`` (see `pull request 504 `_). 
v4.7.1 (2023-01-14) ^^^^^^^^^^^^^^^^^^^ - Improvement: Improve typing annotations (see `pull request 477 `_). - Improvement: Fix some typos in the documentation (see `pull request 479 `_ and `pull request 498 `_). v4.7.0 (2022-02-19) ^^^^^^^^^^^^^^^^^^^ - Feature: Allow inserting ``Document`` instances using ``Table.insert_multiple`` (see `pull request 455 `_). - Performance: Only convert document IDs of a table when returning documents. This improves performance the ``Table.count`` and ``Table.get`` operations and also for ``Table.search`` when only returning a few documents (see `pull request 460 `_). - Internal change: Run all ``Table`` tests ``JSONStorage`` in addition to ``MemoryStorage``. v4.6.1 (2022-01-18) ^^^^^^^^^^^^^^^^^^^ - Fix: Make using callables as queries work again (see `issue 454 `__) v4.6.0 (2022-01-17) ^^^^^^^^^^^^^^^^^^^ - Feature: Add `map()` query operation to apply a transformation to a document or field when evaluating a query (see `pull request 445 `_). **Note**: This may break code that queries for a field named ``map`` using the ``Query`` APIs property access syntax - Feature: Add support for `typing-extensions `_ v4 - Documentation: Fix a couple of typos in the documentation (see `pull request 446 `_, `pull request 449 `_ and `pull request 453 `_) v4.5.2 (2021-09-23) ^^^^^^^^^^^^^^^^^^^ - Fix: Make ``Table.delete()``'s argument priorities consistent with other table methods. This means that if you pass both ``cond`` as well as ``doc_ids`` to ``Table.delete()``, the latter will be preferred (see `issue 424 `__) v4.5.1 (2021-07-17) ^^^^^^^^^^^^^^^^^^^ - Fix: Correctly install ``typing-extensions`` on Python 3.7 (see `issue 413 `__) v4.5.0 (2021-06-25) ^^^^^^^^^^^^^^^^^^^ - Feature: Better type hinting/IntelliSense for PyCharm, VS Code and MyPy (see `issue 372 `__). 
PyCharm and VS Code should work out of the box, for MyPy see :ref:`MyPy Type Checking ` v4.4.0 (2021-02-11) ^^^^^^^^^^^^^^^^^^^ - Feature: Add operation for searching for all documents that match a ``dict`` fragment (see `issue 300 `_) - Fix: Correctly handle queries that use fields that are also Query methods, e.g. ``Query()['test']`` for searching for documents with a ``test`` field (see `issue 373 `_) v4.3.0 (2020-11-14) ^^^^^^^^^^^^^^^^^^^ - Feature: Add operation for updating multiple documents: ``update_multiple`` (see `issue 346 `_) - Improvement: Expose type information for MyPy typechecking (PEP 561) (see `pull request 352 `_) v4.2.0 (2020-10-03) ^^^^^^^^^^^^^^^^^^^ - Feature: Add support for specifying document IDs during insertion (see `issue 303 `_) - Internal change: Use ``OrderedDict.move_to_end()`` in the query cache (see `issue 338 `_) v4.1.1 (2020-05-08) ^^^^^^^^^^^^^^^^^^^ - Fix: Don't install dev-dependencies when installing from PyPI (see `issue 315 `_) v4.1.0 (2020-05-07) ^^^^^^^^^^^^^^^^^^^ - Feature: Add a no-op query ``Query().noop()`` (see `issue 313 `_) - Feature: Add a ``access_mode`` flag to ``JSONStorage`` to allow opening files read-only (see `issue 297 `_) - Fix: Don't drop the first document that's being inserted when inserting data on an existing database (see `issue 314 `_) v4.0.0 (2020-05-02) ^^^^^^^^^^^^^^^^^^^ :ref:`Upgrade Notes ` Breaking Changes ---------------- - Python 2 support has been removed, see `issue 284 `_ for background - API changes: - Removed classes: ``DataProxy``, ``StorageProxy`` - Attributes removed from ``TinyDB`` in favor of customizing ``TinyDB``'s behavior by subclassing it and overloading ``__init__(...)`` and ``table(...)``: - ``DEFAULT_TABLE`` - ``DEFAULT_TABLE_KWARGS`` - ``DEFAULT_STORAGE`` - Arguments removed from ``TinyDB(...)``: - ``default_table``: replace with ``TinyDB.default_table_name = 'name'`` - ``table_class``: replace with ``TinyDB.table_class = Class`` - ``TinyDB.contains(...)``'s 
``doc_ids`` parameter has been renamed to ``doc_id`` and now only takes a single document ID - ``TinyDB.purge_tables(...)`` has been renamed to ``TinyDB.drop_tables(...)`` - ``TinyDB.purge_table(...)`` has been renamed to ``TinyDB.drop_table(...)`` - ``TinyDB.write_back(...)`` has been removed - ``TinyDB.process_elements(...)`` has been removed - ``Table.purge()`` has been renamed to ``Table.truncate()`` - Evaluating an empty ``Query()`` without any test operators will now result in an exception, use ``Query().noop()`` (introduced in v4.1.0) instead - ``ujson`` support has been removed, see `issue 263 `_ and `issue 306 `_ for background - The deprecated Element ID API has been removed (e.g. using the ``Element`` class or ``eids`` parameter) in favor the Document API, see `pull request 158 `_ for details on the replacement Improvements ------------ - TinyDB's internal architecture has been reworked to be more simple and streamlined in order to make it easier to customize TinyDB's behavior - With the new architecture, TinyDB performance will improve for many applications Bugfixes -------- - Don't break the tests when ``ujson`` is installed (see `issue 262 `_) - Fix performance when reading data (see `issue 250 `_) - Fix inconsistent purge function names (see `issue 103 `_) v3.15.1 (2019-10-26) ^^^^^^^^^^^^^^^^^^^^ - Internal change: fix missing values handling for ``LRUCache`` v3.15.0 (2019-10-12) ^^^^^^^^^^^^^^^^^^^^ - Feature: allow setting the parameters of TinyDB's default table (see `issue 278 `_) v3.14.2 (2019-09-13) ^^^^^^^^^^^^^^^^^^^^ - Internal change: support correct iteration for ``LRUCache`` objects v3.14.1 (2019-07-03) ^^^^^^^^^^^^^^^^^^^^ - Internal change: fix Query class to permit subclass creation (see `pull request 270 `_) v3.14.0 (2019-06-18) ^^^^^^^^^^^^^^^^^^^^ - Change: support for ``ujson`` is now deprecated (see `issue 263 `_) v3.13.0 (2019-03-16) ^^^^^^^^^^^^^^^^^^^^ - Feature: direct access to a TinyDB instance's storage (see `issue 258 `_) 
v3.12.2 (2018-12-12) ^^^^^^^^^^^^^^^^^^^^ - Internal change: convert documents to dicts during insertion (see `pull request 256 `_) - Internal change: use tuple literals instead of tuple class/constructor (see `pull request 247 `_) - Infra: ensure YAML tests are run (see `pull request 252 `_) v3.12.1 (2018-11-09) ^^^^^^^^^^^^^^^^^^^^ - Fix: Don't break when searching the same query multiple times (see `pull request 249 `_) - Internal change: allow ``collections.abc.Mutable`` as valid document types (see `pull request 245 `_) v3.12.0 (2018-11-06) ^^^^^^^^^^^^^^^^^^^^ - Feature: Add encoding option to ``JSONStorage`` (see `pull request 238 `_) - Internal change: allow ``collections.abc.Mutable`` as valid document types (see `pull request 245 `_) v3.11.1 (2018-09-13) ^^^^^^^^^^^^^^^^^^^^ - Bugfix: Make path queries (``db.search(where('key))``) work again (see `issue 232 `_) - Improvement: Add custom ``repr`` representations for main classes (see `pull request 229 `_) v3.11.0 (2018-08-20) ^^^^^^^^^^^^^^^^^^^^ - **Drop official support for Python 3.3**. Python 3.3 has reached its official End Of Life as of September 29, 2017. 
It will probably continue to work, but will not be tested against (`issue 217 `_) - Feature: Allow extending TinyDB with a custom storage proxy class (see `pull request 224 `_) - Bugfix: Return list of document IDs for upsert when creating a new document (see `issue 223 `_) v3.10.0 (2018-07-21) ^^^^^^^^^^^^^^^^^^^^ - Feature: Add support for regex flags (see `pull request 216 `_) v3.9.0 (2018-04-24) ^^^^^^^^^^^^^^^^^^^ - Feature: Allow setting a table class for single table only (see `issue 197 `_) - Internal change: call fsync after flushing ``JSONStorage`` (see `issue 208 `_) v3.8.1 (2018-03-26) ^^^^^^^^^^^^^^^^^^^ - Bugfix: Don't install tests as a package anymore (see `pull request #195 `_) v3.8.0 (2018-03-01) ^^^^^^^^^^^^^^^^^^^ - Feature: Allow disabling the query cache with ``db.table(name, cache_size=0)`` (see `pull request #187 `_) - Feature: Add ``db.write_back(docs)`` for replacing documents (see `pull request #184 `_) v3.7.0 (2017-11-11) ^^^^^^^^^^^^^^^^^^^ - Feature: ``one_of`` for checking if a value is contained in a list (see `issue 164 `_) - Feature: Upsert (insert if document doesn't exist, otherwise update; see https://forum.m-siemens.de/d/30-primary-key-well-sort-of) - Internal change: don't read from storage twice during initialization (see https://forum.m-siemens.de/d/28-reads-the-whole-data-file-twice) v3.6.0 (2017-10-05) ^^^^^^^^^^^^^^^^^^^ - Allow updating all documents using ``db.update(fields)`` (see `issue #157 `_). - Rename elements to documents. Document IDs now available with ``doc.doc_id``, using ``doc.eid`` is now deprecated (see `pull request #158 `_) v3.5.0 (2017-08-30) ^^^^^^^^^^^^^^^^^^^ - Expose the table name via ``table.name`` (see `issue #147 `_). - Allow better subclassing of the ``TinyDB`` class (see `pull request #150 `_). v3.4.1 (2017-08-23) ^^^^^^^^^^^^^^^^^^^ - Expose TinyDB version via ``import tinyb; tinydb.__version__`` (see `issue #148 `_). 
v3.4.0 (2017-08-08) ^^^^^^^^^^^^^^^^^^^ - Add new update operations: ``add(key, value)``, ``subtract(key, value)``, and ``set(key, value)`` (see `pull request #145 `_). v3.3.1 (2017-06-27) ^^^^^^^^^^^^^^^^^^^ - Use relative imports to allow vendoring TinyDB in other packages (see `pull request #142 `_). v3.3.0 (2017-06-05) ^^^^^^^^^^^^^^^^^^^ - Allow iterating over a database or table yielding all documents (see `pull request #139 `_). v3.2.3 (2017-04-22) ^^^^^^^^^^^^^^^^^^^ - Fix bug with accidental modifications to the query cache when modifying the list of search results (see `issue #132 `_). v3.2.2 (2017-01-16) ^^^^^^^^^^^^^^^^^^^ - Fix the ``Query`` constructor to prevent wrong usage (see `issue #117 `_). v3.2.1 (2016-06-29) ^^^^^^^^^^^^^^^^^^^ - Fix a bug with queries on documents that have a ``path`` key (see `pull request #107 `_). - Don't write to the database file needlessly when opening the database (see `pull request #104 `_). v3.2.0 (2016-04-25) ^^^^^^^^^^^^^^^^^^^ - Add a way to specify the default table name via :ref:`default_table ` (see `pull request #98 `_). - Add ``db.purge_table(name)`` to remove a single table (see `pull request #100 `_). - Along the way: celebrating 100 issues and pull requests! Thanks everyone for every single contribution! - Extend API documentation (see `issue #96 `_). v3.1.3 (2016-02-14) ^^^^^^^^^^^^^^^^^^^ - Fix a bug when using unhashable documents (lists, dicts) with ``Query.any`` or ``Query.all`` queries (see `a forum post by karibul `_). v3.1.2 (2016-01-30) ^^^^^^^^^^^^^^^^^^^ - Fix a bug when using unhashable documents (lists, dicts) with ``Query.any`` or ``Query.all`` queries (see `a forum post by karibul `_). v3.1.1 (2016-01-23) ^^^^^^^^^^^^^^^^^^^ - Inserting a dictionary with data that is not JSON serializable doesn't lead to corrupt files anymore (see `issue #89 `_). - Fix a bug in the LRU cache that may lead to an invalid query cache (see `issue #87 `_). 
v3.1.0 (2015-12-31) ^^^^^^^^^^^^^^^^^^^ - ``db.update(...)`` and ``db.remove(...)`` now return affected document IDs (see `issue #83 `_). - Inserting an invalid document (i.e. not a ``dict``) now raises an error instead of corrupting the database (see `issue #74 `_). v3.0.0 (2015-11-13) ^^^^^^^^^^^^^^^^^^^ - Overhauled Query model: - ``where('...').contains('...')`` has been renamed to ``where('...').search('...')``. - Support for ORM-like usage: ``User = Query(); db.search(User.name == 'John')``. - ``where('foo')`` is an alias for ``Query().foo``. - ``where('foo').has('bar')`` is replaced by either ``where('foo').bar`` or ``Query().foo.bar``. - In case the key is not a valid Python identifier, array notation can be used: ``where('a.b.c')`` is now ``Query()['a.b.c']``. - Checking for the existence of a key has to be done explicitly: ``where('foo').exists()``. - Migrations from v1 to v2 have been removed. - ``SmartCacheTable`` has been moved to `msiemens/tinydb-smartcache`_. - Serialization has been moved to `msiemens/tinydb-serialization`_. - Empty storages are now expected to return ``None`` instead of raising ``ValueError``. (see `issue #67 `_. .. _msiemens/tinydb-smartcache: https://github.com/msiemens/tinydb-smartcache .. _msiemens/tinydb-serialization: https://github.com/msiemens/tinydb-serialization v2.4.0 (2015-08-14) ^^^^^^^^^^^^^^^^^^^ - Allow custom parameters for custom test functions (see `issue #63 `_ and `pull request #64 `_). v2.3.2 (2015-05-20) ^^^^^^^^^^^^^^^^^^^ - Fix a forgotten debug output in the ``SerializationMiddleware`` (see `issue #55 `_). - Fix an "ignored exception" warning when using the ``CachingMiddleware`` (see `pull request #54 `_) - Fix a problem with symlinks when checking out TinyDB on OSX Yosemite (see `issue #52 `_). v2.3.1 (2015-04-30) ^^^^^^^^^^^^^^^^^^^ - Hopefully fix a problem with using TinyDB as a dependency in a ``setup.py`` script (see `issue #51 `_). 
v2.3.0 (2015-04-08) ^^^^^^^^^^^^^^^^^^^ - Added support for custom serialization. That way, you can teach TinyDB to store ``datetime`` objects in a JSON file :) (see `issue #48 `_ and `pull request #50 `_) - Fixed a performance regression when searching became slower with every search (see `issue #49 `_) - Internal code has been cleaned up v2.2.2 (2015-02-12) ^^^^^^^^^^^^^^^^^^^ - Fixed a data loss when using ``CachingMiddleware`` together with ``JSONStorage`` (see `issue #47 `_) v2.2.1 (2015-01-09) ^^^^^^^^^^^^^^^^^^^ - Fixed handling of IDs with the JSON backend that converted integers to strings (see `issue #45 `_) v2.2.0 (2014-11-10) ^^^^^^^^^^^^^^^^^^^ - Extended ``any`` and ``all`` queries to take lists as conditions (see `pull request #38 `_) - Fixed an ``decode error`` when installing TinyDB in a non-UTF-8 environment (see `pull request #37 `_) - Fixed some issues with ``CachingMiddleware`` in combination with ``JSONStorage`` (see `pull request #39 `_) v2.1.0 (2014-10-14) ^^^^^^^^^^^^^^^^^^^ - Added ``where(...).contains(regex)`` (see `issue #32 `_) - Fixed a bug that corrupted data after reopening a database (see `issue #34 `_) v2.0.1 (2014-09-22) ^^^^^^^^^^^^^^^^^^^ - Fixed handling of Unicode data in Python 2 (see `issue #28 `_). v2.0.0 (2014-09-05) ^^^^^^^^^^^^^^^^^^^ :ref:`Upgrade Notes ` .. warning:: TinyDB changed the way data is stored. You may need to migrate your databases to the new scheme. Check out the :ref:`Upgrade Notes ` for details. - The syntax ``query in db`` has been removed, use ``db.contains`` instead. - The ``ConcurrencyMiddleware`` has been removed due to a insecure implementation (see `issue #18 `_). Consider :ref:`tinyrecord` instead. - Better support for working with :ref:`Document IDs `. - Added support for `nested comparisons `_. - Added ``all`` and ``any`` `comparisons on lists `_. - Added optional :`_. - The query cache is now a :ref:`fixed size LRU cache `. 
v1.4.0 (2014-07-22) ^^^^^^^^^^^^^^^^^^^ - Added ``insert_multiple`` function (see `issue #8 `_). v1.3.0 (2014-07-02) ^^^^^^^^^^^^^^^^^^^ - Fixed `bug #7 `_: IDs not unique. - Extended the API: ``db.count(where(...))`` and ``db.contains(where(...))``. - The syntax ``query in db`` is now **deprecated** and replaced by ``db.contains``. v1.2.0 (2014-06-19) ^^^^^^^^^^^^^^^^^^^ - Added ``update`` method (see `issue #6 `_). v1.1.1 (2014-06-14) ^^^^^^^^^^^^^^^^^^^ - Merged `PR #5 `_: Fix minor documentation typos and style issues. v1.1.0 (2014-05-06) ^^^^^^^^^^^^^^^^^^^ - Improved the docs and fixed some typos. - Refactored some internal code. - Fixed a bug with multiple ``TinyDB?`` instances. v1.0.1 (2014-04-26) ^^^^^^^^^^^^^^^^^^^ - Fixed a bug in ``JSONStorage`` that broke the database when removing entries. v1.0.0 (2013-07-20) ^^^^^^^^^^^^^^^^^^^ - First official release – consider TinyDB stable now. tinydb-4.8.2/docs/conf.py000066400000000000000000000214631470251210600152510ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # TinyDB documentation build configuration file, created by # sphinx-quickstart on Sat Jul 13 20:14:55 2013. # # This file is execfile()d with the current directory set to its containing # dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import os import sys import pkg_resources # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. # sys.path.insert(0, os.path.abspath('.')) # -- General configuration ---------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. # needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. 
They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.coverage', 'sphinx.ext.viewcode', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.extlinks'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. # source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'TinyDB' copyright = u'2021, Markus Siemens' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. try: release = pkg_resources.get_distribution('tinydb').version except pkg_resources.DistributionNotFound: print('To build the documentation, The distribution information of TinyDB') print('has to be available. Either install the package into your') print('development environment or run "pip install -e ." to setup the') print('metadata. A virtualenv is recommended!') sys.exit(1) del pkg_resources if 'dev' in release: release = release.split('dev')[0] + 'dev' version = '.'.join(release.split('.')[:2]) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: # today = '' # Else, today_fmt is used as the format for a strftime call. # today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. # default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. 
# add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). # add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. # show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. # modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. # keep_warnings = False # -- Options for HTML output -------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. # html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. # html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. # html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". # html_title = None # A shorter title for the navigation bar. Default is the same as html_title. # html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. # html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. # html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. 
# html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. # html_use_smartypants = True # Custom sidebar templates, maps document names to template names. html_sidebars = { 'index': ['sidebarlogo.html', 'links.html', 'searchbox.html'], '**': ['sidebarlogo.html', 'localtoc.html', 'links.html', 'searchbox.html'] } # Additional templates that should be rendered to pages, maps page names to # template names. # html_additional_pages = {} # If false, no module index is generated. # html_domain_indices = True # If false, no index is generated. # html_use_index = True # If true, the index is split into individual pages for each letter. # html_split_index = False # If true, links to the reST sources are added to the pages. html_show_sourcelink = False # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. # html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. # html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. # html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). # html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'TinyDBdoc' # -- Options for LaTeX output ------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). # 'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). # 'pointsize': '10pt', # Additional stuff for the LaTeX preamble. # 'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass # [howto/manual]). 
latex_documents = [ ('index', 'TinyDB.tex', u'TinyDB Documentation', u'Markus Siemens', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. # latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. # latex_use_parts = False # If true, show page references after internal links. # latex_show_pagerefs = False # If true, show URL addresses after external links. # latex_show_urls = False # Documents to append as an appendix to all manuals. # latex_appendices = [] # If false, no module index is generated. # latex_domain_indices = True # -- Options for manual page output ------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'tinydb', u'TinyDB Documentation', [u'Markus Siemens'], 1) ] # If true, show URL addresses after external links. # man_show_urls = False # -- Options for Texinfo output ----------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'TinyDB', u'TinyDB Documentation', u'Markus Siemens', 'TinyDB', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. # texinfo_appendices = [] # If false, no module index is generated. # texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. # texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. 
# texinfo_no_detailmenu = False extlinks = {'issue': ('https://github.com/msiemens/tinydb/issues/%s', 'issue ')} sys.path.append(os.path.abspath('_themes')) html_theme_path = ['_themes'] html_theme = 'flask' todo_include_todos = True tinydb-4.8.2/docs/contribute.rst000066400000000000000000000034541470251210600166600ustar00rootroot00000000000000Contribution Guidelines ####################### Whether reporting bugs, discussing improvements and new ideas or writing extensions: Contributions to TinyDB are welcome! Here's how to get started: 1. Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug 2. Fork `the repository `_ on Github, create a new branch off the `master` branch and start making your changes (known as `GitHub Flow `_) 3. Write a test which shows that the bug was fixed or that the feature works as expected 4. Send a pull request and bug the maintainer until it gets merged and published :) Philosophy of TinyDB ******************** TinyDB aims to be simple and fun to use. Therefore two key values are simplicity and elegance of interfaces and code. These values will contradict each other from time to time. In these cases, try using as little magic as possible. In any case don't forget documenting code that isn't clear at first glance. Code Conventions **************** In general the TinyDB source should always follow `PEP 8 `_. Exceptions are allowed in well justified and documented cases. However we make a small exception concerning docstrings: When using multiline docstrings, keep the opening and closing triple quotes on their own lines and add an empty line after it. .. code-block:: python def some_function(): """ Documentation ... """ # implementation ... Version Numbers *************** TinyDB follows the `SemVer versioning guidelines `_. This implies that backwards incompatible changes in the API will increment the major version. So think twice before making such changes.
tinydb-4.8.2/docs/extend.rst000066400000000000000000000142501470251210600157670ustar00rootroot00000000000000How to Extend TinyDB ==================== There are three main ways to extend TinyDB and modify its behaviour: 1. custom storages, 2. custom middlewares, 3. use hooks and overrides, and 4. subclassing ``TinyDB`` and ``Table``. Let's look at them in this order. Write a Custom Storage ---------------------- First, we have support for custom storages. By default TinyDB comes with an in-memory storage and a JSON file storage. But of course you can add your own. Let's look how you could add a `YAML `_ storage using `PyYAML `_: .. code-block:: python import yaml class YAMLStorage(Storage): def __init__(self, filename): # (1) self.filename = filename def read(self): with open(self.filename) as handle: try: data = yaml.safe_load(handle.read()) # (2) return data except yaml.YAMLError: return None # (3) def write(self, data): with open(self.filename, 'w+') as handle: yaml.dump(data, handle) def close(self): # (4) pass There are some things we should look closer at: 1. The constructor will receive all arguments passed to TinyDB when creating the database instance (except ``storage`` which TinyDB itself consumes). In other words calling ``TinyDB('something', storage=YAMLStorage)`` will pass ``'something'`` as an argument to ``YAMLStorage``. 2. We use ``yaml.safe_load`` as recommended by the `PyYAML documentation `_ when processing data from a potentially untrusted source. 3. If the storage is uninitialized, TinyDB expects the storage to return ``None`` so it can do any internal initialization that is necessary. 4. If your storage needs any cleanup (like closing file handles) before an instance is destroyed, you can put it in the ``close()`` method. To run these, you'll either have to run ``db.close()`` on your ``TinyDB`` instance or use it as a context manager, like this: .. code-block:: python with TinyDB('db.yml', storage=YAMLStorage) as db: # ... 
Finally, using the YAML storage is very straight-forward: .. code-block:: python db = TinyDB('db.yml', storage=YAMLStorage) # ... Write Custom Middleware ------------------------- Sometimes you don't want to write a new storage module but rather modify the behaviour of an existing one. As an example we'll build middleware that filters out empty items. Because middleware acts as a wrapper around a storage, they needs a ``read()`` and a ``write(data)`` method. In addition, they can access the underlying storage via ``self.storage``. Before we start implementing we should look at the structure of the data that the middleware receives. Here's what the data that goes through the middleware looks like: .. code-block:: python { '_default': { 1: {'key': 'value'}, 2: {'key': 'value'}, # other items }, # other tables } Thus, we'll need two nested loops: 1. Process every table 2. Process every item Now let's implement that: .. code-block:: python class RemoveEmptyItemsMiddleware(Middleware): def __init__(self, storage_cls): # Any middleware *has* to call the super constructor # with storage_cls super().__init__(storage_cls) # (1) def read(self): data = self.storage.read() for table_name in data: table_data = data[table_name] for doc_id in table_data: item = table_data[doc_id] if item == {}: del table_data[doc_id] return data def write(self, data): for table_name in data: table_data = data[table_name] for doc_id in table_data: item = table_data[doc_id] if item == {}: del table_data[doc_id] self.storage.write(data) def close(self): self.storage.close() Note that the constructor calls the middleware constructor (1) and passes the storage class to the middleware constructor. To wrap storage with this new middleware, we use it like this: .. code-block:: python db = TinyDB(storage=RemoveEmptyItemsMiddleware(SomeStorageClass)) Here ``SomeStorageClass`` should be replaced with the storage you want to use. 
If you leave it empty, the default storage will be used (which is the ``JSONStorage``). Use hooks and overrides ----------------------- .. _extend_hooks: There are cases when neither creating a custom storage nor using a custom middleware will allow you to adapt TinyDB in the way you need. In this case you can modify TinyDB's behavior by using predefined hooks and override points. For example you can configure the name of the default table by setting ``TinyDB.default_table_name``: .. code-block:: python TinyDB.default_table_name = 'my_table_name' Both :class:`~tinydb.database.TinyDB` and the :class:`~tinydb.table.Table` classes allow modifying their behavior using hooks and overrides. To use ``Table``'s overrides, you can access the class using ``TinyDB.table_class``: .. code-block:: python TinyDB.table_class.default_query_cache_capacity = 100 Read the :ref:`api_docs` for more details on the available hooks and override points. Subclassing ``TinyDB`` and ``Table`` ------------------------------------ Finally, there's the last option to modify TinyDB's behavior. That way you can change how TinyDB itself works more deeply than using the other extension mechanisms. When creating a subclass you can use it by using hooks and overrides to override the default classes that TinyDB uses: .. code-block:: python class MyTable(Table): # Add your method overrides ... TinyDB.table_class = MyTable # Continue using TinyDB as usual TinyDB's source code is documented with extensions in mind, explaining how everything works even for internal methods and classes. Feel free to dig into the source and adapt everything you need for your projects. tinydb-4.8.2/docs/extensions.rst000066400000000000000000000062531470251210600167030ustar00rootroot00000000000000Extensions ========== Here are some extensions that might be useful to you: ``aiotinydb`` ************* | **Repo:** https://github.com/ASMfreaK/aiotinydb | **Status:** *stable* | **Description:** asyncio compatibility shim for TinyDB. 
Enables usage of TinyDB in asyncio-aware contexts without slow synchronous IO. ``BetterJSONStorage`` ********************* | **Repo:** https://github.com/MrPigss/BetterJSONStorage | **Status:** *stable* | **Description:** BetterJSONStorage is a faster 'Storage Type' for TinyDB. It uses the faster Orjson library for parsing the JSON and BLOSC for compression. ``tinydb-appengine`` ******************** | **Repo:** https://github.com/imalento/tinydb-appengine | **Status:** *stable* | **Description:** ``tinydb-appengine`` provides TinyDB storage for App Engine. You can use JSON readonly. ``tinydb-serialization`` ************************ | **Repo:** https://github.com/msiemens/tinydb-serialization | **Status:** *stable* | **Description:** ``tinydb-serialization`` provides serialization for objects that TinyDB otherwise couldn't handle. ``tinydb-smartcache`` ********************* | **Repo:** https://github.com/msiemens/tinydb-smartcache | **Status:** *stable* | **Description:** ``tinydb-smartcache`` provides a smart query cache for TinyDB. It updates the query cache when inserting/removing/updating documents so the cache doesn't get invalidated. It's useful if you perform lots of queries while the data changes only little. ``TinyDBTimestamps`` ******************** | **Repo:** https://github.com/pachacamac/TinyDBTimestamps | **Status:** *experimental* | **Description:** Automatically add create at/ update at timestamps to TinyDB documents. ``tinyindex`` ************* | **Repo:** https://github.com/eugene-eeo/tinyindex | **Status:** *experimental* | **Description:** Document indexing for TinyDB. Basically ensures deterministic (as long as there aren't any changes to the table) yielding of documents. ``tinymongo`` ************* | **Repo:** https://github.com/schapman1974/tinymongo | **Status:** *experimental* | **Description:** A simple wrapper that allows to use TinyDB as a flat file drop-in replacement for MongoDB. 
``TinyMP`` ************* | **Repo:** https://github.com/alshapton/TinyMP | **Status:** *no longer maintained* | **Description:** A MessagePack-based storage extension to tinydb using http://msgpack.org .. _tinyrecord: ``tinyrecord`` ************** | **Repo:** https://github.com/eugene-eeo/tinyrecord | **Status:** *stable* | **Description:** Tinyrecord is a library which implements experimental atomic transaction support for the TinyDB NoSQL database. It uses a record-first then execute architecture which allows us to minimize the time that we are within a thread lock. tinydb-4.8.2/docs/getting-started.rst000066400000000000000000000113751470251210600176120ustar00rootroot00000000000000:tocdepth: 3 Getting Started =============== Installing TinyDB ----------------- To install TinyDB from PyPI, run:: $ pip install tinydb You can also grab the latest development version from GitHub_. After downloading and unpacking it, you can install it using:: $ pip install . Basic Usage ----------- Let's cover the basics before going more into detail. We'll start by setting up a TinyDB database: >>> from tinydb import TinyDB, Query >>> db = TinyDB('db.json') You now have a TinyDB database that stores its data in ``db.json``. What about inserting some data? TinyDB expects the data to be Python ``dict``\s: >>> db.insert({'type': 'apple', 'count': 7}) >>> db.insert({'type': 'peach', 'count': 3}) .. note:: The ``insert`` method returns the inserted document's ID. Read more about it here: :ref:`document_ids`. Now you can get all documents stored in the database by running: >>> db.all() [{'count': 7, 'type': 'apple'}, {'count': 3, 'type': 'peach'}] You can also iter over stored documents: >>> for item in db: >>> print(item) {'count': 7, 'type': 'apple'} {'count': 3, 'type': 'peach'} Of course you'll also want to search for specific documents. 
Let's try: >>> Fruit = Query() >>> db.search(Fruit.type == 'peach') [{'count': 3, 'type': 'peach'}] >>> db.search(Fruit.count > 5) [{'count': 7, 'type': 'apple'}] Next we'll update the ``count`` field of the apples: >>> db.update({'count': 10}, Fruit.type == 'apple') >>> db.all() [{'count': 10, 'type': 'apple'}, {'count': 3, 'type': 'peach'}] In the same manner you can also remove documents: >>> db.remove(Fruit.count < 5) >>> db.all() [{'count': 10, 'type': 'apple'}] And of course you can throw away all data to start with an empty database: >>> db.truncate() >>> db.all() [] Recap ***** Before we dive deeper, let's recapitulate the basics: +-------------------------------+---------------------------------------------------------------+ | **Inserting** | +-------------------------------+---------------------------------------------------------------+ | ``db.insert(...)`` | Insert a document | +-------------------------------+---------------------------------------------------------------+ | **Getting data** | +-------------------------------+---------------------------------------------------------------+ | ``db.all()`` | Get all documents | +-------------------------------+---------------------------------------------------------------+ | ``iter(db)`` | Iter over all documents | +-------------------------------+---------------------------------------------------------------+ | ``db.search(query)`` | Get a list of documents matching the query | +-------------------------------+---------------------------------------------------------------+ | **Updating** | +-------------------------------+---------------------------------------------------------------+ | ``db.update(fields, query)`` | Update all documents matching the query to contain ``fields`` | +-------------------------------+---------------------------------------------------------------+ | **Removing** | +-------------------------------+---------------------------------------------------------------+ | 
``db.remove(query)`` | Remove all documents matching the query | +-------------------------------+---------------------------------------------------------------+ | ``db.truncate()`` | Remove all documents | +-------------------------------+---------------------------------------------------------------+ | **Querying** | +-------------------------------+---------------------------------------------------------------+ | ``Query()`` | Create a new query object | +-------------------------------+---------------------------------------------------------------+ | ``Query().field == 2`` | Match any document that has a key ``field`` with value | | | ``== 2`` (also possible: ``!=``, ``>``, ``>=``, ``<``, ``<=``)| +-------------------------------+---------------------------------------------------------------+ .. References .. _GitHub: http://github.com/msiemens/tinydb/ tinydb-4.8.2/docs/index.rst000066400000000000000000000013431470251210600156060ustar00rootroot00000000000000Welcome to TinyDB! ================== Welcome to TinyDB, your tiny, document oriented database optimized for your happiness :) >>> from tinydb import TinyDB, Query >>> db = TinyDB('path/to/db.json') >>> User = Query() >>> db.insert({'name': 'John', 'age': 22}) >>> db.search(User.name == 'John') [{'name': 'John', 'age': 22}] User's Guide ------------ .. toctree:: :maxdepth: 2 intro getting-started usage Extending TinyDB ---------------- .. toctree:: :maxdepth: 2 Extending TinyDB TinyDB Extensions API Reference ------------- .. toctree:: :maxdepth: 2 api Additional Notes ---------------- .. toctree:: :maxdepth: 2 contribute changelog Upgrade Notes tinydb-4.8.2/docs/intro.rst000066400000000000000000000036531470251210600156400ustar00rootroot00000000000000Introduction ============ Great that you've taken time to check out the TinyDB docs! Before we begin looking at TinyDB itself, let's take some time to see whether you should use TinyDB. Why Use TinyDB? 
--------------- - **tiny:** The current source code has 1800 lines of code (with about 40% documentation) and 1600 lines tests. - **document oriented:** Like MongoDB_, you can store any document (represented as ``dict``) in TinyDB. - **optimized for your happiness:** TinyDB is designed to be simple and fun to use by providing a simple and clean API. - **written in pure Python:** TinyDB neither needs an external server (as e.g. `PyMongo `_) nor any dependencies from PyPI. - **works on Python 3.5+ and PyPy:** TinyDB works on all modern versions of Python and PyPy. - **powerfully extensible:** You can easily extend TinyDB by writing new storages or modify the behaviour of storages with Middlewares. - **100% test coverage:** No explanation needed. In short: If you need a simple database with a clean API that just works without lots of configuration, TinyDB might be the right choice for you. Why **Not** Use TinyDB? ----------------------- - You need **advanced features** like: - access from multiple processes or threads (e.g. when using Flask!), - creating indexes for tables, - an HTTP server, - managing relationships between tables or similar, - `ACID guarantees `_. - You are really concerned about **performance** and need a high speed database. To put it plainly: If you need advanced features or high performance, TinyDB is the wrong database for you – consider using databases like SQLite_, Buzhug_, CodernityDB_ or MongoDB_. .. References .. _Buzhug: https://buzhug.sourceforge.net/ .. _CodernityDB: http://labs.codernity.com/codernitydb/ .. _MongoDB: https://mongodb.org/ .. _SQLite: https://www.sqlite.org/ tinydb-4.8.2/docs/make.bat000066400000000000000000000144731470251210600153620ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . 
if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. 
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\TinyDB.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\TinyDB.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. 
goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. 
goto end ) :end tinydb-4.8.2/docs/upgrade.rst000066400000000000000000000045061470251210600161320ustar00rootroot00000000000000Upgrading to Newer Releases =========================== Version 4.0 ----------- .. _upgrade_v4_0: - API changes: - Replace ``TinyDB.purge_tables(...)`` with ``TinyDB.drop_tables(...)`` - Replace ``TinyDB.purge_table(...)`` with ``TinyDB.drop_table(...)`` - Replace ``Table.purge()`` with ``Table.truncate()`` - Replace ``TinyDB(default_table='name')`` with ``TinyDB.default_table_name = 'name'`` - Replace ``TinyDB(table_class=Class)`` with ``TinyDB.table_class = Class`` - If you were using ``TinyDB.DEFAULT_TABLE``, ``TinyDB.DEFAULT_TABLE_KWARGS``, or ``TinyDB.DEFAULT_STORAGE``: Use the new methods for customizing TinyDB described in :ref:`How to Extend TinyDB ` Version 3.0 ----------- .. _upgrade_v3_0: Breaking API Changes ^^^^^^^^^^^^^^^^^^^^ - Querying (see `Issue #62 `_): - ``where('...').contains('...')`` has been renamed to ``where('...').search('...')``. - ``where('foo').has('bar')`` is replaced by either ``where('foo').bar`` or ``Query().foo.bar``. - In case the key is not a valid Python identifier, array notation can be used: ``where('a.b.c')`` is now ``Query()['a.b.c']``. - Checking for the existence of a key has to be done explicitly: ``where('foo').exists()``. - ``SmartCacheTable`` has been moved to `msiemens/tinydb-smartcache`_. - Serialization has been moved to `msiemens/tinydb-serialization`_. - Empty storages are now expected to return ``None`` instead of raising ``ValueError`` (see `Issue #67 `_). .. _msiemens/tinydb-smartcache: https://github.com/msiemens/tinydb-smartcache .. _msiemens/tinydb-serialization: https://github.com/msiemens/tinydb-serialization .. _upgrade_v2_0: Version 2.0 ----------- Breaking API Changes ^^^^^^^^^^^^^^^^^^^^ - The syntax ``query in db`` is not supported any more. Use ``db.contains(...)`` instead. - The ``ConcurrencyMiddleware`` has been removed due to a insecure implementation (see `Issue #18 `_). 
Consider :ref:`tinyrecord` instead. Apart from that the API remains compatible to v1.4 and prior. For migration from v1 to v2, check out the `v2.0 documentation `_ tinydb-4.8.2/docs/usage.rst000066400000000000000000000637211470251210600156130ustar00rootroot00000000000000:tocdepth: 3 .. toctree:: :maxdepth: 2 Advanced Usage ============== Remarks on Storage ------------------ Before we dive deeper into the usage of TinyDB, we should stop for a moment and discuss how TinyDB stores data. To convert your data to a format that is writable to disk TinyDB uses the `Python JSON `_ module by default. It's great when only simple data types are involved but it cannot handle more complex data types like custom classes. On Python 2 it also converts strings to Unicode strings upon reading (described `here `_). If that causes problems, you can write :doc:`your own storage `, that uses a more powerful (but also slower) library like `pickle `_ or `PyYAML `_. .. hint:: Opening multiple TinyDB instances on the same data (e.g. with the ``JSONStorage``) may result in unexpected behavior due to query caching. See query_caching_ on how to disable the query cache. Queries ------- With that out of the way, let's start with TinyDB's rich set of queries. There are two main ways to construct queries. The first one resembles the syntax of popular ORM tools: >>> from tinydb import Query >>> User = Query() >>> db.search(User.name == 'John') As you can see, we first create a new Query object and then use it to specify which fields to check. Searching for nested fields is just as easy: >>> db.search(User.birthday.year == 1990) Not all fields can be accessed this way if the field name is not a valid Python identifier. 
In this case, you can switch to dict access notation: >>> # This would be invalid Python syntax: >>> db.search(User.country-code == 'foo') >>> # Use this instead: >>> db.search(User['country-code'] == 'foo') In addition, you can use arbitrary transform function where a field would be, for example: >>> from unidecode import unidecode >>> db.search(User.name.map(unidecode) == 'Jose') >>> # will match 'José' etc. The second, traditional way of constructing queries is as follows: >>> from tinydb import where >>> db.search(where('field') == 'value') Using ``where('field')`` is a shorthand for the following code: >>> db.search(Query()['field'] == 'value') Accessing nested fields with this syntax can be achieved like this: >>> db.search(where('birthday').year == 1900) >>> db.search(where('birthday')['year'] == 1900) Advanced queries ................ In the :doc:`getting-started` you've learned about the basic comparisons (``==``, ``<``, ``>``, ...). In addition to these TinyDB supports the following queries: >>> # Existence of a field: >>> db.search(User.name.exists()) >>> # Regex: >>> # Full item has to match the regex: >>> db.search(User.name.matches('[aZ]*')) >>> # Case insensitive search for 'John': >>> import re >>> db.search(User.name.matches('John', flags=re.IGNORECASE)) >>> # Any part of the item has to match the regex: >>> db.search(User.name.search('b+')) >>> # Custom test: >>> test_func = lambda s: s == 'John' >>> db.search(User.name.test(test_func)) >>> # Custom test with parameters: >>> def test_func(val, m, n): >>> return m <= val <= n >>> db.search(User.age.test(test_func, 0, 21)) >>> db.search(User.age.test(test_func, 21, 99)) Another case is if you have a ``dict`` where you want to find all documents that match this ``dict``. 
We call this searching for a fragment: >>> db.search(Query().fragment({'foo': True, 'bar': False})) [{'foo': True, 'bar': False, 'foobar': 'yes!'}] You also can search for documents where a specific field matches the fragment: >>> db.search(Query().field.fragment({'foo': True, 'bar': False})) [{'field': {'foo': True, 'bar': False, 'foobar': 'yes!'}}] When a field contains a list, you also can use the ``any`` and ``all`` methods. There are two ways to use them: with lists of values and with nested queries. Let's start with the first one. Assuming we have a user object with a groups list like this: >>> db.insert({'name': 'user1', 'groups': ['user']}) >>> db.insert({'name': 'user2', 'groups': ['admin', 'user']}) >>> db.insert({'name': 'user3', 'groups': ['sudo', 'user']}) Now we can use the following queries: >>> # User's groups include at least one value from ['admin', 'sudo'] >>> db.search(User.groups.any(['admin', 'sudo'])) [{'name': 'user2', 'groups': ['admin', 'user']}, {'name': 'user3', 'groups': ['sudo', 'user']}] >>> >>> # User's groups include all values from ['admin', 'user'] >>> db.search(User.groups.all(['admin', 'user'])) [{'name': 'user2', 'groups': ['admin', 'user']}] In some cases you may want to have more complex ``any``/``all`` queries. This is where nested queries come in as helpful.
Let's set up a table like this: >>> Group = Query() >>> Permission = Query() >>> groups = db.table('groups') >>> groups.insert({ 'name': 'user', 'permissions': [{'type': 'read'}]}) >>> groups.insert({ 'name': 'sudo', 'permissions': [{'type': 'read'}, {'type': 'sudo'}]}) >>> groups.insert({ 'name': 'admin', 'permissions': [{'type': 'read'}, {'type': 'write'}, {'type': 'sudo'}]}) Now let's search this table using nested ``any``/``all`` queries: >>> # Group has a permission with type 'read' >>> groups.search(Group.permissions.any(Permission.type == 'read')) [{'name': 'user', 'permissions': [{'type': 'read'}]}, {'name': 'sudo', 'permissions': [{'type': 'read'}, {'type': 'sudo'}]}, {'name': 'admin', 'permissions': [{'type': 'read'}, {'type': 'write'}, {'type': 'sudo'}]}] >>> # Group has ONLY permission 'read' >>> groups.search(Group.permissions.all(Permission.type == 'read')) [{'name': 'user', 'permissions': [{'type': 'read'}]}] As you can see, ``any`` tests if there is *at least one* document matching the query while ``all`` ensures *all* documents match the query. The opposite operation, checking if a single item is contained in a list, is also possible using ``one_of``: >>> db.search(User.name.one_of(['jane', 'john'])) Query modifiers ............... TinyDB also allows you to use logical operations to modify and combine queries: >>> # Negate a query: >>> db.search(~ (User.name == 'John')) >>> # Logical AND: >>> db.search((User.name == 'John') & (User.age <= 30)) >>> # Logical OR: >>> db.search((User.name == 'John') | (User.name == 'Bob')) .. note:: When using ``&`` or ``|``, make sure you wrap the conditions on both sides with parentheses or Python will mess up the comparison. Also, when using negation (``~``) you'll have to wrap the query you want to negate in parentheses. The reason for these requirements is that Python's binary operators that are used for query modifiers have a higher operator precedence than comparison operators. 
Simply put, ``~ User.name == 'John'`` is parsed by Python as ``(~User.name) == 'John'`` instead of ``~(User.name == 'John')``. See also the Python `docs on operator precedence `_ for details. You can compose queries dynamically by using the no-op query ``Query().noop()``. Recap ..... Let's review the query operations we've learned: +-------------------------------------+---------------------------------------------------------------------+ | **Queries** | +-------------------------------------+---------------------------------------------------------------------+ | ``Query().field.exists()`` | Match any document where a field called ``field`` exists | +-------------------------------------+---------------------------------------------------------------------+ | ``Query().field.matches(regex)`` | Match any document with the whole field matching the | | | regular expression | +-------------------------------------+---------------------------------------------------------------------+ | ``Query().field.search(regex)`` | Match any document with a substring of the field matching | | | the regular expression | +-------------------------------------+---------------------------------------------------------------------+ | ``Query().field.test(func, *args)`` | Matches any document for which the function returns | | | ``True`` | +-------------------------------------+---------------------------------------------------------------------+ | ``Query().field.all(query | list)`` | If given a query, matches all documents where all documents | | | in the list ``field`` match the query. | | | If given a list, matches all documents where all documents | | | in the list ``field`` are a member of the given list | +-------------------------------------+---------------------------------------------------------------------+ | ``Query().field.any(query | list)`` | If given a query, matches all documents where at least one | | | document in the list ``field`` match the query. 
| | | If given a list, matches all documents where at least one | | | documents in the list ``field`` are a member of the given | | | list | +-------------------------------------+---------------------------------------------------------------------+ | ``Query().field.one_of(list)`` | Match if the field is contained in the list | +-------------------------------------+---------------------------------------------------------------------+ | **Logical operations on queries** | +-------------------------------------+---------------------------------------------------------------------+ | ``~ (query)`` | Match documents that don't match the query (logical NOT) | +-------------------------------------+---------------------------------------------------------------------+ | ``(query1) & (query2)`` | Match documents that match both queries (logical AND) | +-------------------------------------+---------------------------------------------------------------------+ | ``(query1) | (query2)`` | Match documents that match at least one of the queries (logical OR) | +-------------------------------------+---------------------------------------------------------------------+ Handling Data ------------- Next, let's look at some more ways to insert, update and retrieve data from your database. Inserting data .............. As already described you can insert a document using ``db.insert(...)``. In case you want to insert multiple documents, you can use ``db.insert_multiple(...)``: >>> db.insert_multiple([ {'name': 'John', 'age': 22}, {'name': 'John', 'age': 37}]) >>> db.insert_multiple({'int': 1, 'value': i} for i in range(2)) Also in some cases it may be useful to specify the document ID yourself when inserting data. 
You can do that by using the :class:`~tinydb.table.Document` class: >>> db.insert(Document({'name': 'John', 'age': 22}, doc_id=12)) 12 The same is possible when using ``db.insert_multiple(...)``: >>> db.insert_multiple([ Document({'name': 'John', 'age': 22}, doc_id=12), Document({'name': 'Jane', 'age': 24}, doc_id=14), ]) [12, 14] .. note:: Inserting a ``Document`` with an ID that already exists will result in a ``ValueError`` being raised. Updating data ............. Sometimes you want to update all documents in your database. In this case, you can leave out the ``query`` argument: >>> db.update({'foo': 'bar'}) When passing a dict to ``db.update(fields, query)``, it only allows you to update a document by adding or overwriting its values. But sometimes you may need to e.g. remove one field or increment its value. In that case you can pass a function instead of ``fields``: >>> from tinydb.operations import delete >>> db.update(delete('key1'), User.name == 'John') This will remove the key ``key1`` from all matching documents. TinyDB comes with these operations: - ``delete(key)``: delete a key from the document - ``increment(key)``: increment the value of a key - ``decrement(key)``: decrement the value of a key - ``add(key, value)``: add ``value`` to the value of a key (also works for strings) - ``subtract(key, value)``: subtract ``value`` from the value of a key - ``set(key, value)``: set ``key`` to ``value`` Of course you also can write your own operations: >>> def your_operation(your_arguments): ... def transform(doc): ... # do something with the document ... # ... ... return transform ... >>> db.update(your_operation(arguments), query) In order to perform multiple update operations at once, you can use the ``update_multiple`` method like this: >>> db.update_multiple([ ... ({'int': 2}, where('char') == 'a'), ... ({'int': 4}, where('char') == 'b'), ... ]) You also can mix normal updates with update operations: >>> db.update_multiple([ ... 
({'int': 2}, where('char') == 'a'), ... (delete('int'), where('char') == 'b'), ... ]) Data access and modification ---------------------------- Upserting data .............. In some cases you'll need a mix of both ``update`` and ``insert``: ``upsert``. This operation is provided a document and a query. If it finds any documents matching the query, they will be updated with the data from the provided document. On the other hand, if no matching document is found, it inserts the provided document into the table: >>> db.upsert({'name': 'John', 'logged-in': True}, User.name == 'John') This will update all users with the name John to have ``logged-in`` set to ``True``. If no matching user is found, a new document is inserted with both the name set and the ``logged-in`` flag. To use the ID of the document as matching criterion a :class:`~tinydb.table.Document` with ``doc_id`` is passed instead of a query: >>> db.upsert(Document({'name': 'John', 'logged-in': True}, doc_id=12)) Retrieving data ............... There are several ways to retrieve data from your database. For instance you can get the number of stored documents: >>> len(db) 3 .. hint:: This will return the number of documents in the default table (see the notes on the :ref:`default table `). Then of course you can use ``db.search(...)`` as described in the :doc:`getting-started` section. But sometimes you want to get only one matching document. Instead of using >>> try: ... result = db.search(User.name == 'John')[0] ... except IndexError: ... pass you can use ``db.get(...)``: >>> db.get(User.name == 'John') {'name': 'John', 'age': 22} >>> db.get(User.name == 'Bobby') None .. caution:: If multiple documents match the query, probably a random one of them will be returned! Often you don't want to search for documents but only know whether they are stored in the database. 
In this case ``db.contains(...)`` is your friend: >>> db.contains(User.name == 'John') In a similar manner you can look up the number of documents matching a query: >>> db.count(User.name == 'John') 2 Recap ^^^^^ Let's summarize the ways to handle data: +-------------------------------+---------------------------------------------------------------+ | **Inserting data** | +-------------------------------+---------------------------------------------------------------+ | ``db.insert_multiple(...)`` | Insert multiple documents | +-------------------------------+---------------------------------------------------------------+ | **Updating data** | +-------------------------------+---------------------------------------------------------------+ | ``db.update(operation, ...)`` | Update all matching documents with a special operation | +-------------------------------+---------------------------------------------------------------+ | **Retrieving data** | +-------------------------------+---------------------------------------------------------------+ | ``len(db)`` | Get the number of documents in the database | +-------------------------------+---------------------------------------------------------------+ | ``db.get(query)`` | Get one document matching the query | +-------------------------------+---------------------------------------------------------------+ | ``db.contains(query)`` | Check if the database contains a matching document | +-------------------------------+---------------------------------------------------------------+ | ``db.count(query)`` | Get the number of matching documents | +-------------------------------+---------------------------------------------------------------+ .. note:: This was a new feature in v3.6.0 .. _document_ids: Using Document IDs ------------------ Internally TinyDB associates an ID with every document you insert. 
It's returned after inserting a document: >>> db.insert({'name': 'John', 'age': 22}) 3 >>> db.insert_multiple([{...}, {...}, {...}]) [4, 5, 6] In addition you can get the ID of already inserted documents using ``document.doc_id``. This works both with ``get`` and ``all``: >>> el = db.get(User.name == 'John') >>> el.doc_id 3 >>> el = db.all()[0] >>> el.doc_id 1 >>> el = db.all()[-1] >>> el.doc_id 12 Different TinyDB methods also work with IDs, namely: ``update``, ``remove``, ``contains`` and ``get``. The first two also return a list of affected IDs. >>> db.update({'value': 2}, doc_ids=[1, 2]) >>> db.contains(doc_id=1) True >>> db.remove(doc_ids=[1, 2]) >>> db.get(doc_id=3) {...} >>> db.get(doc_ids=[1, 2]) [{...}, {...}] Using ``doc_id``/``doc_ids`` instead of ``Query()`` again is slightly faster in operation. Recap ..... Let's sum up the way TinyDB supports working with IDs: +-------------------------------------+------------------------------------------------------------+ | **Getting a document's ID** | +-------------------------------------+------------------------------------------------------------+ | ``db.insert(...)`` | Returns the inserted document's ID | +-------------------------------------+------------------------------------------------------------+ | ``db.insert_multiple(...)`` | Returns the inserted documents' ID | +-------------------------------------+------------------------------------------------------------+ | ``document.doc_id`` | Get the ID of a document fetched from the db | +-------------------------------------+------------------------------------------------------------+ | **Working with IDs** | +-------------------------------------+------------------------------------------------------------+ | ``db.get(doc_id=...)`` | Get the document with the given ID | +-------------------------------------+------------------------------------------------------------+ | ``db.contains(doc_id=...)`` | Check if the db contains a document with the given | 
| | IDs | +-------------------------------------+------------------------------------------------------------+ | ``db.update({...}, doc_ids=[...])`` | Update all documents with the given IDs | +-------------------------------------+------------------------------------------------------------+ | ``db.remove(doc_ids=[...])`` | Remove all documents with the given IDs | +-------------------------------------+------------------------------------------------------------+ Tables ------ TinyDB supports working with multiple tables. They behave just the same as the ``TinyDB`` class. To create and use a table, use ``db.table(name)``. >>> table = db.table('table_name') >>> table.insert({'value': True}) >>> table.all() [{'value': True}] >>> for row in table: >>> print(row) {'value': True} To remove a table from a database, use: >>> db.drop_table('table_name') If on the other hand you want to remove all tables, use the counterpart: >>> db.drop_tables() Finally, you can get a list with the names of all tables in your database: >>> db.tables() {'_default', 'table_name'} .. _default_table: Default Table ............. TinyDB uses a table named ``_default`` as the default table. All operations on the database object (like ``db.insert(...)``) operate on this table. The name of this table can be modified by setting the ``default_table_name`` class variable to modify the default table name for all instances: >>> #1: for a single instance only >>> db = TinyDB(storage=SomeStorage) >>> db.default_table_name = 'my-default' >>> #2: for all instances >>> TinyDB.default_table_name = 'my-default' .. _query_caching: Query Caching ............. TinyDB caches query result for performance. That way re-running a query won't have to read the data from the storage as long as the database hasn't been modified. You can optimize the query cache size by passing the ``cache_size`` to the ``table(...)`` function: >>> table = db.table('table_name', cache_size=30) .. 
hint:: You can set ``cache_size`` to ``None`` to make the cache unlimited in size. Also, you can set ``cache_size`` to 0 to disable it. .. hint:: It's not possible to open the same table multiple times with different settings. After the first invocation, all the subsequent calls will return the same table with the same settings as the first one. .. hint:: The TinyDB query cache doesn't check if the underlying storage that the database uses has been modified by an external process. In this case the query cache may return outdated results. To clear the cache and read data from the storage again you can use ``db.clear_cache()``. .. hint:: When using an unlimited cache size and ``test()`` queries, TinyDB will store a reference to the test function. As a result of that behavior long-running applications that use ``lambda`` functions as a test function may experience memory leaks. Storage & Middleware -------------------- Storage Types ............. TinyDB comes with two storage types: JSON and in-memory. By default TinyDB stores its data in JSON files so you have to specify the path where to store it: >>> from tinydb import TinyDB, where >>> db = TinyDB('path/to/db.json') To use the in-memory storage, use: >>> from tinydb.storages import MemoryStorage >>> db = TinyDB(storage=MemoryStorage) .. hint:: All arguments except for the ``storage`` argument are forwarded to the underlying storage. For the JSON storage you can use this to pass additional keyword arguments to Python's `json.dump(...) `_ method. For example, you can set it to create prettified JSON files like this: >>> db = TinyDB('db.json', sort_keys=True, indent=4, separators=(',', ': ')) To modify the default storage for all ``TinyDB`` instances, set the ``default_storage_class`` class variable: >>> TinyDB.default_storage_class = MemoryStorage In case you need to access the storage instance directly, you can use the ``storage`` property of your TinyDB instance. 
This may be useful to call methods directly on the storage or middleware: >>> db = TinyDB(storage=CachingMiddleware(MemoryStorage)) >>> db.storage.flush() Middleware .......... Middleware wraps around existing storage allowing you to customize its behaviour. >>> from tinydb.storages import JSONStorage >>> from tinydb.middlewares import CachingMiddleware >>> db = TinyDB('/path/to/db.json', storage=CachingMiddleware(JSONStorage)) .. hint:: You can nest middleware: >>> db = TinyDB('/path/to/db.json', storage=FirstMiddleware(SecondMiddleware(JSONStorage))) CachingMiddleware ^^^^^^^^^^^^^^^^^ The ``CachingMiddleware`` improves speed by reducing disk I/O. It caches all read operations and writes data to disk after a configured number of write operations. To make sure that all data is safely written when closing the table, use one of these ways: .. code-block:: python # Using a context manager: with database as db: # Your operations .. code-block:: python # Using the close function db.close() .. _mypy_type_checking: MyPy Type Checking ------------------ TinyDB comes with type annotations that MyPy can use to make sure you're using the API correctly. Unfortunately, MyPy doesn't understand all code patterns that TinyDB uses. For that reason TinyDB ships a MyPy plugin that helps correctly type check code that uses TinyDB. To use it, add it to the plugins list in the `MyPy configuration file `_ (typically located in ``setup.cfg`` or ``mypy.ini``): .. code-block:: ini [mypy] plugins = tinydb.mypy_plugin What's next ----------- Congratulations, you've made it through the user guide! Now go and build something awesome or dive deeper into TinyDB with these resources: - Want to learn how to customize TinyDB (storages, middlewares) and what extensions exist? Check out :doc:`extend` and :doc:`extensions`. - Want to study the API in detail? Read :doc:`api`. - Interested in contributing to the TinyDB development guide? Go on to the :doc:`contribute`. 
tinydb-4.8.2/mypy.ini000066400000000000000000000000471470251210600145140ustar00rootroot00000000000000[mypy] plugins = tinydb/mypy_plugin.py tinydb-4.8.2/poetry.lock000066400000000000000000002063411470251210600152160ustar00rootroot00000000000000# This file is automatically @generated by Poetry 1.6.1 and should not be changed by hand. [[package]] name = "alabaster" version = "0.7.13" description = "A configurable sidebar-enabled Sphinx theme" optional = false python-versions = ">=3.6" files = [ {file = "alabaster-0.7.13-py3-none-any.whl", hash = "sha256:1ee19aca801bbabb5ba3f5f258e4422dfa86f82f3e9cefb0859b283cdd7f62a3"}, {file = "alabaster-0.7.13.tar.gz", hash = "sha256:a27a4a084d5e690e16e01e03ad2b2e552c61a65469419b907243193de1a84ae2"}, ] [[package]] name = "attrs" version = "24.2.0" description = "Classes Without Boilerplate" optional = false python-versions = ">=3.7" files = [ {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, ] [package.extras] benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] tests = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] [[package]] name = "babel" version = "2.16.0" description = 
"Internationalization utilities" optional = false python-versions = ">=3.8" files = [ {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, ] [package.dependencies] pytz = {version = ">=2015.7", markers = "python_version < \"3.9\""} [package.extras] dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] [[package]] name = "certifi" version = "2024.8.30" description = "Python package for providing Mozilla's CA Bundle." optional = false python-versions = ">=3.6" files = [ {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, ] [[package]] name = "charset-normalizer" version = "3.3.2" description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
optional = false python-versions = ">=3.7.0" files = [ {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = 
"sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = "sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, {file = 
"charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, {file = 
"charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, {file = 
"charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, {file = 
"charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, {file = 
"charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, {file = "charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, ] [[package]] name = "colorama" version = "0.4.6" description = "Cross-platform colored terminal text." optional = false python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" files = [ {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, ] [[package]] name = "coverage" version = "6.5.0" description = "Code coverage measurement for Python" optional = false python-versions = ">=3.7" files = [ {file = "coverage-6.5.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ef8674b0ee8cc11e2d574e3e2998aea5df5ab242e012286824ea3c6970580e53"}, {file = "coverage-6.5.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:784f53ebc9f3fd0e2a3f6a78b2be1bd1f5575d7863e10c6e12504f240fd06660"}, {file = "coverage-6.5.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b4a5be1748d538a710f87542f22c2cad22f80545a847ad91ce45e77417293eb4"}, {file = 
"coverage-6.5.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83516205e254a0cb77d2d7bb3632ee019d93d9f4005de31dca0a8c3667d5bc04"}, {file = "coverage-6.5.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:af4fffaffc4067232253715065e30c5a7ec6faac36f8fc8d6f64263b15f74db0"}, {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:97117225cdd992a9c2a5515db1f66b59db634f59d0679ca1fa3fe8da32749cae"}, {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:a1170fa54185845505fbfa672f1c1ab175446c887cce8212c44149581cf2d466"}, {file = "coverage-6.5.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:11b990d520ea75e7ee8dcab5bc908072aaada194a794db9f6d7d5cfd19661e5a"}, {file = "coverage-6.5.0-cp310-cp310-win32.whl", hash = "sha256:5dbec3b9095749390c09ab7c89d314727f18800060d8d24e87f01fb9cfb40b32"}, {file = "coverage-6.5.0-cp310-cp310-win_amd64.whl", hash = "sha256:59f53f1dc5b656cafb1badd0feb428c1e7bc19b867479ff72f7a9dd9b479f10e"}, {file = "coverage-6.5.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4a5375e28c5191ac38cca59b38edd33ef4cc914732c916f2929029b4bfb50795"}, {file = "coverage-6.5.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c4ed2820d919351f4167e52425e096af41bfabacb1857186c1ea32ff9983ed75"}, {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:33a7da4376d5977fbf0a8ed91c4dffaaa8dbf0ddbf4c8eea500a2486d8bc4d7b"}, {file = "coverage-6.5.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a8fb6cf131ac4070c9c5a3e21de0f7dc5a0fbe8bc77c9456ced896c12fcdad91"}, {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:a6b7d95969b8845250586f269e81e5dfdd8ff828ddeb8567a4a2eaa7313460c4"}, {file = 
"coverage-6.5.0-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:1ef221513e6f68b69ee9e159506d583d31aa3567e0ae84eaad9d6ec1107dddaa"}, {file = "coverage-6.5.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:cca4435eebea7962a52bdb216dec27215d0df64cf27fc1dd538415f5d2b9da6b"}, {file = "coverage-6.5.0-cp311-cp311-win32.whl", hash = "sha256:98e8a10b7a314f454d9eff4216a9a94d143a7ee65018dd12442e898ee2310578"}, {file = "coverage-6.5.0-cp311-cp311-win_amd64.whl", hash = "sha256:bc8ef5e043a2af066fa8cbfc6e708d58017024dc4345a1f9757b329a249f041b"}, {file = "coverage-6.5.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4433b90fae13f86fafff0b326453dd42fc9a639a0d9e4eec4d366436d1a41b6d"}, {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4f05d88d9a80ad3cac6244d36dd89a3c00abc16371769f1340101d3cb899fc3"}, {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:94e2565443291bd778421856bc975d351738963071e9b8839ca1fc08b42d4bef"}, {file = "coverage-6.5.0-cp37-cp37m-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:027018943386e7b942fa832372ebc120155fd970837489896099f5cfa2890f79"}, {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:255758a1e3b61db372ec2736c8e2a1fdfaf563977eedbdf131de003ca5779b7d"}, {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:851cf4ff24062c6aec510a454b2584f6e998cada52d4cb58c5e233d07172e50c"}, {file = "coverage-6.5.0-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:12adf310e4aafddc58afdb04d686795f33f4d7a6fa67a7a9d4ce7d6ae24d949f"}, {file = "coverage-6.5.0-cp37-cp37m-win32.whl", hash = "sha256:b5604380f3415ba69de87a289a2b56687faa4fe04dbee0754bfcae433489316b"}, {file = "coverage-6.5.0-cp37-cp37m-win_amd64.whl", hash = "sha256:4a8dbc1f0fbb2ae3de73eb0bdbb914180c7abfbf258e90b311dcd4f585d44bd2"}, {file = 
"coverage-6.5.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:d900bb429fdfd7f511f868cedd03a6bbb142f3f9118c09b99ef8dc9bf9643c3c"}, {file = "coverage-6.5.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:2198ea6fc548de52adc826f62cb18554caedfb1d26548c1b7c88d8f7faa8f6ba"}, {file = "coverage-6.5.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c4459b3de97b75e3bd6b7d4b7f0db13f17f504f3d13e2a7c623786289dd670e"}, {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:20c8ac5386253717e5ccc827caad43ed66fea0efe255727b1053a8154d952398"}, {file = "coverage-6.5.0-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b07130585d54fe8dff3d97b93b0e20290de974dc8177c320aeaf23459219c0b"}, {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:dbdb91cd8c048c2b09eb17713b0c12a54fbd587d79adcebad543bc0cd9a3410b"}, {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:de3001a203182842a4630e7b8d1a2c7c07ec1b45d3084a83d5d227a3806f530f"}, {file = "coverage-6.5.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:e07f4a4a9b41583d6eabec04f8b68076ab3cd44c20bd29332c6572dda36f372e"}, {file = "coverage-6.5.0-cp38-cp38-win32.whl", hash = "sha256:6d4817234349a80dbf03640cec6109cd90cba068330703fa65ddf56b60223a6d"}, {file = "coverage-6.5.0-cp38-cp38-win_amd64.whl", hash = "sha256:7ccf362abd726b0410bf8911c31fbf97f09f8f1061f8c1cf03dfc4b6372848f6"}, {file = "coverage-6.5.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:633713d70ad6bfc49b34ead4060531658dc6dfc9b3eb7d8a716d5873377ab745"}, {file = "coverage-6.5.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:95203854f974e07af96358c0b261f1048d8e1083f2de9b1c565e1be4a3a48cfc"}, {file = "coverage-6.5.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b9023e237f4c02ff739581ef35969c3739445fb059b060ca51771e69101efffe"}, {file = 
"coverage-6.5.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:265de0fa6778d07de30bcf4d9dc471c3dc4314a23a3c6603d356a3c9abc2dfcf"}, {file = "coverage-6.5.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8f830ed581b45b82451a40faabb89c84e1a998124ee4212d440e9c6cf70083e5"}, {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7b6be138d61e458e18d8e6ddcddd36dd96215edfe5f1168de0b1b32635839b62"}, {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:42eafe6778551cf006a7c43153af1211c3aaab658d4d66fa5fcc021613d02518"}, {file = "coverage-6.5.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:723e8130d4ecc8f56e9a611e73b31219595baa3bb252d539206f7bbbab6ffc1f"}, {file = "coverage-6.5.0-cp39-cp39-win32.whl", hash = "sha256:d9ecf0829c6a62b9b573c7bb6d4dcd6ba8b6f80be9ba4fc7ed50bf4ac9aecd72"}, {file = "coverage-6.5.0-cp39-cp39-win_amd64.whl", hash = "sha256:fc2af30ed0d5ae0b1abdb4ebdce598eafd5b35397d4d75deb341a614d333d987"}, {file = "coverage-6.5.0-pp36.pp37.pp38-none-any.whl", hash = "sha256:1431986dac3923c5945271f169f59c45b8802a114c8f548d611f2015133df77a"}, {file = "coverage-6.5.0.tar.gz", hash = "sha256:f642e90754ee3e06b0e7e51bce3379590e76b7f76b708e1a71ff043f87025c84"}, ] [package.dependencies] tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} [package.extras] toml = ["tomli"] [[package]] name = "coveralls" version = "3.3.1" description = "Show coverage stats online via coveralls.io" optional = false python-versions = ">= 3.5" files = [ {file = "coveralls-3.3.1-py2.py3-none-any.whl", hash = "sha256:f42015f31d386b351d4226389b387ae173207058832fbf5c8ec4b40e27b16026"}, {file = "coveralls-3.3.1.tar.gz", hash = "sha256:b32a8bb5d2df585207c119d6c01567b81fba690c9c10a753bfe27a335bfc43ea"}, ] [package.dependencies] coverage = ">=4.1,<6.0.dev0 || >6.1,<6.1.1 || 
>6.1.1,<7.0" docopt = ">=0.6.1" requests = ">=1.0.0" [package.extras] yaml = ["PyYAML (>=3.10)"] [[package]] name = "docopt" version = "0.6.2" description = "Pythonic argument parser, that will make you smile" optional = false python-versions = "*" files = [ {file = "docopt-0.6.2.tar.gz", hash = "sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"}, ] [[package]] name = "docutils" version = "0.20.1" description = "Docutils -- Python Documentation Utilities" optional = false python-versions = ">=3.7" files = [ {file = "docutils-0.20.1-py3-none-any.whl", hash = "sha256:96f387a2c5562db4476f09f13bbab2192e764cac08ebbf3a34a95d9b1e4a59d6"}, {file = "docutils-0.20.1.tar.gz", hash = "sha256:f08a4e276c3a1583a86dce3e34aba3fe04d02bba2dd51ed16106244e8a923e3b"}, ] [[package]] name = "exceptiongroup" version = "1.2.2" description = "Backport of PEP 654 (exception groups)" optional = false python-versions = ">=3.7" files = [ {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, ] [package.extras] test = ["pytest (>=6)"] [[package]] name = "filelock" version = "3.16.1" description = "A platform independent file lock." 
optional = false python-versions = ">=3.8" files = [ {file = "filelock-3.16.1-py3-none-any.whl", hash = "sha256:2082e5703d51fbf98ea75855d9d5527e33d8ff23099bec374a134febee6946b0"}, {file = "filelock-3.16.1.tar.gz", hash = "sha256:c249fbfcd5db47e5e2d6d62198e565475ee65e4831e2561c8e313fa7eb961435"}, ] [package.extras] docs = ["furo (>=2024.8.6)", "sphinx (>=8.0.2)", "sphinx-autodoc-typehints (>=2.4.1)"] testing = ["covdefaults (>=2.3)", "coverage (>=7.6.1)", "diff-cover (>=9.2)", "pytest (>=8.3.3)", "pytest-asyncio (>=0.24)", "pytest-cov (>=5)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.26.4)"] typing = ["typing-extensions (>=4.12.2)"] [[package]] name = "idna" version = "3.10" description = "Internationalized Domain Names in Applications (IDNA)" optional = false python-versions = ">=3.6" files = [ {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, ] [package.extras] all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] [[package]] name = "imagesize" version = "1.4.1" description = "Getting image size from png/jpeg/jpeg2000/gif file" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" files = [ {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, ] [[package]] name = "importlib-metadata" version = "8.5.0" description = "Read metadata from Python packages" optional = false python-versions = ">=3.8" files = [ {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, {file = "importlib_metadata-8.5.0.tar.gz", hash = 
"sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, ] [package.dependencies] zipp = ">=3.20" [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] perf = ["ipython"] test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] type = ["pytest-mypy"] [[package]] name = "iniconfig" version = "2.0.0" description = "brain-dead simple config-ini parsing" optional = false python-versions = ">=3.7" files = [ {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, ] [[package]] name = "jinja2" version = "3.1.4" description = "A very fast and expressive template engine." optional = false python-versions = ">=3.7" files = [ {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, ] [package.dependencies] MarkupSafe = ">=2.0" [package.extras] i18n = ["Babel (>=2.7)"] [[package]] name = "markupsafe" version = "2.1.5" description = "Safely add untrusted strings to HTML/XML markup." 
optional = false python-versions = ">=3.7" files = [ {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:a17a92de5231666cfbe003f0e4b9b3a7ae3afb1ec2845aadc2bacc93ff85febc"}, {file = "MarkupSafe-2.1.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:72b6be590cc35924b02c78ef34b467da4ba07e4e0f0454a2c5907f473fc50ce5"}, {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e61659ba32cf2cf1481e575d0462554625196a1f2fc06a1c777d3f48e8865d46"}, {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2174c595a0d73a3080ca3257b40096db99799265e1c27cc5a610743acd86d62f"}, {file = "MarkupSafe-2.1.5-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ae2ad8ae6ebee9d2d94b17fb62763125f3f374c25618198f40cbb8b525411900"}, {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:075202fa5b72c86ad32dc7d0b56024ebdbcf2048c0ba09f1cde31bfdd57bcfff"}, {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:598e3276b64aff0e7b3451b72e94fa3c238d452e7ddcd893c3ab324717456bad"}, {file = "MarkupSafe-2.1.5-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fce659a462a1be54d2ffcacea5e3ba2d74daa74f30f5f143fe0c58636e355fdd"}, {file = "MarkupSafe-2.1.5-cp310-cp310-win32.whl", hash = "sha256:d9fad5155d72433c921b782e58892377c44bd6252b5af2f67f16b194987338a4"}, {file = "MarkupSafe-2.1.5-cp310-cp310-win_amd64.whl", hash = "sha256:bf50cd79a75d181c9181df03572cdce0fbb75cc353bc350712073108cba98de5"}, {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:629ddd2ca402ae6dbedfceeba9c46d5f7b2a61d9749597d4307f943ef198fc1f"}, {file = "MarkupSafe-2.1.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5b7b716f97b52c5a14bffdf688f971b2d5ef4029127f1ad7a513973cfd818df2"}, {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:6ec585f69cec0aa07d945b20805be741395e28ac1627333b1c5b0105962ffced"}, {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b91c037585eba9095565a3556f611e3cbfaa42ca1e865f7b8015fe5c7336d5a5"}, {file = "MarkupSafe-2.1.5-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7502934a33b54030eaf1194c21c692a534196063db72176b0c4028e140f8f32c"}, {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:0e397ac966fdf721b2c528cf028494e86172b4feba51d65f81ffd65c63798f3f"}, {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:c061bb86a71b42465156a3ee7bd58c8c2ceacdbeb95d05a99893e08b8467359a"}, {file = "MarkupSafe-2.1.5-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3a57fdd7ce31c7ff06cdfbf31dafa96cc533c21e443d57f5b1ecc6cdc668ec7f"}, {file = "MarkupSafe-2.1.5-cp311-cp311-win32.whl", hash = "sha256:397081c1a0bfb5124355710fe79478cdbeb39626492b15d399526ae53422b906"}, {file = "MarkupSafe-2.1.5-cp311-cp311-win_amd64.whl", hash = "sha256:2b7c57a4dfc4f16f7142221afe5ba4e093e09e728ca65c51f5620c9aaeb9a617"}, {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1"}, {file = "MarkupSafe-2.1.5-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4"}, {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee"}, {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5"}, {file = "MarkupSafe-2.1.5-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b"}, 
{file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a"}, {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f"}, {file = "MarkupSafe-2.1.5-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169"}, {file = "MarkupSafe-2.1.5-cp312-cp312-win32.whl", hash = "sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad"}, {file = "MarkupSafe-2.1.5-cp312-cp312-win_amd64.whl", hash = "sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:c8b29db45f8fe46ad280a7294f5c3ec36dbac9491f2d1c17345be8e69cc5928f"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ec6a563cff360b50eed26f13adc43e61bc0c04d94b8be985e6fb24b81f6dcfdf"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a549b9c31bec33820e885335b451286e2969a2d9e24879f83fe904a5ce59d70a"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f11aa001c540f62c6166c7726f71f7573b52c68c31f014c25cc7901deea0b52"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:7b2e5a267c855eea6b4283940daa6e88a285f5f2a67f2220203786dfa59b37e9"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:2d2d793e36e230fd32babe143b04cec8a8b3eb8a3122d2aceb4a371e6b09b8df"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:ce409136744f6521e39fd8e2a24c53fa18ad67aa5bc7c2cf83645cce5b5c4e50"}, {file = "MarkupSafe-2.1.5-cp37-cp37m-win32.whl", hash = "sha256:4096e9de5c6fdf43fb4f04c26fb114f61ef0bf2e5604b6ee3019d51b69e8c371"}, {file = 
"MarkupSafe-2.1.5-cp37-cp37m-win_amd64.whl", hash = "sha256:4275d846e41ecefa46e2015117a9f491e57a71ddd59bbead77e904dc02b1bed2"}, {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:656f7526c69fac7f600bd1f400991cc282b417d17539a1b228617081106feb4a"}, {file = "MarkupSafe-2.1.5-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:97cafb1f3cbcd3fd2b6fbfb99ae11cdb14deea0736fc2b0952ee177f2b813a46"}, {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f3fbcb7ef1f16e48246f704ab79d79da8a46891e2da03f8783a5b6fa41a9532"}, {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fa9db3f79de01457b03d4f01b34cf91bc0048eb2c3846ff26f66687c2f6d16ab"}, {file = "MarkupSafe-2.1.5-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffee1f21e5ef0d712f9033568f8344d5da8cc2869dbd08d87c84656e6a2d2f68"}, {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:5dedb4db619ba5a2787a94d877bc8ffc0566f92a01c0ef214865e54ecc9ee5e0"}, {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:30b600cf0a7ac9234b2638fbc0fb6158ba5bdcdf46aeb631ead21248b9affbc4"}, {file = "MarkupSafe-2.1.5-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:8dd717634f5a044f860435c1d8c16a270ddf0ef8588d4887037c5028b859b0c3"}, {file = "MarkupSafe-2.1.5-cp38-cp38-win32.whl", hash = "sha256:daa4ee5a243f0f20d528d939d06670a298dd39b1ad5f8a72a4275124a7819eff"}, {file = "MarkupSafe-2.1.5-cp38-cp38-win_amd64.whl", hash = "sha256:619bc166c4f2de5caa5a633b8b7326fbe98e0ccbfacabd87268a2b15ff73a029"}, {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:7a68b554d356a91cce1236aa7682dc01df0edba8d043fd1ce607c49dd3c1edcf"}, {file = "MarkupSafe-2.1.5-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:db0b55e0f3cc0be60c1f19efdde9a637c32740486004f20d1cff53c3c0ece4d2"}, {file = 
"MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e53af139f8579a6d5f7b76549125f0d94d7e630761a2111bc431fd820e163b8"}, {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17b950fccb810b3293638215058e432159d2b71005c74371d784862b7e4683f3"}, {file = "MarkupSafe-2.1.5-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c31f53cdae6ecfa91a77820e8b151dba54ab528ba65dfd235c80b086d68a465"}, {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:bff1b4290a66b490a2f4719358c0cdcd9bafb6b8f061e45c7a2460866bf50c2e"}, {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:bc1667f8b83f48511b94671e0e441401371dfd0f0a795c7daa4a3cd1dde55bea"}, {file = "MarkupSafe-2.1.5-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5049256f536511ee3f7e1b3f87d1d1209d327e818e6ae1365e8653d7e3abb6a6"}, {file = "MarkupSafe-2.1.5-cp39-cp39-win32.whl", hash = "sha256:00e046b6dd71aa03a41079792f8473dc494d564611a8f89bbbd7cb93295ebdcf"}, {file = "MarkupSafe-2.1.5-cp39-cp39-win_amd64.whl", hash = "sha256:fa173ec60341d6bb97a89f5ea19c85c5643c1e7dedebc22f5181eb73573142c5"}, {file = "MarkupSafe-2.1.5.tar.gz", hash = "sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b"}, ] [[package]] name = "mypy" version = "1.11.2" description = "Optional static typing for Python" optional = false python-versions = ">=3.8" files = [ {file = "mypy-1.11.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d42a6dd818ffce7be66cce644f1dff482f1d97c53ca70908dff0b9ddc120b77a"}, {file = "mypy-1.11.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:801780c56d1cdb896eacd5619a83e427ce436d86a3bdf9112527f24a66618fef"}, {file = "mypy-1.11.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:41ea707d036a5307ac674ea172875f40c9d55c5394f888b168033177fce47383"}, {file = 
"mypy-1.11.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6e658bd2d20565ea86da7d91331b0eed6d2eee22dc031579e6297f3e12c758c8"}, {file = "mypy-1.11.2-cp310-cp310-win_amd64.whl", hash = "sha256:478db5f5036817fe45adb7332d927daa62417159d49783041338921dcf646fc7"}, {file = "mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385"}, {file = "mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca"}, {file = "mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104"}, {file = "mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4"}, {file = "mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6"}, {file = "mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318"}, {file = "mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36"}, {file = "mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987"}, {file = "mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca"}, {file = "mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70"}, {file = "mypy-1.11.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:37c7fa6121c1cdfcaac97ce3d3b5588e847aa79b580c1e922bb5d5d2902df19b"}, {file = "mypy-1.11.2-cp38-cp38-macosx_11_0_arm64.whl", hash = 
"sha256:4a8a53bc3ffbd161b5b2a4fff2f0f1e23a33b0168f1c0778ec70e1a3d66deb86"}, {file = "mypy-1.11.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2ff93107f01968ed834f4256bc1fc4475e2fecf6c661260066a985b52741ddce"}, {file = "mypy-1.11.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:edb91dded4df17eae4537668b23f0ff6baf3707683734b6a818d5b9d0c0c31a1"}, {file = "mypy-1.11.2-cp38-cp38-win_amd64.whl", hash = "sha256:ee23de8530d99b6db0573c4ef4bd8f39a2a6f9b60655bf7a1357e585a3486f2b"}, {file = "mypy-1.11.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:801ca29f43d5acce85f8e999b1e431fb479cb02d0e11deb7d2abb56bdaf24fd6"}, {file = "mypy-1.11.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:af8d155170fcf87a2afb55b35dc1a0ac21df4431e7d96717621962e4b9192e70"}, {file = "mypy-1.11.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f7821776e5c4286b6a13138cc935e2e9b6fde05e081bdebf5cdb2bb97c9df81d"}, {file = "mypy-1.11.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:539c570477a96a4e6fb718b8d5c3e0c0eba1f485df13f86d2970c91f0673148d"}, {file = "mypy-1.11.2-cp39-cp39-win_amd64.whl", hash = "sha256:3f14cd3d386ac4d05c5a39a51b84387403dadbd936e17cb35882134d4f8f0d24"}, {file = "mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12"}, {file = "mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79"}, ] [package.dependencies] mypy-extensions = ">=1.0.0" tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} typing-extensions = ">=4.6.0" [package.extras] dmypy = ["psutil (>=4.0)"] install-types = ["pip"] mypyc = ["setuptools (>=50)"] reports = ["lxml"] [[package]] name = "mypy-extensions" version = "1.0.0" description = "Type system extensions for programs checked with the mypy type checker." 
optional = false python-versions = ">=3.5" files = [ {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, ] [[package]] name = "packaging" version = "24.1" description = "Core utilities for Python packages" optional = false python-versions = ">=3.8" files = [ {file = "packaging-24.1-py3-none-any.whl", hash = "sha256:5b8f2217dbdbd2f7f384c41c628544e6d52f2d0f53c6d0c3ea61aa5d1d7ff124"}, {file = "packaging-24.1.tar.gz", hash = "sha256:026ed72c8ed3fcce5bf8950572258698927fd1dbda10a5e981cdf0ac37f4f002"}, ] [[package]] name = "pluggy" version = "1.5.0" description = "plugin and hook calling mechanisms for python" optional = false python-versions = ">=3.8" files = [ {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, ] [package.extras] dev = ["pre-commit", "tox"] testing = ["pytest", "pytest-benchmark"] [[package]] name = "py" version = "1.11.0" description = "library with cross-python path, ini-parsing, io, code, log facilities" optional = false python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" files = [ {file = "py-1.11.0-py2.py3-none-any.whl", hash = "sha256:607c53218732647dff4acdfcd50cb62615cedf612e72d1724fb1a0cc6405b378"}, {file = "py-1.11.0.tar.gz", hash = "sha256:51c75c4126074b472f746a24399ad32f6053d1b34b68d2fa41e558e6f4a98719"}, ] [[package]] name = "pycodestyle" version = "2.12.1" description = "Python style guide checker" optional = false python-versions = ">=3.8" files = [ {file = "pycodestyle-2.12.1-py2.py3-none-any.whl", hash = "sha256:46f0fb92069a7c28ab7bb558f05bfc0110dac69a0cd23c61ea0040283a9d78b3"}, {file = "pycodestyle-2.12.1.tar.gz", hash = 
"sha256:6838eae08bbce4f6accd5d5572075c63626a15ee3e6f842df996bf62f6d73521"}, ] [[package]] name = "pygments" version = "2.18.0" description = "Pygments is a syntax highlighting package written in Python." optional = false python-versions = ">=3.8" files = [ {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, ] [package.extras] windows-terminal = ["colorama (>=0.4.6)"] [[package]] name = "pytest" version = "7.4.4" description = "pytest: simple powerful testing with Python" optional = false python-versions = ">=3.7" files = [ {file = "pytest-7.4.4-py3-none-any.whl", hash = "sha256:b090cdf5ed60bf4c45261be03239c2c1c22df034fbffe691abe93cd80cea01d8"}, {file = "pytest-7.4.4.tar.gz", hash = "sha256:2cf0005922c6ace4a3e2ec8b4080eb0d9753fdc93107415332f50ce9e7994280"}, ] [package.dependencies] colorama = {version = "*", markers = "sys_platform == \"win32\""} exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} iniconfig = "*" packaging = "*" pluggy = ">=0.12,<2.0" tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} [package.extras] testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] [[package]] name = "pytest-cov" version = "4.1.0" description = "Pytest plugin for measuring coverage." 
optional = false python-versions = ">=3.7" files = [ {file = "pytest-cov-4.1.0.tar.gz", hash = "sha256:3904b13dfbfec47f003b8e77fd5b589cd11904a21ddf1ab38a64f204d6a10ef6"}, {file = "pytest_cov-4.1.0-py3-none-any.whl", hash = "sha256:6ba70b9e97e69fcc3fb45bfeab2d0a138fb65c4d0d6a41ef33983ad114be8c3a"}, ] [package.dependencies] coverage = {version = ">=5.2.1", extras = ["toml"]} pytest = ">=4.6" [package.extras] testing = ["fields", "hunter", "process-tests", "pytest-xdist", "six", "virtualenv"] [[package]] name = "pytest-mypy" version = "0.10.3" description = "Mypy static type checker plugin for Pytest" optional = false python-versions = ">=3.6" files = [ {file = "pytest-mypy-0.10.3.tar.gz", hash = "sha256:f8458f642323f13a2ca3e2e61509f7767966b527b4d8adccd5032c3e7b4fd3db"}, {file = "pytest_mypy-0.10.3-py3-none-any.whl", hash = "sha256:7638d0d3906848fc1810cb2f5cc7fceb4cc5c98524aafcac58f28620e3102053"}, ] [package.dependencies] attrs = ">=19.0" filelock = ">=3.0" mypy = [ {version = ">=0.900", markers = "python_version >= \"3.11\""}, {version = ">=0.780", markers = "python_version >= \"3.9\" and python_version < \"3.11\""}, {version = ">=0.700", markers = "python_version >= \"3.8\" and python_version < \"3.9\""}, ] pytest = [ {version = ">=6.2", markers = "python_version >= \"3.10\""}, {version = ">=4.6", markers = "python_version >= \"3.6\" and python_version < \"3.10\""}, ] [[package]] name = "pytest-pycodestyle" version = "2.3.1" description = "pytest plugin to run pycodestyle" optional = false python-versions = "~=3.7" files = [ {file = "pytest-pycodestyle-2.3.1.tar.gz", hash = "sha256:2901327b8e6beab90298a9803074483efe560e191bef81d9e18119b141222830"}, ] [package.dependencies] py = "*" pycodestyle = "*" pytest = ">=7.0" [package.extras] tests = ["pytest-isort"] [[package]] name = "pytz" version = "2024.2" description = "World timezone definitions, modern and historical" optional = false python-versions = "*" files = [ {file = "pytz-2024.2-py2.py3-none-any.whl", hash = 
"sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, ] [[package]] name = "pyyaml" version = "6.0.2" description = "YAML parser and emitter for Python" optional = false python-versions = ">=3.8" files = [ {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, {file = 
"PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, {file = 
"PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, {file = 
"PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, ] [[package]] name = "requests" version = "2.32.3" description = "Python HTTP for Humans." optional = false python-versions = ">=3.8" files = [ {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, ] [package.dependencies] certifi = ">=2017.4.17" charset-normalizer = ">=2,<4" idna = ">=2.5,<4" urllib3 = ">=1.21.1,<3" [package.extras] socks = ["PySocks (>=1.5.6,!=1.5.7)"] use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] [[package]] name = "snowballstemmer" version = "2.2.0" description = "This package provides 29 stemmers for 28 languages generated from Snowball algorithms." 
optional = false python-versions = "*" files = [ {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, ] [[package]] name = "sphinx" version = "7.1.2" description = "Python documentation generator" optional = false python-versions = ">=3.8" files = [ {file = "sphinx-7.1.2-py3-none-any.whl", hash = "sha256:d170a81825b2fcacb6dfd5a0d7f578a053e45d3f2b153fecc948c37344eb4cbe"}, {file = "sphinx-7.1.2.tar.gz", hash = "sha256:780f4d32f1d7d1126576e0e5ecc19dc32ab76cd24e950228dcf7b1f6d3d9e22f"}, ] [package.dependencies] alabaster = ">=0.7,<0.8" babel = ">=2.9" colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} docutils = ">=0.18.1,<0.21" imagesize = ">=1.3" importlib-metadata = {version = ">=4.8", markers = "python_version < \"3.10\""} Jinja2 = ">=3.0" packaging = ">=21.0" Pygments = ">=2.13" requests = ">=2.25.0" snowballstemmer = ">=2.0" sphinxcontrib-applehelp = "*" sphinxcontrib-devhelp = "*" sphinxcontrib-htmlhelp = ">=2.0.0" sphinxcontrib-jsmath = "*" sphinxcontrib-qthelp = "*" sphinxcontrib-serializinghtml = ">=1.1.5" [package.extras] docs = ["sphinxcontrib-websupport"] lint = ["docutils-stubs", "flake8 (>=3.5.0)", "flake8-simplify", "isort", "mypy (>=0.990)", "ruff", "sphinx-lint", "types-requests"] test = ["cython", "filelock", "html5lib", "pytest (>=4.6)"] [[package]] name = "sphinxcontrib-applehelp" version = "1.0.4" description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" optional = false python-versions = ">=3.8" files = [ {file = "sphinxcontrib-applehelp-1.0.4.tar.gz", hash = "sha256:828f867945bbe39817c210a1abfd1bc4895c8b73fcaade56d45357a348a07d7e"}, {file = "sphinxcontrib_applehelp-1.0.4-py3-none-any.whl", hash = "sha256:29d341f67fb0f6f586b23ad80e072c8e6ad0b48417db2bde114a4c9746feb228"}, ] 
[package.extras] lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] name = "sphinxcontrib-devhelp" version = "1.0.2" description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp document." optional = false python-versions = ">=3.5" files = [ {file = "sphinxcontrib-devhelp-1.0.2.tar.gz", hash = "sha256:ff7f1afa7b9642e7060379360a67e9c41e8f3121f2ce9164266f61b9f4b338e4"}, {file = "sphinxcontrib_devhelp-1.0.2-py2.py3-none-any.whl", hash = "sha256:8165223f9a335cc1af7ffe1ed31d2871f325254c0423bc0c4c7cd1c1e4734a2e"}, ] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] name = "sphinxcontrib-htmlhelp" version = "2.0.1" description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" optional = false python-versions = ">=3.8" files = [ {file = "sphinxcontrib-htmlhelp-2.0.1.tar.gz", hash = "sha256:0cbdd302815330058422b98a113195c9249825d681e18f11e8b1f78a2f11efff"}, {file = "sphinxcontrib_htmlhelp-2.0.1-py3-none-any.whl", hash = "sha256:c38cb46dccf316c79de6e5515e1770414b797162b23cd3d06e67020e1d2a6903"}, ] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] test = ["html5lib", "pytest"] [[package]] name = "sphinxcontrib-jsmath" version = "1.0.1" description = "A sphinx extension which renders display math in HTML via JavaScript" optional = false python-versions = ">=3.5" files = [ {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, ] [package.extras] test = ["flake8", "mypy", "pytest"] [[package]] name = "sphinxcontrib-qthelp" version = "1.0.3" description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp document." 
optional = false python-versions = ">=3.5" files = [ {file = "sphinxcontrib-qthelp-1.0.3.tar.gz", hash = "sha256:4c33767ee058b70dba89a6fc5c1892c0d57a54be67ddd3e7875a18d14cba5a72"}, {file = "sphinxcontrib_qthelp-1.0.3-py2.py3-none-any.whl", hash = "sha256:bd9fc24bcb748a8d51fd4ecaade681350aa63009a347a8c14e637895444dfab6"}, ] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] name = "sphinxcontrib-serializinghtml" version = "1.1.5" description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)." optional = false python-versions = ">=3.5" files = [ {file = "sphinxcontrib-serializinghtml-1.1.5.tar.gz", hash = "sha256:aa5f6de5dfdf809ef505c4895e51ef5c9eac17d0f287933eb49ec495280b6952"}, {file = "sphinxcontrib_serializinghtml-1.1.5-py2.py3-none-any.whl", hash = "sha256:352a9a00ae864471d3a7ead8d7d79f5fc0b57e8b3f95e9867eb9eb28999b92fd"}, ] [package.extras] lint = ["docutils-stubs", "flake8", "mypy"] test = ["pytest"] [[package]] name = "tomli" version = "2.0.2" description = "A lil' TOML parser" optional = false python-versions = ">=3.8" files = [ {file = "tomli-2.0.2-py3-none-any.whl", hash = "sha256:2ebe24485c53d303f690b0ec092806a085f07af5a5aa1464f3931eec36caaa38"}, {file = "tomli-2.0.2.tar.gz", hash = "sha256:d46d457a85337051c36524bc5349dd91b1877838e2979ac5ced3e710ed8a60ed"}, ] [[package]] name = "types-pyyaml" version = "6.0.12.20240917" description = "Typing stubs for PyYAML" optional = false python-versions = ">=3.8" files = [ {file = "types-PyYAML-6.0.12.20240917.tar.gz", hash = "sha256:d1405a86f9576682234ef83bcb4e6fff7c9305c8b1fbad5e0bcd4f7dbdc9c587"}, {file = "types_PyYAML-6.0.12.20240917-py3-none-any.whl", hash = "sha256:392b267f1c0fe6022952462bf5d6523f31e37f6cea49b14cee7ad634b6301570"}, ] [[package]] name = "typing-extensions" version = "4.12.2" description = "Backported and Experimental Type Hints for Python 3.8+" optional = false python-versions = ">=3.8" files = 
[ {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] name = "urllib3" version = "2.2.3" description = "HTTP library with thread-safe connection pooling, file post, and more." optional = false python-versions = ">=3.8" files = [ {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, ] [package.extras] brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] h2 = ["h2 (>=4,<5)"] socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] zstd = ["zstandard (>=0.18.0)"] [[package]] name = "zipp" version = "3.20.2" description = "Backport of pathlib-compatible object wrapper for zip files" optional = false python-versions = ">=3.8" files = [ {file = "zipp-3.20.2-py3-none-any.whl", hash = "sha256:a817ac80d6cf4b23bf7f2828b7cabf326f15a001bea8b1f9b49631780ba28350"}, {file = "zipp-3.20.2.tar.gz", hash = "sha256:bc9eb26f4506fda01b81bcde0ca78103b6e62f991b381fec825435c836edbc29"}, ] [package.extras] check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] cover = ["pytest-cov"] doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] enabler = ["pytest-enabler (>=2.2)"] test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] type = ["pytest-mypy"] [metadata] lock-version = "2.0" python-versions = "^3.8" content-hash = "04f19e68fb715bd48c7d6e50ac574d3c3f8f18477805bdf94589b4969c416549" tinydb-4.8.2/pyproject.toml000066400000000000000000000034661470251210600157410ustar00rootroot00000000000000[tool.poetry] name = "tinydb" 
version = "4.8.2" description = "TinyDB is a tiny, document oriented database optimized for your happiness :)" authors = ["Markus Siemens "] license = "MIT" readme = "README.rst" homepage = "https://github.com/msiemens/tinydb" documentation = "https://tinydb.readthedocs.org/" keywords = ["database", "nosql"] classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: MIT License", "Topic :: Database", "Topic :: Database :: Database Engines/Servers", "Topic :: Utilities", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.8", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Operating System :: OS Independent", "Typing :: Typed", ] packages = [ { include = "tinydb" }, { include = "tests", format = "sdist" } ] [tool.poetry.urls] "Changelog" = "https://tinydb.readthedocs.io/en/latest/changelog.html" "Issues" = "https://github.com/msiemens/tinydb/issues" [tool.poetry.dependencies] python = "^3.8" [tool.poetry.dev-dependencies] pytest = "^7.2.0" pytest-pycodestyle = "^2.3.1" pytest-cov = "^4.0.0" pycodestyle = "^2.10.0" sphinx = "^7.0.0" coveralls = "^3.3.1" pyyaml = "^6.0" pytest-mypy = { version = "^0.10.2", markers = "platform_python_implementation != 'PyPy'" } types-PyYAML = "^6.0.0" [build-system] requires = ["poetry-core"] build-backend = "poetry.core.masonry.api" tinydb-4.8.2/pytest.ini000066400000000000000000000001061470251210600150420ustar00rootroot00000000000000[pytest] addopts=--verbose --cov-append --cov-report term --cov 
tinydbtinydb-4.8.2/tests/000077500000000000000000000000001470251210600141565ustar00rootroot00000000000000tinydb-4.8.2/tests/__init__.py000066400000000000000000000000001470251210600162550ustar00rootroot00000000000000tinydb-4.8.2/tests/conftest.py000066400000000000000000000011611470251210600163540ustar00rootroot00000000000000import os.path import tempfile from pathlib import Path import pytest # type: ignore from tinydb.middlewares import CachingMiddleware from tinydb.storages import MemoryStorage from tinydb import TinyDB, JSONStorage @pytest.fixture(params=['memory', 'json']) def db(request, tmp_path: Path): if request.param == 'json': db_ = TinyDB(tmp_path / 'test.db', storage=JSONStorage) else: db_ = TinyDB(storage=MemoryStorage) db_.drop_tables() db_.insert_multiple({'int': 1, 'char': c} for c in 'abc') yield db_ @pytest.fixture def storage(): return CachingMiddleware(MemoryStorage)() tinydb-4.8.2/tests/test_middlewares.py000066400000000000000000000045541470251210600200770ustar00rootroot00000000000000import os from tinydb import TinyDB from tinydb.middlewares import CachingMiddleware from tinydb.storages import MemoryStorage, JSONStorage doc = {'none': [None, None], 'int': 42, 'float': 3.1415899999999999, 'list': ['LITE', 'RES_ACID', 'SUS_DEXT'], 'dict': {'hp': 13, 'sp': 5}, 'bool': [True, False, True, False]} def test_caching(storage): # Write contents storage.write(doc) # Verify contents assert doc == storage.read() def test_caching_read(): db = TinyDB(storage=CachingMiddleware(MemoryStorage)) assert db.all() == [] def test_caching_write_many(storage): storage.WRITE_CACHE_SIZE = 3 # Storage should be still empty assert storage.memory is None # Write contents for x in range(2): storage.write(doc) assert storage.memory is None # Still cached storage.write(doc) # Verify contents: Cache should be emptied and written to storage assert storage.memory def test_caching_flush(storage): # Write contents for _ in range(CachingMiddleware.WRITE_CACHE_SIZE - 1): 
storage.write(doc) # Not yet flushed... assert storage.memory is None storage.write(doc) # Verify contents: Cache should be emptied and written to storage assert storage.memory def test_caching_flush_manually(storage): # Write contents storage.write(doc) storage.flush() # Verify contents: Cache should be emptied and written to storage assert storage.memory def test_caching_write(storage): # Write contents storage.write(doc) storage.close() # Verify contents: Cache should be emptied and written to storage assert storage.storage.memory def test_nested(): storage = CachingMiddleware(MemoryStorage) storage() # Initialization # Write contents storage.write(doc) # Verify contents assert doc == storage.read() def test_caching_json_write(tmpdir): path = str(tmpdir.join('test.db')) with TinyDB(path, storage=CachingMiddleware(JSONStorage)) as db: db.insert({'key': 'value'}) # Verify database filesize statinfo = os.stat(path) assert statinfo.st_size != 0 # Assert JSON file has been closed assert db._storage._handle.closed del db # Reopen database with TinyDB(path, storage=CachingMiddleware(JSONStorage)) as db: assert db.all() == [{'key': 'value'}] tinydb-4.8.2/tests/test_operations.py000066400000000000000000000017751470251210600177640ustar00rootroot00000000000000from tinydb import where from tinydb.operations import delete, increment, decrement, add, subtract, set def test_delete(db): db.update(delete('int'), where('char') == 'a') assert 'int' not in db.get(where('char') == 'a') def test_add_int(db): db.update(add('int', 5), where('char') == 'a') assert db.get(where('char') == 'a')['int'] == 6 def test_add_str(db): db.update(add('char', 'xyz'), where('char') == 'a') assert db.get(where('char') == 'axyz')['int'] == 1 def test_subtract(db): db.update(subtract('int', 5), where('char') == 'a') assert db.get(where('char') == 'a')['int'] == -4 def test_set(db): db.update(set('char', 'xyz'), where('char') == 'a') assert db.get(where('char') == 'xyz')['int'] == 1 def 
test_increment(db): db.update(increment('int'), where('char') == 'a') assert db.get(where('char') == 'a')['int'] == 2 def test_decrement(db): db.update(decrement('int'), where('char') == 'a') assert db.get(where('char') == 'a')['int'] == 0 tinydb-4.8.2/tests/test_queries.py000066400000000000000000000300531470251210600172450ustar00rootroot00000000000000import re import pytest from tinydb.queries import Query, where def test_no_path(): with pytest.raises(ValueError): _ = Query() == 2 def test_path_exists(): query = Query()['value'].exists() assert query == where('value').exists() assert query({'value': 1}) assert not query({'something': 1}) assert hash(query) assert hash(query) != hash(where('asd')) query = Query()['value']['val'].exists() assert query == where('value')['val'].exists() assert query({'value': {'val': 2}}) assert not query({'value': 1}) assert not query({'value': {'asd': 1}}) assert not query({'something': 1}) assert hash(query) assert hash(query) != hash(where('asd')) def test_path_and(): query = Query()['value'].exists() & (Query()['value'] == 5) assert query({'value': 5}) assert not query({'value': 10}) assert not query({'something': 1}) assert hash(query) assert hash(query) != hash(where('value')) def test_callable_in_path_with_map(): double = lambda x: x + x query = Query().value.map(double) == 10 assert query({'value': 5}) assert not query({'value': 10}) def test_callable_in_path_with_chain(): rekey = lambda x: {'y': x['a'], 'z': x['b']} query = Query().map(rekey).z == 10 assert query({'a': 5, 'b': 10}) def test_eq(): query = Query().value == 1 assert query({'value': 1}) assert not query({'value': 2}) assert hash(query) query = Query().value == [0, 1] assert query({'value': [0, 1]}) assert not query({'value': [0, 1, 2]}) assert hash(query) def test_ne(): query = Query().value != 1 assert query({'value': 0}) assert query({'value': 2}) assert not query({'value': 1}) assert hash(query) query = Query().value != [0, 1] assert query({'value': [0, 1, 
2]}) assert not query({'value': [0, 1]}) assert hash(query) def test_lt(): query = Query().value < 1 assert query({'value': 0}) assert not query({'value': 1}) assert not query({'value': 2}) assert hash(query) def test_le(): query = Query().value <= 1 assert query({'value': 0}) assert query({'value': 1}) assert not query({'value': 2}) assert hash(query) def test_gt(): query = Query().value > 1 assert query({'value': 2}) assert not query({'value': 1}) assert hash(query) def test_ge(): query = Query().value >= 1 assert query({'value': 2}) assert query({'value': 1}) assert not query({'value': 0}) assert hash(query) def test_or(): query = ( (Query().val1 == 1) | (Query().val2 == 2) ) assert query({'val1': 1}) assert query({'val2': 2}) assert query({'val1': 1, 'val2': 2}) assert not query({'val1': '', 'val2': ''}) assert hash(query) def test_and(): query = ( (Query().val1 == 1) & (Query().val2 == 2) ) assert query({'val1': 1, 'val2': 2}) assert not query({'val1': 1}) assert not query({'val2': 2}) assert not query({'val1': '', 'val2': ''}) assert hash(query) def test_not(): query = ~ (Query().val1 == 1) assert query({'val1': 5, 'val2': 2}) assert not query({'val1': 1, 'val2': 2}) assert hash(query) query = ( (~ (Query().val1 == 1)) & (Query().val2 == 2) ) assert query({'val1': '', 'val2': 2}) assert query({'val2': 2}) assert not query({'val1': 1, 'val2': 2}) assert not query({'val1': 1}) assert not query({'val1': '', 'val2': ''}) assert hash(query) def test_has_key(): query = Query().val3.exists() assert query({'val3': 1}) assert not query({'val1': 1, 'val2': 2}) assert hash(query) def test_regex(): query = Query().val.matches(r'\d{2}\.') assert query({'val': '42.'}) assert not query({'val': '44'}) assert not query({'val': 'ab.'}) assert not query({'val': 155}) assert not query({'val': False}) assert not query({'': None}) assert hash(query) query = Query().val.search(r'\d+') assert query({'val': 'ab3'}) assert not query({'val': 'abc'}) assert not query({'val': ''}) assert 
not query({'val': True}) assert not query({'': None}) assert hash(query) query = Query().val.search(r'JOHN', flags=re.IGNORECASE) assert query({'val': 'john'}) assert query({'val': 'xJohNx'}) assert not query({'val': 'JOH'}) assert not query({'val': 12}) assert not query({'': None}) assert hash(query) def test_custom(): def test(value): return value == 42 query = Query().val.test(test) assert query({'val': 42}) assert not query({'val': 40}) assert not query({'val': '44'}) assert not query({'': None}) assert hash(query) def in_list(value, l): return value in l query = Query().val.test(in_list, tuple([25, 35])) assert not query({'val': 20}) assert query({'val': 25}) assert not query({'val': 30}) assert query({'val': 35}) assert not query({'val': 36}) assert hash(query) def test_custom_with_params(): def test(value, minimum, maximum): return minimum <= value <= maximum query = Query().val.test(test, 1, 10) assert query({'val': 5}) assert not query({'val': 0}) assert not query({'val': 11}) assert not query({'': None}) assert hash(query) def test_any(): query = Query().followers.any(Query().name == 'don') assert query({'followers': [{'name': 'don'}, {'name': 'john'}]}) assert not query({'followers': 1}) assert not query({}) assert hash(query) query = Query().followers.any(Query().num.matches('\\d+')) assert query({'followers': [{'num': '12'}, {'num': 'abc'}]}) assert not query({'followers': [{'num': 'abc'}]}) assert hash(query) query = Query().followers.any(['don', 'jon']) assert query({'followers': ['don', 'greg', 'bill']}) assert not query({'followers': ['greg', 'bill']}) assert not query({}) assert hash(query) query = Query().followers.any([{'name': 'don'}, {'name': 'john'}]) assert query({'followers': [{'name': 'don'}, {'name': 'greg'}]}) assert not query({'followers': [{'name': 'greg'}]}) assert hash(query) def test_all(): query = Query().followers.all(Query().name == 'don') assert query({'followers': [{'name': 'don'}]}) assert not query({'followers': [{'name': 
'don'}, {'name': 'john'}]}) assert hash(query) query = Query().followers.all(Query().num.matches('\\d+')) assert query({'followers': [{'num': '123'}, {'num': '456'}]}) assert not query({'followers': [{'num': '123'}, {'num': 'abc'}]}) assert hash(query) query = Query().followers.all(['don', 'john']) assert query({'followers': ['don', 'john', 'greg']}) assert not query({'followers': ['don', 'greg']}) assert not query({}) assert hash(query) query = Query().followers.all([{'name': 'jane'}, {'name': 'john'}]) assert query({'followers': [{'name': 'john'}, {'name': 'jane'}]}) assert query({'followers': [{'name': 'john'}, {'name': 'jane'}, {'name': 'bob'}]}) assert not query({'followers': [{'name': 'john'}, {'name': 'bob'}]}) assert hash(query) def test_has(): query = Query().key1.key2.exists() str(query) # This used to cause a bug... assert query({'key1': {'key2': {'key3': 1}}}) assert query({'key1': {'key2': 1}}) assert not query({'key1': 3}) assert not query({'key1': {'key1': 1}}) assert not query({'key2': {'key1': 1}}) assert hash(query) query = Query().key1.key2 == 1 assert query({'key1': {'key2': 1}}) assert not query({'key1': {'key2': 2}}) assert hash(query) # Nested has: key exists query = Query().key1.key2.key3.exists() assert query({'key1': {'key2': {'key3': 1}}}) # Not a dict assert not query({'key1': 1}) assert not query({'key1': {'key2': 1}}) # Wrong key assert not query({'key1': {'key2': {'key0': 1}}}) assert not query({'key1': {'key0': {'key3': 1}}}) assert not query({'key0': {'key2': {'key3': 1}}}) assert hash(query) # Nested has: check for value query = Query().key1.key2.key3 == 1 assert query({'key1': {'key2': {'key3': 1}}}) assert not query({'key1': {'key2': {'key3': 0}}}) assert hash(query) # Test special methods: regex matches query = Query().key1.value.matches(r'\d+') assert query({'key1': {'value': '123'}}) assert not query({'key2': {'value': '123'}}) assert not query({'key2': {'value': 'abc'}}) assert hash(query) # Test special methods: regex 
contains query = Query().key1.value.search(r'\d+') assert query({'key1': {'value': 'a2c'}}) assert not query({'key2': {'value': 'a2c'}}) assert not query({'key2': {'value': 'abc'}}) assert hash(query) # Test special methods: nested has and regex matches query = Query().key1.x.y.matches(r'\d+') assert query({'key1': {'x': {'y': '123'}}}) assert not query({'key1': {'x': {'y': 'abc'}}}) assert hash(query) # Test special method: nested has and regex contains query = Query().key1.x.y.search(r'\d+') assert query({'key1': {'x': {'y': 'a2c'}}}) assert not query({'key1': {'x': {'y': 'abc'}}}) assert hash(query) # Test special methods: custom test query = Query().key1.int.test(lambda x: x == 3) assert query({'key1': {'int': 3}}) assert hash(query) def test_one_of(): query = Query().key1.one_of(['value 1', 'value 2']) assert query({'key1': 'value 1'}) assert query({'key1': 'value 2'}) assert not query({'key1': 'value 3'}) def test_hash(): d = { Query().key1 == 2: True, Query().key1.key2.key3.exists(): True, Query().key1.exists() & Query().key2.exists(): True, Query().key1.exists() | Query().key2.exists(): True, } assert (Query().key1 == 2) in d assert (Query().key1.key2.key3.exists()) in d assert (Query()['key1.key2'].key3.exists()) not in d # Commutative property of & and | assert (Query().key1.exists() & Query().key2.exists()) in d assert (Query().key2.exists() & Query().key1.exists()) in d assert (Query().key1.exists() | Query().key2.exists()) in d assert (Query().key2.exists() | Query().key1.exists()) in d def test_orm_usage(): data = {'name': 'John', 'age': {'year': 2000}} User = Query() query1 = User.name == 'John' query2 = User.age.year == 2000 assert query1(data) assert query2(data) def test_repr(): Fruit = Query() assert repr(Fruit) == "Query()" assert repr(Fruit.type == 'peach') == "QueryImpl('==', ('type',), 'peach')" def test_subclass(): # Test that a new query test method in a custom subclass is properly usable class MyQueryClass(Query): def equal_double(self, 
rhs): return self._generate_test( lambda value: value == rhs * 2, ('equal_double', self._path, rhs) ) query = MyQueryClass().val.equal_double('42') assert query({'val': '4242'}) assert not query({'val': '42'}) assert not query({'': None}) assert hash(query) def test_noop(): query = Query().noop() assert query({'foo': True}) assert query({'foo': None}) assert query({}) def test_equality(): q = Query() assert (q.foo == 2) != 0 assert (q.foo == 'yes') != '' def test_empty_query_error(): with pytest.raises(RuntimeError, match='Empty query was evaluated'): Query()({}) def test_fragment(): query = Query().fragment({'a': 4, 'b': True}) assert query({'a': 4, 'b': True, 'c': 'yes'}) assert not query({'a': 4, 'c': 'yes'}) assert not query({'b': True, 'c': 'yes'}) assert not query({'a': 5, 'b': True, 'c': 'yes'}) assert not query({'a': 4, 'b': 'no', 'c': 'yes'}) def test_fragment_with_path(): query = Query().doc.fragment({'a': 4, 'b': True}) assert query({'doc': {'a': 4, 'b': True, 'c': 'yes'}}) assert not query({'a': 4, 'b': True, 'c': 'yes'}) assert not query({'doc': {'a': 4, 'c': 'yes'}}) def test_get_item(): query = Query()['test'] == 1 assert query({'test': 1}) assert not query({'test': 0}) tinydb-4.8.2/tests/test_storages.py000066400000000000000000000145311470251210600174220ustar00rootroot00000000000000import json import os import random import tempfile import pytest from tinydb import TinyDB, where from tinydb.storages import JSONStorage, MemoryStorage, Storage, touch from tinydb.table import Document random.seed() doc = {'none': [None, None], 'int': 42, 'float': 3.1415899999999999, 'list': ['LITE', 'RES_ACID', 'SUS_DEXT'], 'dict': {'hp': 13, 'sp': 5}, 'bool': [True, False, True, False]} def test_json(tmpdir): # Write contents path = str(tmpdir.join('test.db')) storage = JSONStorage(path) storage.write(doc) # Verify contents assert doc == storage.read() storage.close() def test_json_kwargs(tmpdir): db_file = tmpdir.join('test.db') db = TinyDB(str(db_file), 
sort_keys=True, indent=4, separators=(',', ': ')) # Write contents db.insert({'b': 1}) db.insert({'a': 1}) assert db_file.read() == '''{ "_default": { "1": { "b": 1 }, "2": { "a": 1 } } }''' db.close() def test_json_readwrite(tmpdir): """ Regression test for issue #1 """ path = str(tmpdir.join('test.db')) # Create TinyDB instance db = TinyDB(path, storage=JSONStorage) item = {'name': 'A very long entry'} item2 = {'name': 'A short one'} def get(s): return db.get(where('name') == s) db.insert(item) assert get('A very long entry') == item db.remove(where('name') == 'A very long entry') assert get('A very long entry') is None db.insert(item2) assert get('A short one') == item2 db.remove(where('name') == 'A short one') assert get('A short one') is None db.close() def test_json_read(tmpdir): r"""Open a database only for reading""" path = str(tmpdir.join('test.db')) with pytest.raises(FileNotFoundError): db = TinyDB(path, storage=JSONStorage, access_mode='r') # Create small database db = TinyDB(path, storage=JSONStorage) db.insert({'b': 1}) db.insert({'a': 1}) db.close() # Access in read mode db = TinyDB(path, storage=JSONStorage, access_mode='r') assert db.get(where('a') == 1) == {'a': 1} # reading is fine with pytest.raises(IOError): db.insert({'c': 1}) # writing is not db.close() def test_create_dirs(): temp_dir = tempfile.gettempdir() while True: dname = os.path.join(temp_dir, str(random.getrandbits(20))) if not os.path.exists(dname): db_dir = dname db_file = os.path.join(db_dir, 'db.json') break with pytest.raises(IOError): JSONStorage(db_file) JSONStorage(db_file, create_dirs=True).close() assert os.path.exists(db_file) # Use create_dirs with already existing directory JSONStorage(db_file, create_dirs=True).close() assert os.path.exists(db_file) os.remove(db_file) os.rmdir(db_dir) def test_json_invalid_directory(): with pytest.raises(IOError): with TinyDB('/this/is/an/invalid/path/db.json', storage=JSONStorage): pass def test_in_memory(): # Write contents storage = 
MemoryStorage() storage.write(doc) # Verify contents assert doc == storage.read() # Test case for #21 other = MemoryStorage() other.write({}) assert other.read() != storage.read() def test_in_memory_close(): with TinyDB(storage=MemoryStorage) as db: db.insert({}) def test_custom(): # noinspection PyAbstractClass class MyStorage(Storage): pass with pytest.raises(TypeError): MyStorage() def test_read_once(): count = 0 # noinspection PyAbstractClass class MyStorage(Storage): def __init__(self): self.memory = None def read(self): nonlocal count count += 1 return self.memory def write(self, data): self.memory = data with TinyDB(storage=MyStorage) as db: assert count == 0 db.table(db.default_table_name) assert count == 0 db.all() assert count == 1 db.insert({'foo': 'bar'}) assert count == 3 # One for getting the next ID, one for the insert db.all() assert count == 4 def test_custom_with_exception(): class MyStorage(Storage): def read(self): pass def write(self, data): pass def __init__(self): raise ValueError() def close(self): raise RuntimeError() with pytest.raises(ValueError): with TinyDB(storage=MyStorage) as db: pass def test_yaml(tmpdir): """ :type tmpdir: py._path.local.LocalPath """ try: import yaml except ImportError: return pytest.skip('PyYAML not installed') def represent_doc(dumper, data): # Represent `Document` objects as their dict's string representation # which PyYAML understands return dumper.represent_data(dict(data)) yaml.add_representer(Document, represent_doc) class YAMLStorage(Storage): def __init__(self, filename): self.filename = filename touch(filename, False) def read(self): with open(self.filename) as handle: data = yaml.safe_load(handle.read()) return data def write(self, data): with open(self.filename, 'w') as handle: yaml.dump(data, handle) def close(self): pass # Write contents path = str(tmpdir.join('test.db')) db = TinyDB(path, storage=YAMLStorage) db.insert(doc) assert db.all() == [doc] db.update({'name': 'foo'}) assert '!' 
not in tmpdir.join('test.db').read() assert db.contains(where('name') == 'foo') assert len(db) == 1 def test_encoding(tmpdir): japanese_doc = {"Test": u"こんにちは世界"} path = str(tmpdir.join('test.db')) # cp936 is used for japanese encodings jap_storage = JSONStorage(path, encoding="cp936") jap_storage.write(japanese_doc) try: exception = json.decoder.JSONDecodeError except AttributeError: exception = ValueError with pytest.raises(exception): # cp037 is used for english encodings eng_storage = JSONStorage(path, encoding="cp037") eng_storage.read() jap_storage = JSONStorage(path, encoding="cp936") assert japanese_doc == jap_storage.read() tinydb-4.8.2/tests/test_tables.py000066400000000000000000000100151470251210600170360ustar00rootroot00000000000000import re import pytest from tinydb import where def test_next_id(db): db.truncate() assert db._get_next_id() == 1 assert db._get_next_id() == 2 assert db._get_next_id() == 3 def test_tables_list(db): db.table('table1').insert({'a': 1}) db.table('table2').insert({'a': 1}) assert db.tables() == {'_default', 'table1', 'table2'} def test_one_table(db): table1 = db.table('table1') table1.insert_multiple({'int': 1, 'char': c} for c in 'abc') assert table1.get(where('int') == 1)['char'] == 'a' assert table1.get(where('char') == 'b')['char'] == 'b' def test_multiple_tables(db): table1 = db.table('table1') table2 = db.table('table2') table3 = db.table('table3') table1.insert({'int': 1, 'char': 'a'}) table2.insert({'int': 1, 'char': 'b'}) table3.insert({'int': 1, 'char': 'c'}) assert table1.count(where('char') == 'a') == 1 assert table2.count(where('char') == 'b') == 1 assert table3.count(where('char') == 'c') == 1 db.drop_tables() assert len(table1) == 0 assert len(table2) == 0 assert len(table3) == 0 def test_caching(db): table1 = db.table('table1') table2 = db.table('table1') assert table1 is table2 def test_query_cache(db): query1 = where('int') == 1 assert db.count(query1) == 3 assert query1 in db._query_cache assert 
db.count(query1) == 3 assert query1 in db._query_cache query2 = where('int') == 0 assert db.count(query2) == 0 assert query2 in db._query_cache assert db.count(query2) == 0 assert query2 in db._query_cache def test_query_cache_with_mutable_callable(db): table = db.table('table') table.insert({'val': 5}) mutable = 5 increase = lambda x: x + mutable assert where('val').is_cacheable() assert not where('val').map(increase).is_cacheable() assert not (where('val').map(increase) == 10).is_cacheable() search = where('val').map(increase) == 10 assert table.count(search) == 1 # now `increase` would yield 15, not 10 mutable = 10 assert table.count(search) == 0 assert len(table._query_cache) == 0 def test_zero_cache_size(db): table = db.table('table3', cache_size=0) query = where('int') == 1 table.insert({'int': 1}) table.insert({'int': 1}) assert table.count(query) == 2 assert table.count(where('int') == 2) == 0 assert len(table._query_cache) == 0 def test_query_cache_size(db): table = db.table('table3', cache_size=1) query = where('int') == 1 table.insert({'int': 1}) table.insert({'int': 1}) assert table.count(query) == 2 assert table.count(where('int') == 2) == 0 assert len(table._query_cache) == 1 def test_lru_cache(db): # Test integration into TinyDB table = db.table('table3', cache_size=2) query = where('int') == 1 table.search(query) table.search(where('int') == 2) table.search(where('int') == 3) assert query not in table._query_cache table.remove(where('int') == 1) assert not table._query_cache.lru table.search(query) assert len(table._query_cache) == 1 table.clear_cache() assert len(table._query_cache) == 0 def test_table_is_iterable(db): table = db.table('table1') table.insert_multiple({'int': i} for i in range(3)) assert [r for r in table] == table.all() def test_table_name(db): name = 'table3' table = db.table(name) assert name == table.name with pytest.raises(AttributeError): table.name = 'foo' def test_table_repr(db): name = 'table4' table = db.table(name) assert 
re.match( r">", repr(table)) def test_truncate_table(db): db.truncate() assert db._get_next_id() == 1 def test_persist_table(db): db.table("persisted", persist_empty=True) assert "persisted" in db.tables() db.table("nonpersisted", persist_empty=False) assert "nonpersisted" not in db.tables() tinydb-4.8.2/tests/test_tinydb.py000066400000000000000000000432761470251210600170740ustar00rootroot00000000000000import re from collections.abc import Mapping import pytest from tinydb import TinyDB, where, Query from tinydb.middlewares import Middleware, CachingMiddleware from tinydb.storages import MemoryStorage, JSONStorage from tinydb.table import Document def test_drop_tables(db: TinyDB): db.drop_tables() db.insert({}) db.drop_tables() assert len(db) == 0 def test_all(db: TinyDB): db.drop_tables() for i in range(10): db.insert({}) assert len(db.all()) == 10 def test_insert(db: TinyDB): db.drop_tables() db.insert({'int': 1, 'char': 'a'}) assert db.count(where('int') == 1) == 1 db.drop_tables() db.insert({'int': 1, 'char': 'a'}) db.insert({'int': 1, 'char': 'b'}) db.insert({'int': 1, 'char': 'c'}) assert db.count(where('int') == 1) == 3 assert db.count(where('char') == 'a') == 1 def test_insert_ids(db: TinyDB): db.drop_tables() assert db.insert({'int': 1, 'char': 'a'}) == 1 assert db.insert({'int': 1, 'char': 'a'}) == 2 def test_insert_with_doc_id(db: TinyDB): db.drop_tables() assert db.insert({'int': 1, 'char': 'a'}) == 1 assert db.insert(Document({'int': 1, 'char': 'a'}, 12)) == 12 assert db.insert(Document({'int': 1, 'char': 'a'}, 77)) == 77 assert db.insert({'int': 1, 'char': 'a'}) == 78 def test_insert_with_duplicate_doc_id(db: TinyDB): db.drop_tables() assert db.insert({'int': 1, 'char': 'a'}) == 1 with pytest.raises(ValueError): db.insert(Document({'int': 1, 'char': 'a'}, 1)) def test_insert_multiple(db: TinyDB): db.drop_tables() assert not db.contains(where('int') == 1) # Insert multiple from list db.insert_multiple([{'int': 1, 'char': 'a'}, {'int': 1, 'char': 'b'}, 
{'int': 1, 'char': 'c'}]) assert db.count(where('int') == 1) == 3 assert db.count(where('char') == 'a') == 1 # Insert multiple from generator function def generator(): for j in range(10): yield {'int': j} db.drop_tables() db.insert_multiple(generator()) for i in range(10): assert db.count(where('int') == i) == 1 assert db.count(where('int').exists()) == 10 # Insert multiple from inline generator db.drop_tables() db.insert_multiple({'int': i} for i in range(10)) for i in range(10): assert db.count(where('int') == i) == 1 def test_insert_multiple_with_ids(db: TinyDB): db.drop_tables() # Insert multiple from list assert db.insert_multiple([{'int': 1, 'char': 'a'}, {'int': 1, 'char': 'b'}, {'int': 1, 'char': 'c'}]) == [1, 2, 3] def test_insert_multiple_with_doc_ids(db: TinyDB): db.drop_tables() assert db.insert_multiple([ Document({'int': 1, 'char': 'a'}, 12), Document({'int': 1, 'char': 'b'}, 77) ]) == [12, 77] assert db.get(doc_id=12) == {'int': 1, 'char': 'a'} assert db.get(doc_id=77) == {'int': 1, 'char': 'b'} with pytest.raises(ValueError): db.insert_multiple([Document({'int': 1, 'char': 'a'}, 12)]) def test_insert_invalid_type_raises_error(db: TinyDB): with pytest.raises(ValueError, match='Document is not a Mapping'): # object() as an example of a non-mapping-type db.insert(object()) # type: ignore def test_insert_valid_mapping_type(db: TinyDB): class CustomDocument(Mapping): def __init__(self, data): self.data = data def __getitem__(self, key): return self.data[key] def __iter__(self): return iter(self.data) def __len__(self): return len(self.data) db.drop_tables() db.insert(CustomDocument({'int': 1, 'char': 'a'})) assert db.count(where('int') == 1) == 1 def test_custom_mapping_type_with_json(tmpdir): class CustomDocument(Mapping): def __init__(self, data): self.data = data def __getitem__(self, key): return self.data[key] def __iter__(self): return iter(self.data) def __len__(self): return len(self.data) # Insert db = TinyDB(str(tmpdir.join('test.db'))) 
db.drop_tables() db.insert(CustomDocument({'int': 1, 'char': 'a'})) assert db.count(where('int') == 1) == 1 # Insert multiple db.insert_multiple([ CustomDocument({'int': 2, 'char': 'a'}), CustomDocument({'int': 3, 'char': 'a'}) ]) assert db.count(where('int') == 1) == 1 assert db.count(where('int') == 2) == 1 assert db.count(where('int') == 3) == 1 # Write back doc_id = db.get(where('int') == 3).doc_id db.update(CustomDocument({'int': 4, 'char': 'a'}), doc_ids=[doc_id]) assert db.count(where('int') == 3) == 0 assert db.count(where('int') == 4) == 1 def test_remove(db: TinyDB): db.remove(where('char') == 'b') assert len(db) == 2 assert db.count(where('int') == 1) == 2 def test_remove_all_fails(db: TinyDB): with pytest.raises(RuntimeError): db.remove() def test_remove_multiple(db: TinyDB): db.remove(where('int') == 1) assert len(db) == 0 def test_remove_ids(db: TinyDB): db.remove(doc_ids=[1, 2]) assert len(db) == 1 def test_remove_returns_ids(db: TinyDB): assert db.remove(where('char') == 'b') == [2] def test_update(db: TinyDB): assert len(db) == 3 db.update({'int': 2}, where('char') == 'a') assert db.count(where('int') == 2) == 1 assert db.count(where('int') == 1) == 2 def test_update_all(db: TinyDB): assert db.count(where('int') == 1) == 3 db.update({'newField': True}) assert db.count(where('newField') == True) == 3 # noqa def test_update_returns_ids(db: TinyDB): db.drop_tables() assert db.insert({'int': 1, 'char': 'a'}) == 1 assert db.insert({'int': 1, 'char': 'a'}) == 2 assert db.update({'char': 'b'}, where('int') == 1) == [1, 2] def test_update_transform(db: TinyDB): def increment(field): def transform(el): el[field] += 1 return transform def delete(field): def transform(el): del el[field] return transform assert db.count(where('int') == 1) == 3 db.update(increment('int'), where('char') == 'a') db.update(delete('char'), where('char') == 'a') assert db.count(where('int') == 2) == 1 assert db.count(where('char') == 'a') == 0 assert db.count(where('int') == 1) == 2 
def test_update_ids(db: TinyDB): db.update({'int': 2}, doc_ids=[1, 2]) assert db.count(where('int') == 2) == 2 def test_update_multiple(db: TinyDB): assert len(db) == 3 db.update_multiple([ ({'int': 2}, where('char') == 'a'), ({'int': 4}, where('char') == 'b'), ]) assert db.count(where('int') == 1) == 1 assert db.count(where('int') == 2) == 1 assert db.count(where('int') == 4) == 1 def test_update_multiple_operation(db: TinyDB): def increment(field): def transform(el): el[field] += 1 return transform assert db.count(where('int') == 1) == 3 db.update_multiple([ (increment('int'), where('char') == 'a'), (increment('int'), where('char') == 'b') ]) assert db.count(where('int') == 2) == 2 def test_upsert(db: TinyDB): assert len(db) == 3 # Document existing db.upsert({'int': 5}, where('char') == 'a') assert db.count(where('int') == 5) == 1 # Document missing assert db.upsert({'int': 9, 'char': 'x'}, where('char') == 'x') == [4] assert db.count(where('int') == 9) == 1 def test_upsert_by_id(db: TinyDB): assert len(db) == 3 # Single document existing extant_doc = Document({'char': 'v'}, doc_id=1) assert db.upsert(extant_doc) == [1] doc = db.get(where('char') == 'v') assert isinstance(doc, Document) assert doc is not None assert doc.doc_id == 1 assert len(db) == 3 # Single document missing missing_doc = Document({'int': 5, 'char': 'w'}, doc_id=5) assert db.upsert(missing_doc) == [5] doc = db.get(where('char') == 'w') assert isinstance(doc, Document) assert doc is not None assert doc.doc_id == 5 assert len(db) == 4 # Missing doc_id and condition with pytest.raises(ValueError, match=r"(?=.*\bdoc_id\b)(?=.*\bquery\b)"): db.upsert({'no_Document': 'no_query'}) # Make sure we didn't break anything assert db.insert({'check': '_next_id'}) == 6 def test_search(db: TinyDB): assert not db._query_cache assert len(db.search(where('int') == 1)) == 3 assert len(db._query_cache) == 1 assert len(db.search(where('int') == 1)) == 3 # Query result from cache def test_search_path(db: TinyDB): 
assert not db._query_cache assert len(db.search(where('int').exists())) == 3 assert len(db._query_cache) == 1 assert len(db.search(where('asd').exists())) == 0 assert len(db.search(where('int').exists())) == 3 # Query result from cache def test_search_no_results_cache(db: TinyDB): assert len(db.search(where('missing').exists())) == 0 assert len(db.search(where('missing').exists())) == 0 def test_get(db: TinyDB): item = db.get(where('char') == 'b') assert isinstance(item, Document) assert item is not None assert item['char'] == 'b' def test_get_ids(db: TinyDB): el = db.all()[0] assert db.get(doc_id=el.doc_id) == el assert db.get(doc_id=float('NaN')) is None # type: ignore def test_get_multiple_ids(db: TinyDB): el = db.all() assert db.get(doc_ids=[x.doc_id for x in el]) == el def test_get_invalid(db: TinyDB): with pytest.raises(RuntimeError): db.get() def test_count(db: TinyDB): assert db.count(where('int') == 1) == 3 assert db.count(where('char') == 'd') == 0 def test_contains(db: TinyDB): assert db.contains(where('int') == 1) assert not db.contains(where('int') == 0) def test_contains_ids(db: TinyDB): assert db.contains(doc_id=1) assert db.contains(doc_id=2) assert not db.contains(doc_id=88) def test_contains_invalid(db: TinyDB): with pytest.raises(RuntimeError): db.contains() def test_get_idempotent(db: TinyDB): u = db.get(where('int') == 1) z = db.get(where('int') == 1) assert u == z def test_multiple_dbs(): """ Regression test for issue #3 """ db1 = TinyDB(storage=MemoryStorage) db2 = TinyDB(storage=MemoryStorage) db1.insert({'int': 1, 'char': 'a'}) db1.insert({'int': 1, 'char': 'b'}) db1.insert({'int': 1, 'value': 5.0}) db2.insert({'color': 'blue', 'animal': 'turtle'}) assert len(db1) == 3 assert len(db2) == 1 def test_storage_closed_once(): class Storage: def __init__(self): self.closed = False def read(self): return {} def write(self, data): pass def close(self): assert not self.closed self.closed = True with TinyDB(storage=Storage) as db: db.close() del db # 
If db.close() is called during cleanup, the assertion will fail and throw # and exception def test_unique_ids(tmpdir): """ :type tmpdir: py._path.local.LocalPath """ path = str(tmpdir.join('db.json')) # Verify ids are unique when reopening the DB and inserting with TinyDB(path) as _db: _db.insert({'x': 1}) with TinyDB(path) as _db: _db.insert({'x': 1}) with TinyDB(path) as _db: data = _db.all() assert data[0].doc_id != data[1].doc_id # Verify ids stay unique when inserting/removing with TinyDB(path) as _db: _db.drop_tables() _db.insert_multiple({'x': i} for i in range(5)) _db.remove(where('x') == 2) assert len(_db) == 4 ids = [e.doc_id for e in _db.all()] assert len(ids) == len(set(ids)) def test_lastid_after_open(tmpdir): """ Regression test for issue #34 :type tmpdir: py._path.local.LocalPath """ NUM = 100 path = str(tmpdir.join('db.json')) with TinyDB(path) as _db: _db.insert_multiple({'i': i} for i in range(NUM)) with TinyDB(path) as _db: assert _db._get_next_id() - 1 == NUM def test_doc_ids_json(tmpdir): """ Regression test for issue #45 """ path = str(tmpdir.join('db.json')) with TinyDB(path) as _db: _db.drop_tables() assert _db.insert({'int': 1, 'char': 'a'}) == 1 assert _db.insert({'int': 1, 'char': 'a'}) == 2 _db.drop_tables() assert _db.insert_multiple([{'int': 1, 'char': 'a'}, {'int': 1, 'char': 'b'}, {'int': 1, 'char': 'c'}]) == [1, 2, 3] assert _db.contains(doc_id=1) assert _db.contains(doc_id=2) assert not _db.contains(doc_id=88) _db.update({'int': 2}, doc_ids=[1, 2]) assert _db.count(where('int') == 2) == 2 el = _db.all()[0] assert _db.get(doc_id=el.doc_id) == el assert _db.get(doc_id=float('NaN')) is None _db.remove(doc_ids=[1, 2]) assert len(_db) == 1 def test_insert_string(tmpdir): path = str(tmpdir.join('db.json')) with TinyDB(path) as _db: data = [{'int': 1}, {'int': 2}] _db.insert_multiple(data) with pytest.raises(ValueError): _db.insert([1, 2, 3]) # Fails with pytest.raises(ValueError): _db.insert({'bark'}) # Fails assert data == _db.all() 
_db.insert({'int': 3}) # Does not fail def test_insert_invalid_dict(tmpdir): path = str(tmpdir.join('db.json')) with TinyDB(path) as _db: data = [{'int': 1}, {'int': 2}] _db.insert_multiple(data) with pytest.raises(TypeError): _db.insert({'int': _db}) # Fails assert data == _db.all() _db.insert({'int': 3}) # Does not fail def test_gc(tmpdir): # See https://github.com/msiemens/tinydb/issues/92 path = str(tmpdir.join('db.json')) db = TinyDB(path) table = db.table('foo') table.insert({'something': 'else'}) table.insert({'int': 13}) assert len(table.search(where('int') == 13)) == 1 assert table.all() == [{'something': 'else'}, {'int': 13}] db.close() def test_drop_table(): db = TinyDB(storage=MemoryStorage) default_table_name = db.table(db.default_table_name).name assert [] == list(db.tables()) db.drop_table(default_table_name) db.insert({'a': 1}) assert [default_table_name] == list(db.tables()) db.drop_table(default_table_name) assert [] == list(db.tables()) table_name = 'some-other-table' db = TinyDB(storage=MemoryStorage) db.table(table_name).insert({'a': 1}) assert {table_name} == db.tables() db.drop_table(table_name) assert set() == db.tables() assert table_name not in db._tables db.drop_table('non-existent-table-name') assert set() == db.tables() def test_empty_write(tmpdir): path = str(tmpdir.join('db.json')) class ReadOnlyMiddleware(Middleware): def write(self, data): raise AssertionError('No write for unchanged db') TinyDB(path).close() TinyDB(path, storage=ReadOnlyMiddleware(JSONStorage)).close() def test_query_cache(): db = TinyDB(storage=MemoryStorage) db.insert_multiple([ {'name': 'foo', 'value': 42}, {'name': 'bar', 'value': -1337} ]) query = where('value') > 0 results = db.search(query) assert len(results) == 1 # Modify the db instance to not return any results when # bypassing the query cache db._tables[db.table(db.default_table_name).name]._read_table = lambda: {} # Make sure we got an independent copy of the result list results.extend([1]) assert 
db.search(query) == [{'name': 'foo', 'value': 42}] def test_tinydb_is_iterable(db: TinyDB): assert [r for r in db] == db.all() def test_repr(tmpdir): path = str(tmpdir.join('db.json')) db = TinyDB(path) db.insert({'a': 1}) assert re.match( r"", repr(db)) def test_delete(tmpdir): path = str(tmpdir.join('db.json')) db = TinyDB(path, ensure_ascii=False) q = Query() db.insert({'network': {'id': '114', 'name': 'ok', 'rpc': 'dac', 'ticker': 'mkay'}}) assert db.search(q.network.id == '114') == [ {'network': {'id': '114', 'name': 'ok', 'rpc': 'dac', 'ticker': 'mkay'}} ] db.remove(q.network.id == '114') assert db.search(q.network.id == '114') == [] def test_insert_multiple_with_single_dict(db: TinyDB): with pytest.raises(ValueError): d = {'first': 'John', 'last': 'smith'} db.insert_multiple(d) # type: ignore db.close() def test_access_storage(): assert isinstance(TinyDB(storage=MemoryStorage).storage, MemoryStorage) assert isinstance(TinyDB(storage=CachingMiddleware(MemoryStorage)).storage, CachingMiddleware) def test_empty_db_len(): db = TinyDB(storage=MemoryStorage) assert len(db) == 0 def test_insert_on_existing_db(tmpdir): path = str(tmpdir.join('db.json')) db = TinyDB(path, ensure_ascii=False) db.insert({'foo': 'bar'}) assert len(db) == 1 db.close() db = TinyDB(path, ensure_ascii=False) db.insert({'foo': 'bar'}) db.insert({'foo': 'bar'}) assert len(db) == 3 def test_storage_access(): db = TinyDB(storage=MemoryStorage) assert isinstance(db.storage, MemoryStorage) def test_lambda_query(): db = TinyDB(storage=MemoryStorage) db.insert({'foo': 'bar'}) query = lambda doc: doc.get('foo') == 'bar' query.is_cacheable = lambda: False assert db.search(query) == [{'foo': 'bar'}] assert not db._query_cache tinydb-4.8.2/tests/test_utils.py000066400000000000000000000043211470251210600167270ustar00rootroot00000000000000import pytest from tinydb.utils import LRUCache, freeze, FrozenDict def test_lru_cache(): cache = LRUCache(capacity=3) cache["a"] = 1 cache["b"] = 2 cache["c"] = 3 _ = 
cache["a"] # move to front in lru queue cache["d"] = 4 # move oldest item out of lru queue try: _ = cache['f'] except KeyError: pass assert cache.lru == ["c", "a", "d"] def test_lru_cache_set_multiple(): cache = LRUCache(capacity=3) cache["a"] = 1 cache["a"] = 2 cache["a"] = 3 cache["a"] = 4 assert cache.lru == ["a"] def test_lru_cache_set_update(): cache = LRUCache(capacity=3) cache["a"] = 1 cache["a"] = 2 assert cache["a"] == 2 def test_lru_cache_get(): cache = LRUCache(capacity=3) cache["a"] = 1 cache["b"] = 1 cache["c"] = 1 cache.get("a") cache["d"] = 4 assert cache.lru == ["c", "a", "d"] def test_lru_cache_delete(): cache = LRUCache(capacity=3) cache["a"] = 1 cache["b"] = 2 del cache["a"] try: del cache['f'] except KeyError: pass assert cache.lru == ["b"] def test_lru_cache_clear(): cache = LRUCache(capacity=3) cache["a"] = 1 cache["b"] = 2 cache.clear() assert cache.lru == [] def test_lru_cache_unlimited(): cache = LRUCache() for i in range(100): cache[i] = i assert len(cache.lru) == 100 def test_lru_cache_unlimited_explicit(): cache = LRUCache(capacity=None) for i in range(100): cache[i] = i assert len(cache.lru) == 100 def test_lru_cache_iteration_works(): cache = LRUCache() count = 0 for _ in cache: assert False, 'there should be no elements in the cache' assert count == 0 def test_freeze(): frozen = freeze([0, 1, 2, {'a': [1, 2, 3]}, {1, 2}]) assert isinstance(frozen, tuple) assert isinstance(frozen[3], FrozenDict) assert isinstance(frozen[3]['a'], tuple) assert isinstance(frozen[4], frozenset) with pytest.raises(TypeError): frozen[0] = 10 with pytest.raises(TypeError): frozen[3]['a'] = 10 with pytest.raises(TypeError): frozen[3].pop('a') with pytest.raises(TypeError): frozen[3].update({'a': 9}) tinydb-4.8.2/tinydb/000077500000000000000000000000001470251210600143055ustar00rootroot00000000000000tinydb-4.8.2/tinydb/__init__.py000066400000000000000000000016531470251210600164230ustar00rootroot00000000000000""" TinyDB is a tiny, document oriented database 
optimized for your happiness :) TinyDB stores different types of Python data types using a configurable storage mechanism. It comes with a syntax for querying data and storing data in multiple tables. .. codeauthor:: Markus Siemens Usage example: >>> from tinydb import TinyDB, where >>> from tinydb.storages import MemoryStorage >>> db = TinyDB(storage=MemoryStorage) >>> db.insert({'data': 5}) # Insert into '_default' table >>> db.search(where('data') == 5) [{'data': 5, '_id': 1}] >>> # Now let's create a new table >>> tbl = db.table('our_table') >>> for i in range(10): ... tbl.insert({'data': i}) ... >>> len(tbl.search(where('data') < 5)) 5 """ from .queries import Query, where from .storages import Storage, JSONStorage from .database import TinyDB from .version import __version__ __all__ = ('TinyDB', 'Storage', 'JSONStorage', 'Query', 'where') tinydb-4.8.2/tinydb/database.py000066400000000000000000000210101470251210600164150ustar00rootroot00000000000000""" This module contains the main component of TinyDB: the database. """ from typing import Dict, Iterator, Set, Type from . import JSONStorage from .storages import Storage from .table import Table, Document from .utils import with_typehint # The table's base class. This is used to add type hinting from the Table # class to TinyDB. Currently, this supports PyCharm, Pyright/VS Code and MyPy. TableBase: Type[Table] = with_typehint(Table) class TinyDB(TableBase): """ The main class of TinyDB. The ``TinyDB`` class is responsible for creating the storage class instance that will store this database's documents, managing the database tables as well as providing access to the default table. For table management, a simple ``dict`` is used that stores the table class instances accessible using their table name. Default table access is provided by forwarding all unknown method calls and property access operations to the default table by implementing ``__getattr__``. 
When creating a new instance, all arguments and keyword arguments (except for ``storage``) will be passed to the storage class that is provided. If no storage class is specified, :class:`~tinydb.storages.JSONStorage` will be used. .. admonition:: Customization For customization, the following class variables can be set: - ``table_class`` defines the class that is used to create tables, - ``default_table_name`` defines the name of the default table, and - ``default_storage_class`` will define the class that will be used to create storage instances if no other storage is passed. .. versionadded:: 4.0 .. admonition:: Data Storage Model Data is stored using a storage class that provides persistence for a ``dict`` instance. This ``dict`` contains all tables and their data. The data is modelled like this:: { 'table1': { 0: {document...}, 1: {document...}, }, 'table2': { ... } } Each entry in this ``dict`` uses the table name as its key and a ``dict`` of documents as its value. The document ``dict`` contains document IDs as keys and the documents themselves as values. :param storage: The class of the storage to use. Will be initialized with ``args`` and ``kwargs``. """ #: The class that will be used to create table instances #: #: .. versionadded:: 4.0 table_class = Table #: The name of the default table #: #: .. versionadded:: 4.0 default_table_name = '_default' #: The class that will be used by default to create storage instances #: #: .. versionadded:: 4.0 default_storage_class = JSONStorage def __init__(self, *args, **kwargs) -> None: """ Create a new instance of TinyDB. 
""" storage = kwargs.pop('storage', self.default_storage_class) # Prepare the storage self._storage: Storage = storage(*args, **kwargs) self._opened = True self._tables: Dict[str, Table] = {} def __repr__(self): args = [ 'tables={}'.format(list(self.tables())), 'tables_count={}'.format(len(self.tables())), 'default_table_documents_count={}'.format(self.__len__()), 'all_tables_documents_count={}'.format( ['{}={}'.format(table, len(self.table(table))) for table in self.tables()]), ] return '<{} {}>'.format(type(self).__name__, ', '.join(args)) def table(self, name: str, **kwargs) -> Table: """ Get access to a specific table. If the table hasn't been accessed yet, a new table instance will be created using the :attr:`~tinydb.database.TinyDB.table_class` class. Otherwise, the previously created table instance will be returned. All further options besides the name are passed to the table class which by default is :class:`~tinydb.table.Table`. Check its documentation for further parameters you can pass. :param name: The name of the table. :param kwargs: Keyword arguments to pass to the table class constructor """ if name in self._tables: return self._tables[name] table = self.table_class(self.storage, name, **kwargs) self._tables[name] = table return table def tables(self) -> Set[str]: """ Get the names of all tables in the database. :returns: a set of table names """ # TinyDB stores data as a dict of tables like this: # # { # '_default': { # 0: {document...}, # 1: {document...}, # }, # 'table1': { # ... # } # } # # To get a set of table names, we thus construct a set of this main # dict which returns a set of the dict keys which are the table names. # # Storage.read() may return ``None`` if the database file is empty, # so we need to consider this case to and return an empty set in this # case. return set(self.storage.read() or {}) def drop_tables(self) -> None: """ Drop all tables from the database. 
**CANNOT BE REVERSED!** """ # We drop all tables from this database by writing an empty dict # to the storage thereby returning to the initial state with no tables. self.storage.write({}) # After that we need to remember to empty the ``_tables`` dict, so we'll # create new table instances when a table is accessed again. self._tables.clear() def drop_table(self, name: str) -> None: """ Drop a specific table from the database. **CANNOT BE REVERSED!** :param name: The name of the table to drop. """ # If the table is currently opened, we need to forget the table class # instance if name in self._tables: del self._tables[name] data = self.storage.read() # The database is uninitialized, there's nothing to do if data is None: return # The table does not exist, there's nothing to do if name not in data: return # Remove the table from the data dict del data[name] # Store the updated data back to the storage self.storage.write(data) @property def storage(self) -> Storage: """ Get the storage instance used for this TinyDB instance. :return: This instance's storage :rtype: Storage """ return self._storage def close(self) -> None: """ Close the database. This may be needed if the storage instance used for this database needs to perform cleanup operations like closing file handles. To ensure this method is called, the TinyDB instance can be used as a context manager:: with TinyDB('data.json') as db: db.insert({'foo': 'bar'}) Upon leaving this context, the ``close`` method will be called. """ self._opened = False self.storage.close() def __enter__(self): """ Use the database as a context manager. Using the database as a context manager ensures that the :meth:`~tinydb.database.TinyDB.close` method is called upon leaving the context. :return: The current instance """ return self def __exit__(self, *args): """ Close the storage instance when leaving a context. 
""" if self._opened: self.close() def __getattr__(self, name): """ Forward all unknown attribute calls to the default table instance. """ return getattr(self.table(self.default_table_name), name) # Here we forward magic methods to the default table instance. These are # not handled by __getattr__ so we need to forward them manually here def __len__(self): """ Get the total number of documents in the default table. >>> db = TinyDB('db.json') >>> len(db) 0 """ return len(self.table(self.default_table_name)) def __iter__(self) -> Iterator[Document]: """ Return an iterator for the default table's documents. """ return iter(self.table(self.default_table_name)) tinydb-4.8.2/tinydb/middlewares.py000066400000000000000000000075461470251210600171730ustar00rootroot00000000000000""" Contains the :class:`base class ` for middlewares and implementations. """ from typing import Optional from tinydb import Storage class Middleware: """ The base class for all Middlewares. Middlewares hook into the read/write process of TinyDB allowing you to extend the behaviour by adding caching, logging, ... Your middleware's ``__init__`` method has to call the parent class constructor so the middleware chain can be configured properly. """ def __init__(self, storage_cls) -> None: self._storage_cls = storage_cls self.storage: Storage = None # type: ignore def __call__(self, *args, **kwargs): """ Create the storage instance and store it as self.storage. Usually a user creates a new TinyDB instance like this:: TinyDB(storage=StorageClass) The storage keyword argument is used by TinyDB this way:: self.storage = storage(*args, **kwargs) As we can see, ``storage(...)`` runs the constructor and returns the new storage instance. Using Middlewares, the user will call:: The 'real' storage class v TinyDB(storage=Middleware(StorageClass)) ^ Already an instance! 
So, when running ``self.storage = storage(*args, **kwargs)`` Python now will call ``__call__`` and TinyDB will expect the return value to be the storage (or Middleware) instance. Returning the instance is simple, but we also got the underlying (*real*) StorageClass as an __init__ argument that still is not an instance. So, we initialize it in __call__ forwarding any arguments we receive from TinyDB (``TinyDB(arg1, kwarg1=value, storage=...)``). In case of nested Middlewares, calling the instance as if it was a class results in calling ``__call__`` what initializes the next nested Middleware that itself will initialize the next Middleware and so on. """ self.storage = self._storage_cls(*args, **kwargs) return self def __getattr__(self, name): """ Forward all unknown attribute calls to the underlying storage, so we remain as transparent as possible. """ return getattr(self.__dict__['storage'], name) class CachingMiddleware(Middleware): """ Add some caching to TinyDB. This Middleware aims to improve the performance of TinyDB by writing only the last DB state every :attr:`WRITE_CACHE_SIZE` time and reading always from cache. """ #: The number of write operations to cache before writing to disc WRITE_CACHE_SIZE = 1000 def __init__(self, storage_cls): # Initialize the parent constructor super().__init__(storage_cls) # Prepare the cache self.cache = None self._cache_modified_count = 0 def read(self): if self.cache is None: # Empty cache: read from the storage self.cache = self.storage.read() # Return the cached data return self.cache def write(self, data): # Store data in cache self.cache = data self._cache_modified_count += 1 # Check if we need to flush the cache if self._cache_modified_count >= self.WRITE_CACHE_SIZE: self.flush() def flush(self): """ Flush all unwritten data to disk. 
""" if self._cache_modified_count > 0: # Force-flush the cache by writing the data to the storage self.storage.write(self.cache) self._cache_modified_count = 0 def close(self): # Flush potentially unwritten data self.flush() # Let the storage clean up too self.storage.close() tinydb-4.8.2/tinydb/mypy_plugin.py000066400000000000000000000020561470251210600172360ustar00rootroot00000000000000from typing import TypeVar, Optional, Callable, Dict from mypy.nodes import NameExpr from mypy.options import Options from mypy.plugin import Plugin, DynamicClassDefContext T = TypeVar('T') CB = Optional[Callable[[T], None]] DynamicClassDef = DynamicClassDefContext class TinyDBPlugin(Plugin): def __init__(self, options: Options): super().__init__(options) self.named_placeholders: Dict[str, str] = {} def get_dynamic_class_hook(self, fullname: str) -> CB[DynamicClassDef]: if fullname == 'tinydb.utils.with_typehint': def hook(ctx: DynamicClassDefContext): klass = ctx.call.args[0] assert isinstance(klass, NameExpr) type_name = klass.fullname assert type_name is not None qualified = self.lookup_fully_qualified(type_name) assert qualified is not None ctx.api.add_symbol_table_node(ctx.name, qualified) return hook return None def plugin(_version: str): return TinyDBPlugin tinydb-4.8.2/tinydb/operations.py000066400000000000000000000022031470251210600170370ustar00rootroot00000000000000""" A collection of update operations for TinyDB. They are used for updates like this: >>> db.update(delete('foo'), where('foo') == 2) This would delete the ``foo`` field from all documents where ``foo`` equals 2. """ def delete(field): """ Delete a given field from the document. """ def transform(doc): del doc[field] return transform def add(field, n): """ Add ``n`` to a given field in the document. """ def transform(doc): doc[field] += n return transform def subtract(field, n): """ Subtract ``n`` to a given field in the document. 
""" def transform(doc): doc[field] -= n return transform def set(field, val): """ Set a given field to ``val``. """ def transform(doc): doc[field] = val return transform def increment(field): """ Increment a given field in the document by 1. """ def transform(doc): doc[field] += 1 return transform def decrement(field): """ Decrement a given field in the document by 1. """ def transform(doc): doc[field] -= 1 return transform tinydb-4.8.2/tinydb/py.typed000066400000000000000000000000001470251210600157720ustar00rootroot00000000000000tinydb-4.8.2/tinydb/queries.py000066400000000000000000000370361470251210600163450ustar00rootroot00000000000000""" Contains the querying interface. Starting with :class:`~tinydb.queries.Query` you can construct complex queries: >>> ((where('f1') == 5) & (where('f2') != 2)) | where('s').matches(r'^\\w+$') (('f1' == 5) and ('f2' != 2)) or ('s' ~= ^\\w+$ ) Queries are executed by using the ``__call__``: >>> q = where('val') == 5 >>> q({'val': 5}) True >>> q({'val': 1}) False """ import re from typing import Mapping, Tuple, Callable, Any, Union, List, Optional, Protocol from .utils import freeze __all__ = ('Query', 'QueryLike', 'where') def is_sequence(obj): return hasattr(obj, '__iter__') class QueryLike(Protocol): """ A typing protocol that acts like a query. Something that we use as a query must have two properties: 1. It must be callable, accepting a `Mapping` object and returning a boolean that indicates whether the value matches the query, and 2. it must have a stable hash that will be used for query caching. In addition, to mark a query as non-cacheable (e.g. if it involves some remote lookup) it needs to have a method called ``is_cacheable`` that returns ``False``. This query protocol is used to make MyPy correctly support the query pattern that TinyDB uses. See also https://mypy.readthedocs.io/en/stable/protocols.html#simple-user-defined-protocols """ def __call__(self, value: Mapping) -> bool: ... def __hash__(self) -> int: ... 
class QueryInstance: """ A query instance. This is the object on which the actual query operations are performed. The :class:`~tinydb.queries.Query` class acts like a query builder and generates :class:`~tinydb.queries.QueryInstance` objects which will evaluate their query against a given document when called. Query instances can be combined using logical OR and AND and inverted using logical NOT. In order to be usable in a query cache, a query needs to have a stable hash value with the same query always returning the same hash. That way a query instance can be used as a key in a dictionary. """ def __init__(self, test: Callable[[Mapping], bool], hashval: Optional[Tuple]): self._test = test self._hash = hashval def is_cacheable(self) -> bool: return self._hash is not None def __call__(self, value: Mapping) -> bool: """ Evaluate the query to check if it matches a specified value. :param value: The value to check. :return: Whether the value matches this query. """ return self._test(value) def __hash__(self) -> int: # We calculate the query hash by using the ``hashval`` object which # describes this query uniquely, so we can calculate a stable hash # value by simply hashing it return hash(self._hash) def __repr__(self): return 'QueryImpl{}'.format(self._hash) def __eq__(self, other: object): if isinstance(other, QueryInstance): return self._hash == other._hash return False # --- Query modifiers ----------------------------------------------------- def __and__(self, other: 'QueryInstance') -> 'QueryInstance': # We use a frozenset for the hash as the AND operation is commutative # (a & b == b & a) and the frozenset does not consider the order of # elements if self.is_cacheable() and other.is_cacheable(): hashval = ('and', frozenset([self._hash, other._hash])) else: hashval = None return QueryInstance(lambda value: self(value) and other(value), hashval) def __or__(self, other: 'QueryInstance') -> 'QueryInstance': # We use a frozenset for the hash as the OR operation is 
commutative # (a | b == b | a) and the frozenset does not consider the order of # elements if self.is_cacheable() and other.is_cacheable(): hashval = ('or', frozenset([self._hash, other._hash])) else: hashval = None return QueryInstance(lambda value: self(value) or other(value), hashval) def __invert__(self) -> 'QueryInstance': hashval = ('not', self._hash) if self.is_cacheable() else None return QueryInstance(lambda value: not self(value), hashval) class Query(QueryInstance): """ TinyDB Queries. Allows building queries for TinyDB databases. There are two main ways of using queries: 1) ORM-like usage: >>> User = Query() >>> db.search(User.name == 'John Doe') >>> db.search(User['logged-in'] == True) 2) Classical usage: >>> db.search(where('value') == True) Note that ``where(...)`` is a shorthand for ``Query(...)`` allowing for a more fluent syntax. Besides the methods documented here you can combine queries using the binary AND and OR operators: >>> # Binary AND: >>> db.search((where('field1').exists()) & (where('field2') == 5)) >>> # Binary OR: >>> db.search((where('field1').exists()) | (where('field2') == 5)) Queries are executed by calling the resulting object. They expect to get the document to test as the first argument and return ``True`` or ``False`` depending on whether the documents match the query or not. """ def __init__(self) -> None: # The current path of fields to access when evaluating the object self._path: Tuple[Union[str, Callable], ...] 
= () # Prevent empty queries to be evaluated def notest(_): raise RuntimeError('Empty query was evaluated') super().__init__( test=notest, hashval=(None,) ) def __repr__(self): return '{}()'.format(type(self).__name__) def __hash__(self): return super().__hash__() def __getattr__(self, item: str): # Generate a new query object with the new query path # We use type(self) to get the class of the current query in case # someone uses a subclass of ``Query`` query = type(self)() # Now we add the accessed item to the query path ... query._path = self._path + (item,) # ... and update the query hash query._hash = ('path', query._path) if self.is_cacheable() else None return query def __getitem__(self, item: str): # A different syntax for ``__getattr__`` # We cannot call ``getattr(item)`` here as it would try to resolve # the name as a method name first, only then call our ``__getattr__`` # method. By calling ``__getattr__`` directly, we make sure that # calling e.g. ``Query()['test']`` will always generate a query for a # document's ``test`` field instead of returning a reference to the # ``Query.test`` method return self.__getattr__(item) def _generate_test( self, test: Callable[[Any], bool], hashval: Tuple, allow_empty_path: bool = False ) -> QueryInstance: """ Generate a query based on a test function that first resolves the query path. :param test: The test the query executes. :param hashval: The hash of the query. :return: A :class:`~tinydb.queries.QueryInstance` object """ if not self._path and not allow_empty_path: raise ValueError('Query has no path') def runner(value): try: # Resolve the path for part in self._path: if isinstance(part, str): value = value[part] else: value = part(value) except (KeyError, TypeError): return False else: # Perform the specified test return test(value) return QueryInstance( lambda value: runner(value), (hashval if self.is_cacheable() else None) ) def __eq__(self, rhs: Any): """ Test a dict value for equality. 
>>> Query().f1 == 42 :param rhs: The value to compare against """ return self._generate_test( lambda value: value == rhs, ('==', self._path, freeze(rhs)) ) def __ne__(self, rhs: Any): """ Test a dict value for inequality. >>> Query().f1 != 42 :param rhs: The value to compare against """ return self._generate_test( lambda value: value != rhs, ('!=', self._path, freeze(rhs)) ) def __lt__(self, rhs: Any) -> QueryInstance: """ Test a dict value for being lower than another value. >>> Query().f1 < 42 :param rhs: The value to compare against """ return self._generate_test( lambda value: value < rhs, ('<', self._path, rhs) ) def __le__(self, rhs: Any) -> QueryInstance: """ Test a dict value for being lower than or equal to another value. >>> where('f1') <= 42 :param rhs: The value to compare against """ return self._generate_test( lambda value: value <= rhs, ('<=', self._path, rhs) ) def __gt__(self, rhs: Any) -> QueryInstance: """ Test a dict value for being greater than another value. >>> Query().f1 > 42 :param rhs: The value to compare against """ return self._generate_test( lambda value: value > rhs, ('>', self._path, rhs) ) def __ge__(self, rhs: Any) -> QueryInstance: """ Test a dict value for being greater than or equal to another value. >>> Query().f1 >= 42 :param rhs: The value to compare against """ return self._generate_test( lambda value: value >= rhs, ('>=', self._path, rhs) ) def exists(self) -> QueryInstance: """ Test for a dict where a provided key exists. >>> Query().f1.exists() """ return self._generate_test( lambda _: True, ('exists', self._path) ) def matches(self, regex: str, flags: int = 0) -> QueryInstance: """ Run a regex test against a dict value (whole string has to match). 
>>> Query().f1.matches(r'^\\w+$') :param regex: The regular expression to use for matching :param flags: regex flags to pass to ``re.match`` """ def test(value): if not isinstance(value, str): return False return re.match(regex, value, flags) is not None return self._generate_test(test, ('matches', self._path, regex)) def search(self, regex: str, flags: int = 0) -> QueryInstance: """ Run a regex test against a dict value (only substring string has to match). >>> Query().f1.search(r'^\\w+$') :param regex: The regular expression to use for matching :param flags: regex flags to pass to ``re.match`` """ def test(value): if not isinstance(value, str): return False return re.search(regex, value, flags) is not None return self._generate_test(test, ('search', self._path, regex)) def test(self, func: Callable[[Mapping], bool], *args) -> QueryInstance: """ Run a user-defined test function against a dict value. >>> def test_func(val): ... return val == 42 ... >>> Query().f1.test(test_func) .. warning:: The test function provided needs to be deterministic (returning the same value when provided with the same arguments), otherwise this may mess up the query cache that :class:`~tinydb.table.Table` implements. :param func: The function to call, passing the dict as the first argument :param args: Additional arguments to pass to the test function """ return self._generate_test( lambda value: func(value, *args), ('test', self._path, func, args) ) def any(self, cond: Union[QueryInstance, List[Any]]) -> QueryInstance: """ Check if a condition is met by any document in a list, where a condition can also be a sequence (e.g. list). >>> Query().f1.any(Query().f2 == 1) Matches:: {'f1': [{'f2': 1}, {'f2': 0}]} >>> Query().f1.any([1, 2, 3]) Matches:: {'f1': [1, 2]} {'f1': [3, 4, 5]} :param cond: Either a query that at least one document has to match or a list of which at least one document has to be contained in the tested document. 
""" if callable(cond): def test(value): return is_sequence(value) and any(cond(e) for e in value) else: def test(value): return is_sequence(value) and any(e in cond for e in value) return self._generate_test( lambda value: test(value), ('any', self._path, freeze(cond)) ) def all(self, cond: Union['QueryInstance', List[Any]]) -> QueryInstance: """ Check if a condition is met by all documents in a list, where a condition can also be a sequence (e.g. list). >>> Query().f1.all(Query().f2 == 1) Matches:: {'f1': [{'f2': 1}, {'f2': 1}]} >>> Query().f1.all([1, 2, 3]) Matches:: {'f1': [1, 2, 3, 4, 5]} :param cond: Either a query that all documents have to match or a list which has to be contained in the tested document. """ if callable(cond): def test(value): return is_sequence(value) and all(cond(e) for e in value) else: def test(value): return is_sequence(value) and all(e in value for e in cond) return self._generate_test( lambda value: test(value), ('all', self._path, freeze(cond)) ) def one_of(self, items: List[Any]) -> QueryInstance: """ Check if the value is contained in a list or generator. >>> Query().f1.one_of(['value 1', 'value 2']) :param items: The list of items to check with """ return self._generate_test( lambda value: value in items, ('one_of', self._path, freeze(items)) ) def fragment(self, document: Mapping) -> QueryInstance: def test(value): for key in document: if key not in value or value[key] != document[key]: return False return True return self._generate_test( lambda value: test(value), ('fragment', freeze(document)), allow_empty_path=True ) def noop(self) -> QueryInstance: """ Always evaluate to ``True``. Useful for having a base value when composing queries dynamically. """ return QueryInstance( lambda value: True, () ) def map(self, fn: Callable[[Any], Any]) -> 'Query': """ Add a function to the query path. Similar to __getattr__ but for arbitrary functions. """ query = type(self)() # Now we add the callable to the query path ... 
query._path = self._path + (fn,) # ... and kill the hash - callable objects can be mutable, so it's # harmful to cache their results. query._hash = None return query def where(key: str) -> Query: """ A shorthand for ``Query()[key]`` """ return Query()[key] tinydb-4.8.2/tinydb/storages.py000066400000000000000000000117341470251210600165140ustar00rootroot00000000000000""" Contains the :class:`base class ` for storages and implementations. """ import io import json import os import warnings from abc import ABC, abstractmethod from typing import Dict, Any, Optional __all__ = ('Storage', 'JSONStorage', 'MemoryStorage') def touch(path: str, create_dirs: bool): """ Create a file if it doesn't exist yet. :param path: The file to create. :param create_dirs: Whether to create all missing parent directories. """ if create_dirs: base_dir = os.path.dirname(path) # Check if we need to create missing parent directories if not os.path.exists(base_dir): os.makedirs(base_dir) # Create the file by opening it in 'a' mode which creates the file if it # does not exist yet but does not modify its contents with open(path, 'a'): pass class Storage(ABC): """ The abstract base class for all Storages. A Storage (de)serializes the current state of the database and stores it in some place (memory, file on disk, ...). """ # Using ABCMeta as metaclass allows instantiating only storages that have # implemented read and write @abstractmethod def read(self) -> Optional[Dict[str, Dict[str, Any]]]: """ Read the current state. Any kind of deserialization should go here. Return ``None`` here to indicate that the storage is empty. """ raise NotImplementedError('To be overridden!') @abstractmethod def write(self, data: Dict[str, Dict[str, Any]]) -> None: """ Write the current state of the database to the storage. Any kind of serialization should go here. :param data: The current state of the database. 
""" raise NotImplementedError('To be overridden!') def close(self) -> None: """ Optional: Close open file handles, etc. """ pass class JSONStorage(Storage): """ Store the data in a JSON file. """ def __init__(self, path: str, create_dirs=False, encoding=None, access_mode='r+', **kwargs): """ Create a new instance. Also creates the storage file, if it doesn't exist and the access mode is appropriate for writing. Note: Using an access mode other than `r` or `r+` will probably lead to data loss or data corruption! :param path: Where to store the JSON data. :param access_mode: mode in which the file is opened (r, r+) :type access_mode: str """ super().__init__() self._mode = access_mode self.kwargs = kwargs if access_mode not in ('r', 'rb', 'r+', 'rb+'): warnings.warn( 'Using an `access_mode` other than \'r\', \'rb\', \'r+\' ' 'or \'rb+\' can cause data loss or corruption' ) # Create the file if it doesn't exist and creating is allowed by the # access mode if any([character in self._mode for character in ('+', 'w', 'a')]): # any of the writing modes touch(path, create_dirs=create_dirs) # Open the file for reading/writing self._handle = open(path, mode=self._mode, encoding=encoding) def close(self) -> None: self._handle.close() def read(self) -> Optional[Dict[str, Dict[str, Any]]]: # Get the file size by moving the cursor to the file end and reading # its location self._handle.seek(0, os.SEEK_END) size = self._handle.tell() if not size: # File is empty, so we return ``None`` so TinyDB can properly # initialize the database return None else: # Return the cursor to the beginning of the file self._handle.seek(0) # Load the JSON contents of the file return json.load(self._handle) def write(self, data: Dict[str, Dict[str, Any]]): # Move the cursor to the beginning of the file just in case self._handle.seek(0) # Serialize the database state using the user-provided arguments serialized = json.dumps(data, **self.kwargs) # Write the serialized data to the file try: 
self._handle.write(serialized) except io.UnsupportedOperation: raise IOError('Cannot write to the database. Access mode is "{0}"'.format(self._mode)) # Ensure the file has been written self._handle.flush() os.fsync(self._handle.fileno()) # Remove data that is behind the new cursor in case the file has # gotten shorter self._handle.truncate() class MemoryStorage(Storage): """ Store the data as JSON in memory. """ def __init__(self): """ Create a new instance. """ super().__init__() self.memory = None def read(self) -> Optional[Dict[str, Dict[str, Any]]]: return self.memory def write(self, data: Dict[str, Dict[str, Any]]): self.memory = data tinydb-4.8.2/tinydb/table.py000066400000000000000000000632171470251210600157570ustar00rootroot00000000000000""" This module implements tables, the central place for accessing and manipulating data in TinyDB. """ from typing import ( Callable, Dict, Iterable, Iterator, List, Mapping, Optional, Union, cast, Tuple ) from .queries import QueryLike from .storages import Storage from .utils import LRUCache __all__ = ('Document', 'Table') class Document(dict): """ A document stored in the database. This class provides a way to access both a document's content and its ID using ``doc.doc_id``. """ def __init__(self, value: Mapping, doc_id: int): super().__init__(value) self.doc_id = doc_id class Table: """ Represents a single TinyDB table. It provides methods for accessing and manipulating documents. .. admonition:: Query Cache As an optimization, a query cache is implemented using a :class:`~tinydb.utils.LRUCache`. This class mimics the interface of a normal ``dict``, but starts to remove the least-recently used entries once a threshold is reached. The query cache is updated on every search operation. When writing data, the whole cache is discarded as the query results may have changed. .. 
admonition:: Customization

        For customization, the following class variables can be set:

        - ``document_class`` defines the class that is used to represent
          documents,
        - ``document_id_class`` defines the class that is used to represent
          document IDs,
        - ``query_cache_class`` defines the class that is used for the query
          cache
        - ``default_query_cache_capacity`` defines the default capacity of
          the query cache

        .. versionadded:: 4.0

    :param storage: The storage instance to use for this table
    :param name: The table name
    :param cache_size: Maximum capacity of query cache
    :param persist_empty: Store new table even with no operations on it
    """

    #: The class used to represent documents
    #:
    #: .. versionadded:: 4.0
    document_class = Document

    #: The class used to represent a document ID
    #:
    #: .. versionadded:: 4.0
    document_id_class = int

    #: The class used for caching query results
    #:
    #: .. versionadded:: 4.0
    query_cache_class = LRUCache

    #: The default capacity of the query cache
    #:
    #: .. versionadded:: 4.0
    default_query_cache_capacity = 10

    def __init__(
        self,
        storage: Storage,
        name: str,
        cache_size: int = default_query_cache_capacity,
        persist_empty: bool = False
    ):
        """
        Create a table instance.
        """

        self._storage = storage
        self._name = name
        self._query_cache: LRUCache[QueryLike, List[Document]] \
            = self.query_cache_class(capacity=cache_size)

        # Cached value of the next free document ID. ``None`` means it has
        # to be (re-)computed from the stored table data on the next insert
        # (see ``_get_next_id``).
        self._next_id = None

        if persist_empty:
            # Write the (empty) table to the storage right away so it shows
            # up even before the first insert
            self._update_table(lambda table: table.clear())

    def __repr__(self):
        args = [
            'name={!r}'.format(self.name),
            'total={}'.format(len(self)),
            'storage={}'.format(self._storage),
        ]

        return '<{} {}>'.format(type(self).__name__, ', '.join(args))

    @property
    def name(self) -> str:
        """
        Get the table name.
        """
        return self._name

    @property
    def storage(self) -> Storage:
        """
        Get the table storage instance.
        """
        return self._storage

    def insert(self, document: Mapping) -> int:
        """
        Insert a new document into the table.

        :param document: the document to insert
        :returns: the inserted document's ID
        """

        # Make sure the document implements the ``Mapping`` interface
        if not isinstance(document, Mapping):
            raise ValueError('Document is not a Mapping')

        # First, we get the document ID for the new document
        if isinstance(document, self.document_class):
            # For a `Document` object we use the specified ID
            doc_id = document.doc_id

            # We also reset the stored next ID so the next insert won't
            # re-use document IDs by accident when storing an old value
            self._next_id = None
        else:
            # In all other cases we use the next free ID
            doc_id = self._get_next_id()

        # Now, we update the table and add the document
        def updater(table: dict):
            if doc_id in table:
                raise ValueError(f'Document with ID {str(doc_id)} '
                                 f'already exists')

            # By calling ``dict(document)`` we convert the data we got to a
            # ``dict`` instance even if it was a different class that
            # implemented the ``Mapping`` interface
            table[doc_id] = dict(document)

        # See below for details on ``Table._update``
        self._update_table(updater)

        return doc_id

    def insert_multiple(self, documents: Iterable[Mapping]) -> List[int]:
        """
        Insert multiple documents into the table.

        :param documents: an Iterable of documents to insert
        :returns: a list containing the inserted documents' IDs
        """
        doc_ids = []

        def updater(table: dict):
            for document in documents:
                # Make sure the document implements the ``Mapping`` interface
                if not isinstance(document, Mapping):
                    raise ValueError('Document is not a Mapping')

                if isinstance(document, self.document_class):
                    # Check if document does not override an existing document
                    if document.doc_id in table:
                        raise ValueError(
                            f'Document with ID {str(document.doc_id)} '
                            f'already exists'
                        )

                    # Store the doc_id, so we can return all document IDs
                    # later. Then save the document with its doc_id and
                    # skip the rest of the current loop
                    doc_id = document.doc_id
                    doc_ids.append(doc_id)
                    table[doc_id] = dict(document)
                    continue

                # Generate new document ID for this document
                # Store the doc_id, so we can return all document IDs
                # later, then save the document with the new doc_id
                doc_id = self._get_next_id()
                doc_ids.append(doc_id)
                table[doc_id] = dict(document)

        # See below for details on ``Table._update``
        self._update_table(updater)

        return doc_ids

    def all(self) -> List[Document]:
        """
        Get all documents stored in the table.

        :returns: a list with all documents.
        """

        # iter(self) (implemented in Table.__iter__ provides an iterator
        # that returns all documents in this table. We use it to get a list
        # of all documents by using the ``list`` constructor to perform the
        # conversion.

        return list(iter(self))

    def search(self, cond: QueryLike) -> List[Document]:
        """
        Search for all documents matching a 'where' cond.

        :param cond: the condition to check against
        :returns: list of matching documents
        """

        # First, we check the query cache to see if it has results for this
        # query
        cached_results = self._query_cache.get(cond)
        if cached_results is not None:
            # Return a shallow copy so callers can't mutate the cached list
            return cached_results[:]

        # Perform the search by applying the query to all documents.
        # Then, only if the document matches the query, convert it
        # to the document class and document ID class.
        docs = [
            self.document_class(doc, self.document_id_class(doc_id))
            for doc_id, doc in self._read_table().items()
            if cond(doc)
        ]

        # Only cache cacheable queries.
        #
        # This weird `getattr` dance is needed to make MyPy happy as
        # it doesn't know that a query might have a `is_cacheable` method
        # that is not declared in the `QueryLike` protocol due to it being
        # optional.
        # See: https://github.com/python/mypy/issues/1424
        #
        # Note also that by default we expect custom query objects to be
        # cacheable (which means they need to have a stable hash value).
        # This is to keep consistency with TinyDB's behavior before
        # `is_cacheable` was introduced which assumed that all queries
        # are cacheable.
        is_cacheable: Callable[[], bool] = getattr(cond, 'is_cacheable',
                                                   lambda: True)
        if is_cacheable():
            # Update the query cache (again with a copy so later mutations
            # of the returned list don't corrupt the cached entry)
            self._query_cache[cond] = docs[:]

        return docs

    def get(
        self,
        cond: Optional[QueryLike] = None,
        doc_id: Optional[int] = None,
        doc_ids: Optional[List] = None
    ) -> Optional[Union[Document, List[Document]]]:
        """
        Get exactly one document specified by a query or a document ID.
        However, if multiple document IDs are given then returns all
        documents in a list.

        Returns ``None`` if the document doesn't exist.

        :param cond: the condition to check against
        :param doc_id: the document's ID
        :param doc_ids: the document's IDs(multiple)

        :returns: the document(s) or ``None``
        """
        table = self._read_table()

        if doc_id is not None:
            # Retrieve a document specified by its ID.
            # Table keys are stored as strings (see ``_update_table`` which
            # stringifies all IDs on write), so convert the requested ID
            # for the lookup.
            raw_doc = table.get(str(doc_id), None)

            if raw_doc is None:
                return None

            # Convert the raw data to the document class
            return self.document_class(raw_doc, doc_id)

        elif doc_ids is not None:
            # Filter the table by extracting out all those documents which
            # have doc id specified in the doc_id list.

            # Since document IDs will be unique, we make it a set to ensure
            # constant time lookup
            doc_ids_set = set(str(doc_id) for doc_id in doc_ids)

            # Now return the filtered documents in form of list
            return [
                self.document_class(doc, self.document_id_class(doc_id))
                for doc_id, doc in table.items()
                if doc_id in doc_ids_set
            ]

        elif cond is not None:
            # Find a document specified by a query
            # The trailing underscore in doc_id_ is needed so MyPy
            # doesn't think that `doc_id_` (which is a string) needs
            # to have the same type as `doc_id` which is this function's
            # parameter and is an optional `int`.
            for doc_id_, doc in self._read_table().items():
                if cond(doc):
                    # Return the first matching document only
                    return self.document_class(
                        doc,
                        self.document_id_class(doc_id_)
                    )

            return None

        raise RuntimeError('You have to pass either cond or doc_id or doc_ids')

    def contains(
        self,
        cond: Optional[QueryLike] = None,
        doc_id: Optional[int] = None
    ) -> bool:
        """
        Check whether the database contains a document matching a query or
        an ID.

        If ``doc_id`` is set, it checks if the db contains the specified ID.

        :param cond: the condition use
        :param doc_id: the document ID to look for
        """
        if doc_id is not None:
            # Documents specified by ID
            return self.get(doc_id=doc_id) is not None

        elif cond is not None:
            # Document specified by condition
            return self.get(cond) is not None

        raise RuntimeError('You have to pass either cond or doc_id')

    def update(
        self,
        fields: Union[Mapping, Callable[[Mapping], None]],
        cond: Optional[QueryLike] = None,
        doc_ids: Optional[Iterable[int]] = None,
    ) -> List[int]:
        """
        Update all matching documents to have a given set of fields.
        :param fields: the fields that the matching documents will have
                       or a method that will update the documents
        :param cond: which documents to update
        :param doc_ids: a list of document IDs
        :returns: a list containing the updated document's ID
        """

        # Define the function that will perform the update
        if callable(fields):
            def perform_update(table, doc_id):
                # Update documents by calling the update function provided by
                # the user
                fields(table[doc_id])
        else:
            def perform_update(table, doc_id):
                # Update documents by setting all fields from the provided data
                table[doc_id].update(fields)

        if doc_ids is not None:
            # Perform the update operation for documents specified by a list
            # of document IDs

            updated_ids = list(doc_ids)

            def updater(table: dict):
                # Call the processing callback with all document IDs
                for doc_id in updated_ids:
                    perform_update(table, doc_id)

            # Perform the update operation (see _update_table for details)
            self._update_table(updater)

            return updated_ids

        elif cond is not None:
            # Perform the update operation for documents specified by a query

            # Collect affected doc_ids
            updated_ids = []

            def updater(table: dict):
                _cond = cast(QueryLike, cond)

                # We need to convert the keys iterator to a list because
                # we may remove entries from the ``table`` dict during
                # iteration and doing this without the list conversion would
                # result in an exception (RuntimeError: dictionary changed size
                # during iteration)
                for doc_id in list(table.keys()):
                    # Pass through all documents to find documents matching the
                    # query. Call the processing callback with the document ID
                    if _cond(table[doc_id]):
                        # Add ID to list of updated documents
                        updated_ids.append(doc_id)

                        # Perform the update (see above)
                        perform_update(table, doc_id)

            # Perform the update operation (see _update_table for details)
            self._update_table(updater)

            return updated_ids

        else:
            # Update all documents unconditionally

            updated_ids = []

            def updater(table: dict):
                # Process all documents
                for doc_id in list(table.keys()):
                    # Add ID to list of updated documents
                    updated_ids.append(doc_id)

                    # Perform the update (see above)
                    perform_update(table, doc_id)

            # Perform the update operation (see _update_table for details)
            self._update_table(updater)

            return updated_ids

    def update_multiple(
        self,
        updates: Iterable[
            Tuple[Union[Mapping, Callable[[Mapping], None]], QueryLike]
        ],
    ) -> List[int]:
        """
        Update all matching documents to have a given set of fields.

        :returns: a list containing the updated document's ID
        """

        # Define the function that will perform the update
        def perform_update(fields, table, doc_id):
            if callable(fields):
                # Update documents by calling the update function provided
                # by the user
                fields(table[doc_id])
            else:
                # Update documents by setting all fields from the provided
                # data
                table[doc_id].update(fields)

        # Perform the update operation for documents specified by a query

        # Collect affected doc_ids
        updated_ids = []

        def updater(table: dict):
            # We need to convert the keys iterator to a list because
            # we may remove entries from the ``table`` dict during
            # iteration and doing this without the list conversion would
            # result in an exception (RuntimeError: dictionary changed size
            # during iteration)
            for doc_id in list(table.keys()):
                for fields, cond in updates:
                    _cond = cast(QueryLike, cond)

                    # Pass through all documents to find documents matching the
                    # query. Call the processing callback with the document ID
                    if _cond(table[doc_id]):
                        # Add ID to list of updated documents
                        updated_ids.append(doc_id)

                        # Perform the update (see above)
                        perform_update(fields, table, doc_id)

        # Perform the update operation (see _update_table for details)
        self._update_table(updater)

        return updated_ids

    def upsert(self, document: Mapping, cond: Optional[QueryLike] = None) -> List[int]:
        """
        Update documents, if they exist, insert them otherwise.

        Note: This will update *all* documents matching the query. Document
        argument can be a tinydb.table.Document object if you want to specify a
        doc_id.

        :param document: the document to insert or the fields to update
        :param cond: which document to look for, optional if you've passed a
        Document with a doc_id
        :returns: a list containing the updated documents' IDs
        """

        # Extract doc_id
        if isinstance(document, self.document_class) and hasattr(document, 'doc_id'):
            doc_ids: Optional[List[int]] = [document.doc_id]
        else:
            doc_ids = None

        # Make sure we can actually find a matching document
        if doc_ids is None and cond is None:
            raise ValueError("If you don't specify a search query, you must "
                             "specify a doc_id. Hint: use a table.Document "
                             "object.")

        # Perform the update operation
        try:
            updated_docs: Optional[List[int]] = self.update(document, cond, doc_ids)
        except KeyError:
            # This happens when a doc_id is specified, but it's missing
            updated_docs = None

        # If documents have been updated: return their IDs
        if updated_docs:
            return updated_docs

        # There are no documents that match the specified query -> insert the
        # data as a new document
        return [self.insert(document)]

    def remove(
        self,
        cond: Optional[QueryLike] = None,
        doc_ids: Optional[Iterable[int]] = None,
    ) -> List[int]:
        """
        Remove all matching documents.
        :param cond: the condition to check against
        :param doc_ids: a list of document IDs
        :returns: a list containing the removed documents' ID
        """
        if doc_ids is not None:
            # This function returns the list of IDs for the documents that have
            # been removed. When removing documents identified by a set of
            # document IDs, it's this list of document IDs we need to return
            # later.
            # We convert the document ID iterator into a list, so we can both
            # use the document IDs to remove the specified documents and
            # to return the list of affected document IDs
            removed_ids = list(doc_ids)

            def updater(table: dict):
                for doc_id in removed_ids:
                    table.pop(doc_id)

            # Perform the remove operation
            self._update_table(updater)

            return removed_ids

        if cond is not None:
            removed_ids = []

            # This updater function will be called with the table data
            # as its first argument. See ``Table._update`` for details on this
            # operation
            def updater(table: dict):
                # We need to convince MyPy (the static type checker) that
                # the ``cond is not None`` invariant still holds true when
                # the updater function is called
                _cond = cast(QueryLike, cond)

                # We need to convert the keys iterator to a list because
                # we may remove entries from the ``table`` dict during
                # iteration and doing this without the list conversion would
                # result in an exception (RuntimeError: dictionary changed size
                # during iteration)
                for doc_id in list(table.keys()):
                    if _cond(table[doc_id]):
                        # Add document ID to list of removed document IDs
                        removed_ids.append(doc_id)

                        # Remove document from the table
                        table.pop(doc_id)

            # Perform the remove operation
            self._update_table(updater)

            return removed_ids

        raise RuntimeError('Use truncate() to remove all documents')

    def truncate(self) -> None:
        """
        Truncate the table by removing all documents.
        """

        # Update the table by resetting all data
        self._update_table(lambda table: table.clear())

        # Reset document ID counter
        self._next_id = None

    def count(self, cond: QueryLike) -> int:
        """
        Count the documents matching a query.

        :param cond: the condition use
        """

        return len(self.search(cond))

    def clear_cache(self) -> None:
        """
        Clear the query cache.
        """

        self._query_cache.clear()

    def __len__(self):
        """
        Count the total number of documents in this table.
        """

        return len(self._read_table())

    def __iter__(self) -> Iterator[Document]:
        """
        Iterate over all documents stored in the table.

        :returns: an iterator over all documents.
        """

        # Iterate all documents and their IDs
        for doc_id, doc in self._read_table().items():
            # Convert documents to the document class
            yield self.document_class(doc, self.document_id_class(doc_id))

    def _get_next_id(self):
        """
        Return the ID for a newly inserted document.
        """

        # If we already know the next ID
        if self._next_id is not None:
            next_id = self._next_id
            self._next_id = next_id + 1

            return next_id

        # Determine the next document ID by finding out the max ID value
        # of the current table documents

        # Read the table documents
        table = self._read_table()

        # If the table is empty, set the initial ID
        if not table:
            next_id = 1
            self._next_id = next_id + 1

            return next_id

        # Determine the next ID based on the maximum ID that's currently in use
        max_id = max(self.document_id_class(i) for i in table.keys())
        next_id = max_id + 1

        # The next ID we will return AFTER this call needs to be larger than
        # the current next ID we calculated
        self._next_id = next_id + 1

        return next_id

    def _read_table(self) -> Dict[str, Mapping]:
        """
        Read the table data from the underlying storage.

        Documents and doc_ids are NOT yet transformed, as
        we may not want to convert *all* documents when returning
        only one document for example.
        """

        # Retrieve the tables from the storage
        tables = self._storage.read()

        if tables is None:
            # The database is empty
            return {}

        # Retrieve the current table's data
        try:
            table = tables[self.name]
        except KeyError:
            # The table does not exist yet, so it is empty
            return {}

        return table

    def _update_table(self, updater: Callable[[Dict[int, Mapping]], None]):
        """
        Perform a table update operation.

        The storage interface used by TinyDB only allows to read/write the
        complete database data, but not modifying only portions of it. Thus,
        to only update portions of the table data, we first perform a read
        operation, perform the update on the table data and then write
        the updated data back to the storage.

        As a further optimization, we don't convert the documents into the
        document class, as the table data will *not* be returned to the user.
        """

        tables = self._storage.read()

        if tables is None:
            # The database is empty
            tables = {}

        try:
            raw_table = tables[self.name]
        except KeyError:
            # The table does not exist yet, so it is empty
            raw_table = {}

        # Convert the document IDs to the document ID class.
        # This is required as the rest of TinyDB expects the document IDs
        # to be an instance of ``self.document_id_class`` but the storage
        # might convert dict keys to strings.
        table = {
            self.document_id_class(doc_id): doc
            for doc_id, doc in raw_table.items()
        }

        # Perform the table update operation
        updater(table)

        # Convert the document IDs back to strings.
        # This is required as some storages (most notably the JSON file format)
        # don't support IDs other than strings.
        tables[self.name] = {
            str(doc_id): doc
            for doc_id, doc in table.items()
        }

        # Write the newly updated data back to the storage
        self._storage.write(tables)

        # Clear the query cache, as the table contents have changed
        self.clear_cache()
tinydb-4.8.2/tinydb/utils.py000066400000000000000000000110311470251210600160130ustar00rootroot00000000000000"""
Utility functions.
"""

from collections import OrderedDict, abc
from typing import List, Iterator, TypeVar, Generic, Union, Optional, Type, \
    TYPE_CHECKING

K = TypeVar('K')
V = TypeVar('V')
D = TypeVar('D')
T = TypeVar('T')

__all__ = ('LRUCache', 'freeze', 'with_typehint')


def with_typehint(baseclass: Type[T]):
    """
    Add type hints from a specified class to a base class:

    >>> class Foo(with_typehint(Bar)):
    ...     pass

    This would add type hints from class ``Bar`` to class ``Foo``.
Note that while PyCharm and Pyright (for VS Code) understand this pattern, MyPy does not. For that reason TinyDB has a MyPy plugin in ``mypy_plugin.py`` that adds support for this pattern. """ if TYPE_CHECKING: # In the case of type checking: pretend that the target class inherits # from the specified base class return baseclass # Otherwise: just inherit from `object` like a regular Python class return object class LRUCache(abc.MutableMapping, Generic[K, V]): """ A least-recently used (LRU) cache with a fixed cache size. This class acts as a dictionary but has a limited size. If the number of entries in the cache exceeds the cache size, the least-recently accessed entry will be discarded. This is implemented using an ``OrderedDict``. On every access the accessed entry is moved to the front by re-inserting it into the ``OrderedDict``. When adding an entry and the cache size is exceeded, the last entry will be discarded. """ def __init__(self, capacity=None) -> None: self.capacity = capacity self.cache: OrderedDict[K, V] = OrderedDict() @property def lru(self) -> List[K]: return list(self.cache.keys()) @property def length(self) -> int: return len(self.cache) def clear(self) -> None: self.cache.clear() def __len__(self) -> int: return self.length def __contains__(self, key: object) -> bool: return key in self.cache def __setitem__(self, key: K, value: V) -> None: self.set(key, value) def __delitem__(self, key: K) -> None: del self.cache[key] def __getitem__(self, key) -> V: value = self.get(key) if value is None: raise KeyError(key) return value def __iter__(self) -> Iterator[K]: return iter(self.cache) def get(self, key: K, default: Optional[D] = None) -> Optional[Union[V, D]]: value = self.cache.get(key) if value is not None: self.cache.move_to_end(key, last=True) return value return default def set(self, key: K, value: V): if self.cache.get(key): self.cache[key] = value self.cache.move_to_end(key, last=True) else: self.cache[key] = value # Check, if the cache is 
full and we have to remove old items # If the queue is of unlimited size, self.capacity is NaN and # x > NaN is always False in Python and the cache won't be cleared. if self.capacity is not None and self.length > self.capacity: self.cache.popitem(last=False) class FrozenDict(dict): """ An immutable dictionary. This is used to generate stable hashes for queries that contain dicts. Usually, Python dicts are not hashable because they are mutable. This class removes the mutability and implements the ``__hash__`` method. """ def __hash__(self): # Calculate the has by hashing a tuple of all dict items return hash(tuple(sorted(self.items()))) def _immutable(self, *args, **kws): raise TypeError('object is immutable') # Disable write access to the dict __setitem__ = _immutable __delitem__ = _immutable clear = _immutable setdefault = _immutable # type: ignore popitem = _immutable def update(self, e=None, **f): raise TypeError('object is immutable') def pop(self, k, d=None): raise TypeError('object is immutable') def freeze(obj): """ Freeze an object by making it immutable and thus hashable. """ if isinstance(obj, dict): # Transform dicts into ``FrozenDict``s return FrozenDict((k, freeze(v)) for k, v in obj.items()) elif isinstance(obj, list): # Transform lists into tuples return tuple(freeze(el) for el in obj) elif isinstance(obj, set): # Transform sets into ``frozenset``s return frozenset(obj) else: # Don't handle all other objects return obj tinydb-4.8.2/tinydb/version.py000066400000000000000000000000261470251210600163420ustar00rootroot00000000000000__version__ = '4.8.2'