pax_global_header00006660000000000000000000000064135764725070014532gustar00rootroot0000000000000052 comment=5e7813053f721ab37acd4ed1e50b41009886f5b2 tinydb-3.15.2/000077500000000000000000000000001357647250700131135ustar00rootroot00000000000000tinydb-3.15.2/.coveragerc000066400000000000000000000003071357647250700152340ustar00rootroot00000000000000[run] branch = True [report] exclude_lines = pragma: no cover raise NotImplementedError.* warnings\.warn.* def __repr__ def __str__ def main() if __name__ == .__main__.: tinydb-3.15.2/.github/000077500000000000000000000000001357647250700144535ustar00rootroot00000000000000tinydb-3.15.2/.github/stale.yml000066400000000000000000000013521357647250700163070ustar00rootroot00000000000000# Number of days of inactivity before an issue becomes stale daysUntilStale: 30 # Number of days of inactivity before a stale issue is closed daysUntilClose: 7 # Issues with these labels will never be considered stale exemptLabels: - bug - pinned - contributions-welcome # Label to use when marking an issue as stale staleLabel: stale # Comment to post when marking an issue as stale. Set to `false` to disable markComment: > This issue has been automatically marked as stale because it has not had recent activity. It will be closed if no further activity occurs. Feel free to reopen this if needed. Thank you for your contributions :heart: # Comment to post when closing a stale issue. Set to `false` to disable closeComment: false tinydb-3.15.2/.gitignore000066400000000000000000000005571357647250700151120ustar00rootroot00000000000000*.py[cod] # C extensions *.so # Packages *.egg *.egg-info dist build eggs parts bin var sdist develop-eggs .installed.cfg lib lib64 __pycache__ # Installer logs pip-log.txt # Unit test / coverage reports .coverage .tox nosetests.xml .pytest_cache/ # Translations *.mo # Mr Developer .mr.developer.cfg .project .pydevproject # Pycharm .idea *.db.yml .DS_Storetinydb-3.15.2/CONTRIBUTING.rst000066400000000000000000000034541357647250700155620ustar00rootroot00000000000000Contribution Guidelines ####################### Whether reporting bugs, discussing improvements and new ideas or writing extensions: Contributions to TinyDB are welcome! Here's how to get started: 1. Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug 2. Fork `the repository `_ on GitHub, create a new branch off the `master` branch and start making your changes (known as `GitHub Flow `_) 3. Write a test which shows that the bug was fixed or that the feature works as expected 4. Send a pull request and bug the maintainer until it gets merged and published :) Philosophy of TinyDB ******************** TinyDB aims to be simple and fun to use. Therefore two key values are simplicity and elegance of interfaces and code. These values will contradict each other from time to time. In these cases , try using as little magic as possible. In any case don't forget documenting code that isn't clear at first glance. Code Conventions **************** In general the TinyDB source should always follow `PEP 8 `_. Exceptions are allowed in well justified and documented cases. However we make a small exception concerning docstrings: When using multiline docstrings, keep the opening and closing triple quotes on their own lines and add an empty line after it. .. code-block:: python def some_function(): """ Documentation ... """ # implementation ... Version Numbers *************** TinyDB follows the `SemVer versioning guidelines `_. 
This implies that backwards incompatible changes in the API will increment the major version. So think twice before making such changes. tinydb-3.15.2/LICENSE000066400000000000000000000020701357647250700141170ustar00rootroot00000000000000Copyright (C) 2013 Markus Siemens Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. tinydb-3.15.2/MANIFEST.in000066400000000000000000000000541357647250700146500ustar00rootroot00000000000000include LICENSE recursive-include tests *.pytinydb-3.15.2/Pipfile000066400000000000000000000003501357647250700144240ustar00rootroot00000000000000[[source]] url = "https://pypi.python.org/simple" verify_ssl = true name = "pypi" [packages] [dev-packages] Sphinx = "*" pytest-cov = "*" pytest-runner = "*" pycodestyle = "*" pyyaml = ">=4.2b1" [pipenv] allow_prereleases = true tinydb-3.15.2/Pipfile.lock000066400000000000000000000400701357647250700153560ustar00rootroot00000000000000{ "_meta": { "hash": { "sha256": "0e94adba482362d31352a6efafd6325b76a508d5ffc1add0e529f17265e3766a" }, "pipfile-spec": 6, "requires": {}, "sources": [ { "name": "pypi", "url": "https://pypi.python.org/simple", "verify_ssl": true } ] }, "default": {}, "develop": { "alabaster": { "hashes": [ "sha256:446438bdcca0e05bd45ea2de1668c1d9b032e1a9154c2c259092d77031ddd359", "sha256:a661d72d58e6ea8a57f7a86e37d86716863ee5e92788398526d58b26a4e4dc02" ], "version": "==0.7.12" }, "atomicwrites": { "hashes": [ "sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4", "sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6" ], "version": "==1.3.0" }, "attrs": { "hashes": [ "sha256:69c0dbf2ed392de1cb5ec704444b08a5ef81680a61cb899dc08127123af36a79", "sha256:f0b870f674851ecbfbbbd364d6b5cbdff9dcedbc7f3f5e18a6891057f21fe399" ], "version": "==19.1.0" }, "babel": { "hashes": [ "sha256:6778d85147d5d85345c14a26aada5e478ab04e39b078b0745ee6870c2b5cf669", "sha256:8cba50f48c529ca3fa18cf81fa9403be176d374ac4d60738b839122dfaaa3d23" ], "version": "==2.6.0" }, "certifi": { "hashes": [ "sha256:59b7658e26ca9c7339e00f8f4636cdfe59d34fa37b9b04f6f9e9926b3cece1a5", "sha256:b26104d6835d1f5e49452a26eb2ff87fe7090b89dfcaee5ea2212697e1e1d7ae" ], "version": "==2019.3.9" }, "chardet": { "hashes": [ "sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae", "sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691" ], "version": "==3.0.4" }, "coverage": { "hashes": [ "sha256:029c69deaeeeae1b15bc6c59f0ffa28aa8473721c614a23f2c2976dec245cd12", "sha256:02abbbebc6e9d5abe13cd28b5e963dedb6ffb51c146c916d17b18f141acd9947", 
"sha256:1bbfe5b82a3921d285e999c6d256c1e16b31c554c29da62d326f86c173d30337", "sha256:210c02f923df33a8d0e461c86fdcbbb17228ff4f6d92609fc06370a98d283c2d", "sha256:2d0807ba935f540d20b49d5bf1c0237b90ce81e133402feda906e540003f2f7a", "sha256:35d7a013874a7c927ce997350d314144ffc5465faf787bb4e46e6c4f381ef562", "sha256:3636f9d0dcb01aed4180ef2e57a4e34bb4cac3ecd203c2a23db8526d86ab2fb4", "sha256:42f4be770af2455a75e4640f033a82c62f3fb0d7a074123266e143269d7010ef", "sha256:48440b25ba6cda72d4c638f3a9efa827b5b87b489c96ab5f4ff597d976413156", "sha256:4dac8dfd1acf6a3ac657475dfdc66c621f291b1b7422a939cc33c13ac5356473", "sha256:4e8474771c69c2991d5eab65764289a7dd450bbea050bc0ebb42b678d8222b42", "sha256:551f10ddfeff56a1325e5a34eff304c5892aa981fd810babb98bfee77ee2fb17", "sha256:5b104982f1809c1577912519eb249f17d9d7e66304ad026666cb60a5ef73309c", "sha256:5c62aef73dfc87bfcca32cee149a1a7a602bc74bac72223236b0023543511c88", "sha256:633151f8d1ad9467b9f7e90854a7f46ed8f2919e8bc7d98d737833e8938fc081", "sha256:772207b9e2d5bf3f9d283b88915723e4e92d9a62c83f44ec92b9bd0cd685541b", "sha256:7d5e02f647cd727afc2659ec14d4d1cc0508c47e6cfb07aea33d7aa9ca94d288", "sha256:a9798a4111abb0f94584000ba2a2c74841f2cfe5f9254709756367aabbae0541", "sha256:b38ea741ab9e35bfa7015c93c93bbd6a1623428f97a67083fc8ebd366238b91f", "sha256:b6a5478c904236543c0347db8a05fac6fc0bd574c870e7970faa88e1d9890044", "sha256:c6248bfc1de36a3844685a2e10ba17c18119ba6252547f921062a323fb31bff1", "sha256:c705ab445936457359b1424ef25ccc0098b0491b26064677c39f1d14a539f056", "sha256:d95a363d663ceee647291131dbd213af258df24f41350246842481ec3709bd33", "sha256:e27265eb80cdc5dab55a40ef6f890e04ecc618649ad3da5265f128b141f93f78", "sha256:ebc276c9cb5d917bd2ae959f84ffc279acafa9c9b50b0fa436ebb70bbe2166ea", "sha256:f4d229866d030863d0fe3bf297d6d11e6133ca15bbb41ed2534a8b9a3d6bd061", "sha256:f95675bd88b51474d4fe5165f3266f419ce754ffadfb97f10323931fa9ac95e5", "sha256:f95bc54fb6d61b9f9ff09c4ae8ff6a3f5edc937cda3ca36fc937302a7c152bf1", "sha256:fd0f6be53de40683584e5331c341e65a679dbe5ec489a0697cec7c2ef1a48cda" ], "version": "==5.0a4" }, "docutils": { "hashes": [ "sha256:02aec4bd92ab067f6ff27a38a38a41173bf01bed8f89157768c1573f53e474a6", "sha256:51e64ef2ebfb29cae1faa133b3710143496eca21c530f3f71424d77687764274", "sha256:7a4bd47eaf6596e1295ecb11361139febe29b084a87bf005bf899f9a42edc3c6" ], "version": "==0.14" }, "idna": { "hashes": [ "sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407", "sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c" ], "version": "==2.8" }, "imagesize": { "hashes": [ "sha256:3f349de3eb99145973fefb7dbe38554414e5c30abd0c8e4b970a7c9d09f3a1d8", "sha256:f3832918bc3c66617f92e35f5d70729187676313caa60c187eb0f28b8fe5e3b5" ], "version": "==1.1.0" }, "jinja2": { "hashes": [ "sha256:065c4f02ebe7f7cf559e49ee5a95fb800a9e4528727aec6f24402a5374c65013", "sha256:14dd6caf1527abb21f08f86c784eac40853ba93edb79552aa1e4b8aef1b61c7b" ], "version": "==2.10.1" }, "markupsafe": { "hashes": [ "sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473", "sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161", "sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235", "sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5", "sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff", "sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b", "sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1", "sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e", 
"sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183", "sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66", "sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1", "sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1", "sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e", "sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b", "sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905", "sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735", "sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d", "sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e", "sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d", "sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c", "sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21", "sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2", "sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5", "sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b", "sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6", "sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f", "sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f", "sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7" ], "version": "==1.1.1" }, "more-itertools": { "hashes": [ "sha256:2112d2ca570bb7c3e53ea1a35cd5df42bb0fd10c45f0fb97178679c3c03d64c7", "sha256:c3e4748ba1aad8dba30a4886b0b1a2004f9a863837b8654e7059eebf727afa5a" ], "markers": "python_version > '2.7'", "version": "==7.0.0" }, "packaging": { "hashes": [ "sha256:0c98a5d0be38ed775798ece1b9727178c4469d9c3b4ada66e8e6b7849f8732af", "sha256:9e1cbf8c12b1f1ce0bb5344b8d7ecf66a6f8a6e91bcb0c84593ed6d3ab5c4ab3" ], "version": "==19.0" }, "pluggy": { "hashes": [ "sha256:19ecf9ce9db2fce065a7a0586e07cfb4ac8614fe96edf628a264b1c70116cf8f", "sha256:84d306a647cc805219916e62aab89caa97a33a1dd8c342e87a37f91073cd4746" ], "version": "==0.9.0" }, "py": { "hashes": [ "sha256:64f65755aee5b381cea27766a3a147c3f15b9b6b9ac88676de66ba2ae36793fa", "sha256:dc639b046a6e2cff5bbe40194ad65936d6ba360b52b3c3fe1d08a82dd50b5e53" ], "version": "==1.8.0" }, "pycodestyle": { "hashes": [ "sha256:95a2219d12372f05704562a14ec30bc76b05a5b297b21a5dfe3f6fac3491ae56", "sha256:e40a936c9a450ad81df37f549d676d127b1b66000a6c500caa2b085bc0ca976c" ], "index": "pypi", "version": "==2.5.0" }, "pygments": { "hashes": [ "sha256:5ffada19f6203563680669ee7f53b64dabbeb100eb51b61996085e99c03b284a", "sha256:e8218dd399a61674745138520d0d4cf2621d7e032439341bc3f647bff125818d" ], "version": "==2.3.1" }, "pyparsing": { "hashes": [ "sha256:1873c03321fc118f4e9746baf201ff990ceb915f433f23b395f5580d1840cb2a", "sha256:9b6323ef4ab914af344ba97510e966d64ba91055d6b9afa6b30799340e89cc03" ], "version": "==2.4.0" }, "pytest": { "hashes": [ "sha256:3773f4c235918987d51daf1db66d51c99fac654c81d6f2f709a046ab446d5e5d", "sha256:b7802283b70ca24d7119b32915efa7c409982f59913c1a6c0640aacf118b95f5" ], "version": "==4.4.1" }, "pytest-cov": { "hashes": [ "sha256:0ab664b25c6aa9716cbf203b17ddb301932383046082c081b9848a0edf5add33", "sha256:230ef817450ab0699c6cc3c9c8f7a829c34674456f2ed8df1fe1d39780f7c87f" ], "index": "pypi", "version": "==2.6.1" }, "pytest-runner": { "hashes": [ "sha256:00ad6cd754ce55b01b868a6d00b77161e4d2006b3918bde882376a0a884d0df4", 
"sha256:e946c7dbdc8c0c2ffa44e7b45450f68e7f08cb133983134fa63a1d1486c2b36b" ], "index": "pypi", "version": "==4.4" }, "pytz": { "hashes": [ "sha256:303879e36b721603cc54604edcac9d20401bdbe31e1e4fdee5b9f98d5d31dfda", "sha256:d747dd3d23d77ef44c6a3526e274af6efeb0a6f1afd5a69ba4d5be4098c8e141" ], "version": "==2019.1" }, "pyyaml": { "hashes": [ "sha256:1adecc22f88d38052fb787d959f003811ca858b799590a5eaa70e63dca50308c", "sha256:436bc774ecf7c103814098159fbb84c2715d25980175292c648f2da143909f95", "sha256:460a5a4248763f6f37ea225d19d5c205677d8d525f6a83357ca622ed541830c2", "sha256:5a22a9c84653debfbf198d02fe592c176ea548cccce47553f35f466e15cf2fd4", "sha256:7a5d3f26b89d688db27822343dfa25c599627bc92093e788956372285c6298ad", "sha256:9372b04a02080752d9e6f990179a4ab840227c6e2ce15b95e1278456664cf2ba", "sha256:a5dcbebee834eaddf3fa7366316b880ff4062e4bcc9787b78c7fbb4a26ff2dd1", "sha256:aee5bab92a176e7cd034e57f46e9df9a9862a71f8f37cad167c6fc74c65f5b4e", "sha256:c51f642898c0bacd335fc119da60baae0824f2cde95b0330b56c0553439f0673", "sha256:c68ea4d3ba1705da1e0d85da6684ac657912679a649e8868bd850d2c299cce13", "sha256:e23d0cc5299223dcc37885dae624f382297717e459ea24053709675a976a3e19" ], "index": "pypi", "version": "==5.1" }, "requests": { "hashes": [ "sha256:502a824f31acdacb3a35b6690b5fbf0bc41d63a24a45c4004352b0242707598e", "sha256:7bf2a778576d825600030a110f3c0e3e8edc51dfaafe1c146e39a2027784957b" ], "version": "==2.21.0" }, "six": { "hashes": [ "sha256:3350809f0555b11f552448330d0b52d5f24c91a322ea4a15ef22629740f3761c", "sha256:d16a0141ec1a18405cd4ce8b4613101da75da0e9a7aec5bdd4fa804d0e0eba73" ], "version": "==1.12.0" }, "snowballstemmer": { "hashes": [ "sha256:919f26a68b2c17a7634da993d91339e288964f93c274f1343e3bbbe2096e1128", "sha256:9f3bcd3c401c3e862ec0ebe6d2c069ebc012ce142cce209c098ccb5b09136e89" ], "version": "==1.2.1" }, "sphinx": { "hashes": [ "sha256:423280646fb37944dd3c85c58fb92a20d745793a9f6c511f59da82fa97cd404b", "sha256:de930f42600a4fef993587633984cc5027dedba2464bcf00ddace26b40f8d9ce" ], "index": "pypi", "version": "==2.0.1" }, "sphinxcontrib-applehelp": { "hashes": [ "sha256:edaa0ab2b2bc74403149cb0209d6775c96de797dfd5b5e2a71981309efab3897", "sha256:fb8dee85af95e5c30c91f10e7eb3c8967308518e0f7488a2828ef7bc191d0d5d" ], "version": "==1.0.1" }, "sphinxcontrib-devhelp": { "hashes": [ "sha256:6c64b077937330a9128a4da74586e8c2130262f014689b4b89e2d08ee7294a34", "sha256:9512ecb00a2b0821a146736b39f7aeb90759834b07e81e8cc23a9c70bacb9981" ], "version": "==1.0.1" }, "sphinxcontrib-htmlhelp": { "hashes": [ "sha256:4670f99f8951bd78cd4ad2ab962f798f5618b17675c35c5ac3b2132a14ea8422", "sha256:d4fd39a65a625c9df86d7fa8a2d9f3cd8299a3a4b15db63b50aac9e161d8eff7" ], "version": "==1.0.2" }, "sphinxcontrib-jsmath": { "hashes": [ "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8" ], "version": "==1.0.1" }, "sphinxcontrib-qthelp": { "hashes": [ "sha256:513049b93031beb1f57d4daea74068a4feb77aa5630f856fcff2e50de14e9a20", "sha256:79465ce11ae5694ff165becda529a600c754f4bc459778778c7017374d4d406f" ], "version": "==1.0.2" }, "sphinxcontrib-serializinghtml": { "hashes": [ "sha256:c0efb33f8052c04fd7a26c0a07f1678e8512e0faec19f4aa8f2473a8b81d5227", "sha256:db6615af393650bf1151a6cd39120c29abaf93cc60db8c48eb2dddbfdc3a9768" ], "version": "==1.1.3" }, "urllib3": { "hashes": [ "sha256:4c291ca23bbb55c76518905869ef34bdd5f0e46af7afe6861e8375643ffee1a0", "sha256:9a247273df709c4fedb38c711e44292304f73f39ab01beda9f6b9fc375669ac3" ], "version": "==1.24.2" } } } 
tinydb-3.15.2/README.rst000066400000000000000000000111551357647250700146050ustar00rootroot00000000000000.. image:: https://raw.githubusercontent.com/msiemens/tinydb/master/artwork/logo.png :scale: 100% :height: 150px |Build Status| |Coverage| |Version| Quick Links *********** - `Example Code`_ - `Supported Python Versions`_ - `Documentation `_ - `Changelog `_ - `Extensions `_ - `Contributing`_ Introduction ************ TinyDB is a lightweight document oriented database optimized for your happiness :) It's written in pure Python and has no external dependencies. The target are small apps that would be blown away by a SQL-DB or an external database server. TinyDB is: - **tiny:** The current source code has 1600 lines of code (with about 33% documentation) and 1800 lines of tests. For comparison: Buzhug_ has about 2500 lines of code (w/o tests), CodernityDB_ has about 7000 lines of code (w/o tests). - **document oriented:** Like MongoDB_, you can store any document (represented as ``dict``) in TinyDB. - **optimized for your happiness:** TinyDB is designed to be simple and fun to use by providing a simple and clean API. - **written in pure Python:** TinyDB neither needs an external server (as e.g. `PyMongo `_) nor any dependencies from PyPI. - **works on Python 2.7 and 3.3 – 3.7 and PyPy:** TinyDB works on all modern versions of Python and PyPy. - **powerfully extensible:** You can easily extend TinyDB by writing new storages or modify the behaviour of storages with Middlewares. - **100% test coverage:** No explanation needed. To dive straight into all the details, head over to the `TinyDB docs `_. You can also discuss everything related to TinyDB like general development, extensions or showcase your TinyDB-based projects on the `discussion forum `_. Supported Python Versions ************************* TinyDB has been tested with Python 2.7, 3.3 - 3.6 and PyPy. Example Code ************ .. code-block:: python >>> from tinydb import TinyDB, Query >>> db = TinyDB('/path/to/db.json') >>> db.insert({'int': 1, 'char': 'a'}) >>> db.insert({'int': 1, 'char': 'b'}) Query Language ============== .. code-block:: python >>> User = Query() >>> # Search for a field value >>> db.search(User.name == 'John') [{'name': 'John', 'age': 22}, {'name': 'John', 'age': 37}] >>> # Combine two queries with logical and >>> db.search((User.name == 'John') & (User.age <= 30)) [{'name': 'John', 'age': 22}] >>> # Combine two queries with logical or >>> db.search((User.name == 'John') | (User.name == 'Bob')) [{'name': 'John', 'age': 22}, {'name': 'John', 'age': 37}, {'name': 'Bob', 'age': 42}] >>> # More possible comparisons: != < > <= >= >>> # More possible checks: where(...).matches(regex), where(...).test(your_test_func) Tables ====== .. code-block:: python >>> table = db.table('name') >>> table.insert({'value': True}) >>> table.all() [{'value': True}] Using Middlewares ================= .. code-block:: python >>> from tinydb.storages import JSONStorage >>> from tinydb.middlewares import CachingMiddleware >>> db = TinyDB('/path/to/db.json', storage=CachingMiddleware(JSONStorage)) Contributing ************ Whether reporting bugs, discussing improvements and new ideas or writing extensions: Contributions to TinyDB are welcome! Here's how to get started: 1. Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug 2. Fork `the repository `_ on Github, create a new branch off the `master` branch and start making your changes (known as `GitHub Flow `_) 3. 
Write a test which shows that the bug was fixed or that the feature works as expected 4. Send a pull request and bug the maintainer until it gets merged and published ☺ .. |Build Status| image:: https://img.shields.io/azure-devops/build/msiemens/3e5baa75-12ec-43ac-9728-89823ee8c7e2/2.svg?style=flat-square :target: https://dev.azure.com/msiemens/github/_build?definitionId=2 .. |Coverage| image:: http://img.shields.io/coveralls/msiemens/tinydb.svg?style=flat-square :target: https://coveralls.io/r/msiemens/tinydb .. |Version| image:: http://img.shields.io/pypi/v/tinydb.svg?style=flat-square :target: https://pypi.python.org/pypi/tinydb/ .. _Buzhug: http://buzhug.sourceforge.net/ .. _CodernityDB: https://github.com/perchouli/codernitydb .. _MongoDB: http://mongodb.org/ tinydb-3.15.2/artwork/000077500000000000000000000000001357647250700146045ustar00rootroot00000000000000tinydb-3.15.2/artwork/logo.png000066400000000000000000000063421357647250700162570ustar00rootroot00000000000000PNG  IHDRwb pHYs  tEXtSoftwareAdobe ImageReadyqe< oIDATx읿n"}nV+m&GG|x#m4$MAZGC4nַOu^k4CS:4E.H?B `~ hZ4_+O BH!90-71Gwksͱ|}ỶPG6>Vٚ4Rx73@.)^5>A@ z;f߿,Gܔ+2W6i6's_s{dҏV Xl'uC#`̅vdC~ɍ ƥ_vmeb3C Gف{blc7+Na < do`6?6_1D TTe"PsZ@@"dub0G4%/ ȺBkqʍ!`M :$}D(s ,KHi>@ =:@ {,)b"^wD"qcf='A $2̛Z/6"T"}`Pg[Y'DDnӏm;C*'80ĜXX.빮ozZ+zFWEleoi[!&6.Y+:l(l@o >쭹we#h~yv&<YlM\o 0jMO~vt~!P17=Ymxǖƙew/ */{#ezyGVfbfcak*0E6D+by,T)`B(p">#ٕi5?*njuO.>xڳ(/*yձg+![8g(*+0@ 74e0{\~`,MOBcZ9з}iKs,v:@ )fW׮Wt H5X6ޯh]e;Ҹ/ ]NJy!qB*jDzؐQ6 AO=90=c-qXZSZH ۲'wо&MV&=yH>k3TB_*ͱ0ǃmzFsX<U\҈/(&fDlrJ09YZk;EFCZ!"`9s^aGeӇm~m9ZB''6A OviDU2WX<CY5Eu+ޯM:|J֪54WsI,{rmDepڐkH+`H;/Ib3H-<,zPj\tE҈ 1kP/G0:045}u*gJOscd՛@zg|P&Շ 4)` SOig7OTQ꾢ʰ.Bt5jw` `fYu*v: *օ Cؕ$6gE sayiDzN`Ӈ[zJZ$`z}ub?$^55i߯"1u"s]=}`PąR5 .=| Emln[=LJ7m#`PMcqJ6* 뛞 Ʈ> guʗm0 ow﷯)IӴE &VSFK'T!@Qj;WvE(Υx0N/% MOvfx0Źq_⼆}KTNOA0q tinydb-3.15.2/azure-pipelines.yml000066400000000000000000000021611357647250700167520ustar00rootroot00000000000000# Python package # Create and test a Python package on multiple Python versions. # Add steps that analyze code, save the dist with the build record, publish to a PyPI-compatible index, and more: # https://docs.microsoft.com/azure/devops/pipelines/languages/python trigger: - master - v3 pool: vmImage: 'ubuntu-latest' strategy: matrix: Python27: python.version: '2.7' Python34: python.version: '3.4' Python35: python.version: '3.5' Python36: python.version: '3.6' Python37: python.version: '3.7' PyPy27: python.version: 'pypy2' PyPy3: python.version: 'pypy3' steps: - task: UsePythonVersion@0 inputs: versionSpec: '$(python.version)' displayName: 'Use Python $(python.version)' - script: | python -m pip install --upgrade pip pip install -U pytest pytest-azurepipelines pytest-cov coveralls wheel twine displayName: 'Install dependencies' - script: | pytest --cov-report=html:$(pwd)/htmlcov displayName: 'Run tests' - script: | python setup.py sdist bdist_wheel twine check dist/* displayName: 'Check dist package format' tinydb-3.15.2/docs/000077500000000000000000000000001357647250700140435ustar00rootroot00000000000000tinydb-3.15.2/docs/.gitignore000066400000000000000000000000071357647250700160300ustar00rootroot00000000000000_build/tinydb-3.15.2/docs/Makefile000066400000000000000000000151521357647250700155070ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. 
SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." 
qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/TinyDB.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/TinyDB.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/TinyDB" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/TinyDB" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." 
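The Makefile above is the standard Sphinx driver: every target shells out to ``sphinx-build`` with ``$(BUILDDIR)`` as the output directory. For completeness, a minimal sketch of the programmatic equivalent of ``make html`` follows; it assumes Sphinx is installed and is run from the repository root, and the ``docs``/``docs/_build`` paths are only illustrative, not part of this repository's tooling.

.. code-block:: python

    # Rough Python equivalent of `make html` (sketch, not project code).
    # Assumes Sphinx is installed; paths are illustrative.
    from sphinx.application import Sphinx

    app = Sphinx(
        srcdir='docs',                      # directory with the .rst sources
        confdir='docs',                     # directory containing conf.py
        outdir='docs/_build/html',          # matches $(BUILDDIR)/html in the Makefile
        doctreedir='docs/_build/doctrees',  # doctree cache, as in the Makefile
        buildername='html',
    )
    app.build()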
tinydb-3.15.2/docs/_static/000077500000000000000000000000001357647250700154715ustar00rootroot00000000000000tinydb-3.15.2/docs/_static/logo.png000066400000000000000000000063421357647250700171440ustar00rootroot00000000000000PNG  IHDRwb pHYs  tEXtSoftwareAdobe ImageReadyqe< oIDATx읿n"}nV+m&GG|x#m4$MAZGC4nַOu^k4CS:4E.H?B `~ hZ4_+O BH!90-71Gwksͱ|}ỶPG6>Vٚ4Rx73@.)^5>A@ z;f߿,Gܔ+2W6i6's_s{dҏV Xl'uC#`̅vdC~ɍ ƥ_vmeb3C Gف{blc7+Na < do`6?6_1D TTe"PsZ@@"dub0G4%/ ȺBkqʍ!`M :$}D(s ,KHi>@ =:@ {,)b"^wD"qcf='A $2̛Z/6"T"}`Pg[Y'DDnӏm;C*'80ĜXX.빮ozZ+zFWEleoi[!&6.Y+:l(l@o >쭹we#h~yv&<YlM\o 0jMO~vt~!P17=Ymxǖƙew/ */{#ezyGVfbfcak*0E6D+by,T)`B(p">#ٕi5?*njuO.>xڳ(/*yձg+![8g(*+0@ 74e0{\~`,MOBcZ9з}iKs,v:@ )fW׮Wt H5X6ޯh]e;Ҹ/ ]NJy!qB*jDzؐQ6 AO=90=c-qXZSZH ۲'wо&MV&=yH>k3TB_*ͱ0ǃmzFsX<U\҈/(&fDlrJ09YZk;EFCZ!"`9s^aGeӇm~m9ZB''6A OviDU2WX<CY5Eu+ޯM:|J֪54WsI,{rmDepڐkH+`H;/Ib3H-<,zPj\tE҈ 1kP/G0:045}u*gJOscd՛@zg|P&Շ 4)` SOig7OTQ꾢ʰ.Bt5jw` `fYu*v: *օ Cؕ$6gE sayiDzN`Ӈ[zJZ$`z}ub?$^55i߯"1u"s]=}`PąR5 .=| Emln[=LJ7m#`PMcqJ6* 뛞 Ʈ> guʗm0 ow﷯)IӴE &VSFK'T!@Qj;WvE(Υx0N/% MOvfx0Źq_⼆}KTNOA0qUseful Links tinydb-3.15.2/docs/_templates/sidebarlogo.html000066400000000000000000000002061357647250700213560ustar00rootroot00000000000000 tinydb-3.15.2/docs/_themes/000077500000000000000000000000001357647250700154675ustar00rootroot00000000000000tinydb-3.15.2/docs/_themes/.gitignore000066400000000000000000000000261357647250700174550ustar00rootroot00000000000000*.pyc *.pyo .DS_Store tinydb-3.15.2/docs/_themes/LICENSE000066400000000000000000000033751357647250700165040ustar00rootroot00000000000000Copyright (c) 2010 by Armin Ronacher. Some rights reserved. Redistribution and use in source and binary forms of the theme, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * The names of the contributors may not be used to endorse or promote products derived from this software without specific prior written permission. We kindly ask you to only use these themes in an unmodified manner just for Flask and Flask-related products, not for unrelated projects. If you like the visual style and want to use it for your own projects, please consider making some larger changes to the themes (such as changing font faces, sizes, colors or margins). THIS THEME IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS THEME, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. tinydb-3.15.2/docs/_themes/README000066400000000000000000000021051357647250700163450ustar00rootroot00000000000000Flask Sphinx Styles =================== This repository contains sphinx styles for Flask and Flask related projects. 
To use this style in your Sphinx documentation, follow this guide: 1. put this folder as _themes into your docs folder. Alternatively you can also use git submodules to check out the contents there. 2. add this to your conf.py: sys.path.append(os.path.abspath('_themes')) html_theme_path = ['_themes'] html_theme = 'flask' The following themes exist: - 'flask' - the standard flask documentation theme for large projects - 'flask_small' - small one-page theme. Intended to be used by very small addon libraries for flask. The following options exist for the flask_small theme: [options] index_logo = '' filename of a picture in _static to be used as replacement for the h1 in the index.rst file. index_logo_height = 120px height of the index logo github_fork = '' repository name on github for the "fork me" badge tinydb-3.15.2/docs/_themes/flask/000077500000000000000000000000001357647250700165675ustar00rootroot00000000000000tinydb-3.15.2/docs/_themes/flask/layout.html000066400000000000000000000015161357647250700207750ustar00rootroot00000000000000{%- extends "basic/layout.html" %} {%- block extrahead %} {{ super() }} {% if theme_touch_icon %} {% endif %} {% endblock %} {%- block relbar2 %}{% endblock %} {% block header %} {{ super() }} {% if pagename == 'index' %}
{% endif %} {% endblock %} {%- block footer %} {% if pagename == 'index' %}
{% endif %} {%- endblock %} tinydb-3.15.2/docs/_themes/flask/page.html000066400000000000000000000010061357647250700203660ustar00rootroot00000000000000{%- extends "basic/page.html" %} {% block body %} {{ super() }} {%- if prev or next and pagename != 'index' %}

{%- if prev %} « {{ prev.title }} {% if next %}|{% endif %} {%- endif %} {%- if next %} {{ next.title }} » {%- endif %}

{%- endif %} {% endblock %} tinydb-3.15.2/docs/_themes/flask/relations.html000066400000000000000000000007651357647250700214650ustar00rootroot00000000000000

Navigation

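The ``docs/_themes/README`` above describes how the bundled Flask theme is wired into ``conf.py``. As a hedged illustration (this is not a file from the repository), the settings it lists would be collected in ``conf.py`` roughly as shown below; the commented ``html_theme_options`` block is an assumption about how the optional ``flask_small`` settings would be passed, and its values are placeholders.

.. code-block:: python

    # Sketch of the conf.py additions described in docs/_themes/README (illustrative only).
    import os
    import sys

    sys.path.append(os.path.abspath('_themes'))
    html_theme_path = ['_themes']
    html_theme = 'flask'

    # Optional, for the 'flask_small' theme only (placeholder values):
    # html_theme_options = {
    #     'index_logo': 'logo.png',
    #     'index_logo_height': '120px',
    #     'github_fork': 'msiemens/tinydb',
    # }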
tinydb-3.15.2/docs/_themes/flask/static/000077500000000000000000000000001357647250700200565ustar00rootroot00000000000000tinydb-3.15.2/docs/_themes/flask/static/flasky.css_t000066400000000000000000000225521357647250700224120ustar00rootroot00000000000000/* * flasky.css_t * ~~~~~~~~~~~~ * * :copyright: Copyright 2010 by Armin Ronacher. * :license: Flask Design License, see LICENSE for details. */ {% set page_width = '940px' %} {% set sidebar_width = '220px' %} {% set font_family = "'Open Sans', sans-serif" %} {% set monospace_font_family = "'Source Code Pro', 'Consolas', 'Menlo', 'Deja Vu Sans Mono', 'Bitstream Vera Sans Mono', monospace" %} {% set accent_color = '#2d4e84' %}{# original: #004B6B #} {% set accent_color_alternate = '#2069e1' %}{# original: #6D4100 #} @import url(http://fonts.googleapis.com/css?family=Open+Sans:400,700,400italic|Source+Code+Pro); @import url("basic.css"); /* -- page layout ----------------------------------------------------------- */ html { overflow-y: scroll; } body { font-family: {{ font_family }}; font-size: 17px; background-color: white; color: #000; margin: 0; padding: 0; } div.document { width: {{ page_width }}; margin: 30px auto 0 auto; } div.documentwrapper { float: left; width: 100%; } div.bodywrapper { margin: 0 0 0 {{ sidebar_width }}; } div.sphinxsidebar { width: {{ sidebar_width }}; } hr { border: 1px solid #B1B4B6; } div.body { background-color: #ffffff; color: #3E4349; padding: 0 30px 0 30px; } img.floatingflask { padding: 0 0 10px 10px; float: right; } div.footer { width: {{ page_width }}; margin: 20px auto 30px auto; font-size: 14px; color: #888; text-align: right; } div.footer a { color: #888; } div.related { display: none; } div.sphinxsidebar a { color: #444; text-decoration: none; border-bottom: 1px dotted #999; } div.sphinxsidebar a:hover { border-bottom: 1px solid #999; } div.sphinxsidebar { font-size: 14px; line-height: 1.5; } div.sphinxsidebarwrapper { padding: 18px 10px; } div.sphinxsidebarwrapper p.logo { padding: 0 0 20px 0; margin: 0; text-align: center; } div.sphinxsidebar h3, div.sphinxsidebar h4 { font-family: {{ font_family }}; color: #444; font-size: 24px; font-weight: normal; margin: 0 0 5px 0; padding: 0; } div.sphinxsidebar h4 { font-size: 20px; } div.sphinxsidebar h3 a { color: #444; } div.sphinxsidebar p.logo a, div.sphinxsidebar h3 a, div.sphinxsidebar p.logo a:hover, div.sphinxsidebar h3 a:hover { border: none; } div.sphinxsidebar p { color: #555; margin: 10px 0; } div.sphinxsidebar ul { margin: 10px 0; padding: 0; color: #000; } div.sphinxsidebar input { border: 1px solid #ccc; font-family: {{ font_family }}; font-size: 1em; } /* -- body styles ----------------------------------------------------------- */ a { color: {{ accent_color }}; text-decoration: underline; } a:hover { color: {{ accent_color_alternate }}; text-decoration: underline; } div.body h1, div.body h2, div.body h3, div.body h4, div.body h5, div.body h6 { font-family: {{ font_family }}; font-weight: normal; margin: 30px 0px 10px 0px; padding: 0; } {% if theme_index_logo %} div.indexwrapper h1 { text-indent: -999999px; background: url({{ theme_index_logo }}) no-repeat center center; height: {{ theme_index_logo_height }}; } {% endif %} div.body h1 { margin-top: 0; padding-top: 0; font-size: 240%; } div.body h2 { font-size: 180%; } div.body h3 { font-size: 150%; } div.body h4 { font-size: 130%; } div.body h5 { font-size: 100%; } div.body h6 { font-size: 100%; } a.headerlink { color: #ddd; padding: 0 4px; text-decoration: none; } a.headerlink:hover { color: 
#444; background: #eaeaea; } div.body p, div.body dd, div.body li { line-height: 1.4em; } div.admonition { background: #fafafa; margin: 20px -30px; padding: 10px 30px; border-top: 1px solid #ccc; border-bottom: 1px solid #ccc; } div.admonition tt.xref, div.admonition a tt { border-bottom: 1px solid #fafafa; } dd div.admonition { margin-left: -60px; padding-left: 60px; } div.admonition p.admonition-title { font-family: {{ font_family }}; font-weight: normal; font-size: 24px; margin: 0 0 10px 0; padding: 0; line-height: 1; } div.admonition p.last { margin-bottom: 0; } div.highlight { background-color: white; } dt:target, .highlight { background: #FAF3E8; } div.note { background-color: #eee; border: 1px solid #ccc; } div.seealso { background-color: #ffc; border: 1px solid #ff6; } div.topic { background-color: #eee; } p.admonition-title { display: inline; } p.admonition-title:after { content: ":"; } pre, tt { font-family: {{ monospace_font_family }}; font-size: 0.9em; } img.screenshot { } tt.descname, tt.descclassname { font-size: 0.95em; } tt.descname { padding-right: 0.08em; } img.screenshot { -moz-box-shadow: 2px 2px 4px #eee; -webkit-box-shadow: 2px 2px 4px #eee; box-shadow: 2px 2px 4px #eee; } table.docutils { border: 1px solid #888; -moz-box-shadow: 2px 2px 4px #eee; -webkit-box-shadow: 2px 2px 4px #eee; box-shadow: 2px 2px 4px #eee; } table.docutils td, table.docutils th { border: 1px solid #888; padding: 0.25em 0.7em; } table.field-list, table.footnote { border: none; -moz-box-shadow: none; -webkit-box-shadow: none; box-shadow: none; } table.footnote { margin: 15px 0; width: 100%; border: 1px solid #eee; background: #fdfdfd; font-size: 0.9em; } table.footnote + table.footnote { margin-top: -15px; border-top: none; } table.field-list th { padding: 0 0.8em 0 0; } table.field-list td { padding: 0; } table.footnote td.label { width: 0px; padding: 0.3em 0 0.3em 0.5em; } table.footnote td { padding: 0.3em 0.5em; } dl { margin: 0; padding: 0; } dl dd { margin-left: 30px; } blockquote { margin: 0 0 0 30px; padding: 0; } ul, ol { margin: 10px 0 10px 30px; padding: 0; } pre { background: #eee; padding: 7px 30px; margin: 15px -30px; line-height: 1.3em; } dl pre, blockquote pre, li pre { margin-left: -60px; padding-left: 60px; } dl dl pre { margin-left: -90px; padding-left: 90px; } tt { background-color: #ecf0f3; color: #222; /* padding: 1px 2px; */ } tt.xref, a tt { background-color: #FBFBFB; border-bottom: 1px solid white; } a.reference { text-decoration: none; border-bottom: 1px dotted {{ accent_color }}; } a.reference:hover { border-bottom: 1px solid {{ accent_color_alternate }}; } a.footnote-reference { text-decoration: none; font-size: 0.7em; vertical-align: top; border-bottom: 1px dotted {{ accent_color }}; } a.footnote-reference:hover { border-bottom: 1px solid {{ accent_color_alternate }}; } a:hover tt { background: #EEE; } @media screen and (max-width: 870px) { div.sphinxsidebar { display: none; } div.document { width: 100%; } div.documentwrapper { margin-left: 0; margin-top: 0; margin-right: 0; margin-bottom: 0; } div.bodywrapper { margin-top: 0; margin-right: 0; margin-bottom: 0; margin-left: 0; } ul { margin-left: 0; } .document { width: auto; } .footer { width: auto; } .bodywrapper { margin: 0; } .footer { width: auto; } .github { display: none; } } @media screen and (max-width: 875px) { body { margin: 0; padding: 20px 30px; } div.documentwrapper { float: none; background: white; } div.sphinxsidebar { display: block; float: none; width: 102.5%; margin: 50px -30px -20px -30px; 
padding: 10px 20px; background: #333; color: white; } div.sphinxsidebar h3, div.sphinxsidebar h4, div.sphinxsidebar p, div.sphinxsidebar h3 a { color: white; } div.sphinxsidebar a { color: #aaa; } div.sphinxsidebar p.logo { display: none; } div.document { width: 100%; margin: 0; } div.related { display: block; margin: 0; padding: 10px 0 20px 0; } div.related ul, div.related ul li { margin: 0; padding: 0; } div.footer { display: none; } div.bodywrapper { margin: 0; } div.body { min-height: 0; padding: 0; } .rtd_doc_footer { display: none; } .document { width: auto; } .footer { width: auto; } .footer { width: auto; } .github { display: none; } } /* scrollbars */ ::-webkit-scrollbar { width: 6px; height: 6px; } ::-webkit-scrollbar-button:start:decrement, ::-webkit-scrollbar-button:end:increment { display: block; height: 10px; } ::-webkit-scrollbar-button:vertical:increment { background-color: #fff; } ::-webkit-scrollbar-track-piece { background-color: #eee; -webkit-border-radius: 3px; } ::-webkit-scrollbar-thumb:vertical { height: 50px; background-color: #ccc; -webkit-border-radius: 3px; } ::-webkit-scrollbar-thumb:horizontal { width: 50px; background-color: #ccc; -webkit-border-radius: 3px; } /* misc. */ .revsys-inline { display: none!important; } .admonition.warning { background-color: #F5CDCD; border-color: #7B1B1B; } tinydb-3.15.2/docs/_themes/flask/theme.conf000066400000000000000000000001411357647250700205340ustar00rootroot00000000000000[theme] inherit = basic stylesheet = flasky.css pygments_style = flask_theme_support.FlaskyStyle tinydb-3.15.2/docs/_themes/flask_theme_support.py000066400000000000000000000114131357647250700221170ustar00rootroot00000000000000# flasky extensions. flasky pygments style based on tango style from pygments.style import Style from pygments.token import Keyword, Name, Comment, String, Error, \ Number, Operator, Generic, Whitespace, Punctuation, Other, Literal class FlaskyStyle(Style): background_color = "#f8f8f8" default_style = "" styles = { # No corresponding class for the following: #Text: "", # class: '' Whitespace: "underline #f8f8f8", # class: 'w' Error: "#a40000 border:#ef2929", # class: 'err' Other: "#000000", # class 'x' Comment: "italic #8f5902", # class: 'c' Comment.Preproc: "noitalic", # class: 'cp' Keyword: "bold #004461", # class: 'k' Keyword.Constant: "bold #004461", # class: 'kc' Keyword.Declaration: "bold #004461", # class: 'kd' Keyword.Namespace: "bold #004461", # class: 'kn' Keyword.Pseudo: "bold #004461", # class: 'kp' Keyword.Reserved: "bold #004461", # class: 'kr' Keyword.Type: "bold #004461", # class: 'kt' Operator: "#582800", # class: 'o' Operator.Word: "bold #004461", # class: 'ow' - like keywords Punctuation: "bold #000000", # class: 'p' # because special names such as Name.Class, Name.Function, etc. # are not recognized as such later in the parsing, we choose them # to look the same as ordinary variables. 
Name: "#000000", # class: 'n' Name.Attribute: "#c4a000", # class: 'na' - to be revised Name.Builtin: "#004461", # class: 'nb' Name.Builtin.Pseudo: "#3465a4", # class: 'bp' Name.Class: "#000000", # class: 'nc' - to be revised Name.Constant: "#000000", # class: 'no' - to be revised Name.Decorator: "#888", # class: 'nd' - to be revised Name.Entity: "#ce5c00", # class: 'ni' Name.Exception: "bold #cc0000", # class: 'ne' Name.Function: "#000000", # class: 'nf' Name.Property: "#000000", # class: 'py' Name.Label: "#f57900", # class: 'nl' Name.Namespace: "#000000", # class: 'nn' - to be revised Name.Other: "#000000", # class: 'nx' Name.Tag: "bold #004461", # class: 'nt' - like a keyword Name.Variable: "#000000", # class: 'nv' - to be revised Name.Variable.Class: "#000000", # class: 'vc' - to be revised Name.Variable.Global: "#000000", # class: 'vg' - to be revised Name.Variable.Instance: "#000000", # class: 'vi' - to be revised Number: "#990000", # class: 'm' Literal: "#000000", # class: 'l' Literal.Date: "#000000", # class: 'ld' String: "#4e9a06", # class: 's' String.Backtick: "#4e9a06", # class: 'sb' String.Char: "#4e9a06", # class: 'sc' String.Doc: "italic #8f5902", # class: 'sd' - like a comment String.Double: "#4e9a06", # class: 's2' String.Escape: "#4e9a06", # class: 'se' String.Heredoc: "#4e9a06", # class: 'sh' String.Interpol: "#4e9a06", # class: 'si' String.Other: "#4e9a06", # class: 'sx' String.Regex: "#4e9a06", # class: 'sr' String.Single: "#4e9a06", # class: 's1' String.Symbol: "#4e9a06", # class: 'ss' Generic: "#000000", # class: 'g' Generic.Deleted: "#a40000", # class: 'gd' Generic.Emph: "italic #000000", # class: 'ge' Generic.Error: "#ef2929", # class: 'gr' Generic.Heading: "bold #000080", # class: 'gh' Generic.Inserted: "#00A000", # class: 'gi' Generic.Output: "#888", # class: 'go' Generic.Prompt: "#745334", # class: 'gp' Generic.Strong: "bold #000000", # class: 'gs' Generic.Subheading: "bold #800080", # class: 'gu' Generic.Traceback: "bold #a40000", # class: 'gt' } tinydb-3.15.2/docs/api.rst000066400000000000000000000044221357647250700153500ustar00rootroot00000000000000API Documentation ================= ``tinydb.database`` ------------------- .. autoclass:: tinydb.database.TinyDB :members: :special-members: :exclude-members: __dict__, __weakref__ :member-order: bysource .. _table_api: .. autoclass:: tinydb.database.Table :members: :special-members: :exclude-members: __dict__, __weakref__ :member-order: bysource .. autoclass:: tinydb.database.Document :members: :special-members: :exclude-members: __dict__, __weakref__ :member-order: bysource .. py:attribute:: doc_id The document's id .. autoclass:: tinydb.database.Element ``tinydb.queries`` ------------------ .. autoclass:: tinydb.queries.Query :members: :special-members: :exclude-members: __weakref__ :member-order: bysource ``tinydb.storage`` ------------------ .. automodule:: tinydb.storages :members: JSONStorage, MemoryStorage :special-members: :exclude-members: __weakref__ .. class:: Storage The abstract base class for all Storages. A Storage (de)serializes the current state of the database and stores it in some place (memory, file on disk, ...). .. method:: read() Read the last stored state. .. method:: write(data) Write the current state of the database to the storage. .. method:: close() Optional: Close open file handles, etc. ``tinydb.middlewares`` ---------------------- .. automodule:: tinydb.middlewares :members: CachingMiddleware :special-members: :exclude-members: __weakref__ .. 
class:: Middleware The base class for all Middlewares. Middlewares hook into the read/write process of TinyDB allowing you to extend the behaviour by adding caching, logging, ... If ``read()`` or ``write()`` are not overloaded, they will be forwarded directly to the storage instance. .. attribute:: storage :type: :class:`.Storage` Access to the underlying storage instance. .. method:: read() Read the last stored state. .. method:: write(data) Write the current state of the database to the storage. .. method:: close() Optional: Close open file handles, etc. tinydb-3.15.2/docs/changelog.rst000066400000000000000000000344761357647250700165420ustar00rootroot00000000000000Changelog ========= Version Numbering ^^^^^^^^^^^^^^^^^ TinyDB follows the SemVer versioning guidelines. For more information, see `semver.org `_ unreleased ^^^^^^^^^^ - *nothing yet* v3.15.2 (2019-12-18) ^^^^^^^^^^^^^^^^^^^^ - Fix: handle ``Query().search`` and ``Query().matches`` with non-string values correctly (see `a forum post by foprel `_). v3.15.1 (2019-10-26) ^^^^^^^^^^^^^^^^^^^^ - Internal change: fix missing values handling for ``LRUCache`` v3.15.0 (2019-10-12) ^^^^^^^^^^^^^^^^^^^^ - Feature: allow setting the parameters of TinyDB's default table (see `issue 278 `_) v3.14.2 (2019-09-13) ^^^^^^^^^^^^^^^^^^^^ - Internal change: support correct iteration for ``LRUCache`` objects v3.14.1 (2019-07-03) ^^^^^^^^^^^^^^^^^^^^ - Internal change: fix Query class to permit subclass creation (see `pull request 270 `_) v3.14.0 (2019-06-18) ^^^^^^^^^^^^^^^^^^^^ - Change: support for ``ujson`` is now deprecated (see `issue 258 `_) v3.13.0 (2019-03-16) ^^^^^^^^^^^^^^^^^^^^ - Feature: direct access to a TinyDB instance's storage (see `issue 258 `_) v3.12.2 (2018-12-12) ^^^^^^^^^^^^^^^^^^^^ - Internal change: convert documents to dicts during insertion (see `pull request 256 `_) - Internal change: use tuple literals instead of tuple class/constructor (see `pull request 247 `_) - Infra: ensure YAML tests are run (see `pull request 252 `_) v3.12.1 (2018-11-09) ^^^^^^^^^^^^^^^^^^^^ - Fix: Don't break when searching the same query multiple times (see `pull request 249 `_) - Internal change: allow ``collections.abc.Mutable`` as valid document types (see `pull request 245 `_) v3.12.0 (2018-11-06) ^^^^^^^^^^^^^^^^^^^^ - Feature: Add encoding option to ``JSONStorage`` (see `pull request 238 `_) - Internal change: allow ``collections.abc.Mutable`` as valid document types (see `pull request 245 `_) v3.11.1 (2018-09-13) ^^^^^^^^^^^^^^^^^^^^ - Bugfix: Make path queries (``db.search(where('key))``) work again (see `issue 232 `_) - Improvement: Add custom ``repr`` representations for main classes (see `pull request 229 `_) v3.11.0 (2018-08-20) ^^^^^^^^^^^^^^^^^^^^ - **Drop official support for Python 3.3**. Python 3.3 has reached its official End Of Life as of September 29, 2017. 
It will probably continue to work, but will not be tested against (`issue 217 `_) - Feature: Allow extending TinyDB with a custom storage proxy class (see `pull request 224 `_) - Bugfix: Return list of document IDs for upsert when creating a new document (see `issue 223 `_) v3.10.0 (2018-07-21) ^^^^^^^^^^^^^^^^^^^^ - Feature: Add support for regex flags (see `pull request 216 `_) v3.9.0 (2018-04-24) ^^^^^^^^^^^^^^^^^^^ - Feature: Allow setting a table class for single table only (see `issue 197 `_) - Internal change: call fsync after flushing ``JSONStorage`` (see `issue 208 `_) v3.8.1 (2018-03-26) ^^^^^^^^^^^^^^^^^^^ - Bugfix: Don't install tests as a package anymore (see `pull request #195 `_) v3.8.0 (2018-03-01) ^^^^^^^^^^^^^^^^^^^ - Feature: Allow disabling the query cache with ``db.table(name, cache_size=0)`` (see `pull request #187 `_) - Feature: Add ``db.write_back(docs)`` for replacing documents (see `pull request #184 `_) v3.7.0 (2017-11-11) ^^^^^^^^^^^^^^^^^^^ - Feature: ``one_of`` for checking if a value is contained in a list (see `issue 164 `_) - Feature: Upsert (insert if document doesn't exist, otherwise update; see https://forum.m-siemens.de/d/30-primary-key-well-sort-of) - Internal change: don't read from storage twice during initialization (see https://forum.m-siemens.de/d/28-reads-the-whole-data-file-twice) v3.6.0 (2017-10-05) ^^^^^^^^^^^^^^^^^^^ - Allow updating all documents using ``db.update(fields)`` (see `issue #157 `_). - Rename elements to documents. Document IDs now available with ``doc.doc_id``, using ``doc.eid`` is now deprecated (see `pull request #158 `_) v3.5.0 (2017-08-30) ^^^^^^^^^^^^^^^^^^^ - Expose the table name via ``table.name`` (see `issue #147 `_). - Allow better subclassing of the ``TinyDB`` class (see `pull request #150 `_). v3.4.1 (2017-08-23) ^^^^^^^^^^^^^^^^^^^ - Expose TinyDB version via ``import tinyb; tinydb.__version__`` (see `issue #148 `_). v3.4.0 (2017-08-08) ^^^^^^^^^^^^^^^^^^^ - Add new update operations: ``add(key, value)``, ``substract(key, value)``, and ``set(key, value)`` (see `pull request #145 `_). v3.3.1 (2017-06-27) ^^^^^^^^^^^^^^^^^^^ - Use relative imports to allow vendoring TinyDB in other packages (see `pull request #142 `_). v3.3.0 (2017-06-05) ^^^^^^^^^^^^^^^^^^^ - Allow iterating over a database or table yielding all documents (see `pull request #139 `_). v3.2.3 (2017-04-22) ^^^^^^^^^^^^^^^^^^^ - Fix bug with accidental modifications to the query cache when modifying the list of search results (see `issue #132 `_). v3.2.2 (2017-01-16) ^^^^^^^^^^^^^^^^^^^ - Fix the ``Query`` constructor to prevent wrong usage (see `issue #117 `_). v3.2.1 (2016-06-29) ^^^^^^^^^^^^^^^^^^^ - Fix a bug with queries on documents that have a ``path`` key (see `pull request #107 `_). - Don't write to the database file needlessly when opening the database (see `pull request #104 `_). v3.2.0 (2016-04-25) ^^^^^^^^^^^^^^^^^^^ - Add a way to specify the default table name via :ref:`default_table ` (see `pull request #98 `_). - Add ``db.purge_table(name)`` to remove a single table (see `pull request #100 `_). - Along the way: celebrating 100 issues and pull requests! Thanks everyone for every single contribution! - Extend API documentation (see `issue #96 `_). v3.1.3 (2016-02-14) ^^^^^^^^^^^^^^^^^^^ - Fix a bug when using unhashable documents (lists, dicts) with ``Query.any`` or ``Query.all`` queries (see `a forum post by karibul `_). 
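Several of the changelog entries above name query features without showing them; as a brief, hedged illustration, the sketch below combines two of them — ``upsert`` and ``one_of`` from v3.7.0 — using invented field names and data, so it is an example rather than code taken from the project.

.. code-block:: python

    # Illustration of two features listed above (v3.7.0): upsert and Query.one_of.
    # The database path, field names and values are made up for this example.
    from tinydb import TinyDB, Query

    db = TinyDB('example.json')
    User = Query()

    # Insert the document if no match exists, otherwise update the match:
    db.upsert({'name': 'John', 'logged_in': True}, User.name == 'John')

    # Match documents whose 'name' value is contained in the given list:
    db.search(User.name.one_of(['John', 'Bob']))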
v3.1.2 (2016-01-30) ^^^^^^^^^^^^^^^^^^^ - Fix a bug when using unhashable documents (lists, dicts) with ``Query.any`` or ``Query.all`` queries (see `a forum post by karibul `_). v3.1.1 (2016-01-23) ^^^^^^^^^^^^^^^^^^^ - Inserting a dictionary with data that is not JSON serializable doesn't lead to corrupt files anymore (see `issue #89 `_). - Fix a bug in the LRU cache that may lead to an invalid query cache (see `issue #87 `_). v3.1.0 (2015-12-31) ^^^^^^^^^^^^^^^^^^^ - ``db.update(...)`` and ``db.remove(...)`` now return affected document IDs (see `issue #83 `_). - Inserting an invalid document (i.e. not a ``dict``) now raises an error instead of corrupting the database (see `issue #74 `_). v3.0.0 (2015-11-13) ^^^^^^^^^^^^^^^^^^^ - Overhauled Query model: - ``where('...').contains('...')`` has been renamed to ``where('...').search('...')``. - Support for ORM-like usage: ``User = Query(); db.search(User.name == 'John')``. - ``where('foo')`` is an alias for ``Query().foo``. - ``where('foo').has('bar')`` is replaced by either ``where('foo').bar`` or ``Query().foo.bar``. - In case the key is not a valid Python identifier, array notation can be used: ``where('a.b.c')`` is now ``Query()['a.b.c']``. - Checking for the existence of a key has to be done explicitly: ``where('foo').exists()``. - Migrations from v1 to v2 have been removed. - ``SmartCacheTable`` has been moved to `msiemens/tinydb-smartcache`_. - Serialization has been moved to `msiemens/tinydb-serialization`_. - Empty storages are now expected to return ``None`` instead of raising ``ValueError`` (see `issue #67 `_). .. _msiemens/tinydb-smartcache: https://github.com/msiemens/tinydb-smartcache .. _msiemens/tinydb-serialization: https://github.com/msiemens/tinydb-serialization v2.4.0 (2015-08-14) ^^^^^^^^^^^^^^^^^^^ - Allow custom parameters for custom test functions (see `issue #63 `_ and `pull request #64 `_). v2.3.2 (2015-05-20) ^^^^^^^^^^^^^^^^^^^ - Fix a forgotten debug output in the ``SerializationMiddleware`` (see `issue #55 `_). - Fix an "ignored exception" warning when using the ``CachingMiddleware`` (see `pull request #54 `_) - Fix a problem with symlinks when checking out TinyDB on OSX Yosemite (see `issue #52 `_). v2.3.1 (2015-04-30) ^^^^^^^^^^^^^^^^^^^ - Hopefully fix a problem with using TinyDB as a dependency in a ``setup.py`` script (see `issue #51 `_). v2.3.0 (2015-04-08) ^^^^^^^^^^^^^^^^^^^ - Added support for custom serialization.
That way, you can teach TinyDB to store ``datetime`` objects in a JSON file :) (see `issue #48 `_ and `pull request #50 `_) - Fixed a performance regression when searching became slower with every search (see `issue #49 `_) - Internal code has been cleaned up v2.2.2 (2015-02-12) ^^^^^^^^^^^^^^^^^^^ - Fixed a data loss when using ``CachingMiddleware`` together with ``JSONStorage`` (see `issue #47 `_) v2.2.1 (2015-01-09) ^^^^^^^^^^^^^^^^^^^ - Fixed handling of IDs with the JSON backend that converted integers to strings (see `issue #45 `_) v2.2.0 (2014-11-10) ^^^^^^^^^^^^^^^^^^^ - Extended ``any`` and ``all`` queries to take lists as conditions (see `pull request #38 `_) - Fixed an ``decode error`` when installing TinyDB in a non-UTF-8 environment (see `pull request #37 `_) - Fixed some issues with ``CachingMiddleware`` in combination with ``JSONStorage`` (see `pull request #39 `_) v2.1.0 (2014-10-14) ^^^^^^^^^^^^^^^^^^^ - Added ``where(...).contains(regex)`` (see `issue #32 `_) - Fixed a bug that corrupted data after reopening a database (see `issue #34 `_) v2.0.1 (2014-09-22) ^^^^^^^^^^^^^^^^^^^ - Fixed handling of Unicode data in Python 2 (see `issue #28 `_). v2.0.0 (2014-09-05) ^^^^^^^^^^^^^^^^^^^ :ref:`Upgrade Notes ` .. warning:: TinyDB changed the way data is stored. You may need to migrate your databases to the new scheme. Check out the :ref:`Upgrade Notes ` for details. - The syntax ``query in db`` has been removed, use ``db.contains`` instead. - The ``ConcurrencyMiddleware`` has been removed due to a insecure implementation (see `issue #18 `_). Consider :ref:`tinyrecord` instead. - Better support for working with :ref:`Document IDs `. - Added support for `nested comparisons `_. - Added ``all`` and ``any`` `comparisons on lists `_. - Added optional :`_. - The query cache is now a :ref:`fixed size LRU cache `. v1.4.0 (2014-07-22) ^^^^^^^^^^^^^^^^^^^ - Added ``insert_multiple`` function (see `issue #8 `_). v1.3.0 (2014-07-02) ^^^^^^^^^^^^^^^^^^^ - Fixed `bug #7 `_: IDs not unique. - Extended the API: ``db.count(where(...))`` and ``db.contains(where(...))``. - The syntax ``query in db`` is now **deprecated** and replaced by ``db.contains``. v1.2.0 (2014-06-19) ^^^^^^^^^^^^^^^^^^^ - Added ``update`` method (see `issue #6 `_). v1.1.1 (2014-06-14) ^^^^^^^^^^^^^^^^^^^ - Merged `PR #5 `_: Fix minor documentation typos and style issues. v1.1.0 (2014-05-06) ^^^^^^^^^^^^^^^^^^^ - Improved the docs and fixed some typos. - Refactored some internal code. - Fixed a bug with multiple ``TinyDB?`` instances. v1.0.1 (2014-04-26) ^^^^^^^^^^^^^^^^^^^ - Fixed a bug in ``JSONStorage`` that broke the database when removing entries. v1.0.0 (2013-07-20) ^^^^^^^^^^^^^^^^^^^ - First official release – consider TinyDB stable now. tinydb-3.15.2/docs/conf.py000066400000000000000000000213321357647250700153430ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # TinyDB documentation build configuration file, created by # sphinx-quickstart on Sat Jul 13 20:14:55 2013. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. 
#sys.path.insert(0, os.path.abspath('.')) # -- General configuration ----------------------------------------------------- # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['sphinx.ext.autodoc', 'sphinx.ext.coverage', 'sphinx.ext.viewcode', 'sphinx.ext.intersphinx', 'sphinx.ext.todo', 'sphinx.ext.extlinks'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'TinyDB' copyright = u'2016, Markus Siemens' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. import pkg_resources try: release = pkg_resources.get_distribution('tinydb').version except pkg_resources.DistributionNotFound: print 'To build the documentation, The distribution information of TinyDB' print 'Has to be available. Either install the package into your' print 'development environment or run "setup.py develop" to setup the' print 'metadata. A virtualenv is recommended!' sys.exit(1) del pkg_resources if 'dev' in release: release = release.split('dev')[0] + 'dev' version = '.'.join(release.split('.')[:2]) # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. #html_theme = 'default' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. 
#html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. html_sidebars = { 'index': ['sidebarlogo.html', 'links.html', 'searchbox.html'], '**': ['sidebarlogo.html', 'localtoc.html', 'links.html', 'searchbox.html'] } # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. html_show_sourcelink = False # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'TinyDBdoc' # -- Options for LaTeX output -------------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). latex_documents = [ ('index', 'TinyDB.tex', u'TinyDB Documentation', u'Markus Siemens', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output -------------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'tinydb', u'TinyDB Documentation', [u'Markus Siemens'], 1) ] # If true, show URL addresses after external links. 
#man_show_urls = False # -- Options for Texinfo output ------------------------------------------------ # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'TinyDB', u'TinyDB Documentation', u'Markus Siemens', 'TinyDB', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False extlinks = {'issue': ('https://https://github.com/msiemens/tinydb/issues/%s', 'issue ')} sys.path.append(os.path.abspath('_themes')) html_theme_path = ['_themes'] html_theme = 'flask' todo_include_todos = True tinydb-3.15.2/docs/contribute.rst000066400000000000000000000034541357647250700167610ustar00rootroot00000000000000Contribution Guidelines ####################### Whether reporting bugs, discussing improvements and new ideas or writing extensions: Contributions to TinyDB are welcome! Here's how to get started: 1. Check for open issues or open a fresh issue to start a discussion around a feature idea or a bug 2. Fork `the repository `_ on Github, create a new branch off the `master` branch and start making your changes (known as `GitHub Flow `_) 3. Write a test which shows that the bug was fixed or that the feature works as expected 4. Send a pull request and bug the maintainer until it gets merged and published :) Philosophy of TinyDB ******************** TinyDB aims to be simple and fun to use. Therefore two key values are simplicity and elegance of interfaces and code. These values will contradict each other from time to time. In these cases , try using as little magic as possible. In any case don't forget documenting code that isn't clear at first glance. Code Conventions **************** In general the TinyDB source should always follow `PEP 8 `_. Exceptions are allowed in well justified and documented cases. However we make a small exception concerning docstrings: When using multiline docstrings, keep the opening and closing triple quotes on their own lines and add an empty line after it. .. code-block:: python def some_function(): """ Documentation ... """ # implementation ... Version Numbers *************** TinyDB follows the `SemVer versioning guidelines `_. This implies that backwards incompatible changes in the API will increment the major version. So think twice before making such changes. tinydb-3.15.2/docs/extend.rst000066400000000000000000000164111357647250700160670ustar00rootroot00000000000000How to Extend TinyDB ==================== There are three main ways to extend TinyDB and modify its behaviour: 1. custom storage, 2. custom middleware, and 3. custom table classes. Let's look at them in this order. Write Custom Storage ---------------------- First, we have support for custom storage. By default TinyDB comes with an in-memory storage mechanism and a JSON file storage mechanism. But of course you can add your own. Let's look how you could add a `YAML `_ storage using `PyYAML `_: .. 
code-block:: python import yaml from tinydb.database import Document from tinydb.storages import Storage def represent_doc(dumper, data): # Represent `Document` objects as their dict's string representation # which PyYAML understands return dumper.represent_data(dict(data)) yaml.add_representer(Document, represent_doc) class YAMLStorage(Storage): def __init__(self, filename): # (1) self.filename = filename def read(self): with open(self.filename) as handle: try: data = yaml.safe_load(handle.read()) # (2) return data except yaml.YAMLError: return None # (3) def write(self, data): with open(self.filename, 'w') as handle: yaml.dump(data, handle) def close(self): # (4) pass There are some things we should look at more closely: 1. The constructor will receive all arguments passed to TinyDB when creating the database instance (except ``storage`` which TinyDB itself consumes). In other words calling ``TinyDB('something', storage=YAMLStorage)`` will pass ``'something'`` as an argument to ``YAMLStorage``. 2. We use ``yaml.safe_load`` as recommended by the `PyYAML documentation `_ when processing data from a potentially untrusted source. 3. If the storage is uninitialized, TinyDB expects the storage to return ``None`` so it can do any internal initialization that is necessary. 4. If your storage needs any cleanup (like closing file handles) before an instance is destroyed, you can put it in the ``close()`` method. To make sure this cleanup actually runs, you'll either have to call ``db.close()`` on your ``TinyDB`` instance or use it as a context manager, like this: .. code-block:: python with TinyDB('db.yml', storage=YAMLStorage) as db: # ... Finally, using the YAML storage is very straightforward: .. code-block:: python db = TinyDB('db.yml', storage=YAMLStorage) # ... Write Custom Middleware ------------------------- Sometimes you don't want to write a new storage module but rather modify the behaviour of an existing one. As an example we'll build middleware that filters out any empty items. Because middleware acts as a wrapper around a storage, it needs a ``read()`` and a ``write(data)`` method. In addition, it can access the underlying storage via ``self.storage``. Before we start implementing we should look at the structure of the data that the middleware receives. Here's what the data that goes through the middleware looks like: .. code-block:: python { '_default': { 1: {'key': 'value'}, 2: {'key': 'value'}, # other items }, # other tables } Thus, we'll need two nested loops: 1. Process every table 2. Process every item Now let's implement that: .. code-block:: python from tinydb import TinyDB from tinydb.middlewares import Middleware class RemoveEmptyItemsMiddleware(Middleware): def __init__(self, storage_cls=TinyDB.DEFAULT_STORAGE): # Any middleware *has* to call the super constructor # with storage_cls super(RemoveEmptyItemsMiddleware, self).__init__(storage_cls) def read(self): data = self.storage.read() for table_name in data: table = data[table_name] # Iterate over a copy of the keys so we can safely # delete items while looping for doc_id in list(table): item = table[doc_id] if item == {}: del table[doc_id] return data def write(self, data): for table_name in data: table = data[table_name] for doc_id in list(table): item = table[doc_id] if item == {}: del table[doc_id] self.storage.write(data) def close(self): self.storage.close() Two remarks: 1. You have to use the ``super(...)`` call as shown in the example. To run your own initialization, add it below the ``super(...)`` call. 2. This is an example for middleware, not an example for clean code. Don't use it as shown here without at least refactoring the loops into a separate method. To wrap storage with this new middleware, we use it like this: ..
code-block:: python db = TinyDB(storage=RemoveEmptyItemsMiddleware(SomeStorageClass)) Here ``SomeStorageClass`` should be replaced with the storage you want to use. If you leave it empty, the default storage will be used (which is the ``JSONStorage``). Creating a Custom Table Classes ------------------------------- Custom storage and middleware are useful if you want to modify the way TinyDB stores its data. But there are cases where you want to modify how TinyDB itself behaves. For that use case TinyDB supports custom table classes. Internally TinyDB creates a ``Table`` instance for every table that is used. You can overwrite which class is used by setting ``TinyDB.table_class`` before creating a ``TinyDB`` instance. This class has to support the :ref:`Table API `. The best way to accomplish that is to subclass it: .. code-block:: python from tinydb import TinyDB from tinydb.database import Table class YourTableClass(Table): pass # Modify original methods as needed TinyDB.table_class = YourTableClass For an more advanced example, see the source of the `tinydb-smartcache `_ extension. Creating a Custom Storage Proxy Classes --------------------------------------- .. warning:: This extension requires knowledge of TinyDB internals. Use it if you understand how TinyDB works in detail. Another way to modify TinyDB's behavior is to create a custom storage proxy class. Internally, TinyDB uses a proxy class to allow tables to access a storage object. The proxy makes sure the table only accesses its own table data and doesn't accidentally modify other table's data. In this class you can modify how a table can read and write from a storage instance. Also, the proxy class has a method called ``_new_document`` which creates a new document object. If you want to replace it with a different document class, you can do it right here. .. code-block:: python from tinydb import TinyDB from tinydb.database import Table, StorageProxy, Document from tinydb.storages import MemoryStorage class YourStorageProxy(StorageProxy): def _new_document(self, key, val): # Modify document object creation doc_id = int(key) return Document(val, doc_id) def read(self): return {} # Modify reading def write(self, data): pass # Modify writing TinyDB.storage_proxy_class = YourStorageProxy # Or: TinyDB(storage=..., storage_proxy_class=YourStorageProxy)tinydb-3.15.2/docs/extensions.rst000066400000000000000000000051121357647250700167730ustar00rootroot00000000000000Extensions ========== Here are some extensions that might be useful to you: ``tinyindex`` ************* | **Repo:** https://github.com/eugene-eeo/tinyindex | **Status:** *experimental* | **Description:** Document indexing for TinyDB. Basically ensures deterministic (as long as there aren't any changes to the table) yielding of documents. ``tinymongo`` ************* | **Repo:** https://github.com/schapman1974/tinymongo | **Status:** *experimental* | **Description:** A simple wrapper that allows to use TinyDB as a flat file drop-in replacement for MongoDB. ``TinyMP`` ************* | **Repo:** https://github.com/alshapton/TinyMP | **Status:** *stable* | **Description:** A MessagePack-based storage extension to tinydb using http://msgpack.org .. _tinyrecord: ``tinyrecord`` ************** | **Repo:** https://github.com/eugene-eeo/tinyrecord | **Status:** *stable* | **Description:** Tinyrecord is a library which implements experimental atomic transaction support for the TinyDB NoSQL database. 
It uses a record-first then execute architecture which allows us to minimize the time that we are within a thread lock. ``tinydb-appengine`` ******************** | **Repo:** https://github.com/imalento/tinydb-appengine | **Status:** *stable* | **Description:** ``tinydb-appengine`` provides TinyDB storage for App Engine. You can use JSON readonly. ``tinydb-serialization`` ************************ | **Repo:** https://github.com/msiemens/tinydb-serialization | **Status:** *stable* | **Description:** ``tinydb-serialization`` provides serialization for objects that TinyDB otherwise couldn't handle. ``tinydb-smartcache`` ********************* | **Repo:** https://github.com/msiemens/tinydb-smartcache | **Status:** *stable* | **Description:** ``tinydb-smartcache`` provides a smart query cache for TinyDB. It updates the query cache when inserting/removing/updating documents so the cache doesn't get invalidated. It's useful if you perform lots of queries while the data changes only little. ``aiotinydb`` ************* | **Repo:** https://github.com/ASMfreaK/aiotinydb | **Status:** *stable* | **Description:** asyncio compatibility shim for TinyDB. Enables usage of TinyDB in asyncio-aware contexts without slow syncronous IO. tinydb-3.15.2/docs/getting-started.rst000066400000000000000000000114041357647250700177020ustar00rootroot00000000000000:tocdepth: 3 Getting Started =============== Installing TinyDB ----------------- To install TinyDB from PyPI, run:: $ pip install tinydb You can also grab the latest development version from GitHub_. After downloading and unpacking it, you can install it using:: $ python setup.py install Basic Usage ----------- Let's cover the basics before going more into detail. We'll start by setting up a TinyDB database: >>> from tinydb import TinyDB, Query >>> db = TinyDB('db.json') You now have a TinyDB database that stores its data in ``db.json``. What about inserting some data? TinyDB expects the data to be Python ``dict``\s: >>> db.insert({'type': 'apple', 'count': 7}) >>> db.insert({'type': 'peach', 'count': 3}) .. note:: The ``insert`` method returns the inserted document's ID. Read more about it here: :ref:`document_ids`. Now you can get all documents stored in the database by running: >>> db.all() [{'count': 7, 'type': 'apple'}, {'count': 3, 'type': 'peach'}] You can also iter over stored documents: >>> for item in db: >>> print(item) {'count': 7, 'type': 'apple'} {'count': 3, 'type': 'peach'} Of course you'll also want to search for specific documents. 
Let's try: >>> Fruit = Query() >>> db.search(Fruit.type == 'peach') [{'count': 3, 'type': 'peach'}] >>> db.search(Fruit.count > 5) [{'count': 7, 'type': 'apple'}] Next we'll update the ``count`` field of the apples: >>> db.update({'count': 10}, Fruit.type == 'apple') >>> db.all() [{'count': 10, 'type': 'apple'}, {'count': 3, 'type': 'peach'}] In the same manner you can also remove documents: >>> db.remove(Fruit.count < 5) >>> db.all() [{'count': 10, 'type': 'apple'}] And of course you can throw away all data to start with an empty database: >>> db.purge() >>> db.all() [] Recap ***** Before we dive deeper, let's recapitulate the basics: +-------------------------------+---------------------------------------------------------------+ | **Inserting** | +-------------------------------+---------------------------------------------------------------+ | ``db.insert(...)`` | Insert a document | +-------------------------------+---------------------------------------------------------------+ | **Getting data** | +-------------------------------+---------------------------------------------------------------+ | ``db.all()`` | Get all documents | +-------------------------------+---------------------------------------------------------------+ | ``iter(db)`` | Iter over all documents | +-------------------------------+---------------------------------------------------------------+ | ``db.search(query)`` | Get a list of documents matching the query | +-------------------------------+---------------------------------------------------------------+ | **Updating** | +-------------------------------+---------------------------------------------------------------+ | ``db.update(fields, query)`` | Update all documents matching the query to contain ``fields`` | +-------------------------------+---------------------------------------------------------------+ | **Removing** | +-------------------------------+---------------------------------------------------------------+ | ``db.remove(query)`` | Remove all documents matching the query | +-------------------------------+---------------------------------------------------------------+ | ``db.purge()`` | Purge all documents | +-------------------------------+---------------------------------------------------------------+ | **Querying** | +-------------------------------+---------------------------------------------------------------+ | ``Query()`` | Create a new query object | +-------------------------------+---------------------------------------------------------------+ | ``Query().field == 2`` | Match any document that has a key ``field`` with value | | | ``== 2`` (also possible: ``!=`` ``>`` ``>=`` ``<`` ``<=``) | +-------------------------------+---------------------------------------------------------------+ .. References .. _GitHub: http://github.com/msiemens/tinydb/ tinydb-3.15.2/docs/index.rst000066400000000000000000000013431357647250700157050ustar00rootroot00000000000000Welcome to TinyDB! ================== Welcome to TinyDB, your tiny, document oriented database optimized for your happiness :) >>> from tinydb import TinyDB, Query >>> db = TinyDB('path/to/db.json') >>> User = Query() >>> db.insert({'name': 'John', 'age': 22}) >>> db.search(User.name == 'John') [{'name': 'John', 'age': 22}] User's Guide ------------ .. toctree:: :maxdepth: 2 intro getting-started usage Extending TinyDB ---------------- .. toctree:: :maxdepth: 2 Extending TinyDB TinyDB Extensions API Reference ------------- .. 
toctree:: :maxdepth: 2 api Additional Notes ---------------- .. toctree:: :maxdepth: 2 contribute changelog Upgrade Notes tinydb-3.15.2/docs/intro.rst000066400000000000000000000040421357647250700157300ustar00rootroot00000000000000Introduction ============ Great that you've taken time to check out the TinyDB docs! Before we begin looking at TinyDB itself, let's take some time to see whether you should use TinyDB. Why Use TinyDB? --------------- - **tiny:** The current source code has 1200 lines of code (with about 40% documentation) and 1000 lines tests. For comparison: Buzhug_ has about 2500 lines of code (w/o tests), CodernityDB_ has about 7000 lines of code (w/o tests). - **document oriented:** Like MongoDB_, you can store any document (represented as ``dict``) in TinyDB. - **optimized for your happiness:** TinyDB is designed to be simple and fun to use by providing a simple and clean API. - **written in pure Python:** TinyDB neither needs an external server (as e.g. `PyMongo `_) nor any dependencies from PyPI. - **works on Python 2.6 + 2.7 and 3.3 – 3.7 and PyPy:** TinyDB works on all modern versions of Python and PyPy. - **powerfully extensible:** You can easily extend TinyDB by writing new storages or modify the behaviour of storages with Middlewares. - **100% test coverage:** No explanation needed. In short: If you need a simple database with a clean API that just works without lots of configuration, TinyDB might be the right choice for you. Why **Not** Use TinyDB? ----------------------- - You need **advanced features** like: - access from multiple processes or threads, - creating indexes for tables, - an HTTP server, - managing relationships between tables or similar, - `ACID guarantees `_. - You are really concerned about **performance** and need a high speed database. To put it plainly: If you need advanced features or high performance, TinyDB is the wrong database for you – consider using databases like SQLite_, Buzhug_, CodernityDB_ or MongoDB_. .. References .. _Buzhug: https://buzhug.sourceforge.net/ .. _CodernityDB: http://labs.codernity.com/codernitydb/ .. _MongoDB: https://mongodb.org/ .. _SQLite: https://www.sqlite.org/ tinydb-3.15.2/docs/make.bat000066400000000000000000000144731357647250700154610ustar00rootroot00000000000000@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. 
pseudoxml to make pseudoxml-XML files for display purposes echo. linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\TinyDB.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\TinyDB.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. 
goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. goto end ) :end tinydb-3.15.2/docs/upgrade.rst000066400000000000000000000032571357647250700162330ustar00rootroot00000000000000Upgrading to Newer Releases =========================== Version 3.0 ----------- .. _upgrade_v3_0: Breaking API Changes ^^^^^^^^^^^^^^^^^^^^ - Querying (see `Issue #62 `_): - ``where('...').contains('...')`` has been renamed to ``where('...').search('...')``. - ``where('foo').has('bar')`` is replaced by either ``where('foo').bar`` or ``Query().foo.bar``. - In case the key is not a valid Python identifier, array notation can be used: ``where('a.b.c')`` is now ``Query()['a.b.c']``. - Checking for the existence of a key has to be done explicitely: ``where('foo').exists()``. - ``SmartCacheTable`` has been moved to `msiemens/tinydb-smartcache`_. - Serialization has been moved to `msiemens/tinydb-serialization`_. - Empty storages are now expected to return ``None`` instead of raising ``ValueError`` (see `Issue #67 `_). .. _msiemens/tinydb-smartcache: https://github.com/msiemens/tinydb-smartcache .. _msiemens/tinydb-serialization: https://github.com/msiemens/tinydb-serialization .. _upgrade_v2_0: Version 2.0 ----------- Breaking API Changes ^^^^^^^^^^^^^^^^^^^^ - The syntax ``query in db`` is not supported any more. Use ``db.contains(...)`` instead. - The ``ConcurrencyMiddleware`` has been removed due to a insecure implementation (see `Issue #18 `_). Consider :ref:`tinyrecord` instead. Apart from that the API remains compatible to v1.4 and prior. For migration from v1 to v2, check out the `v2.0 documentation `_ tinydb-3.15.2/docs/usage.rst000066400000000000000000000567421357647250700157170ustar00rootroot00000000000000:tocdepth: 3 .. toctree:: :maxdepth: 2 Advanced Usage ============== Remarks on Storage ------------------ Before we dive deeper into the usage of TinyDB, we should stop for a moment and discuss how TinyDB stores data. To convert your data to a format that is writable to disk TinyDB uses the `Python JSON `_ module by default. It's great when only simple data types are involved but it cannot handle more complex data types like custom classes. 
On Python 2 it also converts strings to Unicode strings upon reading (described `here `_). If that causes problems, you can write :doc:`your own storage `, that uses a more powerful (but also slower) library like `pickle `_ or `PyYAML `_. .. hint:: Opening multiple TinyDB instances on the same data (e.g. with the ``JSONStorage``) may result in unexpected behavior due to query caching. See query_caching_ on how to disable the query cache. Queries ------- With that out of the way, let's start with TinyDB's rich set of queries. There are two main ways to construct queries. The first one resembles the syntax of popular ORM tools: >>> from tinydb import Query >>> User = Query() >>> db.search(User.name == 'John') As you can see, we first create a new Query object and then use it to specify which fields to check. Searching for nested fields is just as easy: >>> db.search(User.birthday.year == 1990) Not all fields can be accessed this way if the field name is not a valid Python identifier. In this case, you can switch to array indexing notation: >>> # This would be invalid Python syntax: >>> db.search(User.country-code == 'foo') >>> # Use this instead: >>> db.search(User['country-code'] == 'foo') The second, traditional way of constructing queries is as follows: >>> from tinydb import where >>> db.search(where('field') == 'value') Using ``where('field')`` is a shorthand for the following code: >>> db.search(Query()['field'] == 'value') Accessing nested fields with this syntax can be achieved like this: >>> db.search(where('birthday').year == 1900) >>> db.search(where('birthday')['year'] == 1900) Advanced queries ................ In the :doc:`getting-started` you've learned about the basic comparisons (``==``, ``<``, ``>``, ...). In addition to these TinyDB supports the following queries: >>> # Existence of a field: >>> db.search(User.name.exists()) >>> # Regex: >>> # Full item has to match the regex: >>> db.search(User.name.matches('[aZ]*')) >>> # Case insensitive search for 'John': >>> import re >>> db.search(User.name.matches('John', flags=re.IGNORECASE)) >>> # Any part of the item has to match the regex: >>> db.search(User.name.search('b+')) >>> # Custom test: >>> test_func = lambda s: s == 'John' >>> db.search(User.name.test(test_func)) >>> # Custom test with parameters: >>> def test_func(val, m, n): >>> return m <= val <= n >>> db.search(User.age.test(test_func, 0, 21)) >>> db.search(User.age.test(test_func, 21, 99)) When a field contains a list, you also can use the ``any`` and ``all`` methods. There are two ways to use them: with lists of values and with nested queries. Let's start with the first one. Assuming we have a user object with a groups list like this: >>> db.insert({'name': 'user1', 'groups': ['user']}) >>> db.insert({'name': 'user2', 'groups': ['admin', 'user']}) >>> db.insert({'name': 'user3', 'groups': ['sudo', 'user']}) Now we can use the following queries: >>> # User's groups include at least one value from ['admin', 'sudo'] >>> db.search(User.groups.any(['admin', 'sudo'])) [{'name': 'user2', 'groups': ['admin', 'user']}, {'name': 'user3', 'groups': ['sudo', 'user']}] >>> >>> # User's groups include all values from ['admin', 'user'] >>> db.search(User.groups.all(['admin', 'user'])) [{'name': 'user2', 'groups': ['admin', 'user']}] In some cases you may want to have more complex ``any``/``all`` queries. This is where nested queries come in as helpful. 
Let's set up a table like this: >>> Group = Query() >>> Permission = Query() >>> groups = db.table('groups') >>> groups.insert({ 'name': 'user', 'permissions': [{'type': 'read'}]}) >>> groups.insert({ 'name': 'sudo', 'permissions': [{'type': 'read'}, {'type': 'sudo'}]}) >>> groups.insert({ 'name': 'admin', 'permissions': [{'type': 'read'}, {'type': 'write'}, {'type': 'sudo'}]}) Now let's search this table using nested ``any``/``all`` queries: >>> # Group has a permission with type 'read' >>> groups.search(Group.permissions.any(Permission.type == 'read')) [{'name': 'user', 'permissions': [{'type': 'read'}]}, {'name': 'sudo', 'permissions': [{'type': 'read'}, {'type': 'sudo'}]}, {'name': 'admin', 'permissions': [{'type': 'read'}, {'type': 'write'}, {'type': 'sudo'}]}] >>> # Group has ONLY permission 'read' >>> groups.search(Group.permissions.all(Permission.type == 'read')) [{'name': 'user', 'permissions': [{'type': 'read'}]}] As you can see, ``any`` tests if there is *at least one* document matching the query while ``all`` ensures *all* documents match the query. The opposite operation, checking if a single item is contained in a list, is also possible using ``one_of``: >>> db.search(User.name.one_of(['jane', 'john'])) Query modifiers ............... TinyDB also allows you to use logical operations to modify and combine queries: >>> # Negate a query: >>> db.search(~ (User.name == 'John')) >>> # Logical AND: >>> db.search((User.name == 'John') & (User.age <= 30)) >>> # Logical OR: >>> db.search((User.name == 'John') | (User.name == 'Bob')) .. note:: When using ``&`` or ``|``, make sure you wrap the conditions on both sides with parentheses or Python will mess up the comparison. Also, when using negation (``~``) you'll have to wrap the query you want to negate in parentheses. The reason for these requirements is that Python's binary operators that are used for query modifiers have a higher operator precedence than comparison operators. Simply put, ``~ User.name == 'John'`` is parsed by Python as ``(~User.name) == 'John'`` instead of ``~(User.name == 'John')``. See also the Python `docs on operator precedence `_ for details. Recap ..... Let's review the query operations we've learned: +-------------------------------------+-------------------------------------------------------------+ | **Queries** | +-------------------------------------+-------------------------------------------------------------+ | ``Query().field.exists()`` | Match any document where a field called ``field`` exists | +-------------------------------------+-------------------------------------------------------------+ | ``Query().field.matches(regex)`` | Match any document with the whole field matching the | | | regular expression | +-------------------------------------+-------------------------------------------------------------+ | ``Query().field.search(regex)`` | Match any document with a substring of the field matching | | | the regular expression | +-------------------------------------+-------------------------------------------------------------+ | ``Query().field.test(func, *args)`` | Matches any document for which the function returns | | | ``True`` | +-------------------------------------+-------------------------------------------------------------+ | ``Query().field.all(query | list)`` | If given a query, matches all documents where all documents | | | in the list ``field`` match the query. 
| | | If given a list, matches all documents where all documents | | | in the list ``field`` are a member of the given list | +-------------------------------------+-------------------------------------------------------------+ | ``Query().field.any(query | list)`` | If given a query, matches all documents where at least one | | | document in the list ``field`` match the query. | | | If given a list, matches all documents where at least one | | | documents in the list ``field`` are a member of the given | | | list | +-------------------------------------+-------------------------------------------------------------+ | ``Query().field.one_of(list)`` | Match if the field is contained in the list | +-------------------------------------+-------------------------------------------------------------+ | **Logical operations on queries** | +-------------------------------------+-------------------------------------------------------------+ | ``~ (query)`` | Match documents that don't match the query | +-------------------------------------+-------------------------------------------------------------+ | ``(query1) & (query2)`` | Match documents that match both queries | +-------------------------------------+-------------------------------------------------------------+ | ``(query1) | (query2)`` | Match documents that match at least one of the queries | +-------------------------------------+-------------------------------------------------------------+ Handling Data ------------- Next, let's look at some more ways to insert, update and retrieve data from your database. Inserting data .............. As already described you can insert a document using ``db.insert(...)``. In case you want to insert multiple documents, you can use ``db.insert_multiple(...)``: >>> db.insert_multiple([ {'name': 'John', 'age': 22}, {'name': 'John', 'age': 37}]) >>> db.insert_multiple({'int': 1, 'value': i} for i in range(2)) Updating data ............. Sometimes you want to update all documents in your database. In this case, you can leave out the ``query`` argument: >>> db.update({'foo': 'bar'}) When passing a dict to ``db.update(fields, query)``, it only allows you to update a document by adding or overwriting its values. But sometimes you may need to e.g. remove one field or increment its value. In that case you can pass a function instead of ``fields``: >>> from tinydb.operations import delete >>> db.update(delete('key1'), User.name == 'John') This will remove the key ``key1`` from all matching documents. TinyDB comes with these operations: - ``delete(key)``: delete a key from the document - ``increment(key)``: increment the value of a key - ``decrement(key)``: decrement the value of a key - ``add(key, value)``: add ``value`` to the value of a key (also works for strings) - ``subtract(key, value)``: subtract ``value`` from the value of a key - ``set(key, value)``: set ``key`` to ``value`` Of course you also can write your own operations: >>> def your_operation(your_arguments): ... def transform(doc): ... # do something with the document ... # ... ... return transform ... >>> db.update(your_operation(arguments), query) Data access and modification ---------------------------- Upserting data .............. In some cases you'll need a mix of both ``update`` and ``insert``: ``upsert``. This operation is provided a document and a query. If it finds any documents matching the query, they will be updated with the data from the provided document. 
On the other hand, if no matching document is found, it inserts the provided document into the table: >>> db.upsert({'name': 'John', 'logged-in': True}, User.name == 'John') This will update all users with the name John to have ``logged-in`` set to ``True``. If no matching user is found, a new document is inserted with both the name set and the ``logged-in`` flag. Retrieving data ............... There are several ways to retrieve data from your database. For instance you can get the number of stored documents: >>> len(db) 3 .. hint:: This will return the number of documents in the default table (see the notes on the :ref:`default table `). Then of course you can use ``db.search(...)`` as described in the :doc:`getting-started` section. But sometimes you want to get only one matching document. Instead of using >>> try: ... result = db.search(User.name == 'John')[0] ... except IndexError: ... pass you can use ``db.get(...)``: >>> db.get(User.name == 'John') {'name': 'John', 'age': 22} >>> db.get(User.name == 'Bobby') None .. caution:: If multiple documents match the query, an arbitrary one of them will be returned! Often you don't want to search for documents but only want to know whether they are stored in the database. In this case ``db.contains(...)`` is your friend: >>> db.contains(User.name == 'John') In a similar manner you can look up the number of documents matching a query: >>> db.count(User.name == 'John') 2 Replacing data .............. Another occasionally useful operation is to replace a list of documents. If you have a list of documents with IDs (see document_ids_), you can pass them to ``db.write_back(list)``: >>> docs = db.search(User.name == 'John') >>> docs [{'name': 'John', 'age': 12}, {'name': 'John', 'age': 44}] >>> for doc in docs: ... doc['name'] = 'Jane' >>> db.write_back(docs) # Will update the documents we retrieved >>> db.search(User.name == 'John') [] >>> db.search(User.name == 'Jane') [{'name': 'Jane', 'age': 12}, {'name': 'Jane', 'age': 44}] Alternatively you can pass a list of documents along with a list of document IDs to achieve the same goal. In this case, the length of the document list and the ID list has to be equal.
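For example, a minimal sketch of this variant (assuming the table already contains documents with IDs 1 and 2, and assuming the ID list is passed via the ``doc_ids`` keyword argument):

>>> docs = [{'name': 'Jane', 'age': 12}, {'name': 'Jane', 'age': 44}]
>>> db.write_back(docs, doc_ids=[1, 2])  # document i replaces the document stored under doc_ids[i]

Here the first document in the list replaces whatever was stored under ID 1 and the second one replaces the document under ID 2, pairing documents and IDs by position.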
Recap ^^^^^ Let's summarize the ways to handle data: +-------------------------------+---------------------------------------------------------------+ | **Inserting data** | +-------------------------------+---------------------------------------------------------------+ | ``db.insert_multiple(...)`` | Insert multiple documents | +-------------------------------+---------------------------------------------------------------+ | **Updating data** | +-------------------------------+---------------------------------------------------------------+ | ``db.update(operation, ...)`` | Update all matching documents with a special operation | +-------------------------------+---------------------------------------------------------------+ | ``db.write_back(docs)`` | Replace all documents with the updated versions | +-------------------------------+---------------------------------------------------------------+ | **Retrieving data** | +-------------------------------+---------------------------------------------------------------+ | ``len(db)`` | Get the number of documents in the database | +-------------------------------+---------------------------------------------------------------+ | ``db.get(query)`` | Get one document matching the query | +-------------------------------+---------------------------------------------------------------+ | ``db.contains(query)`` | Check if the database contains a matching document | +-------------------------------+---------------------------------------------------------------+ | ``db.count(query)`` | Get the number of matching documents | +-------------------------------+---------------------------------------------------------------+ .. note:: This was a new feature in v3.6.0 .. _document_ids: Using Document IDs ------------------ Internally TinyDB associates an ID with every document you insert. It's returned after inserting a document: >>> db.insert({'name': 'John', 'age': 22}) 3 >>> db.insert_multiple([{...}, {...}, {...}]) [4, 5, 6] In addition you can get the ID of already inserted documents using ``document.doc_id``. This works both with ``get`` and ``all``: >>> el = db.get(User.name == 'John') >>> el.doc_id 3 >>> el = db.all()[0] >>> el.doc_id 12 Different TinyDB methods also work with IDs, namely: ``update``, ``remove``, ``contains`` and ``get``. The first two also return a list of affected IDs. >>> db.update({'value': 2}, doc_ids=[1, 2]) >>> db.contains(doc_ids=[1]) True >>> db.remove(doc_ids=[1, 2]) >>> db.get(doc_id=3) {...} Using ``doc_id`` instead of ``Query()`` again is slightly faster in operation. Recap ..... 
Let's sum up the way TinyDB supports working with IDs: +-------------------------------------+------------------------------------------------------------+ | **Getting a document's ID** | +-------------------------------------+------------------------------------------------------------+ | ``db.insert(...)`` | Returns the inserted document's ID | +-------------------------------------+------------------------------------------------------------+ | ``db.insert_multiple(...)`` | Returns the inserted documents' ID | +-------------------------------------+------------------------------------------------------------+ | ``document.doc_id`` | Get the ID of a document fetched from the db | +-------------------------------------+------------------------------------------------------------+ | **Working with IDs** | +-------------------------------------+------------------------------------------------------------+ | ``db.get(doc_id=...)`` | Get the document with the given ID | +-------------------------------------+------------------------------------------------------------+ | ``db.contains(doc_ids=[...])`` | Check if the db contains documents with one of the given | | | IDs | +-------------------------------------+------------------------------------------------------------+ | ``db.update({...}, doc_ids=[...])`` | Update all documents with the given IDs | +-------------------------------------+------------------------------------------------------------+ | ``db.remove(doc_ids=[...])`` | Remove all documents with the given IDs | +-------------------------------------+------------------------------------------------------------+ Tables ------ TinyDB supports working with multiple tables. They behave just the same as the ``TinyDB`` class. To create and use a table, use ``db.table(name)``. >>> table = db.table('table_name') >>> table.insert({'value': True}) >>> table.all() [{'value': True}] >>> for row in table: >>> print(row) {'value': True} To remove a table from a database, use: >>> db.purge_table('table_name') If on the other hand you want to remove all tables, use the counterpart: >>> db.purge_tables() Finally, you can get a list with the names of all tables in your database: >>> db.tables() {'_default', 'table_name'} .. _default_table: Default Table ............. TinyDB uses a table named ``_default`` as the default table. All operations on the database object (like ``db.insert(...)``) operate on this table. The name of this table can be modified by either passing ``default_table`` to the ``TinyDB`` constructor or by setting the ``DEFAULT_TABLE`` class variable to modify the default table name for all instances: >>> #1: for a single instance only >>> TinyDB(storage=SomeStorage, default_table='my-default') >>> #2: for all instances >>> TinyDB.DEFAULT_TABLE = 'my-default' You also can modify the keyword arguments that are passed to the default table by setting ``TinyDB.DEFAULT_TABLE_KWARGS``. For example, you can disable the query cache for the default table by setting like this: >>> TinyDB.DEFAULT_TABLE_KWARGS = {'cache_size': 0} .. _query_caching: Query Caching ............. TinyDB caches query result for performance. You can optimize the query cache size by passing the ``cache_size`` to the ``table(...)`` function: >>> table = db.table('table_name', cache_size=30) .. hint:: You can set ``cache_size`` to ``None`` to make the cache unlimited in size. Also, you can set ``cache_size`` to 0 to disable it. Storage & Middleware -------------------- Storage Types ............. 
TinyDB comes with two storage types: JSON and in-memory. By default TinyDB stores its data in JSON files so you have to specify the path where it should be stored: >>> from tinydb import TinyDB, where >>> db = TinyDB('path/to/db.json') To use the in-memory storage, use: >>> from tinydb.storages import MemoryStorage >>> db = TinyDB(storage=MemoryStorage) .. hint:: All arguments except for the ``storage`` argument are forwarded to the underlying storage. For the JSON storage you can use this to pass additional keyword arguments to Python's `json.dump(...) `_ method. For example, you can set it to create prettified JSON files like this: >>> db = TinyDB('db.json', sort_keys=True, indent=4, separators=(',', ': ')) To modify the default storage for all ``TinyDB`` instances, set the ``DEFAULT_STORAGE`` class variable: >>> TinyDB.DEFAULT_STORAGE = MemoryStorage In case you need to access the storage instance directly, you can use the ``storage`` property of your TinyDB instance. This may be useful for calling methods directly on the storage or middleware: >>> db = TinyDB(storage=CachingMiddleware(MemoryStorage)) >>> db.storage.flush() Middleware .......... Middleware wraps around an existing storage, allowing you to customize its behaviour. >>> from tinydb.storages import JSONStorage >>> from tinydb.middlewares import CachingMiddleware >>> db = TinyDB('/path/to/db.json', storage=CachingMiddleware(JSONStorage)) .. hint:: You can nest middleware: >>> db = TinyDB('/path/to/db.json', storage=FirstMiddleware(SecondMiddleware(JSONStorage))) CachingMiddleware ^^^^^^^^^^^^^^^^^ The ``CachingMiddleware`` improves speed by reducing disk I/O. It caches all read operations and writes data to disk after a configured number of write operations. To make sure that all data is safely written when closing the database, use one of these ways: .. code-block:: python # Using a context manager: with database as db: # Your operations .. code-block:: python # Using the close function db.close() What's next ----------- Congratulations, you've made it through the user guide! Now go and build something awesome or dive deeper into TinyDB with these resources: - Want to learn how to customize TinyDB (storages, middlewares) and what extensions exist? Check out :doc:`extend` and :doc:`extensions`. - Want to study the API in detail? Read :doc:`api`. - Interested in contributing to TinyDB's development? Go on to the :doc:`contribute` guide.
tinydb-3.15.2/setup.cfg000066400000000000000000000001761357647250700147400ustar00rootroot00000000000000[aliases] test=pytest [tool:pytest] addopts=--verbose --cov-append --cov-report term --cov tinydb [bdist_wheel] universal=1 tinydb-3.15.2/setup.py000066400000000000000000000040771357647250700146350ustar00rootroot00000000000000# coding=utf-8 from setuptools import setup, find_packages from codecs import open import os def read(fname): path = os.path.join(os.path.dirname(__file__), fname) return open(path, encoding='utf-8').read() # This will set the version string to __version__ exec(read('tinydb/version.py')) setup( name="tinydb", version=__version__, packages=find_packages(exclude=['tests']), # development metadata zip_safe=True, # metadata for upload to PyPI author="Markus Siemens", author_email="markus@m-siemens.de", description="TinyDB is a tiny, document oriented database optimized for " "your happiness :)", license="MIT", keywords="database nosql", url="https://github.com/msiemens/tinydb", project_urls={ 'Documentation': 'http://tinydb.readthedocs.org/', 'Changelog': 'https://tinydb.readthedocs.io/en/latest/changelog.html', 'Extensions': 'https://tinydb.readthedocs.io/en/latest/extensions.html', 'Issues': 'https://github.com/msiemens/tinydb/issues', }, classifiers=[ "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "Intended Audience :: System Administrators", "License :: OSI Approved :: MIT License", "Topic :: Database", "Topic :: Database :: Database Engines/Servers", "Topic :: Utilities", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6", "Programming Language :: Python :: 3.7", "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", "Operating System :: OS Independent" ], tests_require=['pytest-cov', 'pyyaml'], setup_requires=['pytest-runner'], long_description=read('README.rst'), ) tinydb-3.15.2/tests/000077500000000000000000000000001357647250700142555ustar00rootroot00000000000000tinydb-3.15.2/tests/__init__.py000066400000000000000000000000001357647250700163540ustar00rootroot00000000000000tinydb-3.15.2/tests/conftest.py000066400000000000000000000006571357647250700164640ustar00rootroot00000000000000import pytest from tinydb.middlewares import CachingMiddleware from tinydb.storages import MemoryStorage from tinydb import TinyDB @pytest.fixture def db(): db_ = TinyDB(storage=MemoryStorage) db_.purge_tables() db_.insert_multiple({'int': 1, 'char': c} for c in 'abc') return db_ @pytest.fixture def storage(): _storage = CachingMiddleware(MemoryStorage) return _storage() # Initialize MemoryStorage tinydb-3.15.2/tests/test_middlewares.py000066400000000000000000000047421357647250700201750ustar00rootroot00000000000000import os from tinydb import TinyDB from tinydb.middlewares import CachingMiddleware from tinydb.storages import MemoryStorage, JSONStorage if 'xrange' not in dir(__builtins__): # noinspection PyShadowingBuiltins xrange = range # Python 3 support doc = {'none': [None, None], 'int': 42, 'float': 3.1415899999999999, 'list': ['LITE', 'RES_ACID', 'SUS_DEXT'], 'dict': {'hp': 13, 'sp': 5}, 'bool': [True, False, True, False]} def test_caching(storage): # Write contents storage.write(doc) # Verify contents assert doc == storage.read() def 
test_caching_read(): db = TinyDB(storage=CachingMiddleware(MemoryStorage)) assert db.all() == [] def test_caching_write_many(storage): storage.WRITE_CACHE_SIZE = 3 # Storage should be still empty assert storage.memory is None # Write contents for x in xrange(2): storage.write(doc) assert storage.memory is None # Still cached storage.write(doc) # Verify contents: Cache should be emptied and written to storage assert storage.memory def test_caching_flush(storage): # Write contents for _ in range(CachingMiddleware.WRITE_CACHE_SIZE - 1): storage.write(doc) # Not yet flushed... assert storage.memory is None storage.write(doc) # Verify contents: Cache should be emptied and written to storage assert storage.memory def test_caching_flush_manually(storage): # Write contents storage.write(doc) storage.flush() # Verify contents: Cache should be emptied and written to storage assert storage.memory def test_caching_write(storage): # Write contents storage.write(doc) storage.close() # Verify contents: Cache should be emptied and written to storage assert storage.storage.memory def test_nested(): storage = CachingMiddleware(MemoryStorage) storage() # Initialization # Write contents storage.write(doc) # Verify contents assert doc == storage.read() def test_caching_json_write(tmpdir): path = str(tmpdir.join('test.db')) with TinyDB(path, storage=CachingMiddleware(JSONStorage)) as db: db.insert({'key': 'value'}) # Verify database filesize statinfo = os.stat(path) assert statinfo.st_size != 0 # Assert JSON file has been closed assert db._storage._handle.closed del db # Repoen database with TinyDB(path, storage=CachingMiddleware(JSONStorage)) as db: assert db.all() == [{'key': 'value'}] tinydb-3.15.2/tests/test_operations.py000066400000000000000000000017751357647250700200630ustar00rootroot00000000000000from tinydb import where from tinydb.operations import delete, increment, decrement, add, subtract, set def test_delete(db): db.update(delete('int'), where('char') == 'a') assert 'int' not in db.get(where('char') == 'a') def test_add_int(db): db.update(add('int', 5), where('char') == 'a') assert db.get(where('char') == 'a')['int'] == 6 def test_add_str(db): db.update(add('char', 'xyz'), where('char') == 'a') assert db.get(where('char') == 'axyz')['int'] == 1 def test_subtract(db): db.update(subtract('int', 5), where('char') == 'a') assert db.get(where('char') == 'a')['int'] == -4 def test_set(db): db.update(set('char', 'xyz'), where('char') == 'a') assert db.get(where('char') == 'xyz')['int'] == 1 def test_increment(db): db.update(increment('int'), where('char') == 'a') assert db.get(where('char') == 'a')['int'] == 2 def test_decrement(db): db.update(decrement('int'), where('char') == 'a') assert db.get(where('char') == 'a')['int'] == 0 tinydb-3.15.2/tests/test_queries.py000066400000000000000000000250211357647250700173430ustar00rootroot00000000000000import pytest import re from tinydb.queries import Query, where def test_no_path(): with pytest.raises(ValueError): _ = Query() == 2 def test_path_only(): query = Query()['value'] assert query == where('value') assert query({'value': 1}) assert not query({'something': 1}) assert hash(query) assert hash(query) != hash(where('asd')) query = Query()['value']['val'] assert query == where('value')['val'] assert query({'value': {'val': 2}}) assert not query({'value': 1}) assert not query({'value': {'asd': 1}}) assert not query({'something': 1}) assert hash(query) assert hash(query) != hash(where('asd')) def test_path_and(): query = Query()['value'] & (Query()['value'] == 
5) assert query({'value': 5}) assert not query({'value': 10}) assert not query({'something': 1}) assert hash(query) assert hash(query) != hash(where('value')) def test_eq(): query = Query().value == 1 assert query({'value': 1}) assert not query({'value': 2}) assert hash(query) query = Query().value == [0, 1] assert query({'value': [0, 1]}) assert not query({'value': [0, 1, 2]}) assert hash(query) def test_ne(): query = Query().value != 1 assert query({'value': 0}) assert query({'value': 2}) assert not query({'value': 1}) assert hash(query) query = Query().value != [0, 1] assert query({'value': [0, 1, 2]}) assert not query({'value': [0, 1]}) assert hash(query) def test_lt(): query = Query().value < 1 assert query({'value': 0}) assert not query({'value': 1}) assert not query({'value': 2}) assert hash(query) def test_le(): query = Query().value <= 1 assert query({'value': 0}) assert query({'value': 1}) assert not query({'value': 2}) assert hash(query) def test_gt(): query = Query().value > 1 assert query({'value': 2}) assert not query({'value': 1}) assert hash(query) def test_ge(): query = Query().value >= 1 assert query({'value': 2}) assert query({'value': 1}) assert not query({'value': 0}) assert hash(query) def test_or(): query = ( (Query().val1 == 1) | (Query().val2 == 2) ) assert query({'val1': 1}) assert query({'val2': 2}) assert query({'val1': 1, 'val2': 2}) assert not query({'val1': '', 'val2': ''}) assert hash(query) def test_and(): query = ( (Query().val1 == 1) & (Query().val2 == 2) ) assert query({'val1': 1, 'val2': 2}) assert not query({'val1': 1}) assert not query({'val2': 2}) assert not query({'val1': '', 'val2': ''}) assert hash(query) def test_not(): query = ~ (Query().val1 == 1) assert query({'val1': 5, 'val2': 2}) assert not query({'val1': 1, 'val2': 2}) assert hash(query) query = ( (~ (Query().val1 == 1)) & (Query().val2 == 2) ) assert query({'val1': '', 'val2': 2}) assert query({'val2': 2}) assert not query({'val1': 1, 'val2': 2}) assert not query({'val1': 1}) assert not query({'val1': '', 'val2': ''}) assert hash(query) def test_has_key(): query = Query().val3.exists() assert query({'val3': 1}) assert not query({'val1': 1, 'val2': 2}) assert hash(query) def test_regex(): query = Query().val.matches(r'\d{2}\.') assert query({'val': '42.'}) assert not query({'val': '44'}) assert not query({'val': 'ab.'}) assert not query({'': None}) assert hash(query) query = Query().val.search(r'\d+') assert query({'val': 'ab3'}) assert not query({'val': 'abc'}) assert not query({'val': ''}) assert not query({'val': 12}) assert not query({'': None}) assert hash(query) query = Query().val.search(r'JOHN', flags=re.IGNORECASE) assert query({'val': 'john'}) assert query({'val': 'xJohNx'}) assert not query({'val': 'JOH'}) assert not query({'': None}) assert not query({'': True}) assert hash(query) def test_custom(): def test(value): return value == 42 query = Query().val.test(test) assert query({'val': 42}) assert not query({'val': 40}) assert not query({'val': '44'}) assert not query({'': None}) assert hash(query) def in_list(value, l): return value in l query = Query().val.test(in_list, tuple([25, 35])) assert not query({'val': 20}) assert query({'val': 25}) assert not query({'val': 30}) assert query({'val': 35}) assert not query({'val': 36}) assert hash(query) def test_custom_with_params(): def test(value, minimum, maximum): return minimum <= value <= maximum query = Query().val.test(test, 1, 10) assert query({'val': 5}) assert not query({'val': 0}) assert not query({'val': 11}) assert not 
query({'': None}) assert hash(query) def test_any(): query = Query().followers.any(Query().name == 'don') assert query({'followers': [{'name': 'don'}, {'name': 'john'}]}) assert not query({'followers': 1}) assert not query({}) assert hash(query) query = Query().followers.any(Query().num.matches('\\d+')) assert query({'followers': [{'num': '12'}, {'num': 'abc'}]}) assert not query({'followers': [{'num': 'abc'}]}) assert hash(query) query = Query().followers.any(['don', 'jon']) assert query({'followers': ['don', 'greg', 'bill']}) assert not query({'followers': ['greg', 'bill']}) assert not query({}) assert hash(query) query = Query().followers.any([{'name': 'don'}, {'name': 'john'}]) assert query({'followers': [{'name': 'don'}, {'name': 'greg'}]}) assert not query({'followers': [{'name': 'greg'}]}) assert hash(query) def test_all(): query = Query().followers.all(Query().name == 'don') assert query({'followers': [{'name': 'don'}]}) assert not query({'followers': [{'name': 'don'}, {'name': 'john'}]}) assert hash(query) query = Query().followers.all(Query().num.matches('\\d+')) assert query({'followers': [{'num': '123'}, {'num': '456'}]}) assert not query({'followers': [{'num': '123'}, {'num': 'abc'}]}) assert hash(query) query = Query().followers.all(['don', 'john']) assert query({'followers': ['don', 'john', 'greg']}) assert not query({'followers': ['don', 'greg']}) assert not query({}) assert hash(query) query = Query().followers.all([{'name': 'jane'}, {'name': 'john'}]) assert query({'followers': [{'name': 'john'}, {'name': 'jane'}]}) assert query({'followers': [{'name': 'john'}, {'name': 'jane'}, {'name': 'bob'}]}) assert not query({'followers': [{'name': 'john'}, {'name': 'bob'}]}) assert hash(query) def test_has(): query = Query().key1.key2.exists() str(query) # This used to cause a bug... 
assert query({'key1': {'key2': {'key3': 1}}}) assert query({'key1': {'key2': 1}}) assert not query({'key1': 3}) assert not query({'key1': {'key1': 1}}) assert not query({'key2': {'key1': 1}}) assert hash(query) query = Query().key1.key2 == 1 assert query({'key1': {'key2': 1}}) assert not query({'key1': {'key2': 2}}) assert hash(query) # Nested has: key exists query = Query().key1.key2.key3.exists() assert query({'key1': {'key2': {'key3': 1}}}) # Not a dict assert not query({'key1': 1}) assert not query({'key1': {'key2': 1}}) # Wrong key assert not query({'key1': {'key2': {'key0': 1}}}) assert not query({'key1': {'key0': {'key3': 1}}}) assert not query({'key0': {'key2': {'key3': 1}}}) assert hash(query) # Nested has: check for value query = Query().key1.key2.key3 == 1 assert query({'key1': {'key2': {'key3': 1}}}) assert not query({'key1': {'key2': {'key3': 0}}}) assert hash(query) # Test special methods: regex matches query = Query().key1.value.matches(r'\d+') assert query({'key1': {'value': '123'}}) assert not query({'key2': {'value': '123'}}) assert not query({'key2': {'value': 'abc'}}) assert hash(query) # Test special methods: regex contains query = Query().key1.value.search(r'\d+') assert query({'key1': {'value': 'a2c'}}) assert not query({'key2': {'value': 'a2c'}}) assert not query({'key2': {'value': 'abc'}}) assert hash(query) # Test special methods: nested has and regex matches query = Query().key1.x.y.matches(r'\d+') assert query({'key1': {'x': {'y': '123'}}}) assert not query({'key1': {'x': {'y': 'abc'}}}) assert hash(query) # Test special method: nested has and regex contains query = Query().key1.x.y.search(r'\d+') assert query({'key1': {'x': {'y': 'a2c'}}}) assert not query({'key1': {'x': {'y': 'abc'}}}) assert hash(query) # Test special methods: custom test query = Query().key1.int.test(lambda x: x == 3) assert query({'key1': {'int': 3}}) assert hash(query) def test_one_of(): query = Query().key1.one_of(['value 1', 'value 2']) assert query({'key1': 'value 1'}) assert query({'key1': 'value 2'}) assert not query({'key1': 'value 3'}) def test_hash(): d = { Query().key1 == 2: True, Query().key1.key2.key3.exists(): True, Query().key1.exists() & Query().key2.exists(): True, Query().key1.exists() | Query().key2.exists(): True, } assert (Query().key1 == 2) in d assert (Query().key1.key2.key3.exists()) in d assert (Query()['key1.key2'].key3.exists()) not in d # Commutative property of & and | assert (Query().key1.exists() & Query().key2.exists()) in d assert (Query().key2.exists() & Query().key1.exists()) in d assert (Query().key1.exists() | Query().key2.exists()) in d assert (Query().key2.exists() | Query().key1.exists()) in d def test_orm_usage(): data = {'name': 'John', 'age': {'year': 2000}} User = Query() query1 = User.name == 'John' query2 = User.age.year == 2000 assert query1(data) assert query2(data) def test_repr(): Fruit = Query() assert repr(Fruit) == "Query()" assert repr(Fruit.type == 'peach') == "QueryImpl('==', ('type',), 'peach')" def test_subclass(): # Test that a new query test method in a custom subclass is properly usable class MyQueryClass(Query): def equal_double(self, rhs): return self._generate_test( lambda value: value == rhs*2, ('equal_double', self._path, rhs) ) query = MyQueryClass().val.equal_double('42') assert query({'val': '4242'}) assert not query({'val': '42'}) assert not query({'': None}) assert hash(query) tinydb-3.15.2/tests/test_storages.py000066400000000000000000000135761357647250700175310ustar00rootroot00000000000000# -*- coding: utf-8 -*- import 
os import random import tempfile import json import pytest from tinydb import TinyDB, where from tinydb.database import Document from tinydb.storages import JSONStorage, MemoryStorage, Storage, touch random.seed() try: import ujson as json except ImportError: HAS_UJSON = False else: HAS_UJSON = True doc = {'none': [None, None], 'int': 42, 'float': 3.1415899999999999, 'list': ['LITE', 'RES_ACID', 'SUS_DEXT'], 'dict': {'hp': 13, 'sp': 5}, 'bool': [True, False, True, False]} def test_json(tmpdir): # Write contents path = str(tmpdir.join('test.db')) storage = JSONStorage(path) storage.write(doc) # Verify contents assert doc == storage.read() storage.close() @pytest.mark.skipif(HAS_UJSON, reason="not compatible with ujson") def test_json_kwargs(tmpdir): db_file = tmpdir.join('test.db') db = TinyDB(str(db_file), sort_keys=True, indent=4, separators=(',', ': ')) # Write contents db.insert({'b': 1}) db.insert({'a': 1}) assert db_file.read() == '''{ "_default": { "1": { "b": 1 }, "2": { "a": 1 } } }''' db.close() def test_json_readwrite(tmpdir): """ Regression test for issue #1 """ path = str(tmpdir.join('test.db')) # Create TinyDB instance db = TinyDB(path, storage=JSONStorage) item = {'name': 'A very long entry'} item2 = {'name': 'A short one'} def get(s): return db.get(where('name') == s) db.insert(item) assert get('A very long entry') == item db.remove(where('name') == 'A very long entry') assert get('A very long entry') is None db.insert(item2) assert get('A short one') == item2 db.remove(where('name') == 'A short one') assert get('A short one') is None db.close() def test_create_dirs(): temp_dir = tempfile.gettempdir() while True: dname = os.path.join(temp_dir, str(random.getrandbits(20))) if not os.path.exists(dname): db_dir = dname db_file = os.path.join(db_dir, 'db.json') break with pytest.raises(IOError): JSONStorage(db_file) JSONStorage(db_file, create_dirs=True).close() assert os.path.exists(db_file) # Use create_dirs with already existing directory JSONStorage(db_file, create_dirs=True).close() assert os.path.exists(db_file) os.remove(db_file) os.rmdir(db_dir) def test_json_invalid_directory(): with pytest.raises(IOError): with TinyDB('/this/is/an/invalid/path/db.json', storage=JSONStorage): pass def test_in_memory(): # Write contents storage = MemoryStorage() storage.write(doc) # Verify contents assert doc == storage.read() # Test case for #21 other = MemoryStorage() other.write({}) assert other.read() != storage.read() def test_in_memory_close(): with TinyDB(storage=MemoryStorage) as db: db.insert({}) def test_custom(): # noinspection PyAbstractClass class MyStorage(Storage): pass with pytest.raises(TypeError): MyStorage() def test_read_once(): count = [0] # noinspection PyAbstractClass class MyStorage(Storage): def __init__(self): self.memory = None def read(self): count[0] += 1 return self.memory def write(self, data): self.memory = data def reset_counter(expected=1): assert count[0] == expected count[0] = 0 with TinyDB(storage=MyStorage) as db: reset_counter() db.all() reset_counter() db.insert({'foo': 'bar'}) reset_counter() db.all() reset_counter() def test_custom_with_exception(): class MyStorage(Storage): def read(self): pass def write(self, data): pass def __init__(self): raise ValueError() def close(self): raise RuntimeError() with pytest.raises(ValueError): with TinyDB(storage=MyStorage) as db: pass def test_yaml(tmpdir): """ :type tmpdir: py._path.local.LocalPath """ try: import yaml except ImportError: return pytest.skip('PyYAML not installed') def represent_doc(dumper, 
data): # Represent `Document` objects as their dict's string representation # which PyYAML understands return dumper.represent_data(dict(data)) yaml.add_representer(Document, represent_doc) class YAMLStorage(Storage): def __init__(self, filename): self.filename = filename touch(filename, False) def read(self): with open(self.filename) as handle: data = yaml.safe_load(handle.read()) return data def write(self, data): with open(self.filename, 'w') as handle: yaml.dump(data, handle) def close(self): pass # Write contents path = str(tmpdir.join('test.db')) db = TinyDB(path, storage=YAMLStorage) db.insert(doc) assert db.all() == [doc] db.update({'name': 'foo'}) assert '!' not in tmpdir.join('test.db').read() assert db.contains(where('name') == 'foo') assert len(db) == 1 def test_encoding(tmpdir): japanese_doc = {"Test": u"こんにちは世界"} path = str(tmpdir.join('test.db')) jap_storage = JSONStorage(path, encoding="cp936") # cp936 is used for japanese encodings jap_storage.write(japanese_doc) try: exception = json.decoder.JSONDecodeError except AttributeError: exception = ValueError with pytest.raises(exception): eng_storage = JSONStorage(path, encoding="cp037") # cp037 is used for english encodings eng_storage.read() jap_storage = JSONStorage(path, encoding="cp936") assert japanese_doc == jap_storage.read() tinydb-3.15.2/tests/test_tables.py000066400000000000000000000052431357647250700171440ustar00rootroot00000000000000import pytest import re from tinydb import where def test_tables_list(db): db.table('table1') db.table('table2') assert db.tables() == {'_default', 'table1', 'table2'} def test_one_table(db): table1 = db.table('table1') table1.insert_multiple({'int': 1, 'char': c} for c in 'abc') assert table1.get(where('int') == 1)['char'] == 'a' assert table1.get(where('char') == 'b')['char'] == 'b' def test_multiple_tables(db): table1 = db.table('table1') table2 = db.table('table2') table3 = db.table('table3') table1.insert({'int': 1, 'char': 'a'}) table2.insert({'int': 1, 'char': 'b'}) table3.insert({'int': 1, 'char': 'c'}) assert table1.count(where('char') == 'a') == 1 assert table2.count(where('char') == 'b') == 1 assert table3.count(where('char') == 'c') == 1 db.purge_tables() assert len(table1) == 0 assert len(table2) == 0 assert len(table3) == 0 def test_caching(db): table1 = db.table('table1') table2 = db.table('table1') assert table1 is table2 def test_zero_cache_size(db): table = db.table('table3', cache_size=0) query = where('int') == 1 table.insert({'int': 1}) table.insert({'int': 1}) assert table.count(query) == 2 assert table.count(where('int') == 2) == 0 assert len(table._query_cache) == 0 def test_query_cache_size(db): table = db.table('table3', cache_size=1) query = where('int') == 1 table.insert({'int': 1}) table.insert({'int': 1}) assert table.count(query) == 2 assert table.count(where('int') == 2) == 0 assert len(table._query_cache) == 1 def test_lru_cache(db): # Test integration into TinyDB table = db.table('table3', cache_size=2) query = where('int') == 1 table.search(query) table.search(where('int') == 2) table.search(where('int') == 3) assert query not in table._query_cache table.remove(where('int') == 1) assert not table._query_cache.lru table.search(query) assert len(table._query_cache) == 1 table.clear_cache() assert len(table._query_cache) == 0 def test_table_is_iterable(db): table = db.table('table1') table.insert_multiple({'int': i} for i in range(3)) assert [r for r in table] == table.all() def test_table_name(db): name = 'table3' table = db.table(name) assert name == 
table.name with pytest.raises(AttributeError): table.name = 'foo' def test_table_repr(db): name = 'table4' table = db.table(name) print(repr(table)) assert re.match( r">", repr(table)) tinydb-3.15.2/tests/test_tinydb.py000066400000000000000000000507061357647250700171670ustar00rootroot00000000000000# coding=utf-8 import sys import re import pytest from tinydb import TinyDB, where, Query from tinydb.middlewares import Middleware, CachingMiddleware from tinydb.storages import MemoryStorage try: import ujson as json except ImportError: HAS_UJSON = False else: HAS_UJSON = True def test_purge(db): db.purge() db.insert({}) db.purge() assert len(db) == 0 def test_all(db): db.purge() for i in range(10): db.insert({}) assert len(db.all()) == 10 def test_insert(db): db.purge() db.insert({'int': 1, 'char': 'a'}) assert db.count(where('int') == 1) == 1 db.purge() db.insert({'int': 1, 'char': 'a'}) db.insert({'int': 1, 'char': 'b'}) db.insert({'int': 1, 'char': 'c'}) assert db.count(where('int') == 1) == 3 assert db.count(where('char') == 'a') == 1 def test_insert_ids(db): db.purge() assert db.insert({'int': 1, 'char': 'a'}) == 1 assert db.insert({'int': 1, 'char': 'a'}) == 2 def test_insert_multiple(db): db.purge() assert not db.contains(where('int') == 1) # Insert multiple from list db.insert_multiple([{'int': 1, 'char': 'a'}, {'int': 1, 'char': 'b'}, {'int': 1, 'char': 'c'}]) assert db.count(where('int') == 1) == 3 assert db.count(where('char') == 'a') == 1 # Insert multiple from generator function def generator(): for j in range(10): yield {'int': j} db.purge() db.insert_multiple(generator()) for i in range(10): assert db.count(where('int') == i) == 1 assert db.count(where('int').exists()) == 10 # Insert multiple from inline generator db.purge() db.insert_multiple({'int': i} for i in range(10)) for i in range(10): assert db.count(where('int') == i) == 1 def test_insert_multiple_with_ids(db): db.purge() # Insert multiple from list assert db.insert_multiple([{'int': 1, 'char': 'a'}, {'int': 1, 'char': 'b'}, {'int': 1, 'char': 'c'}]) == [1, 2, 3] def test_insert_invalid_type_raises_error(db): with pytest.raises(ValueError, match='Document is not a Mapping'): db.insert(object()) # object() as an example of a non-mapping-type def test_insert_valid_mapping_type(db): from tinydb.database import Mapping class CustomDocument(Mapping): def __init__(self, data): self.data = data def __getitem__(self, key): return self.data[key] def __iter__(self): return iter(self.data) def __len__(self): return len(self.data) db.purge() db.insert(CustomDocument({'int': 1, 'char': 'a'})) assert db.count(where('int') == 1) == 1 def test_cutom_mapping_type_with_json(tmpdir): from tinydb.database import Mapping class CustomDocument(Mapping): def __init__(self, data): self.data = data def __getitem__(self, key): return self.data[key] def __iter__(self): return iter(self.data) def __len__(self): return len(self.data) # Insert db = TinyDB(str(tmpdir.join('test.db'))) db.purge() db.insert(CustomDocument({'int': 1, 'char': 'a'})) assert db.count(where('int') == 1) == 1 # Insert multiple db.insert_multiple([ CustomDocument({'int': 2, 'char': 'a'}), CustomDocument({'int': 3, 'char': 'a'}) ]) assert db.count(where('int') == 1) == 1 assert db.count(where('int') == 2) == 1 assert db.count(where('int') == 3) == 1 # Write back doc_id = db.get(where('int') == 3).doc_id db.write_back([CustomDocument({'int': 4, 'char': 'a'})], [doc_id]) assert db.count(where('int') == 3) == 0 assert db.count(where('int') == 4) == 1 def test_remove(db): 
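    # `db` is the shared fixture from tests/conftest.py: a MemoryStorage-backed
    # TinyDB pre-filled with {'int': 1, 'char': c} for c in 'abc'.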
db.remove(where('char') == 'b') assert len(db) == 2 assert db.count(where('int') == 1) == 2 def test_remove_all_fails(db): with pytest.raises(RuntimeError): db.remove() def test_remove_multiple(db): db.remove(where('int') == 1) assert len(db) == 0 def test_remove_ids(db): db.remove(doc_ids=[1, 2]) assert len(db) == 1 def test_remove_returns_ids(db): assert db.remove(where('char') == 'b') == [2] def test_update(db): assert len(db) == 3 db.update({'int': 2}, where('char') == 'a') assert db.count(where('int') == 2) == 1 assert db.count(where('int') == 1) == 2 def test_update_all(db): assert db.count(where('int') == 1) == 3 db.update({'newField': True}) assert db.count(where('newField') == True) == 3 def test_update_returns_ids(db): db.purge() assert db.insert({'int': 1, 'char': 'a'}) == 1 assert db.insert({'int': 1, 'char': 'a'}) == 2 assert db.update({'char': 'b'}, where('int') == 1) == [1, 2] def test_update_transform(db): def increment(field): def transform(el): el[field] += 1 return transform def delete(field): def transform(el): del el[field] return transform assert db.count(where('int') == 1) == 3 db.update(increment('int'), where('char') == 'a') db.update(delete('char'), where('char') == 'a') assert db.count(where('int') == 2) == 1 assert db.count(where('char') == 'a') == 0 assert db.count(where('int') == 1) == 2 def test_update_ids(db): db.update({'int': 2}, doc_ids=[1, 2]) assert db.count(where('int') == 2) == 2 def test_write_back(db): docs = db.search(where('int') == 1) for doc in docs: doc['int'] = [1, 2, 3] db.write_back(docs) assert db.count(where('int') == [1, 2, 3]) == 3 def test_write_back_whole_doc(db): docs = db.search(where('int') == 1) doc_ids = [doc.doc_id for doc in docs] for i, doc in enumerate(docs): docs[i] = {'newField': i} db.write_back(docs, doc_ids) assert db.count(where('newField') == 0) == 1 assert db.count(where('newField') == 1) == 1 assert db.count(where('newField') == 2) == 1 def test_write_back_returns_ids(db): db.purge() assert db.insert({'int': 1, 'char': 'a'}) == 1 assert db.insert({'int': 1, 'char': 'a'}) == 2 docs = [{'word': 'hello'}, {'word': 'world'}] assert db.write_back(docs, [1, 2]) == [1, 2] def test_write_back_fails(db): with pytest.raises(ValueError): db.write_back([{'get': 'error'}], [1, 2]) def test_write_back_id_exceed(db): db.purge() db.insert({'int': 1}) with pytest.raises(IndexError): db.write_back([{'get': 'error'}], [2]) def test_write_back_empty_ok(db): db.write_back([]) def test_upsert(db): assert len(db) == 3 # Document existing db.upsert({'int': 5}, where('char') == 'a') assert db.count(where('int') == 5) == 1 # Document missing assert db.upsert({'int': 9, 'char': 'x'}, where('char') == 'x') == [4] assert db.count(where('int') == 9) == 1 def test_search(db): assert not db._query_cache assert len(db.search(where('int') == 1)) == 3 assert len(db._query_cache) == 1 assert len(db.search(where('int') == 1)) == 3 # Query result from cache def test_search_path(db): assert not db._query_cache assert len(db.search(where('int'))) == 3 assert len(db._query_cache) == 1 assert len(db.search(where('asd'))) == 0 assert len(db.search(where('int'))) == 3 # Query result from cache def test_search_no_results_cache(db): assert len(db.search(where('missing'))) == 0 assert len(db.search(where('missing'))) == 0 def test_get(db): item = db.get(where('char') == 'b') assert item['char'] == 'b' def test_get_ids(db): el = db.all()[0] assert db.get(doc_id=el.doc_id) == el assert db.get(doc_id=float('NaN')) is None def test_count(db): assert 
db.count(where('int') == 1) == 3 assert db.count(where('char') == 'd') == 0 def test_contains(db): assert db.contains(where('int') == 1) assert not db.contains(where('int') == 0) def test_contains_ids(db): assert db.contains(doc_ids=[1, 2]) assert not db.contains(doc_ids=[88]) def test_get_idempotent(db): u = db.get(where('int') == 1) z = db.get(where('int') == 1) assert u == z def test_multiple_dbs(): """ Regression test for issue #3 """ db1 = TinyDB(storage=MemoryStorage) db2 = TinyDB(storage=MemoryStorage) db1.insert({'int': 1, 'char': 'a'}) db1.insert({'int': 1, 'char': 'b'}) db1.insert({'int': 1, 'value': 5.0}) db2.insert({'color': 'blue', 'animal': 'turtle'}) assert len(db1) == 3 assert len(db2) == 1 def test_storage_closed_once(): class Storage(object): def __init__(self): self.closed = False def read(self): return {} def write(self, data): pass def close(self): assert not self.closed self.closed = True with TinyDB(storage=Storage) as db: db.close() del db # If db.close() is called during cleanup, the assertion will fail and throw # and exception def test_unique_ids(tmpdir): """ :type tmpdir: py._path.local.LocalPath """ path = str(tmpdir.join('db.json')) # Verify ids are unique when reopening the DB and inserting with TinyDB(path) as _db: _db.insert({'x': 1}) with TinyDB(path) as _db: _db.insert({'x': 1}) with TinyDB(path) as _db: data = _db.all() assert data[0].doc_id != data[1].doc_id # Verify ids stay unique when inserting/removing with TinyDB(path) as _db: _db.purge() _db.insert_multiple({'x': i} for i in range(5)) _db.remove(where('x') == 2) assert len(_db) == 4 ids = [e.doc_id for e in _db.all()] assert len(ids) == len(set(ids)) def test_lastid_after_open(tmpdir): """ Regression test for issue #34 :type tmpdir: py._path.local.LocalPath """ NUM = 100 path = str(tmpdir.join('db.json')) with TinyDB(path) as _db: _db.insert_multiple({'i': i} for i in range(NUM)) with TinyDB(path) as _db: assert _db._last_id == NUM @pytest.mark.skipif(sys.version_info >= (3, 0), reason="requires python2") def test_unicode_memory(db): """ Regression test for issue #28 """ unic_str = 'ß'.decode('utf-8') byte_str = 'ß' db.insert({'value': unic_str}) assert db.contains(where('value') == byte_str) assert db.contains(where('value') == unic_str) db.purge() db.insert({'value': byte_str}) assert db.contains(where('value') == byte_str) assert db.contains(where('value') == unic_str) @pytest.mark.skipif(sys.version_info >= (3, 0), reason="requires python2") def test_unicode_json(tmpdir): """ Regression test for issue #28 """ unic_str1 = 'a'.decode('utf-8') byte_str1 = 'a' unic_str2 = 'ß'.decode('utf-8') byte_str2 = 'ß' path = str(tmpdir.join('db.json')) with TinyDB(path) as _db: _db.purge() _db.insert({'value': byte_str1}) _db.insert({'value': byte_str2}) assert _db.contains(where('value') == byte_str1) assert _db.contains(where('value') == unic_str1) assert _db.contains(where('value') == byte_str2) assert _db.contains(where('value') == unic_str2) with TinyDB(path) as _db: _db.purge() _db.insert({'value': unic_str1}) _db.insert({'value': unic_str2}) assert _db.contains(where('value') == byte_str1) assert _db.contains(where('value') == unic_str1) assert _db.contains(where('value') == byte_str2) assert _db.contains(where('value') == unic_str2) def test_doc_ids_json(tmpdir): """ Regression test for issue #45 """ path = str(tmpdir.join('db.json')) with TinyDB(path) as _db: _db.purge() assert _db.insert({'int': 1, 'char': 'a'}) == 1 assert _db.insert({'int': 1, 'char': 'a'}) == 2 _db.purge() assert 
_db.insert_multiple([{'int': 1, 'char': 'a'}, {'int': 1, 'char': 'b'}, {'int': 1, 'char': 'c'}]) == [1, 2, 3] assert _db.contains(doc_ids=[1, 2]) assert not _db.contains(doc_ids=[88]) _db.update({'int': 2}, doc_ids=[1, 2]) assert _db.count(where('int') == 2) == 2 el = _db.all()[0] assert _db.get(doc_id=el.doc_id) == el assert _db.get(doc_id=float('NaN')) is None _db.remove(doc_ids=[1, 2]) assert len(_db) == 1 def test_insert_string(tmpdir): path = str(tmpdir.join('db.json')) with TinyDB(path) as _db: data = [{'int': 1}, {'int': 2}] _db.insert_multiple(data) with pytest.raises(ValueError): _db.insert([1, 2, 3]) # Fails with pytest.raises(ValueError): _db.insert({'bark'}) # Fails assert data == _db.all() _db.insert({'int': 3}) # Does not fail @pytest.mark.skipif(HAS_UJSON, reason="not compatible with ujson") def test_insert_invalid_dict(tmpdir): path = str(tmpdir.join('db.json')) with TinyDB(path) as _db: data = [{'int': 1}, {'int': 2}] _db.insert_multiple(data) with pytest.raises(TypeError): _db.insert({'int': _db}) # Fails assert data == _db.all() _db.insert({'int': 3}) # Does not fail def test_gc(tmpdir): # See https://github.com/msiemens/tinydb/issues/92 path = str(tmpdir.join('db.json')) db = TinyDB(path) table = db.table('foo') table.insert({'something': 'else'}) table.insert({'int': 13}) assert len(table.search(where('int') == 13)) == 1 assert table.all() == [{'something': 'else'}, {'int': 13}] db.close() def test_non_default_table(): db = TinyDB(storage=MemoryStorage) assert [TinyDB.DEFAULT_TABLE] == list(db.tables()) db = TinyDB(storage=MemoryStorage, default_table='non-default') assert {'non-default'} == db.tables() db.purge_tables() default_table = TinyDB.DEFAULT_TABLE TinyDB.DEFAULT_TABLE = 'non-default' db = TinyDB(storage=MemoryStorage) assert {'non-default'} == db.tables() TinyDB.DEFAULT_TABLE = default_table def test_non_default_table_args(): TinyDB.DEFAULT_TABLE_KWARGS = {'cache_size': 0} db = TinyDB(storage=MemoryStorage) default_table = db.table() assert default_table._query_cache.capacity == 0 TinyDB.DEFAULT_TABLE_KWARGS = {} def test_purge_table(): db = TinyDB(storage=MemoryStorage) assert [TinyDB.DEFAULT_TABLE] == list(db.tables()) db.purge_table(TinyDB.DEFAULT_TABLE) assert [] == list(db.tables()) table_name = 'some-other-table' db = TinyDB(storage=MemoryStorage) db.table(table_name) assert {TinyDB.DEFAULT_TABLE, table_name} == db.tables() db.purge_table(table_name) assert {TinyDB.DEFAULT_TABLE} == db.tables() assert table_name not in db._table_cache db.purge_table('non-existent-table-name') assert {TinyDB.DEFAULT_TABLE} == db.tables() def test_empty_write(tmpdir): path = str(tmpdir.join('db.json')) class ReadOnlyMiddleware(Middleware): def write(self, data): raise AssertionError('No write for unchanged db') TinyDB(path).close() TinyDB(path, storage=ReadOnlyMiddleware()).close() def test_query_cache(): db = TinyDB(storage=MemoryStorage) db.insert_multiple([ {'name': 'foo', 'value': 42}, {'name': 'bar', 'value': -1337} ]) query = where('value') > 0 results = db.search(query) assert len(results) == 1 # Modify the db instance to not return any results when # bypassing the query cache db._table_cache[TinyDB.DEFAULT_TABLE]._read = lambda: {} # Make sure we got an independent copy of the result list results.extend([1]) assert db.search(query) == [{'name': 'foo', 'value': 42}] def test_tinydb_is_iterable(db): assert [r for r in db] == db.all() def test_eids(db): with pytest.warns(DeprecationWarning): assert db.contains(eids=[1]) is True with pytest.warns(DeprecationWarning): 
db.update({'field': 'value'}, eids=[1]) assert db.contains(where('field') == 'value') with pytest.warns(DeprecationWarning): doc = db.get(eid=1) with pytest.warns(DeprecationWarning): assert doc.eid == 1 with pytest.warns(DeprecationWarning): db.remove(eids=[1]) assert not db.contains(where('field') == 'value') with pytest.raises(TypeError): db.remove(eids=[1], doc_ids=[1]) with pytest.raises(TypeError): db.get(eid=[1], doc_id=[1]) def test_custom_table_class(): from tinydb.database import Table class MyTableClass(Table): pass # Table class for single table db = TinyDB(storage=MemoryStorage) assert isinstance(TinyDB(storage=MemoryStorage).table(), Table) assert isinstance(db.table('my_table', table_class=MyTableClass), MyTableClass) # Table class for all tables TinyDB.table_class = MyTableClass assert isinstance(TinyDB(storage=MemoryStorage).table(), MyTableClass) assert isinstance(TinyDB(storage=MemoryStorage).table('my_table'), MyTableClass) # Reset default table class TinyDB.table_class = Table def test_string_key(): from collections import Mapping from tinydb.database import Table, StorageProxy, Document from tinydb.storages import MemoryStorage class StorageProxy2(StorageProxy): def _new_document(self, key, val): # Don't convert the key to a number here! return Document(val, key) class Table2(Table): def _init_last_id(self, data): if data: self._last_id = len(data) else: self._last_id = 0 def _get_next_id(self): next_id = self._last_id + 1 data = self._read() while str(next_id) in data: next_id += 1 self._last_id = next_id return str(next_id) def insert(self, document): if not isinstance(document, Mapping): raise ValueError('Document is not a Mapping') doc_id = document.get('doc_id') or self._get_next_id() data = self._read() data[doc_id] = dict(document) self._write(data) def insert_multiple(self, documents): """ Insert multiple documents into the table. 
:param documents: a list of documents to insert :returns: a list containing the inserted documents' IDs """ doc_ids = [] data = self._read() for document in documents: if not isinstance(document, Mapping): raise ValueError('Document is not a Mapping') doc_id = document.get('doc_id') or self._get_next_id() doc_ids.append(doc_id) data[doc_id] = dict(document) self._write(data) return doc_ids db = TinyDB(storage=MemoryStorage, table_class=Table2, storage_proxy_class=StorageProxy2) table = db.table() table.insert({'doc_id': 'abc'}) assert table.get(doc_id='abc')['doc_id'] == 'abc' assert table._last_id == 0 table.insert({'abc': 10}) assert table.get(doc_id='1')['abc'] == 10 assert table._last_id == 1 def test_repr(tmpdir): path = str(tmpdir.join('db.json')) assert re.match( r"", repr(TinyDB(path))) def test_delete(tmpdir): path = str(tmpdir.join('db.json')) db = TinyDB(path, ensure_ascii=False) q = Query() db.insert({'network': {'id': '114', 'name': 'ok', 'rpc': 'dac', 'ticker': 'mkay'}}) assert db.search(q.network.id == '114') == [ {'network': {'id': '114', 'name': 'ok', 'rpc': 'dac', 'ticker': 'mkay'}} ] db.remove(q.network.id == '114') assert db.search(q.network.id == '114') == [] def test_insert_multiple_with_single_dict(db): with pytest.raises(ValueError): d = {'first': 'John', 'last': 'smith'} db.insert_multiple(d) db.close() def test_access_storage(): assert isinstance(TinyDB(storage=MemoryStorage).storage, MemoryStorage) assert isinstance(TinyDB(storage=CachingMiddleware(MemoryStorage)).storage, CachingMiddleware) tinydb-3.15.2/tests/test_utils.py000066400000000000000000000051701357647250700170310ustar00rootroot00000000000000import warnings import pytest from tinydb.utils import LRUCache, catch_warning, freeze, FrozenDict def test_lru_cache(): cache = LRUCache(capacity=3) cache["a"] = 1 cache["b"] = 2 cache["c"] = 3 _ = cache["a"] # move to front in lru queue cache["d"] = 4 # move oldest item out of lru queue try: _ = cache['f'] except KeyError: pass assert cache.lru == ["c", "a", "d"] def test_lru_cache_set_multiple(): cache = LRUCache(capacity=3) cache["a"] = 1 cache["a"] = 2 cache["a"] = 3 cache["a"] = 4 assert cache.lru == ["a"] def test_lru_cache_get(): cache = LRUCache(capacity=3) cache["a"] = 1 cache["b"] = 1 cache["c"] = 1 cache.get("a") cache["d"] = 4 assert cache.lru == ["c", "a", "d"] def test_lru_cache_delete(): cache = LRUCache(capacity=3) cache["a"] = 1 cache["b"] = 2 del cache["a"] try: del cache['f'] except KeyError: pass assert cache.lru == ["b"] def test_lru_cache_clear(): cache = LRUCache(capacity=3) cache["a"] = 1 cache["b"] = 2 cache.clear() assert cache.lru == [] def test_lru_cache_unlimited(): cache = LRUCache() for i in range(100): cache[i] = i assert len(cache.lru) == 100 def test_lru_cache_unlimited_explicit(): cache = LRUCache(capacity=None) for i in range(100): cache[i] = i assert len(cache.lru) == 100 def test_lru_cache_iteration_works(): cache = LRUCache() count = 0 for _ in cache: assert False, 'there should be no elements in the cache' assert count == 0 def test_catch_warning(): class MyWarning(Warning): pass filters = warnings.filters[:] with pytest.raises(MyWarning): with catch_warning(MyWarning): warnings.warn("message", MyWarning) assert filters == warnings.filters def test_catch_warning_reset_filter(): class MyWarning(Warning): pass warnings.filterwarnings(action='once', category=MyWarning) with pytest.raises(MyWarning): with catch_warning(MyWarning): warnings.warn("message", MyWarning) filters = [f for f in warnings.filters if f[2] == MyWarning] 
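    # catch_warning() must have restored the pre-existing 'once' filter
    # for MyWarning after the block exited.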
assert filters assert filters[0][0] == 'once' def test_freeze(): frozen = freeze([0, 1, 2, {'a': [1, 2, 3]}, {1, 2}]) assert isinstance(frozen, tuple) assert isinstance(frozen[3], FrozenDict) assert isinstance(frozen[3]['a'], tuple) assert isinstance(frozen[4], frozenset) with pytest.raises(TypeError): frozen[0] = 10 with pytest.raises(TypeError): frozen[3]['a'] = 10 tinydb-3.15.2/tinydb/000077500000000000000000000000001357647250700144045ustar00rootroot00000000000000tinydb-3.15.2/tinydb/__init__.py000066400000000000000000000016031357647250700165150ustar00rootroot00000000000000""" TinyDB is a tiny, document oriented database optimized for your happiness :) TinyDB stores differrent types of python data types using a configurable backend. It has support for handy querying and tables. .. codeauthor:: Markus Siemens Usage example: >>> from tinydb. import TinyDB, where >>> from tinydb.storages import MemoryStorage >>> db = TinyDB(storage=MemoryStorage) >>> db.insert({'data': 5}) # Insert into '_default' table >>> db.search(where('data') == 5) [{'data': 5, '_id': 1}] >>> # Now let's create a new table >>> tbl = db.table('our_table') >>> for i in range(10): ... tbl.insert({'data': i}) ... >>> len(tbl.search(where('data') < 5)) 5 """ from .queries import Query, where from .storages import Storage, JSONStorage from .database import TinyDB from .version import __version__ __all__ = ('TinyDB', 'Storage', 'JSONStorage', 'Query', 'where') tinydb-3.15.2/tinydb/database.py000066400000000000000000000446711357647250700165360ustar00rootroot00000000000000""" Contains the :class:`database ` and :class:`tables ` implementation. """ # Python 2/3 independent Mapping import try: from collections.abc import Mapping except ImportError: from collections import Mapping import warnings from . import JSONStorage from .utils import LRUCache, iteritems, itervalues class Document(dict): """ Represents a document stored in the database. This is a transparent proxy for database records. It exists to provide a way to access a record's id via ``el.doc_id``. """ def __init__(self, value, doc_id, **kwargs): super(Document, self).__init__(**kwargs) self.update(value) self.doc_id = doc_id @property def eid(self): warnings.warn('eid has been renamed to doc_id', DeprecationWarning) return self.doc_id Element = Document def _get_doc_id(doc_id, eid): # Backwards-compatibility shim if eid is not None: if doc_id is not None: raise TypeError('cannot pass both eid and doc_id') warnings.warn('eid has been renamed to doc_id', DeprecationWarning) return eid else: return doc_id def _get_doc_ids(doc_ids, eids): # Backwards-compatibility shim if eids is not None: if doc_ids is not None: raise TypeError('cannot pass both eids and doc_ids') warnings.warn('eids has been renamed to doc_ids', DeprecationWarning) return eids else: return doc_ids class DataProxy(dict): """ A proxy to a table's data that remembers the storage's data dictionary. """ def __init__(self, table, raw_data, **kwargs): super(DataProxy, self).__init__(**kwargs) self.update(table) self.raw_data = raw_data class StorageProxy(object): """ A proxy that only allows to read a single table from a storage. 
""" def __init__(self, storage, table_name): self._storage = storage self._table_name = table_name def _new_document(self, key, val): doc_id = int(key) return Document(val, doc_id) def read(self): raw_data = self._storage.read() or {} try: table = raw_data[self._table_name] except KeyError: raw_data.update({self._table_name: {}}) self._storage.write(raw_data) return DataProxy({}, raw_data) docs = {} for key, val in iteritems(table): doc = self._new_document(key, val) docs[doc.doc_id] = doc return DataProxy(docs, raw_data) def write(self, data): try: # Try accessing the full data dict from the data proxy raw_data = data.raw_data except AttributeError: # Not a data proxy, fall back to regular reading raw_data = self._storage.read() raw_data[self._table_name] = dict(data) self._storage.write(raw_data) def purge_table(self): try: data = self._storage.read() or {} del data[self._table_name] self._storage.write(data) except KeyError: pass class TinyDB(object): """ The main class of TinyDB. Gives access to the database, provides methods to insert/search/remove and getting tables. """ DEFAULT_TABLE = '_default' DEFAULT_TABLE_KWARGS = {} DEFAULT_STORAGE = JSONStorage def __init__(self, *args, **kwargs): """ Create a new instance of TinyDB. All arguments and keyword arguments will be passed to the underlying storage class (default: :class:`~tinydb.storages.JSONStorage`). :param storage: The class of the storage to use. Will be initialized with ``args`` and ``kwargs``. :param default_table: The name of the default table to populate. """ storage = kwargs.pop('storage', self.DEFAULT_STORAGE) default_table = kwargs.pop('default_table', self.DEFAULT_TABLE) self._cls_table = kwargs.pop('table_class', self.table_class) self._cls_storage_proxy = kwargs.pop('storage_proxy_class', self.storage_proxy_class) # Prepare the storage #: :type: Storage self._storage = storage(*args, **kwargs) self._opened = True # Prepare the default table self._table_cache = {} #: :type: Table self._table = self.table(default_table) def __repr__(self): args = [ 'tables={}'.format(list(self.tables())), 'tables_count={}'.format(len(self.tables())), 'default_table_documents_count={}'.format(self.__len__()), 'all_tables_documents_count={}'.format( ['{}={}'.format(table, len(self.table(table))) for table in self.tables()]), ] return '<{} {}>'.format(type(self).__name__, ', '.join(args)) def table(self, name=DEFAULT_TABLE, **options): """ Get access to a specific table. Creates a new table, if it hasn't been created before, otherwise it returns the cached :class:`~tinydb.Table` object. :param name: The name of the table. :type name: str :param cache_size: How many query results to cache. :param table_class: Which table class to use. """ if name in self._table_cache: return self._table_cache[name] table_class = options.pop('table_class', self._cls_table) table_kwargs = self.DEFAULT_TABLE_KWARGS.copy() table_kwargs.update(options) table = table_class(self._cls_storage_proxy(self._storage, name), name, **table_kwargs) self._table_cache[name] = table return table def tables(self): """ Get the names of all tables in the database. :returns: a set of table names :rtype: set[str] """ return set(self._storage.read()) def purge_tables(self): """ Purge all tables from the database. **CANNOT BE REVERSED!** """ self._storage.write({}) self._table_cache.clear() def purge_table(self, name): """ Purge a specific table from the database. **CANNOT BE REVERSED!** :param name: The name of the table. 
:type name: str """ if name in self._table_cache: del self._table_cache[name] proxy = StorageProxy(self._storage, name) proxy.purge_table() @property def storage(self): """ Access the storage used for this TinyDB instance. :return: This instance's storage """ return self._storage def close(self): """ Close the database. """ self._opened = False self._storage.close() def __enter__(self): return self def __exit__(self, *args): if self._opened: self.close() def __getattr__(self, name): """ Forward all unknown attribute calls to the underlying standard table. """ return getattr(self._table, name) # Methods that are executed on the default table # Because magic methods are not handled by __getattr__ we need to forward # them manually here def __len__(self): """ Get the total number of documents in the default table. >>> db = TinyDB('db.json') >>> len(db) 0 """ return len(self._table) def __iter__(self): """ Iter over all documents from default table. """ return self._table.__iter__() class Table(object): """ Represents a single TinyDB Table. """ def __init__(self, storage, name, cache_size=10): """ Get access to a table. :param storage: Access to the storage :type storage: StorageProxy :param name: The table name :param cache_size: Maximum size of query cache. """ self._storage = storage self._name = name self._query_cache = LRUCache(capacity=cache_size) data = self._read() self._init_last_id(data) def __repr__(self): args = [ 'name={!r}'.format(self.name), 'total={}'.format(self.__len__()), 'storage={}'.format(self._storage), ] return '<{} {}>'.format(type(self).__name__, ', '.join(args)) def _init_last_id(self, data): if data: self._last_id = max(i for i in data) else: self._last_id = 0 @property def name(self): """ Get the table name. """ return self._name def process_elements(self, func, cond=None, doc_ids=None, eids=None): """ Helper function for processing all documents specified by condition or IDs. A repeating pattern in TinyDB is to run some code on all documents that match a condition or are specified by their ID. This is implemented in this function. The function passed as ``func`` has to be a callable. Its first argument will be the data currently in the database. Its second argument is the document ID of the currently processed document. See: :meth:`~.update`, :meth:`.remove` :param func: the function to execute on every included document. first argument: all data second argument: the current eid :param cond: query that matches documents to use, or :param doc_ids: list of document IDs to use :param eids: list of document IDs to use (deprecated) :returns: the document IDs that were affected during processing """ doc_ids = _get_doc_ids(doc_ids, eids) data = self._read() if doc_ids is not None: # Processed document specified by id for doc_id in doc_ids: func(data, doc_id) elif cond is not None: # Collect affected doc_ids doc_ids = [] # Processed documents specified by condition for doc_id in list(data): if cond(data[doc_id]): func(data, doc_id) doc_ids.append(doc_id) else: # Processed documents doc_ids = list(data) for doc_id in doc_ids: func(data, doc_id) self._write(data) return doc_ids def clear_cache(self): """ Clear the query cache. A simple helper that clears the internal query cache. """ self._query_cache.clear() def _get_next_id(self): """ Increment the ID used the last time and return it """ current_id = self._last_id + 1 self._last_id = current_id return current_id def _read(self): """ Reading access to the DB. 
:returns: all values :rtype: DataProxy """ return self._storage.read() def _write(self, values): """ Writing access to the DB. :param values: the new values to write :type values: DataProxy | dict """ self.clear_cache() self._storage.write(values) def __len__(self): """ Get the total number of documents in the table. """ return len(self._read()) def all(self): """ Get all documents stored in the table. :returns: a list with all documents. :rtype: list[Element] """ return list(itervalues(self._read())) def __iter__(self): """ Iter over all documents stored in the table. :returns: an iterator over all documents. :rtype: listiterator[Element] """ for value in itervalues(self._read()): yield value def insert(self, document): """ Insert a new document into the table. :param document: the document to insert :returns: the inserted document's ID """ if not isinstance(document, Mapping): raise ValueError('Document is not a Mapping') doc_id = self._get_next_id() data = self._read() data[doc_id] = dict(document) self._write(data) return doc_id def insert_multiple(self, documents): """ Insert multiple documents into the table. :param documents: a list of documents to insert :returns: a list containing the inserted documents' IDs """ doc_ids = [] data = self._read() for document in documents: if not isinstance(document, Mapping): raise ValueError('Document is not a Mapping') doc_id = self._get_next_id() doc_ids.append(doc_id) data[doc_id] = dict(document) self._write(data) return doc_ids def remove(self, cond=None, doc_ids=None, eids=None): """ Remove all matching documents. :param cond: the condition to check against :type cond: query :param doc_ids: a list of document IDs :type doc_ids: list :returns: a list containing the removed document's ID """ doc_ids = _get_doc_ids(doc_ids, eids) if cond is None and doc_ids is None: raise RuntimeError('Use purge() to remove all documents') return self.process_elements( lambda data, doc_id: data.pop(doc_id), cond, doc_ids ) def update(self, fields, cond=None, doc_ids=None, eids=None): """ Update all matching documents to have a given set of fields. :param fields: the fields that the matching documents will have or a method that will update the documents :type fields: dict | dict -> None :param cond: which documents to update :type cond: query :param doc_ids: a list of document IDs :type doc_ids: list :returns: a list containing the updated document's ID """ doc_ids = _get_doc_ids(doc_ids, eids) if callable(fields): return self.process_elements( lambda data, doc_id: fields(data[doc_id]), cond, doc_ids ) else: return self.process_elements( lambda data, doc_id: data[doc_id].update(fields), cond, doc_ids ) def write_back(self, documents, doc_ids=None, eids=None): """ Write back documents by doc_id :param documents: a list of document to write back :param doc_ids: a list of document IDs which need to be written back :returns: a list of document IDs that have been written """ doc_ids = _get_doc_ids(doc_ids, eids) if doc_ids is not None and not len(documents) == len(doc_ids): raise ValueError( 'The length of documents and doc_ids is not match.') if doc_ids is None: doc_ids = [doc.doc_id for doc in documents] # Since this function will write docs back like inserting, to ensure # here only process existing or removed instead of inserting new, # raise error if doc_id exceeded the last. 
if len(doc_ids) > 0 and max(doc_ids) > self._last_id: raise IndexError( 'ID exceeds table length, use existing or removed doc_id.') data = self._read() # Document specified by ID documents.reverse() for doc_id in doc_ids: data[doc_id] = dict(documents.pop()) self._write(data) return doc_ids def upsert(self, document, cond): """ Update a document, if it exist - insert it otherwise. Note: this will update *all* documents matching the query. :param document: the document to insert or the fields to update :param cond: which document to look for :returns: a list containing the updated document's ID """ updated_docs = self.update(document, cond) if updated_docs: return updated_docs else: return [self.insert(document)] def purge(self): """ Purge the table by removing all documents. """ self._write({}) self._last_id = 0 def search(self, cond): """ Search for all documents matching a 'where' cond. :param cond: the condition to check against :type cond: Query :returns: list of matching documents :rtype: list[Element] """ if cond in self._query_cache: return self._query_cache.get(cond, [])[:] docs = [doc for doc in self.all() if cond(doc)] self._query_cache[cond] = docs return docs[:] def get(self, cond=None, doc_id=None, eid=None): """ Get exactly one document specified by a query or and ID. Returns ``None`` if the document doesn't exist :param cond: the condition to check against :type cond: Query :param doc_id: the document's ID :returns: the document or None :rtype: Element | None """ doc_id = _get_doc_id(doc_id, eid) # Cannot use process_elements here because we want to return a # specific document if doc_id is not None: # Document specified by ID return self._read().get(doc_id, None) # Document specified by condition for doc in self.all(): if cond(doc): return doc def count(self, cond): """ Count the documents matching a condition. :param cond: the condition use :type cond: Query """ return len(self.search(cond)) def contains(self, cond=None, doc_ids=None, eids=None): """ Check wether the database contains a document matching a condition or an ID. If ``eids`` is set, it checks if the db contains a document with one of the specified. :param cond: the condition use :type cond: Query :param doc_ids: the document IDs to look for """ doc_ids = _get_doc_ids(doc_ids, eids) if doc_ids is not None: # Documents specified by ID return any(self.get(doc_id=doc_id) for doc_id in doc_ids) # Document specified by condition return self.get(cond) is not None # Set the default table class TinyDB.table_class = Table # Set the default storage proxy class TinyDB.storage_proxy_class = StorageProxy tinydb-3.15.2/tinydb/middlewares.py000066400000000000000000000071541357647250700172650ustar00rootroot00000000000000""" Contains the :class:`base class ` for middlewares and implementations. """ from . import TinyDB class Middleware(object): """ The base class for all Middlewares. Middlewares hook into the read/write process of TinyDB allowing you to extend the behaviour by adding caching, logging, ... Your middleware's ``__init__`` method has to accept exactly one argument which is the class of the "real" storage. It has to be stored as ``_storage_cls`` (see :class:`~tinydb.middlewares.CachingMiddleware` for an example). """ def __init__(self, storage_cls=TinyDB.DEFAULT_STORAGE): self._storage_cls = storage_cls self.storage = None def __call__(self, *args, **kwargs): """ Create the storage instance and store it as self.storage. 
Usually a user creates a new TinyDB instance like this:: TinyDB(storage=StorageClass) The storage kwarg is used by TinyDB this way:: self.storage = storage(*args, **kwargs) As we can see, ``storage(...)`` runs the constructor and returns the new storage instance. Using Middlewares, the user will call:: The 'real' storage class v TinyDB(storage=Middleware(StorageClass)) ^ Already an instance! So, when running ``self.storage = storage(*args, **kwargs)`` Python now will call ``__call__`` and TinyDB will expect the return value to be the storage (or Middleware) instance. Returning the instance is simple, but we also got the underlying (*real*) StorageClass as an __init__ argument that still is not an instance. So, we initialize it in __call__ forwarding any arguments we receive from TinyDB (``TinyDB(arg1, kwarg1=value, storage=...)``). In case of nested Middlewares, calling the instance as if it were a class results in calling ``__call__``, which initializes the next nested Middleware that itself will initialize the next Middleware and so on. """ self.storage = self._storage_cls(*args, **kwargs) return self def __getattr__(self, name): """ Forward all unknown attribute calls to the underlying storage so we remain as transparent as possible. """ return getattr(self.__dict__['storage'], name) class CachingMiddleware(Middleware): """ Add some caching to TinyDB. This Middleware aims to improve the performance of TinyDB by writing only the last DB state every :attr:`WRITE_CACHE_SIZE` write operations and by always reading from the cache. """ #: The number of write operations to cache before writing to disk WRITE_CACHE_SIZE = 1000 def __init__(self, storage_cls=TinyDB.DEFAULT_STORAGE): super(CachingMiddleware, self).__init__(storage_cls) self.cache = None self._cache_modified_count = 0 def read(self): if self.cache is None: self.cache = self.storage.read() return self.cache def write(self, data): self.cache = data self._cache_modified_count += 1 if self._cache_modified_count >= self.WRITE_CACHE_SIZE: self.flush() def flush(self): """ Flush all unwritten data to disk. """ if self._cache_modified_count > 0: self.storage.write(self.cache) self._cache_modified_count = 0 def close(self): self.flush() # Flush potentially unwritten data self.storage.close() tinydb-3.15.2/tinydb/operations.py000066400000000000000000000016201357647250700171400ustar00rootroot00000000000000def delete(field): """ Delete a given field from the document. """ def transform(doc): del doc[field] return transform def add(field, n): """ Add n to a given field in the document. """ def transform(doc): doc[field] += n return transform def subtract(field, n): """ Subtract n from a given field in the document. """ def transform(doc): doc[field] -= n return transform def set(field, val): """ Set a given field to val. """ def transform(doc): doc[field] = val return transform def increment(field): """ Increment a given field in the document. """ def transform(doc): doc[field] += 1 return transform def decrement(field): """ Decrement a given field in the document. """ def transform(doc): doc[field] -= 1 return transform tinydb-3.15.2/tinydb/queries.py000066400000000000000000000254731357647250700164440ustar00rootroot00000000000000""" Contains the querying interface.
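Query objects are usually handed to table methods such as ``search``, ``update`` or ``remove``; for example (``db`` is assumed to be an existing :class:`~tinydb.TinyDB` instance):

>>> from tinydb import where
>>> db.search(where('val') == 5)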
Starting with :class:`~tinydb.queries.Query` you can construct complex queries: >>> ((where('f1') == 5) & (where('f2') != 2)) | where('s').matches(r'^\\w+$') (('f1' == 5) and ('f2' != 2)) or ('s' ~= ^\\w+$ ) Queries are executed by using ``__call__``: >>> q = where('val') == 5 >>> q({'val': 5}) True >>> q({'val': 1}) False """ import re import sys from .utils import catch_warning, freeze __all__ = ('Query', 'where') def is_sequence(obj): return hasattr(obj, '__iter__') class QueryImpl(object): """ A query implementation. This query implementation wraps a test function which is run when the query is evaluated by calling the object. Queries can be combined with logical and/or and modified with logical not. """ def __init__(self, test, hashval): self._test = test self.hashval = hashval def __call__(self, value): return self._test(value) def __hash__(self): return hash(self.hashval) def __repr__(self): return 'QueryImpl{}'.format(self.hashval) def __eq__(self, other): return self.hashval == other.hashval # --- Query modifiers ----------------------------------------------- def __and__(self, other): # We use a frozenset for the hash as the AND operation is commutative # (a & b == b & a) return QueryImpl(lambda value: self(value) and other(value), ('and', frozenset([self.hashval, other.hashval]))) def __or__(self, other): # We use a frozenset for the hash as the OR operation is commutative # (a | b == b | a) return QueryImpl(lambda value: self(value) or other(value), ('or', frozenset([self.hashval, other.hashval]))) def __invert__(self): return QueryImpl(lambda value: not self(value), ('not', self.hashval)) class Query(QueryImpl): """ TinyDB Queries. Allows building queries for TinyDB databases. There are two main ways of using queries: 1) ORM-like usage: >>> User = Query() >>> db.search(User.name == 'John Doe') >>> db.search(User['logged-in'] == True) 2) Classical usage: >>> db.search(where('value') == True) Note that ``where(...)`` is a shorthand for ``Query(...)`` allowing for a more fluent syntax. Besides the methods documented here you can combine queries using the binary AND and OR operators: >>> # Binary AND: >>> db.search((where('field1').exists()) & (where('field2') == 5)) >>> # Binary OR: >>> db.search((where('field1').exists()) | (where('field2') == 5)) Queries are executed by calling the resulting object. They expect to get the document to test as the first argument and return ``True`` or ``False`` depending on whether the document matches the query or not. """ def __init__(self): self._path = () super(Query, self).__init__( self._prepare_test(lambda _: True), ('path', self._path) ) def __repr__(self): return '{}()'.format(type(self).__name__) def __hash__(self): return super(Query, self).__hash__() def __getattr__(self, item): query = type(self)() query._path = self._path + (item, ) query.hashval = ('path', query._path) return query __getitem__ = __getattr__ def _prepare_test(self, test): def runner(value): try: # Resolve the path for part in self._path: value = value[part] except (KeyError, TypeError): return False else: return test(value) return runner def _generate_test(self, test, hashval): """ Generate a query based on a test function. :param test: The test the query executes. :param hashval: The hash of the query. :return: A :class:`~tinydb.queries.QueryImpl` object """ if not self._path: raise ValueError('Query has no path') return QueryImpl(self._prepare_test(test), hashval) def __eq__(self, rhs): """ Test a dict value for equality.
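        A query built this way is itself callable and can be tried out directly
        against a plain ``dict`` (the values below are made up for illustration):

        >>> q = Query().f1 == 42
        >>> q({'f1': 42})
        True
        >>> q({'f1': 0})
        False

        Written without the intermediate variable this is simply: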
>>> Query().f1 == 42 :param rhs: The value to compare against """ if sys.version_info <= (3, 0): # pragma: no cover # Special UTF-8 handling on Python 2 def test(value): with catch_warning(UnicodeWarning): try: return value == rhs except UnicodeWarning: # Dealing with a case, where 'value' or 'rhs' # is unicode and the other is a byte string. if isinstance(value, str): return value.decode('utf-8') == rhs elif isinstance(rhs, str): return value == rhs.decode('utf-8') else: # pragma: no cover def test(value): return value == rhs return self._generate_test( lambda value: test(value), ('==', self._path, freeze(rhs)) ) def __ne__(self, rhs): """ Test a dict value for inequality. >>> Query().f1 != 42 :param rhs: The value to compare against """ return self._generate_test( lambda value: value != rhs, ('!=', self._path, freeze(rhs)) ) def __lt__(self, rhs): """ Test a dict value for being lower than another value. >>> Query().f1 < 42 :param rhs: The value to compare against """ return self._generate_test( lambda value: value < rhs, ('<', self._path, rhs) ) def __le__(self, rhs): """ Test a dict value for being lower than or equal to another value. >>> where('f1') <= 42 :param rhs: The value to compare against """ return self._generate_test( lambda value: value <= rhs, ('<=', self._path, rhs) ) def __gt__(self, rhs): """ Test a dict value for being greater than another value. >>> Query().f1 > 42 :param rhs: The value to compare against """ return self._generate_test( lambda value: value > rhs, ('>', self._path, rhs) ) def __ge__(self, rhs): """ Test a dict value for being greater than or equal to another value. >>> Query().f1 >= 42 :param rhs: The value to compare against """ return self._generate_test( lambda value: value >= rhs, ('>=', self._path, rhs) ) def exists(self): """ Test for a dict where a provided key exists. >>> Query().f1.exists() """ return self._generate_test( lambda _: True, ('exists', self._path) ) def matches(self, regex, flags=0): """ Run a regex test against a dict value (whole string has to match). >>> Query().f1.matches(r'^\\w+$') :param regex: The regular expression to use for matching :param flags: regex flags to pass to ``re.match`` """ def test(value): if not isinstance(value, str): return False return re.match(regex, value, flags) is not None return self._generate_test(test, ('matches', self._path, regex)) def search(self, regex, flags=0): """ Run a regex test against a dict value (only a substring has to match). >>> Query().f1.search(r'^\\w+$') :param regex: The regular expression to use for matching :param flags: regex flags to pass to ``re.search`` """ def test(value): if not isinstance(value, str): return False return re.search(regex, value, flags) is not None return self._generate_test(test, ('search', self._path, regex)) def test(self, func, *args): """ Run a user-defined test function against a dict value. >>> def test_func(val): ... return val == 42 ... >>> Query().f1.test(test_func) :param func: The function to call, passing the dict as the first argument :param args: Additional arguments to pass to the test function """ return self._generate_test( lambda value: func(value, *args), ('test', self._path, func, args) ) def any(self, cond): """ Check if a condition is met by any document in a list, where a condition can also be a sequence (e.g. list).
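        Evaluated directly against hand-written documents this behaves as
        follows:

        >>> q = Query().f1.any(Query().f2 == 1)
        >>> q({'f1': [{'f2': 1}, {'f2': 0}]})
        True
        >>> q({'f1': [{'f2': 0}]})
        False

        In general: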
>>> Query().f1.any(Query().f2 == 1) Matches:: {'f1': [{'f2': 1}, {'f2': 0}]} >>> Query().f1.any([1, 2, 3]) Matches:: {'f1': [1, 2]} {'f1': [3, 4, 5]} :param cond: Either a query that at least one document has to match or a list of which at least one element has to be contained in the tested document. """ if callable(cond): def _cmp(value): return is_sequence(value) and any(cond(e) for e in value) else: def _cmp(value): return is_sequence(value) and any(e in cond for e in value) return self._generate_test( lambda value: _cmp(value), ('any', self._path, freeze(cond)) ) def all(self, cond): """ Check if a condition is met by all documents in a list, where a condition can also be a sequence (e.g. list). >>> Query().f1.all(Query().f2 == 1) Matches:: {'f1': [{'f2': 1}, {'f2': 1}]} >>> Query().f1.all([1, 2, 3]) Matches:: {'f1': [1, 2, 3, 4, 5]} :param cond: Either a query that all documents have to match or a list which has to be contained in the tested document. """ if callable(cond): def _cmp(value): return is_sequence(value) and all(cond(e) for e in value) else: def _cmp(value): return is_sequence(value) and all(e in value for e in cond) return self._generate_test( lambda value: _cmp(value), ('all', self._path, freeze(cond)) ) def one_of(self, items): """ Check if the value is contained in a list or generator. >>> Query().f1.one_of(['value 1', 'value 2']) :param items: The list of items to check with """ return self._generate_test( lambda value: value in items, ('one_of', self._path, freeze(items)) ) def where(key): return Query()[key] tinydb-3.15.2/tinydb/storages.py000066400000000000000000000064501357647250700166120ustar00rootroot00000000000000""" Contains the :class:`base class ` for storages and implementations. """ from abc import ABCMeta, abstractmethod import codecs import os import warnings from .utils import with_metaclass try: import ujson as json warnings.warn( 'Support for `ujson` is deprecated and will be replaced in ' 'a future version. ' 'See https://github.com/msiemens/tinydb/issues/263 for ' 'details.', DeprecationWarning ) except ImportError: import json def touch(fname, create_dirs): if create_dirs: base_dir = os.path.dirname(fname) if not os.path.exists(base_dir): os.makedirs(base_dir) if not os.path.exists(fname): with open(fname, 'a'): os.utime(fname, None) class Storage(with_metaclass(ABCMeta, object)): """ The abstract base class for all Storages. A Storage (de)serializes the current state of the database and stores it in some place (memory, file on disk, ...). """ # Using ABCMeta as metaclass allows instantiating only storages that have # implemented read and write @abstractmethod def read(self): """ Read the last stored state. Any kind of deserialization should go here. Return ``None`` here to indicate that the storage is empty. :rtype: dict """ raise NotImplementedError('To be overridden!') @abstractmethod def write(self, data): """ Write the current state of the database to the storage. Any kind of serialization should go here. :param data: The current state of the database. :type data: dict """ raise NotImplementedError('To be overridden!') def close(self): """ Optional: Close open file handles, etc. """ pass class JSONStorage(Storage): """ Store the data in a JSON file. """ def __init__(self, path, create_dirs=False, encoding=None, **kwargs): """ Create a new instance. Also creates the storage file, if it doesn't exist. :param path: Where to store the JSON data.
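        :param create_dirs: Whether to create missing parent directories of ``path`` before touching the file.
        :param encoding: The text encoding passed to ``codecs.open``.
        :param kwargs: Additional keyword arguments that are forwarded to ``json.dumps`` when writing.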
:type path: str """ super(JSONStorage, self).__init__() touch(path, create_dirs=create_dirs) # Create file if not exists self.kwargs = kwargs self._handle = codecs.open(path, 'r+', encoding=encoding) def close(self): self._handle.close() def read(self): # Get the file size self._handle.seek(0, os.SEEK_END) size = self._handle.tell() if not size: # File is empty return None else: self._handle.seek(0) return json.load(self._handle) def write(self, data): self._handle.seek(0) serialized = json.dumps(data, **self.kwargs) self._handle.write(serialized) self._handle.flush() os.fsync(self._handle.fileno()) self._handle.truncate() class MemoryStorage(Storage): """ Store the data as JSON in memory. """ def __init__(self): """ Create a new instance. """ super(MemoryStorage, self).__init__() self.memory = None def read(self): return self.memory def write(self, data): self.memory = data tinydb-3.15.2/tinydb/utils.py000066400000000000000000000076351357647250700161270ustar00rootroot00000000000000""" Utility functions. """ import warnings from collections import OrderedDict from contextlib import contextmanager # Python 2/3 independent dict iteration iteritems = getattr(dict, 'iteritems', dict.items) itervalues = getattr(dict, 'itervalues', dict.values) class LRUCache: # @param capacity, an integer def __init__(self, capacity=None): self.capacity = capacity self.__cache = OrderedDict() @property def lru(self): return list(self.__cache.keys()) @property def length(self): return len(self.__cache) def clear(self): self.__cache.clear() def __len__(self): return self.length def __contains__(self, key): return key in self.__cache def __setitem__(self, key, value): self.set(key, value) def __delitem__(self, key): del self.__cache[key] def __getitem__(self, key): if key not in self: raise KeyError(key) return self.get(key) def __iter__(self): return iter(self.__cache) def get(self, key, default=None): value = self.__cache.get(key) if value is not None: # Mark the key as most recently used by moving it to the end of # the ordered dict, i.e. by re-inserting it del self.__cache[key] self.__cache[key] = value return value return default def set(self, key, value): if self.__cache.get(key): del self.__cache[key] self.__cache[key] = value else: self.__cache[key] = value # Check if the cache is full and we have to remove old items. # If the cache has no size limit (capacity is None), it is never # pruned. if self.capacity is not None and self.length > self.capacity: self.__cache.popitem(last=False) # Source: https://github.com/PythonCharmers/python-future/blob/466bfb2dfa36d865285dc31fe2b0c0a53ff0f181/future/utils/__init__.py#L102-L134 def with_metaclass(meta, *bases): """ Function from jinja2/_compat.py. License: BSD. Use it like this:: class BaseForm(object): pass class FormType(type): pass class Form(with_metaclass(FormType, BaseForm)): pass This requires a bit of explanation: the basic idea is to make a dummy metaclass for one level of class instantiation that replaces itself with the actual metaclass. Because of internal type checks we also need to make sure that we downgrade the custom metaclass for one level to something closer to type (that's why __call__ and __init__ come back from type etc.). This has the advantage over six.with_metaclass of not introducing dummy classes into the final MRO.
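    Within TinyDB itself this helper is used, for example, to declare the
    abstract storage base class in a Python 2/3 compatible way::

        class Storage(with_metaclass(ABCMeta, object)):
            ...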
""" class Metaclass(meta): __call__ = type.__call__ __init__ = type.__init__ def __new__(cls, name, this_bases, d): if this_bases is None: return type.__new__(cls, name, (), d) return meta(name, bases, d) return Metaclass('temporary_class', None, {}) @contextmanager def catch_warning(warning_cls): with warnings.catch_warnings(): warnings.filterwarnings('error', category=warning_cls) yield class FrozenDict(dict): def __hash__(self): return hash(tuple(sorted(self.items()))) def _immutable(self, *args, **kws): raise TypeError('object is immutable') __setitem__ = _immutable __delitem__ = _immutable clear = _immutable update = _immutable setdefault = _immutable pop = _immutable popitem = _immutable def freeze(obj): if isinstance(obj, dict): return FrozenDict((k, freeze(v)) for k, v in obj.items()) elif isinstance(obj, list): return tuple(freeze(el) for el in obj) elif isinstance(obj, set): return frozenset(obj) else: return obj tinydb-3.15.2/tinydb/version.py000066400000000000000000000000271357647250700164420ustar00rootroot00000000000000__version__ = '3.15.2' tinydb-3.15.2/tox.ini000066400000000000000000000007201357647250700144250ustar00rootroot00000000000000# Tox (http://tox.testrun.org/) is a tool for running tests # in multiple virtualenvs. This configuration file will run the # test suite on all supported python versions. To use it, "pip install tox" # and then run "tox" from this directory. [tox] envlist = py{26,27,33,34,35,36,py,py3}-check [testenv] commands = pytest -v {posargs} deps = . pytest pytest-cov [testenv:check] commands = python setup.py check -r -s deps = . readme_renderer