aiomysql-0.3.2/.codecov.yml

codecov:
  notify:
    after_n_builds: 40

aiomysql-0.3.2/.coveragerc

[run]
branch = True
source = aiomysql, tests
omit = site-packages, .tox

aiomysql-0.3.2/.flake8

[flake8]
max-line-length = 88

aiomysql-0.3.2/.git_archival.txt

node: 72a4df674ae5466028f2b762917f938813bb147d
node-date: 2025-10-22T01:59:14+02:00
describe-name: v0.3.2
ref-names: tag: v0.3.2

aiomysql-0.3.2/.gitattributes

# Force LF line endings for text files
* text=auto eol=lf
# Needed for setuptools-scm-git-archive
.git_archival.txt export-subst

aiomysql-0.3.2/.github/ISSUE_TEMPLATE/bug_report.yml

---
name: Bug Report
description: Create a report to help us improve.
labels: [bug]

body:
- type: markdown
  attributes:
    value: |
      **Thanks for taking a minute to file a bug report!**

      ⚠ Verify first that your issue is not [already reported on GitHub][issue search].

      _Please fill out the form below with as many precise details as possible._

      [issue search]: ../search?q=is%3Aissue&type=issues

- type: textarea
  attributes:
    label: Describe the bug
    description: >-
      A clear and concise description of what the bug is.
  validations:
    required: true

- type: textarea
  attributes:
    label: To Reproduce
    description: >-
      Describe the steps to reproduce this bug.
    placeholder: |
      1. Have certain environment
      2. Then run '...'
      3. An error occurs.
  validations:
    required: true

- type: textarea
  attributes:
    label: Expected behavior
    description: >-
      A clear and concise description of what you expected to happen.
  validations:
    required: true

- type: textarea
  attributes:
    label: Logs/tracebacks
    description: |
      If applicable, add logs/tracebacks to help explain your problem.
      Paste the output of the steps above, including the commands
      themselves and their output/traceback etc.
    render: python-traceback
  validations:
    required: true

- type: textarea
  attributes:
    label: Python Version
    description: Attach your version of Python.
    render: console
    value: |
      $ python --version
  validations:
    required: true

- type: textarea
  attributes:
    label: aiomysql Version
    description: Attach your version of aiomysql.
    render: console
    value: |
      $ python -m pip show aiomysql
  validations:
    required: true

- type: textarea
  attributes:
    label: PyMySQL Version
    description: Attach your version of PyMySQL.
    render: console
    value: |
      $ python -m pip show PyMySQL
  validations:
    required: true

- type: textarea
  attributes:
    label: SQLAlchemy Version
    description: Attach your version of SQLAlchemy if you're using it.
    render: console
    value: |
      $ python -m pip show sqlalchemy

- type: textarea
  attributes:
    label: OS
    placeholder: >-
      For example, Arch Linux, Windows, macOS, etc.
validations: required: true - type: textarea attributes: label: Database type and version description: Attach your version of MariaDB/MySQL. render: console value: | SELECT VERSION(); validations: required: true - type: textarea attributes: label: Additional context description: | Add any other context about the problem here. Describe the environment you have that lead to your issue. - type: checkboxes attributes: label: Code of Conduct description: | Read the [aio-libs Code of Conduct][CoC] first. [CoC]: https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md options: - label: I agree to follow the aio-libs Code of Conduct required: true ... aiomysql-0.3.2/.github/ISSUE_TEMPLATE/feature_request.yml000066400000000000000000000031221507601712200230220ustar00rootroot00000000000000--- name: ๐Ÿš€ Feature request description: Suggest an idea for this project. labels: enhancement body: - type: markdown attributes: value: | **Thanks for taking a minute to file a feature for aiomysql!** โš  Verify first that your feature request is not [already reported on GitHub][issue search]. _Please fill out the form below with as many precise details as possible._ [issue search]: ../search?q=is%3Aissue&type=issues - type: textarea attributes: label: Is your feature request related to a problem? description: >- Please add a clear and concise description of what the problem is. _Ex. I'm always frustrated when [...]_ - type: textarea attributes: label: Describe the solution you'd like description: >- A clear and concise description of what you want to happen. validations: required: true - type: textarea attributes: label: Describe alternatives you've considered description: >- A clear and concise description of any alternative solutions or features you've considered. validations: required: true - type: textarea attributes: label: Additional context description: >- Add any other context or screenshots about the feature request here. - type: checkboxes attributes: label: Code of Conduct description: | Read the [aio-libs Code of Conduct][CoC] first. [CoC]: https://github.com/aio-libs/.github/blob/master/CODE_OF_CONDUCT.md options: - label: I agree to follow the aio-libs Code of Conduct required: true ... aiomysql-0.3.2/.github/dependabot.yml000066400000000000000000000006131507601712200175430ustar00rootroot00000000000000version: 2 updates: - package-ecosystem: pip directory: "/" schedule: interval: daily open-pull-requests-limit: 10 rebase-strategy: disabled # Automatic rebases are disabled to save CI time - package-ecosystem: github-actions directory: / schedule: interval: daily open-pull-requests-limit: 10 rebase-strategy: disabled # Automatic rebases are disabled to save CI time aiomysql-0.3.2/.github/workflows/000077500000000000000000000000001507601712200167505ustar00rootroot00000000000000aiomysql-0.3.2/.github/workflows/ci-cd.yml000066400000000000000000000545411507601712200204630ustar00rootroot00000000000000name: CI/CD on: push: branches-ignore: - dependabot/** pull_request: workflow_dispatch: inputs: release-version: # github.event_name == 'workflow_dispatch' # && github.event.inputs.release-version description: >- Target PEP440-compliant version to release. Please, don't prepend `v`. required: true release-commitish: # github.event_name == 'workflow_dispatch' # && github.event.inputs.release-commitish default: '' description: >- The commit to be released to PyPI and tagged in Git as `release-version`. Normally, you should keep this empty. 
required: false YOLO: default: false description: >- Flag whether test results should block the release (true/false). Only use this under extraordinary circumstances to ignore the test failures and cut the release regardless. required: false schedule: - cron: 1 0 * * * # Run daily at 0:01 UTC concurrency: group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.sha }} cancel-in-progress: true permissions: contents: read jobs: pre-setup: name: โš™๏ธ Pre-set global build settings runs-on: ubuntu-latest defaults: run: shell: python outputs: dist-version: >- ${{ steps.request-check.outputs.release-requested == 'true' && github.event.inputs.release-version || steps.scm-version.outputs.dist-version }} is-untagged-devel: >- ${{ steps.untagged-check.outputs.is-untagged-devel || false }} release-requested: >- ${{ steps.request-check.outputs.release-requested || false }} cache-key-for-dep-files: >- ${{ steps.calc-cache-key-files.outputs.cache-key-for-dep-files }} git-tag: ${{ steps.git-tag.outputs.tag }} sdist-artifact-name: ${{ steps.artifact-name.outputs.sdist }} wheel-artifact-name: ${{ steps.artifact-name.outputs.wheel }} steps: - name: Switch to using Python 3.13 by default uses: actions/setup-python@v6 with: python-version: >- 3.13 - name: >- Mark the build as untagged '${{ github.event.repository.default_branch }}' branch build id: untagged-check if: >- github.event_name == 'push' && github.ref == format( 'refs/heads/{0}', github.event.repository.default_branch ) run: >- echo "is-untagged-devel=true" >> "$GITHUB_OUTPUT" shell: bash - name: Mark the build as "release request" id: request-check if: github.event_name == 'workflow_dispatch' run: >- echo "release-requested=true" >> "$GITHUB_OUTPUT" shell: bash - name: Check out src from Git if: >- steps.request-check.outputs.release-requested != 'true' uses: actions/checkout@v5 with: show-progress: false persist-credentials: false fetch-depth: 0 ref: ${{ github.event.inputs.release-commitish }} - name: >- Calculate dependency files' combined hash value for use in the cache key if: >- steps.request-check.outputs.release-requested != 'true' id: calc-cache-key-files run: >- echo "cache-key-for-dep-files=${{ hashFiles( 'pyproject.toml', 'setup.cfg', 'requirements-dev.txt', '.pre-commit-config.yaml' ) }}" >> "$GITHUB_OUTPUT" shell: bash - name: Set up pip cache if: >- steps.request-check.outputs.release-requested != 'true' uses: re-actors/cache-python-deps@810325a232f2a28ea124dfba85c7c72fd1774b38 # v1.0.0 with: cache-key-for-dependency-files: >- ${{ steps.calc-cache-key-files.outputs.cache-key-for-dep-files }} - name: Drop Git tags from HEAD for non-release requests if: >- steps.request-check.outputs.release-requested != 'true' run: >- git tag --points-at HEAD | xargs git tag --delete shell: bash - name: Set up versioning prerequisites if: >- steps.request-check.outputs.release-requested != 'true' run: >- python -m pip install --user --upgrade "setuptools-scm>= 9.2, < 10" shell: bash - name: Set the current dist version from Git if: steps.request-check.outputs.release-requested != 'true' id: scm-version run: | from os import environ import setuptools_scm ver = setuptools_scm.get_version( ${{ steps.untagged-check.outputs.is-untagged-devel == 'true' && 'local_scheme="no-local-version"' || '' }} ) with open(environ['GITHUB_OUTPUT'], mode='a') as github_output: print('dist-version={ver}'.format(ver=ver), file=github_output) - name: Set the target Git tag id: git-tag run: >- echo "tag=v${{ steps.request-check.outputs.release-requested 
== 'true' && github.event.inputs.release-version || steps.scm-version.outputs.dist-version }}" >> "$GITHUB_OUTPUT" shell: bash - name: Set the expected dist artifact names id: artifact-name run: | echo "sdist=aiomysql-${{ steps.request-check.outputs.release-requested == 'true' && github.event.inputs.release-version || steps.scm-version.outputs.dist-version }}.tar.gz" >> "$GITHUB_OUTPUT" echo "wheel=aiomysql-${{ steps.request-check.outputs.release-requested == 'true' && github.event.inputs.release-version || steps.scm-version.outputs.dist-version }}-py3-none-any.whl" >> "$GITHUB_OUTPUT" shell: bash build: name: >- ๐Ÿ‘ท dists ${{ needs.pre-setup.outputs.git-tag }} [mode: ${{ fromJSON(needs.pre-setup.outputs.is-untagged-devel) && 'nightly' || '' }}${{ fromJSON(needs.pre-setup.outputs.release-requested) && 'release' || '' }}${{ ( !fromJSON(needs.pre-setup.outputs.is-untagged-devel) && !fromJSON(needs.pre-setup.outputs.release-requested) ) && 'test' || '' }}] needs: - pre-setup # transitive, for accessing settings runs-on: ubuntu-latest outputs: dists-artifact-id: ${{ steps.dist-artifact-upload.outputs.artifact-id }} env: PY_COLORS: 1 steps: - name: Switch to using Python v3.13 uses: actions/setup-python@v6 with: python-version: >- 3.13 - name: Set up pip cache uses: re-actors/cache-python-deps@810325a232f2a28ea124dfba85c7c72fd1774b38 # v1.0.0 with: cache-key-for-dependency-files: >- ${{ needs.pre-setup.outputs.cache-key-for-dep-files }} - name: Install build tools run: >- python -m pip install --user --upgrade build - name: Grab the source from Git uses: actions/checkout@v5 with: show-progress: false persist-credentials: false fetch-depth: >- ${{ steps.request-check.outputs.release-requested == 'true' && 1 || 0 }} ref: ${{ github.event.inputs.release-commitish }} - name: Setup git user as [bot] if: >- fromJSON(needs.pre-setup.outputs.is-untagged-devel) || fromJSON(needs.pre-setup.outputs.release-requested) uses: fregante/setup-git-user@024bc0b8e177d7e77203b48dab6fb45666854b35 - name: >- Tag the release in the local Git repo as ${{ needs.pre-setup.outputs.git-tag }} for setuptools-scm to set the desired version if: >- fromJSON(needs.pre-setup.outputs.is-untagged-devel) || fromJSON(needs.pre-setup.outputs.release-requested) run: >- git tag -m "${GIT_TAG}" "${GIT_TAG}" -- ${RELEASE_COMMITISH} env: GIT_TAG: ${{ needs.pre-setup.outputs.git-tag }} RELEASE_COMMITISH: ${{ github.event.inputs.release-commitish }} - name: Build dists run: >- python -m build - name: Verify that the artifacts with expected names got created run: >- ls -1 "dist/${SDIST_ARTIFACT_NAME}" "dist/${WHEEL_ARTIFACT_NAME}" env: SDIST_ARTIFACT_NAME: ${{ needs.pre-setup.outputs.sdist-artifact-name }} WHEEL_ARTIFACT_NAME: ${{ needs.pre-setup.outputs.wheel-artifact-name }} - name: Store the distribution packages id: dist-artifact-upload uses: actions/upload-artifact@v4 with: name: python-package-distributions # NOTE: Exact expected file names are specified here # NOTE: as a safety measure โ€” if anything weird ends # NOTE: up being in this dir or not all dists will be # NOTE: produced, this will fail the workflow. 
path: | dist/${{ needs.pre-setup.outputs.sdist-artifact-name }} dist/${{ needs.pre-setup.outputs.wheel-artifact-name }} retention-days: >- ${{ needs.pre-setup.outputs.release-requested == 'true' && 90 || 30 }} lint: name: ๐Ÿงน Lint needs: - build - pre-setup # transitive, for accessing settings runs-on: ubuntu-latest env: PY_COLORS: 1 steps: - name: Switch to using Python 3.13 by default uses: actions/setup-python@v6 with: python-version: >- 3.13 - name: Set up pip cache uses: re-actors/cache-python-deps@810325a232f2a28ea124dfba85c7c72fd1774b38 # v1.0.0 with: cache-key-for-dependency-files: >- ${{ needs.pre-setup.outputs.cache-key-for-dep-files }} - name: Grab the source from Git uses: actions/checkout@v5 with: show-progress: false persist-credentials: false ref: ${{ github.event.inputs.release-commitish }} - name: Download all the dists uses: actions/download-artifact@v5 with: artifact-ids: >- ${{ needs.build.outputs.dists-artifact-id }} path: dist/ - name: Install build tools run: >- python -m pip install --user --requirement requirements-dev.txt - name: flake8 Lint uses: py-actions/flake8@84ec6726560b6d5bd68f2a5bed83d62b52bb50ba # v2.3.0 with: flake8-version: 7.3.0 path: aiomysql args: tests examples - name: Check package description run: | python -m twine check --strict dist/* tests: name: >- ๐Ÿงช ๐Ÿ${{ matrix.py }} @ ${{ matrix.os }} on ${{ join(matrix.db, '-') }} needs: - build - pre-setup # transitive, for accessing settings strategy: # when updating matrix jobs make sure to adjust the expected reports in # codecov.notify.after_n_builds in .codecov.yml matrix: # service containers are only supported on ubuntu currently os: - ubuntu-latest py: - '3.9' - '3.10' - '3.11' - '3.12' - '3.13' db: - [mysql, '8.0'] - [mysql, '8.4'] - [mysql, '9.4'] - [mariadb, '10.6'] - [mariadb, '10.11'] - [mariadb, '11.4'] - [mariadb, '11.8'] - [mariadb, '12.0'] fail-fast: false runs-on: ${{ matrix.os }} timeout-minutes: 15 continue-on-error: >- ${{ ( ( needs.pre-setup.outputs.release-requested == 'true' && !toJSON(github.event.inputs.YOLO) ) || contains(matrix.py, '-dev') ) && true || false }} env: MYSQL_ROOT_PASSWORD: rootpw PY_COLORS: 1 services: mysql: image: "${{ join(matrix.db, ':') }}" ports: - 3306:3306 volumes: - "/tmp/run-${{ join(matrix.db, '-') }}/:/socket-mount/" options: '--name=mysqld' env: MYSQL_ROOT_PASSWORD: rootpw steps: - name: Setup Python ${{ matrix.py }} id: python-install uses: actions/setup-python@v6 with: python-version: ${{ matrix.py }} - name: Set up pip cache uses: re-actors/cache-python-deps@810325a232f2a28ea124dfba85c7c72fd1774b38 # v1.0.0 with: cache-key-for-dependency-files: >- ${{ needs.pre-setup.outputs.cache-key-for-dep-files }} - name: Update pip run: >- python -m pip install --user --upgrade pip - name: Grab the source from Git uses: actions/checkout@v5 with: show-progress: false persist-credentials: false ref: ${{ github.event.inputs.release-commitish }} - name: Remove aiomysql source to avoid accidentally using it run: >- rm -rf aiomysql - name: Download all the dists uses: actions/download-artifact@v5 with: artifact-ids: >- ${{ needs.build.outputs.dists-artifact-id }} path: dist/ - name: Install dependencies run: >- python -m pip install --user --requirement requirements-dev.txt - name: Install previously built wheel run: >- python -m pip install --user "dist/${WHEEL_ARTIFACT_NAME}" env: WHEEL_ARTIFACT_NAME: ${{ needs.pre-setup.outputs.wheel-artifact-name }} - name: >- Log platform.platform() run: >- python -m platform - name: >- Log platform.version() run: >- 
python -c "import platform; print(platform.version())" - name: >- Log platform.uname() run: >- python -c "import platform; print(platform.uname())" - name: >- Log platform.release() run: >- python -c "import platform; print(platform.release())" - name: Log stdlib OpenSSL version run: >- python -c "import ssl; print('\nOPENSSL_VERSION: ' + ssl.OPENSSL_VERSION + '\nOPENSSL_VERSION_INFO: ' + repr(ssl.OPENSSL_VERSION_INFO) + '\nOPENSSL_VERSION_NUMBER: ' + repr(ssl.OPENSSL_VERSION_NUMBER))" # this ensures our database is ready. typically by the time the preparations have completed its first start logic. # unfortunately we need this hacky workaround as GitHub Actions service containers can't reference data from our repo. - name: Prepare mysql run: | # ensure server is started up while : do sleep 1 mysql -h127.0.0.1 -uroot "-p$MYSQL_ROOT_PASSWORD" -e 'select version()' && break done # inject tls configuration docker container stop mysqld docker container cp "${{ github.workspace }}/tests/ssl_resources/ssl" mysqld:/etc/mysql/ssl docker container cp "${{ github.workspace }}/tests/ssl_resources/tls.cnf" mysqld:/etc/mysql/conf.d/aiomysql-tls.cnf # use custom socket path # we need to ensure that the socket path is writable for the user running the DB process in the container sudo chmod 0777 /tmp/run-${{ join(matrix.db, '-') }} docker container cp "${{ github.workspace }}/tests/ssl_resources/socket.cnf" mysqld:/etc/mysql/conf.d/aiomysql-socket.cnf docker container start mysqld # ensure server is started up while : do sleep 1 mysql -h127.0.0.1 -uroot "-p$MYSQL_ROOT_PASSWORD" -e 'select version()' && break done mysql -h127.0.0.1 -uroot "-p$MYSQL_ROOT_PASSWORD" -e "SET GLOBAL local_infile=on" - name: Run tests run: | # timeout ensures a more or less clean stop by sending a KeyboardInterrupt which will still provide useful logs timeout --preserve-status --signal=INT --verbose 570s \ pytest \ --capture=no \ --verbosity 2 \ --cov-report term \ --cov-report xml \ --junitxml=junit.xml \ -o junit_family=legacy \ --cov aiomysql \ --cov tests \ ./tests \ --mysql-unix-socket "unix-${{ join(matrix.db, '') }}=/tmp/run-${{ join(matrix.db, '-') }}/mysql.sock" \ --mysql-address "tcp-${{ join(matrix.db, '') }}=127.0.0.1:3306" env: PYTHONUNBUFFERED: 1 timeout-minutes: 10 - name: Upload coverage if: ${{ github.event_name != 'schedule' }} uses: codecov/codecov-action@5a1091511ad55cbe89839c7260b706298ca349f7 # v5.5.1 with: files: ./coverage.xml flags: >- CI-GHA, OS-${{ runner.os }}, VM-${{ matrix.os }}, Py-${{ steps.python-install.outputs.python-version }}, DB-${{ join(matrix.db, '-') }}, ${{ matrix.os }}_${{ matrix.py }}_${{ join(matrix.db, '-') }} fail_ci_if_error: true - name: Upload test results to Codecov if: ${{ !cancelled() }} uses: codecov/test-results-action@47f89e9acb64b76debcd5ea40642d25a4adced9f # v1.1.1 with: files: ./junit.xml flags: >- CI-GHA, OS-${{ runner.os }}, VM-${{ matrix.os }}, Py-${{ steps.python-install.outputs.python-version }}, DB-${{ join(matrix.db, '-') }}, ${{ matrix.os }}_${{ matrix.py }}_${{ join(matrix.db, '-') }} fail_ci_if_error: true check: # This job does nothing and is only used for the branch protection if: always() needs: - lint - tests runs-on: ubuntu-latest steps: - name: Decide whether the needed jobs succeeded or failed uses: re-actors/alls-green@05ac9388f0aebcb5727afa17fcccfecd6f8ec5fe # v1.2.2 with: jobs: ${{ toJSON(needs) }} publish-pypi: name: Publish ๐Ÿ๐Ÿ“ฆ ${{ needs.pre-setup.outputs.git-tag }} to PyPI needs: - check - build - pre-setup # transitive, for accessing settings 
if: >- fromJSON(needs.pre-setup.outputs.release-requested) runs-on: ubuntu-latest environment: name: pypi url: >- https://pypi.org/project/aiomysql/${{ needs.pre-setup.outputs.dist-version }} permissions: id-token: write # this permission is mandatory for trusted publishing steps: - name: Download all the dists uses: actions/download-artifact@v5 with: artifact-ids: >- ${{ needs.build.outputs.dists-artifact-id }} path: dist/ - name: >- Publish ๐Ÿ๐Ÿ“ฆ ${{ needs.pre-setup.outputs.git-tag }} to PyPI uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0 with: print-hash: true publish-testpypi: name: Publish ๐Ÿ๐Ÿ“ฆ ${{ needs.pre-setup.outputs.git-tag }} to TestPyPI needs: - check - build - pre-setup # transitive, for accessing settings if: >- fromJSON(needs.pre-setup.outputs.is-untagged-devel) || fromJSON(needs.pre-setup.outputs.release-requested) runs-on: ubuntu-latest environment: name: testpypi url: >- https://test.pypi.org/project/aiomysql/${{ needs.pre-setup.outputs.dist-version }} permissions: id-token: write # this permission is mandatory for trusted publishing steps: - name: Download all the dists uses: actions/download-artifact@v5 with: artifact-ids: >- ${{ needs.build.outputs.dists-artifact-id }} path: dist/ - name: >- Publish ๐Ÿ๐Ÿ“ฆ ${{ needs.pre-setup.outputs.git-tag }} to TestPyPI uses: pypa/gh-action-pypi-publish@ed0c53931b1dc9bd32cbe73a98c7f6766f8a527e # v1.13.0 with: repository-url: https://test.pypi.org/legacy/ print-hash: true post-release-repo-update: name: >- Publish post-release Git tag for ${{ needs.pre-setup.outputs.git-tag }} needs: - publish-pypi - pre-setup # transitive, for accessing settings runs-on: ubuntu-latest permissions: contents: write # Required for pushing git tag steps: - name: Fetch the src snapshot uses: actions/checkout@v5 with: show-progress: false persist-credentials: true fetch-depth: 1 ref: ${{ github.event.inputs.release-commitish }} - name: Setup git user as [bot] uses: fregante/setup-git-user@024bc0b8e177d7e77203b48dab6fb45666854b35 - name: >- Tag the release in the local Git repo as ${{ needs.pre-setup.outputs.git-tag }} run: >- git tag -m "${GIT_TAG}" "${GIT_TAG}" -- ${RELEASE_COMMITISH} env: GIT_TAG: ${{ needs.pre-setup.outputs.git-tag }} RELEASE_COMMITISH: ${{ github.event.inputs.release-commitish }} - name: >- Push ${{ needs.pre-setup.outputs.git-tag }} tag corresponding to the just published release back to GitHub run: >- git push --atomic origin "${GIT_TAG}" env: GIT_TAG: ${{ needs.pre-setup.outputs.git-tag }} publish-github-release: name: >- Publish a tag and GitHub release for ${{ needs.pre-setup.outputs.git-tag }} needs: - post-release-repo-update - build - pre-setup # transitive, for accessing settings runs-on: ubuntu-latest permissions: contents: write discussions: write steps: - name: Fetch the src snapshot uses: actions/checkout@v5 with: show-progress: false persist-credentials: false fetch-depth: 1 ref: ${{ github.event.inputs.release-commitish }} - name: Download all the dists uses: actions/download-artifact@v5 with: artifact-ids: >- ${{ needs.build.outputs.dists-artifact-id }} path: dist/ - name: >- Publish a GitHub Release for ${{ needs.pre-setup.outputs.git-tag }} uses: ncipollo/release-action@b7eabc95ff50cbeeedec83973935c8f306dfcd0b # v1.20.0 with: artifacts: | dist/${{ needs.pre-setup.outputs.sdist-artifact-name }} dist/${{ needs.pre-setup.outputs.wheel-artifact-name }} artifactContentType: raw # Because whl and tgz are of different types # FIXME: Use Towncrier once it is integrated. 
bodyFile: CHANGES.txt discussionCategory: Announcements name: ${{ needs.pre-setup.outputs.git-tag }} tag: ${{ needs.pre-setup.outputs.git-tag }} immutableCreate: true aiomysql-0.3.2/.github/workflows/codeql.yml000066400000000000000000000017171507601712200207500ustar00rootroot00000000000000name: "CodeQL" on: push: branches: [ "main" ] pull_request: branches: [ "main" ] schedule: - cron: "17 21 * * 1" permissions: contents: read jobs: analyze: name: Analyze runs-on: ubuntu-latest permissions: actions: read contents: read security-events: write strategy: fail-fast: false matrix: language: - actions - python steps: - name: Checkout uses: actions/checkout@v5 with: show-progress: false persist-credentials: false - name: Initialize CodeQL uses: github/codeql-action/init@v4 with: languages: ${{ matrix.language }} queries: +security-and-quality - name: Autobuild uses: github/codeql-action/autobuild@v4 - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@v4 with: category: "/language:${{ matrix.language }}" aiomysql-0.3.2/.gitignore000066400000000000000000000014341507601712200153450ustar00rootroot00000000000000># Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] # C extensions *.so # Distribution / packaging .Python env/ pyvenv/ build/ develop-eggs/ dist/ downloads/ eggs/ lib/ lib64/ parts/ sdist/ var/ *.egg-info/ .installed.cfg *.egg # PyInstaller # Usually these files are written by a python script from a template # before PyInstaller builds the exe, so as to inject date/other infos into it. *.manifest *.spec # Installer logs pip-log.txt pip-delete-this-directory.txt # Unit test / coverage reports htmlcov/ .tox/ .coverage .cache nosetests.xml coverage.xml cover # Translations *.mo *.pot # Django stuff: *.log # Sphinx documentation docs/_build/ # PyBuilder target/ # PyCharm .idea *.iml # rope .ropeproject # pyenv .python-version tests/fixtures/my.cnf .pytest_cache aiomysql-0.3.2/.pre-commit-config.yaml000066400000000000000000000002051507601712200176310ustar00rootroot00000000000000repos: - repo: https://github.com/asottile/pyupgrade rev: v3.21.0 hooks: - id: pyupgrade args: [--py39-plus] aiomysql-0.3.2/.readthedocs.yaml000066400000000000000000000005771507601712200166130ustar00rootroot00000000000000# .readthedocs.yaml # Read the Docs configuration file # See https://docs.readthedocs.io/en/stable/config-file/v2.html for details # Required version: 2 build: os: ubuntu-24.04 tools: python: "3.13" sphinx: configuration: docs/conf.py fail_on_warning: true formats: - pdf - epub python: install: - requirements: requirements-docs.txt - method: pip path: . 
aiomysql-0.3.2/CHANGES.txt

Changes
-------

0.3.2 (2025-10-21)
^^^^^^^^^^^^^^^^^^

* Fix not persisting GitHub credentials during Git checkout action in CI for publishing release tags #1047

0.3.1 (2025-10-21)
^^^^^^^^^^^^^^^^^^

* Fix permissions in GitHub Actions workflow to allow pushing git tag after release #1046

0.3.0 (2025-10-21)
^^^^^^^^^^^^^^^^^^

* Drop support for Python 3.7 and 3.8, replaced by 3.12 and 3.13 #1026
* Bump minimum Sphinx version to generate documentation to 6.2.0 for Python 3.13 support #1026
* Sphinx now uses the furo theme for local and RTD builds #936
* Drop support for EoL MySQL 5.7, replaced by 8.4 and 9.4 #1032
* Drop support for EoL MariaDB 10.4, 10.5, 10.9, 10.10, replaced by 11.4, 11.8, 12.0 #1032
* Use immutable GitHub tags and releases for future releases #1034
* | Bump setuptools to >=80, setuptools-scm to >=7, <10 #1018
  | setuptools-scm must be at least 9.2.0 for consistent hash lengths of non-release builds.
* | Properly check whether loading of local files is enabled #1044
  | Loading local data now requires using the `local_infile` parameter, passing just the client flag through `client_flag` is no longer supported.
  | Fixes `GHSA-r397-ff8c-wv2g `_
  | Thanks to @KonstantAnxiety for reporting this.

0.2.0 (2023-06-11)
^^^^^^^^^^^^^^^^^^

* Bump minimal SQLAlchemy version to 1.3 #815
* Remove deprecated Pool.get #706
* | Partially ported `PyMySQL#304 `_ #792
  | aiomysql now reraises the original exception during connect() if it's not `IOError`, `OSError` or `asyncio.TimeoutError`.
  | This was previously always raised as `OperationalError`.
* Fix debug log level with sha256_password authentication #863
* Modernized code with `pyupgrade `_ to Python 3.7+ syntax #930
* Removed tests for EoL MariaDB versions 10.3, 10.7 and 10.8, added tests for MariaDB 10.9, 10.10, 10.11 #932

0.1.1 (2022-05-08)
^^^^^^^^^^^^^^^^^^

* Fix SSL connection handshake charset not respecting client configuration #776

0.1.0 (2022-04-11)
^^^^^^^^^^^^^^^^^^

* Don't send sys.argv[0] as program_name to MySQL server by default #620
* Allow running process as anonymous uid #587
* Fix timed out MySQL 8.0 connections raising InternalError rather than OperationalError #660
* Fix timed out MySQL 8.0 connections being returned from Pool #660
* Ensure connections are properly closed before raising an OperationalError when the server connection is lost #660
* Ensure connections are properly closed before raising an InternalError when packet sequence numbers are out of sync #660
* Unix sockets are now internally considered secure, allowing sha256_password and caching_sha2_password auth methods to be used #695
* Test suite now also tests unix socket connections #696
* Fix SSCursor raising InternalError when last result was not fully retrieved #635
* Remove deprecated no_delay argument #702
* Support PyMySQL up to version 1.0.2 #643
* Bump minimal PyMySQL version to 1.0.0 #713
* Align % formatting in Cursor.executemany() with Cursor.execute(), literal % now need to be doubled in Cursor.executemany() #714
* Fixed unlimited Pool size not working, this is now working as documented by passing maxsize=0 to create_pool #119
* Added Pool.closed property as present in aiopg #463
* Fixed SQLAlchemy connection context iterator #410
* Fix error packet handling for SSCursor #428
* Required python version is now properly documented in python_requires instead of failing on setup.py execution #731
* Add rsa extras_require depending on
PyMySQL[rsa] #557 * Migrate to PEP 517 build system #746 * Self-reported `__version__` now returns version generated by `setuptools-scm` during build, otherwise `'unknown'` #748 * Fix SSCursor raising query timeout error on wrong query #428 0.0.22 (2021-11-14) ^^^^^^^^^^^^^^^^^^^ * Support python 3.10 #505 0.0.21 (2020-11-26) ^^^^^^^^^^^^^^^^^^^ * Allow to use custom Cursor subclasses #374 * Fill Connection class with actual client version #388 * Fix legacy __aiter__ methods #403 * Fix & update docs #418 #437 * Ignore pyenv's .python-version file #424 * Replace asyncio.streams.IncompleteReadError with asyncio.IncompleteReadError #460 #454 * Add support for SQLAlchemy default parameters #455 #466 * Update dependencies #485 * Support Python 3.7 & 3.8 #493 0.0.20 (2018-12-19) ^^^^^^^^^^^^^^^^^^^ * Fixed connect_timeout #360 * Fixed support for SQLA executemany #324 * Fix the python 3.7 compatibility #357 * Fixed reuse connections when StreamReader has an exception #339 * Fixes warning when inserting binary strings #326 0.0.19 (2018-07-12) ^^^^^^^^^^^^^^^^^^^ * See v0.0.18 0.0.18 (2018-07-09) ^^^^^^^^^^^^^^^^^^^ * Updated to support latest PyMySQL changes. * aiomysql now sends client connection info. * MySQL8+ Support including sha256_password and cached_sha2_password authentication plugins. * Default max packet length sent to the server is no longer 1. * Fixes issue where cursor.nextset can hang on query sets that raise errors. 0.0.17 (2018-07-06) ^^^^^^^^^^^^^^^^^^^ * Pinned version of PyMySQL 0.0.16 (2018-06-03) ^^^^^^^^^^^^^^^^^^^ * Added ability to execute precompiled sqlalchemy queries #294 (Thanks @vlanse) 0.0.15 (2018-05-20) ^^^^^^^^^^^^^^^^^^^ * Fixed handling of user-defined types for sqlalchemy #290 * Fix KeyError when server reports unknown collation #289 0.0.14 (2018-04-22) ^^^^^^^^^^^^^^^^^^^ * Fixed SSL connection finalization #282 0.0.13 (2018-04-19) ^^^^^^^^^^^^^^^^^^^ * Added SSL support #280 (Thanks @terrycain) * Fixed __all__ in aiomysql/__init__ #270 (Thanks @matianjun1) * Added docker fixtures #275 (Thanks @terrycain) 0.0.12 (2018-01-18) ^^^^^^^^^^^^^^^^^^^ * Fixed support for SQLAlchemy 1.2.0 * Fixed argument for cursor.execute in sa engine #239 (Thanks @NotSoSuper) 0.0.11 (2017-12-06) ^^^^^^^^^^^^^^^^^^^ * Fixed README formatting on pypi 0.0.10 (2017-12-06) ^^^^^^^^^^^^^^^^^^^ * Updated regular expressions to be compatible with pymysql #167 (Thanks @AlexLisovoy) * Added connection recycling in the pool #216 0.0.9 (2016-09-14) ^^^^^^^^^^^^^^^^^^ * Fixed AttributeError in _request_authentication function #104 (Thanks @ttlttl) * Fixed legacy auth #105 * uvloop added to test suite #106 * Fixed bug with unicode in json field #107 (Thanks @methane) 0.0.8 (2016-08-24) ^^^^^^^^^^^^^^^^^^ * Default min pool size reduced to 1 #80 (Thanks @Drizzt1991) * Update to PyMySQL 0.7.5 #89 * Fixed connection cancellation in process of executing a query #79 (Thanks @Drizzt1991) 0.0.7 (2016-01-27) ^^^^^^^^^^^^^^^^^^ * Fix for multiple results issue, ported from pymysql #52 * Fixed useless warning with no_delay option #55 * Added async/await support for Engine, SAConnection, Transaction #57 * pool.release returns future so we can wait on it in __aexit__ #60 * Update to PyMySQL 0.6.7 0.0.6 (2015-12-11) ^^^^^^^^^^^^^^^^^^ * Fixed bug with SA rollback (Thanks @khlyestovillarion!) * Fixed issue with default no_delay option (Thanks @khlyestovillarion!) 
0.0.5 (2015-10-28) ^^^^^^^^^^^^^^^^^^ * no_delay option is deprecated and True by default * Add Cursor.mogrify() method * Support for "LOAD LOCAL INFILE" query. * Check connection inside pool, in case of timeout drop it, fixes #25 * Add support of python 3.5 features to pool, connection and cursor 0.0.4 (2015-05-23) ^^^^^^^^^^^^^^^^^^ * Allow to call connection.wait_closed twice. * Fixed sqlalchemy 1.0.0 support. * Fix #11: Rename Connection.wait_closed() to .ensure_closed() * Raise ResourceWarning on non-closed Connection * Rename Connection.connect to _connect 0.0.3 (2015-03-10) ^^^^^^^^^^^^^^^^^^ * Added support for PyMySQL up to 0.6.6. * Ported improvements from PyMySQL. * Added basic documentation. * Fixed and added more examples. 0.0.2 (2015-02-17) ^^^^^^^^^^^^^^^^^^ * Added MANIFEST.in. 0.0.1 (2015-02-17) ^^^^^^^^^^^^^^^^^^ * Initial release. * Implemented plain connections: connect, Connection, Cursor. * Implemented database pools. * Ported sqlalchemy optional support. aiomysql-0.3.2/CONTRIBUTING.rst000066400000000000000000000076561507601712200160320ustar00rootroot00000000000000Contributing ============ .. _GitHub: https://github.com/aio-libs/aiomysql Thanks for your interest in contributing to ``aiomysql``, there are multiple ways and places you can contribute. Reporting an Issue ------------------ If you have found issue with `aiomysql` please do not hesitate to file an issue on the GitHub_ project. When filing your issue please make sure you can express the issue with a reproducible test case. When reporting an issue we also need as much information about your environment that you can include. We never know what information will be pertinent when trying narrow down the issue. Please include at least the following information: * Version of `aiomysql` and `python`. * Version of MySQL/MariaDB. * Platform you're running on (OS X, Linux, Windows). Instructions for contributors ----------------------------- In order to make a clone of the GitHub_ repo: open the link and press the "Fork" button on the upper-right menu of the web page. I hope everybody knows how to work with git and github nowadays :) Workflow is pretty straightforward: 1. Clone the GitHub_ repo 2. Make a change 3. Make sure all tests passed 4. Commit changes to own aiomysql clone 5. Make pull request from github page for your clone Preconditions for running aiomysql test suite --------------------------------------------- We expect you to use a python virtual environment to run our tests. There are several ways to make a virtual environment. If you like to use *virtualenv* please run: .. code-block:: sh $ cd aiomysql $ virtualenv --python="$(which python3)" venv For standard python *venv*: .. code-block:: sh $ cd aiomysql $ python3 -m venv venv For *virtualenvwrapper*: .. code-block:: sh $ cd aiomysql $ mkvirtualenv --python="$(which python3)" aiomysql There are other tools like *pyvenv* but you know the rule of thumb now: create a python3 virtual environment and activate it. After that please install libraries required for development: .. code-block:: sh $ pip install -r requirements-dev.txt Congratulations, you are ready to run the test suite Install database ---------------- Fresh local installation of `mysql` has user `root` with empty password, tests use this values by default. But you always can override host/port, user and password in `aiomysql/tests/base.py` file or install corresponding environment variables. Tests require two databases to be created before running suit: .. 
code-block:: sh $ mysql -u root mysql> CREATE DATABASE test_pymysql DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_general_ci; mysql> CREATE DATABASE test_pymysql2 DEFAULT CHARACTER SET utf8 DEFAULT COLLATE utf8_general_ci; Run aiomysql test suite ----------------------- After all the preconditions are met you can run tests typing the next command: .. code-block:: sh $ make test The command at first will run the *flake8* tool (sorry, we don't accept pull requests with pep8 or pyflakes errors). On *flake8* success the tests will be run. Please take a look on the produced output. Any extra texts (print statements and so on) should be removed. Tests coverage -------------- We are trying hard to have good test coverage; please don't make it worse. Use: .. code-block:: sh $ make cov to run test suite and collect coverage information. Once the command has finished check your coverage at the file that appears in the last line of the output: ``open file:///.../aiomysql/coverage/index.html`` Please go to the link and make sure that your code change is covered. Documentation ------------- We encourage documentation improvements. Please before making a Pull Request about documentation changes run: .. code-block:: sh $ make doc Once it finishes it will output the index html page ``open file:///.../aiomysql/docs/_build/html/index.html``. Go to the link and make sure your doc changes looks good. The End ------- After finishing all steps make a GitHub_ Pull Request, thanks. aiomysql-0.3.2/LICENSE000066400000000000000000000020561507601712200143630ustar00rootroot00000000000000Copyright (c) 2010, 2013 PyMySQL contributors Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. aiomysql-0.3.2/Makefile000066400000000000000000000033131507601712200150130ustar00rootroot00000000000000# Some simple testing tasks (sorry, UNIX only). FLAGS= checkrst: python -m twine check --strict dist/* flake:checkrst flake8 aiomysql tests examples test: flake py.test -s $(FLAGS) ./tests/ vtest: py.test -s -v $(FLAGS) ./tests/ cov cover coverage: flake py.test -s -v --cov-report term --cov-report html --cov aiomysql ./tests @echo "open file://`pwd`/htmlcov/index.html" clean: rm -rf `find . -name __pycache__` rm -f `find . -type f -name '*.py[co]' ` rm -f `find . -type f -name '*~' ` rm -f `find . -type f -name '.*~' ` rm -f `find . -type f -name '@*' ` rm -f `find . -type f -name '#*#' ` rm -f `find . -type f -name '*.orig' ` rm -f `find . 
-type f -name '*.rej' `
	rm -f .coverage
	rm -rf coverage
	rm -rf build
	rm -rf htmlcov
	rm -rf dist

start_mysql:
	@echo "----------------------------------------------------"
	@echo "Starting mysql, see docker-compose.yml for user/pass"
	@echo "----------------------------------------------------"
	docker-compose -f docker-compose.yml up -d mysql

stop_mysql:
	docker-compose -f docker-compose.yml stop mysql

# TODO: this depends on aiomysql being installed, e.g. in a venv.
# TODO: maybe this can be solved better.
doc:
	@echo "----------------------------------------------------------------"
	@echo "Doc builds require installing the aiomysql package in the"
	@echo "environment. Make sure you've installed your current dev version"
	@echo "into your environment, e.g. using venv, then run this command in"
	@echo "the virtual environment."
	@echo "----------------------------------------------------------------"
	git fetch --tags --all
	make -C docs html
	@echo "open file://`pwd`/docs/_build/html/index.html"

.PHONY: all flake test vtest cov clean doc

aiomysql-0.3.2/README.rst

aiomysql
========

.. image:: https://github.com/aio-libs/aiomysql/actions/workflows/ci-cd.yml/badge.svg?branch=main
   :target: https://github.com/aio-libs/aiomysql/actions/workflows/ci-cd.yml
.. image:: https://codecov.io/gh/aio-libs/aiomysql/branch/main/graph/badge.svg
   :target: https://codecov.io/gh/aio-libs/aiomysql
   :alt: Code coverage
.. image:: https://badge.fury.io/py/aiomysql.svg
   :target: https://badge.fury.io/py/aiomysql
   :alt: Latest Version
.. image:: https://readthedocs.org/projects/aiomysql/badge/?version=latest
   :target: https://aiomysql.readthedocs.io/
   :alt: Documentation Status
.. image:: https://badges.gitter.im/Join%20Chat.svg
   :target: https://gitter.im/aio-libs/Lobby
   :alt: Chat on Gitter

**aiomysql** is a "driver" for accessing a `MySQL` database from the
asyncio_ (PEP-3156/tulip) framework. It depends on and reuses most parts of
PyMySQL_. *aiomysql* tries to be like the awesome aiopg_ library and preserve
the same API, look and feel. Internally **aiomysql** is a copy of PyMySQL with
the underlying I/O calls switched to async, essentially ``yield from`` and
``asyncio.coroutine`` added in the proper places. `sqlalchemy` support is
ported from aiopg_.

Documentation
-------------

https://aiomysql.readthedocs.io/

Basic Example
-------------

**aiomysql** is based on PyMySQL_ and provides the same API; you just need to
use ``await conn.f()`` or ``yield from conn.f()`` instead of calling
``conn.f()`` for every method. Properties are unchanged, so ``conn.prop`` is
correct as well as ``conn.prop = val``.

.. code:: python

    import asyncio
    import aiomysql


    async def test_example():
        async with aiomysql.create_pool(host='127.0.0.1', port=3306,
                                        user='root', password='',
                                        db='mysql') as pool:
            async with pool.acquire() as conn:
                async with conn.cursor() as cur:
                    await cur.execute("SELECT 42;")
                    print(cur.description)
                    (r,) = await cur.fetchone()
                    assert r == 42


    asyncio.run(test_example())

Example of SQLAlchemy optional integration
------------------------------------------

SQLAlchemy support has been ported from aiopg_, so the API should be very
familiar to aiopg_ users:

.. code:: python

    import asyncio
    import sqlalchemy as sa

    from aiomysql.sa import create_engine


    metadata = sa.MetaData()

    tbl = sa.Table('tbl', metadata,
                   sa.Column('id', sa.Integer, primary_key=True),
                   sa.Column('val', sa.String(255)))


    async def go():
        engine = await create_engine(user='root', db='test_pymysql',
                                     host='127.0.0.1', password='')
        async with engine.acquire() as conn:
            await conn.execute(tbl.insert().values(val='abc'))
            await conn.execute(tbl.insert().values(val='xyz'))

            async for row in conn.execute(tbl.select()):
                print(row.id, row.val)

        engine.close()
        await engine.wait_closed()


    asyncio.run(go())

Requirements
------------

* Python_ 3.9+
* PyMySQL_

.. _Python: https://www.python.org
.. _asyncio: http://docs.python.org/3.5/library/asyncio.html
.. _aiopg: https://github.com/aio-libs/aiopg
.. _PyMySQL: https://github.com/PyMySQL/PyMySQL
.. _Tornado-MySQL: https://github.com/PyMySQL/Tornado-MySQL

aiomysql-0.3.2/aiomysql/.gitignore

/_scm_version.py

aiomysql-0.3.2/aiomysql/__init__.py

"""
aiomysql: A pure-Python MySQL client library for asyncio.

Copyright (c) 2010, 2013-2014 PyMySQL contributors

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
""" from pymysql.converters import escape_dict, escape_sequence, escape_string from pymysql.err import (Warning, Error, InterfaceError, DataError, DatabaseError, OperationalError, IntegrityError, InternalError, NotSupportedError, ProgrammingError, MySQLError) from .connection import Connection, connect from .cursors import Cursor, SSCursor, DictCursor, SSDictCursor from .pool import create_pool, Pool from ._version import version __version__ = version __all__ = [ # Errors 'Error', 'DataError', 'DatabaseError', 'IntegrityError', 'InterfaceError', 'InternalError', 'MySQLError', 'NotSupportedError', 'OperationalError', 'ProgrammingError', 'Warning', 'escape_dict', 'escape_sequence', 'escape_string', 'Connection', 'Pool', 'connect', 'create_pool', 'Cursor', 'SSCursor', 'DictCursor', 'SSDictCursor' ] (Connection, Pool, connect, create_pool, Cursor, SSCursor, DictCursor, SSDictCursor) # pyflakes aiomysql-0.3.2/aiomysql/_scm_version.pyi000066400000000000000000000001751507601712200204250ustar00rootroot00000000000000# This stub file is necessary because `_scm_version.py` # autogenerated on build and absent on mypy checks time version: str aiomysql-0.3.2/aiomysql/_version.py000066400000000000000000000001271507601712200174070ustar00rootroot00000000000000try: from ._scm_version import version except ImportError: version = "unknown" aiomysql-0.3.2/aiomysql/connection.py000066400000000000000000001465301507601712200177330ustar00rootroot00000000000000# Python implementation of the MySQL client-server protocol # http://dev.mysql.com/doc/internals/en/client-server-protocol.html import asyncio import os import socket import struct import warnings import configparser import getpass from functools import partial from pymysql.charset import charset_by_name, charset_by_id from pymysql.constants import SERVER_STATUS from pymysql.constants import CLIENT from pymysql.constants import COMMAND from pymysql.constants import CR from pymysql.constants import FIELD_TYPE from pymysql.converters import (escape_item, encoders, decoders, escape_string, escape_bytes_prefixed, through) from pymysql.err import (Warning, Error, InterfaceError, DataError, DatabaseError, OperationalError, IntegrityError, InternalError, NotSupportedError, ProgrammingError) from pymysql.connections import TEXT_TYPES, MAX_PACKET_LEN, DEFAULT_CHARSET from pymysql.connections import _auth from pymysql.connections import MysqlPacket from pymysql.connections import FieldDescriptorPacket from pymysql.connections import EOFPacketWrapper from pymysql.connections import OKPacketWrapper from pymysql.connections import LoadLocalPacketWrapper # from aiomysql.utils import _convert_to_str from .cursors import Cursor from .utils import _pack_int24, _lenenc_int, _ConnectionContextManager, _ContextManager from .log import logger try: DEFAULT_USER = getpass.getuser() except KeyError: DEFAULT_USER = "unknown" def connect(host="localhost", user=None, password="", db=None, port=3306, unix_socket=None, charset='', sql_mode=None, read_default_file=None, conv=decoders, use_unicode=None, client_flag=0, cursorclass=Cursor, init_command=None, connect_timeout=None, read_default_group=None, autocommit=False, echo=False, local_infile=False, loop=None, ssl=None, auth_plugin='', program_name='', server_public_key=None): """See connections.Connection.__init__() for information about defaults.""" coro = _connect(host=host, user=user, password=password, db=db, port=port, unix_socket=unix_socket, charset=charset, sql_mode=sql_mode, read_default_file=read_default_file, conv=conv, 
use_unicode=use_unicode, client_flag=client_flag, cursorclass=cursorclass, init_command=init_command, connect_timeout=connect_timeout, read_default_group=read_default_group, autocommit=autocommit, echo=echo, local_infile=local_infile, loop=loop, ssl=ssl, auth_plugin=auth_plugin, program_name=program_name) return _ConnectionContextManager(coro) async def _connect(*args, **kwargs): conn = Connection(*args, **kwargs) await conn._connect() return conn async def _open_connection(host=None, port=None, **kwds): """This is based on asyncio.open_connection, allowing us to use a custom StreamReader. `limit` arg has been removed as we don't currently use it. """ loop = asyncio.events.get_running_loop() reader = _StreamReader(loop=loop) protocol = asyncio.StreamReaderProtocol(reader, loop=loop) transport, _ = await loop.create_connection( lambda: protocol, host, port, **kwds) writer = asyncio.StreamWriter(transport, protocol, reader, loop) return reader, writer async def _open_unix_connection(path=None, **kwds): """This is based on asyncio.open_unix_connection, allowing us to use a custom StreamReader. `limit` arg has been removed as we don't currently use it. """ loop = asyncio.events.get_running_loop() reader = _StreamReader(loop=loop) protocol = asyncio.StreamReaderProtocol(reader, loop=loop) transport, _ = await loop.create_unix_connection( lambda: protocol, path, **kwds) writer = asyncio.StreamWriter(transport, protocol, reader, loop) return reader, writer class _StreamReader(asyncio.StreamReader): """This StreamReader exposes whether EOF was received, allowing us to discard the associated connection instead of returning it from the pool when checking free connections in Pool._fill_free_pool(). `limit` arg has been removed as we don't currently use it. """ def __init__(self, loop=None): self._eof_received = False super().__init__(loop=loop) def feed_eof(self) -> None: self._eof_received = True super().feed_eof() @property def eof_received(self): return self._eof_received class Connection: """Representation of a socket with a mysql server. The proper way to get an instance of this class is to call connect(). """ def __init__(self, host="localhost", user=None, password="", db=None, port=3306, unix_socket=None, charset='', sql_mode=None, read_default_file=None, conv=decoders, use_unicode=None, client_flag=0, cursorclass=Cursor, init_command=None, connect_timeout=None, read_default_group=None, autocommit=False, echo=False, local_infile=False, loop=None, ssl=None, auth_plugin='', program_name='', server_public_key=None): """ Establish a connection to the MySQL database. Accepts several arguments: :param host: Host where the database server is located :param user: Username to log in as :param password: Password to use. :param db: Database to use, None to not use a particular one. :param port: MySQL port to use, default is usually OK. :param unix_socket: Optionally, you can use a unix socket rather than TCP/IP. :param charset: Charset you want to use. :param sql_mode: Default SQL_MODE to use. :param read_default_file: Specifies my.cnf file to read these parameters from under the [client] section. :param conv: Decoders dictionary to use instead of the default one. This is used to provide custom marshalling of types. See converters. :param use_unicode: Whether or not to default to unicode strings. :param client_flag: Custom flags to send to MySQL. Find potential values in constants.CLIENT. :param cursorclass: Custom cursor class to use. 
:param init_command: Initial SQL statement to run when connection is established. :param connect_timeout: Timeout before throwing an exception when connecting. :param read_default_group: Group to read from in the configuration file. :param autocommit: Autocommit mode. None means use server default. (default: False) :param local_infile: boolean to enable the use of LOAD DATA LOCAL command. (default: False) :param ssl: Optional SSL Context to force SSL :param auth_plugin: String to manually specify the authentication plugin to use, i.e you will want to use mysql_clear_password when using IAM authentication with Amazon RDS. (default: Server Default) :param program_name: Program name string to provide when handshaking with MySQL. (omitted by default) :param server_public_key: SHA256 authentication plugin public key value. :param loop: asyncio loop """ self._loop = loop or asyncio.get_event_loop() if use_unicode is None: use_unicode = True if read_default_file: if not read_default_group: read_default_group = "client" cfg = configparser.RawConfigParser() cfg.read(os.path.expanduser(read_default_file)) _config = partial(cfg.get, read_default_group) user = _config("user", fallback=user) password = _config("password", fallback=password) host = _config("host", fallback=host) db = _config("database", fallback=db) unix_socket = _config("socket", fallback=unix_socket) port = int(_config("port", fallback=port)) charset = _config("default-character-set", fallback=charset) self._host = host self._port = port self._user = user or DEFAULT_USER self._password = password or "" self._db = db self._echo = echo self._last_usage = self._loop.time() self._client_auth_plugin = auth_plugin self._server_auth_plugin = "" self._auth_plugin_used = "" self._secure = False self.server_public_key = server_public_key self.salt = None from . import __version__ self._connect_attrs = { '_client_name': 'aiomysql', '_pid': str(os.getpid()), '_client_version': __version__, } if program_name: self._connect_attrs["program_name"] = program_name self._unix_socket = unix_socket if charset: self._charset = charset self.use_unicode = True else: self._charset = DEFAULT_CHARSET self.use_unicode = False if use_unicode is not None: self.use_unicode = use_unicode self._ssl_context = ssl if ssl: client_flag |= CLIENT.SSL self._encoding = charset_by_name(self._charset).encoding self._local_infile = bool(local_infile) if self._local_infile: client_flag |= CLIENT.LOCAL_FILES client_flag |= CLIENT.CAPABILITIES client_flag |= CLIENT.MULTI_STATEMENTS if self._db: client_flag |= CLIENT.CONNECT_WITH_DB self.client_flag = client_flag self.cursorclass = cursorclass self.connect_timeout = connect_timeout self._result = None self._affected_rows = 0 self.host_info = "Not connected" #: specified autocommit mode. None means use server default. self.autocommit_mode = autocommit self.encoders = encoders # Need for MySQLdb compatibility. 
self.decoders = conv self.sql_mode = sql_mode self.init_command = init_command # asyncio StreamReader, StreamWriter self._reader = None self._writer = None # If connection was closed for specific reason, we should show that to # user self._close_reason = None @property def host(self): """MySQL server IP address or name""" return self._host @property def port(self): """MySQL server TCP/IP port""" return self._port @property def unix_socket(self): """MySQL Unix socket file location""" return self._unix_socket @property def db(self): """Current database name.""" return self._db @property def user(self): """User used while connecting to MySQL""" return self._user @property def echo(self): """Return echo mode status.""" return self._echo @property def last_usage(self): """Return time() when connection was used.""" return self._last_usage @property def loop(self): return self._loop @property def closed(self): """The readonly property that returns ``True`` if connections is closed. """ return self._writer is None @property def encoding(self): """Encoding employed for this connection.""" return self._encoding @property def charset(self): """Returns the character set for current connection.""" return self._charset def close(self): """Close socket connection""" if self._writer: self._writer.transport.close() self._writer = None self._reader = None async def ensure_closed(self): """Send quit command and then close socket connection""" if self._writer is None: # connection has been closed return send_data = struct.pack('= 5: self.client_flag |= CLIENT.MULTI_RESULTS if self.user is None: raise ValueError("Did not specify a username") charset_id = charset_by_name(self.charset).id data_init = struct.pack('=5.0) data += authresp + b'\0' if self._db and self.server_capabilities & CLIENT.CONNECT_WITH_DB: if isinstance(self._db, str): db = self._db.encode(self.encoding) else: db = self._db data += db + b'\0' if self.server_capabilities & CLIENT.PLUGIN_AUTH: name = auth_plugin if isinstance(name, str): name = name.encode('ascii') data += name + b'\0' self._auth_plugin_used = auth_plugin # Sends the server a few pieces of client info if self.server_capabilities & CLIENT.CONNECT_ATTRS: connect_attrs = b'' for k, v in self._connect_attrs.items(): k, v = k.encode('utf8'), v.encode('utf8') connect_attrs += struct.pack('B', len(k)) + k connect_attrs += struct.pack('B', len(v)) + v data += struct.pack('B', len(connect_attrs)) + connect_attrs self.write_packet(data) auth_packet = await self._read_packet() # if authentication method isn't accepted the first byte # will have the octet 254 if auth_packet.is_auth_switch_request(): # https://dev.mysql.com/doc/internals/en/ # connection-phase-packets.html#packet-Protocol::AuthSwitchRequest auth_packet.read_uint8() # 0xfe packet identifier plugin_name = auth_packet.read_string() if (self.server_capabilities & CLIENT.PLUGIN_AUTH and plugin_name is not None): await self._process_auth(plugin_name, auth_packet) else: # send legacy handshake data = _auth.scramble_old_password( self._password.encode('latin1'), auth_packet.read_all()) + b'\0' self.write_packet(data) await self._read_packet() elif auth_packet.is_extra_auth_data(): if auth_plugin == "caching_sha2_password": await self.caching_sha2_password_auth(auth_packet) elif auth_plugin == "sha256_password": await self.sha256_password_auth(auth_packet) else: raise OperationalError("Received extra packet " "for auth method %r", auth_plugin) async def _process_auth(self, plugin_name, auth_packet): # These auth plugins do their own 
packet handling if plugin_name == b"caching_sha2_password": await self.caching_sha2_password_auth(auth_packet) self._auth_plugin_used = plugin_name.decode() elif plugin_name == b"sha256_password": await self.sha256_password_auth(auth_packet) self._auth_plugin_used = plugin_name.decode() else: if plugin_name == b"mysql_native_password": # https://dev.mysql.com/doc/internals/en/ # secure-password-authentication.html#packet-Authentication:: # Native41 data = _auth.scramble_native_password( self._password.encode('latin1'), auth_packet.read_all()) elif plugin_name == b"mysql_old_password": # https://dev.mysql.com/doc/internals/en/ # old-password-authentication.html data = _auth.scramble_old_password( self._password.encode('latin1'), auth_packet.read_all() ) + b'\0' elif plugin_name == b"mysql_clear_password": # https://dev.mysql.com/doc/internals/en/ # clear-text-authentication.html data = self._password.encode('latin1') + b'\0' else: raise OperationalError( 2059, "Authentication plugin '{}'" " not configured".format(plugin_name) ) self.write_packet(data) pkt = await self._read_packet() pkt.check_error() self._auth_plugin_used = plugin_name.decode() return pkt async def caching_sha2_password_auth(self, pkt): # No password fast path if not self._password: self.write_packet(b'') pkt = await self._read_packet() pkt.check_error() return pkt if pkt.is_auth_switch_request(): # Try from fast auth logger.debug("caching sha2: Trying fast path") self.salt = pkt.read_all() scrambled = _auth.scramble_caching_sha2( self._password.encode('latin1'), self.salt ) self.write_packet(scrambled) pkt = await self._read_packet() pkt.check_error() # else: fast auth is tried in initial handshake if not pkt.is_extra_auth_data(): raise OperationalError( "caching sha2: Unknown packet " "for fast auth: {}".format(pkt._data[:1]) ) # magic numbers: # 2 - request public key # 3 - fast auth succeeded # 4 - need full auth pkt.advance(1) n = pkt.read_uint8() if n == 3: logger.debug("caching sha2: succeeded by fast path.") pkt = await self._read_packet() pkt.check_error() # pkt must be OK packet return pkt if n != 4: raise OperationalError("caching sha2: Unknown " "result for fast auth: {}".format(n)) logger.debug("caching sha2: Trying full auth...") if self._secure: logger.debug("caching sha2: Sending plain " "password via secure connection") self.write_packet(self._password.encode('latin1') + b'\0') pkt = await self._read_packet() pkt.check_error() return pkt if not self.server_public_key: self.write_packet(b'\x02') pkt = await self._read_packet() # Request public key pkt.check_error() if not pkt.is_extra_auth_data(): raise OperationalError( "caching sha2: Unknown packet " "for public key: {}".format(pkt._data[:1]) ) self.server_public_key = pkt._data[1:] logger.debug(self.server_public_key.decode('ascii')) data = _auth.sha2_rsa_encrypt( self._password.encode('latin1'), self.salt, self.server_public_key ) self.write_packet(data) pkt = await self._read_packet() pkt.check_error() async def sha256_password_auth(self, pkt): if self._secure: logger.debug("sha256: Sending plain password") data = self._password.encode('latin1') + b'\0' self.write_packet(data) pkt = await self._read_packet() pkt.check_error() return pkt if pkt.is_auth_switch_request(): self.salt = pkt.read_all() if not self.server_public_key and self._password: # Request server public key logger.debug("sha256: Requesting server public key") self.write_packet(b'\1') pkt = await self._read_packet() pkt.check_error() if pkt.is_extra_auth_data(): self.server_public_key = 
pkt._data[1:] logger.debug( "Received public key:\n%s", self.server_public_key.decode('ascii') ) if self._password: if not self.server_public_key: raise OperationalError("Couldn't receive server's public key") data = _auth.sha2_rsa_encrypt( self._password.encode('latin1'), self.salt, self.server_public_key ) else: data = b'' self.write_packet(data) pkt = await self._read_packet() pkt.check_error() return pkt # _mysql support def thread_id(self): return self.server_thread_id[0] def character_set_name(self): return self._charset def get_host_info(self): return self.host_info def get_proto_info(self): return self.protocol_version async def _get_server_information(self): i = 0 packet = await self._read_packet() data = packet.get_all_data() # logger.debug(dump_packet(data)) self.protocol_version = data[i] i += 1 server_end = data.find(b'\0', i) self.server_version = data[i:server_end].decode('latin1') i = server_end + 1 self.server_thread_id = struct.unpack('= i + 6: lang, stat, cap_h, salt_len = struct.unpack('= i + salt_len: # salt_len includes auth_plugin_data_part_1 and filler self.salt += data[i:i + salt_len] i += salt_len i += 1 # AUTH PLUGIN NAME may appear here. if self.server_capabilities & CLIENT.PLUGIN_AUTH and len(data) >= i: # Due to Bug#59453 the auth-plugin-name is missing the terminating # NUL-char in versions prior to 5.5.10 and 5.6.2. # ref: https://dev.mysql.com/doc/internals/en/ # connection-phase-packets.html#packet-Protocol::Handshake # didn't use version checks as mariadb is corrected and reports # earlier than those two. server_end = data.find(b'\0', i) if server_end < 0: # pragma: no cover - very specific upstream bug # not found \0 and last field so take it all self._server_auth_plugin = data[i:].decode('latin1') else: self._server_auth_plugin = data[i:server_end].decode('latin1') def get_transaction_status(self): return bool(self.server_status & SERVER_STATUS.SERVER_STATUS_IN_TRANS) def get_server_info(self): return self.server_version # Just to always have consistent errors 2 helpers def _close_on_cancel(self): self.close() self._close_reason = "Cancelled during execution" def _ensure_alive(self): if not self._writer: if self._close_reason is None: raise InterfaceError("(0, 'Not connected')") else: raise InterfaceError(self._close_reason) def __del__(self): if self._writer: warnings.warn(f"Unclosed connection {self!r}", ResourceWarning) self.close() Warning = Warning Error = Error InterfaceError = InterfaceError DatabaseError = DatabaseError DataError = DataError OperationalError = OperationalError IntegrityError = IntegrityError InternalError = InternalError ProgrammingError = ProgrammingError NotSupportedError = NotSupportedError # TODO: move OK and EOF packet parsing/logic into a proper subclass # of MysqlPacket like has been done with FieldDescriptorPacket. class MySQLResult: def __init__(self, connection): self.connection = connection self.affected_rows = None self.insert_id = None self.server_status = None self.warning_count = 0 self.message = None self.field_count = 0 self.description = None self.rows = None self.has_next = None self.unbuffered_active = False async def read(self): try: first_packet = await self.connection._read_packet() # TODO: use classes for different packet types? 
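            # The first packet returned after a query tells us what kind of
            # response follows: an OK packet (statements that produce no
            # result set), a LOCAL INFILE request asking the client to upload
            # a file, or a column-count packet that opens a result set.
            # These are the three branches handled below.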
if first_packet.is_ok_packet(): self._read_ok_packet(first_packet) elif first_packet.is_load_local_packet(): await self._read_load_local_packet(first_packet) else: await self._read_result_packet(first_packet) finally: self.connection = None async def init_unbuffered_query(self): self.unbuffered_active = True first_packet = await self.connection._read_packet() if first_packet.is_ok_packet(): self._read_ok_packet(first_packet) self.unbuffered_active = False self.connection = None elif first_packet.is_load_local_packet(): await self._read_load_local_packet(first_packet) self.unbuffered_active = False self.connection = None else: self.field_count = first_packet.read_length_encoded_integer() await self._get_descriptions() # Apparently, MySQLdb picks this number because it's the maximum # value of a 64bit unsigned integer. Since we're emulating MySQLdb, # we set it to this instead of None, which would be preferred. self.affected_rows = 18446744073709551615 def _read_ok_packet(self, first_packet): ok_packet = OKPacketWrapper(first_packet) self.affected_rows = ok_packet.affected_rows self.insert_id = ok_packet.insert_id self.server_status = ok_packet.server_status self.warning_count = ok_packet.warning_count self.message = ok_packet.message self.has_next = ok_packet.has_next async def _read_load_local_packet(self, first_packet): if not self.connection._local_infile: raise RuntimeError( "**WARN**: Received LOAD_LOCAL packet but local_infile option is false." ) load_packet = LoadLocalPacketWrapper(first_packet) sender = LoadLocalFile(load_packet.filename, self.connection) try: await sender.send_data() except Exception: # Skip ok packet await self.connection._read_packet() raise ok_packet = await self.connection._read_packet() if not ok_packet.is_ok_packet(): raise OperationalError(2014, "Commands Out of Sync") self._read_ok_packet(ok_packet) def _check_packet_is_eof(self, packet): if packet.is_eof_packet(): eof_packet = EOFPacketWrapper(packet) self.warning_count = eof_packet.warning_count self.has_next = eof_packet.has_next return True return False async def _read_result_packet(self, first_packet): self.field_count = first_packet.read_length_encoded_integer() await self._get_descriptions() await self._read_rowdata_packet() async def _read_rowdata_packet_unbuffered(self): # Check if in an active query if not self.unbuffered_active: return packet = await self.connection._read_packet() if self._check_packet_is_eof(packet): self.unbuffered_active = False self.connection = None self.rows = None return row = self._read_row_from_packet(packet) self.affected_rows = 1 # rows should tuple of row for MySQL-python compatibility. self.rows = (row,) return row async def _finish_unbuffered_query(self): # After much reading on the MySQL protocol, it appears that there is, # in fact, no way to stop MySQL from sending all the data after # executing a query, so we just spin, and wait for an EOF packet. while self.unbuffered_active: try: packet = await self.connection._read_packet() except OperationalError as e: # TODO: replace these numbers with constants when available # TODO: in a new PyMySQL release if e.args[0] in ( 3024, # ER.QUERY_TIMEOUT 1969, # ER.STATEMENT_TIMEOUT ): # if the query timed out we can simply ignore this error self.unbuffered_active = False self.connection = None return raise if self._check_packet_is_eof(packet): self.unbuffered_active = False # release reference to kill cyclic reference. 
self.connection = None async def _read_rowdata_packet(self): """Read a rowdata packet for each data row in the result set.""" rows = [] while True: packet = await self.connection._read_packet() if self._check_packet_is_eof(packet): # release reference to kill cyclic reference. self.connection = None break rows.append(self._read_row_from_packet(packet)) self.affected_rows = len(rows) self.rows = tuple(rows) def _read_row_from_packet(self, packet): row = [] for encoding, converter in self.converters: try: data = packet.read_length_coded_string() except IndexError: # No more columns in this row # See https://github.com/PyMySQL/PyMySQL/pull/434 break if data is not None: if encoding is not None: data = data.decode(encoding) if converter is not None: data = converter(data) row.append(data) return tuple(row) async def _get_descriptions(self): """Read a column descriptor packet for each column in the result.""" self.fields = [] self.converters = [] use_unicode = self.connection.use_unicode conn_encoding = self.connection.encoding description = [] for i in range(self.field_count): field = await self.connection._read_packet( FieldDescriptorPacket) self.fields.append(field) description.append(field.description()) field_type = field.type_code if use_unicode: if field_type == FIELD_TYPE.JSON: # When SELECT from JSON column: charset = binary # When SELECT CAST(... AS JSON): charset = connection # encoding # This behavior is different from TEXT / BLOB. # We should decode result by connection encoding # regardless charsetnr. # See https://github.com/PyMySQL/PyMySQL/issues/488 encoding = conn_encoding # SELECT CAST(... AS JSON) elif field_type in TEXT_TYPES: if field.charsetnr == 63: # binary # TEXTs with charset=binary means BINARY types. encoding = None else: encoding = conn_encoding else: # Integers, Dates and Times, and other basic data # is encoded in ascii encoding = 'ascii' else: encoding = None converter = self.connection.decoders.get(field_type) if converter is through: converter = None self.converters.append((encoding, converter)) eof_packet = await self.connection._read_packet() assert eof_packet.is_eof_packet(), 'Protocol error, expecting EOF' self.description = tuple(description) class LoadLocalFile: def __init__(self, filename, connection): self.filename = filename self.connection = connection self._loop = connection.loop self._file_object = None self._executor = None # means use default executor def _open_file(self): def opener(filename): try: self._file_object = open(filename, 'rb') except OSError as e: msg = f"Can't find file '{filename}'" raise OperationalError(1017, msg) from e fut = self._loop.run_in_executor(self._executor, opener, self.filename) return fut def _file_read(self, chunk_size): def freader(chunk_size): try: chunk = self._file_object.read(chunk_size) if not chunk: self._file_object.close() self._file_object = None except Exception as e: self._file_object.close() self._file_object = None msg = f"Error reading file {self.filename}" raise OperationalError(1024, msg) from e return chunk fut = self._loop.run_in_executor(self._executor, freader, chunk_size) return fut async def send_data(self): """Send data packets from the local file to the server""" self.connection._ensure_alive() conn = self.connection try: await self._open_file() with self._file_object: chunk_size = MAX_PACKET_LEN while True: chunk = await self._file_read(chunk_size) if not chunk: break # TODO: consider drain data conn.write_packet(chunk) except asyncio.CancelledError: self.connection._close_on_cancel() 
raise finally: # send the empty packet to signify we are done sending data conn.write_packet(b"") aiomysql-0.3.2/aiomysql/cursors.py000066400000000000000000000557241507601712200173000ustar00rootroot00000000000000import re import json import warnings import contextlib from pymysql.err import ( Warning, Error, InterfaceError, DataError, DatabaseError, OperationalError, IntegrityError, InternalError, NotSupportedError, ProgrammingError) from .log import logger from .connection import FIELD_TYPE # https://github.com/PyMySQL/PyMySQL/blob/d7bb777e503d82bf2496113f07dd4ab249615efc/pymysql/cursors.py#L6-L14 #: Regular expression for :meth:`Cursor.executemany`. #: executemany only supports simple bulk insert. #: You can use it to load large dataset. RE_INSERT_VALUES = re.compile( r"\s*((?:INSERT|REPLACE)\s.+\sVALUES?\s+)" + r"(\(\s*(?:%s|%\(.+\)s)\s*(?:,\s*(?:%s|%\(.+\)s)\s*)*\))" + r"(\s*(?:ON DUPLICATE.*)?);?\s*\Z", re.IGNORECASE | re.DOTALL) class Cursor: """Cursor is used to interact with the database.""" #: Max statement size which :meth:`executemany` generates. #: #: Max size of allowed statement is max_allowed_packet - # packet_header_size. #: Default value of max_allowed_packet is 1048576. max_stmt_length = 1024000 def __init__(self, connection, echo=False): """Do not create an instance of a Cursor yourself. Call connections.Connection.cursor(). """ self._connection = connection self._loop = self._connection.loop self._description = None self._rownumber = 0 self._rowcount = -1 self._arraysize = 1 self._executed = None self._result = None self._rows = None self._lastrowid = None self._echo = echo @property def connection(self): """This read-only attribute return a reference to the Connection object on which the cursor was created.""" return self._connection @property def description(self): """This read-only attribute is a sequence of 7-item sequences. Each of these sequences is a collections.namedtuple containing information describing one result column: 0. name: the name of the column returned. 1. type_code: the type of the column. 2. display_size: the actual length of the column in bytes. 3. internal_size: the size in bytes of the column associated to this column on the server. 4. precision: total number of significant digits in columns of type NUMERIC. None for other types. 5. scale: count of decimal digits in the fractional part in columns of type NUMERIC. None for other types. 6. null_ok: always None as not easy to retrieve from the libpq. This attribute will be None for operations that do not return rows or if the cursor has not had an operation invoked via the execute() method yet. """ return self._description @property def rowcount(self): """Returns the number of rows that has been produced of affected. This read-only attribute specifies the number of rows that the last :meth:`execute` produced (for Data Query Language statements like SELECT) or affected (for Data Manipulation Language statements like UPDATE or INSERT). The attribute is -1 in case no .execute() has been performed on the cursor or the row count of the last operation if it can't be determined by the interface. """ return self._rowcount @property def rownumber(self): """Row index. This read-only attribute provides the current 0-based index of the cursor in the result set or ``None`` if the index cannot be determined. """ return self._rownumber @property def arraysize(self): """How many rows will be returned by fetchmany() call. This read/write attribute specifies the number of rows to fetch at a time with fetchmany(). 
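        For example (a sketch; ``cur`` is any open cursor):

            cur.arraysize = 100
            rows = await cur.fetchmany()   # returns at most 100 rows
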
It defaults to 1 meaning to fetch a single row at a time. """ return self._arraysize @arraysize.setter def arraysize(self, val): """How many rows will be returned by fetchmany() call. This read/write attribute specifies the number of rows to fetch at a time with fetchmany(). It defaults to 1 meaning to fetch a single row at a time. """ self._arraysize = val @property def lastrowid(self): """This read-only property returns the value generated for an AUTO_INCREMENT column by the previous INSERT or UPDATE statement or None when there is no such value available. For example, if you perform an INSERT into a table that contains an AUTO_INCREMENT column, lastrowid returns the AUTO_INCREMENT value for the new row. """ return self._lastrowid @property def echo(self): """Return echo mode status.""" return self._echo @property def closed(self): """The readonly property that returns ``True`` if connections was detached from current cursor """ return True if not self._connection else False async def close(self): """Closing a cursor just exhausts all remaining data.""" conn = self._connection if conn is None: return try: while (await self.nextset()): pass finally: self._connection = None def _get_db(self): if not self._connection: raise ProgrammingError("Cursor closed") return self._connection def _check_executed(self): if not self._executed: raise ProgrammingError("execute() first") def _conv_row(self, row): return row def setinputsizes(self, *args): """Does nothing, required by DB API.""" def setoutputsizes(self, *args): """Does nothing, required by DB API.""" async def nextset(self): """Get the next query set""" conn = self._get_db() current_result = self._result if current_result is None or current_result is not conn._result: return if not current_result.has_next: return self._result = None self._clear_result() await conn.next_result() await self._do_get_result() return True def _escape_args(self, args, conn): if isinstance(args, (tuple, list)): return tuple(conn.escape(arg) for arg in args) elif isinstance(args, dict): return {key: conn.escape(val) for (key, val) in args.items()} else: # If it's not a dictionary let's try escaping it anyways. # Worst case it will throw a Value error return conn.escape(args) def mogrify(self, query, args=None): """ Returns the exact string that is sent to the database by calling the execute() method. This method follows the extension to the DB API 2.0 followed by Psycopg. :param query: ``str`` sql statement :param args: ``tuple`` or ``list`` of arguments for sql query """ conn = self._get_db() if args is not None: query = query % self._escape_args(args, conn) return query async def execute(self, query, args=None): """Executes the given operation Executes the given operation substituting any markers with the given parameters. For example, getting all rows where id is 5: cursor.execute("SELECT * FROM t1 WHERE id = %s", (5,)) :param query: ``str`` sql statement :param args: ``tuple`` or ``list`` of arguments for sql query :returns: ``int``, number of rows that has been produced of affected """ conn = self._get_db() while (await self.nextset()): pass if args is not None: query = query % self._escape_args(args, conn) await self._query(query) self._executed = query if self._echo: logger.info(query) logger.info("%r", args) return self._rowcount async def executemany(self, query, args): """Execute the given operation multiple times The executemany() method will execute the operation iterating over the list of parameters in seq_params. 
Example: Inserting 3 new employees and their phone number data = [ ('Jane','555-001'), ('Joe', '555-001'), ('John', '555-003') ] stmt = "INSERT INTO employees (name, phone) VALUES ('%s','%s')" await cursor.executemany(stmt, data) INSERT or REPLACE statements are optimized by batching the data, that is using the MySQL multiple rows syntax. :param query: `str`, sql statement :param args: ``tuple`` or ``list`` of arguments for sql query """ if not args: return if self._echo: logger.info("CALL %s", query) logger.info("%r", args) m = RE_INSERT_VALUES.match(query) if m: q_prefix = m.group(1) % () q_values = m.group(2).rstrip() q_postfix = m.group(3) or '' assert q_values[0] == '(' and q_values[-1] == ')' return (await self._do_execute_many( q_prefix, q_values, q_postfix, args, self.max_stmt_length, self._get_db().encoding)) else: rows = 0 for arg in args: await self.execute(query, arg) rows += self._rowcount self._rowcount = rows return self._rowcount async def _do_execute_many(self, prefix, values, postfix, args, max_stmt_length, encoding): conn = self._get_db() escape = self._escape_args if isinstance(prefix, str): prefix = prefix.encode(encoding) if isinstance(postfix, str): postfix = postfix.encode(encoding) sql = bytearray(prefix) args = iter(args) v = values % escape(next(args), conn) if isinstance(v, str): v = v.encode(encoding, 'surrogateescape') sql += v rows = 0 for arg in args: v = values % escape(arg, conn) if isinstance(v, str): v = v.encode(encoding, 'surrogateescape') if len(sql) + len(v) + len(postfix) + 1 > max_stmt_length: r = await self.execute(sql + postfix) rows += r sql = bytearray(prefix) else: sql += b',' sql += v r = await self.execute(sql + postfix) rows += r self._rowcount = rows return rows async def callproc(self, procname, args=()): """Execute stored procedure procname with args Compatibility warning: PEP-249 specifies that any modified parameters must be returned. This is currently impossible as they are only available by storing them in a server variable and then retrieved by a query. Since stored procedures return zero or more result sets, there is no reliable way to get at OUT or INOUT parameters via callproc. The server variables are named @_procname_n, where procname is the parameter above and n is the position of the parameter (from zero). Once all result sets generated by the procedure have been fetched, you can issue a SELECT @_procname_0, ... query using .execute() to get any OUT or INOUT values. Compatibility warning: The act of calling a stored procedure itself creates an empty result set. This appears after any result sets generated by the procedure. This is non-standard behavior with respect to the DB-API. Be sure to use nextset() to advance through all result sets; otherwise you may get disconnected. :param procname: ``str``, name of procedure to execute on server :param args: `sequence of parameters to use with procedure :returns: the original args. 
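        Example (an illustrative sketch; the procedure name ``get_stats``
        and its single INOUT parameter are assumptions, not part of this
        API):

            await cursor.callproc('get_stats', (2024,))
            rows = await cursor.fetchall()   # rows produced by the procedure
            await cursor.nextset()           # skip the extra empty result set CALL creates
            await cursor.execute('SELECT @_get_stats_0')
            out_row = await cursor.fetchone()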
""" conn = self._get_db() if self._echo: logger.info("CALL %s", procname) logger.info("%r", args) for index, arg in enumerate(args): q = "SET @_%s_%d=%s" % (procname, index, conn.escape(arg)) await self._query(q) await self.nextset() _args = ','.join('@_%s_%d' % (procname, i) for i in range(len(args))) q = f"CALL {procname}({_args})" await self._query(q) self._executed = q return args def fetchone(self): """Fetch the next row """ self._check_executed() fut = self._loop.create_future() if self._rows is None or self._rownumber >= len(self._rows): fut.set_result(None) return fut result = self._rows[self._rownumber] self._rownumber += 1 fut = self._loop.create_future() fut.set_result(result) return fut def fetchmany(self, size=None): """Returns the next set of rows of a query result, returning a list of tuples. When no more rows are available, it returns an empty list. The number of rows returned can be specified using the size argument, which defaults to one :param size: ``int`` number of rows to return :returns: ``list`` of fetched rows """ self._check_executed() fut = self._loop.create_future() if self._rows is None: fut.set_result([]) return fut end = self._rownumber + (size or self._arraysize) result = self._rows[self._rownumber:end] self._rownumber = min(end, len(self._rows)) fut.set_result(result) return fut def fetchall(self): """Returns all rows of a query result set :returns: ``list`` of fetched rows """ self._check_executed() fut = self._loop.create_future() if self._rows is None: fut.set_result([]) return fut if self._rownumber: result = self._rows[self._rownumber:] else: result = self._rows self._rownumber = len(self._rows) fut.set_result(result) return fut def scroll(self, value, mode='relative'): """Scroll the cursor in the result set to a new position according to mode. If mode is relative (default), value is taken as offset to the current position in the result set, if set to absolute, value states an absolute target position. An IndexError should be raised in case a scroll operation would leave the result set. In this case, the cursor position is left undefined (ideal would be to not move the cursor at all). :param int value: move cursor to next position according to mode. 
:param str mode: scroll mode, possible modes: `relative` and `absolute` """ self._check_executed() if mode == 'relative': r = self._rownumber + value elif mode == 'absolute': r = value else: raise ProgrammingError("unknown scroll mode %s" % mode) if not (0 <= r < len(self._rows)): raise IndexError("out of range") self._rownumber = r fut = self._loop.create_future() fut.set_result(None) return fut async def _query(self, q): conn = self._get_db() self._last_executed = q self._clear_result() await conn.query(q) await self._do_get_result() def _clear_result(self): self._rownumber = 0 self._result = None self._rowcount = 0 self._description = None self._lastrowid = None self._rows = None async def _do_get_result(self): conn = self._get_db() self._rownumber = 0 self._result = result = conn._result self._rowcount = result.affected_rows self._description = result.description self._lastrowid = result.insert_id self._rows = result.rows if result.warning_count > 0: await self._show_warnings(conn) async def _show_warnings(self, conn): if self._result and self._result.has_next: return ws = await conn.show_warnings() if ws is None: return for w in ws: msg = w[-1] warnings.warn(str(msg), Warning, 4) Warning = Warning Error = Error InterfaceError = InterfaceError DatabaseError = DatabaseError DataError = DataError OperationalError = OperationalError IntegrityError = IntegrityError InternalError = InternalError ProgrammingError = ProgrammingError NotSupportedError = NotSupportedError def __aiter__(self): return self async def __anext__(self): ret = await self.fetchone() if ret is not None: return ret else: raise StopAsyncIteration # noqa async def __aenter__(self): return self async def __aexit__(self, exc_type, exc_val, exc_tb): await self.close() return class _DeserializationCursorMixin: async def _do_get_result(self): await super()._do_get_result() if self._rows: self._rows = [self._deserialization_row(r) for r in self._rows] def _deserialization_row(self, row): if row is None: return None if isinstance(row, dict): dict_flag = True else: row = list(row) dict_flag = False for index, (name, field_type, *n) in enumerate(self._description): if field_type == FIELD_TYPE.JSON: point = name if dict_flag else index with contextlib.suppress(ValueError, TypeError): row[point] = json.loads(row[point]) if dict_flag: return row else: return tuple(row) def _conv_row(self, row): if row is None: return None row = super()._conv_row(row) return self._deserialization_row(row) class DeserializationCursor(_DeserializationCursorMixin, Cursor): """A cursor automatic deserialization of json type fields""" class _DictCursorMixin: # You can override this to use OrderedDict or other dict-like types. dict_type = dict async def _do_get_result(self): await super()._do_get_result() fields = [] if self._description: for f in self._result.fields: name = f.name if name in fields: name = f.table_name + '.' + name fields.append(name) self._fields = fields if fields and self._rows: self._rows = [self._conv_row(r) for r in self._rows] def _conv_row(self, row): if row is None: return None row = super()._conv_row(row) return self.dict_type(zip(self._fields, row)) class DictCursor(_DictCursorMixin, Cursor): """A cursor which returns results as a dictionary""" class SSCursor(Cursor): """Unbuffered Cursor, mainly useful for queries that return a lot of data, or for connections to remote servers over a slow network. Instead of copying every row of data into a buffer, this will fetch rows as needed. 
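    A minimal streaming sketch (``conn`` is an open connection; the
    ``events`` table and ``handle()`` are assumptions for illustration):

        cur = await conn.cursor(aiomysql.SSCursor)
        await cur.execute('SELECT id, payload FROM events')
        async for row in cur:
            handle(row)          # rows arrive one at a time
        await cur.close()
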
The upside of this, is the client uses much less memory, and rows are returned much faster when traveling over a slow network, or if the result set is very big. There are limitations, though. The MySQL protocol doesn't support returning the total number of rows, so the only way to tell how many rows there are is to iterate over every row returned. Also, it currently isn't possible to scroll backwards, as only the current row is held in memory. """ async def close(self): conn = self._connection if conn is None: return if self._result is not None and self._result is conn._result: await self._result._finish_unbuffered_query() try: while (await self.nextset()): pass finally: self._connection = None async def _query(self, q): conn = self._get_db() self._last_executed = q await conn.query(q, unbuffered=True) await self._do_get_result() return self._rowcount async def _read_next(self): """Read next row """ row = await self._result._read_rowdata_packet_unbuffered() row = self._conv_row(row) return row async def fetchone(self): """ Fetch next row """ self._check_executed() row = await self._read_next() if row is None: return self._rownumber += 1 return row async def fetchall(self): """Fetch all, as per MySQLdb. Pretty useless for large queries, as it is buffered. """ rows = [] while True: row = await self.fetchone() if row is None: break rows.append(row) return rows async def fetchmany(self, size=None): """Returns the next set of rows of a query result, returning a list of tuples. When no more rows are available, it returns an empty list. The number of rows returned can be specified using the size argument, which defaults to one :param size: ``int`` number of rows to return :returns: ``list`` of fetched rows """ self._check_executed() if size is None: size = self._arraysize rows = [] for i in range(size): row = await self._read_next() if row is None: break rows.append(row) self._rownumber += 1 return rows async def scroll(self, value, mode='relative'): """Scroll the cursor in the result set to a new position according to mode . Same as :meth:`Cursor.scroll`, but move cursor on server side one by one row. If you want to move 20 rows forward scroll will make 20 queries to move cursor. Currently only forward scrolling is supported. :param int value: move cursor to next position according to mode. :param str mode: scroll mode, possible modes: `relative` and `absolute` """ self._check_executed() if mode == 'relative': if value < 0: raise NotSupportedError("Backwards scrolling not supported " "by this cursor") for _ in range(value): await self._read_next() self._rownumber += value elif mode == 'absolute': if value < self._rownumber: raise NotSupportedError( "Backwards scrolling not supported by this cursor") end = value - self._rownumber for _ in range(end): await self._read_next() self._rownumber = value else: raise ProgrammingError("unknown scroll mode %s" % mode) class SSDictCursor(_DictCursorMixin, SSCursor): """An unbuffered cursor, which returns results as a dictionary """ aiomysql-0.3.2/aiomysql/log.py000066400000000000000000000001721507601712200163440ustar00rootroot00000000000000"""Logging configuration.""" import logging # Name the logger after the package. 
logger = logging.getLogger(__package__) aiomysql-0.3.2/aiomysql/pool.py000066400000000000000000000203221507601712200165330ustar00rootroot00000000000000# based on aiopg pool # https://github.com/aio-libs/aiopg/blob/master/aiopg/pool.py import asyncio import collections import warnings from .connection import connect from .utils import (_PoolContextManager, _PoolConnectionContextManager, _PoolAcquireContextManager) def create_pool(minsize=1, maxsize=10, echo=False, pool_recycle=-1, loop=None, **kwargs): coro = _create_pool(minsize=minsize, maxsize=maxsize, echo=echo, pool_recycle=pool_recycle, loop=loop, **kwargs) return _PoolContextManager(coro) async def _create_pool(minsize=1, maxsize=10, echo=False, pool_recycle=-1, loop=None, **kwargs): if loop is None: loop = asyncio.get_event_loop() pool = Pool(minsize=minsize, maxsize=maxsize, echo=echo, pool_recycle=pool_recycle, loop=loop, **kwargs) if minsize > 0: async with pool._cond: await pool._fill_free_pool(False) return pool class Pool(asyncio.AbstractServer): """Connection pool""" def __init__(self, minsize, maxsize, echo, pool_recycle, loop, **kwargs): if minsize < 0: raise ValueError("minsize should be zero or greater") if maxsize < minsize and maxsize != 0: raise ValueError("maxsize should be not less than minsize") self._minsize = minsize self._loop = loop self._conn_kwargs = kwargs self._acquiring = 0 self._free = collections.deque(maxlen=maxsize or None) self._cond = asyncio.Condition() self._used = set() self._terminated = set() self._closing = False self._closed = False self._echo = echo self._recycle = pool_recycle @property def echo(self): return self._echo @property def minsize(self): return self._minsize @property def maxsize(self): return self._free.maxlen @property def size(self): return self.freesize + len(self._used) + self._acquiring @property def freesize(self): return len(self._free) async def clear(self): """Close all free connections in pool.""" async with self._cond: while self._free: conn = self._free.popleft() await conn.ensure_closed() self._cond.notify() @property def closed(self): """ The readonly property that returns ``True`` if connections is closed. """ return self._closed def close(self): """Close pool. Mark all pool connections to be closed on getting back to pool. Closed pool doesn't allow to acquire new connections. """ if self._closed: return self._closing = True def terminate(self): """Terminate pool. Close pool with instantly closing all acquired connections also. 
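        A shutdown sketch (``pool`` is any pool created via ``create_pool``):

            pool.close()               # graceful: stop handing out new connections
            await pool.wait_closed()   # wait until released connections are closed
            # ...or, when a hard stop is required:
            pool.terminate()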
""" self.close() for conn in list(self._used): conn.close() self._terminated.add(conn) self._used.clear() async def wait_closed(self): """Wait for closing all pool's connections.""" if self._closed: return if not self._closing: raise RuntimeError(".wait_closed() should be called " "after .close()") while self._free: conn = self._free.popleft() conn.close() async with self._cond: while self.size > self.freesize: await self._cond.wait() self._closed = True def acquire(self): """Acquire free connection from the pool.""" coro = self._acquire() return _PoolAcquireContextManager(coro, self) async def _acquire(self): if self._closing: raise RuntimeError("Cannot acquire connection after closing pool") async with self._cond: while True: await self._fill_free_pool(True) if self._free: conn = self._free.popleft() assert not conn.closed, conn assert conn not in self._used, (conn, self._used) self._used.add(conn) return conn else: await self._cond.wait() async def _fill_free_pool(self, override_min): # iterate over free connections and remove timed out ones free_size = len(self._free) n = 0 while n < free_size: conn = self._free[-1] if conn._reader.at_eof() or conn._reader.exception(): self._free.pop() conn.close() # On MySQL 8.0 a timed out connection sends an error packet before # closing the connection, preventing us from relying on at_eof(). # This relies on our custom StreamReader, as eof_received is not # present in asyncio.StreamReader. elif conn._reader.eof_received: self._free.pop() conn.close() elif (self._recycle > -1 and self._loop.time() - conn.last_usage > self._recycle): self._free.pop() conn.close() else: self._free.rotate() n += 1 while self.size < self.minsize: self._acquiring += 1 try: conn = await connect(echo=self._echo, loop=self._loop, **self._conn_kwargs) # raise exception if pool is closing self._free.append(conn) self._cond.notify() finally: self._acquiring -= 1 if self._free: return if override_min and (not self.maxsize or self.size < self.maxsize): self._acquiring += 1 try: conn = await connect(echo=self._echo, loop=self._loop, **self._conn_kwargs) # raise exception if pool is closing self._free.append(conn) self._cond.notify() finally: self._acquiring -= 1 async def _wakeup(self): async with self._cond: self._cond.notify() def release(self, conn): """Release free connection back to the connection pool. This is **NOT** a coroutine. """ fut = self._loop.create_future() fut.set_result(None) if conn in self._terminated: assert conn.closed, conn self._terminated.remove(conn) return fut assert conn in self._used, (conn, self._used) self._used.remove(conn) if not conn.closed: in_trans = conn.get_transaction_status() if in_trans: conn.close() return fut if self._closing: conn.close() else: self._free.append(conn) fut = self._loop.create_task(self._wakeup()) return fut def __enter__(self): raise RuntimeError( '"yield from" should be used as context manager expression') def __exit__(self, *args): # This must exist because __enter__ exists, even though that # always raises; that's how the with-statement works. pass # pragma: nocover def __iter__(self): # This is not a coroutine. 
It is meant to enable the idiom: # # with (yield from pool) as conn: # # # as an alternative to: # # conn = yield from pool.acquire() # try: # # finally: # conn.release() conn = yield from self.acquire() return _PoolConnectionContextManager(self, conn) def __await__(self): msg = "with await pool as conn deprecated, use" \ "async with pool.acquire() as conn instead" warnings.warn(msg, DeprecationWarning, stacklevel=2) conn = yield from self.acquire() return _PoolConnectionContextManager(self, conn) async def __aenter__(self): return self async def __aexit__(self, exc_type, exc_val, exc_tb): self.close() await self.wait_closed() aiomysql-0.3.2/aiomysql/sa/000077500000000000000000000000001507601712200156145ustar00rootroot00000000000000aiomysql-0.3.2/aiomysql/sa/__init__.py000066400000000000000000000010611507601712200177230ustar00rootroot00000000000000"""Optional support for sqlalchemy.sql dynamic query generation.""" from .connection import SAConnection from .engine import create_engine, Engine from .exc import (Error, ArgumentError, InvalidRequestError, NoSuchColumnError, ResourceClosedError) __all__ = ('create_engine', 'SAConnection', 'Error', 'ArgumentError', 'InvalidRequestError', 'NoSuchColumnError', 'ResourceClosedError', 'Engine') (SAConnection, Error, ArgumentError, InvalidRequestError, NoSuchColumnError, ResourceClosedError, create_engine, Engine) aiomysql-0.3.2/aiomysql/sa/connection.py000066400000000000000000000350571507601712200203370ustar00rootroot00000000000000# ported from: # https://github.com/aio-libs/aiopg/blob/master/aiopg/sa/connection.py import weakref from sqlalchemy.sql import ClauseElement from sqlalchemy.sql.dml import UpdateBase from sqlalchemy.sql.ddl import DDLElement from . import exc from .result import create_result_proxy from .transaction import (RootTransaction, Transaction, NestedTransaction, TwoPhaseTransaction) from ..utils import _TransactionContextManager, _SAConnectionContextManager def noop(k): return k class SAConnection: def __init__(self, connection, engine, compiled_cache=None): self._connection = connection self._transaction = None self._savepoint_seq = 0 self._weak_results = weakref.WeakSet() self._engine = engine self._dialect = engine.dialect self._compiled_cache = compiled_cache def execute(self, query, *multiparams, **params): """Executes a SQL query with optional parameters. query - a SQL query string or any sqlalchemy expression. *multiparams/**params - represent bound parameter values to be used in the execution. Typically, the format is a dictionary passed to *multiparams: await conn.execute( table.insert(), {"id":1, "value":"v1"}, ) ...or individual key/values interpreted by **params:: await conn.execute( table.insert(), id=1, value="v1" ) In the case that a plain SQL string is passed, a tuple or individual values in *multiparams may be passed:: await conn.execute( "INSERT INTO table (id, value) VALUES (%d, %s)", (1, "v1") ) await conn.execute( "INSERT INTO table (id, value) VALUES (%s, %s)", 1, "v1" ) Returns ResultProxy instance with results of SQL query execution. 
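        A fetch sketch (``table`` is any bound sqlalchemy ``Table``; the
        column names are the same placeholders used above):

            result = await conn.execute(table.select().where(table.c.id == 1))
            row = await result.fetchone()
            print(row.id, row['value'])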
""" coro = self._execute(query, *multiparams, **params) return _SAConnectionContextManager(coro) def _base_params(self, query, dp, compiled, is_update): """ handle params """ if dp and isinstance(dp, (list, tuple)): if is_update: dp = {c.key: pval for c, pval in zip(query.table.c, dp)} else: raise exc.ArgumentError( "Don't mix sqlalchemy SELECT " "clause with positional " "parameters" ) compiled_params = compiled.construct_params(dp) processors = compiled._bind_processors params = [{ key: processors.get(key, noop)(compiled_params[key]) for key in compiled_params }] post_processed_params = self._dialect.execute_sequence_format(params) return post_processed_params[0] async def _executemany(self, query, dps, cursor): """ executemany """ result_map = None if isinstance(query, str): await cursor.executemany(query, dps) elif isinstance(query, DDLElement): raise exc.ArgumentError( "Don't mix sqlalchemy DDL clause " "and execution with parameters" ) elif isinstance(query, ClauseElement): compiled = query.compile(dialect=self._dialect) params = [] is_update = isinstance(query, UpdateBase) for dp in dps: params.append( self._base_params( query, dp, compiled, is_update, ) ) await cursor.executemany(str(compiled), params) result_map = compiled._result_columns else: raise exc.ArgumentError( "sql statement should be str or " "SQLAlchemy data " "selection/modification clause" ) ret = await create_result_proxy( self, cursor, self._dialect, result_map ) self._weak_results.add(ret) return ret async def _execute(self, query, *multiparams, **params): cursor = await self._connection.cursor() dp = _distill_params(multiparams, params) if len(dp) > 1: return await self._executemany(query, dp, cursor) elif dp: dp = dp[0] result_map = None if isinstance(query, str): await cursor.execute(query, dp or None) elif isinstance(query, ClauseElement): if self._compiled_cache is not None: key = query compiled = self._compiled_cache.get(key) if not compiled: compiled = query.compile(dialect=self._dialect) if dp and dp.keys() == compiled.params.keys() \ or not (dp or compiled.params): # we only want queries with bound params in cache self._compiled_cache[key] = compiled else: compiled = query.compile(dialect=self._dialect) if not isinstance(query, DDLElement): post_processed_params = self._base_params( query, dp, compiled, isinstance(query, UpdateBase) ) result_map = compiled._result_columns else: if dp: raise exc.ArgumentError("Don't mix sqlalchemy DDL clause " "and execution with parameters") post_processed_params = compiled.construct_params() result_map = None await cursor.execute(str(compiled), post_processed_params) else: raise exc.ArgumentError("sql statement should be str or " "SQLAlchemy data " "selection/modification clause") ret = await create_result_proxy( self, cursor, self._dialect, result_map ) self._weak_results.add(ret) return ret async def scalar(self, query, *multiparams, **params): """Executes a SQL query and returns a scalar value.""" res = await self.execute(query, *multiparams, **params) return (await res.scalar()) @property def closed(self): """The readonly property that returns True if connections is closed.""" return self._connection is None or self._connection.closed @property def connection(self): return self._connection def begin(self): """Begin a transaction and return a transaction handle. The returned object is an instance of Transaction. This object represents the "scope" of the transaction, which completes when either the .rollback or .commit method is called. 
Nested calls to .begin on the same SAConnection instance will return new Transaction objects that represent an emulated transaction within the scope of the enclosing transaction, that is:: trans = await conn.begin() # outermost transaction trans2 = await conn.begin() # "nested" await trans2.commit() # does nothing await trans.commit() # actually commits Calls to .commit only have an effect when invoked via the outermost Transaction object, though the .rollback method of any of the Transaction objects will roll back the transaction. See also: .begin_nested - use a SAVEPOINT .begin_twophase - use a two phase/XA transaction """ coro = self._begin() return _TransactionContextManager(coro) async def _begin(self): if self._transaction is None: self._transaction = RootTransaction(self) await self._begin_impl() return self._transaction else: return Transaction(self, self._transaction) async def _begin_impl(self): cur = await self._connection.cursor() try: await cur.execute('BEGIN') finally: await cur.close() async def _commit_impl(self): cur = await self._connection.cursor() try: await cur.execute('COMMIT') finally: await cur.close() self._transaction = None async def _rollback_impl(self): cur = await self._connection.cursor() try: await cur.execute('ROLLBACK') finally: await cur.close() self._transaction = None async def begin_nested(self): """Begin a nested transaction and return a transaction handle. The returned object is an instance of :class:`.NestedTransaction`. Nested transactions require SAVEPOINT support in the underlying database. Any transaction in the hierarchy may .commit() and .rollback(), however the outermost transaction still controls the overall .commit() or .rollback() of the transaction of a whole. """ if self._transaction is None: self._transaction = RootTransaction(self) await self._begin_impl() else: self._transaction = NestedTransaction(self, self._transaction) self._transaction._savepoint = await self._savepoint_impl() return self._transaction async def _savepoint_impl(self, name=None): self._savepoint_seq += 1 name = 'aiomysql_sa_savepoint_%s' % self._savepoint_seq cur = await self._connection.cursor() try: await cur.execute('SAVEPOINT ' + name) return name finally: await cur.close() async def _rollback_to_savepoint_impl(self, name, parent): cur = await self._connection.cursor() try: await cur.execute('ROLLBACK TO SAVEPOINT ' + name) finally: await cur.close() self._transaction = parent async def _release_savepoint_impl(self, name, parent): cur = await self._connection.cursor() try: await cur.execute('RELEASE SAVEPOINT ' + name) finally: await cur.close() self._transaction = parent async def begin_twophase(self, xid=None): """Begin a two-phase or XA transaction and return a transaction handle. The returned object is an instance of TwoPhaseTransaction, which in addition to the methods provided by Transaction, also provides a TwoPhaseTransaction.prepare() method. xid - the two phase transaction id. If not supplied, a random id will be generated. 
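        A sketch of the prepare/commit flow (``orders`` is an assumed table;
        prepare() is the extra step provided by TwoPhaseTransaction, as
        described above):

            trans = await conn.begin_twophase()
            await conn.execute(orders.insert().values(id=1))
            await trans.prepare()
            await trans.commit()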
""" if self._transaction is not None: raise exc.InvalidRequestError( "Cannot start a two phase transaction when a transaction " "is already in progress.") if xid is None: xid = self._dialect.create_xid() self._transaction = TwoPhaseTransaction(self, xid) await self.execute("XA START %s", xid) return self._transaction async def _prepare_twophase_impl(self, xid): await self.execute("XA END '%s'" % xid) await self.execute("XA PREPARE '%s'" % xid) async def recover_twophase(self): """Return a list of prepared twophase transaction ids.""" result = await self.execute("XA RECOVER;") return [row[0] for row in result] async def rollback_prepared(self, xid, *, is_prepared=True): """Rollback prepared twophase transaction.""" if not is_prepared: await self.execute("XA END '%s'" % xid) await self.execute("XA ROLLBACK '%s'" % xid) async def commit_prepared(self, xid, *, is_prepared=True): """Commit prepared twophase transaction.""" if not is_prepared: await self.execute("XA END '%s'" % xid) await self.execute("XA COMMIT '%s'" % xid) @property def in_transaction(self): """Return True if a transaction is in progress.""" return self._transaction is not None and self._transaction.is_active async def close(self): """Close this SAConnection. This results in a release of the underlying database resources, that is, the underlying connection referenced internally. The underlying connection is typically restored back to the connection-holding Pool referenced by the Engine that produced this SAConnection. Any transactional state present on the underlying connection is also unconditionally released via calling Transaction.rollback() method. After .close() is called, the SAConnection is permanently in a closed state, and will allow no further operations. """ if self._connection is None: return if self._transaction is not None: await self._transaction.rollback() self._transaction = None # don't close underlying connection, it can be reused by pool # conn.close() self._engine.release(self) self._connection = None self._engine = None async def __aenter__(self): return self async def __aexit__(self, exc_type, exc_val, exc_tb): await self.close() def _distill_params(multiparams, params): """Given arguments from the calling form *multiparams, **params, return a list of bind parameter structures, usually a list of dictionaries. In the case of 'raw' execution which accepts positional parameters, it may be a list of tuples or lists. 
""" if not multiparams: if params: return [params] else: return [] elif len(multiparams) == 1: zero = multiparams[0] if isinstance(zero, (list, tuple)): if not zero or hasattr(zero[0], '__iter__') and \ not hasattr(zero[0], 'strip'): # execute(stmt, [{}, {}, {}, ...]) # execute(stmt, [(), (), (), ...]) return zero else: # execute(stmt, ("value", "value")) return [zero] elif hasattr(zero, 'keys'): # execute(stmt, {"key":"value"}) return [zero] else: # execute(stmt, "value") return [[zero]] else: if (hasattr(multiparams[0], '__iter__') and not hasattr(multiparams[0], 'strip')): return multiparams else: return [multiparams] aiomysql-0.3.2/aiomysql/sa/engine.py000066400000000000000000000154041507601712200174370ustar00rootroot00000000000000# ported from: # https://github.com/aio-libs/aiopg/blob/master/aiopg/sa/engine.py import asyncio import aiomysql from .connection import SAConnection from .exc import InvalidRequestError, ArgumentError from ..utils import _PoolContextManager, _PoolAcquireContextManager from ..cursors import ( Cursor, DeserializationCursor, DictCursor, SSCursor, SSDictCursor) try: from sqlalchemy.dialects.mysql.pymysql import MySQLDialect_pymysql from sqlalchemy.dialects.mysql.mysqldb import MySQLCompiler_mysqldb except ImportError: # pragma: no cover raise ImportError('aiomysql.sa requires sqlalchemy') class MySQLCompiler_pymysql(MySQLCompiler_mysqldb): def construct_params(self, params=None, _group_number=None, _check=True): pd = super().construct_params(params, _group_number, _check) for column in self.prefetch: pd[column.key] = self._exec_default(column.default) return pd def _exec_default(self, default): if default.is_callable: return default.arg(self.dialect) else: return default.arg _dialect = MySQLDialect_pymysql(paramstyle='pyformat') _dialect.statement_compiler = MySQLCompiler_pymysql _dialect.default_paramstyle = 'pyformat' def create_engine(minsize=1, maxsize=10, loop=None, dialect=_dialect, pool_recycle=-1, compiled_cache=None, **kwargs): """A coroutine for Engine creation. Returns Engine instance with embedded connection pool. The pool has *minsize* opened connections to MySQL server. """ deprecated_cursor_classes = [ DeserializationCursor, DictCursor, SSCursor, SSDictCursor, ] cursorclass = kwargs.get('cursorclass', Cursor) if not issubclass(cursorclass, Cursor) or any( issubclass(cursorclass, cursor_class) for cursor_class in deprecated_cursor_classes ): raise ArgumentError('SQLAlchemy engine does not support ' 'this cursor class') coro = _create_engine(minsize=minsize, maxsize=maxsize, loop=loop, dialect=dialect, pool_recycle=pool_recycle, compiled_cache=compiled_cache, **kwargs) return _EngineContextManager(coro) async def _create_engine(minsize=1, maxsize=10, loop=None, dialect=_dialect, pool_recycle=-1, compiled_cache=None, **kwargs): if loop is None: loop = asyncio.get_event_loop() pool = await aiomysql.create_pool(minsize=minsize, maxsize=maxsize, loop=loop, pool_recycle=pool_recycle, **kwargs) conn = await pool.acquire() try: return Engine(dialect, pool, compiled_cache=compiled_cache, **kwargs) finally: pool.release(conn) class Engine: """Connects a aiomysql.Pool and sqlalchemy.engine.interfaces.Dialect together to provide a source of database connectivity and behavior. An Engine object is instantiated publicly using the create_engine coroutine. 
""" def __init__(self, dialect, pool, compiled_cache=None, **kwargs): self._dialect = dialect self._pool = pool self._compiled_cache = compiled_cache self._conn_kw = kwargs @property def dialect(self): """An dialect for engine.""" return self._dialect @property def name(self): """A name of the dialect.""" return self._dialect.name @property def driver(self): """A driver of the dialect.""" return self._dialect.driver @property def minsize(self): return self._pool.minsize @property def maxsize(self): return self._pool.maxsize @property def size(self): return self._pool.size @property def freesize(self): return self._pool.freesize def close(self): """Close engine. Mark all engine connections to be closed on getting back to pool. Closed engine doesn't allow to acquire new connections. """ self._pool.close() def terminate(self): """Terminate engine. Terminate engine pool with instantly closing all acquired connections also. """ self._pool.terminate() async def wait_closed(self): """Wait for closing all engine's connections.""" await self._pool.wait_closed() def acquire(self): """Get a connection from pool.""" coro = self._acquire() return _EngineAcquireContextManager(coro, self) async def _acquire(self): raw = await self._pool.acquire() conn = SAConnection(raw, self, compiled_cache=self._compiled_cache) return conn def release(self, conn): """Revert back connection to pool.""" if conn.in_transaction: raise InvalidRequestError("Cannot release a connection with " "not finished transaction") raw = conn.connection return self._pool.release(raw) def __enter__(self): raise RuntimeError( '"yield from" should be used as context manager expression') def __exit__(self, *args): # This must exist because __enter__ exists, even though that # always raises; that's how the with-statement works. pass # pragma: nocover def __iter__(self): # This is not a coroutine. It is meant to enable the idiom: # # with (yield from engine) as conn: # # # as an alternative to: # # conn = yield from engine.acquire() # try: # # finally: # engine.release(conn) conn = yield from self.acquire() return _ConnectionContextManager(self, conn) async def __aenter__(self): return self async def __aexit__(self, exc_type, exc_val, exc_tb): self.close() await self.wait_closed() _EngineContextManager = _PoolContextManager _EngineAcquireContextManager = _PoolAcquireContextManager class _ConnectionContextManager: """Context manager. This enables the following idiom for acquiring and releasing a connection around a block: with (yield from engine) as conn: cur = yield from conn.cursor() while failing loudly when accidentally using: with engine: """ __slots__ = ('_engine', '_conn') def __init__(self, engine, conn): self._engine = engine self._conn = conn def __enter__(self): assert self._conn is not None return self._conn def __exit__(self, *args): try: self._engine.release(self._conn) finally: self._engine = None self._conn = None aiomysql-0.3.2/aiomysql/sa/exc.py000066400000000000000000000014111507601712200167420ustar00rootroot00000000000000# ported from: https://github.com/aio-libs/aiopg/blob/master/aiopg/sa/exc.py class Error(Exception): """Generic error class.""" class ArgumentError(Error): """Raised when an invalid or conflicting function argument is supplied. This error generally corresponds to construction time state errors. """ class InvalidRequestError(ArgumentError): """aiomysql.sa was asked to do something it can't do. This error generally corresponds to runtime state errors. 
""" class NoSuchColumnError(KeyError, InvalidRequestError): """A nonexistent column is requested from a ``RowProxy``.""" class ResourceClosedError(InvalidRequestError): """An operation was requested from a connection, cursor, or other object that's in a closed state.""" aiomysql-0.3.2/aiomysql/sa/result.py000066400000000000000000000353341507601712200175140ustar00rootroot00000000000000# ported from: # https://github.com/aio-libs/aiopg/blob/master/aiopg/sa/result.py import weakref from collections.abc import Mapping, Sequence from sqlalchemy.sql import expression, sqltypes from . import exc async def create_result_proxy(connection, cursor, dialect, result_map): result_proxy = ResultProxy(connection, cursor, dialect, result_map) await result_proxy._prepare() return result_proxy class RowProxy(Mapping): __slots__ = ('_result_proxy', '_row', '_processors', '_keymap') def __init__(self, result_proxy, row, processors, keymap): """RowProxy objects are constructed by ResultProxy objects.""" self._result_proxy = result_proxy self._row = row self._processors = processors self._keymap = keymap def __iter__(self): return iter(self._result_proxy.keys) def __len__(self): return len(self._row) def __getitem__(self, key): try: processor, obj, index = self._keymap[key] except KeyError: processor, obj, index = self._result_proxy._key_fallback(key) # Do we need slicing at all? RowProxy now is Mapping not Sequence # except TypeError: # if isinstance(key, slice): # l = [] # for processor, value in zip(self._processors[key], # self._row[key]): # if processor is None: # l.append(value) # else: # l.append(processor(value)) # return tuple(l) # else: # raise if index is None: raise exc.InvalidRequestError( "Ambiguous column name '%s' in result set! " "try 'use_labels' option on select statement." % key) if processor is not None: return processor(self._row[index]) else: return self._row[index] def __getattr__(self, name): try: return self[name] except KeyError as e: raise AttributeError(e.args[0]) def __contains__(self, key): return self._result_proxy._has_key(self._row, key) __hash__ = None def __eq__(self, other): if isinstance(other, RowProxy): return self.as_tuple() == other.as_tuple() elif isinstance(other, Sequence): return self.as_tuple() == other else: return NotImplemented def __ne__(self, other): return not self == other def as_tuple(self): return tuple(self[k] for k in self) def __repr__(self): return repr(self.as_tuple()) class ResultMetaData: """Handle cursor.description, applying additional info from an execution context.""" def __init__(self, result_proxy, metadata): self._processors = processors = [] result_map = {} if result_proxy._result_map: result_map = {elem[0]: elem[3] for elem in result_proxy._result_map} # We do not strictly need to store the processor in the key mapping, # though it is faster in the Python version (probably because of the # saved attribute lookup self._processors) self._keymap = keymap = {} self.keys = [] dialect = result_proxy.dialect # `dbapi_type_map` property removed in SQLAlchemy 1.2+. # Usage of `getattr` only needed for backward compatibility with # older versions of SQLAlchemy. typemap = getattr(dialect, 'dbapi_type_map', {}) assert dialect.case_sensitive, \ "Doesn't support case insensitive database connection" # high precedence key values. primary_keymap = {} assert not dialect.description_encoding, \ "psycopg in py3k should not use this" for i, rec in enumerate(metadata): colname = rec[0] coltype = rec[1] # PostgreSQL doesn't require this. 
# if dialect.requires_name_normalize: # colname = dialect.normalize_name(colname) name, obj, type_ = ( colname, None, result_map.get( colname, typemap.get(coltype, sqltypes.NULLTYPE)) ) processor = type_._cached_result_processor(dialect, coltype) processors.append(processor) rec = (processor, obj, i) # indexes as keys. This is only needed for the Python version of # RowProxy (the C version uses a faster path for integer indexes). primary_keymap[i] = rec # populate primary keymap, looking for conflicts. if primary_keymap.setdefault(name, rec) is not rec: # place a record that doesn't have the "index" - this # is interpreted later as an AmbiguousColumnError, # but only when actually accessed. Columns # colliding by name is not a problem if those names # aren't used; integer access is always # unambiguous. primary_keymap[name] = rec = (None, obj, None) self.keys.append(colname) if obj: for o in obj: keymap[o] = rec # technically we should be doing this but we # are saving on callcounts by not doing so. # if keymap.setdefault(o, rec) is not rec: # keymap[o] = (None, obj, None) # overwrite keymap values with those of the # high precedence keymap. keymap.update(primary_keymap) def _key_fallback(self, key, raiseerr=True): map = self._keymap result = None if isinstance(key, str): result = map.get(key) # fallback for targeting a ColumnElement to a textual expression # this is a rare use case which only occurs when matching text() # or colummn('name') constructs to ColumnElements, or after a # pickle/unpickle roundtrip elif isinstance(key, expression.ColumnElement): if (key._label and key._label in map): result = map[key._label] elif (hasattr(key, 'name') and key.name in map): # match is only on name. result = map[key.name] # search extra hard to make sure this # isn't a column/label name overlap. # this check isn't currently available if the row # was unpickled. if (result is not None and result[1] is not None): for obj in result[1]: if key._compare_name_for_result(obj): break else: result = None if result is None: if raiseerr: raise exc.NoSuchColumnError( "Could not locate column in row for column '%s'" % expression._string_or_unprintable(key)) else: return None else: map[key] = result return result def _has_key(self, row, key): if key in self._keymap: return True else: return self._key_fallback(key, False) is not None class ResultProxy: """Wraps a DB-API cursor object to provide easier access to row columns. Individual columns may be accessed by their integer position, case-insensitive column name, or by sqlalchemy schema.Column object. e.g.: row = fetchone() col1 = row[0] # access via integer position col2 = row['col2'] # access via name col3 = row[mytable.c.mycol] # access via Column object. ResultProxy also handles post-processing of result column data using sqlalchemy TypeEngine objects, which are referenced from the originating SQL statement that produced this result set. 
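    A minimal async usage sketch (``conn`` is assumed to be an open
    SAConnection and ``tbl`` a placeholder table):

        result = await conn.execute("SELECT id, val FROM tbl")
        async for row in result:
            print(row['id'], row['val'])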
""" def __init__(self, connection, cursor, dialect, result_map): self._dialect = dialect self._closed = False self._cursor = cursor self._connection = connection self._rowcount = cursor.rowcount self._lastrowid = cursor.lastrowid self._result_map = result_map async def _prepare(self): loop = self._connection.connection.loop cursor = self._cursor if cursor.description is not None: self._metadata = ResultMetaData(self, cursor.description) def callback(wr): loop.create_task(cursor.close()) self._weak = weakref.ref(self, callback) else: self._metadata = None await self.close() self._weak = None @property def dialect(self): """SQLAlchemy dialect.""" return self._dialect @property def cursor(self): return self._cursor def keys(self): """Return the current set of string keys for rows.""" if self._metadata: return tuple(self._metadata.keys) else: return () @property def rowcount(self): """Return the 'rowcount' for this result. The 'rowcount' reports the number of rows *matched* by the WHERE criterion of an UPDATE or DELETE statement. .. note:: Notes regarding .rowcount: * This attribute returns the number of rows *matched*, which is not necessarily the same as the number of rows that were actually *modified* - an UPDATE statement, for example, may have no net change on a given row if the SET values given are the same as those present in the row already. Such a row would be matched but not modified. * .rowcount is *only* useful in conjunction with an UPDATE or DELETE statement. Contrary to what the Python DBAPI says, it does *not* return the number of rows available from the results of a SELECT statement as DBAPIs cannot support this functionality when rows are unbuffered. * Statements that use RETURNING may not return a correct rowcount. """ return self._rowcount @property def lastrowid(self): """Returns the 'lastrowid' accessor on the DBAPI cursor. This is a DBAPI specific method and is only functional for those backends which support it, for statements where it is appropriate. """ return self._lastrowid @property def returns_rows(self): """True if this ResultProxy returns rows. I.e. if it is legal to call the methods .fetchone(), .fetchmany() and .fetchall()`. """ return self._metadata is not None @property def closed(self): return self._closed async def close(self): """Close this ResultProxy. Closes the underlying DBAPI cursor corresponding to the execution. Note that any data cached within this ResultProxy is still available. For some types of results, this may include buffered rows. If this ResultProxy was generated from an implicit execution, the underlying Connection will also be closed (returns the underlying DBAPI connection to the connection pool.) This method is called automatically when: * all result rows are exhausted using the fetchXXX() methods. * cursor.description is None. """ if not self._closed: self._closed = True await self._cursor.close() # allow consistent errors self._cursor = None self._weak = None # def __iter__(self): # while True: # row = yield from self.fetchone() # if row is None: # raise StopIteration # else: # yield row def _non_result(self): if self._metadata is None: raise exc.ResourceClosedError( "This result object does not return rows. 
" "It has been closed automatically.") else: raise exc.ResourceClosedError("This result object is closed.") def _process_rows(self, rows): process_row = RowProxy metadata = self._metadata keymap = metadata._keymap processors = metadata._processors return [process_row(metadata, row, processors, keymap) for row in rows] async def fetchall(self): """Fetch all rows, just like DB-API cursor.fetchall().""" try: rows = await self._cursor.fetchall() except AttributeError: self._non_result() else: ret = self._process_rows(rows) await self.close() return ret async def fetchone(self): """Fetch one row, just like DB-API cursor.fetchone(). If a row is present, the cursor remains open after this is called. Else the cursor is automatically closed and None is returned. """ try: row = await self._cursor.fetchone() except AttributeError: self._non_result() else: if row is not None: return self._process_rows([row])[0] else: await self.close() return None async def fetchmany(self, size=None): """Fetch many rows, just like DB-API cursor.fetchmany(size=cursor.arraysize). If rows are present, the cursor remains open after this is called. Else the cursor is automatically closed and an empty list is returned. """ try: if size is None: rows = await self._cursor.fetchmany() else: rows = await self._cursor.fetchmany(size) except AttributeError: self._non_result() else: ret = self._process_rows(rows) if len(ret) == 0: await self.close() return ret async def first(self): """Fetch the first row and then close the result set unconditionally. Returns None if no row is present. """ if self._metadata is None: self._non_result() try: return (await self.fetchone()) finally: await self.close() async def scalar(self): """Fetch the first column of the first row, and close the result set. Returns None if no row is present. """ row = await self.first() if row is not None: return row[0] else: return None def __aiter__(self): return self async def __anext__(self): data = await self.fetchone() if data is not None: return data else: raise StopAsyncIteration # noqa aiomysql-0.3.2/aiomysql/sa/transaction.py000066400000000000000000000114601507601712200205150ustar00rootroot00000000000000# ported from: # https://github.com/aio-libs/aiopg/blob/master/aiopg/sa/transaction.py from . import exc class Transaction: """Represent a database transaction in progress. The Transaction object is procured by calling the SAConnection.begin() method of SAConnection: with (yield from engine) as conn: trans = yield from conn.begin() try: yield from conn.execute("insert into x (a, b) values (1, 2)") except Exception: yield from trans.rollback() else: yield from trans.commit() The object provides .rollback() and .commit() methods in order to control transaction boundaries. See also: SAConnection.begin(), SAConnection.begin_twophase(), SAConnection.begin_nested(). """ def __init__(self, connection, parent): self._connection = connection self._parent = parent or self self._is_active = True @property def is_active(self): """Return ``True`` if a transaction is active.""" return self._is_active @property def connection(self): """Return transaction's connection (SAConnection instance).""" return self._connection async def close(self): """Close this transaction. If this transaction is the base transaction in a begin/commit nesting, the transaction will rollback(). Otherwise, the method returns. This is used to cancel a Transaction without affecting the scope of an enclosing transaction. 
""" if not self._parent._is_active: return if self._parent is self: await self.rollback() else: self._is_active = False async def rollback(self): """Roll back this transaction.""" if not self._parent._is_active: return await self._do_rollback() self._is_active = False async def _do_rollback(self): await self._parent.rollback() async def commit(self): """Commit this transaction.""" if not self._parent._is_active: raise exc.InvalidRequestError("This transaction is inactive") await self._do_commit() self._is_active = False async def _do_commit(self): pass async def __aenter__(self): return self async def __aexit__(self, exc_type, exc_val, exc_tb): if exc_type: await self.rollback() else: if self._is_active: await self.commit() class RootTransaction(Transaction): def __init__(self, connection): super().__init__(connection, None) async def _do_rollback(self): await self._connection._rollback_impl() async def _do_commit(self): await self._connection._commit_impl() class NestedTransaction(Transaction): """Represent a 'nested', or SAVEPOINT transaction. A new NestedTransaction object may be procured using the SAConnection.begin_nested() method. The interface is the same as that of Transaction class. """ _savepoint = None def __init__(self, connection, parent): super().__init__(connection, parent) async def _do_rollback(self): assert self._savepoint is not None, "Broken transaction logic" if self._is_active: await self._connection._rollback_to_savepoint_impl( self._savepoint, self._parent) async def _do_commit(self): assert self._savepoint is not None, "Broken transaction logic" if self._is_active: await self._connection._release_savepoint_impl( self._savepoint, self._parent) class TwoPhaseTransaction(Transaction): """Represent a two-phase transaction. A new TwoPhaseTransaction object may be procured using the SAConnection.begin_twophase() method. The interface is the same as that of Transaction class with the addition of the .prepare() method. """ def __init__(self, connection, xid): super().__init__(connection, None) self._is_prepared = False self._xid = xid @property def xid(self): """Returns twophase transaction id.""" return self._xid async def prepare(self): """Prepare this TwoPhaseTransaction. After a PREPARE, the transaction can be committed. 
""" if not self._parent.is_active: raise exc.InvalidRequestError("This transaction is inactive") await self._connection._prepare_twophase_impl(self._xid) self._is_prepared = True async def _do_rollback(self): await self._connection.rollback_prepared( self._xid, is_prepared=self._is_prepared) async def _do_commit(self): await self._connection.commit_prepared( self._xid, is_prepared=self._is_prepared) aiomysql-0.3.2/aiomysql/utils.py000066400000000000000000000106061507601712200167260ustar00rootroot00000000000000from collections.abc import Coroutine import struct def _pack_int24(n): return struct.pack(" """ __slots__ = ('_pool', '_conn') def __init__(self, pool, conn): self._pool = pool self._conn = conn def __enter__(self): assert self._conn return self._conn def __exit__(self, exc_type, exc_val, exc_tb): try: self._pool.release(self._conn) finally: self._pool = None self._conn = None async def __aenter__(self): assert not self._conn self._conn = await self._pool.acquire() return self._conn async def __aexit__(self, exc_type, exc_val, exc_tb): try: await self._pool.release(self._conn) finally: self._pool = None self._conn = None aiomysql-0.3.2/docker-compose.yml000066400000000000000000000004131507601712200170060ustar00rootroot00000000000000mysql: image: mysql:8.4 environment: - MYSQL_USER=aiomysql - MYSQL_PASSWORD=mypass - MYSQL_DATABASE=test_pymysql - MYSQL_ROOT_PASSWORD=mypass ports: - 3306:3306 volumes: - /tmp/aiomysql:/var/run/mysqld/ aiomysql-0.3.2/docs/000077500000000000000000000000001507601712200143035ustar00rootroot00000000000000aiomysql-0.3.2/docs/Makefile000066400000000000000000000011721507601712200157440ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line, and also # from the environment for the first two. SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) aiomysql-0.3.2/docs/conf.py000066400000000000000000000024111507601712200156000ustar00rootroot00000000000000# Configuration file for the Sphinx documentation builder. 
# # For the full list of built-in configuration values, see the documentation: # https://www.sphinx-doc.org/en/master/usage/configuration.html from aiomysql import __version__ # -- Project information ----------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#project-information project = 'aiomysql' copyright = '2015,2016 Nikolay Novik' author = 'Nikolay Novik' version = '.'.join(__version__.split('.')[:2]) release = __version__ # -- General configuration --------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#general-configuration extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.viewcode', 'sphinx_copybutton', ] intersphinx_mapping = { 'python': ('https://docs.python.org/3', None), } templates_path = ['_templates'] exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] language = 'en' highlight_language = 'python3' # -- Options for HTML output ------------------------------------------------- # https://www.sphinx-doc.org/en/master/usage/configuration.html#options-for-html-output html_theme = 'furo' htmlhelp_basename = 'aiomysqldoc' aiomysql-0.3.2/docs/connection.rst000066400000000000000000000153161507601712200172020ustar00rootroot00000000000000.. _api: :mod:`aiomysql` --- API Reference ================================= .. module:: aiomysql :synopsis: A library for accessing a MySQL database from the asyncio .. currentmodule:: aiomysql .. _aiomysql-connection: Connection ========== The library provides a way to connect to MySQL database with simple factory function :func:`aiomysql.connnect`. Use this function if you want just one connection to the database, consider connection pool for multiple connections. Example:: import asyncio import aiomysql loop = asyncio.get_event_loop() async def test_example(): conn = await aiomysql.connect(host='127.0.0.1', port=3306, user='root', password='', db='mysql', loop=loop) cur = await conn.cursor() await cur.execute("SELECT Host,User FROM user") print(cur.description) r = await cur.fetchall() print(r) await cur.close() conn.close() loop.run_until_complete(test_example()) .. function:: connect(host="localhost", user=None, password="", db=None, port=3306, unix_socket=None, charset='', sql_mode=None, read_default_file=None, conv=decoders, use_unicode=None, client_flag=0, cursorclass=Cursor, init_command=None, connect_timeout=None, read_default_group=None, autocommit=False, echo=False local_infile=False, loop=None, ssl=None, auth_plugin='', program_name='', server_public_key=None) A :ref:`coroutine ` that connects to MySQL. The function accepts all parameters that :func:`pymysql.connect` does plus optional keyword-only *loop* and *timeout* parameters. :param str host: host where the database server is located, default: `localhost`. :param str user: username to log in as. :param str password: password to use. :param str db: database to use, None to not use a particular one. :param int port: MySQL port to use, default is usually OK. :param str unix_socket: optionally, you can use a unix socket rather than TCP/IP. :param str charset: charset you want to use, for example 'utf8'. :param sql_mode: default sql-mode_ to use, like 'NO_BACKSLASH_ESCAPES' :param read_default_file: specifies my.cnf file to read these parameters from under the [client] section. :param conv: decoders dictionary to use instead of the default one. This is used to provide custom marshalling of types. See `pymysql.converters`. 
:param use_unicode: whether or not to default to unicode strings. :param client_flag: custom flags to send to MySQL. Find potential values in `pymysql.constants.CLIENT`. Refer to the `local_infile` parameter for enabling loading of local data. :param cursorclass: custom cursor class to use. :param str init_command: initial SQL statement to run when connection is established. :param connect_timeout: Timeout in seconds before throwing an exception when connecting. :param str read_default_group: Group to read from in the configuration file. :param autocommit: Autocommit mode. None means use server default. (default: ``False``) :param local_infile: Boolean to enable the use of `LOAD DATA LOCAL` command. This also enables the corresponding `client_flag`. aiomysql does not perform any validation of files requested by the server. Do not use this with untrusted servers. (default: ``False``) :param ssl: Optional SSL Context to force SSL :param auth_plugin: String to manually specify the authentication plugin to use, i.e you will want to use mysql_clear_password when using IAM authentication with Amazon RDS. (default: Server Default) :param program_name: Program name string to provide when handshaking with MySQL. (omitted by default) .. versionchanged:: 1.0 ``sys.argv[0]`` is no longer passed by default :param server_public_key: SHA256 authenticaiton plugin public key value. :param loop: asyncio event loop instance or ``None`` for default one. :returns: :class:`Connection` instance. Representation of a socket with a mysql server. The proper way to get an instance of this class is to call :func:`aiomysql.connnect`. Its insterface is almost the same as `pymysql.connection` except all methods are :ref:`coroutines `. The most important methods are: .. method:: cursor(cursor=None) A :ref:`coroutine ` that creates a new cursor object using the connection. By default, :class:`Cursor` is returned. It is possible to also give a custom cursor through the `cursor` parameter, but it needs to be a subclass of :class:`Cursor` :param cursor: subclass of :class:`Cursor` or ``None`` for default cursor. :returns: :class:`Cursor` instance. .. method:: close() Immediately close the connection. Close the connection now (rather than whenever `del` is executed). The connection will be unusable from this point forward. .. method:: ensure_closed() A :ref:`coroutine ` ends quit command and then closes socket connection. .. method:: autocommit(value) A :ref:`coroutine ` to enable/disable autocommit mode for current MySQL session. :param bool value: toggle atutocommit mode. .. method:: get_autocommit() Returns autocommit status for current MySQL sesstion. :returns bool: current autocommit status. .. method:: begin() A :ref:`coroutine ` to begin transaction. .. method:: commit() Commit changes to stable storage :ref:`coroutine `. .. method:: rollback() Roll back the current transaction :ref:`coroutine `. .. method:: select_db(db) A :ref:`coroutine ` to set current db. :param str db: database name .. attribute:: closed The readonly property that returns ``True`` if connections is closed. .. attribute:: host MySQL server IP address or name. .. attribute:: port MySQL server TCP/IP port. .. attribute:: unix_socket ySQL Unix socket file location. .. attribute:: db Current database name. .. attribute:: user User used while connecting to MySQL .. attribute:: echo Return echo mode status. .. attribute:: encoding Encoding employed for this connection. .. attribute:: charset Returns the character set for current connection. .. 
_sql-mode: http://dev.mysql.com/doc/refman/5.0/en/sql-mode.html aiomysql-0.3.2/docs/contributing.rst000066400000000000000000000000751507601712200175460ustar00rootroot00000000000000.. _aiomysql-contributing: .. include:: ../CONTRIBUTING.rst aiomysql-0.3.2/docs/cursors.rst000066400000000000000000000340641507601712200165440ustar00rootroot00000000000000.. _aiomysql-cursors: Cursor ====== .. class:: Cursor A cursor for connection. Allows Python code to execute :term:`MySQL` command in a database session. Cursors are created by the :meth:`Connection.cursor` :ref:`coroutine `: they are bound to the connection for the entire lifetime and all the commands are executed in the context of the database session wrapped by the connection. Cursors that are created from the same connection are not isolated, i.e., any changes done to the database by a cursor are immediately visible by the other cursors. Cursors created from different connections can or can not be isolated, depending on the connectionsโ€™ isolation level. .. code:: python import asyncio import aiomysql loop = asyncio.get_event_loop() async def test_example(): conn = await aiomysql.connect(host='127.0.0.1', port=3306, user='root', password='', db='mysql', loop=loop) # create default cursor cursor = await conn.cursor() # execute sql query await cursor.execute("SELECT Host, User FROM user") # fetch all results r = await cursor.fetchall() # detach cursor from connection await cursor.close() # close connection conn.close() loop.run_until_complete(test_example()) Use :meth:`Connection.cursor()` for getting cursor for connection. .. attribute:: connection This read-only attribute return a reference to the :class:`Connection` object on which the cursor was created .. attribute:: echo Return echo mode status. .. attribute:: description This read-only attribute is a sequence of 7-item sequences. Each of these sequences is a collections.namedtuple containing information describing one result column: 0. name: the name of the column returned. 1. type_code: the type of the column. 2. display_size: the actual length of the column in bytes. 3. internal_size: the size in bytes of the column associated to this column on the server. 4. precision: total number of significant digits in columns of type ``NUMERIC``. None for other types. 5. scale: count of decimal digits in the fractional part in columns of type ``NUMERIC``. None for other types. 6. null_ok: always None. This attribute will be None for operations that do not return rows or if the cursor has not had an operation invoked via the :meth:`Cursor.execute()` method yet. .. attribute:: rowcount Returns the number of rows that has been produced of affected. This read-only attribute specifies the number of rows that the last :meth:`Cursor.execute()` produced (for Data Query Language statements like SELECT) or affected (for Data Manipulation Language statements like ``UPDATE`` or ``INSERT``). The attribute is -1 in case no :meth:`Cursor.execute()` has been performed on the cursor or the row count of the last operation if it can't be determined by the interface. .. attribute:: rownumber Row index. This read-only attribute provides the current 0-based index of the cursor in the result set or ``None`` if the index cannot be determined. .. attribute:: arraysize How many rows will be returned by :meth:`Cursor.fetchmany()` call. This read/write attribute specifies the number of rows to fetch at a time with :meth:`Cursor.fetchmany()`. It defaults to 1 meaning to fetch a single row at a time. .. 
attribute:: lastrowid This read-only property returns the value generated for an `AUTO_INCREMENT` column by the previous `INSERT` or `UPDATE` statement or None when there is no such value available. For example, if you perform an `INSERT` into a table that contains an `AUTO_INCREMENT` column, :attr:`Cursor.lastrowid` returns the `AUTO_INCREMENT` value for the new row. .. attribute:: closed The readonly property that returns ``True`` if connections was detached from current cursor .. method:: close() :ref:`Coroutine ` to close the cursor now (rather than whenever ``del`` is executed). The cursor will be unusable from this point forward; closing a cursor just exhausts all remaining data. .. method:: execute(query, args=None) :ref:`Coroutine `, executes the given operation substituting any markers with the given parameters. For example, getting all rows where id is 5:: await cursor.execute("SELECT * FROM t1 WHERE id=%s", (5,)) :param str query: sql statement :param list args: tuple or list of arguments for sql query :returns int: number of rows that has been produced of affected .. method:: executemany(query, args) The `executemany()` :ref:`coroutine ` will execute the operation iterating over the list of parameters in seq_params. Example: Inserting 3 new employees and their phone number:: data = [ ('Jane','555-001'), ('Joe', '555-001'), ('John', '555-003') ] stmt = "INSERT INTO employees (name, phone) VALUES ('%s','%s')" await cursor.executemany(stmt, data) `INSERT` statements are optimized by batching the data, that is using the MySQL multiple rows syntax. :param str query: sql statement :param list args: tuple or list of arguments for sql query .. method:: callproc(procname, args) Execute stored procedure procname with args, this method is :ref:`coroutine `. Compatibility warning: PEP-249 specifies that any modified parameters must be returned. This is currently impossible as they are only available by storing them in a server variable and then retrieved by a query. Since stored procedures return zero or more result sets, there is no reliable way to get at OUT or INOUT parameters via `callproc`. The server variables are named `@_procname_n`, where `procname` is the parameter above and n is the position of the parameter (from zero). Once all result sets generated by the procedure have been fetched, you can issue a `SELECT @_procname_0`, ... query using :meth:`Cursor.execute()` to get any OUT or INOUT values. Basic usage example:: conn = await aiomysql.connect(host='127.0.0.1', port=3306, user='root', password='', db='mysql', loop=self.loop) cur = await conn.cursor() await cur.execute("""CREATE PROCEDURE myinc(p1 INT) BEGIN SELECT p1 + 1; END """) await cur.callproc('myinc', [1]) (ret, ) = await cur.fetchone() assert 2, ret await cur.close() conn.close() Compatibility warning: The act of calling a stored procedure itself creates an empty result set. This appears after any result sets generated by the procedure. This is non-standard behavior with respect to the DB-API. Be sure to use :meth:`Cursor.nextset()` to advance through all result sets; otherwise you may get disconnected. :param str procname: name of procedure to execute on server :param args: sequence of parameters to use with procedure :returns: the original args. .. method:: fetchone() Fetch the next row :ref:`coroutine `. .. method:: fetchmany(size=None) :ref:`Coroutine ` the next set of rows of a query result, returning a list of tuples. When no more rows are available, it returns an empty list. 
The number of rows to fetch per call is specified by the parameter. If it is not given, the cursor's :attr:`Cursor.arraysize` determines the number of rows to be fetched. The method should try to fetch as many rows as indicated by the size parameter. If this is not possible due to the specified number of rows not being available, fewer rows may be returned :: cursor = await connection.cursor() await cursor.execute("SELECT * FROM test;") r = cursor.fetchmany(2) print(r) # [(1, 100, "abc'def"), (2, None, 'dada')] r = await cursor.fetchmany(2) print(r) # [(3, 42, 'bar')] r = await cursor.fetchmany(2) print(r) # [] :param int size: number of rows to return :returns list: of fetched rows .. method:: fetchall() :ref:`Coroutine ` returns all rows of a query result set:: await cursor.execute("SELECT * FROM test;") r = await cursor.fetchall() print(r) # [(1, 100, "abc'def"), (2, None, 'dada'), (3, 42, 'bar')] :returns list: list of fetched rows .. method:: scroll(value, mode='relative') Scroll the cursor in the result set to a new position according to mode. This method is :ref:`coroutine `. If mode is ``relative`` (default), value is taken as offset to the current position in the result set, if set to ``absolute``, value states an absolute target position. An IndexError should be raised in case a scroll operation would leave the result set. In this case, the cursor position is left undefined (ideal would be to not move the cursor at all). .. note:: According to the :term:`DBAPI`, the exception raised for a cursor out of bound should have been :exc:`IndexError`. The best option is probably to catch both exceptions in your code:: try: await cur.scroll(1000 * 1000) except (ProgrammingError, IndexError), exc: deal_with_it(exc) :param int value: move cursor to next position according to mode. :param str mode: scroll mode, possible modes: `relative` and `absolute` .. class:: DictCursor A cursor which returns results as a dictionary. All methods and arguments same as :class:`Cursor`, see example:: import asyncio import aiomysql loop = asyncio.get_event_loop() async def test_example(): conn = await aiomysql.connect(host='127.0.0.1', port=3306, user='root', password='', db='mysql', loop=loop) # create dict cursor cursor = await conn.cursor(aiomysql.DictCursor) # execute sql query await cursor.execute( "SELECT * from people where name='bob'") # fetch all results r = await cursor.fetchone() print(r) # {'age': 20, 'DOB': datetime.datetime(1990, 2, 6, 23, 4, 56), # 'name': 'bob'} loop.run_until_complete(test_example()) You can customize your dictionary, see example:: import asyncio import aiomysql class AttrDict(dict): """Dict that can get attribute by dot, and doesn't raise KeyError""" def __getattr__(self, name): try: return self[name] except KeyError: return None class AttrDictCursor(aiomysql.DictCursor): dict_type = AttrDict loop = asyncio.get_event_loop() async def test_example(): conn = await aiomysql.connect(host='127.0.0.1', port=3306, user='root', password='', db='mysql', loop=loop) # create your dict cursor cursor = await conn.cursor(AttrDictCursor) # execute sql query await cursor.execute( "SELECT * from people where name='bob'") # fetch all results r = await cursor.fetchone() print(r) # {'age': 20, 'DOB': datetime.datetime(1990, 2, 6, 23, 4, 56), # 'name': 'bob'} print(r.age) # 20 print(r.foo) # None loop.run_until_complete(test_example()) .. class:: SSCursor Unbuffered Cursor, mainly useful for queries that return a lot of data, or for connections to remote servers over a slow network. 
Instead of copying every row of data into a buffer, this will fetch rows as needed. The upside of this, is the client uses much less memory, and rows are returned much faster when traveling over a slow network, or if the result set is very big. There are limitations, though. The MySQL protocol doesn't support returning the total number of rows, so the only way to tell how many rows there are is to iterate over every row returned. Also, it currently isn't possible to scroll backwards, as only the current row is held in memory. All methods are the same as in :class:`Cursor` but with different behaviour. .. method:: fetchall() Same as :meth:`Cursor.fetchall` :ref:`coroutine `, useless for large queries, as all rows fetched one by one. .. method:: fetchmany(size=None, mode='relative') Same as :meth:`Cursor.fetchall`, but each row fetched one by one. .. method:: scroll(size=None) Same as :meth:`Cursor.scroll`, but move cursor on server side one by one. If you want to move 20 rows forward scroll will make 20 queries to move cursor. Currently only forward scrolling is supported. .. class:: SSDictCursor An unbuffered cursor, which returns results as a dictionary. aiomysql-0.3.2/docs/examples.rst000066400000000000000000000016641507601712200166620ustar00rootroot00000000000000Examples of aiomysql usage ========================== Below is a list of examples from `aiomysql/examples `_ Every example is a correct tiny python program that demonstrates specific feature of library. .. _aiomysql-examples-simple: Low-level API ------------- Basic example, fetch host and user information from internal table: user. .. literalinclude:: ../examples/example.py Example of stored procedure, which just increments input value. .. literalinclude:: ../examples/example_callproc.py Example of using `executemany` method: .. literalinclude:: ../examples/example_executemany.py Example of using transactions `rollback` and `commit` methods: .. literalinclude:: ../examples/example_transaction.py Example of using connection pool: .. literalinclude:: ../examples/example_pool.py sqlalchemy usage ---------------- .. literalinclude:: ../examples/example_simple_sa.py aiomysql-0.3.2/docs/glossary.rst000066400000000000000000000025121507601712200167000ustar00rootroot00000000000000.. _glossary: ******** Glossary ******** .. if you add new entries, keep the alphabetical sorting! .. glossary:: DBAPI :pep:`249` -- Python Database API Specification v2.0 ipdb ipdb exports functions to access the IPython debugger, which features tab completion, syntax highlighting, better tracebacks, better introspection with the same interface as the pdb module. MySQL A popular database server. http://www.mysql.com/ pep8 Python style guide checker *pep8* is a tool to check your Python code against some of the style conventions in :pep:`8` -- Style Guide for Python Code. pyflakes passive checker of Python programs A simple program which checks Python source files for errors. Pyflakes analyzes programs and detects various errors. It works by parsing the source file, not importing it, so it is safe to use on modules with side effects. It's also much faster. https://pypi.python.org/pypi/pyflakes PyMySQL Pure-Python MySQL client library. The goal of PyMySQL is to be a drop-in replacement for MySQLdb and work on CPython, PyPy, IronPython and Jython. https://github.com/PyMySQL/PyMySQL sqlalchemy The Python SQL Toolkit and Object Relational Mapper. 
http://www.sqlalchemy.org/ aiomysql-0.3.2/docs/index.rst000066400000000000000000000067751507601712200161630ustar00rootroot00000000000000.. _aiomysql-index: .. aiomysql documentation master file, created by sphinx-quickstart on Sun Jun 11 16:24:33 2023. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. Welcome to aiomysql's documentation! ==================================== .. _GitHub: https://github.com/aio-libs/aiomysql .. _asyncio: http://docs.python.org/3.5/library/asyncio.html .. _aiopg: https://github.com/aio-libs/aiopg .. _Tornado-MySQL: https://github.com/PyMySQL/Tornado-MySQL .. _aio-libs: https://github.com/aio-libs **aiomysql** is a library for accessing a :term:`MySQL` database from the asyncio_ (PEP-3156/tulip) framework. It depends and reuses most parts of :term:`PyMySQL` . **aiomysql** tries to be like awesome aiopg_ library and preserve same api, look and feel. Internally **aiomysql** is copy of PyMySQL, underlying io calls switched to async, basically ``await`` and ``async def coroutine`` added in proper places. :term:`sqlalchemy` support ported from aiopg_. Features -------- * Implements *asyncio* :term:`DBAPI` *like* interface for :term:`MySQL`. It includes :ref:`aiomysql-connection`, :ref:`aiomysql-cursors` and :ref:`aiomysql-pool` objects. * Implements *optional* support for charming :term:`sqlalchemy` functional sql layer. Basics ------ **aiomysql** based on :term:`PyMySQL` , and provides same api, you just need to use ``await conn.f()`` instead of just call ``conn.f()`` for every method. Properties are unchanged, so ``conn.prop`` is correct as well as ``conn.prop = val``. See example: .. code:: python import asyncio import aiomysql loop = asyncio.get_event_loop() async def test_example(): conn = await aiomysql.connect(host='127.0.0.1', port=3306, user='root', password='', db='mysql', loop=loop) cur = await conn.cursor() await cur.execute("SELECT Host,User FROM user") print(cur.description) r = await cur.fetchall() print(r) await cur.close() conn.close() loop.run_until_complete(test_example()) Installation ------------ .. code:: pip3 install aiomysql .. note:: :mod:`aiomysql` requires :term:`PyMySQL` library. Also you probably want to use :mod:`aiomysql.sa`. .. _aiomysql-install-sqlalchemy: :mod:`aiomysql.sa` module is **optional** and requires :term:`sqlalchemy`. You can install *sqlalchemy* by running:: pip3 install sqlalchemy Source code ----------- The project is hosted on GitHub_ Please feel free to file an issue on `bug tracker `_ if you have found a bug or have some suggestion for library improvement. The library uses `GitHub Actions `_ for Continuous Integration and `Codecov `_ for coverage reports. Dependencies ------------ - Python 3.9+ - :term:`PyMySQL` - aiomysql.sa requires :term:`sqlalchemy`. Authors and License ------------------- The ``aiomysql`` package is written by Nikolay Novik, :term:`PyMySQL` and aio-libs_ contributors. It's MIT licensed (same as PyMySQL). Feel free to improve this package and send a pull request to GitHub_. Contents: --------- .. toctree:: :maxdepth: 2 :titlesonly: connection cursors pool tutorial sa examples glossary contributing Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` aiomysql-0.3.2/docs/make.bat000066400000000000000000000014401507601712200157070ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=. 
set BUILDDIR=_build %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.https://www.sphinx-doc.org/ exit /b 1 ) if "%1" == "" goto help %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% :end popd aiomysql-0.3.2/docs/pool.rst000066400000000000000000000104761507601712200160160ustar00rootroot00000000000000.. _aiomysql-pool: Pool ==== The library provides *connection pool* as well as plain :class:`Connection` objects. The basic usage is:: import asyncio import aiomysql loop = asyncio.get_event_loop() async def go(): pool = await aiomysql.create_pool(host='127.0.0.1', port=3306, user='root', password='', db='mysql', loop=loop, autocommit=False) async with pool.acquire() as conn: async with conn.cursor() as cur: await cur.execute("SELECT 10") # print(cur.description) (r,) = await cur.fetchone() assert r == 10 pool.close() await pool.wait_closed() loop.run_until_complete(go()) .. function:: create_pool(minsize=1, maxsize=10, loop=None, **kwargs) A :ref:`coroutine ` that creates a pool of connections to :term:`MySQL` database. :param int minsize: minimum sizes of the *pool*. :param int maxsize: maximum sizes of the *pool*. :param loop: is an optional *event loop* instance, :func:`asyncio.get_event_loop` is used if *loop* is not specified. :param bool echo: -- executed log SQL queryes (``False`` by default). :param kwargs: The function accepts all parameters that :func:`aiomysql.connect` does plus optional keyword-only parameters *loop*, *minsize*, *maxsize*. :param float pool_recycle: number of seconds after which connection is recycled, helps to deal with stale connections in pool, default value is -1, means recycling logic is disabled. :returns: :class:`Pool` instance. .. class:: Pool A connection pool. After creation pool has *minsize* free connections and can grow up to *maxsize* ones. If *minsize* is ``0`` the pool doesn't creates any connection on startup. If *maxsize* is ``0`` than size of pool is unlimited (but it recycles used connections of course). The most important way to use it is getting connection in *with statement*:: async with pool.acquire() as conn: async with conn.cursor() as cur: pass See also :meth:`Pool.acquire` and :meth:`Pool.release` for acquiring :class:`Connection` without *with statement*. .. attribute:: echo Return *echo mode* status. Log all executed queries to logger named ``aiomysql`` if ``True`` .. attribute:: minsize A minimal size of the pool (*read-only*), ``1`` by default. .. attribute:: maxsize A maximal size of the pool (*read-only*), ``10`` by default. .. attribute:: size A current size of the pool (*readonly*). Includes used and free connections. .. attribute:: freesize A count of free connections in the pool (*readonly*). .. method:: clear() A :ref:`coroutine ` that closes all *free* connections in the pool. At next connection acquiring at least :attr:`minsize` of them will be recreated. .. method:: close() Close pool. Mark all pool connections to be closed on getting back to pool. Closed pool doesn't allow to acquire new connections. If you want to wait for actual closing of acquired connection please call :meth:`wait_closed` after :meth:`close`. 
.. warning:: The method is not a :ref:`coroutine `. .. method:: terminate() Terminate pool. Close pool with instantly closing all acquired connections also. :meth:`wait_closed` should be called after :meth:`terminate` for waiting for actual finishing. .. warning:: The method is not a :ref:`coroutine `. .. method:: wait_closed() A :ref:`coroutine ` that waits for releasing and closing all acquired connections. Should be called after :meth:`close` for waiting for actual pool closing. .. method:: acquire() A :ref:`coroutine ` that acquires a connection from *free pool*. Creates new connection if needed and :attr:`size` of pool is less than :attr:`maxsize`. Returns a :class:`Connection` instance. .. method:: release(conn) Reverts connection *conn* to *free pool* for future recycling. .. warning:: The method is not a :ref:`coroutine `. aiomysql-0.3.2/docs/sa.rst000066400000000000000000000454131507601712200154470ustar00rootroot00000000000000.. _aiomysql-sa: :mod:`aiomysql.sa` --- support for SQLAlchemy functional SQL layer ================================================================== .. module:: aiomysql.sa :synopsis: support for SQLAlchemy functional SQL layer .. currentmodule:: aiomysql.sa Intro ----- .. note:: :term:`sqlalchemy` support ported from aiopg_, so api should be very familiar for aiopg_ user. While :ref:`core API ` provides a core support for access to :term:`MySQL` database, manipulations with raw SQL strings too annoying. Fortunately we can use excellent :ref:`aiomysql ` as **SQL query builder**. Example:: import asyncio import sqlalchemy as sa from aiomysql.sa import create_engine metadata = sa.MetaData() tbl = sa.Table( "tbl", metadata, sa.Column("id", sa.Integer, primary_key=True), sa.Column("val", sa.String(255)), ) async def go(): engine = await create_engine( user="root", db="test_pymysql", host="127.0.0.1", password="", ) async with engine.acquire() as conn: async with conn.begin() as transaction: await conn.execute(tbl.insert().values(val="abc")) await transaction.commit() res = await conn.execute(tbl.select()) async for row in res: print(row.id, row.val) engine.close() await engine.wait_closed() asyncio.run(go()) So you can execute SQL query built by ``tbl.insert().values(val='abc')`` or ``tbl.select()`` expressions. :term:`sqlalchemy` has rich and very powerful set of SQL construction functions, please read :ref:`tutorial ` for full list of available operations. Also we provide SQL transactions support. Please take a look on :meth:`SAConnection.begin` method and family. .. _aiopg: https://github.com/aio-libs/aiopg Engine ------ .. function:: create_engine(*, minsize=1, maxsize=10, loop=None, \ dialect=dialect, **kwargs) A :ref:`coroutine ` for :class:`Engine` creation. Returns :class:`Engine` instance with embedded connection pool. The pool has *minsize* opened connections to :term:`MySQL` server. At *kwargs* function accepts all parameters that :func:`aiomysql.connect` does. .. data:: dialect An instance of :term:`SQLAlchemy` dialect set up for :term:`pymysql` usage. An :class:`sqlalchemy.engine.interfaces.Dialect` instance. .. seealso:: :mod:`sqlalchemy.dialects.mysql.pymysql` PyMySQL dialect. .. class:: Engine Connects a :class:`aiomysql.Pool` and :class:`sqlalchemy.engine.interfaces.Dialect` together to provide a source of database connectivity and behavior. An :class:`Engine` object is instantiated publicly using the :func:`create_engine` coroutine. .. attribute:: dialect A :class:`sqlalchemy.engine.interfaces.Dialect` for the engine, readonly property. .. 
attribute:: name A name of the dialect, readonly property. .. attribute:: driver A driver of the dialect, readonly property. .. attribute:: minsize A minimal size of the pool (*read-only*), ``1`` by default. .. attribute:: maxsize A maximal size of the pool (*read-only*), ``10`` by default. .. attribute:: size A current size of the pool (*readonly*). Includes used and free connections. .. attribute:: freesize A count of free connections in the pool (*readonly*). .. method:: close() Close engine. Mark all engine connections to be closed on getting back to engine. Closed engine doesn't allow to acquire new connections. If you want to wait for actual closing of acquired connection please call :meth:`wait_closed` after :meth:`close`. .. warning:: The method is not a :ref:`coroutine `. .. method:: terminate() Terminate engine. Close engine's pool with instantly closing all acquired connections also. :meth:`wait_closed` should be called after :meth:`terminate` for waiting for actual finishing. .. warning:: The method is not a :ref:`coroutine `. .. method:: wait_closed() A :ref:`coroutine ` that waits for releasing and closing all acquired connections. Should be called after :meth:`close` for waiting for actual engine closing. .. method:: acquire() Get a connection from pool. This method is a :ref:`coroutine `. Returns a :class:`SAConnection` instance. .. method:: release() Revert back connection *conn* to pool. .. warning:: The method is not a :ref:`coroutine `. Connection ---------- .. class:: SAConnection A wrapper for :class:`aiomysql.Connection` instance. The class provides methods for executing *SQL queries* and working with *SQL transactions*. .. method:: execute(query, *multiparams, **params) Executes a *SQL* *query* with optional parameters. This method is a :ref:`coroutine `. :param query: a SQL query string or any :term:`sqlalchemy` expression (see :ref:`aiomysql core `) :param \*multiparams/\**params: represent bound parameter values to be used in the execution. Typically, the format is either a dictionary passed to \*multiparams:: await conn.execute( table.insert(), {"id":1, "value":"v1"} ) ...or individual key/values interpreted by \**params:: await conn.execute( table.insert(), id=1, value="v1" ) In the case that a plain SQL string is passed, a tuple or individual values in \*multiparams may be passed:: await conn.execute( "INSERT INTO table (id, value) VALUES (%d, %s)", (1, "v1") ) await conn.execute( "INSERT INTO table (id, value) VALUES (%s, %s)", 1, "v1" ) :returns: :class:`ResultProxy` instance with results of SQL query execution. .. method:: scalar(query, *multiparams, **params) Executes a *SQL* *query* and returns a scalar value. This method is a :ref:`coroutine `. .. seealso:: :meth:`SAConnection.execute` and :meth:`ResultProxy.scalar`. .. attribute:: closed The readonly property that returns ``True`` if connections is closed. .. method:: begin() Begin a transaction and return a transaction handle. This method is a :ref:`coroutine `. The returned object is an instance of :class:`.Transaction`. This object represents the "scope" of the transaction, which completes when either the :meth:`.Transaction.rollback` or :meth:`.Transaction.commit` method is called. 
Nested calls to :meth:`.begin` on the same :class:`.SAConnection` will return new :class:`.Transaction` objects that represent an emulated transaction within the scope of the enclosing transaction, that is:: trans = await conn.begin() # outermost transaction trans2 = await conn.begin() # "inner" await trans2.commit() # does nothing await trans.commit() # actually commits Calls to :meth:`.Transaction.commit` only have an effect when invoked via the outermost :class:`.Transaction` object, though the :meth:`.Transaction.rollback` method of any of the :class:`.Transaction` objects will roll back the transaction. .. seealso:: :meth:`.SAConnection.begin_nested` - use a SAVEPOINT :meth:`.SAConnection.begin_twophase` - use a two phase (XA) transaction .. method:: begin_nested() Begin a nested transaction and return a transaction handle. This method is a :ref:`coroutine `. The returned object is an instance of :class:`.NestedTransaction`. Any transaction in the hierarchy may ``commit`` and ``rollback``, however the outermost transaction still controls the overall ``commit`` or ``rollback`` of the transaction of a whole. It utilizes SAVEPOINT facility of :term:`MySQL` server. .. seealso:: :meth:`.SAConnection.begin`, :meth:`.SAConnection.begin_twophase`. .. method:: begin_twophase(xid=None) Begin a two-phase or XA transaction and return a transaction handle. This method is a :ref:`coroutine `. The returned object is an instance of :class:`.TwoPhaseTransaction`, which in addition to the methods provided by :class:`.Transaction`, also provides a :meth:`~.TwoPhaseTransaction.prepare` method. :param xid: the two phase transaction id. If not supplied, a random id will be generated. .. seealso:: :meth:`.SAConnection.begin`, :meth:`.SAConnection.begin_twophase`. .. method:: recover_twophase() Return a list of prepared twophase transaction ids. This method is a :ref:`coroutine `. .. method:: rollback_prepared(xid) Rollback prepared twophase transaction *xid*. This method is a :ref:`coroutine `. .. method:: commit_prepared(xid) Commit prepared twophase transaction *xid*. This method is a :ref:`coroutine `. .. attribute:: in_transaction The readonly property that returns ``True`` if a transaction is in progress. .. method:: close() Close this :class:`SAConnection`. This method is a :ref:`coroutine `. This results in a release of the underlying database resources, that is, the :class:`aiomysql.Connection` referenced internally. The :class:`aiomysql.Connection` is typically restored back to the connection-holding :class:`aiomysql.Pool` referenced by the :class:`.Engine` that produced this :class:`SAConnection`. Any transactional state present on the :class:`aiomysql.Connection` is also unconditionally released via calling :meth:`Transaction.rollback` method. After :meth:`~.SAConnection.close` is called, the :class:`.SAConnection` is permanently in a closed state, and will allow no further operations. ResultProxy ----------- .. class:: ResultProxy Wraps a *DB-API like* :class:`Cursor` object to provide easier access to row columns. Individual columns may be accessed by their integer position, case-sensitive column name, or by :class:`sqlalchemy.schema.Column`` object. e.g.:: async for row in conn.execute(...): col1 = row[0] # access via integer position col2 = row['col2'] # access via name col3 = row[mytable.c.mycol] # access via Column object. 
:class:`ResultProxy` also handles post-processing of result column data using :class:`sqlalchemy.types.TypeEngine` objects, which are referenced from the originating SQL statement that produced this result set. .. attribute:: dialect The readonly property that returns :class:`sqlalchemy.engine.interfaces.Dialect` dialect for the :class:`ResultProxy` instance. .. seealso:: :data:`dialect` global data. .. method:: keys() Return the current set of string keys for rows. .. attribute:: rowcount The readonly property that returns the 'rowcount' for this result. The 'rowcount' reports the number of rows *matched* by the WHERE criterion of an UPDATE or DELETE statement. .. note:: Notes regarding :attr:`ResultProxy.rowcount`: * This attribute returns the number of rows *matched*, which is not necessarily the same as the number of rows that were actually *modified* - an UPDATE statement, for example, may have no net change on a given row if the SET values given are the same as those present in the row already. Such a row would be matched but not modified. * :attr:`ResultProxy.rowcount` is *only* useful in conjunction with an UPDATE or DELETE statement. Contrary to what the Python DBAPI says, it does *not* return the number of rows available from the results of a SELECT statement as DBAPIs cannot support this functionality when rows are unbuffered. * Statements that use RETURNING does not return a correct rowcount. .. attribute:: lastrowid Returns the 'lastrowid' accessor on the DBAPI cursor. value generated for an *AUTO_INCREMENT* column by the previous INSERT or UPDATE statement or None when there is no such value available. For example, if you perform an *INSERT* into a table that contains an AUTO_INCREMENT column, `lastrowid` returns the *AUTO_INCREMENT* value for the new row. .. attribute:: returns_rows A readonly property that returns ``True`` if this :class:`ResultProxy` returns rows. I.e. if it is legal to call the methods :meth:`ResultProxy.fetchone`, :meth:`ResultProxy.fetchmany`, :meth:`ResultProxy.fetchall`. .. attribute:: closed Return ``True`` if this :class:`ResultProxy` is closed (no pending rows in underlying cursor). .. method:: close() Close this :class:`ResultProxy`. Closes the underlying :class:`aiomysql.Cursor` corresponding to the execution. Note that any data cached within this :class:`ResultProxy` is still available. For some types of results, this may include buffered rows. This method is called automatically when: * all result rows are exhausted using the fetchXXX() methods. * cursor.description is None. .. method:: fetchall() Fetch all rows, just like :meth:`aiomysql.Cursor.fetchall`. This method is a :ref:`coroutine `. The connection is closed after the call. Returns a list of :class:`RowProxy`. .. method:: fetchone() Fetch one row, just like :meth:`aiomysql.Cursor.fetchone`. This method is a :ref:`coroutine `. If a row is present, the cursor remains open after this is called. Else the cursor is automatically closed and ``None`` is returned. Returns an :class:`RowProxy` instance or ``None``. .. method:: fetchmany(size=None) Fetch many rows, just like :meth:`aiomysql.Cursor.fetchmany`. This method is a :ref:`coroutine `. If rows are present, the cursor remains open after this is called. Else the cursor is automatically closed and an empty list is returned. Returns a list of :class:`RowProxy`. .. method:: first() Fetch the first row and then close the result set unconditionally. This method is a :ref:`coroutine `. 
      Returns a :class:`RowProxy` instance, or ``None`` if no row is
      present.

   .. method:: scalar()

      Fetch the first column of the first row, and close the result set.

      Returns the value of the first column, or ``None`` if no row is
      present.

.. class:: RowProxy

   A :class:`collections.abc.Mapping` for representing a row in a query
   result. Keys are column names, values are result values.

   Individual columns may be accessed by their integer position,
   case-sensitive column name, or by :class:`sqlalchemy.schema.Column`
   object.

   Has overloaded operators ``__eq__`` and ``__ne__`` for comparing two
   rows.

   The :class:`RowProxy` is *not hashable*.

   .. method:: as_tuple()

      Return a tuple with values from :meth:`RowProxy.values`.


Transaction objects
-------------------

.. class:: Transaction

   Represent a database transaction in progress.

   The :class:`Transaction` object is procured by calling the
   :meth:`SAConnection.begin` method of :class:`SAConnection`::

      async with engine.acquire() as conn:
          trans = await conn.begin()
          try:
              await conn.execute("insert into x (a, b) values (1, 2)")
          except Exception:
              await trans.rollback()
          else:
              await trans.commit()

   The object provides :meth:`.rollback` and :meth:`.commit` methods in
   order to control transaction boundaries.

   .. seealso:: :meth:`SAConnection.begin`,
      :meth:`SAConnection.begin_twophase`,
      :meth:`SAConnection.begin_nested`.

   .. attribute:: is_active

      A readonly property that returns ``True`` if a transaction is
      active.

   .. attribute:: connection

      A readonly property that returns the :class:`SAConnection` for
      this transaction.

   .. method:: close()

      Close this :class:`Transaction`.

      This method is a :ref:`coroutine `.

      If this transaction is the base transaction in a begin/commit
      nesting, the transaction will be rolled back via
      :meth:`Transaction.rollback`. Otherwise, the method returns.

      This is used to cancel a :class:`Transaction` without affecting
      the scope of an enclosing transaction.

   .. method:: rollback()

      Roll back this :class:`Transaction`.

      This method is a :ref:`coroutine `.

   .. method:: commit()

      Commit this :class:`Transaction`.

      This method is a :ref:`coroutine `.

.. class:: NestedTransaction

   Represent a 'nested', or SAVEPOINT transaction.

   A new :class:`NestedTransaction` object may be procured using the
   :meth:`SAConnection.begin_nested` method.

   The interface is the same as that of :class:`Transaction`.

   .. seealso:: `SAVEPOINT, ROLLBACK TO SAVEPOINT, and RELEASE SAVEPOINT`__ on :term:`MySQL`:

   .. __: https://dev.mysql.com/doc/refman/8.4/en/savepoint.html

.. class:: TwoPhaseTransaction

   Represent a two-phase transaction.

   A new :class:`TwoPhaseTransaction` object may be procured using the
   :meth:`SAConnection.begin_twophase` method.

   The interface is the same as that of :class:`Transaction` with the
   addition of the :meth:`TwoPhaseTransaction.prepare` method.

   .. attribute:: xid

      A readonly property that returns the twophase transaction id.

   .. method:: prepare()

      Prepare this :class:`TwoPhaseTransaction`.

      This method is a :ref:`coroutine `.

      After a `PREPARE`, the transaction can be committed.

   .. seealso:: :term:`MySQL` commands for two-phase transactions:
      https://dev.mysql.com/doc/refman/8.4/en/xa-statements.html

aiomysql-0.3.2/docs/tutorial.rst000066400000000000000000000115611507601712200167040ustar00rootroot00000000000000.. _aiomysql-tutorial:

Tutorial
========

Python database access modules all have similar interfaces, described
by the :term:`DBAPI`.
Most relational databases use the same synchronous interface;
*aiomysql* tries to provide the same API, you just need to use
``await conn.f()`` instead of plain ``conn.f()`` for every method.

Installation
------------

.. code::

   pip3 install aiomysql

.. note:: :mod:`aiomysql` requires the :term:`PyMySQL` library.


Getting Started
---------------

Let's start with a basic example::

    import asyncio
    import aiomysql

    loop = asyncio.get_event_loop()


    async def test_example():
        conn = await aiomysql.connect(host='127.0.0.1', port=3306,
                                      user='root', password='',
                                      db='mysql', loop=loop)

        cur = await conn.cursor()
        await cur.execute("SELECT Host,User FROM user")
        print(cur.description)
        r = await cur.fetchall()
        print(r)
        await cur.close()
        conn.close()


    loop.run_until_complete(test_example())

The connection is established by invoking the :func:`connect()`
coroutine; the arguments are keyword arguments, almost the same as in
the corresponding :term:`PyMySQL` method. The example connects to a
:term:`MySQL` server on the local host to access the `mysql` database
with user name `root` and an empty password.

If the :func:`connect()` coroutine succeeds, it returns a
:class:`Connection` instance as the basis for further interaction with
:term:`MySQL`.

After the connection object has been obtained, the code in the example
invokes the :meth:`Connection.cursor()` coroutine method to create a
cursor object for processing statements. The example uses the cursor to
issue a ``SELECT Host,User FROM user;`` statement, which returns a list
of `host` and `user` values from the :term:`MySQL` system table
``user``::

    cur = await conn.cursor()
    await cur.execute("SELECT Host,User FROM user")
    print(cur.description)
    r = await cur.fetchall()

The cursor object's :meth:`Cursor.execute()` method sends the query to
the server and :meth:`Cursor.fetchall()` retrieves rows.

Finally, the script invokes the :meth:`Cursor.close()` coroutine and the
connection object's :meth:`Connection.close()` method to disconnect
from the server::

    await cur.close()
    conn.close()

After that, ``conn`` becomes invalid and should not be used to access
the server.

Inserting Data
--------------

Let's take a basic example of the :meth:`Cursor.execute` method::

    import asyncio
    import aiomysql


    async def test_example_execute(loop):
        conn = await aiomysql.connect(host='127.0.0.1', port=3306,
                                      user='root', password='',
                                      db='test_pymysql', loop=loop)

        async with conn.cursor() as cur:
            await cur.execute("DROP TABLE IF EXISTS music_style;")
            await cur.execute("""CREATE TABLE music_style
                                  (id INT,
                                  name VARCHAR(255),
                                  PRIMARY KEY (id));""")
            await conn.commit()

            # insert 3 rows one by one
            await cur.execute("INSERT INTO music_style VALUES(1,'heavy metal')")
            await cur.execute("INSERT INTO music_style VALUES(2,'death metal');")
            await cur.execute("INSERT INTO music_style VALUES(3,'power metal');")
            await conn.commit()
        conn.close()


    loop = asyncio.get_event_loop()
    loop.run_until_complete(test_example_execute(loop))

Please note that you need to manually call :func:`commit()` on your
:class:`Connection` object, because autocommit is set to ``False`` by
default; alternatively, you can pass the additional keyword argument
``autocommit=True`` to :meth:`aiomysql.connect()`.
Example with ``autocommit=True``:: import asyncio import aiomysql async def test_example_execute(loop): conn = await aiomysql.connect(host='127.0.0.1', port=3306, user='root', password='', db='test_pymysql', loop=loop, autocommit=True) cur = await conn.cursor() async with conn.cursor() as cur: await cur.execute("DROP TABLE IF EXISTS music_style;") await cur.execute("""CREATE TABLE music_style (id INT, name VARCHAR(255), PRIMARY KEY (id));""") # insert 3 rows one by one await cur.execute("INSERT INTO music_style VALUES(1,'heavy metal')") await cur.execute("INSERT INTO music_style VALUES(2,'death metal');") await cur.execute("INSERT INTO music_style VALUES(3,'power metal');") conn.close() loop = asyncio.get_event_loop() loop.run_until_complete(test_example_execute(loop)) aiomysql-0.3.2/examples/000077500000000000000000000000001507601712200151715ustar00rootroot00000000000000aiomysql-0.3.2/examples/example.py000066400000000000000000000010011507601712200171660ustar00rootroot00000000000000import asyncio import aiomysql async def test_example(loop): conn = await aiomysql.connect(host='127.0.0.1', port=3306, user='root', password='', db='mysql', loop=loop) async with conn.cursor() as cur: await cur.execute("SELECT Host,User FROM user") print(cur.description) r = await cur.fetchall() print(r) conn.close() loop = asyncio.get_event_loop() loop.run_until_complete(test_example(loop)) aiomysql-0.3.2/examples/example_callproc.py000066400000000000000000000013531507601712200210570ustar00rootroot00000000000000import asyncio import aiomysql async def test_example(loop): conn = await aiomysql.connect(host='127.0.0.1', port=3306, user='root', password='', db='test_pymysql', loop=loop) async with conn.cursor() as cur: await cur.execute('DROP PROCEDURE IF EXISTS myinc;') await cur.execute("""CREATE PROCEDURE myinc(p1 INT) BEGIN SELECT p1 + 1; END""") await cur.callproc('myinc', [1]) (ret, ) = await cur.fetchone() assert 2, ret print(ret) conn.close() loop = asyncio.get_event_loop() loop.run_until_complete(test_example(loop)) aiomysql-0.3.2/examples/example_callproc_oldstyle.py000066400000000000000000000014541507601712200230000ustar00rootroot00000000000000import asyncio import aiomysql loop = asyncio.get_event_loop() @asyncio.coroutine def test_example(): conn = yield from aiomysql.connect(host='127.0.0.1', port=3306, user='root', password='', db='test_pymysql', loop=loop) cur = yield from conn.cursor() yield from cur.execute("DROP PROCEDURE IF EXISTS myinc;") yield from cur.execute("""CREATE PROCEDURE myinc(p1 INT) BEGIN SELECT p1 + 1; END """) yield from cur.callproc('myinc', [1]) (ret, ) = yield from cur.fetchone() assert 2, ret print(ret) yield from cur.close() conn.close() loop.run_until_complete(test_example()) aiomysql-0.3.2/examples/example_cursors.py000066400000000000000000000012121507601712200207520ustar00rootroot00000000000000import asyncio import aiomysql async def test_example(loop): conn = await aiomysql.connect(host='127.0.0.1', port=3306, user='root', password='', db='mysql', loop=loop) sql = "SELECT 1 `id`, JSON_OBJECT('key1', 1, 'key2', 'abc') obj" async with conn.cursor(aiomysql.cursors.DeserializationCursor, aiomysql.cursors.DictCursor) as cur: await cur.execute(sql) print(cur.description) r = await cur.fetchall() print(r) conn.close() loop = asyncio.get_event_loop() loop.run_until_complete(test_example(loop)) aiomysql-0.3.2/examples/example_executemany.py000066400000000000000000000027361507601712200216150ustar00rootroot00000000000000import asyncio import aiomysql async def 
test_example_executemany(loop): conn = await aiomysql.connect(host='127.0.0.1', port=3306, user='root', password='', db='test_pymysql', loop=loop) cur = await conn.cursor() async with conn.cursor() as cur: await cur.execute("DROP TABLE IF EXISTS music_style;") await cur.execute("""CREATE TABLE music_style (id INT, name VARCHAR(255), PRIMARY KEY (id));""") await conn.commit() # insert 3 rows one by one await cur.execute("INSERT INTO music_style VALUES(1,'heavy metal')") await cur.execute("INSERT INTO music_style VALUES(2,'death metal');") await cur.execute("INSERT INTO music_style VALUES(3,'power metal');") await conn.commit() # insert 3 row by one long query using *executemany* method data = [(4, 'gothic metal'), (5, 'doom metal'), (6, 'post metal')] await cur.executemany( "INSERT INTO music_style (id, name)" "values (%s,%s)", data) await conn.commit() # fetch all insert row from table music_style await cur.execute("SELECT * FROM music_style;") result = await cur.fetchall() print(result) conn.close() loop = asyncio.get_event_loop() loop.run_until_complete(test_example_executemany(loop)) aiomysql-0.3.2/examples/example_executemany_oldstyle.py000066400000000000000000000027101507601712200235240ustar00rootroot00000000000000import asyncio import aiomysql loop = asyncio.get_event_loop() @asyncio.coroutine def test_example_executemany(): conn = yield from aiomysql.connect(host='127.0.0.1', port=3306, user='root', password='', db='test_pymysql', loop=loop) cur = yield from conn.cursor() yield from cur.execute("DROP TABLE IF EXISTS music_style;") yield from cur.execute("""CREATE TABLE music_style (id INT, name VARCHAR(255), PRIMARY KEY (id));""") yield from conn.commit() # insert 3 rows one by one yield from cur.execute("INSERT INTO music_style VALUES(1,'heavy metal')") yield from cur.execute("INSERT INTO music_style VALUES(2,'death metal');") yield from cur.execute("INSERT INTO music_style VALUES(3,'power metal');") yield from conn.commit() # insert 3 row by one long query using *executemane* method data = [(4, 'gothic metal'), (5, 'doom metal'), (6, 'post metal')] yield from cur.executemany( "INSERT INTO music_style (id, name)" "values (%s,%s)", data) yield from conn.commit() # fetch all insert row from table music_style yield from cur.execute("SELECT * FROM music_style;") result = yield from cur.fetchall() print(result) yield from cur.close() conn.close() loop.run_until_complete(test_example_executemany()) aiomysql-0.3.2/examples/example_oldstyle.py000066400000000000000000000010221507601712200211100ustar00rootroot00000000000000import asyncio import aiomysql loop = asyncio.get_event_loop() @asyncio.coroutine def test_example(): conn = yield from aiomysql.connect( host='127.0.0.1', port=3306, user='root', password='', db='mysql', loop=loop ) cur = yield from conn.cursor() yield from cur.execute("SELECT Host,User FROM user") print(cur.description) r = yield from cur.fetchall() print(r) yield from cur.close() conn.close() loop.run_until_complete(test_example()) aiomysql-0.3.2/examples/example_pool.py000066400000000000000000000011351507601712200202270ustar00rootroot00000000000000import asyncio import aiomysql async def test_example(loop): pool = await aiomysql.create_pool(host='127.0.0.1', port=3306, user='root', password='', db='mysql', loop=loop) async with pool.acquire() as conn: async with conn.cursor() as cur: await cur.execute("SELECT 42;") print(cur.description) (r,) = await cur.fetchone() assert r == 42 pool.close() await pool.wait_closed() loop = asyncio.get_event_loop() 
loop.run_until_complete(test_example(loop)) aiomysql-0.3.2/examples/example_pool_oldstyle.py000066400000000000000000000011161507601712200221450ustar00rootroot00000000000000import asyncio import aiomysql loop = asyncio.get_event_loop() @asyncio.coroutine def test_example(): pool = yield from aiomysql.create_pool( host='127.0.0.1', port=3306, user='root', password='', db='mysql', loop=loop ) with (yield from pool) as conn: cur = yield from conn.cursor() yield from cur.execute("SELECT 10") # print(cur.description) (r,) = yield from cur.fetchone() assert r == 10 pool.close() yield from pool.wait_closed() loop.run_until_complete(test_example()) aiomysql-0.3.2/examples/example_simple_sa.py000066400000000000000000000021531507601712200212330ustar00rootroot00000000000000import asyncio import sqlalchemy as sa from aiomysql.sa import create_engine metadata = sa.MetaData() tbl = sa.Table('tbl', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('val', sa.String(255))) async def create_table(engine): async with engine.acquire() as conn: await conn.execute('DROP TABLE IF EXISTS tbl') await conn.execute('''CREATE TABLE tbl ( id serial PRIMARY KEY, val varchar(255))''') async def go(loop): engine = await create_engine(user='root', db='test_pymysql', host='127.0.0.1', password='', loop=loop) await create_table(engine) async with engine.acquire() as conn: await conn.execute(tbl.insert().values(val='abc')) await conn.execute(tbl.insert().values(val='xyz')) async for row in conn.execute(tbl.select()): print(row.id, row.val) await conn.execute("commit") engine.close() await engine.wait_closed() loop = asyncio.get_event_loop() loop.run_until_complete(go(loop)) aiomysql-0.3.2/examples/example_simple_sa_oldstyle.py000066400000000000000000000023041507601712200231500ustar00rootroot00000000000000import asyncio import sqlalchemy as sa from aiomysql.sa import create_engine metadata = sa.MetaData() tbl = sa.Table('tbl', metadata, sa.Column('id', sa.Integer, primary_key=True), sa.Column('val', sa.String(255))) @asyncio.coroutine def create_table(engine): with (yield from engine) as conn: yield from conn.execute('DROP TABLE IF EXISTS tbl') yield from conn.execute('''CREATE TABLE tbl ( id serial PRIMARY KEY, val varchar(255))''') @asyncio.coroutine def go(): engine = yield from create_engine(user='root', db='test_pymysql', host='127.0.0.1', password='') yield from create_table(engine) with (yield from engine) as conn: yield from conn.execute(tbl.insert().values(val='abc')) res = yield from conn.execute(tbl.select()) for row in res: print(row.id, row.val) yield from conn.commit() engine.close() yield from engine.wait_closed() asyncio.get_event_loop().run_until_complete(go()) aiomysql-0.3.2/examples/example_ssl.py000066400000000000000000000025071507601712200200630ustar00rootroot00000000000000import asyncio import ssl import aiomysql ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1) ctx.check_hostname = False ctx.load_verify_locations(cafile='../tests/ssl_resources/ssl/ca.pem') async def main(): async with aiomysql.create_pool( host='localhost', port=3306, user='root', password='rootpw', ssl=ctx, auth_plugin='mysql_clear_password') as pool: async with pool.acquire() as conn: async with conn.cursor() as cur: # Run simple command await cur.execute("SHOW DATABASES;") value = await cur.fetchall() values = [item[0] for item in value] # Spot check the answers, we should at least have mysql # and information_schema assert 'mysql' in values, \ 'Could not find the "mysql" table' assert 'information_schema' in values, \ 'Could 
not find the "mysql" table' # Check TLS variables await cur.execute("SHOW STATUS LIKE 'Ssl_version%';") value = await cur.fetchone() # The context has TLS assert value[1].startswith('TLS'), \ 'Not connected to the database with TLS' asyncio.get_event_loop().run_until_complete(main()) aiomysql-0.3.2/examples/example_transaction.py000066400000000000000000000033611507601712200216060ustar00rootroot00000000000000import asyncio import aiomysql async def test_example_transaction(loop): conn = await aiomysql.connect(host='127.0.0.1', port=3306, user='root', password='', db='test_pymysql', autocommit=False, loop=loop) async with conn.cursor() as cursor: stmt_drop = "DROP TABLE IF EXISTS names" await cursor.execute(stmt_drop) await cursor.execute(""" CREATE TABLE names ( id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT, name VARCHAR(30) DEFAULT '' NOT NULL, cnt TINYINT UNSIGNED DEFAULT 0, PRIMARY KEY (id))""") await conn.commit() # Insert 3 records names = (('Geert',), ('Jan',), ('Michel',)) stmt_insert = "INSERT INTO names (name) VALUES (%s)" await cursor.executemany(stmt_insert, names) # Roll back!!!! await conn.rollback() # There should be no data! stmt_select = "SELECT id, name FROM names ORDER BY id" await cursor.execute(stmt_select) resp = await cursor.fetchall() # Check there is no data assert not resp # Do the insert again. await cursor.executemany(stmt_insert, names) # Data should be already there await cursor.execute(stmt_select) resp = await cursor.fetchall() print(resp) # Do a commit await conn.commit() await cursor.execute(stmt_select) print(resp) # Cleaning up, dropping the table again await cursor.execute(stmt_drop) await cursor.close() conn.close() loop = asyncio.get_event_loop() loop.run_until_complete(test_example_transaction(loop)) aiomysql-0.3.2/examples/example_transaction_oldstyle.py000066400000000000000000000033041507601712200235220ustar00rootroot00000000000000import asyncio import aiomysql loop = asyncio.get_event_loop() @asyncio.coroutine def test_example_transaction(): conn = yield from aiomysql.connect(host='127.0.0.1', port=3306, user='root', password='', db='test_pymysql', autocommit=False, loop=loop) cursor = yield from conn.cursor() stmt_drop = "DROP TABLE IF EXISTS names" yield from cursor.execute(stmt_drop) yield from cursor.execute(""" CREATE TABLE names ( id TINYINT UNSIGNED NOT NULL AUTO_INCREMENT, name VARCHAR(30) DEFAULT '' NOT NULL, cnt TINYINT UNSIGNED DEFAULT 0, PRIMARY KEY (id))""") yield from conn.commit() # Insert 3 records names = (('Geert',), ('Jan',), ('Michel',)) stmt_insert = "INSERT INTO names (name) VALUES (%s)" yield from cursor.executemany(stmt_insert, names) # Roll back!!!! yield from conn.rollback() # There should be no data! stmt_select = "SELECT id, name FROM names ORDER BY id" yield from cursor.execute(stmt_select) resp = yield from cursor.fetchall() # Check there is no data assert not resp # Do the insert again. 
yield from cursor.executemany(stmt_insert, names) # Data should be already there yield from cursor.execute(stmt_select) resp = yield from cursor.fetchall() print(resp) # Do a commit yield from conn.commit() yield from cursor.execute(stmt_select) print(resp) # Cleaning up, dropping the table again yield from cursor.execute(stmt_drop) yield from cursor.close() conn.close() loop.run_until_complete(test_example_transaction()) aiomysql-0.3.2/pyproject.toml000066400000000000000000000035631507601712200162760ustar00rootroot00000000000000[project] name = "aiomysql" authors = [ {name = "Nikolay Novik", email = "nickolainovik@gmail.com"} ] description = "MySQL driver for asyncio." readme = "README.rst" requires-python = ">=3.9" keywords = ["mysql", "mariadb", "asyncio", "aiomysql"] license = "MIT" classifiers = [ "Intended Audience :: Developers", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Operating System :: POSIX", "Environment :: Web Environment", "Development Status :: 3 - Alpha", "Topic :: Database", "Topic :: Database :: Front-Ends", "Framework :: AsyncIO", ] dependencies = [ "PyMySQL>=1.0", ] dynamic = ["version"] [project.optional-dependencies] "sa" = ["sqlalchemy>=1.3,<1.4"] "rsa" = ["PyMySQL[rsa]>=1.0"] [project.urls] homepage = "https://github.com/aio-libs/aiomysql" source = "https://github.com/aio-libs/aiomysql" download = "https://pypi.python.org/pypi/aiomysql" changelog = "https://github.com/aio-libs/aiomysql/blob/main/CHANGES.txt" documentation = "https://aiomysql.readthedocs.io/" issues = "https://github.com/aio-libs/aiomysql/issues" "GitHub: CI" = "https://github.com/aio-libs/aiomysql/actions" "GitHub: discussions" = "https://github.com/aio-libs/aiomysql/discussions" [build-system] requires = [ # Essentials "setuptools >= 80", # Plugins # >= 9.2 is needed for consistent generation of hashes for non-release artifacts # https://github.com/pypa/setuptools-scm/pull/1158 "setuptools_scm[toml] >= 7, < 10", ] build-backend = "setuptools.build_meta" [tool.setuptools] platforms = ["POSIX"] [tool.setuptools.packages.find] exclude = ["tests", "tests.*"] [tool.setuptools_scm] write_to = "aiomysql/_scm_version.py" aiomysql-0.3.2/requirements-dev.txt000066400000000000000000000003411507601712200174110ustar00rootroot00000000000000coverage==7.10.7 # flake8 version is also specified in .github/workflows/ci-cd.yml flake8==7.3.0 ipdb==0.13.13 pytest==8.4.2 pytest-cov==7.0.0 pytest-sugar==1.1.1 PyMySQL==1.1.2 SQLAlchemy==1.3.24 uvloop==0.22.1 twine==6.2.0 aiomysql-0.3.2/requirements-docs.txt000066400000000000000000000000671507601712200175700ustar00rootroot00000000000000sphinx==8.2.3 furo==2025.9.25 sphinx-copybutton==0.5.2 aiomysql-0.3.2/tests/000077500000000000000000000000001507601712200145155ustar00rootroot00000000000000aiomysql-0.3.2/tests/__init__.py000066400000000000000000000000001507601712200166140ustar00rootroot00000000000000aiomysql-0.3.2/tests/conftest.py000066400000000000000000000244441507601712200167240ustar00rootroot00000000000000import asyncio import gc import os import re import ssl import aiomysql import pymysql import pytest import uvloop @pytest.fixture def disable_gc(): gc_enabled = gc.isenabled() if gc_enabled: gc.disable() gc.collect() yield if gc_enabled: gc.collect() gc.enable() def pytest_generate_tests(metafunc): if 'loop_type' in metafunc.fixturenames: loop_type = ['asyncio', 
'uvloop'] if uvloop else ['asyncio'] metafunc.parametrize("loop_type", loop_type) if "mysql_address" in metafunc.fixturenames: mysql_addresses = [] ids = [] opt_mysql_unix_socket = \ list(metafunc.config.getoption("mysql_unix_socket")) for i in range(len(opt_mysql_unix_socket)): if "=" in opt_mysql_unix_socket[i]: label, path = opt_mysql_unix_socket[i].split("=", 1) mysql_addresses.append(path) ids.append(label) else: mysql_addresses.append(opt_mysql_unix_socket[i]) ids.append(f"unix{i}") opt_mysql_address = list(metafunc.config.getoption("mysql_address")) for i in range(len(opt_mysql_address)): if "=" in opt_mysql_address[i]: label, addr = opt_mysql_address[i].split("=", 1) ids.append(label) else: addr = opt_mysql_address[i] ids.append(f"tcp{i}") if ":" in addr: addr = addr.split(":", 1) mysql_addresses.append((addr[0], int(addr[1]))) else: mysql_addresses.append((addr, 3306)) # default to connecting to localhost if len(mysql_addresses) == 0: mysql_addresses = [("127.0.0.1", 3306)] ids = ["tcp-local"] assert len(mysql_addresses) == len(set(mysql_addresses)), \ "mysql targets are not unique" assert len(ids) == len(set(ids)), \ "mysql target names are not unique" metafunc.parametrize("mysql_address", mysql_addresses, ids=ids, scope="session", ) @pytest.fixture def loop(request, loop_type): loop = asyncio.new_event_loop() asyncio.set_event_loop(None) if uvloop and loop_type == 'uvloop': loop = uvloop.new_event_loop() else: loop = asyncio.new_event_loop() yield loop if not loop._closed: loop.call_soon(loop.stop) loop.run_forever() loop.close() gc.collect() asyncio.set_event_loop(None) @pytest.mark.tryfirst def pytest_pycollect_makeitem(collector, name, obj): if collector.funcnamefilter(name): if not callable(obj): return item = pytest.Function.from_parent(collector, name=name) if 'run_loop' in item.keywords: return list(collector._genfunctions(name, obj)) @pytest.mark.tryfirst def pytest_pyfunc_call(pyfuncitem): """ Run asyncio marked test functions in an event loop instead of a normal function call. 
""" if 'run_loop' in pyfuncitem.keywords: funcargs = pyfuncitem.funcargs loop = funcargs['loop'] testargs = {arg: funcargs[arg] for arg in pyfuncitem._fixtureinfo.argnames} loop.run_until_complete(pyfuncitem.obj(**testargs)) return True def pytest_runtest_setup(item): if 'run_loop' in item.keywords and 'loop' not in item.fixturenames: # inject an event loop fixture for all async tests item.fixturenames.append('loop') def pytest_configure(config): config.addinivalue_line( "markers", "run_loop" ) config.addinivalue_line( "markers", "mysql_version(db, version): run only on specific database versions" ) def pytest_addoption(parser): parser.addoption( "--mysql-address", action="append", default=[], help="list of addresses to connect to: [name=]host[:port]", ) parser.addoption( "--mysql-unix-socket", action="append", default=[], help="list of unix sockets to connect to: [name=]/path/to/socket", ) @pytest.fixture def mysql_params(mysql_server): params = {**mysql_server['conn_params'], "db": os.environ.get('MYSQL_DB', 'test_pymysql'), "local_infile": True, "use_unicode": True, } return params # TODO: fix this workaround async def _cursor_wrapper(conn): return await conn.cursor() @pytest.fixture def cursor(connection, loop): cur = loop.run_until_complete(_cursor_wrapper(connection)) yield cur loop.run_until_complete(cur.close()) @pytest.fixture def connection(mysql_params, loop): coro = aiomysql.connect(loop=loop, **mysql_params) conn = loop.run_until_complete(coro) yield conn loop.run_until_complete(conn.ensure_closed()) @pytest.fixture def connection_creator(mysql_params, loop): connections = [] async def f(**kw): conn_kw = mysql_params.copy() conn_kw.update(kw) _loop = conn_kw.pop('loop', loop) conn = await aiomysql.connect(loop=_loop, **conn_kw) connections.append(conn) return conn yield f for conn in connections: try: loop.run_until_complete(conn.ensure_closed()) except ConnectionResetError: pass @pytest.fixture def pool_creator(mysql_params, loop): pools = [] async def f(**kw): conn_kw = mysql_params.copy() conn_kw.update(kw) _loop = conn_kw.pop('loop', loop) pool = await aiomysql.create_pool(loop=_loop, **conn_kw) pools.append(pool) return pool yield f for pool in pools: pool.close() loop.run_until_complete(pool.wait_closed()) @pytest.fixture def table_cleanup(loop, connection): table_list = [] cursor = loop.run_until_complete(_cursor_wrapper(connection)) def _register_table(table_name): table_list.append(table_name) yield _register_table for t in table_list: # TODO: probably this is not safe code sql = f"DROP TABLE IF EXISTS {t}" loop.run_until_complete(cursor.execute(sql)) @pytest.fixture(scope='session') def mysql_server(mysql_address): unix_socket = type(mysql_address) is str if not unix_socket: ssl_directory = os.path.join(os.path.dirname(__file__), 'ssl_resources', 'ssl') ca_file = os.path.join(ssl_directory, 'ca.pem') ctx = ssl.SSLContext(ssl.PROTOCOL_TLSv1_2) ctx.check_hostname = False ctx.load_verify_locations(cafile=ca_file) # ctx.verify_mode = ssl.CERT_NONE server_params = { 'user': 'root', 'password': os.environ.get("MYSQL_ROOT_PASSWORD"), } if unix_socket: server_params["unix_socket"] = mysql_address else: server_params["host"] = mysql_address[0] server_params["port"] = mysql_address[1] server_params["ssl"] = ctx try: connection = pymysql.connect( db='mysql', charset='utf8mb4', cursorclass=pymysql.cursors.DictCursor, **server_params) with connection.cursor() as cursor: cursor.execute("SELECT VERSION() AS version") server_version = cursor.fetchone()["version"] db_type = "mariadb" 
if "MariaDB" in server_version else "mysql" server_version_tuple = tuple( (int(dig) if dig is not None else 0) for dig in re.match(r"^(\d+)\.(\d+)(?:\.(\d+))?", server_version).group(1, 2, 3) ) server_version_tuple_short = (server_version_tuple[0], server_version_tuple[1]) if not unix_socket: cursor.execute("SHOW STATUS LIKE 'Ssl_version%'") result = cursor.fetchone() # As we connected with TLS, it should start with that :D assert result['Value'].startswith('TLS'), \ "Not connected to the database with TLS" # Drop possibly existing old databases cursor.execute('DROP DATABASE IF EXISTS test_pymysql;') cursor.execute('DROP DATABASE IF EXISTS test_pymysql2;') # Create Databases cursor.execute('CREATE DATABASE test_pymysql ' 'DEFAULT CHARACTER SET utf8 ' 'DEFAULT COLLATE utf8_general_ci;') cursor.execute('CREATE DATABASE test_pymysql2 ' 'DEFAULT CHARACTER SET utf8 ' 'DEFAULT COLLATE utf8_general_ci;') # Do MySQL8+ Specific Setup if db_type == "mysql" and server_version_tuple_short == (8, 0): # Drop existing users cursor.execute('DROP USER IF EXISTS user_sha256;') cursor.execute('DROP USER IF EXISTS nopass_sha256;') cursor.execute('DROP USER IF EXISTS user_caching_sha2;') cursor.execute('DROP USER IF EXISTS nopass_caching_sha2;') # Create Users to test SHA256 cursor.execute('CREATE USER user_sha256 ' 'IDENTIFIED WITH "sha256_password" ' 'BY "pass_sha256"') cursor.execute('CREATE USER nopass_sha256 ' 'IDENTIFIED WITH "sha256_password"') cursor.execute('CREATE USER user_caching_sha2 ' 'IDENTIFIED ' 'WITH "caching_sha2_password" ' 'BY "pass_caching_sha2"') cursor.execute('CREATE USER nopass_caching_sha2 ' 'IDENTIFIED ' 'WITH "caching_sha2_password" ' 'PASSWORD EXPIRE NEVER') cursor.execute('FLUSH PRIVILEGES') connection.close() except Exception: pytest.fail("Cannot initialize MySQL environment") return { "conn_params": server_params, "server_version": server_version, "server_version_tuple": server_version_tuple, "server_version_tuple_short": server_version_tuple_short, "db_type": db_type, } aiomysql-0.3.2/tests/fixtures/000077500000000000000000000000001507601712200163665ustar00rootroot00000000000000aiomysql-0.3.2/tests/fixtures/load_local_data.txt000066400000000000000000003361311507601712200222200ustar00rootroot000000000000001,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 71,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 
1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 
5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 
3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 
1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 71,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 
7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 
3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 
5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 
3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 
1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 71,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 
1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 
5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 
3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 
1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, aiomysql-0.3.2/tests/fixtures/load_local_warn_data.txt000066400000000000000000000003711507601712200232410ustar00rootroot000000000000001,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, ,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, 5,6, 7,8, 1,2, 3,4, aiomysql-0.3.2/tests/fixtures/my.cnf.tcp.tmpl000066400000000000000000000004621507601712200212450ustar00rootroot00000000000000# # The MySQL database server configuration file. # [client] user = {user} port = {port} host = {host} password = {password} database = {db} default-character-set = utf8 [client_with_unix_socket] user = {user} port = {port} host = {host} password = {password} database = {db} default-character-set = utf8 aiomysql-0.3.2/tests/fixtures/my.cnf.unix.tmpl000066400000000000000000000004501507601712200214370ustar00rootroot00000000000000# # The MySQL database server configuration file. 
# [client] user = {user} socket = {unix_socket} password = {password} database = {db} default-character-set = utf8 [client_with_unix_socket] user = {user} socket = {unix_socket} password = {password} database = {db} default-character-set = utf8 aiomysql-0.3.2/tests/sa/000077500000000000000000000000001507601712200151205ustar00rootroot00000000000000aiomysql-0.3.2/tests/sa/__init__.py000066400000000000000000000000001507601712200172170ustar00rootroot00000000000000aiomysql-0.3.2/tests/sa/test_sa_compiled_cache.py000066400000000000000000000106551507601712200221420ustar00rootroot00000000000000import pytest from sqlalchemy import bindparam from sqlalchemy import MetaData, Table, Column, Integer, String from aiomysql import sa meta = MetaData() tbl = Table('sa_tbl_cache_test', meta, Column('id', Integer, nullable=False, primary_key=True), Column('val', String(255))) @pytest.fixture() def make_engine(connection, mysql_params, loop): engines = [] async def _make_engine(**kwargs): if "unix_socket" in mysql_params: conn_args = {"unix_socket": mysql_params["unix_socket"]} else: conn_args = { "host": mysql_params['host'], "port": mysql_params['port'], } if "ssl" in mysql_params: conn_args["ssl"] = mysql_params["ssl"] engine = await sa.create_engine( db=mysql_params['db'], user=mysql_params['user'], password=mysql_params['password'], minsize=10, **conn_args, **kwargs, ) engines.append(engine) return engine yield _make_engine for engine in engines: engine.terminate() loop.run_until_complete(engine.wait_closed()) async def start(engine): async with engine.acquire() as conn: tx = await conn.begin() await conn.execute("DROP TABLE IF EXISTS " "sa_tbl_cache_test") await conn.execute("CREATE TABLE sa_tbl_cache_test" "(id serial, val varchar(255))") await conn.execute(tbl.insert().values(val='some_val_1')) await conn.execute(tbl.insert().values(val='some_val_2')) await conn.execute(tbl.insert().values(val='some_val_3')) await tx.commit() @pytest.mark.run_loop async def test_dialect(make_engine): cache = dict() engine = await make_engine(compiled_cache=cache) await start(engine) async with engine.acquire() as conn: # check select with params not added to cache q = tbl.select().where(tbl.c.val == 'some_val_1') cursor = await conn.execute(q) row = await cursor.fetchone() assert 'some_val_1' == row.val assert 0 == len(cache) # check select with bound params added to cache select_by_val = tbl.select().where( tbl.c.val == bindparam('value') ) cursor = await conn.execute( select_by_val, {'value': 'some_val_3'} ) row = await cursor.fetchone() assert 'some_val_3' == row.val assert 1 == len(cache) cursor = await conn.execute( select_by_val, value='some_val_2' ) row = await cursor.fetchone() assert 'some_val_2' == row.val assert 1 == len(cache) select_all = tbl.select() cursor = await conn.execute(select_all) rows = await cursor.fetchall() assert 3 == len(rows) assert 2 == len(cache) # check insert with bound params not added to cache await conn.execute(tbl.insert().values(val='some_val_4')) assert 2 == len(cache) # check insert with bound params added to cache q = tbl.insert().values(val=bindparam('value')) await conn.execute(q, value='some_val_5') assert 3 == len(cache) await conn.execute(q, value='some_val_6') assert 3 == len(cache) await conn.execute(q, {'value': 'some_val_7'}) assert 3 == len(cache) cursor = await conn.execute(select_all) rows = await cursor.fetchall() assert 7 == len(rows) assert 3 == len(cache) # check update with params not added to cache q = tbl.update().where( tbl.c.val == 'some_val_1' 
).values(val='updated_val_1') await conn.execute(q) assert 3 == len(cache) cursor = await conn.execute( select_by_val, value='updated_val_1' ) row = await cursor.fetchone() assert 'updated_val_1' == row.val # check update with bound params added to cache q = tbl.update().where( tbl.c.val == bindparam('value') ).values(val=bindparam('update')) await conn.execute( q, value='some_val_2', update='updated_val_2' ) assert 4 == len(cache) cursor = await conn.execute( select_by_val, value='updated_val_2' ) row = await cursor.fetchone() assert 'updated_val_2' == row.val aiomysql-0.3.2/tests/sa/test_sa_connection.py000066400000000000000000000322351507601712200213600ustar00rootroot00000000000000from unittest import mock import pytest from sqlalchemy import MetaData, Table, Column, Integer, String, func, select from sqlalchemy.schema import DropTable, CreateTable from sqlalchemy.sql.expression import bindparam import aiomysql from aiomysql import sa, Cursor meta = MetaData() tbl = Table('sa_tbl', meta, Column('id', Integer, nullable=False, primary_key=True), Column('name', String(255))) @pytest.fixture() def sa_connect(connection_creator): async def connect(**kwargs): conn = await connection_creator(**kwargs) await conn.autocommit(True) cur = await conn.cursor() await cur.execute("DROP TABLE IF EXISTS sa_tbl") await cur.execute("CREATE TABLE sa_tbl " "(id serial, name varchar(255))") await cur.execute("INSERT INTO sa_tbl (name)" "VALUES ('first')") await cur._connection.commit() # yield from cur.close() engine = mock.Mock() engine.dialect = sa.engine._dialect return sa.SAConnection(conn, engine) return connect @pytest.mark.run_loop async def test_execute_text_select(sa_connect): conn = await sa_connect() res = await conn.execute("SELECT * FROM sa_tbl;") assert isinstance(res.cursor, Cursor) assert ('id', 'name') == res.keys() rows = await res.fetchall() assert res.closed assert res.cursor is None assert 1 == len(rows) row = rows[0] assert 1 == row[0] assert 1 == row['id'] assert 1 == row.id assert 'first' == row[1] assert 'first' == row['name'] assert 'first' == row.name # TODO: fix this await conn._connection.commit() @pytest.mark.run_loop async def test_execute_sa_select(sa_connect): conn = await sa_connect() res = await conn.execute(tbl.select()) assert isinstance(res.cursor, Cursor) assert ('id', 'name') == res.keys() rows = await res.fetchall() assert res.closed assert res.cursor is None assert res.returns_rows assert 1 == len(rows) row = rows[0] assert 1 == row[0] assert 1 == row['id'] assert 1 == row.id assert 'first' == row[1] assert 'first' == row['name'] assert 'first' == row.name # TODO: fix this await conn._connection.commit() @pytest.mark.run_loop async def test_execute_sa_insert_with_dict(sa_connect): conn = await sa_connect() await conn.execute(tbl.insert(), {"id": 2, "name": "second"}) res = await conn.execute(tbl.select()) rows = await res.fetchall() assert 2 == len(rows) assert (1, 'first') == rows[0] assert (2, 'second') == rows[1] @pytest.mark.run_loop async def test_execute_sa_insert_with_tuple(sa_connect): conn = await sa_connect() await conn.execute(tbl.insert(), (2, "second")) res = await conn.execute(tbl.select()) rows = await res.fetchall() assert 2 == len(rows) assert (1, 'first') == rows[0] assert (2, 'second') == rows[1] @pytest.mark.run_loop async def test_execute_sa_insert_named_params(sa_connect): conn = await sa_connect() await conn.execute(tbl.insert(), id=2, name="second") res = await conn.execute(tbl.select()) rows = await res.fetchall() assert 2 == len(rows) assert 
(1, 'first') == rows[0] assert (2, 'second') == rows[1] @pytest.mark.run_loop async def test_execute_sa_insert_positional_params(sa_connect): conn = await sa_connect() await conn.execute(tbl.insert(), 2, "second") res = await conn.execute(tbl.select()) rows = await res.fetchall() assert 2 == len(rows) assert (1, 'first') == rows[0] assert (2, 'second') == rows[1] @pytest.mark.run_loop async def test_scalar(sa_connect): conn = await sa_connect() res = await conn.scalar(select([func.count()]).select_from(tbl)) assert 1 == res @pytest.mark.run_loop async def test_scalar_None(sa_connect): conn = await sa_connect() await conn.execute(tbl.delete()) res = await conn.scalar(tbl.select()) assert res is None # TODO: fix this await conn._connection.commit() @pytest.mark.run_loop async def test_row_proxy(sa_connect): conn = await sa_connect() res = await conn.execute(tbl.select()) rows = await res.fetchall() row = rows[0] row2 = await (await conn.execute(tbl.select())).first() assert 2 == len(row) assert ['id', 'name'] == list(row) assert 'id' in row assert 'unknown' not in row assert 'first' == row.name assert 'first' == row[tbl.c.name] with pytest.raises(AttributeError): row.unknown assert "(1, 'first')" == repr(row) assert (1, 'first') == row.as_tuple() assert (555, 'other') != row.as_tuple() assert row2 == row assert 5 != row # TODO: fix this await conn._connection.commit() @pytest.mark.run_loop async def test_insert(sa_connect): conn = await sa_connect() res = await conn.execute(tbl.insert().values(name='second')) assert 1 == res.rowcount assert 2 == res.lastrowid @pytest.mark.run_loop async def test_raw_insert(sa_connect): conn = await sa_connect() await conn.execute( "INSERT INTO sa_tbl (name) VALUES ('third')") res = await conn.execute(tbl.select()) assert 2 == res.rowcount assert ('id', 'name') == res.keys() assert res.returns_rows rows = await res.fetchall() assert 2 == len(rows) assert 2 == rows[1].id @pytest.mark.run_loop async def test_raw_insert_with_params(sa_connect): conn = await sa_connect() res = await conn.execute( "INSERT INTO sa_tbl (id, name) VALUES (%s, %s)", 2, 'third') res = await conn.execute(tbl.select()) assert 2 == res.rowcount assert ('id', 'name') == res.keys() assert res.returns_rows rows = await res.fetchall() assert 2 == len(rows) assert 2 == rows[1].id @pytest.mark.run_loop async def test_raw_insert_with_params_dict(sa_connect): conn = await sa_connect() res = await conn.execute( "INSERT INTO sa_tbl (id, name) VALUES (%(id)s, %(name)s)", {'id': 2, 'name': 'third'}) res = await conn.execute(tbl.select()) assert 2 == res.rowcount assert ('id', 'name') == res.keys() assert res.returns_rows rows = await res.fetchall() assert 2 == len(rows) assert 2 == rows[1].id @pytest.mark.run_loop async def test_raw_insert_with_named_params(sa_connect): conn = await sa_connect() res = await conn.execute( "INSERT INTO sa_tbl (id, name) VALUES (%(id)s, %(name)s)", id=2, name='third') res = await conn.execute(tbl.select()) assert 2 == res.rowcount assert ('id', 'name') == res.keys() assert res.returns_rows rows = await res.fetchall() assert 2 == len(rows) assert 2 == rows[1].id @pytest.mark.run_loop async def test_raw_insert_with_executemany(sa_connect): conn = await sa_connect() # with pytest.raises(sa.ArgumentError): await conn.execute( "INSERT INTO sa_tbl (id, name) VALUES (%(id)s, %(name)s)", [{"id": 2, "name": 'third'}, {"id": 3, "name": 'forth'}]) await conn.execute( tbl.update().where( tbl.c.id == bindparam("id") ).values( {"name": bindparam("name")} ), [ {"id": 2, "name": "t2"}, 
{"id": 3, "name": "t3"} ] ) with pytest.raises(sa.ArgumentError): await conn.execute( DropTable(tbl), [{}, {}] ) with pytest.raises(sa.ArgumentError): await conn.execute( {}, [{}, {}] ) @pytest.mark.run_loop async def test_raw_select_with_wildcard(sa_connect): conn = await sa_connect() await conn.execute( 'SELECT * FROM sa_tbl WHERE name LIKE "%test%"') @pytest.mark.run_loop async def test_delete(sa_connect): conn = await sa_connect() res = await conn.execute(tbl.delete().where(tbl.c.id == 1)) assert () == res.keys() assert 1 == res.rowcount assert not res.returns_rows assert res.closed assert res.cursor is None @pytest.mark.run_loop async def test_double_close(sa_connect): conn = await sa_connect() res = await conn.execute("SELECT 1") await res.close() assert res.closed assert res.cursor is None await res.close() assert res.closed assert res.cursor is None @pytest.mark.run_loop @pytest.mark.skip("Find out how to close cursor on __del__ method") async def test_weakrefs(sa_connect): conn = await sa_connect() assert 0 == len(conn._weak_results) res = await conn.execute("SELECT 1") assert 1 == len(conn._weak_results) cur = res.cursor assert not cur.closed # TODO: fix this, how close cursor if result was deleted # yield from cur.close() del res assert cur.closed assert 0 == len(conn._weak_results) @pytest.mark.run_loop async def test_fetchall(sa_connect): conn = await sa_connect() await conn.execute(tbl.insert().values(name='second')) res = await conn.execute(tbl.select()) rows = await res.fetchall() assert 2 == len(rows) assert res.closed assert res.returns_rows assert [(1, 'first'), (2, 'second')] == rows @pytest.mark.run_loop async def test_fetchall_closed(sa_connect): conn = await sa_connect() await conn.execute(tbl.insert().values(name='second')) res = await conn.execute(tbl.select()) await res.close() with pytest.raises(sa.ResourceClosedError): await res.fetchall() @pytest.mark.run_loop async def test_fetchall_not_returns_rows(sa_connect): conn = await sa_connect() res = await conn.execute(tbl.delete()) with pytest.raises(sa.ResourceClosedError): await res.fetchall() @pytest.mark.run_loop async def test_fetchone_closed(sa_connect): conn = await sa_connect() await conn.execute(tbl.insert().values(name='second')) res = await conn.execute(tbl.select()) await res.close() with pytest.raises(sa.ResourceClosedError): await res.fetchone() @pytest.mark.run_loop async def test_first_not_returns_rows(sa_connect): conn = await sa_connect() res = await conn.execute(tbl.delete()) with pytest.raises(sa.ResourceClosedError): await res.first() @pytest.mark.run_loop async def test_fetchmany(sa_connect): conn = await sa_connect() await conn.execute(tbl.insert().values(name='second')) res = await conn.execute(tbl.select()) rows = await res.fetchmany() assert 1 == len(rows) assert not res.closed assert res.returns_rows assert [(1, 'first')] == rows @pytest.mark.run_loop async def test_fetchmany_with_size(sa_connect): conn = await sa_connect() await conn.execute(tbl.insert().values(name='second')) res = await conn.execute(tbl.select()) rows = await res.fetchmany(100) assert 2 == len(rows) assert not res.closed assert res.returns_rows assert [(1, 'first'), (2, 'second')] == rows @pytest.mark.run_loop async def test_fetchmany_closed(sa_connect): conn = await sa_connect() await conn.execute(tbl.insert().values(name='second')) res = await conn.execute(tbl.select()) await res.close() with pytest.raises(sa.ResourceClosedError): await res.fetchmany() @pytest.mark.run_loop async def 
test_fetchmany_with_size_closed(sa_connect): conn = await sa_connect() await conn.execute(tbl.insert().values(name='second')) res = await conn.execute(tbl.select()) await res.close() with pytest.raises(sa.ResourceClosedError): await res.fetchmany(5555) @pytest.mark.run_loop async def test_fetchmany_not_returns_rows(sa_connect): conn = await sa_connect() res = await conn.execute(tbl.delete()) with pytest.raises(sa.ResourceClosedError): await res.fetchmany() @pytest.mark.run_loop async def test_fetchmany_close_after_last_read(sa_connect): conn = await sa_connect() res = await conn.execute(tbl.select()) rows = await res.fetchmany() assert 1 == len(rows) assert not res.closed assert res.returns_rows assert [(1, 'first')] == rows rows2 = await res.fetchmany() assert 0 == len(rows2) assert res.closed @pytest.mark.run_loop async def test_create_table(sa_connect): conn = await sa_connect() res = await conn.execute(DropTable(tbl)) with pytest.raises(sa.ResourceClosedError): await res.fetchmany() with pytest.raises(aiomysql.ProgrammingError): await conn.execute("SELECT * FROM sa_tbl") res = await conn.execute(CreateTable(tbl)) with pytest.raises(sa.ResourceClosedError): await res.fetchmany() res = await conn.execute("SELECT * FROM sa_tbl") assert 0 == len(await res.fetchall()) @pytest.mark.run_loop async def test_async_iter(sa_connect): conn = await sa_connect() await conn.execute(tbl.insert().values(name="second")) ret = [] async for row in conn.execute(tbl.select()): ret.append(row) assert [(1, "first"), (2, "second")] == ret @pytest.mark.run_loop async def test_statement_in(sa_connect): conn = await sa_connect() await conn.execute(tbl.insert().values(name="second")) await conn.execute(tbl.insert().values(name="third")) stmt = tbl.select().where(tbl.c.id.in_([1, 2])) ret = [] async for row in conn.execute(stmt): ret.append(row) assert [(1, "first"), (2, "second")] == ret aiomysql-0.3.2/tests/sa/test_sa_default.py000066400000000000000000000077431507601712200206530ustar00rootroot00000000000000import datetime import pytest from sqlalchemy import MetaData, Table, Column, Integer, String from sqlalchemy import func, DateTime, Boolean from aiomysql import sa meta = MetaData() table = Table('sa_tbl_default_test', meta, Column('id', Integer, nullable=False, primary_key=True), Column('string_length', Integer, default=func.length('qwerty')), Column('number', Integer, default=100, nullable=False), Column('description', String(255), nullable=False, default='default test'), Column('created_at', DateTime, default=datetime.datetime.now), Column('enabled', Boolean, default=True)) @pytest.fixture() def make_engine(connection, mysql_params, loop): engines = [] async def _make_engine(**kwargs): if "unix_socket" in mysql_params: conn_args = {"unix_socket": mysql_params["unix_socket"]} else: conn_args = { "host": mysql_params['host'], "port": mysql_params['port'], } if "ssl" in mysql_params: conn_args["ssl"] = mysql_params["ssl"] engine = await sa.create_engine( db=mysql_params['db'], user=mysql_params['user'], password=mysql_params['password'], minsize=10, **conn_args, **kwargs, ) engines.append(engine) return engine yield _make_engine for engine in engines: engine.terminate() loop.run_until_complete(engine.wait_closed()) async def start(engine): async with engine.acquire() as conn: await conn.execute("DROP TABLE IF EXISTS sa_tbl_default_test") await conn.execute("CREATE TABLE sa_tbl_default_test " "(id integer," " string_length integer, " "number integer," " description VARCHAR(255), " "created_at DATETIME(6), " 
"enabled TINYINT)") @pytest.mark.run_loop async def test_default_fields(make_engine): engine = await make_engine() await start(engine) async with engine.acquire() as conn: await conn.execute(table.insert().values()) res = await conn.execute(table.select()) row = await res.fetchone() assert row.string_length == 6 assert row.number == 100 assert row.description == 'default test' assert row.enabled is True assert isinstance(row.created_at, datetime.datetime) @pytest.mark.run_loop async def test_default_fields_isnull(make_engine): engine = await make_engine() await start(engine) async with engine.acquire() as conn: created_at = None enabled = False await conn.execute(table.insert().values( enabled=enabled, created_at=created_at, )) res = await conn.execute(table.select()) row = await res.fetchone() assert row.number == 100 assert row.string_length == 6 assert row.description == 'default test' assert row.enabled == enabled assert row.created_at == created_at @pytest.mark.run_loop async def test_default_fields_edit(make_engine): engine = await make_engine() await start(engine) async with engine.acquire() as conn: created_at = datetime.datetime.now() description = 'new descr' enabled = False number = 111 await conn.execute(table.insert().values( description=description, enabled=enabled, created_at=created_at, number=number, )) res = await conn.execute(table.select()) row = await res.fetchone() assert row.number == number assert row.string_length == 6 assert row.description == description assert row.enabled == enabled assert row.created_at == created_at aiomysql-0.3.2/tests/sa/test_sa_distil.py000066400000000000000000000045051507601712200205100ustar00rootroot00000000000000import unittest import sqlalchemy from aiomysql.sa.connection import _distill_params class DistillArgsTest(unittest.TestCase): def test_distill_none(self): self.assertEqual( _distill_params(None, None), [] ) def test_distill_no_multi_no_param(self): self.assertEqual( _distill_params((), {}), [] ) def test_distill_dict_multi_none_param(self): self.assertEqual( _distill_params(None, {"foo": "bar"}), [{"foo": "bar"}] ) def test_distill_dict_multi_empty_param(self): self.assertEqual( _distill_params((), {"foo": "bar"}), [{"foo": "bar"}] ) def test_distill_single_dict(self): self.assertEqual( _distill_params(({"foo": "bar"},), {}), [{"foo": "bar"}] ) def test_distill_single_list_strings(self): self.assertEqual( _distill_params((["foo", "bar"],), {}), [["foo", "bar"]] ) def test_distill_single_list_tuples(self): self.assertEqual( _distill_params(([("foo", "bar"), ("bat", "hoho")],), {}), [('foo', 'bar'), ('bat', 'hoho')] ) def test_distill_single_list_tuple(self): self.assertEqual( _distill_params(([("foo", "bar")],), {}), [('foo', 'bar')] ) def test_distill_multi_list_tuple(self): self.assertEqual( _distill_params( ([("foo", "bar")], [("bar", "bat")]), {} ), ([('foo', 'bar')], [('bar', 'bat')]) ) def test_distill_multi_strings(self): self.assertEqual( _distill_params(("foo", "bar"), {}), [('foo', 'bar')] ) def test_distill_single_list_dicts(self): self.assertEqual( _distill_params(([{"foo": "bar"}, {"foo": "hoho"}],), {}), [{'foo': 'bar'}, {'foo': 'hoho'}] ) def test_distill_single_string(self): self.assertEqual( _distill_params(("arg",), {}), [["arg"]] ) def test_distill_multi_string_tuple(self): self.assertEqual( _distill_params((("arg", "arg"),), {}), [("arg", "arg")] ) sqlalchemy # for sake of pyflakes checks 
aiomysql-0.3.2/tests/sa/test_sa_engine.py000066400000000000000000000110071507601712200204600ustar00rootroot00000000000000import asyncio import pytest from sqlalchemy import MetaData, Table, Column, Integer, String from aiomysql import sa meta = MetaData() tbl = Table('sa_tbl3', meta, Column('id', Integer, nullable=False, primary_key=True), Column('name', String(255))) @pytest.fixture() def make_engine(connection, mysql_params, loop): engines = [] async def _make_engine(**kwargs): if "unix_socket" in mysql_params: conn_args = {"unix_socket": mysql_params["unix_socket"]} else: conn_args = { "host": mysql_params['host'], "port": mysql_params['port'], } if "ssl" in mysql_params: conn_args["ssl"] = mysql_params["ssl"] engine = await sa.create_engine( db=mysql_params['db'], user=mysql_params['user'], password=mysql_params['password'], minsize=10, **conn_args, **kwargs, ) engines.append(engine) return engine yield _make_engine for engine in engines: engine.terminate() loop.run_until_complete(engine.wait_closed()) async def start(engine): async with engine.acquire() as conn: await conn.execute("DROP TABLE IF EXISTS sa_tbl3") await conn.execute("CREATE TABLE sa_tbl3 " "(id serial, name varchar(255))") @pytest.mark.run_loop async def test_dialect(make_engine): engine = await make_engine() await start(engine) assert sa.engine._dialect == engine.dialect @pytest.mark.run_loop async def test_name(make_engine): engine = await make_engine() await start(engine) assert 'mysql' == engine.name @pytest.mark.run_loop async def test_driver(make_engine): engine = await make_engine() await start(engine) assert 'pymysql' == engine.driver # @pytest.mark.run_loop # async def test_dsn(self): # self.assertEqual( # 'dbname=aiomysql user=aiomysql password=xxxxxx host=127.0.0.1', # engine.dsn) @pytest.mark.run_loop async def test_minsize(make_engine): engine = await make_engine() await start(engine) assert 10 == engine.minsize @pytest.mark.run_loop async def test_maxsize(make_engine): engine = await make_engine() await start(engine) assert 10 == engine.maxsize @pytest.mark.run_loop async def test_size(make_engine): engine = await make_engine() await start(engine) assert 10 == engine.size @pytest.mark.run_loop async def test_freesize(make_engine): engine = await make_engine() await start(engine) assert 10 == engine.freesize @pytest.mark.run_loop async def test_make_engine_with_default_loop(make_engine): engine = await make_engine() await start(engine) engine.close() await engine.wait_closed() @pytest.mark.run_loop async def test_not_context_manager(make_engine): engine = await make_engine() await start(engine) with pytest.raises(RuntimeError): with engine: pass @pytest.mark.run_loop async def test_release_transacted(make_engine): engine = await make_engine() await start(engine) conn = await engine.acquire() tr = await conn.begin() with pytest.raises(sa.InvalidRequestError): engine.release(conn) del tr @pytest.mark.run_loop async def test_cannot_acquire_after_closing(make_engine): engine = await make_engine() await start(engine) engine.close() with pytest.raises(RuntimeError): await engine.acquire() await engine.wait_closed() @pytest.mark.run_loop async def test_wait_closed(make_engine): engine = await make_engine() await start(engine) c1 = await engine.acquire() c2 = await engine.acquire() assert 10 == engine.size assert 8 == engine.freesize ops = [] async def do_release(conn): await asyncio.sleep(0) engine.release(conn) ops.append('release') async def wait_closed(): await engine.wait_closed() ops.append('wait_closed') 
engine.close() await asyncio.gather(wait_closed(), do_release(c1), do_release(c2)) assert ['release', 'release', 'wait_closed'] == ops assert 0 == engine.freesize engine.close() await engine.wait_closed() @pytest.mark.run_loop async def test_terminate_with_acquired_connections(make_engine): engine = await make_engine() await start(engine) conn = await engine.acquire() engine.terminate() await engine.wait_closed() assert conn.closed aiomysql-0.3.2/tests/sa/test_sa_transaction.py000066400000000000000000000255251507601712200215520ustar00rootroot00000000000000import functools import unittest from unittest import mock import pytest from sqlalchemy import MetaData, Table, Column, Integer, String, func, select from aiomysql import sa meta = MetaData() tbl = Table('sa_tbl2', meta, Column('id', Integer, nullable=False, primary_key=True), Column('name', String(255))) def check_prepared_transactions(func): @functools.wraps(func) async def wrapper(self): conn = await self.loop.run_until_complete(self._connect()) val = await conn.scalar('show max_prepared_transactions') if not val: raise unittest.SkipTest('Twophase transacions are not supported. ' 'Set max_prepared_transactions to ' 'a nonzero value') return func(self) return wrapper async def start(conn): await conn.execute("DROP TABLE IF EXISTS sa_tbl2") await conn.execute("CREATE TABLE sa_tbl2 " "(id serial, name varchar(255))") await conn.execute("INSERT INTO sa_tbl2 (name)" "VALUES ('first')") await conn._connection.commit() @pytest.fixture() def sa_connect(connection, connection_creator): async def _connect(**kwargs): conn = await connection_creator(**kwargs) # TODO: fix this, should autocommit be enabled by default? await conn.autocommit(True) engine = mock.Mock() engine.dialect = sa.engine._dialect def release(*args): return engine.release = release ret = sa.SAConnection(conn, engine) return ret return _connect @pytest.mark.run_loop async def test_without_transactions(sa_connect): conn1 = await sa_connect() await start(conn1) conn2 = await sa_connect() res1 = await conn1.scalar(select([func.count()]).select_from(tbl)) assert 1 == res1 await conn2.execute(tbl.delete()) res2 = await conn1.scalar(select([func.count()]).select_from(tbl)) assert 0 == res2 await conn1.close() await conn2.close() @pytest.mark.run_loop async def test_connection_attr(sa_connect): conn = await sa_connect() await start(conn) tr = await conn.begin() assert tr.connection is conn await conn.close() @pytest.mark.run_loop async def test_root_transaction(sa_connect): conn1 = await sa_connect() await start(conn1) conn2 = await sa_connect() tr = await conn1.begin() assert tr.is_active await conn1.execute(tbl.delete()) res1 = await conn2.scalar(select([func.count()]).select_from(tbl)) assert 1 == res1 await tr.commit() assert not tr.is_active assert not conn1.in_transaction res2 = await conn2.scalar(select([func.count()]).select_from(tbl)) assert 0 == res2 await conn1.close() await conn2.close() @pytest.mark.run_loop async def test_root_transaction_rollback(sa_connect): conn1 = await sa_connect() await start(conn1) conn2 = await sa_connect() tr = await conn1.begin() assert tr.is_active await conn1.execute(tbl.delete()) res1 = await conn2.scalar(select([func.count()]).select_from(tbl)) assert 1 == res1 await tr.rollback() assert not tr.is_active res2 = await conn2.scalar(select([func.count()]).select_from(tbl)) assert 1 == res2 await conn1.close() await conn2.close() @pytest.mark.run_loop async def test_root_transaction_close(sa_connect): conn1 = await sa_connect() await 
start(conn1) conn2 = await sa_connect() tr = await conn1.begin() assert tr.is_active await conn1.execute(tbl.delete()) res1 = await conn2.scalar(select([func.count()]).select_from(tbl)) assert 1 == res1 await tr.close() assert not tr.is_active res2 = await conn2.scalar(select([func.count()]).select_from(tbl)) assert 1 == res2 await conn1.close() await conn2.close() @pytest.mark.run_loop async def test_rollback_on_connection_close(sa_connect): conn1 = await sa_connect() await start(conn1) conn2 = await sa_connect() tr = await conn1.begin() await conn1.execute(tbl.delete()) res1 = await conn2.scalar(select([func.count()]).select_from(tbl)) assert 1 == res1 await conn1.close() res2 = await conn2.scalar(select([func.count()]).select_from(tbl)) assert 1 == res2 del tr await conn1.close() await conn2.close() @pytest.mark.run_loop async def test_root_transaction_commit_inactive(sa_connect): conn = await sa_connect() await start(conn) tr = await conn.begin() assert tr.is_active await tr.commit() assert not tr.is_active with pytest.raises(sa.InvalidRequestError): await tr.commit() await conn.close() @pytest.mark.run_loop async def test_root_transaction_rollback_inactive(sa_connect): conn = await sa_connect() await start(conn) tr = await conn.begin() assert tr.is_active await tr.rollback() assert not tr.is_active await tr.rollback() assert not tr.is_active await conn.close() @pytest.mark.run_loop async def test_root_transaction_double_close(sa_connect): conn = await sa_connect() await start(conn) tr = await conn.begin() assert tr.is_active await tr.close() assert not tr.is_active await tr.close() assert not tr.is_active await conn.close() @pytest.mark.run_loop async def test_inner_transaction_commit(sa_connect): conn = await sa_connect() await start(conn) tr1 = await conn.begin() tr2 = await conn.begin() assert tr2.is_active await tr2.commit() assert not tr2.is_active assert tr1.is_active await tr1.commit() assert not tr2.is_active assert not tr1.is_active await conn.close() @pytest.mark.run_loop async def test_inner_transaction_rollback(sa_connect): conn = await sa_connect() await start(conn) tr1 = await conn.begin() tr2 = await conn.begin() assert tr2.is_active await conn.execute(tbl.insert().values(name='aaaa')) await tr2.rollback() assert not tr2.is_active assert not tr1.is_active res = await conn.scalar(select([func.count()]).select_from(tbl)) assert 1 == res await conn.close() @pytest.mark.run_loop async def test_inner_transaction_close(sa_connect): conn = await sa_connect() await start(conn) tr1 = await conn.begin() tr2 = await conn.begin() assert tr2.is_active await conn.execute(tbl.insert().values(name='aaaa')) await tr2.close() assert not tr2.is_active assert tr1.is_active await tr1.commit() res = await conn.scalar(select([func.count()]).select_from(tbl)) assert 2 == res await conn.close() @pytest.mark.run_loop async def test_nested_transaction_commit(sa_connect): conn = await sa_connect() await start(conn) tr1 = await conn.begin_nested() tr2 = await conn.begin_nested() assert tr1.is_active assert tr2.is_active await conn.execute(tbl.insert().values(name='aaaa')) await tr2.commit() assert not tr2.is_active assert tr1.is_active res = await conn.scalar(select([func.count()]).select_from(tbl)) assert 2 == res await tr1.commit() assert not tr2.is_active assert not tr1.is_active res = await conn.scalar(select([func.count()]).select_from(tbl)) assert 2 == res await conn.close() @pytest.mark.run_loop async def test_nested_transaction_commit_twice(sa_connect): conn = await sa_connect() await 
start(conn) tr1 = await conn.begin_nested() tr2 = await conn.begin_nested() await conn.execute(tbl.insert().values(name='aaaa')) await tr2.commit() assert not tr2.is_active assert tr1.is_active await tr2.commit() assert not tr2.is_active assert tr1.is_active res = await conn.scalar(select([func.count()]).select_from(tbl)) assert 2 == res await tr1.close() await conn.close() @pytest.mark.run_loop async def test_nested_transaction_rollback(sa_connect): conn = await sa_connect() await start(conn) tr1 = await conn.begin_nested() tr2 = await conn.begin_nested() assert tr1.is_active assert tr2.is_active await conn.execute(tbl.insert().values(name='aaaa')) await tr2.rollback() assert not tr2.is_active assert tr1.is_active res = await conn.scalar(select([func.count()]).select_from(tbl)) assert 1 == res await tr1.commit() assert not tr2.is_active assert not tr1.is_active res = await conn.scalar(select([func.count()]).select_from(tbl)) assert 1 == res await conn.close() @pytest.mark.run_loop async def test_nested_transaction_rollback_twice(sa_connect): conn = await sa_connect() await start(conn) tr1 = await conn.begin_nested() tr2 = await conn.begin_nested() await conn.execute(tbl.insert().values(name='aaaa')) await tr2.rollback() assert not tr2.is_active assert tr1.is_active await tr2.rollback() assert not tr2.is_active assert tr1.is_active await tr1.commit() res = await conn.scalar(select([func.count()]).select_from(tbl)) assert 1 == res await conn.close() @pytest.mark.run_loop async def test_twophase_transaction_commit(sa_connect): conn = await sa_connect() await start(conn) tr = await conn.begin_twophase('sa_twophase') assert tr.xid == 'sa_twophase' await conn.execute(tbl.insert().values(name='aaaa')) await tr.prepare() assert tr.is_active await tr.commit() assert not tr.is_active res = await conn.scalar(select([func.count()]).select_from(tbl)) assert 2 == res await conn.close() @pytest.mark.run_loop async def test_twophase_transaction_twice(sa_connect): conn = await sa_connect() await start(conn) tr = await conn.begin_twophase() with pytest.raises(sa.InvalidRequestError): await conn.begin_twophase() assert tr.is_active await tr.prepare() await tr.commit() await conn.close() @pytest.mark.run_loop async def test_transactions_sequence(sa_connect): conn = await sa_connect() await start(conn) await conn.execute(tbl.delete()) assert conn._transaction is None tr1 = await conn.begin() assert tr1 is conn._transaction await conn.execute(tbl.insert().values(name='a')) res1 = await conn.scalar(select([func.count()]).select_from(tbl)) assert 1 == res1 await tr1.commit() assert conn._transaction is None tr2 = await conn.begin() assert tr2 is conn._transaction await conn.execute(tbl.insert().values(name='b')) res2 = await conn.scalar(select([func.count()]).select_from(tbl)) assert 2 == res2 await tr2.rollback() assert conn._transaction is None tr3 = await conn.begin() assert tr3 is conn._transaction await conn.execute(tbl.insert().values(name='b')) res3 = await conn.scalar(select([func.count()]).select_from(tbl)) assert 2 == res3 await tr3.commit() assert conn._transaction is None await conn.close() aiomysql-0.3.2/tests/sa/test_sa_types.py000066400000000000000000000037341507601712200203670ustar00rootroot00000000000000from enum import IntEnum from unittest import mock import pytest from sqlalchemy import MetaData, Table, Column, Integer, TypeDecorator from aiomysql import sa class UserDefinedEnum(IntEnum): Value1 = 111 Value2 = 222 class IntEnumField(TypeDecorator): impl = Integer def __init__(self, 
enum_class, *arg, **kw): TypeDecorator.__init__(self, *arg, **kw) self.enum_class = enum_class def process_bind_param(self, value, dialect): """ From python to DB """ if value is None: return None elif not isinstance(value, self.enum_class): return self.enum_class(value).value else: return value.value def process_result_value(self, value, dialect): """ From DB to Python """ if value is None: return None return self.enum_class(value) meta = MetaData() tbl = Table('sa_test_type_tbl', meta, Column('id', Integer, nullable=False, primary_key=True), Column('val', IntEnumField(enum_class=UserDefinedEnum))) @pytest.fixture() def sa_connect(connection_creator): async def connect(**kwargs): conn = await connection_creator() await conn.autocommit(True) cur = await conn.cursor() await cur.execute("DROP TABLE IF EXISTS sa_test_type_tbl") await cur.execute("CREATE TABLE sa_test_type_tbl " "(id serial, val bigint)") await cur._connection.commit() engine = mock.Mock() engine.dialect = sa.engine._dialect return sa.SAConnection(conn, engine) return connect @pytest.mark.run_loop async def test_values(sa_connect): conn = await sa_connect() await conn.execute(tbl.insert().values( val=UserDefinedEnum.Value1) ) result = await conn.execute(tbl.select().where( tbl.c.val == UserDefinedEnum.Value1) ) data = await result.fetchone() assert data['val'] == UserDefinedEnum.Value1 aiomysql-0.3.2/tests/ssl_resources/000077500000000000000000000000001507601712200174105ustar00rootroot00000000000000aiomysql-0.3.2/tests/ssl_resources/README.md000066400000000000000000000016661507601712200207000ustar00rootroot00000000000000# MySQL TLS Stuff This folder contains some resources to be mounted into a mysql container to support TLS Most of the instructions were taken from here https://dev.mysql.com/doc/refman/5.7/en/creating-ssl-files-using-openssl.html # Generating certificates ```bash openssl genrsa 2048 > ca-key.pem openssl req -new -x509 -nodes -days 3600 -key ca-key.pem -out ca.pem openssl req -newkey rsa:2048 -days 3600 -nodes -keyout server-key.pem -out server-req.pem openssl rsa -in server-key.pem -out server-key.pem openssl x509 -req -in server-req.pem -days 3600 -CA ca.pem -CAkey ca-key.pem -set_serial 01 -out server-cert.pem ``` The current files under `ssl/` have the default values provided by openssl. # MySQL Config MySQL imports all `.cnf` files under `/etc/mysql/conf.d` so a `tls.cnf` is placed in there referencing the SSL CA cert and server cert and key. 
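For reference, a test client can exercise this TLS setup by trusting the same `ca.pem`. A minimal sketch using aiomysql is shown below; the paths, host, port, and credentials are illustrative placeholders rather than values defined by this repository:

```python
import asyncio
import ssl

import aiomysql


async def check_tls():
    # Trust the self-signed CA generated above (path is a placeholder).
    ctx = ssl.create_default_context(cafile="tests/ssl_resources/ssl/ca.pem")
    # The bundled certificates use the openssl defaults, so hostname
    # verification is unlikely to match and is disabled here.
    ctx.check_hostname = False

    conn = await aiomysql.connect(
        host="127.0.0.1", port=3306,      # placeholder endpoint
        user="root", password="rootpw",   # placeholder credentials
        ssl=ctx,
    )
    async with conn.cursor() as cur:
        await cur.execute("SHOW STATUS LIKE 'Ssl_cipher'")
        print(await cur.fetchone())
    conn.close()


asyncio.run(check_tls())
```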
The entire `ssl/` directory should be mounted on the container to `/etc/mysql/ssl/` aiomysql-0.3.2/tests/ssl_resources/socket.cnf000066400000000000000000000000641507601712200213700ustar00rootroot00000000000000[mysqld] socket = /socket-mount/mysql.sock aiomysql-0.3.2/tests/ssl_resources/ssl/000077500000000000000000000000001507601712200202115ustar00rootroot00000000000000aiomysql-0.3.2/tests/ssl_resources/ssl/ca-key.pem000066400000000000000000000032171507601712200220700ustar00rootroot00000000000000-----BEGIN RSA PRIVATE KEY----- MIIEpAIBAAKCAQEA4QGo8oVPe8HCIqfJXYVxZbHAZDVlMK/e6ypQM463B8lNg6Ce dd0QfAVIs3wuXtRTVD6ELVFDrmcg/eBjM00Idh2GcI0ojDAPcECBvvk0SWYrUBFn bou5R1tPwCb8yY601dNExteT61egVhTQd8Vz6NjN9tkMqrpnZ3pn86QqnFWuDOvJ 5YkOAHOjXipkb8ba+Y4Jl50N6KZH2vrHwdWiO/DYQJLuvoOtYI6a+AXNH+ZSNvid Vz9nb3taUQcNxUlYtTByPfnU9wo/dzDFpNKvC5yScPvh2AmJUrEKNEYkFY/YbqLc iCXOoBfh1EHqmR2EASjanr+LgwTJfXoX3bQeQQIDAQABAoIBAQDLQ3igPhXjstHy BKlANwCN4dnvrNzQ8s/qmbsCGHb4Lb48nqkHyMDPiOZ4XkJ1oFH21NMLLVJ7Buci 8cYr3fc63MlKe/qZSgFoYp3TK8U0WXvfRRmvH8Is2CxfZdkPLD/ouoZzKuSRwgMy QHNi/5kKTHEkAkgTI3muXUHzM+baeknisEqXqCib1yY4FfX2Vnip1dTbbj1gEbGu vgFM67uXsoKeQ8ykTF5ZUDIphP+tFWgLMZA2L3iTgRSZJztWwxUMa9NZ8erxy25h HPntC5OELndKEbO+su7wCMkw8w2cNA1V6yyQlwSoIRD1fhit/v4hOmvofTpoikXH DaCJjgwBAoGBAP84Wq8nrp+79BWRX/qsivERtkGnfjFIDbWBhj70az/CgCV8OWWu td6VEEl/Gk7IHsrmOySF0EMYYeMoc5WQKMZbb+1x2m7ovxEwS/UGwNGdtZswyOJw y0LKI6dJPQUiZm/O2m0w1zs6/CvvIfu9QOUTxyvwub4lM8UW/gDy79URAoGBAOGx q83cTMHKY32dNU/IVaMpw9OxojAWpWXYOEqyV7hM2+gw/lrdBKRyWwB3l3smwet1 FvKINCZ0bTIRbz/UsNtXp8lISTvw5bQhGsQEYx6ncBeBeHN50zSVoR2xBfqu3pQ4 G5V/UI82hba7QUDXkuMJ2T7dcZixLk1vp3y+LvYxAoGBANkGYc7J7qs0F6Xzfeta p7fA+PuxYxSjEc1DfBWyoDSSv4egr+owe8Tveu8UnxlZAR5GUwqGo4c6h5qzvj3z XUj3XiFKjJV9Y2RJbn3IpVRaSKDUBi7P/XgpDdJl6/aevv7aplDtlEhwqxjs+zfn QfTKMbbCuB/h4Lj7CTljW+ARAoGAchB7hgVK/b4t3jRv1yymq1nWUM077RXk7b4D ZS0RTGH72jO4uW9ugzYQbAIFGwaRh1CcEmNoB+9bqKxLD3WNFK4ObJoN+S9cyFba 0iptdfalnhufJq1xYugkj38CSJnMgBiDSGEZ8+dYWOv2pLDO2dQGadE9MjCJ+DTv 7wmnbmECgYBnkVNH0wOvdMuT0vovm18zqtH04PmQGg5JXgQjtpn/6iC9BjRaF2Nz Jj9arX00KxIO5vkzbyt5ht5fzk4dpXoG5ozOqnqbj6WNEfgJDeyCkqo1gZOMZSQ/ YPjKGL7rhXbE/FsNEH90nG0NpIAk1ibtD9sYn6LBBYJcuauegOgE7g== -----END RSA PRIVATE KEY----- aiomysql-0.3.2/tests/ssl_resources/ssl/ca.pem000066400000000000000000000023151507601712200213000ustar00rootroot00000000000000-----BEGIN CERTIFICATE----- MIIDXTCCAkWgAwIBAgIJAP2JtVGC0ZPKMA0GCSqGSIb3DQEBCwUAMEUxCzAJBgNV BAYTAkFVMRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBX aWRnaXRzIFB0eSBMdGQwHhcNMTgwMzI5MjEzMzA1WhcNMjgwMjA1MjEzMzA1WjBF MQswCQYDVQQGEwJBVTETMBEGA1UECAwKU29tZS1TdGF0ZTEhMB8GA1UECgwYSW50 ZXJuZXQgV2lkZ2l0cyBQdHkgTHRkMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIB CgKCAQEA4QGo8oVPe8HCIqfJXYVxZbHAZDVlMK/e6ypQM463B8lNg6Cedd0QfAVI s3wuXtRTVD6ELVFDrmcg/eBjM00Idh2GcI0ojDAPcECBvvk0SWYrUBFnbou5R1tP wCb8yY601dNExteT61egVhTQd8Vz6NjN9tkMqrpnZ3pn86QqnFWuDOvJ5YkOAHOj Xipkb8ba+Y4Jl50N6KZH2vrHwdWiO/DYQJLuvoOtYI6a+AXNH+ZSNvidVz9nb3ta UQcNxUlYtTByPfnU9wo/dzDFpNKvC5yScPvh2AmJUrEKNEYkFY/YbqLciCXOoBfh 1EHqmR2EASjanr+LgwTJfXoX3bQeQQIDAQABo1AwTjAdBgNVHQ4EFgQUyNN4RMiv zhCNNvMta2kOoiw/SXswHwYDVR0jBBgwFoAUyNN4RMivzhCNNvMta2kOoiw/SXsw DAYDVR0TBAUwAwEB/zANBgkqhkiG9w0BAQsFAAOCAQEAyAiIqfKoemjnJmoMqcrg N/7QhC9P79rzwKQOEFkCFWJFuGiOD6GhPPCu/9ssrl5vBEwDLdl0V4AeGq8DeKV+ SNON1o6Y6uUAiX5uYK5Asv0avQUu7SS+uvE3YhTELjbvp4vqdLCUjBqq4KZoEA+F 4hXCltPVdOItztzAd7hgktYrkJeDA1M7sZHTv26HaO6vJ0trUdb4tvqzShzMCvN/ 2s9ZJAVBZL77Px40yUPiK6cjpq5fGcUen0zBumymBRFOb8ykvq7azUdjk6sz65Vb Q3kgsKGBHjVOPfXf00YWvgG3NePX3FsBEHtYbBDSD6k+aXQ1WfOUB8mtdwjyEnzD 7w== -----END CERTIFICATE----- 
aiomysql-0.3.2/tests/ssl_resources/ssl/server-cert.pem000066400000000000000000000021131507601712200231520ustar00rootroot00000000000000-----BEGIN CERTIFICATE----- MIIC/jCCAeYCAQEwDQYJKoZIhvcNAQELBQAwRTELMAkGA1UEBhMCQVUxEzARBgNV BAgMClNvbWUtU3RhdGUxITAfBgNVBAoMGEludGVybmV0IFdpZGdpdHMgUHR5IEx0 ZDAeFw0xODAzMjkyMTMzMTFaFw0yODAyMDUyMTMzMTFaMEUxCzAJBgNVBAYTAkFV MRMwEQYDVQQIDApTb21lLVN0YXRlMSEwHwYDVQQKDBhJbnRlcm5ldCBXaWRnaXRz IFB0eSBMdGQwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCfUvCGVUNU reTRbcuBQQ2w9+VM3TPMNr3gdZV1X1yg3TtEJc/kBYnSPfhz3S0Rar+Y8pdxCLvr lGJeBaevaXwzj8tW85S/3mv5BFtG71x5bB/dueG4wgEaEEhuXJIQHs89nzGgggCb hZCuDp2lqwptX2U/Pr9YJDbSuko3pp0EvMi7qfnNyzrZY9gLqUkTd1QH+bNWQW5G DMK9fbDXvGme/xyD4OLLEHy7Cc+KyGBzSOFqLNVpmppC4M5jf9aTM2s57fEaDE+t pT4E8LKciXYKAW03n+pfoECf1JneslGltmhbvANGLuA4lfOr/I6v5f2QsNS8ISV/ d7+YR+O9k5oVAgMBAAEwDQYJKoZIhvcNAQELBQADggEBACUWEAWfv3EOy8JmUbWA jEytJIh/N55hfknninjOBPMv1U1BRixJwXVKlwA8o+8JiacriObWeje2rDUOt6zY U5DnySQbTYJcJZ9jprqU7VXST7D9NvA0ueLclWTZcqIr/josyhK+l1YbezFYBf41 JQ4PVzkNz9Of4e022qONnlEX0MbtFlcyPEK4yWyXLAhidPAV9QcOCy85vob0+3EE hmRVVzcTv4Pbzgpee0ZORqozSLzZ3N6RvDyYIczqaytcbyvaQ7GuykE7XvIK5hz0 EM8pwsvxSY1z2yNIw38M8ZOYk18LsEGkf/TyT6eQqymMMD9Qy8rOTsOLfY5eQCf7 pKQ= -----END CERTIFICATE----- aiomysql-0.3.2/tests/ssl_resources/ssl/server-key.pem000066400000000000000000000032131507601712200230070ustar00rootroot00000000000000-----BEGIN RSA PRIVATE KEY----- MIIEogIBAAKCAQEAn1LwhlVDVK3k0W3LgUENsPflTN0zzDa94HWVdV9coN07RCXP 5AWJ0j34c90tEWq/mPKXcQi765RiXgWnr2l8M4/LVvOUv95r+QRbRu9ceWwf3bnh uMIBGhBIblySEB7PPZ8xoIIAm4WQrg6dpasKbV9lPz6/WCQ20rpKN6adBLzIu6n5 zcs62WPYC6lJE3dUB/mzVkFuRgzCvX2w17xpnv8cg+DiyxB8uwnPishgc0jhaizV aZqaQuDOY3/WkzNrOe3xGgxPraU+BPCynIl2CgFtN5/qX6BAn9SZ3rJRpbZoW7wD Ri7gOJXzq/yOr+X9kLDUvCElf3e/mEfjvZOaFQIDAQABAoIBAFNubFP8LEEYut1M 4Kez+EZ22hXRNEG5XN9A095d7LS0hUefgWkH2W9GUmfiJ6qaOvEOAG4Jw9aOoqBX 18LMu2SI5VOIRJnhEKubM21HBSb0jw9eOqy0sz0Bz9wzD63vZFkBl0xVJ5pJbEUp lDZgBhrWPL/MzQiMFkVtllXkIw+KNRIokV0HJn0VNUm+ORaDO0TnTAiqL7Wv553+ lWvGaeI4NpMZOPtlgqym1neQmllkeB07pSEtTopO0iINwuTuDUU7IMleN4eOomfh GwirEHUban8BDL5djckS0GrkUq9EuJbvjEikHAFuNwz7D1Sn3LsDYygD6pKWoXxh 8Ng/AZUCgYEAy1ma5xKvRZZ6QUFssZjtwbruxhj0j3lPA3t9VGlp4SQb3g6FgeOV 6dyJLuuFfSLT9ps+k8D1Er+v85OqO0IM8TR12rPjWMzSjBz/xHb+0uRbEoPwArfn wcPj3NN6M+tcZH//djogrpphN7u+BudBG6YWzUOQjLwdStM52s8hFD8CgYEAyJM2 M5AWPYL57CI1lqzN135aS6OFyG4N2+rtEWEGAmoeP+NoSGFmQPLTWBdT4ZOGl59/ fSBrWRKted6/H9frjZuSdsGXFMb7e71DDyYZq7tJbYEExc0a6BePINLCwLCIMKHj PcoPGVsdQXfZK17+qzACwDNbEis/J3H7xcPv7KsCgYBO+SG7k/oV4HbiWPJJlsbf ciXBMXfpMIeLJq5p1faUxV09RA59f1F9XXS5kCZrjtca8ve+kjWbbm56/mIiWWiF VIZgxXQJzKIIYErEliIo7R6hdjQEGkAbdGROIqNW/pUHQt6Hn9OJe9M9vd/y9mTG xB4e4ZqFzZjisl3JqJ+EKQKBgFyJboxDgb9HWj7TWZ32g9FT/hy/iM172PEJZe6K sNcUVnhrVoVuSlrUrSULPivogEQb1hnIhz5FG7wKRGtQluByUhRwJF/1nbjtDK9E iLtuYOYgjC8l/a/ujp46Hpf/2hV12v1655RvMQQvYwZbgWtBb0N1biLnyO9N6zbG uz6ZAoGAXW3mhN9zbN6EgsIlLu+wCdPuC9Vs968gCT612E8Ijul1kiSODG3rbMoG 2FbjqZLahyX8vWVhX/m4xDqO7DXTwi81polfFuxbc/PimOLI4DKq62lWLMBDmUba X8Bxal3FXLvdcNEplcXadqmxJeXMYnYsfC+MCQhe6im0bGyDOAg= -----END RSA PRIVATE KEY----- aiomysql-0.3.2/tests/ssl_resources/ssl/server-req.pem000066400000000000000000000016741507601712200230170ustar00rootroot00000000000000-----BEGIN CERTIFICATE REQUEST----- MIICijCCAXICAQAwRTELMAkGA1UEBhMCQVUxEzARBgNVBAgMClNvbWUtU3RhdGUx ITAfBgNVBAoMGEludGVybmV0IFdpZGdpdHMgUHR5IEx0ZDCCASIwDQYJKoZIhvcN AQEBBQADggEPADCCAQoCggEBAJ9S8IZVQ1St5NFty4FBDbD35UzdM8w2veB1lXVf XKDdO0Qlz+QFidI9+HPdLRFqv5jyl3EIu+uUYl4Fp69pfDOPy1bzlL/ea/kEW0bv XHlsH9254bjCARoQSG5ckhAezz2fMaCCAJuFkK4OnaWrCm1fZT8+v1gkNtK6Sjem 
nQS8yLup+c3LOtlj2AupSRN3VAf5s1ZBbkYMwr19sNe8aZ7/HIPg4ssQfLsJz4rI YHNI4Wos1WmamkLgzmN/1pMzaznt8RoMT62lPgTwspyJdgoBbTef6l+gQJ/Umd6y UaW2aFu8A0Yu4DiV86v8jq/l/ZCw1LwhJX93v5hH472TmhUCAwEAAaAAMA0GCSqG SIb3DQEBCwUAA4IBAQCWRJYZTj9Wxt5Wf7IG7K6V7cZNfiJBkAyKbt7Ny60lB3/C xS3hmq4mFKpsE2OXtNCHjs1/fhodgR4aMkORTN7Zin5tbGoc3bRwfY92ktbZx4z0 W3EOP4peih6xGtrTYdcr/Ww/M72fhDvLopr55n091U2fismXcjLD5BJWx7T/rGX1 Luwu+S90E+fA5vqYhOgfT87OcopAGzia1xDFnsX62OSpj36EwPcB6tS1IjIZz87a lugDmd39+gyM0rJ0bpe0HH8PmgKOAc/F5SZCCWJChmYcvXNcURaLEReCg0RHOvsD ZEYmB2Ki+jwHO6DoQbNz3Erq8rl4GWjorYz4GDUH -----END CERTIFICATE REQUEST----- aiomysql-0.3.2/tests/ssl_resources/tls.cnf000066400000000000000000000001641507601712200207030ustar00rootroot00000000000000[mysqld] ssl-ca=/etc/mysql/ssl/ca.pem ssl-cert=/etc/mysql/ssl/server-cert.pem ssl-key=/etc/mysql/ssl/server-key.pem aiomysql-0.3.2/tests/test_async_iter.py000066400000000000000000000034541507601712200202740ustar00rootroot00000000000000import pytest from aiomysql import SSCursor from aiomysql import sa from sqlalchemy import MetaData, Table, Column, Integer, String meta = MetaData() tbl = Table('tbl', meta, Column('id', Integer, nullable=False, primary_key=True), Column('name', String(255))) @pytest.fixture def table(loop, connection_creator, table_cleanup): async def f(): connection = await connection_creator() cursor = await connection.cursor() await cursor.execute("DROP TABLE IF EXISTS tbl;") await cursor.execute("""CREATE TABLE tbl ( id MEDIUMINT NOT NULL AUTO_INCREMENT, name VARCHAR(255) NOT NULL, PRIMARY KEY (id));""") for i in [(1, 'a'), (2, 'b'), (3, 'c')]: await cursor.execute("INSERT INTO tbl VALUES(%s, %s)", i) await cursor.execute("commit;") await cursor.close() table_cleanup('tbl') loop.run_until_complete(f()) @pytest.mark.run_loop async def test_async_cursor(cursor, table): ret = [] await cursor.execute('SELECT * from tbl;') async for i in cursor: ret.append(i) assert [(1, 'a'), (2, 'b'), (3, 'c')] == ret @pytest.mark.run_loop async def test_async_cursor_server_side(connection, table): ret = [] cursor = await connection.cursor(SSCursor) await cursor.execute('SELECT * from tbl;') async for i in cursor: ret.append(i) assert [(1, 'a'), (2, 'b'), (3, 'c')] == ret @pytest.mark.run_loop async def test_async_iter_over_sa_result(mysql_params, table, loop): ret = [] engine = await sa.create_engine(**mysql_params, loop=loop) conn = await engine.acquire() async for i in (await conn.execute(tbl.select())): ret.append(i) assert [(1, 'a'), (2, 'b'), (3, 'c')] == ret engine.terminate() aiomysql-0.3.2/tests/test_async_with.py000066400000000000000000000223741507601712200203060ustar00rootroot00000000000000import warnings import aiomysql import pytest from aiomysql import sa, create_pool, DictCursor, Cursor from sqlalchemy import MetaData, Table, Column, Integer, String, func, select meta = MetaData() tbl = Table('tbl', meta, Column('id', Integer, nullable=False, primary_key=True), Column('name', String(255))) @pytest.fixture def table(loop, connection_creator, table_cleanup): async def f(): connection = await connection_creator() cursor = await connection.cursor() await cursor.execute("DROP TABLE IF EXISTS tbl;") await cursor.execute("""CREATE TABLE tbl ( id MEDIUMINT NOT NULL AUTO_INCREMENT, name VARCHAR(255) NOT NULL, PRIMARY KEY (id));""") for i in [(1, 'a'), (2, 'b'), (3, 'c')]: await cursor.execute("INSERT INTO tbl VALUES(%s, %s)", i) await cursor.execute("commit;") await cursor.close() table_cleanup('tbl') loop.run_until_complete(f()) @pytest.mark.run_loop async def test_cursor(table, 
cursor): ret = [] await cursor.execute('SELECT * from tbl;') assert not cursor.closed async with cursor: async for i in cursor: ret.append(i) assert [(1, 'a'), (2, 'b'), (3, 'c')] == ret assert cursor.closed @pytest.mark.run_loop async def test_cursor_lightweight(table, cursor): await cursor.execute('SELECT * from tbl;') assert not cursor.closed async with cursor: pass assert cursor.closed @pytest.mark.run_loop async def test_cursor_method(connection): async with connection.cursor() as cursor: await cursor.execute('SELECT 42;') value = await cursor.fetchone() assert value == (42,) assert cursor.closed @pytest.mark.run_loop async def test_connection(connection): assert not connection.closed async with connection: assert not connection.closed assert connection.closed @pytest.mark.run_loop async def test_connection_exception(connection): assert not connection.closed with pytest.raises(RuntimeError) as ctx: async with connection: assert not connection.closed raise RuntimeError('boom') assert str(ctx.value) == 'boom' assert connection.closed @pytest.mark.run_loop async def test_connect_method(mysql_params, loop): async with aiomysql.connect(loop=loop, **mysql_params) as connection: async with connection.cursor() as cursor: await cursor.execute("SELECT 42") value = await cursor.fetchone() assert value, (42,) assert cursor.closed assert connection.closed @pytest.mark.run_loop async def test_connect_method_exception(mysql_params, loop): with pytest.raises(RuntimeError) as ctx: async with aiomysql.connect(loop=loop, **mysql_params) as connection: assert not connection.closed raise RuntimeError('boom') assert str(ctx.value) == 'boom' assert connection.closed @pytest.mark.run_loop async def test_pool(table, pool_creator, loop): pool = await pool_creator() async with pool.acquire() as conn: async with (await conn.cursor()) as cur: await cur.execute("SELECT * from tbl") ret = [] async for i in cur: ret.append(i) assert [(1, 'a'), (2, 'b'), (3, 'c')] == ret @pytest.mark.run_loop async def test_create_pool_deprecations(mysql_params, loop): async with create_pool(loop=loop, **mysql_params) as pool: with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") with await pool as conn: pass assert issubclass(w[-1].category, DeprecationWarning) assert conn.closed @pytest.mark.run_loop async def test_sa_connection(table, mysql_params, loop): async with sa.create_engine(loop=loop, **mysql_params) as engine: connection = await engine.acquire() assert not connection.closed async with connection: async with connection.execute(tbl.select()) as cursor: ret = [] async for i in cursor: ret.append(i) assert [(1, 'a'), (2, 'b'), (3, 'c')] == ret assert connection.closed @pytest.mark.run_loop async def test_sa_transaction(table, mysql_params, loop): async with sa.create_engine(loop=loop, **mysql_params) as engine: async with engine.acquire() as connection: cnt = await connection.scalar(select([func.count()]).select_from(tbl)) assert 3 == cnt async with (await connection.begin()) as tr: assert tr.is_active await connection.execute(tbl.delete()) assert not tr.is_active cnt = await connection.scalar(select([func.count()]).select_from(tbl)) assert 0 == cnt @pytest.mark.run_loop async def test_sa_transaction_rollback(loop, mysql_params, table): async with sa.create_engine(loop=loop, **mysql_params) as engine: async with engine.acquire() as conn: cnt = await conn.scalar(select([func.count()]).select_from(tbl)) assert 3 == cnt with pytest.raises(RuntimeError) as ctx: async with (await conn.begin()) as tr: 
assert tr.is_active await conn.execute(tbl.delete()) raise RuntimeError("Exit") assert str(ctx.value) == "Exit" assert not tr.is_active cnt = await conn.scalar(select([func.count()]).select_from(tbl)) assert 3 == cnt @pytest.mark.run_loop async def test_create_engine(loop, mysql_params, table): async with sa.create_engine(loop=loop, **mysql_params) as engine: async with engine.acquire() as conn: async with conn.execute(tbl.select()) as cursor: ret = [] async for i in cursor: ret.append(i) assert [(1, 'a'), (2, 'b'), (3, 'c')] == ret @pytest.mark.run_loop async def test_engine(loop, mysql_params, table): engine = await sa.create_engine(loop=loop, **mysql_params) async with engine: async with engine.acquire() as conn: async with conn.execute(tbl.select()) as cursor: ret = [] async for i in cursor: ret.append(i) assert [(1, 'a'), (2, 'b'), (3, 'c')] == ret @pytest.mark.run_loop async def test_transaction_context_manager(loop, mysql_params, table): async with sa.create_engine(loop=loop, **mysql_params) as engine: async with engine.acquire() as conn: async with conn.begin() as tr: async with conn.execute(tbl.select()) as cursor: ret = [] async for i in cursor: ret.append(i) assert [(1, 'a'), (2, 'b'), (3, 'c')] == ret assert cursor.closed assert not tr.is_active tr2 = await conn.begin() async with tr2: assert tr2.is_active async with conn.execute('SELECT 1;') as cursor: rec = await cursor.scalar() assert rec == 1 await cursor.close() assert not tr2.is_active @pytest.mark.run_loop async def test_transaction_context_manager_error(loop, mysql_params, table): async with sa.create_engine(loop=loop, **mysql_params) as engine: async with engine.acquire() as conn: with pytest.raises(RuntimeError) as ctx: async with conn.begin() as tr: assert tr.is_active raise RuntimeError('boom') assert str(ctx.value) == 'boom' assert not tr.is_active assert conn.closed @pytest.mark.run_loop async def test_transaction_context_manager_commit_once(loop, mysql_params, table): async with sa.create_engine(loop=loop, **mysql_params) as engine: async with engine.acquire() as conn: async with conn.begin() as tr: # check that in context manager we do not execute # commit for second time. 
Two commits in row causes # InvalidRequestError exception await tr.commit() assert not tr.is_active tr2 = await conn.begin() async with tr2: assert tr2.is_active # check for double commit one more time await tr2.commit() assert not tr2.is_active assert conn.closed @pytest.mark.run_loop async def test_incompatible_cursor_fails(loop, mysql_params): mysql_params['cursorclass'] = DictCursor with pytest.raises(sa.ArgumentError) as ctx: await sa.create_engine(loop=loop, **mysql_params) msg = 'SQLAlchemy engine does not support this cursor class' assert str(ctx.value) == msg @pytest.mark.run_loop async def test_compatible_cursor_correct(loop, mysql_params): class SubCursor(Cursor): pass mysql_params['cursorclass'] = SubCursor async with sa.create_engine(loop=loop, **mysql_params) as engine: async with engine.acquire() as conn: # check not raise sa.ArgumentError exception pass assert conn.closed aiomysql-0.3.2/tests/test_basic.py000066400000000000000000000234111507601712200172100ustar00rootroot00000000000000import datetime import json import re import time import pytest from pymysql.err import ProgrammingError @pytest.fixture def datatype_table(loop, cursor, table_cleanup): async def f(): await cursor.execute( "CREATE TABLE test_datatypes (b bit, i int, l bigint, f real, s " "varchar(32), u varchar(32), bb blob, d date, dt datetime, " "ts timestamp, td time, t time, st datetime)") table_cleanup('test_datatypes') loop.run_until_complete(f()) table_cleanup('test_datatypes') @pytest.mark.run_loop async def test_datatypes(connection, cursor, datatype_table): encoding = connection.charset if encoding == 'utf8mb4': encoding = 'utf8' # insert values v = ( True, -3, 123456789012, 5.7, "hello'\" world", "Espa\xc3\xb1ol", "binary\x00data".encode(encoding), datetime.date(1988, 2, 2), datetime.datetime.now().replace(microsecond=0), datetime.timedelta(5, 6), datetime.time(16, 32), time.localtime()) await cursor.execute( "INSERT INTO test_datatypes (b,i,l,f,s,u,bb,d,dt,td,t,st) " "values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)", v) await cursor.execute( "select b,i,l,f,s,u,bb,d,dt,td,t,st from test_datatypes") r = await cursor.fetchone() assert bytes([1]) == r[0] # assert v[1:8] == r[1:8]) assert v[1:9] == r[1:9] # mysql throws away microseconds so we need to check datetimes # specially. additionally times are turned into timedeltas. 
# self.assertEqual(datetime.datetime(*v[8].timetuple()[:6]), r[8]) # TODO: figure out why this assert fails # assert [9] == r[9] # just timedeltas expected = datetime.timedelta(0, 60 * (v[10].hour * 60 + v[10].minute)) assert expected == r[10] assert datetime.datetime(*v[-1][:6]) == r[-1] @pytest.mark.run_loop async def test_datatypes_nulls(cursor, datatype_table): # check nulls await cursor.execute( "insert into test_datatypes (b,i,l,f,s,u,bb,d,dt,td,t,st) " "values (%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)", [None] * 12) await cursor.execute( "select b,i,l,f,s,u,bb,d,dt,td,t,st from test_datatypes") r = await cursor.fetchone() assert tuple([None] * 12) == r @pytest.mark.run_loop async def test_datatypes_sequence_types(cursor, datatype_table): # check sequence type await cursor.execute( "INSERT INTO test_datatypes (i, l) VALUES (2,4), (6,8), " "(10,12)") await cursor.execute( "select l from test_datatypes where i in %s order by i", ((2, 6),)) r = await cursor.fetchall() assert ((4,), (8,)) == r @pytest.mark.run_loop async def test_dict_escaping(cursor, table_cleanup): sql = "CREATE TABLE test_dict (a INTEGER, b INTEGER, c INTEGER)" await cursor.execute(sql) table_cleanup('test_dict') sql = "INSERT INTO test_dict (a,b,c) VALUES (%(a)s, %(b)s, %(c)s)" await cursor.execute(sql, {"a": 1, "b": 2, "c": 3}) await cursor.execute("SELECT a,b,c FROM test_dict") r = await cursor.fetchone() assert (1, 2, 3) == r @pytest.mark.run_loop async def test_string(cursor, table_cleanup): await cursor.execute("DROP TABLE IF EXISTS test_string;") await cursor.execute("CREATE TABLE test_string (a text)") test_value = "I am a test string" table_cleanup('test_string') await cursor.execute("INSERT INTO test_string (a) VALUES (%s)", test_value) await cursor.execute("SELECT a FROM test_string") r = await cursor.fetchone() assert (test_value,) == r @pytest.mark.run_loop async def test_string_with_emoji(cursor, table_cleanup): await cursor.execute("DROP TABLE IF EXISTS test_string_with_emoji;") await cursor.execute("CREATE TABLE test_string_with_emoji (a text) " "DEFAULT CHARACTER SET=\"utf8mb4\"") test_value = "I am a test string with emoji ๐Ÿ˜„" table_cleanup('test_string_with_emoji') await cursor.execute("INSERT INTO test_string_with_emoji (a) VALUES (%s)", test_value) await cursor.execute("SELECT a FROM test_string_with_emoji") r = await cursor.fetchone() assert (test_value,) == r @pytest.mark.run_loop async def test_integer(cursor, table_cleanup): await cursor.execute("CREATE TABLE test_integer (a INTEGER)") table_cleanup('test_integer') test_value = 12345 await cursor.execute("INSERT INTO test_integer (a) VALUES (%s)", test_value) await cursor.execute("SELECT a FROM test_integer") r = await cursor.fetchone() assert (test_value,) == r @pytest.mark.run_loop async def test_binary_data(cursor, table_cleanup): data = bytes(bytearray(range(256)) * 4) await cursor.execute("CREATE TABLE test_blob (b blob)") table_cleanup('test_blob') await cursor.execute("INSERT INTO test_blob (b) VALUES (%s)", (data,)) await cursor.execute("SELECT b FROM test_blob") (r,) = await cursor.fetchone() assert data == r @pytest.mark.run_loop async def test_untyped_convertion_to_null_and_empty_string(cursor): await cursor.execute("select null,''") r = await cursor.fetchone() assert (None, '') == r await cursor.execute("select '',null") r = await cursor.fetchone() assert ('', None) == r @pytest.mark.run_loop async def test_timedelta_conversion(cursor): await cursor.execute( "select time('12:30'), time('23:12:59'), time('23:12:59.05100'), " 
"time('-12:30'), time('-23:12:59'), time('-23:12:59.05100'), " "time('-00:30')") r = await cursor.fetchone() assert (datetime.timedelta(0, 45000), datetime.timedelta(0, 83579), datetime.timedelta(0, 83579, 51000), -datetime.timedelta(0, 45000), -datetime.timedelta(0, 83579), -datetime.timedelta(0, 83579, 51000), -datetime.timedelta(0, 1800)) == r @pytest.mark.run_loop async def test_datetime_conversion(cursor, table_cleanup): dt = datetime.datetime(2013, 11, 12, 9, 9, 9, 123450) try: await cursor.execute("CREATE TABLE test_datetime" "(id INT, ts DATETIME(6))") table_cleanup('test_datetime') await cursor.execute("INSERT INTO test_datetime VALUES " "(1,'2013-11-12 09:09:09.12345')") await cursor.execute("SELECT ts FROM test_datetime") r = await cursor.fetchone() assert (dt,) == r except ProgrammingError: # User is running a version of MySQL that doesn't support # msecs within datetime pass @pytest.mark.run_loop async def test_get_transaction_status(connection, cursor): # make sure that connection is clean without transactions transaction_flag = connection.get_transaction_status() assert not transaction_flag # start transaction await connection.begin() # make sure transaction flag is up transaction_flag = connection.get_transaction_status() assert transaction_flag await cursor.execute('SELECT 1;') (r, ) = await cursor.fetchone() assert r == 1 await connection.commit() # make sure that transaction flag is down transaction_flag = connection.get_transaction_status() assert not transaction_flag @pytest.mark.run_loop async def test_rollback(connection, cursor): await cursor.execute('DROP TABLE IF EXISTS tz_data;') await cursor.execute('CREATE TABLE tz_data (' 'region VARCHAR(64),' 'zone VARCHAR(64),' 'name VARCHAR(64))') await connection.commit() args = ('America', '', 'America/New_York') await cursor.execute('INSERT INTO tz_data VALUES (%s, %s, %s)', args) await cursor.execute('SELECT * FROM tz_data;') data = await cursor.fetchall() assert len(data) == 1 await connection.rollback() await cursor.execute('SELECT * FROM tz_data;') data = await cursor.fetchall() # should not return any rows since no inserts was commited assert len(data) == 0 def mysql_server_is(server_version, version_tuple): """Return True if the given connection is on the version given or greater. 
e.g.:: if self.mysql_server_is(conn, (5, 6, 4)): # do something for MySQL 5.6.4 and above """ server_version_tuple = tuple( (int(dig) if dig is not None else 0) for dig in re.match(r'(\d+)\.(\d+)\.(\d+)', server_version).group(1, 2, 3) ) return server_version_tuple >= version_tuple def get_mysql_vendor(server_info): return "mariadb" if "MariaDB" in server_info else "mysql" @pytest.mark.run_loop async def test_json(connection_creator, table_cleanup): connection = await connection_creator( charset="utf8mb4", autocommit=True) # TODO do better server_info = connection.get_server_info() if not mysql_server_is(server_info, (5, 7, 0)): raise pytest.skip("JSON type is not supported on MySQL <= 5.6") cursor = await connection.cursor() await cursor.execute("""\ CREATE TABLE test_json ( id INT NOT NULL, json JSON NOT NULL, PRIMARY KEY (id) );""") table_cleanup("test_json") json_str = '{"hello": "ใ“ใ‚“ใซใกใฏ"}' await cursor.execute( "INSERT INTO test_json (id, `json`) values (42, %s)", (json_str,)) await cursor.execute("SELECT `json` from `test_json` WHERE `id`=42") r = await cursor.fetchone() assert json.loads(r[0]) == json.loads(json_str) # MariaDB does not support JSON as a type # See also https://github.com/PyMySQL/PyMySQL/pull/1165 if get_mysql_vendor(connection.get_server_info()) == "mysql": await cursor.execute("SELECT CAST(%s AS JSON) AS x", (json_str,)) r = await cursor.fetchone() assert json.loads(r[0]) == json.loads(json_str) aiomysql-0.3.2/tests/test_bulk_inserts.py000066400000000000000000000142171507601712200206370ustar00rootroot00000000000000import pytest from aiomysql import DictCursor @pytest.fixture def table(loop, connection, table_cleanup): async def f(): cursor = await connection.cursor(DictCursor) sql = """CREATE TABLE bulkinsert (id INT(11), name CHAR(20), age INT, height INT, PRIMARY KEY (id))""" await cursor.execute(sql) table_cleanup('bulkinsert') loop.run_until_complete(f()) @pytest.fixture def assert_records(cursor): async def f(data): await cursor.execute( "SELECT id, name, age, height FROM bulkinsert") result = await cursor.fetchall() await cursor.execute('COMMIT') assert sorted(data) == sorted(result) return f @pytest.fixture def assert_dict_records(connection): async def f(data): cursor = await connection.cursor(DictCursor) await cursor.execute( "SELECT id, name, age, height FROM bulkinsert") result = await cursor.fetchall() await cursor.execute('COMMIT') assert sorted(data, key=lambda k: k['id']) == \ sorted(result, key=lambda k: k['id']) return f @pytest.mark.run_loop async def test_bulk_insert(cursor, table, assert_records): data = [(0, "bob", 21, 123), (1, "jim", 56, 45), (2, "fred", 100, 180)] await cursor.executemany( "INSERT INTO bulkinsert (id, name, age, height) " "VALUES (%s,%s,%s,%s)", data) expected = bytearray(b"INSERT INTO bulkinsert (id, name, age, height) " b"VALUES (0,'bob',21,123),(1,'jim',56,45)," b"(2,'fred',100,180)") assert cursor._last_executed == expected await cursor.execute('commit') await assert_records(data) @pytest.mark.run_loop async def test_bulk_insert_multiline_statement(cursor, table, assert_records): data = [(0, "bob", 21, 123), (1, "jim", 56, 45), (2, "fred", 100, 180)] await cursor.executemany("""insert into bulkinsert (id, name, age, height) values (%s, %s , %s, %s ) """, data) assert cursor._last_executed.strip() == bytearray(b"""insert into bulkinsert (id, name, age, height) values (0, 'bob' , 21, 123 ),(1, 'jim' , 56, 45 ),(2, 'fred' , 100, 180 )""") await cursor.execute('COMMIT') await assert_records(data) 
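# A minimal standalone sketch of the executemany() batching that the
# _last_executed assertions above inspect: a single-row
# "INSERT ... VALUES (%s, ...)" template is rewritten into one multi-row
# statement. Kept commented out so it does not affect this test module;
# the connection parameters are placeholders, not this suite's fixtures.
#
#   import asyncio
#   import aiomysql
#
#   async def bulk_insert_demo():
#       conn = await aiomysql.connect(host="127.0.0.1", port=3306,
#                                     user="root", password="rootpw",
#                                     db="test_pymysql")
#       async with conn.cursor() as cur:
#           await cur.executemany(
#               "INSERT INTO bulkinsert (id, name, age, height) "
#               "VALUES (%s, %s, %s, %s)",
#               [(10, "ann", 30, 160), (11, "joe", 40, 170)])
#           await conn.commit()
#       conn.close()
#
#   asyncio.run(bulk_insert_demo())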
@pytest.mark.run_loop async def test_bulk_insert_single_record(cursor, table, assert_records): data = [(0, "bob", 21, 123)] await cursor.executemany( "insert into bulkinsert (id, name, age, height) " "values (%s,%s,%s,%s)", data) await cursor.execute('COMMIT') await assert_records(data) @pytest.mark.run_loop async def test_insert_on_duplicate_key_update(cursor, table, assert_records): # executemany should work with "insert ... on update" " data = [(0, "bob", 21, 123), (1, "jim", 56, 45), (2, "fred", 100, 180)] await cursor.executemany("""insert into bulkinsert (id, name, age, height) values (%s, %s , %s, %s ) on duplicate key update age = values(age) """, data) assert cursor._last_executed.strip() == bytearray(b"""insert into bulkinsert (id, name, age, height) values (0, 'bob' , 21, 123 ),(1, 'jim' , 56, 45 ),(2, 'fred' , 100, 180 ) on duplicate key update age = values(age)""") await cursor.execute('COMMIT') await assert_records(data) @pytest.mark.run_loop async def test_bulk_insert_with_params_as_dict(cursor, table, assert_dict_records): data = [ { 'id': 0, 'name': "bob", 'age': 21, 'height': 123 }, { 'id': 1, 'name': "jim", 'age': 56, 'height': 45 }, { 'id': 2, 'name': "fred", 'age': 100, 'height': 180 }, ] await cursor.executemany( "INSERT INTO bulkinsert (id, name, age, height) " "VALUES (%(id)s,%(name)s,%(age)s,%(height)s)", data) expected = bytearray(b"INSERT INTO bulkinsert (id, name, age, height) " b"VALUES (0,'bob',21,123),(1,'jim',56,45)," b"(2,'fred',100,180)") assert cursor._last_executed == expected await cursor.execute('commit') await assert_dict_records(data) @pytest.mark.run_loop async def test_bulk_insert_with_precedence_spaces(cursor, table, assert_records): data = [(0, "bob", 21, 123), (1, "jim", 56, 45)] await cursor.executemany(""" INSERT INTO bulkinsert (id, name, age, height) VALUES (%s,%s,%s,%s) """, data) expected = bytearray(b"INSERT INTO bulkinsert (id, name, age, height)" b"\n VALUES (0,\'bob\',21,123)," b"(1,\'jim\',56,45)\n ") assert cursor._last_executed == expected await cursor.execute('commit') await assert_records(data) @pytest.mark.run_loop async def test_bulk_replace(cursor, table, assert_records): data = [(0, "bob", 21, 123), (0, "jim", 56, 45)] sql = ("REPLACE INTO bulkinsert (id, name, age, height) " + "VALUES (%s,%s,%s,%s)") await cursor.executemany(sql, data) assert cursor._last_executed.strip() == bytearray( b"REPLACE INTO bulkinsert (id, name, age, height) " + b"VALUES (0,'bob',21,123),(0,'jim',56,45)" ) await cursor.execute('COMMIT') await assert_records([(0, "jim", 56, 45)]) @pytest.mark.run_loop async def test_bulk_insert_with_semicolon_at_the_end(cursor, table, assert_records): data = [(0, "bob", 21, 123), (1, "jim", 56, 45)] await cursor.executemany( "INSERT INTO bulkinsert (id, name, age, height) " "VALUES (%s,%s,%s,%s);", data) expected = bytearray(b"INSERT INTO bulkinsert (id, name, age, height) " b"VALUES (0,'bob',21,123),(1,'jim',56,45)") assert cursor._last_executed == expected await cursor.execute('commit') await assert_records(data) aiomysql-0.3.2/tests/test_connection.py000066400000000000000000000177671507601712200203070ustar00rootroot00000000000000import asyncio import gc import os import pytest import aiomysql @pytest.fixture() def fill_my_cnf(mysql_params): tests_root = os.path.abspath(os.path.dirname(__file__)) if "unix_socket" in mysql_params: tmpl_path = "fixtures/my.cnf.unix.tmpl" else: tmpl_path = "fixtures/my.cnf.tcp.tmpl" path1 = os.path.join(tests_root, tmpl_path) path2 = os.path.join(tests_root, 'fixtures/my.cnf') with 
open(path1) as f1: tmpl = f1.read() with open(path2, 'w') as f2: f2.write(tmpl.format_map(mysql_params)) @pytest.mark.run_loop async def test_connect_timeout(connection_creator): # OSErrors and asyncio.TimeoutError are caught and raised as operational # errors with pytest.raises(aiomysql.OperationalError): await connection_creator(connect_timeout=0.000000000001) @pytest.mark.run_loop async def test_config_file(fill_my_cnf, connection_creator, mysql_params): tests_root = os.path.abspath(os.path.dirname(__file__)) path = os.path.join(tests_root, 'fixtures/my.cnf') conn = await connection_creator(read_default_file=path) if "unix_socket" in mysql_params: assert conn.unix_socket == mysql_params["unix_socket"] else: assert conn.host == mysql_params['host'] assert conn.port == mysql_params['port'] assert conn.user, mysql_params['user'] # make sure connection is working cur = await conn.cursor() await cur.execute('SELECT 42;') (r, ) = await cur.fetchone() assert r == 42 conn.close() @pytest.mark.run_loop async def test_config_file_with_different_group(fill_my_cnf, connection_creator, mysql_params): # same test with config file but actual settings # located in not default group. tests_root = os.path.abspath(os.path.dirname(__file__)) path = os.path.join(tests_root, 'fixtures/my.cnf') group = 'client_with_unix_socket' conn = await connection_creator(read_default_file=path, read_default_group=group) assert conn.charset == 'utf8' assert conn.user == 'root' # make sure connection is working cur = await conn.cursor() await cur.execute('SELECT 42;') (r, ) = await cur.fetchone() assert r == 42 conn.close() @pytest.mark.run_loop async def test_utf8mb4(connection_creator): """This test requires MySQL >= 5.5""" charset = 'utf8mb4' conn = await connection_creator(charset=charset) assert conn.charset == charset conn.close() @pytest.mark.run_loop async def test_largedata(connection_creator): """Large query and response (>=16MB)""" conn = await connection_creator() cur = await conn.cursor() await cur.execute("SELECT @@max_allowed_packet") r = await cur.fetchone() if r[0] < 16 * 1024 * 1024 + 10: pytest.skip('Set max_allowed_packet to bigger than 17MB') else: t = 'a' * (16 * 1024 * 1024) await cur.execute("SELECT '" + t + "'") r = await cur.fetchone() assert r[0] == t @pytest.mark.run_loop async def test_escape_string(connection_creator): con = await connection_creator() cur = await con.cursor() assert con.escape("foo'bar") == "'foo\\'bar'" # literal is alias for escape assert con.literal("foo'bar") == "'foo\\'bar'" await cur.execute("SET sql_mode='NO_BACKSLASH_ESCAPES'") assert con.escape("foo'bar") == "'foo''bar'" @pytest.mark.run_loop async def test_sql_mode_param(connection_creator): con = await connection_creator(sql_mode='NO_BACKSLASH_ESCAPES') assert con.escape("foo'bar") == "'foo''bar'" @pytest.mark.run_loop async def test_init_param(connection_creator): init_command = "SET sql_mode='NO_BACKSLASH_ESCAPES';" con = await connection_creator(init_command=init_command) assert con.escape("foo'bar") == "'foo''bar'" @pytest.mark.run_loop async def test_autocommit(connection_creator): con = await connection_creator() assert con.get_autocommit() is False cur = await con.cursor() await cur.execute("SET AUTOCOMMIT=1") assert con.get_autocommit() is True await con.autocommit(False) assert con.get_autocommit() is False await cur.execute("SELECT @@AUTOCOMMIT") r = await cur.fetchone() assert r[0] == 0 @pytest.mark.run_loop async def test_select_db(connection_creator): con = await connection_creator() current_db = 
'test_pymysql' other_db = 'test_pymysql2' cur = await con.cursor() await cur.execute('SELECT database()') r = await cur.fetchone() assert r[0] == current_db await con.select_db(other_db) await cur.execute('SELECT database()') r = await cur.fetchone() assert r[0] == other_db @pytest.mark.run_loop async def test_connection_gone_away(connection_creator): # test # http://dev.mysql.com/doc/refman/5.0/en/gone-away.html # http://dev.mysql.com/doc/refman/5.0/en/error-messages-client.html # error_cr_server_gone_error conn = await connection_creator() cur = await conn.cursor() await cur.execute("SET wait_timeout=1") await asyncio.sleep(2) with pytest.raises(aiomysql.OperationalError) as cm: await cur.execute("SELECT 1+1") # error occures while reading, not writing because of socket buffer. # assert cm.exception.args[0] == 2006 assert cm.value.args[0] in (2006, 2013) conn.close() @pytest.mark.run_loop async def test_connection_info_methods(connection_creator, mysql_params): conn = await connection_creator() # trhead id is int assert isinstance(conn.thread_id(), int) assert conn.character_set_name() in ('latin1', 'utf8mb4') if "unix_socket" in mysql_params: assert mysql_params["unix_socket"] in conn.get_host_info() else: assert str(conn.port) in conn.get_host_info() assert isinstance(conn.get_server_info(), str) # protocol id is int assert isinstance(conn.get_proto_info(), int) conn.close() @pytest.mark.run_loop async def test_connection_set_charset(connection_creator): conn = await connection_creator() assert conn.character_set_name(), ('latin1' in 'utf8mb4') await conn.set_charset('utf8') assert conn.character_set_name() == 'utf8' @pytest.mark.run_loop async def test_connection_ping(connection_creator): conn = await connection_creator() await conn.ping() assert conn.closed is False conn.close() await conn.ping() assert conn.closed is False @pytest.mark.run_loop async def test_connection_properties(connection_creator, mysql_params): conn = await connection_creator() if "unix_socket" in mysql_params: assert conn.unix_socket == mysql_params["unix_socket"] else: assert conn.host == mysql_params['host'] assert conn.port == mysql_params['port'] assert conn.user == mysql_params['user'] assert conn.db == mysql_params['db'] assert conn.echo is False conn.close() @pytest.mark.run_loop async def test_connection_double_ensure_closed(connection_creator): conn = await connection_creator() assert conn.closed is False await conn.ensure_closed() assert conn.closed is True await conn.ensure_closed() assert conn.closed is True @pytest.mark.run_loop @pytest.mark.usefixtures("disable_gc") async def test___del__(connection_creator): conn = await connection_creator() with pytest.warns(ResourceWarning): del conn gc.collect() @pytest.mark.run_loop async def test_previous_cursor_not_closed(connection_creator): conn = await connection_creator() cur1 = await conn.cursor() await cur1.execute("SELECT 1; SELECT 2") cur2 = await conn.cursor() await cur2.execute("SELECT 3;") resp = await cur2.fetchone() assert resp[0] == 3 @pytest.mark.run_loop async def test_commit_during_multi_result(connection_creator): conn = await connection_creator() cur = await conn.cursor() await cur.execute("SELECT 1; SELECT 2;") await conn.commit() await cur.execute("SELECT 3;") resp = await cur.fetchone() assert resp[0] == 3 aiomysql-0.3.2/tests/test_cursor.py000066400000000000000000000322341507601712200174470ustar00rootroot00000000000000import asyncio import pytest from aiomysql import ProgrammingError, Cursor, InterfaceError, OperationalError from 
aiomysql.cursors import RE_INSERT_VALUES async def _prepare(conn): cur = await conn.cursor() await cur.execute("DROP TABLE IF EXISTS tbl;") await cur.execute("""CREATE TABLE tbl ( id MEDIUMINT NOT NULL AUTO_INCREMENT, name VARCHAR(255) NOT NULL, PRIMARY KEY (id));""") for i in [(1, 'a'), (2, 'b'), (3, 'c')]: await cur.execute("INSERT INTO tbl VALUES(%s, %s)", i) await cur.execute("DROP TABLE IF EXISTS tbl2") await cur.execute("""CREATE TABLE tbl2 (id int, name varchar(255))""") await conn.commit() async def _prepare_procedure(conn): cur = await conn.cursor() await cur.execute("DROP PROCEDURE IF EXISTS myinc;") await cur.execute("""CREATE PROCEDURE myinc(p1 INT) BEGIN SELECT p1 + 1; END """) await conn.commit() @pytest.mark.run_loop async def test_description(connection_creator): conn = await connection_creator() await _prepare(conn) cur = await conn.cursor() assert cur.description is None await cur.execute('SELECT * from tbl;') assert len(cur.description) == 2, \ 'cursor.description describes too many columns' assert len(cur.description[0]) == 7, \ 'cursor.description[x] tuples must have 7 elements' assert cur.description[0][0].lower() == 'id', \ 'cursor.description[x][0] must return column name' assert cur.description[1][0].lower() == 'name', \ 'cursor.description[x][0] must return column name' # Make sure self.description gets reset, cursor should be # set to None in case of none resulting queries like DDL await cur.execute('DROP TABLE IF EXISTS foobar;') assert cur.description is None @pytest.mark.run_loop async def test_cursor_properties(connection_creator): conn = await connection_creator() cur = await conn.cursor() assert cur.connection is conn cur.setinputsizes() cur.setoutputsizes() assert cur.echo == conn.echo @pytest.mark.run_loop async def test_scroll_relative(connection_creator): conn = await connection_creator() await _prepare(conn) cur = await conn.cursor() await cur.execute('SELECT * FROM tbl;') await cur.scroll(1) ret = await cur.fetchone() assert (2, 'b') == ret @pytest.mark.run_loop async def test_scroll_absolute(connection_creator): conn = await connection_creator() await _prepare(conn) cur = await conn.cursor() await cur.execute('SELECT * FROM tbl;') await cur.scroll(2, mode='absolute') ret = await cur.fetchone() assert (3, 'c') == ret @pytest.mark.run_loop async def test_scroll_errors(connection_creator): conn = await connection_creator() await _prepare(conn) cur = await conn.cursor() with pytest.raises(ProgrammingError): await cur.scroll(2, mode='absolute') cur = await conn.cursor() await cur.execute('SELECT * FROM tbl;') with pytest.raises(ProgrammingError): await cur.scroll(2, mode='not_valid_mode') @pytest.mark.run_loop async def test_scroll_index_error(connection_creator): conn = await connection_creator() await _prepare(conn) cur = await conn.cursor() await cur.execute('SELECT * FROM tbl;') with pytest.raises(IndexError): await cur.scroll(1000) @pytest.mark.run_loop async def test_close(connection_creator): conn = await connection_creator() cur = await conn.cursor() await cur.close() assert cur.closed is True with pytest.raises(ProgrammingError): await cur.execute('SELECT 1') # try to close for second time await cur.close() @pytest.mark.run_loop async def test_arraysize(connection_creator): conn = await connection_creator() cur = await conn.cursor() assert 1 == cur.arraysize cur.arraysize = 10 assert 10 == cur.arraysize @pytest.mark.run_loop async def test_rows(connection_creator): conn = await connection_creator() await _prepare(conn) cur = await conn.cursor() 
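    # The SELECT below reports rowcount == 3 (the rows inserted by _prepare());
    # rownumber starts at 0 and advances as rows are fetched, and lastrowid is
    # only populated after the INSERT further down.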
await cur.execute('SELECT * from tbl') assert 3 == cur.rowcount assert 0 == cur.rownumber await cur.fetchone() assert 1 == cur.rownumber assert cur.lastrowid is None await cur.execute('INSERT INTO tbl VALUES (%s, %s)', (4, 'd')) assert 0 != cur.lastrowid await conn.commit() @pytest.mark.run_loop async def test_callproc(connection_creator): conn = await connection_creator() await _prepare_procedure(conn) cur = await conn.cursor() await cur.callproc('myinc', [1]) ret = await cur.fetchone() assert (2,) == ret await cur.close() with pytest.raises(ProgrammingError): await cur.callproc('myinc', [1]) conn.close() @pytest.mark.run_loop async def test_fetchone_no_result(connection_creator): # test a fetchone() with no rows conn = await connection_creator() c = await conn.cursor() await c.execute("create table test_nr (b varchar(32))") try: data = "pymysql" await c.execute("insert into test_nr (b) values (%s)", (data,)) r = await c.fetchone() assert r is None finally: await c.execute("drop table test_nr") @pytest.mark.run_loop async def test_fetchmany_no_result(connection_creator): conn = await connection_creator() cur = await conn.cursor() await cur.execute('DROP TABLE IF EXISTS foobar;') r = await cur.fetchmany() assert [] == r @pytest.mark.run_loop async def test_fetchall_no_result(connection_creator): # test a fetchone() with no rows conn = await connection_creator() cur = await conn.cursor() await cur.execute('DROP TABLE IF EXISTS foobar;') r = await cur.fetchall() assert [] == r @pytest.mark.run_loop async def test_fetchall_with_scroll(connection_creator): conn = await connection_creator() await _prepare(conn) cur = await conn.cursor() await cur.execute('SELECT * FROM tbl;') await cur.scroll(1) ret = await cur.fetchall() assert ((2, 'b'), (3, 'c')) == ret @pytest.mark.run_loop async def test_aggregates(connection_creator): """ test aggregate functions """ conn = await connection_creator() c = await conn.cursor() try: await c.execute('create table test_aggregates (i integer)') for i in range(0, 10): await c.execute( 'insert into test_aggregates (i) values (%s)', (i,)) await c.execute('select sum(i) from test_aggregates') r, = await c.fetchone() assert sum(range(0, 10)) == r finally: await c.execute('drop table test_aggregates') @pytest.mark.run_loop async def test_single_tuple(connection_creator): """ test a single tuple """ conn = await connection_creator() c = await conn.cursor() try: await c.execute( "create table mystuff (id integer primary key)") await c.execute("insert into mystuff (id) values (1)") await c.execute("insert into mystuff (id) values (2)") await c.execute("select id from mystuff where id in %s", ((1,),)) r = await c.fetchall() assert [(1,)] == list(r) finally: await c.execute("drop table mystuff") @pytest.mark.run_loop async def test_executemany(connection_creator): conn = await connection_creator() await _prepare(conn) cur = await conn.cursor() assert cur.description is None args = [1, 2, 3] row_count = await cur.executemany( 'SELECT * FROM tbl WHERE id = %s;', args) assert row_count == 3 r = await cur.fetchall() # TODO: if this right behaviour assert ((3, 'c'),) == r # calling execute many without args row_count = await cur.executemany('SELECT 1;', ()) assert row_count is None @pytest.mark.run_loop async def test_custom_cursor(connection_creator): class MyCursor(Cursor): pass conn = await connection_creator() cur = await conn.cursor(MyCursor) assert isinstance(cur, MyCursor) await cur.execute("SELECT 42;") (r, ) = await cur.fetchone() assert r == 42 @pytest.mark.run_loop 
async def test_custom_cursor_not_cursor_subclass(connection_creator): class MyCursor2: pass conn = await connection_creator() with pytest.raises(TypeError): await conn.cursor(MyCursor2) @pytest.mark.run_loop async def test_morgify(connection_creator): conn = await connection_creator() cur = await conn.cursor() pairs = [(1, 'a'), (2, 'b'), (3, 'c')] sql = "INSERT INTO tbl VALUES(%s, %s)" results = [cur.mogrify(sql, p) for p in pairs] expected = ["INSERT INTO tbl VALUES(1, 'a')", "INSERT INTO tbl VALUES(2, 'b')", "INSERT INTO tbl VALUES(3, 'c')"] assert results == expected @pytest.mark.run_loop async def test_execute_cancel(connection_creator): conn = await connection_creator() cur = await conn.cursor() # Cancel a cursor in the middle of execution, before it could # read even the first packet (SLEEP assures the timings) task = asyncio.ensure_future(cur.execute( "SELECT 1 as id, SLEEP(0.1) as xxx")) await asyncio.sleep(0.05) task.cancel() try: await task except asyncio.CancelledError: pass with pytest.raises(InterfaceError): await conn.cursor() @pytest.mark.run_loop async def test_execute_percentage(connection_creator): # %% in column set conn = await connection_creator() async with conn.cursor() as cur: await cur.execute("DROP TABLE IF EXISTS percent_test") await cur.execute("""\ CREATE TABLE percent_test ( `A%` INTEGER, `B%` INTEGER)""") q = "INSERT INTO percent_test (`A%%`, `B%%`) VALUES (%s, %s)" await cur.execute(q, (3, 4)) @pytest.mark.run_loop async def test_executemany_percentage(connection_creator): # %% in column set conn = await connection_creator() async with conn.cursor() as cur: await cur.execute("DROP TABLE IF EXISTS percent_test") await cur.execute("""\ CREATE TABLE percent_test ( `A%` INTEGER, `B%` INTEGER)""") q = "INSERT INTO percent_test (`A%%`, `B%%`) VALUES (%s, %s)" assert RE_INSERT_VALUES.match(q) is not None await cur.executemany(q, [(3, 4), (5, 6)]) assert cur._last_executed.endswith(b"(3, 4),(5, 6)"), \ "executemany with %% not in one query" @pytest.mark.run_loop async def test_max_execution_time(mysql_server, connection_creator): conn = await connection_creator() await _prepare(conn) async with conn.cursor() as cur: # MySQL MAX_EXECUTION_TIME takes ms # MariaDB max_statement_time takes seconds as int/float, introduced in 10.1 # this will sleep 0.01 seconds per row if mysql_server["db_type"] == "mysql": sql = """ SELECT /*+ MAX_EXECUTION_TIME(2000) */ name, sleep(0.01) FROM tbl """ else: sql = """ SET STATEMENT max_statement_time=2 FOR SELECT name, sleep(0.01) FROM tbl """ await cur.execute(sql) # unlike SSCursor, Cursor returns a tuple of tuples here assert (await cur.fetchall()) == ( ("a", 0), ("b", 0), ("c", 0), ) if mysql_server["db_type"] == "mysql": sql = """ SELECT /*+ MAX_EXECUTION_TIME(2000) */ name, sleep(0.01) FROM tbl """ else: sql = """ SET STATEMENT max_statement_time=2 FOR SELECT name, sleep(0.01) FROM tbl """ await cur.execute(sql) assert (await cur.fetchone()) == ("a", 0) # this discards the previous unfinished query await cur.execute("SELECT 1") assert (await cur.fetchone()) == (1,) if mysql_server["db_type"] == "mysql": sql = """ SELECT /*+ MAX_EXECUTION_TIME(1) */ name, sleep(1) FROM tbl """ else: sql = """ SET STATEMENT max_statement_time=0.001 FOR SELECT name, sleep(1) FROM tbl """ with pytest.raises(OperationalError) as cm: # in a buffered cursor this should reliably raise an # OperationalError await cur.execute(sql) if mysql_server["db_type"] == "mysql": # this constant was only introduced in MySQL 5.7, not sure # what was returned before, 
may have been ER_QUERY_INTERRUPTED # this constant is pending a new PyMySQL release # assert cm.value.args[0] == pymysql.constants.ER.QUERY_TIMEOUT assert cm.value.args[0] == 3024 else: # this constant is pending a new PyMySQL release # assert cm.value.args[0] == pymysql.constants.ER.STATEMENT_TIMEOUT assert cm.value.args[0] == 1969 # connection should still be fine at this point await cur.execute("SELECT 1") assert (await cur.fetchone()) == (1,) aiomysql-0.3.2/tests/test_deserialize_cursor.py000066400000000000000000000132601507601712200220250ustar00rootroot00000000000000import copy import aiomysql.cursors import pytest BOB = ("bob", 21, {"k1": "pretty", "k2": [18, 25]}) JIM = ("jim", 56, {"k1": "rich", "k2": [20, 60]}) FRED = ("fred", 100, {"k1": "longevity", "k2": [100, 160]}) @pytest.fixture() async def prepare(connection): havejson = True c = await connection.cursor(aiomysql.cursors.DeserializationCursor) # create a table ane some data to query await c.execute("drop table if exists deserialize_cursor") await c.execute("select VERSION()") v = await c.fetchone() version, *db_type = v[0].split('-', 1) version = float(".".join(version.split('.', 2)[:2])) ismariadb = db_type and 'mariadb' in db_type[0].lower() if ismariadb or version < 5.7: await c.execute( """CREATE TABLE deserialize_cursor (name char(20), age int , claim text)""") havejson = False else: await c.execute( """CREATE TABLE deserialize_cursor (name char(20), age int , claim json)""") data = [("bob", 21, '{"k1": "pretty", "k2": [18, 25]}'), ("jim", 56, '{"k1": "rich", "k2": [20, 60]}'), ("fred", 100, '{"k1": "longevity", "k2": [100, 160]}')] await c.executemany("insert into deserialize_cursor values " "(%s,%s,%s)", data) return havejson @pytest.mark.run_loop async def test_deserialize_cursor(prepare, connection): havejson = await prepare if not havejson: return bob, jim, fred = copy.deepcopy(BOB), copy.deepcopy( JIM), copy.deepcopy(FRED) # all assert test compare to the structure as would come # out from MySQLdb conn = connection c = await conn.cursor(aiomysql.cursors.DeserializationCursor) # pull back the single row dict for bob and check await c.execute("SELECT * from deserialize_cursor " "where name='bob'") r = await c.fetchone() assert bob == r, "fetchone via DeserializeCursor failed" # same again, but via fetchall => tuple) await c.execute("SELECT * from deserialize_cursor " "where name='bob'") r = await c.fetchall() assert [bob] == r, \ "fetch a 1 row result via fetchall failed via DeserializeCursor" # get all 3 row via fetchall await c.execute("SELECT * from deserialize_cursor") r = await c.fetchall() assert [bob, jim, fred] == r, "fetchall failed via DictCursor" # get all 2 row via fetchmany await c.execute("SELECT * from deserialize_cursor") r = await c.fetchmany(2) assert [bob, jim] == r, "fetchmany failed via DictCursor" await c.execute('commit') @pytest.mark.run_loop async def test_deserialize_cursor_low_version(prepare, connection): havejson = await prepare if havejson: return bob = ("bob", 21, '{"k1": "pretty", "k2": [18, 25]}') jim = ("jim", 56, '{"k1": "rich", "k2": [20, 60]}') fred = ("fred", 100, '{"k1": "longevity", "k2": [100, 160]}') # all assert test compare to the structure as would come # out from MySQLdb conn = connection c = await conn.cursor(aiomysql.cursors.DeserializationCursor) # pull back the single row dict for bob and check await c.execute("SELECT * from deserialize_cursor where name='bob'") r = await c.fetchone() assert bob == r, "fetchone via DeserializeCursor failed" # same again, but via 
fetchall => tuple) await c.execute("SELECT * from deserialize_cursor " "where name='bob'") r = await c.fetchall() assert [bob] == r, \ "fetch a 1 row result via fetchall failed via DeserializeCursor" # get all 3 row via fetchall await c.execute("SELECT * from deserialize_cursor") r = await c.fetchall() assert [bob, jim, fred] == r, "fetchall failed via DictCursor" # get all 2 row via fetchmany await c.execute("SELECT * from deserialize_cursor") r = await c.fetchmany(2) assert [bob, jim] == r, "fetchmany failed via DictCursor" await c.execute('commit') @pytest.mark.run_loop async def test_deserializedictcursor(prepare, connection): havejson = await prepare if not havejson: return bob = {'name': 'bob', 'age': 21, 'claim': {"k1": "pretty", "k2": [18, 25]}} conn = connection c = await conn.cursor(aiomysql.cursors.DeserializationCursor, aiomysql.cursors.DictCursor) await c.execute("SELECT * from deserialize_cursor " "where name='bob'") r = await c.fetchall() assert [bob] == r, \ "fetch a 1 row result via fetchall failed via DeserializationCursor" @pytest.mark.run_loop async def test_ssdeserializecursor(prepare, connection): havejson = await prepare if not havejson: return conn = connection c = await conn.cursor(aiomysql.cursors.SSCursor, aiomysql.cursors.DeserializationCursor) await c.execute("SELECT * from deserialize_cursor " "where name='bob'") r = await c.fetchall() assert [BOB] == r, \ "fetch a 1 row result via fetchall failed via DeserializationCursor" @pytest.mark.run_loop async def test_ssdeserializedictcursor(prepare, connection): havejson = await prepare if not havejson: return bob = {'name': 'bob', 'age': 21, 'claim': {"k1": "pretty", "k2": [18, 25]}} conn = connection c = await conn.cursor(aiomysql.cursors.SSCursor, aiomysql.cursors.DeserializationCursor, aiomysql.cursors.DictCursor) await c.execute("SELECT * from deserialize_cursor " "where name='bob'") r = await c.fetchall() assert [bob] == r, \ "fetch a 1 row result via fetchall failed via DeserializationCursor" aiomysql-0.3.2/tests/test_dictcursor.py000066400000000000000000000064531507601712200203170ustar00rootroot00000000000000import datetime import pytest import aiomysql.cursors BOB = {'name': 'bob', 'age': 21, 'DOB': datetime.datetime(1990, 2, 6, 23, 4, 56)} JIM = {'name': 'jim', 'age': 56, 'DOB': datetime.datetime(1955, 5, 9, 13, 12, 45)} FRED = {'name': 'fred', 'age': 100, 'DOB': datetime.datetime(1911, 9, 12, 1, 1, 1)} CURSOR_TYPE = aiomysql.cursors.DictCursor async def prepare(conn): c = await conn.cursor(CURSOR_TYPE) # create a table ane some data to query await c.execute("drop table if exists dictcursor") await c.execute( """CREATE TABLE dictcursor (name char(20), age int , DOB datetime)""") data = [("bob", 21, "1990-02-06 23:04:56"), ("jim", 56, "1955-05-09 13:12:45"), ("fred", 100, "1911-09-12 01:01:01")] await c.executemany("insert into dictcursor values " "(%s,%s,%s)", data) @pytest.mark.run_loop async def test_dictcursor(connection): conn = connection await prepare(connection) bob, jim, fred = BOB.copy(), JIM.copy(), FRED.copy() # all assert test compare to the structure as would come # out from MySQLdb c = await conn.cursor(CURSOR_TYPE) # try an update which should return no rows await c.execute("update dictcursor set age=20 where name='bob'") bob['age'] = 20 # pull back the single row dict for bob and check await c.execute("SELECT * from dictcursor where name='bob'") r = await c.fetchone() assert bob == r, "fetchone via DictCursor failed" # same again, but via fetchall => tuple) await c.execute("SELECT * from 
dictcursor where name='bob'") r = await c.fetchall() assert [bob] == r, \ "fetch a 1 row result via fetchall failed via DictCursor" # get all 3 row via fetchall await c.execute("SELECT * from dictcursor") r = await c.fetchall() assert [bob, jim, fred] == r, "fetchall failed via DictCursor" # get all 2 row via fetchmany await c.execute("SELECT * from dictcursor") r = await c.fetchmany(2) assert [bob, jim] == r, "fetchmany failed via DictCursor" await c.execute('commit') @pytest.mark.run_loop async def test_custom_dict(connection): conn = connection await prepare(connection) class MyDict(dict): pass class MyDictCursor(CURSOR_TYPE): dict_type = MyDict keys = ['name', 'age', 'DOB'] bob = MyDict([(k, BOB[k]) for k in keys]) jim = MyDict([(k, JIM[k]) for k in keys]) fred = MyDict([(k, FRED[k]) for k in keys]) cur = await conn.cursor(MyDictCursor) await cur.execute("SELECT * FROM dictcursor WHERE name='bob'") r = await cur.fetchone() assert bob == r, "fetchone() returns MyDictCursor" await cur.execute("SELECT * FROM dictcursor") r = await cur.fetchall() assert [bob, jim, fred] == r, "fetchall failed via MyDictCursor" await cur.execute("SELECT * FROM dictcursor") r = await cur.fetchmany(2) assert [bob, jim] == r, "list failed via MyDictCursor" @pytest.mark.run_loop async def test_ssdictcursor(connection): conn = connection await prepare(connection) c = await conn.cursor(aiomysql.cursors.SSDictCursor) await c.execute("SELECT * from dictcursor where name='bob'") r = await c.fetchall() assert [BOB] == r, \ "fetch a 1 row result via fetchall failed via DictCursor" aiomysql-0.3.2/tests/test_issues.py000066400000000000000000000367631507601712200174600ustar00rootroot00000000000000import datetime import pytest from pymysql.err import Warning import aiomysql @pytest.mark.run_loop async def test_issue_3(connection): """ undefined methods datetime_or_None, date_or_None """ conn = connection c = await conn.cursor() await c.execute("drop table if exists issue3") await c.execute( "create table issue3 (d date, t time, dt datetime, ts timestamp)") try: await c.execute( "insert into issue3 (d, t, dt, ts) values (%s,%s,%s,%s)", (None, None, None, None)) await c.execute("select d from issue3") r = await c.fetchone() assert r[0] is None await c.execute("select t from issue3") r = await c.fetchone() assert r[0] is None await c.execute("select dt from issue3") r = await c.fetchone() assert r[0] is None await c.execute("select ts from issue3") r = await c.fetchone() assert type(r[0]) in (type(None), datetime.datetime) finally: await c.execute("drop table issue3") @pytest.mark.run_loop async def test_issue_4(connection): """ can't retrieve TIMESTAMP fields """ conn = connection c = await conn.cursor() await c.execute("drop table if exists issue4") await c.execute("create table issue4 (ts timestamp)") try: await c.execute("insert into issue4 (ts) values (now())") await c.execute("select ts from issue4") r = await c.fetchone() assert isinstance(r[0], datetime.datetime) finally: await c.execute("drop table issue4") @pytest.mark.run_loop async def test_issue_5(connection): """ query on information_schema.tables fails """ conn = connection cur = await conn.cursor() await cur.execute("select * from information_schema.tables") @pytest.mark.run_loop async def test_issue_6(connection_creator): # test for exception: TypeError: ord() expected a character, # but string of length 0 found conn = await connection_creator(db='mysql') c = await conn.cursor() assert conn.db == 'mysql' await c.execute("select * from user") await 
conn.ensure_closed()


@pytest.mark.run_loop
async def test_issue_8(connection):
    """ Primary Key and Index error when selecting data """
    conn = connection
    c = await conn.cursor()
    await c.execute("drop table if exists test")
    await c.execute("""CREATE TABLE `test` (
        `station` int(10) NOT NULL DEFAULT '0',
        `dh` datetime NOT NULL DEFAULT '2020-04-25 22:39:12',
        `echeance` int(1) NOT NULL DEFAULT '0',
        `me` double DEFAULT NULL,
        `mo` double DEFAULT NULL,
        PRIMARY KEY (`station`,`dh`,`echeance`))
        ENGINE=MyISAM DEFAULT CHARSET=latin1;""")
    try:
        await c.execute("SELECT * FROM test")
        assert 0 == c.rowcount
        await c.execute(
            "ALTER TABLE `test` ADD INDEX `idx_station` (`station`)")
        await c.execute("SELECT * FROM test")
        assert 0 == c.rowcount
    finally:
        await c.execute("drop table test")


@pytest.mark.run_loop
async def test_issue_13(connection):
    """ can't handle large result fields """
    conn = connection
    cur = await conn.cursor()
    await cur.execute("drop table if exists issue13")
    try:
        await cur.execute("create table issue13 (t text)")
        # ticket says 18k
        size = 18 * 1024
        await cur.execute("insert into issue13 (t) values (%s)",
                          ("x" * size,))
        await cur.execute("select t from issue13")
        # use assertTrue so that obscenely huge error messages don't print
        r = await cur.fetchone()
        assert "x" * size == r[0]
    finally:
        await cur.execute("drop table issue13")


@pytest.mark.run_loop
async def test_issue_15(connection):
    """ query should be expanded before performing character encoding """
    conn = connection
    c = await conn.cursor()
    await c.execute("drop table if exists issue15")
    await c.execute("create table issue15 (t varchar(32))")
    try:
        await c.execute("insert into issue15 (t) values (%s)",
                        ('\xe4\xf6\xfc',))
        await c.execute("select t from issue15")
        r = await c.fetchone()
        assert '\xe4\xf6\xfc' == r[0]
    finally:
        await c.execute("drop table issue15")


@pytest.mark.run_loop
async def test_issue_16(connection):
    """ Patch for string and tuple escaping """
    conn = connection
    c = await conn.cursor()
    await c.execute("drop table if exists issue16")
    await c.execute("create table issue16 (name varchar(32) "
                    "primary key, email varchar(32))")
    try:
        await c.execute("insert into issue16 (name, email) values "
                        "('pete', 'floydophone')")
        await c.execute("select email from issue16 where name=%s",
                        ("pete",))
        r = await c.fetchone()
        assert "floydophone" == r[0]
    finally:
        await c.execute("drop table issue16")


@pytest.mark.skip(
    "test_issue_17() requires a custom, legacy MySQL configuration and "
    "will not be run.")
@pytest.mark.run_loop
async def test_issue_17(connection, connection_creator, mysql_params):
    """ could not connect to mysql using a password """
    conn = connection
    c = await conn.cursor()
    db = mysql_params['db']
    # grant access to a table to a user with a password
    try:
        await c.execute("drop table if exists issue17")
        await c.execute(
            "create table issue17 (x varchar(32) primary key)")
        await c.execute(
            "insert into issue17 (x) values ('hello, world!')")
        await c.execute("grant all privileges on %s.issue17 to "
                        "'issue17user'@'%%' identified by '1234'" % db)
        await conn.commit()

        conn2 = await connection_creator(user="issue17user", passwd="1234")
        c2 = await conn2.cursor()
        await c2.execute("select x from issue17")
        r = await c2.fetchone()
        assert "hello, world!" == r[0]
    finally:
        await c.execute("drop table issue17")


@pytest.mark.run_loop
async def test_issue_34(connection_creator):
    try:
        await connection_creator(host="localhost", port=1237,
                                 user="root", unix_socket=None)
        pytest.fail()
    except aiomysql.OperationalError as e:
        assert 2003 == e.args[0]
    except Exception:
pytest.fail() @pytest.mark.run_loop async def test_issue_33(connection_creator): conn = await connection_creator(charset='utf8') c = await conn.cursor() try: await c.execute( b"drop table if exists hei\xc3\x9fe".decode("utf8")) await c.execute( b"create table hei\xc3\x9fe (name varchar(32))".decode("utf8")) await c.execute(b"insert into hei\xc3\x9fe (name) " b"values ('Pi\xc3\xb1ata')". decode("utf8")) await c.execute( b"select name from hei\xc3\x9fe".decode("utf8")) r = await c.fetchone() assert b"Pi\xc3\xb1ata".decode("utf8") == r[0] finally: await c.execute(b"drop table hei\xc3\x9fe".decode("utf8")) @pytest.mark.skip("This test requires manual intervention") @pytest.mark.run_loop async def test_issue_35(connection): conn = connection c = await conn.cursor() print("sudo killall -9 mysqld within the next 10 seconds") try: await c.execute("select sleep(10)") pytest.fail() except aiomysql.OperationalError as e: assert 2013 == e.args[0] @pytest.mark.run_loop async def test_issue_36(connection_creator): conn = await connection_creator() c = await conn.cursor() # kill connections[0] await c.execute("show processlist") kill_id = None rows = await c.fetchall() for row in rows: id = row[0] info = row[7] if info == "show processlist": kill_id = id break try: # now nuke the connection await conn.kill(kill_id) # make sure this connection has broken await c.execute("show tables") pytest.fail() except Exception: pass # check the process list from the other connection conn2 = await connection_creator() c = await conn2.cursor() await c.execute("show processlist") rows = await c.fetchall() ids = [row[0] for row in rows] try: assert kill_id not in ids except AssertionError: # FIXME: figure out why this is failing pytest.xfail("https://github.com/aio-libs/aiomysql/issues/714") @pytest.mark.run_loop async def test_issue_37(connection): conn = connection c = await conn.cursor() assert 1 == (await c.execute("SELECT @foo")) r = await c.fetchone() assert (None,) == r assert 0 == (await c.execute("SET @foo = 'bar'")) await c.execute("set @foo = 'bar'") @pytest.mark.run_loop async def test_issue_38(connection): conn = connection c = await conn.cursor() # reduced size for most default mysql installs datum = "a" * 1024 * 1023 try: await c.execute("drop table if exists issue38") await c.execute( "create table issue38 (id integer, data mediumblob)") await c.execute("insert into issue38 values (1, %s)", (datum,)) finally: await c.execute("drop table issue38") @pytest.mark.run_loop async def disabled_test_issue_54(connection): conn = connection c = await conn.cursor() await c.execute("drop table if exists issue54") big_sql = "select * from issue54 where " big_sql += " and ".join("%d=%d" % (i, i) for i in range(0, 100000)) try: await c.execute( "create table issue54 (id integer primary key)") await c.execute("insert into issue54 (id) values (7)") await c.execute(big_sql) r = await c.fetchone() assert 7 == r[0] finally: await c.execute("drop table issue54") @pytest.mark.run_loop async def test_issue_66(connection): """ 'Connection' object has no attribute 'insert_id' """ conn = connection c = await conn.cursor() assert 0 == conn.insert_id() try: await c.execute("drop table if exists issue66") await c.execute("create table issue66 (id integer primary " "key auto_increment, x integer)") await c.execute("insert into issue66 (x) values (1)") await c.execute("insert into issue66 (x) values (1)") assert 2 == conn.insert_id() finally: await c.execute("drop table issue66") @pytest.mark.run_loop async def 
test_issue_79(connection): """ Duplicate field overwrites the previous one in the result of DictCursor """ conn = connection c = await conn.cursor(aiomysql.cursors.DictCursor) await c.execute("drop table if exists a") await c.execute("drop table if exists b") await c.execute("""CREATE TABLE a (id int, value int)""") await c.execute("""CREATE TABLE b (id int, value int)""") a = (1, 11) b = (1, 22) try: await c.execute("insert into a values (%s, %s)", a) await c.execute("insert into b values (%s, %s)", b) await c.execute("SELECT * FROM a inner join b on a.id = b.id") r, *_ = await c.fetchall() assert r['id'] == 1 assert r['value'] == 11 assert r['b.value'] == 22 finally: await c.execute("drop table a") await c.execute("drop table b") @pytest.mark.run_loop async def test_issue_95(connection): """ Leftover trailing OK packet for "CALL my_sp" queries """ conn = connection cur = await conn.cursor() await cur.execute("DROP PROCEDURE IF EXISTS `foo`") await cur.execute("""CREATE PROCEDURE `foo` () BEGIN SELECT 1; END""") try: await cur.execute("""CALL foo()""") await cur.execute("""SELECT 1""") r = await cur.fetchone() assert r[0] == 1 finally: await cur.execute("DROP PROCEDURE IF EXISTS `foo`") @pytest.mark.run_loop async def test_issue_114(connection_creator): """ autocommit is not set after reconnecting with ping() """ conn = await connection_creator(charset="utf8") await conn.autocommit(False) c = await conn.cursor() await c.execute("""select @@autocommit;""") r = await c.fetchone() assert not r[0] await conn.ensure_closed() await conn.ping() await c.execute("""select @@autocommit;""") r = await c.fetchone() assert not r[0] await conn.ensure_closed() # Ensure autocommit() is still working conn = await connection_creator(charset="utf8") c = await conn.cursor() await c.execute("""select @@autocommit;""") r = await c.fetchone() assert not r[0] await conn.ensure_closed() await conn.ping() await conn.autocommit(True) await c.execute("""select @@autocommit;""") r = await c.fetchone() assert r[0] await conn.ensure_closed() @pytest.mark.run_loop async def test_issue_175(connection): """ The number of fields returned by server is read in wrong way """ conn = connection cur = await conn.cursor() for length in (200, 300): cols = ', '.join(f'c{i} integer' for i in range(length)) sql = f'create table test_field_count ({cols})' try: await cur.execute(sql) await cur.execute('select * from test_field_count') assert len(cur.description) == length finally: await cur.execute('drop table if exists test_field_count') # MySQL will get you to renegotiate if sent a cleartext password @pytest.mark.run_loop async def test_issue_323(mysql_server, loop, recwarn): async with aiomysql.create_pool(**mysql_server['conn_params'], loop=loop) as pool: async with pool.acquire() as conn: async with conn.cursor() as cur: drop_db = "DROP DATABASE IF EXISTS bugtest;" await cur.execute(drop_db) create_db = "CREATE DATABASE bugtest;" await cur.execute(create_db) create_table = """CREATE TABLE IF NOT EXISTS `bugtest`.`testtable` ( `id` INT UNSIGNED NOT NULL AUTO_INCREMENT, `bindata` VARBINARY(200) NOT NULL, PRIMARY KEY (`id`) );""" await cur.execute(create_table) try: recwarn.clear() async with conn.cursor() as cur: await cur.execute("INSERT INTO `bugtest`.`testtable` " "(bindata) VALUES (%s);", (b'\xB0\x17',)) warnings = [warn for warn in recwarn.list if warn.category is Warning] assert len(warnings) == 0, \ "Got unexpected MySQL warning {}".\ format(' '.join(str(x) for x in warnings)) await cur.execute("SELECT * FROM 
`bugtest`.`testtable`;") rows = await cur.fetchall() assert len(rows) == 1, "Table should have 1 row" finally: async with conn.cursor() as cur: await cur.execute("DELETE FROM `bugtest`.`testtable`;") # https://github.com/aio-libs/aiomysql/issues/792 @pytest.mark.run_loop async def test_issue_792(connection_creator): with pytest.raises(aiomysql.OperationalError) as exc_info: await connection_creator(db="does_not_exist") assert exc_info.value.args[0] == 1049 assert exc_info.value.args[1] == "Unknown database 'does_not_exist'" aiomysql-0.3.2/tests/test_load_local.py000066400000000000000000000101471507601712200202220ustar00rootroot00000000000000import builtins import os from unittest.mock import patch, MagicMock import aiomysql import pytest from pymysql.constants import CLIENT from pymysql.err import OperationalError @pytest.fixture def table_local_file(connection, loop): async def prepare_table(conn): c = await conn.cursor() await c.execute("DROP TABLE IF EXISTS test_load_local;") await c.execute("CREATE TABLE test_load_local " "(a INTEGER, b INTEGER)") await c.close() async def drop_table(conn): c = await conn.cursor() await c.execute("DROP TABLE test_load_local") await c.close() loop.run_until_complete(prepare_table(connection)) yield loop.run_until_complete(drop_table(connection)) @pytest.mark.run_loop async def test_no_file(cursor, table_local_file): # Test load local infile when the file does not exist sql = "LOAD DATA LOCAL INFILE 'no_data.txt'" + \ " INTO TABLE test_load_local fields " + \ "terminated by ','" with pytest.raises(OperationalError): await cursor.execute(sql) @pytest.mark.run_loop async def test_error_on_file_read(cursor, table_local_file): with patch.object(builtins, 'open') as open_mocked: m = MagicMock() m.read.side_effect = OperationalError(1024, 'Error reading file') m.close.return_value = None open_mocked.return_value = m with pytest.raises(OperationalError): await cursor.execute("LOAD DATA LOCAL INFILE 'some.txt'" " INTO TABLE test_load_local fields " "terminated by ','") @pytest.mark.run_loop async def test_load_file(cursor, table_local_file): # Test load local infile with a valid file filename = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures', 'load_local_data.txt') await cursor.execute( ("LOAD DATA LOCAL INFILE '{0}' INTO TABLE " + "test_load_local FIELDS TERMINATED BY ','").format(filename) ) await cursor.execute("SELECT COUNT(*) FROM test_load_local") resp = await cursor.fetchone() assert 22749 == resp[0] @pytest.mark.run_loop async def test_load_warnings(cursor, table_local_file): # Test load local infile produces the appropriate warnings import warnings # TODO: Move to pathlib filename = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures', 'load_local_warn_data.txt') sql = ("LOAD DATA LOCAL INFILE '{0}' INTO TABLE " + "test_load_local FIELDS TERMINATED BY ','").format(filename) with warnings.catch_warnings(record=True) as w: await cursor.execute(sql) assert "Incorrect integer value" in str(w[-1].message) @pytest.mark.run_loop async def test_load_local_disabled(mysql_params, table_local_file): # By setting the client flag, the server will be informed that we support # loading local files. This validates that the client side check catches # the server attempting to read files from us without having this # explicitly enabled on the connection. The local_infile parameter sets # the client flag, but not the other way round. 
params = mysql_params.copy() params["local_infile"] = False if "client_flag" in params: params["client_flag"] |= CLIENT.LOCAL_FILES else: params["client_flag"] = CLIENT.LOCAL_FILES async with aiomysql.connect(**params) as conn: async with conn.cursor() as cursor: # Test load local infile with a valid file filename = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'fixtures', 'load_local_data.txt') with pytest.raises( RuntimeError, match="Received LOAD_LOCAL packet but local_infile option is false", ): await cursor.execute( ("LOAD DATA LOCAL INFILE '{0}' INTO TABLE " + "test_load_local FIELDS TERMINATED BY ','").format(filename) ) aiomysql-0.3.2/tests/test_nextset.py000066400000000000000000000036061507601712200176250ustar00rootroot00000000000000import asyncio import pytest from pymysql.err import ProgrammingError @pytest.mark.run_loop async def test_nextset(cursor): await cursor.execute("SELECT 1; SELECT 2;") r = await cursor.fetchall() assert [(1,)] == list(r) r = await cursor.nextset() assert r r = await cursor.fetchall() assert [(2,)] == list(r) res = await cursor.nextset() assert res is None @pytest.mark.run_loop async def test_skip_nextset(cursor): await cursor.execute("SELECT 1; SELECT 2;") r = await cursor.fetchall() assert [(1,)] == list(r) await cursor.execute("SELECT 42") r = await cursor.fetchall() assert [(42,)] == list(r) @pytest.mark.run_loop async def test_nextset_error(cursor): await cursor.execute("SELECT 1; xyzzy;") # nextset shouldn't hang on error, it should raise syntax error with pytest.raises(ProgrammingError): await asyncio.wait_for(cursor.nextset(), 5) @pytest.mark.run_loop async def test_ok_and_next(cursor): await cursor.execute("SELECT 1; commit; SELECT 2;") r = await cursor.fetchall() assert [(1,)] == list(r) res = await cursor.nextset() assert res res = await cursor.nextset() assert res r = await cursor.fetchall() assert [(2,)] == list(r) res = await cursor.nextset() assert res is None @pytest.mark.xfail @pytest.mark.run_loop async def test_multi_cursorxx(connection): cur1 = await connection.cursor() cur2 = await connection.cursor() await cur1.execute("SELECT 1; SELECT 2;") await cur2.execute("SELECT 42") r1 = await cur1.fetchall() r2 = await cur2.fetchall() assert [(1,)] == list(r1) assert [(42,)] == list(r2) res = await cur1.nextset() assert res assert [(2,)] == list(r1) res = await cur1.nextset() assert res is None # TODO: How about SSCursor and nextset? # It's very hard to implement correctly... 
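

# Editor's addition: a hedged sketch (not part of the original suite) of what
# an SSCursor + nextset() test could look like once the TODO above is
# addressed. It reuses the `connection` fixture and the aiomysql.cursors
# import seen elsewhere in this test suite, and is marked xfail because
# multi-statement handling for unbuffered cursors is not expected to work
# correctly yet.
@pytest.mark.xfail(reason="nextset() with SSCursor is not implemented yet")
@pytest.mark.run_loop
async def test_sscursor_nextset_sketch(connection):
    # local import keeps the sketch self-contained
    from aiomysql.cursors import SSCursor

    cur = await connection.cursor(SSCursor)
    await cur.execute("SELECT 1; SELECT 2;")
    assert [(1,)] == list(await cur.fetchall())
    assert await cur.nextset()
    assert [(2,)] == list(await cur.fetchall())
    res = await cur.nextset()
    assert res is None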
aiomysql-0.3.2/tests/test_pool.py000066400000000000000000000352551507601712200171110ustar00rootroot00000000000000import asyncio import pytest from aiomysql.connection import Connection from aiomysql.pool import Pool @pytest.mark.run_loop async def test_create_pool(pool_creator): pool = await pool_creator() assert isinstance(pool, Pool) assert 1 == pool.minsize assert 10 == pool.maxsize assert 1 == pool.size assert 1 == pool.freesize assert not pool.echo @pytest.mark.run_loop async def test_create_pool2(pool_creator): pool = await pool_creator(minsize=10, maxsize=20) assert isinstance(pool, Pool) assert 10 == pool.minsize assert 20 == pool.maxsize assert 10 == pool.size assert 10 == pool.freesize @pytest.mark.run_loop async def test_acquire(pool_creator): pool = await pool_creator() conn = await pool.acquire() assert isinstance(conn, Connection) assert not conn.closed cursor = await conn.cursor() await cursor.execute('SELECT 1') val = await cursor.fetchone() await cursor.close() assert (1,) == val pool.release(conn) @pytest.mark.run_loop async def test_release(pool_creator): pool = await pool_creator() conn = await pool.acquire() assert 0 == pool.freesize assert {conn} == pool._used pool.release(conn) assert 1 == pool.freesize assert not pool._used @pytest.mark.run_loop async def test_release_closed(pool_creator): pool = await pool_creator(minsize=10, maxsize=10) conn = await pool.acquire() assert 9 == pool.freesize await conn.ensure_closed() pool.release(conn) assert 9 == pool.freesize assert not pool._used assert 9 == pool.size conn2 = await pool.acquire() assert 9 == pool.freesize assert 10 == pool.size pool.release(conn2) @pytest.mark.run_loop async def test_bad_context_manager_usage(pool_creator): pool = await pool_creator() with pytest.raises(RuntimeError): with pool: pass @pytest.mark.run_loop async def test_context_manager(pool_creator): pool = await pool_creator(minsize=10, maxsize=10) async with pool.acquire() as conn: assert isinstance(conn, Connection) assert 9 == pool.freesize assert {conn} == pool._used assert 10 == pool.freesize @pytest.mark.run_loop async def test_clear(pool_creator): pool = await pool_creator() await pool.clear() assert 0 == pool.freesize @pytest.mark.run_loop async def test_initial_empty(pool_creator): pool = await pool_creator(minsize=0) assert 10 == pool.maxsize assert 0 == pool.minsize assert 0 == pool.size assert 0 == pool.freesize async with pool.acquire(): assert 1 == pool.size assert 0 == pool.freesize assert 1 == pool.size assert 1 == pool.freesize conn1 = await pool.acquire() assert 1 == pool.size assert 0 == pool.freesize conn2 = await pool.acquire() assert 2 == pool.size assert 0 == pool.freesize pool.release(conn1) assert 2 == pool.size assert 1 == pool.freesize pool.release(conn2) assert 2 == pool.size assert 2 == pool.freesize @pytest.mark.run_loop async def test_parallel_tasks(pool_creator, loop): pool = await pool_creator(minsize=0, maxsize=2) assert 2 == pool.maxsize assert 0 == pool.minsize assert 0 == pool.size assert 0 == pool.freesize fut1 = pool.acquire() fut2 = pool.acquire() conn1, conn2 = await asyncio.gather(fut1, fut2) assert 2 == pool.size assert 0 == pool.freesize assert {conn1, conn2} == pool._used pool.release(conn1) assert 2 == pool.size assert 1 == pool.freesize assert {conn2} == pool._used pool.release(conn2) assert 2 == pool.size assert 2 == pool.freesize assert not conn1.closed assert not conn2.closed conn3 = await pool.acquire() assert conn3 is conn1 pool.release(conn3) @pytest.mark.run_loop async def 
test_parallel_tasks_more(pool_creator, loop):
    pool = await pool_creator(minsize=0, maxsize=3)

    fut1 = pool.acquire()
    fut2 = pool.acquire()
    fut3 = pool.acquire()

    conn1, conn2, conn3 = await asyncio.gather(fut1, fut2, fut3)
    assert 3 == pool.size
    assert 0 == pool.freesize
    assert {conn1, conn2, conn3} == pool._used

    pool.release(conn1)
    assert 3 == pool.size
    assert 1 == pool.freesize
    assert {conn2, conn3} == pool._used

    pool.release(conn2)
    assert 3 == pool.size
    assert 2 == pool.freesize
    assert {conn3} == pool._used
    assert not conn1.closed
    assert not conn2.closed

    pool.release(conn3)
    assert 3 == pool.size
    assert 3 == pool.freesize
    assert not pool._used
    assert not conn1.closed
    assert not conn2.closed
    assert not conn3.closed

    conn4 = await pool.acquire()
    assert conn4 is conn1
    pool.release(conn4)


@pytest.mark.run_loop
async def test_default_event_loop(pool_creator, loop):
    asyncio.set_event_loop(loop)
    pool = await pool_creator(loop=None)
    assert pool._loop is loop


@pytest.mark.run_loop
async def test_release_with_invalid_status(pool_creator):
    pool = await pool_creator(minsize=10, maxsize=10)
    conn = await pool.acquire()
    assert 9 == pool.freesize
    assert {conn} == pool._used
    cur = await conn.cursor()
    await cur.execute('BEGIN')
    await cur.close()

    pool.release(conn)
    assert 9 == pool.freesize
    assert not pool._used
    assert conn.closed


@pytest.mark.run_loop
async def test_release_with_invalid_status_wait_release(pool_creator):
    pool = await pool_creator(minsize=10, maxsize=10)
    conn = await pool.acquire()
    assert 9 == pool.freesize
    assert {conn} == pool._used
    cur = await conn.cursor()
    await cur.execute('BEGIN')
    await cur.close()

    await pool.release(conn)
    assert 9 == pool.freesize
    assert not pool._used
    assert conn.closed


@pytest.mark.run_loop
async def test__fill_free(pool_creator, loop):
    pool = await pool_creator(minsize=1)
    async with pool.acquire():
        assert 0 == pool.freesize
        assert 1 == pool.size

        conn = await asyncio.wait_for(pool.acquire(), timeout=0.5)
        assert 0 == pool.freesize
        assert 2 == pool.size
        pool.release(conn)
        assert 1 == pool.freesize
        assert 2 == pool.size
    assert 2 == pool.freesize
    assert 2 == pool.size


@pytest.mark.run_loop
async def test_connect_from_acquire(pool_creator):
    pool = await pool_creator(minsize=0)
    assert 0 == pool.freesize
    assert 0 == pool.size
    async with pool.acquire():
        assert 1 == pool.size
        assert 0 == pool.freesize
    assert 1 == pool.size
    assert 1 == pool.freesize


@pytest.mark.run_loop
async def test_concurrency(pool_creator):
    pool = await pool_creator(minsize=2, maxsize=4)
    c1 = await pool.acquire()
    c2 = await pool.acquire()
    assert 0 == pool.freesize
    assert 2 == pool.size
    pool.release(c1)
    pool.release(c2)


@pytest.mark.run_loop
async def test_invalid_minsize_and_maxsize(pool_creator):
    with pytest.raises(ValueError):
        await pool_creator(minsize=-1)

    with pytest.raises(ValueError):
        await pool_creator(minsize=5, maxsize=2)


@pytest.mark.run_loop
async def test_true_parallel_tasks(pool_creator, loop):
    pool = await pool_creator(minsize=0, maxsize=1)
    assert 1 == pool.maxsize
    assert 0 == pool.minsize
    assert 0 == pool.size
    assert 0 == pool.freesize

    maxsize = 0
    minfreesize = 100

    async def inner():
        nonlocal maxsize, minfreesize
        maxsize = max(maxsize, pool.size)
        minfreesize = min(minfreesize, pool.freesize)
        conn = await pool.acquire()
        maxsize = max(maxsize, pool.size)
        minfreesize = min(minfreesize, pool.freesize)
        await asyncio.sleep(0.01)
        pool.release(conn)
        maxsize = max(maxsize, pool.size)
        minfreesize = min(minfreesize, pool.freesize)

    await asyncio.gather(inner(), inner())
    assert 1 == maxsize
    assert 0 ==
minfreesize @pytest.mark.run_loop async def test_cannot_acquire_after_closing(pool_creator): pool = await pool_creator() pool.close() with pytest.raises(RuntimeError): await pool.acquire() @pytest.mark.run_loop async def test_wait_closed(pool_creator, loop): pool = await pool_creator(minsize=10, maxsize=10) c1 = await pool.acquire() c2 = await pool.acquire() assert 10 == pool.size assert 8 == pool.freesize ops = [] async def do_release(conn): await asyncio.sleep(0) pool.release(conn) ops.append('release') async def wait_closed(): await pool.wait_closed() ops.append('wait_closed') pool.close() await asyncio.gather(wait_closed(), do_release(c1), do_release(c2)) assert ['release', 'release', 'wait_closed'] == ops assert 0 == pool.freesize assert pool.closed @pytest.mark.run_loop async def test_echo(pool_creator): pool = await pool_creator(echo=True) assert pool.echo async with pool.acquire() as conn: assert conn.echo @pytest.mark.run_loop async def test_terminate_with_acquired_connections(pool_creator): pool = await pool_creator() conn = await pool.acquire() pool.terminate() await pool.wait_closed() assert conn.closed @pytest.mark.run_loop async def test_release_closed_connection(pool_creator): pool = await pool_creator() conn = await pool.acquire() conn.close() pool.release(conn) pool.close() @pytest.mark.run_loop async def test_wait_closing_on_not_closed(pool_creator): pool = await pool_creator() with pytest.raises(RuntimeError): await pool.wait_closed() pool.close() @pytest.mark.run_loop async def test_release_terminated_pool(pool_creator): pool = await pool_creator() conn = await pool.acquire() pool.terminate() await pool.wait_closed() pool.release(conn) pool.close() @pytest.mark.run_loop async def test_release_terminated_pool_wait_release(pool_creator): pool = await pool_creator() conn = await pool.acquire() pool.terminate() await pool.wait_closed() await pool.release(conn) pool.close() @pytest.mark.run_loop async def test_close_with_acquired_connections(pool_creator, loop): pool = await pool_creator() conn = await pool.acquire() pool.close() with pytest.raises(asyncio.TimeoutError): await asyncio.wait_for(pool.wait_closed(), 0.1) pool.release(conn) async def _set_global_conn_timeout(conn, t): # create separate connection to setup global connection timeouts # https://dev.mysql.com/doc/refman/5.1/en/server-system-variables # .html#sysvar_interactive_timeout cur = await conn.cursor() await cur.execute('SET GLOBAL wait_timeout=%s;', t) await cur.execute('SET GLOBAL interactive_timeout=%s;', t) await cur.close() @pytest.mark.run_loop async def test_drop_connection_if_timedout(pool_creator, connection_creator, loop): conn = await connection_creator() await _set_global_conn_timeout(conn, 2) await conn.ensure_closed() pool = conn = None try: pool = await pool_creator(minsize=3, maxsize=3) # sleep, more then connection timeout await asyncio.sleep(3) conn = await pool.acquire() cur = await conn.cursor() # query should not throw exception OperationalError await cur.execute('SELECT 1;') pool.release(conn) conn = None pool.close() await pool.wait_closed() finally: # TODO: this could probably be done better # if this isn't closed it blocks forever try: if conn is not None: pool.release(conn) if pool is not None: pool.close() await pool.wait_closed() except Exception: pass # setup default timeouts conn = await connection_creator() await _set_global_conn_timeout(conn, 28800) await conn.ensure_closed() @pytest.mark.skip(reason='Not implemented') @pytest.mark.run_loop async def 
test_create_pool_with_timeout(pool_creator): pool = await pool_creator(minsize=3, maxsize=3) timeout = 0.1 assert timeout == pool.timeout conn = await pool.acquire() assert timeout == conn.timeout pool.release(conn) @pytest.mark.run_loop async def test_cancelled_connection(pool_creator, loop): pool = await pool_creator(minsize=0, maxsize=1) try: async with pool.acquire() as conn: curs = await conn.cursor() # Cancel a cursor in the middle of execution, before it # could read even the first packet (SLEEP assures the # timings) task = loop.create_task(curs.execute( "SELECT 1 as id, SLEEP(0.1) as xxx")) await asyncio.sleep(0.05) task.cancel() await task except asyncio.CancelledError: pass async with pool.acquire() as conn: cur2 = await conn.cursor() res = await cur2.execute("SELECT 2 as value, 0 as xxx") names = [x[0] for x in cur2.description] # If we receive ["id", "xxx"] - we corrupted the connection assert names == ["value", "xxx"] res = await cur2.fetchall() # If we receive [(1, 0)] - we retrieved old cursor's values assert list(res) == [(2, 0)] @pytest.mark.run_loop async def test_pool_with_connection_recycling(pool_creator, loop): pool = await pool_creator(minsize=1, maxsize=1, pool_recycle=3) async with pool.acquire() as conn: cur = await conn.cursor() await cur.execute('SELECT 1;') val = await cur.fetchone() assert (1,) == val await asyncio.sleep(5) assert 1 == pool.freesize async with pool.acquire() as conn: cur = await conn.cursor() await cur.execute('SELECT 1;') val = await cur.fetchone() assert (1,) == val @pytest.mark.run_loop async def test_pool_drops_connection_with_exception(pool_creator, loop): pool = await pool_creator(minsize=1, maxsize=1) async with pool.acquire() as conn: cur = await conn.cursor() await cur.execute('SELECT 1;') connection, = pool._free connection._writer._protocol.connection_lost(IOError()) async with pool.acquire() as conn: cur = await conn.cursor() await cur.execute('SELECT 1;') @pytest.mark.run_loop async def test_pool_maxsize_unlimited(pool_creator, loop): pool = await pool_creator(minsize=0, maxsize=0) async with pool.acquire() as conn: cur = await conn.cursor() await cur.execute('SELECT 1;') @pytest.mark.run_loop async def test_pool_maxsize_unlimited_minsize_1(pool_creator, loop): pool = await pool_creator(minsize=1, maxsize=0) async with pool.acquire() as conn: cur = await conn.cursor() await cur.execute('SELECT 1;') aiomysql-0.3.2/tests/test_sha_connection.py000066400000000000000000000074351507601712200211310ustar00rootroot00000000000000import copy from aiomysql import create_pool import pytest # You could parameterise these tests with this, but then pytest # does some funky stuff and spins up and tears down containers # per function call. Remember it would be # mysql_versions * event_loops * 4 auth tests ~= 3*2*4 ~= 24 tests # As the MySQL daemon restarts at least 3 times in the container # before it becomes stable, there's a sleep(10) so that's # around a 4min wait time. 
# @pytest.mark.parametrize("user,password,plugin", [ # ("nopass_sha256", None, 'sha256_password'), # ("user_sha256", 'pass_sha256', 'sha256_password'), # ("nopass_caching_sha2", None, 'caching_sha2_password'), # ("user_caching_sha2", 'pass_caching_sha2', 'caching_sha2_password'), # ]) def ensure_mysql_version(mysql_server): if mysql_server["db_type"] != "mysql" \ or mysql_server["server_version_tuple_short"] != (8, 0): pytest.skip("Not applicable for {} version: {}" .format(mysql_server["db_type"], mysql_server["server_version_tuple_short"])) @pytest.mark.run_loop async def test_sha256_nopw(mysql_server, loop): ensure_mysql_version(mysql_server) connection_data = copy.copy(mysql_server['conn_params']) connection_data['user'] = 'nopass_sha256' connection_data['password'] = None async with create_pool(**connection_data, loop=loop) as pool: async with pool.acquire() as conn: # User doesnt have any permissions to look at DBs # But as 8.0 will default to caching_sha2_password assert conn._auth_plugin_used == 'sha256_password' @pytest.mark.run_loop async def test_sha256_pw(mysql_server, loop): ensure_mysql_version(mysql_server) # https://dev.mysql.com/doc/refman/8.0/en/sha256-pluggable-authentication.html # Unlike caching_sha2_password, the sha256_password plugin does not treat # shared-memory connections as secure, even though share-memory transport # is secure by default. if "unix_socket" in mysql_server['conn_params']: pytest.skip("sha256_password is not supported on unix sockets") connection_data = copy.copy(mysql_server['conn_params']) connection_data['user'] = 'user_sha256' connection_data['password'] = 'pass_sha256' async with create_pool(**connection_data, loop=loop) as pool: async with pool.acquire() as conn: # User doesnt have any permissions to look at DBs # But as 8.0 will default to caching_sha2_password assert conn._auth_plugin_used == 'sha256_password' @pytest.mark.run_loop async def test_cached_sha256_nopw(mysql_server, loop): ensure_mysql_version(mysql_server) connection_data = copy.copy(mysql_server['conn_params']) connection_data['user'] = 'nopass_caching_sha2' connection_data['password'] = None async with create_pool(**connection_data, loop=loop) as pool: async with pool.acquire() as conn: # User doesnt have any permissions to look at DBs # But as 8.0 will default to caching_sha2_password assert conn._auth_plugin_used == 'caching_sha2_password' @pytest.mark.run_loop async def test_cached_sha256_pw(mysql_server, loop): ensure_mysql_version(mysql_server) connection_data = copy.copy(mysql_server['conn_params']) connection_data['user'] = 'user_caching_sha2' connection_data['password'] = 'pass_caching_sha2' async with create_pool(**connection_data, loop=loop) as pool: async with pool.acquire() as conn: # User doesnt have any permissions to look at DBs # But as 8.0 will default to caching_sha2_password assert conn._auth_plugin_used == 'caching_sha2_password' aiomysql-0.3.2/tests/test_sscursor.py000066400000000000000000000232301507601712200200110ustar00rootroot00000000000000import asyncio import pytest from pymysql import NotSupportedError from aiomysql import ProgrammingError, InterfaceError, OperationalError from aiomysql.cursors import SSCursor DATA = [ ('America', '', 'America/Jamaica'), ('America', '', 'America/Los_Angeles'), ('America', '', 'America/Lima'), ('America', '', 'America/New_York'), ('America', '', 'America/Menominee'), ('America', '', 'America/Havana'), ('America', '', 'America/El_Salvador'), ('America', '', 'America/Costa_Rica'), ('America', '', 'America/Denver'), 
('America', '', 'America/Detroit'), ] async def _prepare(conn): cursor = await conn.cursor() # Create table await cursor.execute('DROP TABLE IF EXISTS tz_data;') await cursor.execute('CREATE TABLE tz_data (' 'region VARCHAR(64),' 'zone VARCHAR(64),' 'name VARCHAR(64))') await cursor.executemany( 'INSERT INTO tz_data VALUES (%s, %s, %s)', DATA) await conn.commit() await cursor.close() @pytest.mark.run_loop async def test_ssursor(connection): # affected_rows = 18446744073709551615 conn = connection cursor = await conn.cursor(SSCursor) # Create table await cursor.execute('DROP TABLE IF EXISTS tz_data;') await cursor.execute('CREATE TABLE tz_data (' 'region VARCHAR(64),' 'zone VARCHAR(64),' 'name VARCHAR(64))') # Test INSERT for i in DATA: await cursor.execute( 'INSERT INTO tz_data VALUES (%s, %s, %s)', i) assert conn.affected_rows() == 1, 'affected_rows does not match' await conn.commit() # Test update, affected_rows() await cursor.execute('UPDATE tz_data SET zone = %s', ['Foo']) await conn.commit() assert cursor.rowcount == len(DATA), \ 'Update failed. affected_rows != %s' % (str(len(DATA))) await cursor.close() await cursor.close() @pytest.mark.run_loop async def test_sscursor_fetchall(connection): conn = connection cursor = await conn.cursor(SSCursor) await _prepare(conn) await cursor.execute('SELECT * FROM tz_data') fetched_data = await cursor.fetchall() assert len(fetched_data) == len(DATA), \ 'fetchall failed. Number of rows does not match' @pytest.mark.run_loop async def test_sscursor_fetchmany(connection): conn = connection cursor = await conn.cursor(SSCursor) await _prepare(conn) await cursor.execute('SELECT * FROM tz_data') fetched_data = await cursor.fetchmany(2) assert len(fetched_data) == 2, \ 'fetchmany failed. Number of rows does not match' await cursor.close() # test default fetchmany size cursor = await conn.cursor(SSCursor) await cursor.execute('SELECT * FROM tz_data;') fetched_data = await cursor.fetchmany() assert len(fetched_data) == 1 @pytest.mark.run_loop async def test_sscursor_executemany(connection): conn = connection await _prepare(conn) cursor = await conn.cursor(SSCursor) # Test executemany await cursor.executemany( 'INSERT INTO tz_data VALUES (%s, %s, %s)', DATA) msg = 'executemany failed. 
cursor.rowcount != %s' assert cursor.rowcount == len(DATA), msg % (str(len(DATA))) @pytest.mark.run_loop async def test_sscursor_scroll_relative(connection): conn = connection await _prepare(conn) cursor = await conn.cursor(SSCursor) await cursor.execute('SELECT * FROM tz_data;') await cursor.scroll(1) ret = await cursor.fetchone() assert ('America', '', 'America/Los_Angeles') == ret @pytest.mark.run_loop async def test_sscursor_scroll_absolute(connection): conn = connection await _prepare(conn) cursor = await conn.cursor(SSCursor) await cursor.execute('SELECT * FROM tz_data;') await cursor.scroll(2, mode='absolute') ret = await cursor.fetchone() assert ('America', '', 'America/Lima') == ret @pytest.mark.run_loop async def test_sscursor_scroll_errors(connection): conn = connection await _prepare(conn) cursor = await conn.cursor(SSCursor) await cursor.execute('SELECT * FROM tz_data;') with pytest.raises(NotSupportedError): await cursor.scroll(-2, mode='relative') await cursor.scroll(2, mode='absolute') with pytest.raises(NotSupportedError): await cursor.scroll(1, mode='absolute') with pytest.raises(ProgrammingError): await cursor.scroll(2, mode='not_valid_mode') @pytest.mark.run_loop async def test_sscursor_cancel(connection): conn = connection cur = await conn.cursor(SSCursor) # Prepare A LOT of data await cur.execute('DROP TABLE IF EXISTS long_seq;') await cur.execute( """ CREATE TABLE long_seq ( id int(11) ) """) ids = [(x) for x in range(100000)] await cur.executemany('INSERT INTO long_seq VALUES (%s)', ids) # Will return several results. All we need at this point big_str = "x" * 10000 await cur.execute( """SELECT '{}' as id FROM long_seq; """.format(big_str)) first = await cur.fetchone() assert first == (big_str,) async def read_cursor(): while True: res = await cur.fetchone() if res is None: break task = asyncio.ensure_future(read_cursor()) await asyncio.sleep(0) assert not task.done(), "Test failed to produce needed condition." 
task.cancel() try: await task except asyncio.CancelledError: pass with pytest.raises(InterfaceError): await conn.cursor(SSCursor) @pytest.mark.run_loop async def test_sscursor_discarded_result(connection): conn = connection await _prepare(conn) async with conn.cursor(SSCursor) as cursor: await cursor.execute("select 1") with pytest.warns( UserWarning, match="Previous unbuffered result was left incomplete", ): await cursor.execute("select 2") ret = await cursor.fetchone() assert (2,) == ret @pytest.mark.run_loop async def test_max_execution_time(mysql_server, connection): conn = connection async with connection.cursor() as cur: await cur.execute("DROP TABLE IF EXISTS tbl;") await cur.execute( """ CREATE TABLE tbl ( id MEDIUMINT NOT NULL AUTO_INCREMENT, name VARCHAR(255) NOT NULL, PRIMARY KEY (id)); """ ) for i in [(1, "a"), (2, "b"), (3, "c")]: await cur.execute("INSERT INTO tbl VALUES(%s, %s)", i) await conn.commit() async with conn.cursor(SSCursor) as cur: # MySQL MAX_EXECUTION_TIME takes ms # MariaDB max_statement_time takes seconds as int/float, introduced in 10.1 # this will sleep 0.01 seconds per row if mysql_server["db_type"] == "mysql": sql = """ SELECT /*+ MAX_EXECUTION_TIME(2000) */ name, sleep(0.01) FROM tbl """ else: sql = """ SET STATEMENT max_statement_time=2 FOR SELECT name, sleep(0.01) FROM tbl """ await cur.execute(sql) # unlike Cursor, SSCursor returns a list of tuples here assert (await cur.fetchall()) == [ ("a", 0), ("b", 0), ("c", 0), ] if mysql_server["db_type"] == "mysql": sql = """ SELECT /*+ MAX_EXECUTION_TIME(2000) */ name, sleep(0.01) FROM tbl """ else: sql = """ SET STATEMENT max_statement_time=2 FOR SELECT name, sleep(0.01) FROM tbl """ await cur.execute(sql) assert (await cur.fetchone()) == ("a", 0) # this discards the previous unfinished query and raises an # incomplete unbuffered query warning with pytest.warns( UserWarning, match="Previous unbuffered result was left incomplete", ): await cur.execute("SELECT 1") assert (await cur.fetchone()) == (1,) # SSCursor will not read the EOF packet until we try to read # another row. Skipping this will raise an incomplete unbuffered # query warning in the next cur.execute(). 
assert (await cur.fetchone()) is None if mysql_server["db_type"] == "mysql": sql = """ SELECT /*+ MAX_EXECUTION_TIME(1) */ name, sleep(1) FROM tbl """ else: sql = """ SET STATEMENT max_statement_time=0.001 FOR SELECT name, sleep(1) FROM tbl """ with pytest.raises(OperationalError) as cm: # in an unbuffered cursor the OperationalError may not show up # until fetching the entire result await cur.execute(sql) await cur.fetchall() if mysql_server["db_type"] == "mysql": # this constant was only introduced in MySQL 5.7, not sure # what was returned before, may have been ER_QUERY_INTERRUPTED # this constant is pending a new PyMySQL release # assert cm.value.args[0] == pymysql.constants.ER.QUERY_TIMEOUT assert cm.value.args[0] == 3024 else: # this constant is pending a new PyMySQL release # assert cm.value.args[0] == pymysql.constants.ER.STATEMENT_TIMEOUT assert cm.value.args[0] == 1969 # connection should still be fine at this point await cur.execute("SELECT 1") assert (await cur.fetchone()) == (1,) aiomysql-0.3.2/tests/test_ssl.py000066400000000000000000000051751507601712200167370ustar00rootroot00000000000000from aiomysql import create_pool import pytest @pytest.mark.run_loop async def test_tls_connect(mysql_server, loop, mysql_params): if "unix_socket" in mysql_params: pytest.skip("TLS is not supported on unix sockets") async with create_pool(**mysql_server['conn_params'], loop=loop) as pool: async with pool.acquire() as conn: async with conn.cursor() as cur: # Run simple command await cur.execute("SHOW DATABASES;") value = await cur.fetchall() values = [item[0] for item in value] # Spot check the answers, we should at least have mysql # and information_schema assert 'mysql' in values, \ 'Could not find the "mysql" table' assert 'information_schema' in values, \ 'Could not find the "mysql" table' # Check TLS variables await cur.execute("SHOW STATUS LIKE 'Ssl_version%';") value = await cur.fetchone() # The context has TLS assert value[1].startswith('TLS'), \ 'Not connected to the database with TLS' # MySQL will get you to renegotiate if sent a cleartext password @pytest.mark.run_loop async def test_auth_plugin_renegotiation(mysql_server, loop, mysql_params): if "unix_socket" in mysql_params: pytest.skip("TLS is not supported on unix sockets") async with create_pool(**mysql_server['conn_params'], auth_plugin='mysql_clear_password', loop=loop) as pool: async with pool.acquire() as conn: async with conn.cursor() as cur: # Run simple command await cur.execute("SHOW DATABASES;") value = await cur.fetchall() assert len(value), 'No databases found' # Check we tried to use the cleartext plugin assert conn._client_auth_plugin == 'mysql_clear_password', \ 'Client did not try clear password auth' # Check the server asked us to use MySQL's default plugin assert conn._server_auth_plugin in ( 'mysql_native_password', 'caching_sha2_password'), \ 'Server did not ask for native auth' # Check we actually used the servers default plugin assert conn._auth_plugin_used in ( 'mysql_native_password', 'caching_sha2_password'), \ 'Client did not renegotiate with server\'s default auth'
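

# Editor's addition: a minimal companion sketch to test_tls_connect above
# (not part of the original suite), illustrating the same status-variable
# check with the negotiated cipher instead of the protocol version. It
# assumes the same mysql_server/mysql_params fixtures and connection
# parameters used by the tests above.
@pytest.mark.run_loop
async def test_tls_cipher_negotiated_sketch(mysql_server, loop, mysql_params):
    if "unix_socket" in mysql_params:
        pytest.skip("TLS is not supported on unix sockets")

    async with create_pool(**mysql_server['conn_params'], loop=loop) as pool:
        async with pool.acquire() as conn:
            async with conn.cursor() as cur:
                # A TLS session should report a non-empty cipher suite
                await cur.execute("SHOW STATUS LIKE 'Ssl_cipher';")
                value = await cur.fetchone()
                assert value[1], \
                    'No TLS cipher reported for the connection'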