pax_global_header00006660000000000000000000000064136462632540014525gustar00rootroot0000000000000052 comment=6035f52da50dabcfa1abe562c513a2044cfcd5c5 txacme-0.9.3/000077500000000000000000000000001364626325400130175ustar00rootroot00000000000000txacme-0.9.3/.coveragerc000066400000000000000000000003201364626325400151330ustar00rootroot00000000000000[run] branch = True source = txacme omit = src/txacme/_version.py src/txacme/interfaces.py [paths] source = src/txacme .tox/*/lib/python*/site-packages/txacme .tox/*/site-packages/txacme txacme-0.9.3/.gitattributes000066400000000000000000000000441364626325400157100ustar00rootroot00000000000000src/txacme/_version.py export-subst txacme-0.9.3/.github/000077500000000000000000000000001364626325400143575ustar00rootroot00000000000000txacme-0.9.3/.github/PULL_REQUEST_TEMPLATE000066400000000000000000000005071364626325400175630ustar00rootroot00000000000000## See CONTRIBUTING.rst for more details. ## Contributor Checklist: * [ ] The Pull Request description explain the purpose of this PR or provides a link / reference to a GitHub Issue. * [ ] Created a newsfragment in src/txacme/newsfragments/. * [ ] Updated the automated tests. * [ ] The changes pass minimal style checks. txacme-0.9.3/.gitignore000066400000000000000000000001751364626325400150120ustar00rootroot00000000000000*.egg-info/ *.pyc .coverage .hypothesis/ .testrepository/ .tox/ _trial_temp/ build/ dist/ docs/_build docs/api/ dropin.cache txacme-0.9.3/.testr.conf000066400000000000000000000002141364626325400151020ustar00rootroot00000000000000[DEFAULT] test_command=python -m subunit.run discover -s src/ $LISTOPT $IDOPTION test_id_option=--load-list $IDFILE test_list_option=--list txacme-0.9.3/.travis.yml000066400000000000000000000060771364626325400151420ustar00rootroot00000000000000language: python sudo: false cache: pip if: (branch = master) OR (tag IS present) stages: - name: deploy if: tag IS present matrix: include: - python: 2.7 env: TOXENV=py27-twlatest-alldeps - python: 3.5 env: TOXENV=py35-twlatest-alldeps - python: 3.6 env: TOXENV=py36-twlatest-alldeps - python: 3.7 env: TOXENV=py37-twlatest-alldeps dist: xenial - python: pypy2.7-7.2.0 dist: bionic env: TOXENV=pypy-twlatest-alldeps - python: pypy3.6-7.2.0 dist: bionic env: TOXENV=pypy3-twlatest-alldeps - python: 2.7 env: TOXENV=py27-twtrunk-acmaster-alldeps - python: 3.5 env: TOXENV=py35-twtrunk-acmaster-alldeps - python: 3.6 env: TOXENV=py36-twtrunk-acmaster-alldeps - python: 3.7 env: TOXENV=py37-twtrunk-acmaster-alldeps dist: xenial - python: pypy2.7-7.2.0 dist: bionic env: TOXENV=pypy-twtrunk-acmaster-alldeps - python: pypy3.6-7.2.0 dist: bionic env: TOXENV=pypy3-twtrunk-acmaster-alldeps - python: 2.7 env: TOXENV=py27-twlowest-alldeps - python: 3.5 env: TOXENV=py35-twlowest-alldeps - python: 3.6 env: TOXENV=py36-twlowest-alldeps - python: 3.7 env: TOXENV=py37-twlowest-alldeps dist: xenial - python: pypy2.7-7.2.0 dist: bionic env: TOXENV=pypy-twlowest-alldeps - python: pypy3.6-7.2.0 dist: bionic env: TOXENV=pypy3-twlowest-alldeps - python: 2.7 env: TOXENV=py27-twlatest - python: 2.7 env: TOXENV=docs addons: apt: packages: - libenchant-dev - python: 3.6 env: TOXENV=flake8 allow_failures: - env: TOXENV=py27-twtrunk-acmaster-alldeps - env: TOXENV=py35-twtrunk-acmaster-alldeps - env: TOXENV=py36-twtrunk-acmaster-alldeps - env: TOXENV=py37-twtrunk-acmaster-alldeps - env: TOXENV=pypy-twtrunk-acmaster-alldeps - env: TOXENV=pypy3-twtrunk-acmaster-alldeps install: # Upgrade packaging tools separately, so that other installations are # performed with 
the upgraded tools. - pip install -U pip setuptools wheel - pip install tox codecov script: - tox after_success: # Codecov needs combined coverage, and having the raw report in the test # output can be useful. - tox -e coverage-report - codecov notifications: email: false deploy: provider: pypi user: releasebot.txacme distributions: sdist bdist_wheel on: tags: true condition: "$TOXENV = py37-twlatest-alldeps" all_branches: true password: secure: VLwvRgwwOHp6+8huOdReN8Z6OgiLtyTCGRl82mujlr+rHxSGiUfZrOXKLLVaRhKW3UpMw1Yi4F9KDWBqrbdjn5kAdbgwDrp5s1xnrxX2CeC7BP17fkW37mnY6+BqqJQzhpoiqLFgqS77es6QONosFhxrpu8PqbSlQjy5Ar/OR2NzVd9+/2uEjxw+CKGhI1WSOop7XBnFZ7b4Gjtlcw//lpzed3iOTSDLmK94m617DvhzfJEGH3a2XmkRvLbPsFD5t0KzWwC0AABDaXr7zb6DWP4lPs94x7ZDATpFHruU3m8Zsp0MKl0xIPDdcTPvAWcIpUkJ9da5VeMl049O17l1Hi+NMeAa3UsuRAch5Rp6KOOeTasZWlnk9dgse+Wu6NHCaUJx6VF/qyYkuxTEG8+9LkXeKEYVfjS+TOgKGydd6hc9jLm4U86i3fxNVcw91Ch59TljeWXRr21/ClPrJbFNcDKIBbYu4Lnpzac42w3Gle41zLmSkCvD3zbUJuNxphuOgTDd4+DnOXxmuVE1wvVUVTyVrSxzjc7L9BD5DDaw3QEy3N0az8LBp+OMOJyoRetw2sH7DTioi/7TQLFHxaV7Znv+mbJR8X/7NaNqEQSelm1rD/4PgBlcDoG1Q4Xfc42wj4RsCMxBB3ST7qhGdvD/oa8Zr/GyXwkhzGFhrXtdoio= txacme-0.9.3/CONTRIBUTING.rst000066400000000000000000000036201364626325400154610ustar00rootroot00000000000000Contributing to txacme ###################### We use `tox` to run the tests in a controlled environment. Each change should consider covering the following: * Create a release notes fragment. See the section below. * Write automated tests to the point of having at least 100% code coverage. * Document the API. * Update the documentation with usage examples. Documenting the changes ----------------------- `towncrier `_ is used to manage the release notes. Besides the normal docstrings and API documentation, each change which is visible to the users of txacme should be documented in the release notes. To avoid merge conflicts in the release notes, each item of the release notes is created in a separate file located in `src/txacme/newsfragments/`. The file will have the following format: ISSUE_ID.ITEM_TYPE. `ISSUE_ID` is the GitHub Issue ID targeted by this branch. `ITEM_TYPE` is one of the `default types `_ supported by Towncrier. Executing tests and checking coverage ------------------------------------- You can run all tests in a specific environment, or just a single test:: $ tox -e py27-twlatest txacme.test.test_service $ tox -e py27-twlatest \ txacme.test.test_service.AcmeIssuingServiceTests.test_timer_errors You can check the test coverage and the diff coverage by running the dedicated `coverage-report` tox env:: $ tox -e py27-twlatest,coverage-report There is a tox environment dedicated to code style checks:: $ tox -e flake8 and another one for documentation and API checks:: $ tox -e docs If executing the `tox` environment is too slow for you, you can always activate a specific environment and execute the tests with `trial`:: $ . .tox/py27-twlatest/bin/activate $ pip install -e . 
$ trial txacme.test.test_service.AcmeIssuingServiceTests.test_timer_errors txacme-0.9.3/LICENSE000066400000000000000000000020641364626325400140260ustar00rootroot00000000000000© 2016 Tristan Seligmann Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. txacme-0.9.3/MANIFEST.in000066400000000000000000000002421364626325400145530ustar00rootroot00000000000000include README.rst LICENSE tox.ini .coveragerc setup.cfg graft docs graft src prune src/txacme/newsfragments include versioneer.py include src/txacme/_version.py txacme-0.9.3/README.rst000066400000000000000000000031501364626325400145050ustar00rootroot00000000000000===================================================== txacme: A Twisted implementation of the ACME protocol ===================================================== .. image:: https://readthedocs.org/projects/txacme/badge/?version=stable :target: http://txacme.readthedocs.org/en/stable/?badge=stable :alt: Documentation Status .. image:: https://travis-ci.org/twisted/txacme.svg?branch=master :target: https://travis-ci.org/twisted/txacme :alt: CI status .. image:: https://codecov.io/github/twisted/txacme/coverage.svg?branch=master :target: https://codecov.io/github/twisted/txacme?branch=master :alt: Coverage .. teaser-begin `ACME`_ is Automatic Certificate Management Environment, a protocol that allows clients and certificate authorities to automate verification and certificate issuance. The ACME protocol is used by the free `Let's Encrypt`_ Certificate Authority. ``txacme`` is an implementation of the protocol for `Twisted`_, the event-driven networking engine for Python. ``txacme`` is still under heavy development, and currently only an implementation of the client side of the protocol is planned; if you are interested in implementing or have need of the server side, please get in touch! ``txacme``\ ’s documentation lives at `Read the Docs`_, the code on `GitHub`_. It’s rigorously tested on Python 2.7, 3.4+, and PyPy. .. _ACME: https://github.com/ietf-wg-acme/acme/blob/master/draft-ietf-acme-acme.md .. _Let's Encrypt: https://letsencrypt.org/ .. _Twisted: https://twistedmatrix.com/trac/ .. _Read the Docs: https://txacme.readthedocs.io/ .. 
_GitHub: https://github.com/twisted/txacme txacme-0.9.3/codecov.yml000066400000000000000000000001001364626325400151530ustar00rootroot00000000000000ignore: - src/txacme/_version.py - src/txacme/interfaces.py txacme-0.9.3/docs/000077500000000000000000000000001364626325400137475ustar00rootroot00000000000000txacme-0.9.3/docs/Makefile000066400000000000000000000167501364626325400154200ustar00rootroot00000000000000# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " applehelp to make an Apple Help Book" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if enabled)" @echo " coverage to run coverage check of the documentation (if enabled)" .PHONY: clean clean: rm -rf $(BUILDDIR)/* .PHONY: html html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." .PHONY: dirhtml dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." .PHONY: singlehtml singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." .PHONY: pickle pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." .PHONY: json json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." 
.PHONY: htmlhelp htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." .PHONY: qthelp qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/txacme.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/txacme.qhc" .PHONY: applehelp applehelp: $(SPHINXBUILD) -b applehelp $(ALLSPHINXOPTS) $(BUILDDIR)/applehelp @echo @echo "Build finished. The help book is in $(BUILDDIR)/applehelp." @echo "N.B. You won't be able to view it unless you put it in" \ "~/Library/Documentation/Help or install it in your application" \ "bundle." .PHONY: devhelp devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/txacme" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/txacme" @echo "# devhelp" .PHONY: epub epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." .PHONY: latex latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." .PHONY: latexpdf latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." .PHONY: latexpdfja latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." .PHONY: text text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." .PHONY: man man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." .PHONY: texinfo texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." .PHONY: info info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." .PHONY: gettext gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." .PHONY: changes changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." .PHONY: linkcheck linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." 
.PHONY: doctest doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." .PHONY: coverage coverage: $(SPHINXBUILD) -b coverage $(ALLSPHINXOPTS) $(BUILDDIR)/coverage @echo "Testing of coverage in the sources finished, look at the " \ "results in $(BUILDDIR)/coverage/python.txt." .PHONY: xml xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. The XML files are in $(BUILDDIR)/xml." .PHONY: pseudoxml pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." txacme-0.9.3/docs/api-stability.rst000066400000000000000000000014361364626325400172600ustar00rootroot00000000000000API stability ============= txacme is versioned according to `SemVer 2.0.0`_. In addition, since SemVer does not make this explicit, versions following txacme 1.0.0 will have a "rolling compatibility" guarantee: new major versions will not break behaviour that did not already emit a deprecation warning in the latest minor version of the previous major version series. The current version number of 0.9.x is intended to reflect the not-quite-finalized nature of the API. While it is not expected that the API will change drastically, the 0.9 version series is intended to allow space for users to experiment and identify any issues obstructing their use cases so that these can be corrected before the API is finalized in the 1.0.0 release. .. _SemVer 2.0.0: http://semver.org/spec/v2.0.0.html txacme-0.9.3/docs/certs-dir.rst000066400000000000000000000017141364626325400164000ustar00rootroot00000000000000Certificates directory ====================== The layout of the certificates directory used by ``DirectoryStore`` (and thus the ``le:`` and ``lets:`` endpoints) is coordinated with `txsni`_ to allow sharing a certificates directory with other applications. The txsni and txacme maintainers have committed to coordination of any future changes to the contents of this directory to ensure continued compatibility. .. _txsni: https://github.com/glyph/txsni At present, the following entries may exist in this directory: * ``.pem`` A file containing a certificate and matching private key valid for ````, serialized in PEM format. * ``client.key`` A file containing an ACME client key, serialized in PEM format. All other filenames are currently reserved for future use; introducing non-specified files or directories into a certificates directory may result in conflicts with items specified by future versions of txacme and/or txsni. txacme-0.9.3/docs/changelog.rst000066400000000000000000000051221364626325400164300ustar00rootroot00000000000000txacme changelog ~~~~~~~~~~~~~~~~ .. towncrier release notes start Txacme 0.9.3 (2020-04-16) ========================= Bugfixes -------- - Become installable on current versions of attrs again. (#137) Deprecations and Removals ------------------------- - INCOMPATIBLE CHANGE: Removed ``txacme.util.key_cryptography_to_pyopenssl`` and ``txacme.util.cert_cryptography_to_pyopenssl`` in favour of using the native PyOpenSSL conversion methods. (#122) Txacme 0.9.2 (2018-01-24) ========================= Features -------- - The default client timeout is now 40 seconds to allow Let's Encrypt's server side timeout of 30 seconds to kick in first. 
(#111) Misc ---- - #115 Txacme 0.9.1 (2016-12-08) ========================= Features -------- - INCOMPATIBLE CHANGE: AcmeIssuingService now takes a client creator, rather than a client, and invokes it for every issuing attempt. (#21) - INCOMPATIBLE CHANGE: The ``*_DIRECTORY`` constants are now in txacme.urls. (#28) - INCOMPATIBLE CHANGE: ``IResponder.start_responding`` and ``IResponder.stop_responding`` now take the server_name and challenge object in addition to the challenge response object. (#60) - AcmeIssuingService now logs info messages about what it is doing. (#38) - txacme.challenges.LibcloudDNSResponder implements a dns-01 challenge responder using libcloud. Installing txacme[libcloud] is necessary to pull in the dependencies for this. (#59) - ``txacme.challenges.HTTP01Responder``, an http-01 challenge responder that can be embedded into an existing twisted.web application. (#65) - ``txacme.endpoint.load_or_create_client_key`` gets a client key from the certs directory, using the same logic as the endpoints. (#71) - ``AcmeIssuingService`` now accepts an ``email`` parameter which it adds to the ACME registration. In addition, existing registrations are updated with this email address. (#72) - ``AcmeIssuingService`` now has a public ``issue_cert`` method for safely issuing a new cert on demand. (#76) Bugfixes -------- - ``txacme.client.JWSClient`` now automatically retries a POST request that fails with a ``badNonce`` error. (#66) - ``txacme.store.DirectoryStore`` now handles bytes mode paths correctly. (#68) - The txacme endpoint plugin now lazily imports the rest of the code, avoiding ReactorAlreadyInstalled errors in various cases. (#79) Improved Documentation ---------------------- - The contents of the certificates directory, and compatibility with txsni, is now documented. (#35) Misc ---- - #67 Txacme 0.9.0 (2016-04-10) ========================= Features -------- - Initial release! (#23) txacme-0.9.3/docs/conf.py000066400000000000000000000273661364626325400152640ustar00rootroot00000000000000# -*- coding: utf-8 -*- # # txacme documentation build configuration file, created by # sphinx-quickstart on Wed Feb 24 09:42:24 2016. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import os import subprocess import sys try: import sphinx_rtd_theme except ImportError: sphinx_rtd_theme = None # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. extensions = [ 'sphinx.ext.autodoc', 'sphinx.ext.doctest', 'sphinx.ext.intersphinx', 'sphinx.ext.linkcode', 'sphinx.ext.todo', 'repoze.sphinx.autointerface', ] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix(es) of source filenames. 
# You can specify multiple suffix as a list of string: # source_suffix = ['.rst', '.md'] source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'txacme' copyright = u'2016, Tristan Seligmann' author = u'Tristan Seligmann' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. sys.path.insert(0, '.') sys.path.insert(0, '../src') from txacme import __version__, _version version = release = __version__ txacme_version_info = _version.get_versions() # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. # # This is also used if you do content translation via gettext catalogs. # Usually you set "language" from the command line for these cases. language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. default_role = 'py:obj' # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # If true, `todo` and `todoList` produce output, else they produce nothing. todo_include_todos = True # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. if sphinx_rtd_theme: html_theme = "sphinx_rtd_theme" html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] else: html_theme = "default" # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". 
#html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Language to be used for generating the HTML full-text search index. # Sphinx supports the following languages: # 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja' # 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr' #html_search_language = 'en' # A dictionary with options for the search language support, empty by default. # Now only 'ja' uses this config value #html_search_options = {'type': 'default'} # The name of a javascript file (relative to the configuration directory) that # implements a search results scorer. If empty, the default will be used. #html_search_scorer = 'scorer.js' # Output file base name for HTML help builder. htmlhelp_basename = 'txacmedoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', # Latex figure (float) alignment #'figure_align': 'htbp', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ (master_doc, 'txacme.tex', u'txacme Documentation', u'Tristan Seligmann', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. #latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. 
List of tuples # (source start file, name, description, authors, manual section). man_pages = [ (master_doc, 'txacme', u'txacme Documentation', [author], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ (master_doc, 'txacme', u'txacme Documentation', author, 'txacme', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False # Example configuration for intersphinx: refer to the Python standard library. intersphinx_mapping = { 'python': ('https://docs.python.org/3/', (None, 'python-objects.inv')), 'acme': ('https://acme-python.readthedocs.io/en/latest/', (None, 'acme-objects.inv')), 'jose': ('https://josepy.readthedocs.io/en/latest/', (None, 'jose-objects.inv')), 'twisted': ('https://twisted.readthedocs.io/en/latest/', (None, 'twisted-objects.inv')), 'cryptography': ('https://cryptography.io/en/latest/', (None, 'cryptography-objects.inv')), 'pem': ('https://pem.readthedocs.io/en/stable/', (None, 'pem-objects.inv')), } nitpick_ignore = [('py:class', 'testtools.testcase.TestCase')] import inspect from os.path import relpath, dirname def linkcode_resolve(domain, info): """ Determine the URL corresponding to Python object """ if domain != 'py': return None modname = info['module'] fullname = info['fullname'] submod = sys.modules.get(modname) if submod is None: return None obj = submod for part in fullname.split('.'): try: obj = getattr(obj, part) except: return None try: fn = inspect.getsourcefile(obj) except: fn = None if not fn: return None try: source, lineno = inspect.findsource(obj) except: lineno = None if lineno: linespec = "#L%d" % (lineno + 1) else: linespec = "" fn = relpath(fn, start='..') return "https://github.com/mithrandi/txacme/blob/%s/%s%s" % ( txacme_version_info['full-revisionid'], fn, linespec) def run_apidoc(_): modules = ['../src/txacme'] for module in modules: cur_dir = os.path.abspath(os.path.dirname(__file__)) output_path = os.path.join(cur_dir, 'api') cmd_path = 'sphinx-apidoc' if hasattr(sys, 'real_prefix'): # Check to see if we are in a virtualenv # If we are, assemble the path manually cmd_path = os.path.abspath(os.path.join(sys.prefix, 'bin', 'sphinx-apidoc')) subprocess.check_call( [cmd_path, '-e', '-o', output_path, module, '--force'], env={'SPHINX_APIDOC_OPTIONS': 'members'}) def setup(app): app.connect('builder-inited', run_apidoc) txacme-0.9.3/docs/index.rst000066400000000000000000000006171364626325400156140ustar00rootroot00000000000000txacme: A Twisted implementation of the ACME protocol ===================================================== .. include:: ../README.rst :start-after: teaser-begin Contents ======== .. 
toctree:: :maxdepth: 2 using certs-dir api-stability changelog API documentation Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` txacme-0.9.3/docs/using.rst000066400000000000000000000120651364626325400156320ustar00rootroot00000000000000Using txacme ============ There are several possible ways to make use of txacme: * An issuing service for keeping certificates in a certificate store up to date; * A server endpoint, which may be used anywhere an endpoint is accepted, that combines the issuing service with TLS SNI for certificate mapping; * A server endpoint string parser, which can be used anywhere a server endpoint string is accepted, that produces a server endpoint. While the server endpoint string parser is a convenient high-level API, the lower-level APIs (the issuing service and server endpoint) may be useful for better integration with existing systems. For example, if the requirements for storing certificates were more complex than a directory on a filesystem, one might implement a certificate store that communicated with a REST webservice or directly with a database and pass an instance of this to the server endpoint. Server endpoint string ---------------------- Note: as of 09-Jan-2018, the Let's Encrypt server has indefinitely `disabled`_ the ``tls-sni-01`` validation method needed by these endpoints (for new sites), due to a security problem. A future ``txacme`` release might reimplement these with a different validation method: see `Issue 129`_ for details. The simplest way to use txacme is the stream server endpoint string. Two endpoint parsers are provided, under the ``le:`` (Let's Encrypt) and ``lets:`` (Let's Encrypt Test in Staging) prefixes. The endpoint takes as parameters a directory to store certificates in, and the underlying endpoint to listen on. One might use the following command to start a Twisted web server on TCP port 443 and store certificates in the ``/srv/www/certs`` directory: .. code-block:: shell $ twistd -n web --port lets:/srv/www/certs:tcp:443 --path /srv/www/root .. note:: The certificate directory must already exist, and be writable by the user the application is running as. .. note:: The Let's Encrypt staging environment generates certificates signed by *Fake LE Intermediate X1*, but does not have the `stringent limits`_ that the production environment has, so using it for testing before switching to the production environment is highly recommended. .. _stringent limits: https://community.letsencrypt.org/t/rate-limits-for-lets-encrypt/6769 .. _disabled: https://community.letsencrypt.org/t/important-what-you-need-to-know-about-tls-sni-validation-issues/50811 .. _Issue 129: https://github.com/twisted/txacme/issues/129 The ACME client key will be stored in ``client.key`` in the certificate directory; if this file does not exist, a new key will automatically be generated. Certificates (and chain certificates and keys) in PEM format will be stored in the certificate directory using filenames based on the servername that the client sends via SNI, e.g. ``some.domain.name.pem``. The contents of the directory are documented in more detail :doc:`here `. If there is no existing certificate available for a domain, an empty file should be created to have one issued on startup; the behaviour is as if the certificate had expired. Importantly, clients that do not perform SNI will not be able to connect to the endpoint.
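The same endpoint string can also be used programmatically. The sketch below is not taken from the txacme codebase; it simply feeds the endpoint string shown above to Twisted's ``serverFromString``, and assumes that txacme is installed and that the (placeholder) ``/srv/www/certs`` directory already exists and is writable:

.. code-block:: python

   from twisted.internet import reactor
   from twisted.internet.endpoints import serverFromString
   from twisted.web.resource import Resource
   from twisted.web.server import Site

   # "lets:" uses the Let's Encrypt staging environment; switch to "le:"
   # for production once testing succeeds. The certificate directory path
   # and the empty Resource served here are placeholders.
   endpoint = serverFromString(reactor, "lets:/srv/www/certs:tcp:443")
   endpoint.listen(Site(Resource()))
   reactor.run()

As with the ``twistd`` example, the certificate directory must already exist and be writable by the user the application runs as.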
At startup, and every 24 hours, a check will be performed for expiring certificates; if a certificate will expire in less than 30 days' time, it will be reissued. If the reissue fails, it will be retried at the next check. If a certificate will expire in less than 15 days' time, and reissue fails, a message will be logged at *CRITICAL* level. .. note:: This endpoint uses the ``tls-sni-01`` challenge type to perform authorization; this requires that the endpoint is reachable on port 443 for those domains (possibly via port forwarding). Sharing certificates ~~~~~~~~~~~~~~~~~~~~ A certificate directory can be shared amongst multiple applications by using ``le:`` for the application running on port 443 to keep the certificates up to date, and ``txsni:`` for other applications to make use of certificates in the same directory. Server endpoint --------------- The endpoint can be instantiated directly as well; this allows extra customizations beyond what the string syntax provides for. Most of the parameters that can be passed correspond to the parameters of the `issuing service`_. .. autoclass:: txacme.endpoint.AutoTLSEndpoint :noindex: :members: Issuing service --------------- The `server endpoint`_ is a simple wrapper that combines the functionality of the `txsni`_ endpoint for handling SNI, and the issuing service which takes care of (re)issuing certificates using an ACME service. .. autoclass:: txacme.service.AcmeIssuingService :noindex: :members: The `~txacme.interfaces.ICertificateStore` and `~txacme.interfaces.IResponder` interfaces are the main extension points for using the issuing service directly. For example, a custom implementation of `~txacme.interfaces.ICertificateStore` might manage the certificate configuration of a cloud load balancer, implementing the ``dns-01`` challenge type by modifying DNS entries in the cloud DNS configuration. .. _txsni: https://github.com/glyph/txsni txacme-0.9.3/pyproject.toml000066400000000000000000000001311364626325400157260ustar00rootroot00000000000000[tool.towncrier] package = 'txacme' package_dir = 'src/' filename = 'docs/changelog.rst' txacme-0.9.3/requirements-doc.txt000066400000000000000000000001121364626325400170400ustar00rootroot00000000000000.[test,libcloud] sphinx sphinx_rtd_theme repoze.sphinx.autointerface>=0.8 txacme-0.9.3/setup.cfg000066400000000000000000000011621364626325400146400ustar00rootroot00000000000000[wheel] universal = 1 [isort] default_section=THIRDPARTY known_first_party=txacme multi_line_output=4 lines_after_imports=2 balanced_wrapping=True order_by_type=False [flake8] exclude=src/txacme/_version.py,src/txacme/interfaces.py ignore_names=setUp,_setUp,tearDown,startService,stopService # See the docstring in versioneer.py for instructions. Note that you must # re-run 'versioneer.py setup' after changing this section, and commit the # resulting files. 
[versioneer] VCS = git style = pep440 versionfile_source = src/txacme/_version.py versionfile_build = txacme/_version.py tag_prefix = parentdir_prefix = txacme- txacme-0.9.3/setup.py000066400000000000000000000043011364626325400145270ustar00rootroot00000000000000import os import codecs import versioneer from setuptools import setup, find_packages HERE = os.path.abspath(os.path.dirname(__file__)) def read(*parts): with codecs.open(os.path.join(HERE, *parts), 'rb', 'utf-8') as f: return f.read() setup( version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), name='txacme', description='ACME protocol implementation for Twisted', license='Expat', url='https://github.com/mithrandi/txacme', author='Tristan Seligmann', author_email='mithrandi@mithrandi.net', maintainer='Tristan Seligmann', maintainer_email='mithrandi@mithrandi.net', long_description=read('README.rst'), packages=find_packages(where='src') + ['twisted.plugins'], package_dir={'': 'src'}, zip_safe=True, classifiers=[ 'Development Status :: 3 - Alpha', 'Intended Audience :: Developers', 'Natural Language :: English', 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Programming Language :: Python :: 3.6', 'Programming Language :: Python :: 3.7', 'Programming Language :: Python :: Implementation :: CPython', 'Programming Language :: Python :: Implementation :: PyPy', 'Topic :: Software Development :: Libraries :: Python Modules', ], install_requires=[ 'acme>=0.21.0,<1.0.0', 'attrs>=17.4.0', 'eliot>=0.8.0', 'josepy', 'pem>=16.1.0', 'treq>=15.1.0', 'twisted[tls]>=16.2.0', 'txsni', 'pyopenssl>=17.1.0', ], extras_require={ 'libcloud': [ 'apache-libcloud', ], 'test': [ 'fixtures>=1.4.0', 'hypothesis>=4.0.0,<5.0.0', 'service_identity>=17.0.0', 'testrepository>=0.0.20', 'testscenarios', 'testtools>=2.1.0', ], }, ) txacme-0.9.3/src/000077500000000000000000000000001364626325400136065ustar00rootroot00000000000000txacme-0.9.3/src/integration/000077500000000000000000000000001364626325400161315ustar00rootroot00000000000000txacme-0.9.3/src/integration/__init__.py000066400000000000000000000000001364626325400202300ustar00rootroot00000000000000txacme-0.9.3/src/integration/test_client.py000066400000000000000000000230131364626325400210170ustar00rootroot00000000000000""" Integration tests for :mod:`acme.client`. 
""" from __future__ import print_function from functools import partial from os import getenv from josepy.jwk import JWKRSA from acme.messages import NewRegistration, STATUS_PENDING from cryptography.hazmat.primitives import serialization from eliot import start_action from eliot.twisted import DeferredContext from twisted.internet import reactor from twisted.internet.defer import succeed from twisted.internet.endpoints import serverFromString from twisted.python.compat import _PY3 from twisted.python.filepath import FilePath from twisted.trial.unittest import TestCase from twisted.web.resource import Resource from twisted.web.server import Site from txsni.snimap import SNIMap from txsni.tlsendpoint import TLSEndpoint from txacme.challenges import TLSSNI01Responder from txacme.client import ( answer_challenge, Client, fqdn_identifier, poll_until_valid) from txacme.messages import CertificateRequest from txacme.testing import FakeClient, NullResponder from txacme.urls import LETSENCRYPT_STAGING_DIRECTORY from txacme.util import csr_for_names, generate_private_key, tap try: from txacme.challenges import LibcloudDNSResponder except ImportError: pass class ClientTestsMixin(object): """ Integration tests for the ACME client. """ def _test_create_client(self): with start_action(action_type=u'integration:create_client').context(): self.key = JWKRSA(key=generate_private_key('rsa')) return ( DeferredContext(self._create_client(self.key)) .addActionFinish()) def _test_register(self, new_reg=None): with start_action(action_type=u'integration:register').context(): return ( DeferredContext(self.client.register(new_reg)) .addActionFinish()) def _test_agree_to_tos(self, reg): with start_action(action_type=u'integration:agree_to_tos').context(): return ( DeferredContext(self.client.agree_to_tos(reg)) .addActionFinish()) def _test_request_challenges(self, host): action = start_action( action_type=u'integration:request_challenges', host=host) with action.context(): return ( DeferredContext( self.client.request_challenges(fqdn_identifier(host))) .addActionFinish()) def _test_poll_pending(self, auth): action = start_action(action_type=u'integration:poll_pending') with action.context(): return ( DeferredContext(self.client.poll(auth)) .addCallback( lambda auth: self.assertEqual(auth[0].body.status, STATUS_PENDING)) .addActionFinish()) def _test_answer_challenge(self, responder): action = start_action(action_type=u'integration:answer_challenge') with action.context(): self.responder = responder return ( DeferredContext( answer_challenge( self.authzr, self.client, [responder])) .addActionFinish()) def _test_poll(self, auth): action = start_action(action_type=u'integration:poll') with action.context(): return ( DeferredContext(poll_until_valid(auth, reactor, self.client)) .addActionFinish()) def _test_issue(self, name): def got_cert(certr): key_bytes = self.issued_key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, encryption_algorithm=serialization.NoEncryption()) FilePath('issued.crt').setContent(certr.body) FilePath('issued.key').setContent(key_bytes) return certr action = start_action(action_type=u'integration:issue') with action.context(): self.issued_key = generate_private_key('rsa') csr = csr_for_names([name], self.issued_key) return ( DeferredContext( self.client.request_issuance(CertificateRequest(csr=csr))) .addCallback(got_cert) .addActionFinish()) def _test_chain(self, certr): action = start_action(action_type=u'integration:chain') with 
action.context(): return ( DeferredContext(self.client.fetch_chain(certr)) .addActionFinish()) def _test_registration(self): return ( DeferredContext(self._test_create_client()) .addCallback(partial(setattr, self, 'client')) .addCallback(lambda _: self._test_register()) .addCallback(tap( lambda reg1: self.assertEqual(reg1.body.contact, ()))) .addCallback(tap( lambda reg1: self._test_register( NewRegistration.from_data(email=u'example@example.com')) .addCallback(tap( lambda reg2: self.assertEqual(reg1.uri, reg2.uri))) .addCallback(lambda reg2: self.assertEqual( reg2.body.contact, (u'mailto:example@example.com',))))) .addCallback(self._test_agree_to_tos) .addCallback( lambda _: self._test_request_challenges(self.HOST)) .addCallback(partial(setattr, self, 'authzr')) .addCallback(lambda _: self._create_responder()) .addCallback(tap(lambda _: self._test_poll_pending(self.authzr))) .addCallback(self._test_answer_challenge) .addCallback(tap(lambda _: self._test_poll(self.authzr))) .addCallback(lambda stop_responding: stop_responding()) .addCallback(lambda _: self._test_issue(self.HOST)) .addCallback(self._test_chain) .addActionFinish()) def test_issuing(self): action = start_action(action_type=u'integration') with action.context(): return self._test_registration() def _getenv(name, default=None): """ Sigh. """ if not _PY3: name = name.encode('utf-8') value = getenv(name) if value is None: return default if not _PY3: value = value.decode('utf-8') return value class LetsEncryptStagingTLSSNI01Tests(ClientTestsMixin, TestCase): """ Tests using the real ACME client against the Let's Encrypt staging environment, and the tls-sni-01 challenge. You must set $ACME_HOST to a hostname that will, when connected to on port 443, reach a listening socket opened by the tests on $ACME_ENDPOINT. """ HOST = _getenv(u'ACME_HOST') ENDPOINT = _getenv(u'ACME_ENDPOINT') if None in [HOST, ENDPOINT]: skip = 'Must provide $ACME_HOST and $ACME_ENDPOINT' elif not _PY3: ENDPOINT = ENDPOINT.encode('utf-8') def _create_client(self, key): return ( Client.from_url(reactor, LETSENCRYPT_STAGING_DIRECTORY, key=key) .addCallback(tap( lambda client: self.addCleanup( client._client._treq._agent._pool.closeCachedConnections))) ) def _create_responder(self): action = start_action(action_type=u'integration:create_responder') with action.context(): responder = TLSSNI01Responder() host_map = responder.wrap_host_map({}) site = Site(Resource()) endpoint = TLSEndpoint( endpoint=serverFromString(reactor, self.ENDPOINT), contextFactory=SNIMap(host_map)) return ( DeferredContext(endpoint.listen(site)) .addCallback(lambda port: self.addCleanup(port.stopListening)) .addCallback(lambda _: responder) .addActionFinish()) class LetsEncryptStagingLibcloudTests(ClientTestsMixin, TestCase): """ Tests using the real ACME client against the Let's Encrypt staging environment, and the dns-01 challenge. You must set $ACME_HOST to a hostname that will be used for the challenge, and $LIBCLOUD_PROVIDER, $LIBCLOUD_USERNAME, $LIBCLOUD_PASSWORD, and $LIBCLOUD_ZONE to the appropriate values for the DNS provider to complete the challenge with. 
""" HOST = _getenv(u'ACME_HOST') PROVIDER = _getenv(u'LIBCLOUD_PROVIDER') USERNAME = _getenv(u'LIBCLOUD_USERNAME') PASSWORD = _getenv(u'LIBCLOUD_PASSWORD') ZONE = _getenv(u'LIBCLOUD_ZONE') if None in (HOST, PROVIDER, USERNAME, PASSWORD): skip = 'Must provide $ACME_HOST and $LIBCLOUD_*' def _create_client(self, key): return ( Client.from_url(reactor, LETSENCRYPT_STAGING_DIRECTORY, key=key) .addCallback(tap( lambda client: self.addCleanup( client._client._treq._agent._pool.closeCachedConnections))) ) def _create_responder(self): with start_action(action_type=u'integration:create_responder'): return LibcloudDNSResponder.create( reactor, self.PROVIDER, self.USERNAME, self.PASSWORD, self.ZONE) class FakeClientTests(ClientTestsMixin, TestCase): """ Tests against our verified fake. """ HOST = u'example.com' def _create_client(self, key): return succeed(FakeClient(key, reactor)) def _create_responder(self): return succeed(NullResponder(u'tls-sni-01')) __all__ = ['LetsEncryptStagingTLSSNI01Tests', 'FakeClientTests'] txacme-0.9.3/src/twisted/000077500000000000000000000000001364626325400152715ustar00rootroot00000000000000txacme-0.9.3/src/twisted/plugins/000077500000000000000000000000001364626325400167525ustar00rootroot00000000000000txacme-0.9.3/src/twisted/plugins/txacme_endpoint.py000066400000000000000000000003661364626325400225120ustar00rootroot00000000000000from txacme._endpoint_parser import _AcmeParser from txacme.urls import LETSENCRYPT_DIRECTORY, LETSENCRYPT_STAGING_DIRECTORY le_parser = _AcmeParser('le', LETSENCRYPT_DIRECTORY) lets_parser = _AcmeParser('lets', LETSENCRYPT_STAGING_DIRECTORY) txacme-0.9.3/src/txacme/000077500000000000000000000000001364626325400150675ustar00rootroot00000000000000txacme-0.9.3/src/txacme/__init__.py000066400000000000000000000001351364626325400171770ustar00rootroot00000000000000 from ._version import get_versions __version__ = get_versions()['version'] del get_versions txacme-0.9.3/src/txacme/_endpoint_parser.py000066400000000000000000000015311364626325400207740ustar00rootroot00000000000000""" Standalone module for the endpoint parser to avoid eagerly importing a bunch of things which will install a reactor. """ import attr from twisted.internet.interfaces import IStreamServerEndpointStringParser from twisted.plugin import IPlugin from zope.interface import implementer @implementer(IPlugin, IStreamServerEndpointStringParser) @attr.s class _AcmeParser(object): """ txacme endpoint parser. Connects an `AutoTLSEndpoint` to the an ACME certificate authority and a directory certificate store. """ prefix = attr.ib() directory = attr.ib() def parseStreamServer(self, reactor, *args, **kwargs): # noqa """ .. seealso:: `txacme.endpoint._parse` """ from txacme.endpoint import _parse return _parse(reactor, self.directory, *args, **kwargs) __all___ = ['_AcmeParser'] txacme-0.9.3/src/txacme/_version.py000066400000000000000000000441021364626325400172660ustar00rootroot00000000000000 # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. 
Generated by # versioneer-0.18 (https://github.com/warner/python-versioneer) """Git implementation of _version.py.""" import errno import os import re import subprocess import sys def get_keywords(): """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). git_refnames = " (tag: 0.9.3)" git_full = "6035f52da50dabcfa1abe562c513a2044cfcd5c5" git_date = "2020-04-17 01:19:56 -0700" keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} return keywords class VersioneerConfig: """Container for Versioneer configuration parameters.""" def get_config(): """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "pep440" cfg.tag_prefix = "" cfg.parentdir_prefix = "txacme-" cfg.versionfile_source = "src/txacme/_version.py" cfg.verbose = False return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator """Decorator to mark a method as the handler for a particular VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None, None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) return None, p.returncode return stdout, p.returncode def versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} else: rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. 
When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. 
""" GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): """TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 
0.post.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%s" % pieces["short"] return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Eexceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%s'" % style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")} def get_versions(): """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. 
cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree", "date": None} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None} txacme-0.9.3/src/txacme/challenges/000077500000000000000000000000001364626325400171745ustar00rootroot00000000000000txacme-0.9.3/src/txacme/challenges/__init__.py000066400000000000000000000004141364626325400213040ustar00rootroot00000000000000from ._http import HTTP01Responder from ._tls import TLSSNI01Responder try: from ._libcloud import LibcloudDNSResponder except ImportError: # libcloud may not be installed pass __all__ = ['HTTP01Responder', 'LibcloudDNSResponder', 'TLSSNI01Responder'] txacme-0.9.3/src/txacme/challenges/_http.py000066400000000000000000000020331364626325400206620ustar00rootroot00000000000000""" ``http-01`` challenge implementation. """ from twisted.web.resource import Resource from twisted.web.static import Data from zope.interface import implementer from txacme.interfaces import IResponder @implementer(IResponder) class HTTP01Responder(object): """ An ``http-01`` challenge responder for txsni. """ challenge_type = u'http-01' def __init__(self): self.resource = Resource() def start_responding(self, server_name, challenge, response): """ Add the child resource. """ self.resource.putChild( challenge.encode('token').encode('utf-8'), Data(response.key_authorization.encode('utf-8'), 'text/plain')) def stop_responding(self, server_name, challenge, response): """ Remove the child resource. """ encoded_token = challenge.encode('token').encode('utf-8') if self.resource.getStaticEntity(encoded_token) is not None: self.resource.delEntity(encoded_token) __all__ = ['HTTP01Responder'] txacme-0.9.3/src/txacme/challenges/_libcloud.py000066400000000000000000000135641364626325400215130ustar00rootroot00000000000000import hashlib import time from threading import Thread import attr from josepy.b64 import b64encode from libcloud.dns.providers import get_driver from twisted._threads import pool from twisted.internet.defer import Deferred from twisted.python.failure import Failure from zope.interface import implementer from txacme.errors import NotInZone, ZoneNotFound from txacme.interfaces import IResponder from txacme.util import const def _daemon_thread(*a, **kw): """ Create a `threading.Thread`, but always set ``daemon``. """ thread = Thread(*a, **kw) thread.daemon = True return thread def _defer_to_worker(deliver, worker, work, *args, **kwargs): """ Run a task in a worker, delivering the result as a ``Deferred`` in the reactor thread. 
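    A minimal usage sketch, mirroring how this module drives blocking
    libcloud calls in a private thread pool (the worker function below is a
    stand-in for a real blocking call)::

        from twisted._threads import pool
        from twisted.internet import reactor

        worker = pool(lambda: 1, threadFactory=_daemon_thread)

        def blocking_work():
            return 6 * 7    # stands in for a blocking libcloud call

        d = _defer_to_worker(reactor.callFromThread, worker, blocking_work)
        # d fires with 42, with callbacks running in the reactor thread.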
""" deferred = Deferred() def wrapped_work(): try: result = work(*args, **kwargs) except BaseException: f = Failure() deliver(lambda: deferred.errback(f)) else: deliver(lambda: deferred.callback(result)) worker.do(wrapped_work) return deferred def _split_zone(server_name, zone_name): """ Split the zone portion off from a DNS label. :param str server_name: The full DNS label. :param str zone_name: The zone name suffix. """ server_name = server_name.rstrip(u'.') zone_name = zone_name.rstrip(u'.') if not (server_name == zone_name or server_name.endswith(u'.' + zone_name)): raise NotInZone(server_name=server_name, zone_name=zone_name) return server_name[:-len(zone_name)].rstrip(u'.') def _get_existing(driver, zone_name, server_name, validation): """ Get existing validation records. """ if zone_name is None: zones = sorted( (z for z in driver.list_zones() if server_name.rstrip(u'.') .endswith(u'.' + z.domain.rstrip(u'.'))), key=lambda z: len(z.domain), reverse=True) if len(zones) == 0: raise NotInZone(server_name=server_name, zone_name=None) else: zones = [ z for z in driver.list_zones() if z.domain == zone_name] if len(zones) == 0: raise ZoneNotFound(zone_name=zone_name) zone = zones[0] subdomain = _split_zone(server_name, zone.domain) existing = [ record for record in zone.list_records() if record.name == subdomain and record.type == 'TXT' and record.data == validation] return zone, existing, subdomain def _validation(response): """ Get the validation value for a challenge response. """ h = hashlib.sha256(response.key_authorization.encode("utf-8")) return b64encode(h.digest()).decode() @attr.s(hash=False) @implementer(IResponder) class LibcloudDNSResponder(object): """ A ``dns-01`` challenge responder using libcloud. .. warning:: Some libcloud backends are broken with regard to TXT records at the time of writing; the Route 53 backend, for example. This makes them unusable with this responder. .. note:: This implementation relies on invoking libcloud in a thread, so may not be entirely production quality. """ challenge_type = u'dns-01' _reactor = attr.ib() _thread_pool = attr.ib() _driver = attr.ib() zone_name = attr.ib() settle_delay = attr.ib() @classmethod def create(cls, reactor, driver_name, username, password, zone_name=None, settle_delay=60.0): """ Create a responder. :param reactor: The Twisted reactor to use for threading support. :param str driver_name: The name of the libcloud DNS driver to use. :param str username: The username to authenticate with (the meaning of this is driver-specific). :param str password: The username to authenticate with (the meaning of this is driver-specific). :param str zone_name: The zone name to respond in, or ``None`` to automatically detect zones. Usually auto-detection should be fine, unless restricting responses to a single specific zone is desired. :param float settle_delay: The time, in seconds, to allow for the DNS provider to propagate record changes. """ return cls( reactor=reactor, thread_pool=pool(const(1), threadFactory=_daemon_thread), driver=get_driver(driver_name)(username, password), zone_name=zone_name, settle_delay=settle_delay) def _defer(self, f): """ Run a function in our private thread pool. """ return _defer_to_worker( self._reactor.callFromThread, self._thread_pool, f) def start_responding(self, server_name, challenge, response): """ Install a TXT challenge response record. 
""" validation = _validation(response) full_name = challenge.validation_domain_name(server_name) _driver = self._driver def _go(): zone, existing, subdomain = _get_existing( _driver, self.zone_name, full_name, validation) if len(existing) == 0: zone.create_record(name=subdomain, type='TXT', data=validation) time.sleep(self.settle_delay) return self._defer(_go) def stop_responding(self, server_name, challenge, response): """ Remove a TXT challenge response record. """ validation = _validation(response) full_name = challenge.validation_domain_name(server_name) _driver = self._driver def _go(): zone, existing, subdomain = _get_existing( _driver, self.zone_name, full_name, validation) for record in existing: record.delete() return self._defer(_go) __all__ = ['LibcloudDNSResponder'] txacme-0.9.3/src/txacme/challenges/_tls.py000066400000000000000000000046441364626325400205170ustar00rootroot00000000000000""" ``tls-sni-01`` challenge implementation. """ from collections import Mapping import attr from OpenSSL import crypto from twisted.internet.ssl import CertificateOptions from zope.interface import implementer from txacme.interfaces import IResponder from txacme.util import generate_tls_sni_01_cert @attr.s(hash=False) class _MergingMappingProxy(Mapping): """ Merges two mappings together. The proxy is immutable, even if the underlying mappings are mutable. """ underlay = attr.ib() overlay = attr.ib() def __getitem__(self, key): try: return self.overlay[key] except KeyError: return self.underlay[key] def __iter__(self): return iter(set(self.underlay.keys()) | set(self.overlay.keys())) def __len__(self): return sum(1 for _ in self) def __contains__(self, key): return key in self.underlay or key in self.overlay @implementer(IResponder) class TLSSNI01Responder(object): """ A ``tls-sni-01`` challenge responder for txsni. """ challenge_type = u'tls-sni-01' _generate_private_key = None def __init__(self): self._challenge_options = {} def wrap_host_map(self, host_map): """ Wrap a txsni host mapping. The wrapper should be passed to ``txsni.snimap.SNIMap``; any active challenge server names will override entries in the wrapped map, but this scenario is unlikely to occur due to the invalid nature of these names. """ return _MergingMappingProxy( underlay=host_map, overlay=self._challenge_options) def start_responding(self, server_name, challenge, response): """ Put a context into the mapping. """ server_name = response.z_domain.decode('ascii') cert, pkey = generate_tls_sni_01_cert( server_name, _generate_private_key=self._generate_private_key) server_name = server_name.encode('utf-8') self._challenge_options[server_name] = CertificateOptions( certificate=crypto.X509.from_cryptography(cert), privateKey=crypto.PKey.from_cryptography_key(pkey)) def stop_responding(self, server_name, challenge, response): """ Remove a context from the mapping. """ server_name = response.z_domain.decode('ascii') self._challenge_options.pop(server_name, None) __all__ = ['TLSSNI01Responder'] txacme-0.9.3/src/txacme/client.py000066400000000000000000001016711364626325400167250ustar00rootroot00000000000000""" ACME client API (like :mod:`acme.client`) implementation for Twisted. 
""" import re import time from acme import errors, messages from acme.jws import JWS, Header from acme.messages import STATUS_PENDING, STATUS_PROCESSING, STATUS_VALID from josepy.jwa import RS256 from josepy.errors import DeserializationError from eliot.twisted import DeferredContext from treq import json_content from treq.client import HTTPClient from twisted.internet.defer import maybeDeferred, succeed from twisted.internet.task import deferLater from twisted.web import http from twisted.web.client import Agent, HTTPConnectionPool from twisted.web.http_headers import Headers from txacme import __version__ from txacme.logging import ( LOG_ACME_ANSWER_CHALLENGE, LOG_ACME_CONSUME_DIRECTORY, LOG_ACME_CREATE_AUTHORIZATION, LOG_ACME_FETCH_CHAIN, LOG_ACME_POLL_AUTHORIZATION, LOG_ACME_REGISTER, LOG_ACME_REQUEST_CERTIFICATE, LOG_ACME_UPDATE_REGISTRATION, LOG_HTTP_PARSE_LINKS, LOG_JWS_ADD_NONCE, LOG_JWS_CHECK_RESPONSE, LOG_JWS_GET, LOG_JWS_GET_NONCE, LOG_JWS_HEAD, LOG_JWS_POST, LOG_JWS_REQUEST, LOG_JWS_SIGN) from txacme.util import check_directory_url_type, tap _DEFAULT_TIMEOUT = 40 # Borrowed from requests, with modifications. def _parse_header_links(response): """ Parse the links from a Link: header field. .. todo:: Links with the same relation collide at the moment. :param bytes value: The header value. :rtype: `dict` :return: A dictionary of parsed links, keyed by ``rel`` or ``url``. """ values = response.headers.getRawHeaders(b'link', [b'']) value = b','.join(values).decode('ascii') with LOG_HTTP_PARSE_LINKS(raw_link=value) as action: links = {} replace_chars = u' \'"' for val in re.split(u', *<', value): try: url, params = val.split(u';', 1) except ValueError: url, params = val, u'' link = {} link[u'url'] = url.strip(u'<> \'"') for param in params.split(u';'): try: key, value = param.split(u'=') except ValueError: break link[key.strip(replace_chars)] = value.strip(replace_chars) links[link.get(u'rel') or link.get(u'url')] = link action.add_success_fields(parsed_links=links) return links def _default_client(jws_client, reactor, key, alg): """ Make a client if we didn't get one. """ if jws_client is None: pool = HTTPConnectionPool(reactor) agent = Agent(reactor, pool=pool) jws_client = JWSClient(HTTPClient(agent=agent), key, alg) return jws_client def fqdn_identifier(fqdn): """ Construct an identifier from an FQDN. Trivial implementation, just saves on typing. :param str fqdn: The domain name. :return: The identifier. :rtype: `~acme.messages.Identifier` """ return messages.Identifier( typ=messages.IDENTIFIER_FQDN, value=fqdn) class Client(object): """ ACME client interface. """ def __init__(self, directory, reactor, key, jws_client): self._client = jws_client self._clock = reactor self.directory = directory self.key = key @classmethod def from_url( cls, reactor, url, key, alg=RS256, jws_client=None, timeout=_DEFAULT_TIMEOUT, ): """ Construct a client from an ACME directory at a given URL. :param url: The ``twisted.python.url.URL`` to fetch the directory from. See `txacme.urls` for constants for various well-known public directories. :param reactor: The Twisted reactor to use. :param ~josepy.jwk.JWK key: The client key to use. :param alg: The signing algorithm to use. Needs to be compatible with the type of key used. :param JWSClient jws_client: The underlying client to use, or ``None`` to construct one. :param int timeout: Number of seconds to wait for an HTTP response during ACME server interaction. :return: The constructed client. 
:rtype: Deferred[`Client`] """ action = LOG_ACME_CONSUME_DIRECTORY( url=url, key_type=key.typ, alg=alg.name) with action.context(): check_directory_url_type(url) jws_client = _default_client(jws_client, reactor, key, alg) jws_client.timeout = timeout return ( DeferredContext(jws_client.get(url.asText())) .addCallback(json_content) .addCallback(messages.Directory.from_json) .addCallback( tap(lambda d: action.add_success_fields(directory=d))) .addCallback(cls, reactor, key, jws_client) .addActionFinish()) def register(self, new_reg=None): """ Create a new registration with the ACME server. :param ~acme.messages.NewRegistration new_reg: The registration message to use, or ``None`` to construct one. :return: The registration resource. :rtype: Deferred[`~acme.messages.RegistrationResource`] """ if new_reg is None: new_reg = messages.NewRegistration() action = LOG_ACME_REGISTER(registration=new_reg) with action.context(): return ( DeferredContext( self.update_registration( new_reg, uri=self.directory[new_reg])) .addErrback(self._maybe_registered, new_reg) .addCallback( tap(lambda r: action.add_success_fields(registration=r))) .addActionFinish()) @classmethod def _maybe_location(cls, response, uri=None): """ Get the Location: if there is one. """ location = response.headers.getRawHeaders(b'location', [None])[0] if location is not None: return location.decode('ascii') return uri def _maybe_registered(self, failure, new_reg): """ If the registration already exists, we should just load it. """ failure.trap(ServerError) response = failure.value.response if response.code == http.CONFLICT: reg = new_reg.update( resource=messages.UpdateRegistration.resource_type) uri = self._maybe_location(response) return self.update_registration(reg, uri=uri) return failure def agree_to_tos(self, regr): """ Accept the terms-of-service for a registration. :param ~acme.messages.RegistrationResource regr: The registration to update. :return: The updated registration resource. :rtype: Deferred[`~acme.messages.RegistrationResource`] """ return self.update_registration( regr.update( body=regr.body.update( agreement=regr.terms_of_service))) def update_registration(self, regr, uri=None): """ Submit a registration to the server to update it. :param ~acme.messages.RegistrationResource regr: The registration to update. Can be a :class:`~acme.messages.NewRegistration` instead, in order to create a new registration. :param str uri: The url to submit to. Must be specified if a :class:`~acme.messages.NewRegistration` is provided. :return: The updated registration resource. :rtype: Deferred[`~acme.messages.RegistrationResource`] """ if uri is None: uri = regr.uri if isinstance(regr, messages.RegistrationResource): message = messages.UpdateRegistration(**dict(regr.body)) else: message = regr action = LOG_ACME_UPDATE_REGISTRATION(uri=uri, registration=message) with action.context(): return ( DeferredContext(self._client.post(uri, message)) .addCallback(self._parse_regr_response, uri=uri) .addCallback(self._check_regr, regr) .addCallback( tap(lambda r: action.add_success_fields(registration=r))) .addActionFinish()) def _parse_regr_response(self, response, uri=None, new_authzr_uri=None, terms_of_service=None): """ Parse a registration response from the server. 
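        This parsing backs the registration flow above; at the caller level
        the usual sequence is a sketch like::

            d = client.register()
            d.addCallback(client.agree_to_tos)
            d.addCallback(lambda regr: regr.uri)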
""" links = _parse_header_links(response) if u'terms-of-service' in links: terms_of_service = links[u'terms-of-service'][u'url'] if u'next' in links: new_authzr_uri = links[u'next'][u'url'] if new_authzr_uri is None: raise errors.ClientError('"next" link missing') return ( response.json() .addCallback( lambda body: messages.RegistrationResource( body=messages.Registration.from_json(body), uri=self._maybe_location(response, uri=uri), new_authzr_uri=new_authzr_uri, terms_of_service=terms_of_service)) ) def _check_regr(self, regr, new_reg): """ Check that a registration response contains the registration we were expecting. """ body = getattr(new_reg, 'body', new_reg) for k, v in body.items(): if k == 'resource' or not v: continue if regr.body[k] != v: raise errors.UnexpectedUpdate(regr) if regr.body.key != self.key.public_key(): raise errors.UnexpectedUpdate(regr) return regr def request_challenges(self, identifier): """ Create a new authorization. :param ~acme.messages.Identifier identifier: The identifier to authorize. :return: The new authorization resource. :rtype: Deferred[`~acme.messages.AuthorizationResource`] """ action = LOG_ACME_CREATE_AUTHORIZATION(identifier=identifier) with action.context(): message = messages.NewAuthorization(identifier=identifier) return ( DeferredContext( self._client.post(self.directory[message], message)) .addCallback(self._expect_response, http.CREATED) .addCallback(self._parse_authorization) .addCallback(self._check_authorization, identifier) .addCallback( tap(lambda a: action.add_success_fields(authorization=a))) .addActionFinish()) @classmethod def _expect_response(cls, response, code): """ Ensure we got the expected response code. """ if response.code != code: raise errors.ClientError( 'Expected {!r} response but got {!r}'.format( code, response.code)) return response @classmethod def _parse_authorization(cls, response, uri=None): """ Parse an authorization resource. """ links = _parse_header_links(response) try: new_cert_uri = links[u'next'][u'url'] except KeyError: raise errors.ClientError('"next" link missing') return ( response.json() .addCallback( lambda body: messages.AuthorizationResource( body=messages.Authorization.from_json(body), uri=cls._maybe_location(response, uri=uri), new_cert_uri=new_cert_uri)) ) @classmethod def _check_authorization(cls, authzr, identifier): """ Check that the authorization we got is the one we expected. """ if authzr.body.identifier != identifier: raise errors.UnexpectedUpdate(authzr) return authzr def answer_challenge(self, challenge_body, response): """ Respond to an authorization challenge. :param ~acme.messages.ChallengeBody challenge_body: The challenge being responded to. :param ~acme.challenges.ChallengeResponse response: The response to the challenge. :return: The updated challenge resource. :rtype: Deferred[`~acme.messages.ChallengeResource`] """ action = LOG_ACME_ANSWER_CHALLENGE( challenge_body=challenge_body, response=response) with action.context(): return ( DeferredContext( self._client.post(challenge_body.uri, response)) .addCallback(self._parse_challenge) .addCallback(self._check_challenge, challenge_body) .addCallback( tap(lambda c: action.add_success_fields(challenge_resource=c))) .addActionFinish()) @classmethod def _parse_challenge(cls, response): """ Parse a challenge resource. 
""" links = _parse_header_links(response) try: authzr_uri = links['up']['url'] except KeyError: raise errors.ClientError('"up" link missing') return ( response.json() .addCallback( lambda body: messages.ChallengeResource( authzr_uri=authzr_uri, body=messages.ChallengeBody.from_json(body))) ) @classmethod def _check_challenge(cls, challenge, challenge_body): """ Check that the challenge resource we got is the one we expected. """ if challenge.uri != challenge_body.uri: raise errors.UnexpectedUpdate(challenge.uri) return challenge def poll(self, authzr): """ Update an authorization from the server (usually to check its status). """ action = LOG_ACME_POLL_AUTHORIZATION(authorization=authzr) with action.context(): return ( DeferredContext(self._client.get(authzr.uri)) # Spec says we should get 202 while pending, Boulder actually # sends us 200 always, so just don't check. # .addCallback(self._expect_response, http.ACCEPTED) .addCallback( lambda res: self._parse_authorization(res, uri=authzr.uri) .addCallback( self._check_authorization, authzr.body.identifier) .addCallback( lambda authzr: (authzr, self.retry_after(res, _now=self._clock.seconds))) ) .addCallback(tap( lambda a_r: action.add_success_fields( authorization=a_r[0], retry_after=a_r[1]))) .addActionFinish()) @classmethod def retry_after(cls, response, default=5, _now=time.time): """ Parse the Retry-After value from a response. """ val = response.headers.getRawHeaders(b'retry-after', [default])[0] try: return int(val) except ValueError: return http.stringToDatetime(val) - _now() def request_issuance(self, csr): """ Request a certificate. Authorizations should have already been completed for all of the names requested in the CSR. Note that unlike `acme.client.Client.request_issuance`, the certificate resource will have the body data as raw bytes. .. seealso:: `txacme.util.csr_for_names` .. todo:: Delayed issuance is not currently supported, the server must issue the requested certificate immediately. :param csr: A certificate request message: normally `txacme.messages.CertificateRequest` or `acme.messages.CertificateRequest`. :rtype: Deferred[`acme.messages.CertificateResource`] :return: The issued certificate. """ action = LOG_ACME_REQUEST_CERTIFICATE() with action.context(): return ( DeferredContext( self._client.post( self.directory[csr], csr, content_type=DER_CONTENT_TYPE, headers=Headers({b'Accept': [DER_CONTENT_TYPE]}))) .addCallback(self._expect_response, http.CREATED) .addCallback(self._parse_certificate) .addActionFinish()) @classmethod def _parse_certificate(cls, response): """ Parse a response containing a certificate resource. """ links = _parse_header_links(response) try: cert_chain_uri = links[u'up'][u'url'] except KeyError: cert_chain_uri = None return ( response.content() .addCallback( lambda body: messages.CertificateResource( uri=cls._maybe_location(response), cert_chain_uri=cert_chain_uri, body=body)) ) def fetch_chain(self, certr, max_length=10): """ Fetch the intermediary chain for a certificate. :param acme.messages.CertificateResource certr: The certificate to fetch the chain for. :param int max_length: The maximum length of the chain that will be fetched. :rtype: Deferred[List[`acme.messages.CertificateResource`]] :return: The issuer certificate chain, ordered with the trust anchor last. 
""" action = LOG_ACME_FETCH_CHAIN() with action.context(): if certr.cert_chain_uri is None: return succeed([]) elif max_length < 1: raise errors.ClientError('chain too long') return ( DeferredContext( self._client.get( certr.cert_chain_uri, content_type=DER_CONTENT_TYPE, headers=Headers({b'Accept': [DER_CONTENT_TYPE]}))) .addCallback(self._parse_certificate) .addCallback( lambda issuer: self.fetch_chain(issuer, max_length=max_length - 1) .addCallback(lambda chain: [issuer] + chain)) .addActionFinish()) def _find_supported_challenge(authzr, responders): """ Find a challenge combination that consists of a single challenge that the responder can satisfy. :param ~acme.messages.AuthorizationResource auth: The authorization to examine. :type responder: List[`~txacme.interfaces.IResponder`] :param responder: The possible responders to use. :raises NoSupportedChallenges: When a suitable challenge combination is not found. :rtype: Tuple[`~txacme.interfaces.IResponder`, `~acme.messages.ChallengeBody`] :return: The responder and challenge that were found. """ matches = [ (responder, challbs[0]) for challbs in authzr.body.resolved_combinations for responder in responders if [challb.typ for challb in challbs] == [responder.challenge_type]] if len(matches) == 0: raise NoSupportedChallenges(authzr) else: return matches[0] def answer_challenge(authzr, client, responders): """ Complete an authorization using a responder. :param ~acme.messages.AuthorizationResource auth: The authorization to complete. :param .Client client: The ACME client. :type responders: List[`~txacme.interfaces.IResponder`] :param responders: A list of responders that can be used to complete the challenge with. :return: A deferred firing when the authorization is verified. """ responder, challb = _find_supported_challenge(authzr, responders) response = challb.response(client.key) def _stop_responding(): return maybeDeferred( responder.stop_responding, authzr.body.identifier.value, challb.chall, response) return ( maybeDeferred( responder.start_responding, authzr.body.identifier.value, challb.chall, response) .addCallback(lambda _: client.answer_challenge(challb, response)) .addCallback(lambda _: _stop_responding) ) def poll_until_valid(authzr, clock, client, timeout=300.0): """ Poll an authorization until it is in a state other than pending or processing. :param ~acme.messages.AuthorizationResource auth: The authorization to complete. :param clock: The ``IReactorTime`` implementation to use; usually the reactor, when not testing. :param .Client client: The ACME client. :param float timeout: Maximum time to poll in seconds, before giving up. :raises txacme.client.AuthorizationFailed: if the authorization is no longer in the pending, processing, or valid states. :raises: ``twisted.internet.defer.CancelledError`` if the authorization was still in pending or processing state when the timeout was reached. :rtype: Deferred[`~acme.messages.AuthorizationResource`] :return: A deferred firing when the authorization has completed/failed; if the authorization is valid, the authorization resource will be returned. 
""" def repoll(result): authzr, retry_after = result if authzr.body.status in {STATUS_PENDING, STATUS_PROCESSING}: return ( deferLater(clock, retry_after, lambda: None) .addCallback(lambda _: client.poll(authzr)) .addCallback(repoll) ) if authzr.body.status != STATUS_VALID: raise AuthorizationFailed(authzr) return authzr def cancel_timeout(result): if timeout_call.active(): timeout_call.cancel() return result d = client.poll(authzr).addCallback(repoll) timeout_call = clock.callLater(timeout, d.cancel) d.addBoth(cancel_timeout) return d JSON_CONTENT_TYPE = b'application/json' JSON_ERROR_CONTENT_TYPE = b'application/problem+json' DER_CONTENT_TYPE = b'application/pkix-cert' REPLAY_NONCE_HEADER = b'Replay-Nonce' class ServerError(Exception): """ :exc:`acme.messages.Error` isn't usable as an asynchronous exception, because it doesn't allow setting the ``__traceback__`` attribute like Twisted wants to do when cleaning Failures. This type exists to wrap such an error, as well as provide access to the original response. """ def __init__(self, message, response): Exception.__init__(self, message, response) self.message = message self.response = response def __repr__(self): return 'ServerError({!r})'.format(self.message) class AuthorizationFailed(Exception): """ An attempt was made to complete an authorization, but it failed. """ def __init__(self, authzr): self.status = authzr.body.status self.authzr = authzr self.errors = [ challb.error for challb in authzr.body.challenges if challb.error is not None] def __repr__(self): return ( 'AuthorizationFailed(<' '{0.status!r} ' '{0.authzr.body.identifier!r} ' '{0.errors!r}>)'.format(self)) def __str__(self): return repr(self) class NoSupportedChallenges(Exception): """ No supported challenges were found in an authorization. """ class JWSClient(object): """ HTTP client using JWS-signed messages. """ timeout = _DEFAULT_TIMEOUT def __init__(self, treq_client, key, alg, user_agent=u'txacme/{}'.format(__version__).encode('ascii')): self._treq = treq_client self._key = key self._alg = alg self._user_agent = user_agent self._nonces = set() def _wrap_in_jws(self, nonce, obj): """ Wrap ``JSONDeSerializable`` object in JWS. .. todo:: Implement ``acmePath``. :param ~josepy.interfaces.JSONDeSerializable obj: :param bytes nonce: :rtype: `bytes` :return: JSON-encoded data """ with LOG_JWS_SIGN(key_type=self._key.typ, alg=self._alg.name, nonce=nonce): jobj = obj.json_dumps().encode() return ( JWS.sign( payload=jobj, key=self._key, alg=self._alg, nonce=nonce) .json_dumps() .encode()) @classmethod def _check_response(cls, response, content_type=JSON_CONTENT_TYPE): """ Check response content and its type. .. note:: Unlike :mod:`acme.client`, checking is strict. :param bytes content_type: Expected Content-Type response header. If the response Content-Type does not match, :exc:`ClientError` is raised. :raises .ServerError: If server response body carries HTTP Problem (draft-ietf-appsawg-http-problem-00). :raises ~acme.errors.ClientError: In case of other networking errors. 
""" def _got_failure(f): f.trap(ValueError) return None def _got_json(jobj): if 400 <= response.code < 600: if response_ct == JSON_ERROR_CONTENT_TYPE and jobj is not None: raise ServerError( messages.Error.from_json(jobj), response) else: # response is not JSON object raise errors.ClientError(response) elif response_ct != content_type: raise errors.ClientError( 'Unexpected response Content-Type: {0!r}'.format( response_ct)) elif content_type == JSON_CONTENT_TYPE and jobj is None: raise errors.ClientError(response) return response response_ct = response.headers.getRawHeaders( b'Content-Type', [None])[0] action = LOG_JWS_CHECK_RESPONSE( expected_content_type=content_type, response_content_type=response_ct) with action.context(): # TODO: response.json() is called twice, once here, and # once in _get and _post clients return ( DeferredContext(response.json()) .addErrback(_got_failure) .addCallback(_got_json) .addActionFinish()) def _send_request(self, method, url, *args, **kwargs): """ Send HTTP request. :param str method: The HTTP method to use. :param str url: The URL to make the request to. :return: Deferred firing with the HTTP response. """ action = LOG_JWS_REQUEST(url=url) with action.context(): headers = kwargs.setdefault('headers', Headers()) headers.setRawHeaders(b'user-agent', [self._user_agent]) kwargs.setdefault('timeout', self.timeout) return ( DeferredContext( self._treq.request(method, url, *args, **kwargs)) .addCallback( tap(lambda r: action.add_success_fields( code=r.code, content_type=r.headers.getRawHeaders( b'content-type', [None])[0]))) .addActionFinish()) def head(self, url, *args, **kwargs): """ Send HEAD request without checking the response. Note that ``_check_response`` is not called, as there will be no response body to check. :param str url: The URL to make the request to. """ with LOG_JWS_HEAD().context(): return DeferredContext( self._send_request(u'HEAD', url, *args, **kwargs) ).addActionFinish() def get(self, url, content_type=JSON_CONTENT_TYPE, **kwargs): """ Send GET request and check response. :param str method: The HTTP method to use. :param str url: The URL to make the request to. :raises txacme.client.ServerError: If server response body carries HTTP Problem (draft-ietf-appsawg-http-problem-00). :raises acme.errors.ClientError: In case of other protocol errors. :return: Deferred firing with the checked HTTP response. """ with LOG_JWS_GET().context(): return ( DeferredContext(self._send_request(u'GET', url, **kwargs)) .addCallback(self._check_response, content_type=content_type) .addActionFinish()) def _add_nonce(self, response): """ Store a nonce from a response we received. :param twisted.web.iweb.IResponse response: The HTTP response. :return: The response, unmodified. """ nonce = response.headers.getRawHeaders( REPLAY_NONCE_HEADER, [None])[0] with LOG_JWS_ADD_NONCE(raw_nonce=nonce) as action: if nonce is None: raise errors.MissingNonce(response) else: try: decoded_nonce = Header._fields['nonce'].decode( nonce.decode('ascii') ) action.add_success_fields(nonce=decoded_nonce) except DeserializationError as error: raise errors.BadNonce(nonce, error) self._nonces.add(decoded_nonce) return response def _get_nonce(self, url): """ Get a nonce to use in a request, removing it from the nonces on hand. 
""" action = LOG_JWS_GET_NONCE() if len(self._nonces) > 0: with action: nonce = self._nonces.pop() action.add_success_fields(nonce=nonce) return succeed(nonce) else: with action.context(): return ( DeferredContext(self.head(url)) .addCallback(self._add_nonce) .addCallback(lambda _: self._nonces.pop()) .addCallback(tap( lambda nonce: action.add_success_fields(nonce=nonce))) .addActionFinish()) def _post(self, url, obj, content_type, **kwargs): """ POST an object and check the response. :param str url: The URL to request. :param ~josepy.interfaces.JSONDeSerializable obj: The serializable payload of the request. :param bytes content_type: The expected content type of the response. :raises txacme.client.ServerError: If server response body carries HTTP Problem (draft-ietf-appsawg-http-problem-00). :raises acme.errors.ClientError: In case of other protocol errors. """ with LOG_JWS_POST().context(): headers = kwargs.setdefault('headers', Headers()) headers.setRawHeaders(b'content-type', [JSON_CONTENT_TYPE]) return ( DeferredContext(self._get_nonce(url)) .addCallback(self._wrap_in_jws, obj) .addCallback( lambda data: self._send_request( u'POST', url, data=data, **kwargs)) .addCallback(self._add_nonce) .addCallback(self._check_response, content_type=content_type) .addActionFinish()) def post(self, url, obj, content_type=JSON_CONTENT_TYPE, **kwargs): """ POST an object and check the response. Retry once if a badNonce error is received. :param str url: The URL to request. :param ~josepy.interfaces.JSONDeSerializable obj: The serializable payload of the request. :param bytes content_type: The expected content type of the response. By default, JSON. :raises txacme.client.ServerError: If server response body carries HTTP Problem (draft-ietf-appsawg-http-problem-00). :raises acme.errors.ClientError: In case of other protocol errors. """ def retry_bad_nonce(f): f.trap(ServerError) # The current RFC draft defines the namespace as # urn:ietf:params:acme:error:, but earlier drafts (and some # current implementations) use urn:acme:error: instead. We # don't really care about the namespace here, just the error code. if f.value.message.typ.split(':')[-1] == 'badNonce': # If one nonce is bad, others likely are too. Let's clear them # and re-add the one we just got. self._nonces.clear() self._add_nonce(f.value.response) return self._post(url, obj, content_type, **kwargs) return f return ( self._post(url, obj, content_type, **kwargs) .addErrback(retry_bad_nonce)) __all__ = [ 'Client', 'JWSClient', 'ServerError', 'JSON_CONTENT_TYPE', 'JSON_ERROR_CONTENT_TYPE', 'REPLAY_NONCE_HEADER', 'fqdn_identifier', 'answer_challenge', 'poll_until_valid', 'NoSupportedChallenges', 'AuthorizationFailed', 'DER_CONTENT_TYPE'] txacme-0.9.3/src/txacme/endpoint.py000066400000000000000000000162741364626325400172730ustar00rootroot00000000000000""" A TLS endpoint that supports SNI automatically issues / renews certificates via an ACME CA (eg. Let's Encrypt). 
""" from datetime import timedelta from functools import partial import attr from josepy.jwk import JWKRSA from josepy.jwa import RS256 from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization from twisted.internet.defer import maybeDeferred from twisted.internet.endpoints import serverFromString from twisted.internet.interfaces import IListeningPort, IStreamServerEndpoint from twisted.protocols.tls import TLSMemoryBIOFactory from twisted.python.filepath import FilePath from txsni.snimap import HostDirectoryMap, SNIMap from zope.interface import implementer from txacme.challenges import TLSSNI01Responder from txacme.client import Client, _DEFAULT_TIMEOUT from txacme.service import _default_panic, AcmeIssuingService from txacme.store import DirectoryStore from txacme.util import check_directory_url_type, generate_private_key @implementer(IListeningPort) @attr.s(cmp=False, hash=False) class _WrapperPort(object): """ Wrapper for the underlying port to stop the issuing service when the port is stopped. """ _port = attr.ib() _service = attr.ib() def stopListening(self): # noqa return ( maybeDeferred(self._port.stopListening) .addCallback(lambda _: self._service.stopService())) @implementer(IStreamServerEndpoint) @attr.s(cmp=False, hash=False) class AutoTLSEndpoint(object): """ A server endpoint that does TLS SNI, with certificates automatically (re)issued from an ACME certificate authority. :param reactor: The Twisted reactor. :param directory: ``twisted.python.url.URL`` for the ACME directory to use for issuing certs. :type client_creator: Callable[[reactor, ``twisted.python.url.URL``], Deferred[`txacme.client.Client`]] :param client_creator: A callable called with the reactor and directory URL for creating the ACME client. For example, ``partial(Client.from_url, key=acme_key, alg=RS256)``. :type cert_store: `txacme.interfaces.ICertificateStore` :param cert_store: The certificate store containing the certificates to manage. For example, `txacme.store.DirectoryStore`. :param dict cert_mapping: The certificate mapping to use for SNI; for example, ``txsni.snimap.HostDirectoryMap``. Usually this should correspond to the same underlying storage as ``cert_store``. :param ~datetime.timedelta check_interval: How often to check for expiring certificates. :param ~datetime.timedelta reissue_interval: If a certificate is expiring in less time than this interval, it will be reissued. :param ~datetime.timedelta panic_interval: If a certificate is expiring in less time than this interval, and reissuing fails, the panic callback will be invoked. :type panic: Callable[[Failure, `str`], Deferred] :param panic: A callable invoked with the failure and server name when reissuing fails for a certificate expiring in the ``panic_interval``. For example, you could generate a monitoring alert. The default callback logs a message at *CRITICAL* level. :param generate_key: A 0-arg callable used to generate a private key for a new cert. Normally you would not pass this unless you have specialized key generation requirements. 
""" reactor = attr.ib() directory = attr.ib( validator=lambda inst, a, value: check_directory_url_type(value)) client_creator = attr.ib() cert_store = attr.ib() cert_mapping = attr.ib() sub_endpoint = attr.ib() check_interval = attr.ib(default=timedelta(days=1)) reissue_interval = attr.ib(default=timedelta(days=30)) panic_interval = attr.ib(default=timedelta(days=15)) _panic = attr.ib(default=_default_panic) _generate_key = attr.ib(default=partial(generate_private_key, u'rsa')) def listen(self, protocolFactory): # noqa """ Start an issuing service, and wait until initial issuing is complete. """ def _got_port(port): self.service = AcmeIssuingService( cert_store=self.cert_store, client_creator=partial( self.client_creator, self.reactor, self.directory), clock=self.reactor, responders=[responder], check_interval=self.check_interval, reissue_interval=self.reissue_interval, panic_interval=self.panic_interval, panic=self._panic, generate_key=self._generate_key) self.service.startService() return ( self.service.when_certs_valid() .addCallback( lambda _: _WrapperPort(port=port, service=self.service))) responder = TLSSNI01Responder() sni_map = SNIMap(responder.wrap_host_map(self.cert_mapping)) return ( maybeDeferred( self.sub_endpoint.listen, TLSMemoryBIOFactory( contextFactory=sni_map, isClient=False, wrappedFactory=protocolFactory)) .addCallback(_got_port)) def load_or_create_client_key(pem_path): """ Load the client key from a directory, creating it if it does not exist. .. note:: The client key that will be created will be a 2048-bit RSA key. :type pem_path: ``twisted.python.filepath.FilePath`` :param pem_path: The certificate directory to use, as with the endpoint. """ acme_key_file = pem_path.asTextMode().child(u'client.key') if acme_key_file.exists(): key = serialization.load_pem_private_key( acme_key_file.getContent(), password=None, backend=default_backend()) else: key = generate_private_key(u'rsa') acme_key_file.setContent( key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, encryption_algorithm=serialization.NoEncryption())) return JWKRSA(key=key) def _parse(reactor, directory, pemdir, *args, **kwargs): """ Parse a txacme endpoint description. :param reactor: The Twisted reactor. :param directory: ``twisted.python.url.URL`` for the ACME directory to use for issuing certs. :param str pemdir: The path to the certificate directory to use. """ def colon_join(items): return ':'.join([item.replace(':', '\\:') for item in items]) timeout = kwargs.pop('timeout', _DEFAULT_TIMEOUT) sub = colon_join(list(args) + ['='.join(item) for item in kwargs.items()]) pem_path = FilePath(pemdir).asTextMode() acme_key = load_or_create_client_key(pem_path) return AutoTLSEndpoint( reactor=reactor, directory=directory, client_creator=partial( Client.from_url, key=acme_key, alg=RS256, timeout=timeout), cert_store=DirectoryStore(pem_path), cert_mapping=HostDirectoryMap(pem_path), sub_endpoint=serverFromString(reactor, sub)) __all__ = ['AutoTLSEndpoint', 'load_or_create_client_key'] txacme-0.9.3/src/txacme/errors.py000066400000000000000000000010071364626325400167530ustar00rootroot00000000000000""" Exception types for txacme. """ import attr @attr.s class NotInZone(ValueError): """ The given domain name is not in the configured zone. """ server_name = attr.ib() zone_name = attr.ib() def __str__(self): return repr(self) @attr.s class ZoneNotFound(ValueError): """ The configured zone was not found in the zones at the configured provider. 
""" zone_name = attr.ib() def __str__(self): return repr(self) __all__ = ['NotInZone', 'ZoneNotFound'] txacme-0.9.3/src/txacme/interfaces.py000066400000000000000000000061311364626325400175650ustar00rootroot00000000000000# -*- coding: utf-8 -*- """ Interface definitions for txacme. """ from zope.interface import Attribute, Interface class IResponder(Interface): """ Configuration for a ACME challenge responder. The actual responder may exist somewhere else, this interface is merely for an object that knows how to configure it. """ challenge_type = Attribute( """ The type of challenge this responder is able to respond for. Must correspond to one of the types from `acme.challenges`; for example, ``u'tls-sni-01'``. """) def start_responding(server_name, challenge, response): """ Start responding for a particular challenge. :param str server_name: The server name for which the challenge is being completed. :param challenge: The `acme.challenges` challenge object; the exact type of this object depends on the challenge type. :param response: The `acme.challenges` response object; the exact type of this object depends on the challenge type. :rtype: ``Deferred`` :return: A deferred firing when the challenge is ready to be verified. """ def stop_responding(server_name, challenge, response): """ Stop responding for a particular challenge. May be a noop if a particular responder does not need or implement explicit cleanup; implementations should not rely on this method always being called. :param str server_name: The server name for which the challenge is being completed. :param challenge: The `acme.challenges` challenge object; the exact type of this object depends on the challenge type. :param response: The `acme.challenges` response object; the exact type of this object depends on the challenge type. """ class ICertificateStore(Interface): """ A store of certificate/keys/chains. """ def get(self, server_name): """ Retrieve the current PEM objects for the given server name. :param str server_name: The server name. :raises KeyError: if the given name does not exist in the store. :return: ``Deferred[List[:ref:`pem-objects`]]`` """ def store(self, server_name, pem_objects): """ Store PEM objects for the given server name. Implementations do not have to permit invoking this with a server name that was not already present in the store. :param str server_name: The server name to update. :param pem_objects: A list of :ref:`pem-objects`; must contain exactly one private key, a certificate corresponding to that private key, and zero or more chain certificates. :rtype: ``Deferred`` """ def as_dict(self): """ Get all certificates in the store. :rtype: ``Deferred[Dict[str, List[:ref:`pem-objects`]]]`` :return: A deferred firing with a dict mapping server names to :ref:`pem-objects`. """ __all__ = ['IResponder', 'ICertificateStore'] txacme-0.9.3/src/txacme/logging.py000066400000000000000000000111131364626325400170640ustar00rootroot00000000000000""" Eliot message and action definitions. 
""" from operator import methodcaller from eliot import ActionType, Field, fields from twisted.python.compat import unicode NONCE = Field( u'nonce', lambda nonce: nonce.encode('hex').decode('ascii'), u'A nonce value') LOG_JWS_SIGN = ActionType( u'txacme:jws:sign', fields(NONCE, key_type=unicode, alg=unicode), fields(), u'Signing a message with JWS') LOG_JWS_HEAD = ActionType( u'txacme:jws:http:head', fields(), fields(), u'A JWSClient HEAD request') LOG_JWS_GET = ActionType( u'txacme:jws:http:get', fields(), fields(), u'A JWSClient GET request') LOG_JWS_POST = ActionType( u'txacme:jws:http:post', fields(), fields(), u'A JWSClient POST request') LOG_JWS_REQUEST = ActionType( u'txacme:jws:http:request', fields(url=unicode), fields(Field.for_types(u'content_type', [unicode, None], u'Content-Type header field'), code=int), u'A JWSClient request') LOG_JWS_CHECK_RESPONSE = ActionType( u'txacme:jws:http:check-response', fields(Field.for_types(u'response_content_type', [unicode, None], u'Content-Type header field'), expected_content_type=unicode), fields(), u'Checking a JWSClient response') LOG_JWS_GET_NONCE = ActionType( u'txacme:jws:nonce:get', fields(), fields(NONCE), u'Consuming a nonce') LOG_JWS_ADD_NONCE = ActionType( u'txacme:jws:nonce:add', fields(Field.for_types(u'raw_nonce', [bytes, None], u'Nonce header field')), fields(NONCE), u'Adding a nonce') LOG_HTTP_PARSE_LINKS = ActionType( u'txacme:http:parse-links', fields(raw_link=unicode), fields(parsed_links=dict), u'Parsing HTTP Links') DIRECTORY = Field(u'directory', methodcaller('to_json'), u'An ACME directory') URL = Field(u'url', methodcaller('asText'), u'A URL object') LOG_ACME_CONSUME_DIRECTORY = ActionType( u'txacme:acme:client:from-url', fields(URL, key_type=unicode, alg=unicode), fields(DIRECTORY), u'Creating an ACME client from a remote directory') LOG_ACME_REGISTER = ActionType( u'txacme:acme:client:registration:create', fields(Field(u'registration', methodcaller('to_json'), u'An ACME registration')), fields(Field(u'registration', methodcaller('to_json'), u'The resulting registration')), u'Registering with an ACME server') LOG_ACME_UPDATE_REGISTRATION = ActionType( u'txacme:acme:client:registration:update', fields(Field(u'registration', methodcaller('to_json'), u'An ACME registration'), uri=unicode), fields(Field(u'registration', methodcaller('to_json'), u'The updated registration')), u'Updating a registration') LOG_ACME_CREATE_AUTHORIZATION = ActionType( u'txacme:acme:client:authorization:create', fields(Field(u'identifier', methodcaller('to_json'), u'An identifier')), fields(Field(u'authorization', methodcaller('to_json'), u'The authorization')), u'Creating an authorization') LOG_ACME_ANSWER_CHALLENGE = ActionType( u'txacme:acme:client:challenge:answer', fields(Field(u'challenge_body', methodcaller('to_json'), u'The challenge body'), Field(u'response', methodcaller('to_json'), u'The challenge response')), fields(Field(u'challenge_resource', methodcaller('to_json'), u'The updated challenge')), u'Answering an authorization challenge') LOG_ACME_POLL_AUTHORIZATION = ActionType( u'txacme:acme:client:authorization:poll', fields(Field(u'authorization', methodcaller('to_json'), u'The authorization resource')), fields(Field(u'authorization', methodcaller('to_json'), u'The updated authorization'), Field.for_types(u'retry_after', [int, float], u'How long before polling again?')), u'Polling an authorization') LOG_ACME_REQUEST_CERTIFICATE = ActionType( u'txacme:acme:client:certificate:request', fields(), fields(), u'Requesting a 
certificate') LOG_ACME_FETCH_CHAIN = ActionType( u'txacme:acme:client:certificate:fetch-chain', fields(), fields(), u'Fetching a certificate chain') txacme-0.9.3/src/txacme/messages.py000066400000000000000000000014241364626325400172510ustar00rootroot00000000000000""" ACME protocol messages. This module provides supplementary message implementations that are not already provided by the `acme` library. .. seealso:: `acme.messages` """ from acme.fields import Resource from josepy import Field, JSONObjectWithFields from txacme.util import decode_csr, encode_csr class CertificateRequest(JSONObjectWithFields): """ ACME new-cert request. Differs from the upstream version because it wraps a Cryptography CSR object instead of a PyOpenSSL one. .. seealso:: `acme.messages.CertificateRequest`, `cryptography.x509.CertificateSigningRequest` """ resource_type = 'new-cert' resource = Resource(resource_type) csr = Field('csr', decoder=decode_csr, encoder=encode_csr) __all__ = ['CertificateRequest'] txacme-0.9.3/src/txacme/newsfragments/000077500000000000000000000000001364626325400177525ustar00rootroot00000000000000txacme-0.9.3/src/txacme/newsfragments/.gitignore000066400000000000000000000000141364626325400217350ustar00rootroot00000000000000!.gitignore txacme-0.9.3/src/txacme/service.py000066400000000000000000000260371364626325400171110ustar00rootroot00000000000000from datetime import timedelta from functools import partial from acme import messages import attr from cryptography import x509 from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization from pem import Certificate, Key from twisted.application.internet import TimerService from twisted.application.service import Service from twisted.internet.defer import Deferred, gatherResults, succeed from twisted.logger import Logger from txacme.client import answer_challenge, fqdn_identifier, poll_until_valid from txacme.messages import CertificateRequest from txacme.util import clock_now, csr_for_names, generate_private_key, tap log = Logger() def _default_panic(failure, server_name): log.failure( u'PANIC! Unable to renew certificate for: {server_name!r}', failure, server_name=server_name) @attr.s(cmp=False, hash=False) class AcmeIssuingService(Service): """ A service for keeping certificates up to date by using an ACME server. :type cert_store: `~txacme.interfaces.ICertificateStore` :param cert_store: The certificate store containing the certificates to manage. :type client_creator: Callable[[], Deferred[`txacme.client.Client`]] :param client_creator: A callable called with no arguments for creating the ACME client. For example, ``partial(Client.from_url, reactor=reactor, url=LETSENCRYPT_STAGING_DIRECTORY, key=acme_key, alg=RS256)``. :param clock: ``IReactorTime`` provider; usually the reactor, when not testing. :type responders: List[`~txacme.interfaces.IResponder`] :param responders: Challenge responders. Usually only one responder is needed; if more than one responder for the same type is provided, only the first will be used. :param str email: An (optional) email address to use during registration. :param ~datetime.timedelta check_interval: How often to check for expiring certificates. :param ~datetime.timedelta reissue_interval: If a certificate is expiring in less time than this interval, it will be reissued. :param ~datetime.timedelta panic_interval: If a certificate is expiring in less time than this interval, and reissuing fails, the panic callback will be invoked. 
:type panic: Callable[[Failure, `str`], Deferred] :param panic: A callable invoked with the failure and server name when reissuing fails for a certificate expiring in the ``panic_interval``. For example, you could generate a monitoring alert. The default callback logs a message at *CRITICAL* level. :param generate_key: A 0-arg callable used to generate a private key for a new cert. Normally you would not pass this unless you have specialized key generation requirements. """ cert_store = attr.ib() _client_creator = attr.ib() _clock = attr.ib() _responders = attr.ib() _email = attr.ib(default=None) check_interval = attr.ib(default=timedelta(days=1)) reissue_interval = attr.ib(default=timedelta(days=30)) panic_interval = attr.ib(default=timedelta(days=15)) _panic = attr.ib(default=_default_panic) _generate_key = attr.ib(default=partial(generate_private_key, u'rsa')) _waiting = attr.ib(default=attr.Factory(list), init=False) _issuing = attr.ib(default=attr.Factory(dict), init=False) ready = False def _now(self): """ Get the current time. """ return clock_now(self._clock) def _check_certs(self): """ Check all of the certs in the store, and reissue any that are expired or close to expiring. """ log.info('Starting scheduled check for expired certificates.') def check(certs): panicing = set() expiring = set() for server_name, objects in certs.items(): if len(objects) == 0: panicing.add(server_name) for o in filter(lambda o: isinstance(o, Certificate), objects): cert = x509.load_pem_x509_certificate( o.as_bytes(), default_backend()) until_expiry = cert.not_valid_after - self._now() if until_expiry <= self.panic_interval: panicing.add(server_name) elif until_expiry <= self.reissue_interval: expiring.add(server_name) log.info( 'Found {panicing_count:d} overdue / expired and ' '{expiring_count:d} expiring certificates.', panicing_count=len(panicing), expiring_count=len(expiring)) d1 = ( gatherResults( [self._with_client(self._issue_cert, server_name) .addErrback(self._panic, server_name) for server_name in panicing], consumeErrors=True) .addCallback(done_panicing)) d2 = gatherResults( [self.issue_cert(server_name) .addErrback( lambda f: log.failure( u'Error issuing certificate for: {server_name!r}', f, server_name=server_name)) for server_name in expiring], consumeErrors=True) return gatherResults([d1, d2], consumeErrors=True) def done_panicing(ignored): self.ready = True for d in list(self._waiting): d.callback(None) self._waiting = [] return ( self._ensure_registered() .addCallback(lambda _: self.cert_store.as_dict()) .addCallback(check) .addErrback( lambda f: log.failure( u'Error in scheduled certificate check.', f))) def issue_cert(self, server_name): """ Issue a new cert for a particular name. If an existing cert exists, it will be replaced with the new cert. If issuing is already in progress for the given name, a second issuing process will *not* be started. :param str server_name: The name to issue a cert for. :rtype: ``Deferred`` :return: A deferred that fires when issuing is complete. """ def finish(result): _, waiting = self._issuing.pop(server_name) for d in waiting: d.callback(result) # d_issue is assigned below, in the conditional, since we may be # creating it or using the existing one. 
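        # Bookkeeping notes: self._issuing maps each server name to a tuple
        # of (the Deferred for the in-flight issuance, the list of caller
        # Deferreds waiting on it); finish() above pops that entry and fires
        # every waiter with the result.  Cancelling the Deferred handed back
        # to the caller cancels the underlying issuance via d_issue.cancel().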
d = Deferred(lambda _: d_issue.cancel()) if server_name in self._issuing: d_issue, waiting = self._issuing[server_name] waiting.append(d) else: d_issue = self._with_client(self._issue_cert, server_name) waiting = [d] self._issuing[server_name] = (d_issue, waiting) # Add the callback afterwards in case we're using a client # implementation that isn't actually async d_issue.addBoth(finish) return d def _with_client(self, f, *a, **kw): """ Construct a client, and perform an operation with it. """ return self._client_creator().addCallback(f, *a, **kw) def _issue_cert(self, client, server_name): """ Issue a new cert for a particular name. """ log.info( 'Requesting a certificate for {server_name!r}.', server_name=server_name) key = self._generate_key() objects = [ Key(key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, encryption_algorithm=serialization.NoEncryption()))] def answer_and_poll(authzr): def got_challenge(stop_responding): return ( poll_until_valid(authzr, self._clock, client) .addBoth(tap(lambda _: stop_responding()))) return ( answer_challenge(authzr, client, self._responders) .addCallback(got_challenge)) def got_cert(certr): objects.append( Certificate( x509.load_der_x509_certificate( certr.body, default_backend()) .public_bytes(serialization.Encoding.PEM))) return certr def got_chain(chain): for certr in chain: got_cert(certr) log.info( 'Received certificate for {server_name!r}.', server_name=server_name) return objects return ( client.request_challenges(fqdn_identifier(server_name)) .addCallback(answer_and_poll) .addCallback(lambda ign: client.request_issuance( CertificateRequest( csr=csr_for_names([server_name], key)))) .addCallback(got_cert) .addCallback(client.fetch_chain) .addCallback(got_chain) .addCallback(partial(self.cert_store.store, server_name))) def _ensure_registered(self): """ Register if needed. """ if self._registered: return succeed(None) else: return self._with_client(self._register) def _register(self, client): """ Register and agree to the TOS. """ def _registered(regr): self._regr = regr self._registered = True regr = messages.NewRegistration.from_data(email=self._email) return ( client.register(regr) .addCallback(client.agree_to_tos) .addCallback(_registered)) def when_certs_valid(self): """ Get a notification once the startup check has completed. When the service starts, an initial check is made immediately; the deferred returned by this function will only fire once reissue has been attempted for any certificates within the panic interval. .. note:: The reissue for any of these certificates may not have been successful; the panic callback will be invoked for any certificates in the panic interval that failed reissue. :rtype: ``Deferred`` :return: A deferred that fires once the initial check has resolved. 
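        A minimal usage sketch (``store`` is any
        `~txacme.interfaces.ICertificateStore` provider such as
        `txacme.store.DirectoryStore`, ``responders`` is a list of
        `~txacme.interfaces.IResponder` providers, ``acme_key`` is the
        account key, and ``start_listening`` is a placeholder for your own
        callback)::

            service = AcmeIssuingService(
                cert_store=store,
                client_creator=partial(
                    Client.from_url, reactor=reactor,
                    url=LETSENCRYPT_STAGING_DIRECTORY,
                    key=acme_key, alg=RS256),
                clock=reactor,
                responders=responders)
            service.startService()
            service.when_certs_valid().addCallback(
                lambda _: start_listening())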
""" if self.ready: return succeed(None) d = Deferred() self._waiting.append(d) return d def startService(self): Service.startService(self) self._registered = False self._timer_service = TimerService( self.check_interval.total_seconds(), self._check_certs) self._timer_service.clock = self._clock self._timer_service.startService() def stopService(self): Service.stopService(self) self.ready = False self._registered = False for d in list(self._waiting): d.cancel() self._waiting = [] return self._timer_service.stopService() __all__ = ['AcmeIssuingService'] txacme-0.9.3/src/txacme/store.py000066400000000000000000000023371364626325400166020ustar00rootroot00000000000000""" ``txacme.interfaces.ICertificateStore`` implementations. """ from operator import methodcaller import attr from pem import parse from twisted.internet.defer import maybeDeferred, succeed from zope.interface import implementer from txacme.interfaces import ICertificateStore @attr.s @implementer(ICertificateStore) class DirectoryStore(object): """ A certificate store that keeps certificates in a directory on disk. """ path = attr.ib(converter=methodcaller('asTextMode')) def _get(self, server_name): """ Synchronously retrieve an entry. """ p = self.path.child(server_name + u'.pem') if p.isfile(): return parse(p.getContent()) else: raise KeyError(server_name) def get(self, server_name): return maybeDeferred(self._get, server_name) def store(self, server_name, pem_objects): p = self.path.child(server_name + u'.pem') p.setContent(b''.join(o.as_bytes() for o in pem_objects)) return succeed(None) def as_dict(self): return succeed( {fn[:-4]: self._get(fn[:-4]) for fn in self.path.listdir() if fn.endswith(u'.pem')}) __all__ = ['DirectoryStore'] txacme-0.9.3/src/txacme/test/000077500000000000000000000000001364626325400160465ustar00rootroot00000000000000txacme-0.9.3/src/txacme/test/__init__.py000066400000000000000000000005451364626325400201630ustar00rootroot00000000000000from os import getenv import eliot.twisted from hypothesis import HealthCheck, settings eliot.twisted.redirectLogsForTrial() del eliot settings.register_profile( "coverage", settings(max_examples=20, suppress_health_check=[HealthCheck.too_slow])) settings.load_profile(getenv(u'HYPOTHESIS_PROFILE', 'default')) del HealthCheck, getenv, settings txacme-0.9.3/src/txacme/test/doubles.py000066400000000000000000000006241364626325400200570ustar00rootroot00000000000000""" Test doubles. """ from twisted.internet.interfaces import IReactorFromThreads from zope.interface import implementer @implementer(IReactorFromThreads) class SynchronousReactorThreads(object): """ An implementation of ``IReactorFromThreads`` that calls things synchronously in the same thread. """ def callFromThread(self, f, *args, **kwargs): # noqa f(*args, **kwargs) txacme-0.9.3/src/txacme/test/matchers.py000066400000000000000000000026151364626325400202320ustar00rootroot00000000000000from cryptography import x509 from testtools.matchers import Mismatch from cryptography.x509.oid import ExtensionOID from service_identity._common import ( DNS_ID, DNSPattern, verify_service_identity) from service_identity.exceptions import VerificationError class ValidForName(object): """ Matches when the matchee object (must be a `~cryptography.x509.Certificate` or `~cryptography.x509.CertificateSigningRequest`) is valid for the given name. """ def __init__(self, name): self.name = name def __str__(self): return 'ValidForName({0.name!r})'.format(self) def match(self, value): # This is somewhat terrible. 
Probably can be better after # pyca/service_identity#14 is resolved. target_ids = [ DNSPattern(target_name.encode('utf-8')) for target_name in ( value.extensions .get_extension_for_oid( ExtensionOID.SUBJECT_ALTERNATIVE_NAME) .value .get_values_for_type(x509.DNSName) )] ids = [DNS_ID(self.name)] try: verify_service_identity( cert_patterns=target_ids, obligatory_ids=ids, optional_ids=[]) except VerificationError: return Mismatch( '{!r} is not valid for {!r}'.format(value, self.name)) __all__ = ['ValidForName'] txacme-0.9.3/src/txacme/test/strategies.py000066400000000000000000000031361364626325400205750ustar00rootroot00000000000000""" Miscellaneous strategies for Hypothesis testing. """ try: from base64 import encodebytes except ImportError: from base64 import encodestring as encodebytes from hypothesis import strategies as s from pem import Certificate, RSAPrivateKey from twisted.python.url import URL def dns_labels(): """ Strategy for generating limited charset DNS labels. """ # This is too limited, but whatever return s.from_regex(u'\\A[a-z]{3}[a-z0-9-]{0,21}[a-z]\\Z') def dns_names(): """ Strategy for generating limited charset DNS names. """ return ( s.lists(dns_labels(), min_size=1, max_size=10) .map(u'.'.join)) def urls(): """ Strategy for generating ``twisted.python.url.URL``\\s. """ return s.builds( URL, scheme=s.just(u'https'), host=dns_names(), path=s.lists(s.text( max_size=64, alphabet=s.characters(blacklist_characters=u'/?#', blacklist_categories=('Cs',)) ), min_size=1, max_size=10)) @s.composite def pem_objects(draw): """ Strategy for generating ``pem`` objects. """ key = RSAPrivateKey(( b'-----BEGIN RSA PRIVATE KEY-----\n' + encodebytes(draw(s.binary(min_size=1))) + b'-----END RSA PRIVATE KEY-----\n')) return [key] + [ Certificate(( b'-----BEGIN CERTIFICATE-----\n' + encodebytes(cert) + b'-----END CERTIFICATE-----\n')) for cert in draw(s.lists(s.binary(min_size=1), min_size=1))] __all__ = ['dns_labels', 'dns_names', 'urls', 'pem_objects'] txacme-0.9.3/src/txacme/test/test_challenges.py000066400000000000000000000452261364626325400215750ustar00rootroot00000000000000""" Tests for `txacme.challenges`. 
""" from operator import methodcaller from acme import challenges from josepy.b64 import b64encode from hypothesis import strategies as s from hypothesis import assume, example, given from testtools import skipIf, TestCase from testtools.matchers import ( AfterPreprocessing, Always, Contains, EndsWith, Equals, HasLength, Is, IsInstance, MatchesAll, MatchesListwise, MatchesPredicate, MatchesStructure, Not) from testtools.twistedsupport import succeeded from treq.testing import StubTreq from twisted._threads import createMemoryWorker from twisted.internet.defer import maybeDeferred from twisted.python.url import URL from twisted.web.resource import Resource from zope.interface.verify import verifyObject from txacme.challenges import HTTP01Responder, TLSSNI01Responder from txacme.challenges._tls import _MergingMappingProxy from txacme.errors import NotInZone, ZoneNotFound from txacme.interfaces import IResponder from txacme.test import strategies as ts from txacme.test.doubles import SynchronousReactorThreads from txacme.test.test_client import failed_with, RSA_KEY_512, RSA_KEY_512_RAW try: from txacme.challenges import LibcloudDNSResponder from txacme.challenges._libcloud import _daemon_thread except ImportError: LibcloudDNSResponder = None # A random example token for the challenge tests that need one EXAMPLE_TOKEN = b'BWYcfxzmOha7-7LoxziqPZIUr99BCz3BfbN9kzSFnrU' class _CommonResponderTests(object): """ Common properties which every responder implementation should satisfy. """ def _do_one_thing(self): """ Make the underlying fake implementation do one thing (eg. simulate one network request, one threaded task execution). """ def test_interface(self): """ The `.IResponder` interface is correctly implemented. """ responder = self._responder_factory() verifyObject(IResponder, responder) self.assertThat(responder.challenge_type, Equals(self._challenge_type)) @example(token=EXAMPLE_TOKEN) @given(token=s.binary(min_size=32, max_size=32).map(b64encode)) def test_stop_responding_already_stopped(self, token): """ Calling ``stop_responding`` when we are not responding for a server name does nothing. """ challenge = self._challenge_factory(token=token) response = challenge.response(RSA_KEY_512) responder = self._responder_factory() d = maybeDeferred( responder.stop_responding, u'example.com', challenge, response) self._do_one_thing() self.assertThat(d, succeeded(Always())) class TLSResponderTests(_CommonResponderTests, TestCase): """ `.TLSSNI01Responder` is a responder for tls-sni-01 challenges that works with txsni. """ _challenge_factory = challenges.TLSSNI01 _responder_factory = TLSSNI01Responder _challenge_type = u'tls-sni-01' @example(token=b'BWYcfxzmOha7-7LoxziqPZIUr99BCz3BfbN9kzSFnrU') @given(token=s.binary(min_size=32, max_size=32).map(b64encode)) def test_start_responding(self, token): """ Calling ``start_responding`` makes an appropriate entry appear in the host map. 
""" ckey = RSA_KEY_512_RAW challenge = challenges.TLSSNI01(token=token) response = challenge.response(RSA_KEY_512) server_name = response.z_domain.decode('ascii') host_map = {} responder = TLSSNI01Responder() responder._generate_private_key = lambda key_type: ckey wrapped_host_map = responder.wrap_host_map(host_map) self.assertThat(wrapped_host_map, Not(Contains(server_name))) responder.start_responding(u'example.com', challenge, response) self.assertThat( wrapped_host_map.get(server_name.encode('utf-8')).certificate, MatchesPredicate(response.verify_cert, '%r does not verify')) # Starting twice before stopping doesn't break things responder.start_responding(u'example.com', challenge, response) self.assertThat( wrapped_host_map.get(server_name.encode('utf-8')).certificate, MatchesPredicate(response.verify_cert, '%r does not verify')) responder.stop_responding(u'example.com', challenge, response) self.assertThat(wrapped_host_map, Not(Contains(server_name))) class MergingProxyTests(TestCase): """ ``_MergingMappingProxy`` merges two mappings together. """ @example(underlay={}, overlay={}, key=u'foo') @given(underlay=s.dictionaries(s.text(), s.builds(object)), overlay=s.dictionaries(s.text(), s.builds(object)), key=s.text()) def test_get_overlay(self, underlay, overlay, key): """ Getting an key that only exists in the overlay returns the value from the overlay. """ underlay.pop(key, None) overlay[key] = object() proxy = _MergingMappingProxy( overlay=overlay, underlay=underlay) self.assertThat(proxy[key], Is(overlay[key])) @example(underlay={}, overlay={}, key=u'foo') @given(underlay=s.dictionaries(s.text(), s.builds(object)), overlay=s.dictionaries(s.text(), s.builds(object)), key=s.text()) def test_get_underlay(self, underlay, overlay, key): """ Getting an key that only exists in the underlay returns the value from the underlay. """ underlay[key] = object() overlay.pop(key, None) proxy = _MergingMappingProxy( overlay=overlay, underlay=underlay) self.assertThat(proxy[key], Is(underlay[key])) @example(underlay={}, overlay={}, key=u'foo') @given(underlay=s.dictionaries(s.text(), s.builds(object)), overlay=s.dictionaries(s.text(), s.builds(object)), key=s.text()) def test_get_both(self, underlay, overlay, key): """ Getting an key that exists in both the underlay and the overlay returns the value from the overlay. """ underlay[key] = object() overlay[key] = object() proxy = _MergingMappingProxy( overlay=overlay, underlay=underlay) self.assertThat(proxy[key], Not(Is(underlay[key]))) self.assertThat(proxy[key], Is(overlay[key])) @example(underlay={u'foo': object(), u'bar': object()}, overlay={u'bar': object(), u'baz': object()}) @given(underlay=s.dictionaries(s.text(), s.builds(object)), overlay=s.dictionaries(s.text(), s.builds(object))) def test_len(self, underlay, overlay): """ ``__len__`` of the proxy does not count duplicates. """ proxy = _MergingMappingProxy( overlay=overlay, underlay=underlay) self.assertThat(len(proxy), Equals(len(list(proxy)))) @example(underlay={u'foo': object(), u'bar': object()}, overlay={u'bar': object(), u'baz': object()}) @given(underlay=s.dictionaries(s.text(), s.builds(object)), overlay=s.dictionaries(s.text(), s.builds(object))) def test_iter(self, underlay, overlay): """ ``__iter__`` of the proxy does not produce duplicate keys. 
""" proxy = _MergingMappingProxy( overlay=overlay, underlay=underlay) keys = sorted(list(proxy)) self.assertThat(keys, Equals(sorted(list(set(keys))))) @example(underlay={u'foo': object()}, overlay={}, key=u'foo') @example(underlay={}, overlay={}, key=u'bar') @given(underlay=s.dictionaries(s.text(), s.builds(object)), overlay=s.dictionaries(s.text(), s.builds(object)), key=s.text()) def test_contains(self, underlay, overlay, key): """ The mapping only contains a key if it can be gotten. """ proxy = _MergingMappingProxy( overlay=overlay, underlay=underlay) self.assertThat( key in proxy, Equals(proxy.get(key) is not None)) class HTTPResponderTests(_CommonResponderTests, TestCase): """ `.HTTP01Responder` is a responder for http-01 challenges. """ _challenge_factory = challenges.HTTP01 _responder_factory = HTTP01Responder _challenge_type = u'http-01' @example(token=b'BWYcfxzmOha7-7LoxziqPZIUr99BCz3BfbN9kzSFnrU') @given(token=s.binary(min_size=32, max_size=32).map(b64encode)) def test_start_responding(self, token): """ Calling ``start_responding`` makes an appropriate resource available. """ challenge = challenges.HTTP01(token=token) response = challenge.response(RSA_KEY_512) responder = HTTP01Responder() challenge_resource = Resource() challenge_resource.putChild(b'acme-challenge', responder.resource) root = Resource() root.putChild(b'.well-known', challenge_resource) client = StubTreq(root) encoded_token = challenge.encode('token') challenge_url = URL(host=u'example.com', path=[ u'.well-known', u'acme-challenge', encoded_token]).asText() self.assertThat(client.get(challenge_url), succeeded(MatchesStructure(code=Equals(404)))) responder.start_responding(u'example.com', challenge, response) self.assertThat(client.get(challenge_url), succeeded(MatchesAll( MatchesStructure( code=Equals(200), headers=AfterPreprocessing( methodcaller('getRawHeaders', b'content-type'), Equals([b'text/plain']))), AfterPreprocessing(methodcaller('content'), succeeded( Equals(response.key_authorization.encode('utf-8')))) ))) # Starting twice before stopping doesn't break things responder.start_responding(u'example.com', challenge, response) self.assertThat(client.get(challenge_url), succeeded(MatchesStructure(code=Equals(200)))) responder.stop_responding(u'example.com', challenge, response) self.assertThat(client.get(challenge_url), succeeded(MatchesStructure(code=Equals(404)))) @skipIf(LibcloudDNSResponder is None, 'libcloud not available') class LibcloudResponderTests(_CommonResponderTests, TestCase): """ `.LibcloudDNSResponder` implements a responder for dns-01 challenges using libcloud on the backend. """ _challenge_factory = challenges.DNS01 _challenge_type = u'dns-01' def _responder_factory(self, zone_name=u'example.com'): responder = LibcloudDNSResponder.create( reactor=SynchronousReactorThreads(), driver_name='dummy', username='ignored', password='ignored', zone_name=zone_name, settle_delay=0.0) if zone_name is not None: responder._driver.create_zone(zone_name) responder._thread_pool, self._perform = createMemoryWorker() return responder def _do_one_thing(self): return self._perform() def test_daemon_threads(self): """ ``_daemon_thread`` creates thread objects with ``daemon`` set. 
""" thread = _daemon_thread() self.assertThat(thread, MatchesStructure(daemon=Equals(True))) @example(token=EXAMPLE_TOKEN, subdomain=u'acme-testing', zone_name=u'example.com') @given(token=s.binary(min_size=32, max_size=32).map(b64encode), subdomain=ts.dns_names(), zone_name=ts.dns_names()) def test_start_responding(self, token, subdomain, zone_name): """ Calling ``start_responding`` causes an appropriate TXT record to be created. """ challenge = self._challenge_factory(token=token) response = challenge.response(RSA_KEY_512) responder = self._responder_factory(zone_name=zone_name) server_name = u'{}.{}'.format(subdomain, zone_name) zone = responder._driver.list_zones()[0] self.assertThat(zone.list_records(), HasLength(0)) d = responder.start_responding(server_name, challenge, response) self._perform() self.assertThat(d, succeeded(Always())) self.assertThat( zone.list_records(), MatchesListwise([ MatchesStructure( name=EndsWith(u'.' + subdomain), type=Equals('TXT'), )])) # Starting twice before stopping doesn't break things d = responder.start_responding(server_name, challenge, response) self._perform() self.assertThat(d, succeeded(Always())) self.assertThat(zone.list_records(), HasLength(1)) d = responder.stop_responding(server_name, challenge, response) self._perform() self.assertThat(d, succeeded(Always())) self.assertThat(zone.list_records(), HasLength(0)) @example(token=EXAMPLE_TOKEN, subdomain=u'acme-testing', zone_name=u'example.com') @given(token=s.binary(min_size=32, max_size=32).map(b64encode), subdomain=ts.dns_names(), zone_name=ts.dns_names()) def test_wrong_zone(self, token, subdomain, zone_name): """ Trying to respond for a domain not in the configured zone results in a `.NotInZone` exception. """ challenge = self._challenge_factory(token=token) response = challenge.response(RSA_KEY_512) responder = self._responder_factory(zone_name=zone_name) server_name = u'{}.{}.junk'.format(subdomain, zone_name) d = maybeDeferred( responder.start_responding, server_name, challenge, response) self._perform() self.assertThat( d, failed_with(MatchesAll( IsInstance(NotInZone), MatchesStructure( server_name=EndsWith(u'.' + server_name), zone_name=Equals(zone_name))))) @example(token=EXAMPLE_TOKEN, subdomain=u'acme-testing', zone_name=u'example.com') @given(token=s.binary(min_size=32, max_size=32).map(b64encode), subdomain=ts.dns_names(), zone_name=ts.dns_names()) def test_missing_zone(self, token, subdomain, zone_name): """ `.ZoneNotFound` is raised if the configured zone cannot be found at the configured provider. 
""" challenge = self._challenge_factory(token=token) response = challenge.response(RSA_KEY_512) responder = self._responder_factory(zone_name=zone_name) server_name = u'{}.{}'.format(subdomain, zone_name) for zone in responder._driver.list_zones(): zone.delete() d = maybeDeferred( responder.start_responding, server_name, challenge, response) self._perform() self.assertThat( d, failed_with(MatchesAll( IsInstance(ZoneNotFound), MatchesStructure( zone_name=Equals(zone_name))))) @example(token=EXAMPLE_TOKEN, subdomain=u'acme-testing', extra=u'extra', zone_name1=u'example.com', suffix1=u'.', zone_name2=u'example.org', suffix2=u'') @given(token=s.binary(min_size=32, max_size=32).map(b64encode), subdomain=ts.dns_names(), extra=ts.dns_names(), zone_name1=ts.dns_names(), suffix1=s.sampled_from([u'', u'.']), zone_name2=ts.dns_names(), suffix2=s.sampled_from([u'', u'.'])) def test_auto_zone(self, token, subdomain, extra, zone_name1, suffix1, zone_name2, suffix2): """ If the configured zone_name is ``None``, the zone will be guessed by finding the longest zone that is a suffix of the server name. """ zone_name3 = extra + u'.' + zone_name1 zone_name4 = extra + u'.' + zone_name2 server_name = u'{}.{}.{}'.format(subdomain, extra, zone_name1) assume( len({server_name, zone_name1, zone_name2, zone_name3, zone_name4}) == 5) challenge = self._challenge_factory(token=token) response = challenge.response(RSA_KEY_512) responder = self._responder_factory(zone_name=None) zone1 = responder._driver.create_zone(zone_name1 + suffix1) zone2 = responder._driver.create_zone(zone_name2 + suffix2) zone3 = responder._driver.create_zone(zone_name3 + suffix1) zone4 = responder._driver.create_zone(zone_name4 + suffix2) self.assertThat(zone1.list_records(), HasLength(0)) self.assertThat(zone2.list_records(), HasLength(0)) self.assertThat(zone3.list_records(), HasLength(0)) self.assertThat(zone4.list_records(), HasLength(0)) d = responder.start_responding(server_name, challenge, response) self._perform() self.assertThat(d, succeeded(Always())) self.assertThat(zone1.list_records(), HasLength(0)) self.assertThat(zone2.list_records(), HasLength(0)) self.assertThat( zone3.list_records(), MatchesListwise([ MatchesStructure( name=AfterPreprocessing( methodcaller('rstrip', u'.'), EndsWith(u'.' + subdomain)), type=Equals('TXT'), )])) self.assertThat(zone4.list_records(), HasLength(0)) @example(token=EXAMPLE_TOKEN, subdomain=u'acme-testing', zone_name1=u'example.com', zone_name2=u'example.org') @given(token=s.binary(min_size=32, max_size=32).map(b64encode), subdomain=ts.dns_names(), zone_name1=ts.dns_names(), zone_name2=ts.dns_names()) def test_auto_zone_missing(self, token, subdomain, zone_name1, zone_name2): """ If the configured zone_name is ``None``, and no matching zone is found, ``NotInZone`` is raised. """ server_name = u'{}.{}'.format(subdomain, zone_name1) assume(not server_name.endswith(zone_name2)) challenge = self._challenge_factory(token=token) response = challenge.response(RSA_KEY_512) responder = self._responder_factory(zone_name=None) zone = responder._driver.create_zone(zone_name2) self.assertThat(zone.list_records(), HasLength(0)) d = maybeDeferred( responder.start_responding, server_name, challenge, response) self._perform() self.assertThat( d, failed_with(MatchesAll( IsInstance(NotInZone), MatchesStructure( server_name=EndsWith(u'.' 
+ server_name), zone_name=Is(None))))) __all__ = [ 'HTTPResponderTests', 'TLSResponderTests', 'MergingProxyTests', 'LibcloudResponderTests'] txacme-0.9.3/src/txacme/test/test_client.py000066400000000000000000002044511364626325400207430ustar00rootroot00000000000000import json from contextlib import contextmanager from operator import attrgetter, methodcaller import attr from josepy.jwa import RS256, RS384 from josepy.jwk import JWKRSA from josepy.jws import JWS from josepy.b64 import b64encode, b64decode from acme import challenges, errors, messages from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import serialization from cryptography.hazmat.primitives.asymmetric import rsa from fixtures import Fixture from hypothesis import strategies as s from hypothesis import assume, example, given, settings from testtools import ExpectedException, TestCase from testtools.matchers import ( AfterPreprocessing, Always, ContainsDict, Equals, Is, IsInstance, MatchesAll, MatchesListwise, MatchesPredicate, MatchesStructure, Mismatch, Never, Not, StartsWith) from testtools.twistedsupport import failed, succeeded from treq.client import HTTPClient from treq.testing import RequestSequence as treq_RequestSequence from treq.testing import ( _SynchronousProducer, RequestTraversalAgent, StringStubbingResource) from twisted.internet import reactor from twisted.internet.defer import CancelledError, fail, succeed from twisted.internet.task import Clock from twisted.python.compat import _PY3 from twisted.python.url import URL from twisted.test.proto_helpers import MemoryReactor from twisted.web import http from twisted.web.http_headers import Headers from zope.interface import implementer from txacme.client import ( _default_client, _find_supported_challenge, _parse_header_links, answer_challenge, AuthorizationFailed, Client, DER_CONTENT_TYPE, fqdn_identifier, JSON_CONTENT_TYPE, JSON_ERROR_CONTENT_TYPE, JWSClient, NoSupportedChallenges, poll_until_valid, ServerError) from txacme.interfaces import IResponder from txacme.messages import CertificateRequest from txacme.test import strategies as ts from txacme.testing import NullResponder from txacme.util import ( csr_for_names, generate_private_key, generate_tls_sni_01_cert) def failed_with(matcher): return failed(AfterPreprocessing(attrgetter('value'), matcher)) # from cryptography: RSA_KEY_512_RAW = rsa.RSAPrivateNumbers( p=int( "d57846898d5c0de249c08467586cb458fa9bc417cdf297f73cfc52281b787cd9", 16 ), q=int( "d10f71229e87e010eb363db6a85fd07df72d985b73c42786191f2ce9134afb2d", 16 ), d=int( "272869352cacf9c866c4e107acc95d4c608ca91460a93d28588d51cfccc07f449" "18bbe7660f9f16adc2b4ed36ca310ef3d63b79bd447456e3505736a45a6ed21", 16 ), dmp1=int( "addff2ec7564c6b64bc670d250b6f24b0b8db6b2810099813b7e7658cecf5c39", 16 ), dmq1=int( "463ae9c6b77aedcac1397781e50e4afc060d4b216dc2778494ebe42a6850c81", 16 ), iqmp=int( "54deef8548f65cad1d411527a32dcb8e712d3e128e4e0ff118663fae82a758f4", 16 ), public_numbers=rsa.RSAPublicNumbers( e=65537, n=int( "ae5411f963c50e3267fafcf76381c8b1e5f7b741fdb2a544bcf48bd607b10c991" "90caeb8011dc22cf83d921da55ec32bd05cac3ee02ca5e1dbef93952850b525", 16 ), ) ).private_key(default_backend()) RSA_KEY_512 = JWKRSA(key=RSA_KEY_512_RAW) class Nearly(object): """Within a certain threshold.""" def __init__(self, expected, epsilon=0.001): self.expected = expected self.epsilon = epsilon def __str__(self): return 'Nearly(%r, %r)' % (self.expected, self.epsilon) def match(self, value): if abs(value - self.expected) > self.epsilon: 
return Mismatch( u'%r more than %r from %r' % ( value, self.epsilon, self.expected)) class ClientFixture(Fixture): """ Create a :class:`~txacme.client.Client` for testing. """ def __init__(self, sequence, key=None, alg=RS256): super(ClientFixture, self).__init__() self._sequence = sequence self._directory = messages.Directory({ messages.NewRegistration: u'https://example.org/acme/new-reg', messages.Revocation: u'https://example.org/acme/revoke-cert', messages.NewAuthorization: u'https://example.org/acme/new-authz', messages.CertificateRequest: u'https://example.org/acme/new-cert', }) if key is None: key = JWKRSA(key=generate_private_key('rsa')) self._key = key self._alg = alg def _setUp(self): # noqa treq_client = HTTPClient( agent=RequestTraversalAgent( StringStubbingResource(self._sequence)), data_to_body_producer=_SynchronousProducer) self.clock = Clock() self.client = Client( self._directory, self.clock, self._key, jws_client=JWSClient(treq_client, self._key, self._alg)) def _nonce_response(url, nonce): """ Construct an expected request for an initial nonce check. :param bytes url: The url being requested. :param bytes nonce: The nonce to return. :return: A request/response tuple suitable for use with :class:`~treq.testing.RequestSequence`. """ return ( MatchesListwise([ Equals(b'HEAD'), Equals(url), Equals({}), ContainsDict({b'User-Agent': MatchesListwise([StartsWith(b'txacme/')])}), Equals(b'')]), (http.NOT_ALLOWED, {b'content-type': JSON_CONTENT_TYPE, b'replay-nonce': b64encode(nonce)}, b'{}')) def _json_dumps(j): s = json.dumps(j) if _PY3: s = s.encode('utf-8') return s class RequestSequence(treq_RequestSequence): @contextmanager def consume(self, sync_failure_reporter): yield if not self.consumed(): sync_failure_reporter("\n".join( ["Not all expected requests were made. Still expecting:"] + ["- {0!r})".format(e) for e, _ in self._sequence])) def __call__(self, method, url, params, headers, data): """ :return: the next response in the sequence, provided that the parameters match the next in the sequence. """ req = (method, url, params, headers, data) if len(self._sequence) == 0: self._async_reporter( None, Never(), "No more requests expected, but request {0!r} made.".format( req)) return (500, {}, "StubbingError") matcher, response = self._sequence[0] self._async_reporter(req, matcher) self._sequence = self._sequence[1:] return response def on_json(matcher): def _loads(s): assert isinstance(s, bytes) s = s.decode('utf-8') return json.loads(s) return AfterPreprocessing(_loads, matcher) def on_jws(matcher, nonce=None): nonce_matcher = Always() if nonce is not None: def extract_nonce(j): protected = json.loads(j.signatures[0].protected) return b64decode(protected[u'nonce']) nonce_matcher = AfterPreprocessing(extract_nonce, Equals(nonce)) return on_json( AfterPreprocessing( JWS.from_json, MatchesAll( MatchesPredicate( methodcaller('verify'), '%r does not verify'), AfterPreprocessing( attrgetter('payload'), on_json(matcher)), nonce_matcher))) @attr.s class TestResponse(object): """ Test response implementation for various bad response cases. 
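    For example, the wrong-status-code case below boils down to handing one
    of these straight to the client helper under test (a condensed sketch)::

        response = TestResponse(code=http.FOUND)
        # Raises acme.errors.ClientError, since http.CREATED was expected.
        Client._expect_response(response, http.CREATED)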
""" code = attr.ib(default=http.OK) content_type = attr.ib(default=JSON_CONTENT_TYPE) nonce = attr.ib(default=None) json = attr.ib(default=lambda: succeed({})) links = attr.ib(default=None) @property def headers(self): h = Headers({b'content-type': [self.content_type]}) if self.nonce is not None: h.setRawHeaders(b'replay-nonce', [self.nonce]) if self.links is not None: h.setRawHeaders(b'link', self.links) return h @implementer(IResponder) @attr.s class RecordingResponder(object): challenges = attr.ib() challenge_type = attr.ib() def start_responding(self, server_name, challenge, response): self.challenges.add(challenge) def stop_responding(self, server_name, challenge, response): self.challenges.discard(challenge) class ClientTests(TestCase): """ :class:`.Client` provides a client interface for the ACME API. """ def test_directory_url_type(self): """ `~txacme.client.Client.from_url` expects a ``twisted.python.url.URL`` instance for the ``url`` argument. """ with ExpectedException(TypeError): Client.from_url( reactor, '/wrong/kind/of/directory', key=RSA_KEY_512) def test_register_missing_next(self): """ If the directory does not return a ``"next"`` link, a :exc:`~acme.errors.ClientError` failure occurs. """ sequence = RequestSequence( [_nonce_response( u'https://example.org/acme/new-reg', b'Nonce'), (MatchesListwise([ Equals(b'POST'), Equals(u'https://example.org/acme/new-reg'), Equals({}), Always(), Always()]), (http.CREATED, {b'content-type': JSON_CONTENT_TYPE, b'replay-nonce': b64encode(b'Nonce2')}, b'{}'))], self.expectThat) client = self.useFixture(ClientFixture(sequence)).client with sequence.consume(self.fail): d = client.register() self.expectThat( d, failed_with(MatchesAll( IsInstance(errors.ClientError), AfterPreprocessing(str, Equals('"next" link missing'))))) def test_unexpected_update(self): """ If the server does not return the registration we expected, an :exc:`~acme.errors.UnexpectedUpdate` failure occurs. """ update = ( MatchesListwise([ Equals(b'POST'), Equals(u'https://example.org/acme/new-reg'), Equals({}), ContainsDict({b'Content-Type': Equals([JSON_CONTENT_TYPE])}), Always()]), (http.CREATED, {b'content-type': JSON_CONTENT_TYPE, b'replay-nonce': b64encode(b'Nonce2'), b'location': b'https://example.org/acme/reg/1', b'link': b','.join([ b';rel="next"', b';rel="recover"', b';rel="terms-of-service"', ])}, _json_dumps({ u'key': { u'n': u'alQR-WPFDjJn-vz3Y4HIseX3t0H9sqVEvPSL1gexDJkZDK6' u'4AR3CLPg9kh2lXsMr0FysPuAspeHb75OVKFC1JQ', u'e': u'AQAB', u'kty': u'RSA'}, u'contact': [u'mailto:example@example.com'], }))) sequence = RequestSequence( [_nonce_response( u'https://example.org/acme/new-reg', b'Nonce'), update, update], self.expectThat) client = self.useFixture( ClientFixture(sequence, key=RSA_KEY_512)).client reg = messages.NewRegistration.from_data(email=u'example@example.com') reg2 = messages.NewRegistration.from_data(email=u'foo@example.com') with sequence.consume(self.fail): self.assertThat( client.register(reg), failed_with(IsInstance(errors.UnexpectedUpdate))) self.assertThat( client.register(reg2), failed_with(IsInstance(errors.UnexpectedUpdate))) def test_register(self): """ If the registration succeeds, the new registration is returned. 
""" sequence = RequestSequence( [_nonce_response( u'https://example.org/acme/new-reg', b'Nonce'), (MatchesListwise([ Equals(b'POST'), Equals(u'https://example.org/acme/new-reg'), Equals({}), ContainsDict({b'Content-Type': Equals([JSON_CONTENT_TYPE])}), on_jws(Equals({ u'resource': u'new-reg', u'contact': [u'mailto:example@example.com']}))]), (http.CREATED, {b'content-type': JSON_CONTENT_TYPE, b'replay-nonce': b64encode(b'Nonce2'), b'location': b'https://example.org/acme/reg/1', b'link': b','.join([ b';rel="next"', b';rel="recover"', b';rel="terms-of-service"', ])}, _json_dumps({ u'key': { u'n': u'rlQR-WPFDjJn-vz3Y4HIseX3t0H9sqVEvPSL1gexDJkZDK6' u'4AR3CLPg9kh2lXsMr0FysPuAspeHb75OVKFC1JQ', u'e': u'AQAB', u'kty': u'RSA'}, u'contact': [u'mailto:example@example.com'], })))], self.expectThat) client = self.useFixture( ClientFixture(sequence, key=RSA_KEY_512)).client reg = messages.NewRegistration.from_data(email=u'example@example.com') with sequence.consume(self.fail): d = client.register(reg) self.assertThat( d, succeeded(MatchesStructure( body=MatchesStructure( key=Equals(RSA_KEY_512.public_key()), contact=Equals(reg.contact)), uri=Equals(u'https://example.org/acme/reg/1'), new_authzr_uri=Equals( u'https://example.org/acme/new-authz'), terms_of_service=Equals(u'https://example.org/acme/terms'), ))) def test_register_existing(self): """ If registration fails due to our key already being registered, the existing registration is returned. """ sequence = RequestSequence( [_nonce_response( u'https://example.org/acme/new-reg', b'Nonce'), (MatchesListwise([ Equals(b'POST'), Equals(u'https://example.org/acme/new-reg'), Equals({}), Always(), on_jws(Equals({ u'resource': u'new-reg', u'contact': [u'mailto:example@example.com']}))]), (http.CONFLICT, {b'content-type': JSON_ERROR_CONTENT_TYPE, b'replay-nonce': b64encode(b'Nonce2'), b'location': b'https://example.org/acme/reg/1', }, _json_dumps( {u'status': http.CONFLICT, u'type': u'urn:acme:error:malformed', u'detail': u'Registration key is already in use'} ))), (MatchesListwise([ Equals(b'POST'), Equals(u'https://example.org/acme/reg/1'), Equals({}), Always(), on_jws(Equals({ u'resource': u'reg', u'contact': [u'mailto:example@example.com']}))]), (http.ACCEPTED, {b'content-type': JSON_CONTENT_TYPE, b'replay-nonce': b64encode(b'Nonce3'), b'link': b','.join([ b';rel="next"', b';rel="recover"', b';rel="terms-of-service"', ])}, _json_dumps({ u'key': { u'n': u'rlQR-WPFDjJn-vz3Y4HIseX3t0H9sqVEvPSL1gexDJkZDK6' u'4AR3CLPg9kh2lXsMr0FysPuAspeHb75OVKFC1JQ', u'e': u'AQAB', u'kty': u'RSA'}, u'contact': [u'mailto:example@example.com'], u'agreement': u'https://example.org/acme/terms', })))], self.expectThat) client = self.useFixture( ClientFixture(sequence, key=RSA_KEY_512)).client reg = messages.NewRegistration.from_data(email=u'example@example.com') with sequence.consume(self.fail): d = client.register(reg) self.assertThat( d, succeeded(MatchesStructure( body=MatchesStructure( key=Equals(RSA_KEY_512.public_key()), contact=Equals(reg.contact)), uri=Equals(u'https://example.org/acme/reg/1'), new_authzr_uri=Equals( u'https://example.org/acme/new-authz'), terms_of_service=Equals(u'https://example.org/acme/terms'), ))) def test_register_existing_update(self): """ If registration fails due to our key already being registered, the existing registration is updated. 
""" sequence = RequestSequence( [_nonce_response( u'https://example.org/acme/new-reg', b'Nonce'), (MatchesListwise([ Equals(b'POST'), Equals(u'https://example.org/acme/new-reg'), Equals({}), Always(), on_jws(Equals({ u'resource': u'new-reg', u'contact': [u'mailto:example2@example.com']}))]), (http.CONFLICT, {b'content-type': JSON_ERROR_CONTENT_TYPE, b'replay-nonce': b64encode(b'Nonce2'), b'location': b'https://example.org/acme/reg/1', }, _json_dumps( {u'status': http.CONFLICT, u'type': u'urn:acme:error:malformed', u'detail': u'Registration key is already in use'} ))), (MatchesListwise([ Equals(b'POST'), Equals(u'https://example.org/acme/reg/1'), Equals({}), Always(), on_jws(Equals({ u'resource': u'reg', u'contact': [u'mailto:example2@example.com']}))]), (http.ACCEPTED, {b'content-type': JSON_CONTENT_TYPE, b'replay-nonce': b64encode(b'Nonce3'), b'link': b','.join([ b';rel="next"', b';rel="recover"', b';rel="terms-of-service"', ])}, _json_dumps({ u'key': { u'n': u'rlQR-WPFDjJn-vz3Y4HIseX3t0H9sqVEvPSL1gexDJkZDK6' u'4AR3CLPg9kh2lXsMr0FysPuAspeHb75OVKFC1JQ', u'e': u'AQAB', u'kty': u'RSA'}, u'contact': [u'mailto:example2@example.com'], u'agreement': u'https://example.org/acme/terms', })))], self.expectThat) client = self.useFixture( ClientFixture(sequence, key=RSA_KEY_512)).client reg = messages.NewRegistration.from_data(email=u'example2@example.com') with sequence.consume(self.fail): d = client.register(reg) self.assertThat( d, succeeded(MatchesStructure( body=MatchesStructure( key=Equals(RSA_KEY_512.public_key()), contact=Equals(reg.contact)), uri=Equals(u'https://example.org/acme/reg/1'), new_authzr_uri=Equals( u'https://example.org/acme/new-authz'), terms_of_service=Equals(u'https://example.org/acme/terms'), ))) def test_register_error(self): """ If some other error occurs during registration, a :exc:`txacme.client.ServerError` results. """ sequence = RequestSequence( [_nonce_response( u'https://example.org/acme/new-reg', b'Nonce'), (MatchesListwise([ Equals(b'POST'), Equals(u'https://example.org/acme/new-reg'), Equals({}), Always(), on_jws(Equals({ u'resource': u'new-reg', u'contact': [u'mailto:example@example.com']}))]), (http.SERVICE_UNAVAILABLE, {b'content-type': JSON_ERROR_CONTENT_TYPE, b'replay-nonce': b64encode(b'Nonce2'), }, _json_dumps( {u'status': http.SERVICE_UNAVAILABLE, u'type': u'urn:acme:error:rateLimited', u'detail': u'The request exceeds a rate limit'} )))], self.expectThat) client = self.useFixture( ClientFixture(sequence, key=RSA_KEY_512)).client reg = messages.NewRegistration.from_data(email=u'example@example.com') with sequence.consume(self.fail): d = client.register(reg) self.assertThat( d, failed_with(MatchesAll( IsInstance(ServerError), MatchesStructure( message=MatchesStructure( typ=Equals(u'urn:acme:error:rateLimited'), detail=Equals(u'The request exceeds a rate limit'), ))))) def test_register_bad_nonce_once(self): """ If a badNonce error is received, we clear all old nonces and retry the request once. 
""" sequence = RequestSequence( [(MatchesListwise([ Equals(b'POST'), Equals(u'https://example.org/acme/new-reg'), Equals({}), Always(), on_jws(Equals({ u'resource': u'new-reg', u'contact': [u'mailto:example@example.com']}))]), (http.SERVICE_UNAVAILABLE, {b'content-type': JSON_ERROR_CONTENT_TYPE, b'replay-nonce': b64encode(b'Nonce2'), }, _json_dumps( {u'status': http.SERVICE_UNAVAILABLE, u'type': u'urn:acme:error:badNonce', u'detail': u'The client sent a bad nonce'} ))), (MatchesListwise([ Equals(b'POST'), Equals(u'https://example.org/acme/new-reg'), Equals({}), ContainsDict({b'Content-Type': Equals([JSON_CONTENT_TYPE])}), on_jws(Equals({ u'resource': u'new-reg', u'contact': [u'mailto:example@example.com'], }), nonce=b'Nonce2')]), (http.CREATED, {b'content-type': JSON_CONTENT_TYPE, b'replay-nonce': b64encode(b'Nonce3'), b'location': b'https://example.org/acme/reg/1', b'link': b','.join([ b';rel="next"', b';rel="recover"', b';rel="terms-of-service"', ])}, _json_dumps({ u'key': { u'n': u'rlQR-WPFDjJn-vz3Y4HIseX3t0H9sqVEvPSL1gexDJkZDK6' u'4AR3CLPg9kh2lXsMr0FysPuAspeHb75OVKFC1JQ', u'e': u'AQAB', u'kty': u'RSA'}, u'contact': [u'mailto:example@example.com'], })))], self.expectThat) client = self.useFixture( ClientFixture(sequence, key=RSA_KEY_512)).client # Stash a few nonces so that we have some to clear on the retry. client._client._nonces.update( [b'OldNonce1', b'OldNonce2', b'OldNonce3', b'OldNonce4']) reg = messages.NewRegistration.from_data(email=u'example@example.com') with sequence.consume(self.fail): d = client.register(reg) self.assertThat( d, succeeded(MatchesStructure( body=MatchesStructure( key=Equals(RSA_KEY_512.public_key()), contact=Equals(reg.contact)), uri=Equals(u'https://example.org/acme/reg/1'), new_authzr_uri=Equals( u'https://example.org/acme/new-authz'), terms_of_service=Equals(u'https://example.org/acme/terms'), ))) self.assertThat(client._client._nonces, Equals(set([b'Nonce3']))) def test_register_bad_nonce_twice(self): """ If a badNonce error is received on a retry, fail the request. """ sequence = RequestSequence( [_nonce_response( u'https://example.org/acme/new-reg', b'Nonce'), (MatchesListwise([ Equals(b'POST'), Equals(u'https://example.org/acme/new-reg'), Equals({}), Always(), on_jws(Equals({ u'resource': u'new-reg', u'contact': [u'mailto:example@example.com']}))]), (http.SERVICE_UNAVAILABLE, {b'content-type': JSON_ERROR_CONTENT_TYPE, b'replay-nonce': b64encode(b'Nonce2'), }, _json_dumps( {u'status': http.SERVICE_UNAVAILABLE, u'type': u'urn:acme:error:badNonce', u'detail': u'The client sent a bad nonce'} ))), (MatchesListwise([ Equals(b'POST'), Equals(u'https://example.org/acme/new-reg'), Equals({}), Always(), on_jws(Equals({ u'resource': u'new-reg', u'contact': [u'mailto:example@example.com']}))]), (http.SERVICE_UNAVAILABLE, {b'content-type': JSON_ERROR_CONTENT_TYPE, b'replay-nonce': b64encode(b'Nonce3'), }, _json_dumps( {u'status': http.SERVICE_UNAVAILABLE, u'type': u'urn:acme:error:badNonce', u'detail': u'The client sent a bad nonce'} )))], self.expectThat) client = self.useFixture( ClientFixture(sequence, key=RSA_KEY_512)).client reg = messages.NewRegistration.from_data(email=u'example@example.com') with sequence.consume(self.fail): d = client.register(reg) self.assertThat( d, failed_with(MatchesAll( IsInstance(ServerError), MatchesStructure( message=MatchesStructure( typ=Equals(u'urn:acme:error:badNonce'), detail=Equals(u'The client sent a bad nonce'), ))))) def test_agree_to_tos(self): """ Agreeing to the TOS returns a registration with the agreement updated. 
""" tos = u'https://example.org/acme/terms' sequence = RequestSequence( [_nonce_response( u'https://example.org/acme/reg/1', b'Nonce'), (MatchesListwise([ Equals(b'POST'), Equals(u'https://example.org/acme/reg/1'), Equals({}), ContainsDict({b'Content-Type': Equals([JSON_CONTENT_TYPE])}), on_jws(ContainsDict({ u'resource': Equals(u'reg'), u'agreement': Equals(tos)}))]), (http.ACCEPTED, {b'content-type': JSON_CONTENT_TYPE, b'replay-nonce': b64encode(b'Nonce2'), b'link': b','.join([ b';rel="next"', b';rel="recover"', b';rel="terms-of-service"', ])}, _json_dumps({ u'key': { u'n': u'rlQR-WPFDjJn-vz3Y4HIseX3t0H9sqVEvPSL1gexDJkZDK6' u'4AR3CLPg9kh2lXsMr0FysPuAspeHb75OVKFC1JQ', u'e': u'AQAB', u'kty': u'RSA'}, u'contact': [u'mailto:example@example.com'], u'agreement': tos, })))], self.expectThat) client = self.useFixture( ClientFixture(sequence, key=RSA_KEY_512)).client reg = messages.RegistrationResource( body=messages.Registration( contact=(u'mailto:example@example.com',), key=RSA_KEY_512.public_key()), uri=u'https://example.org/acme/reg/1', new_authzr_uri=u'https://example.org/acme/new-authz', terms_of_service=tos) with sequence.consume(self.fail): d = client.agree_to_tos(reg) self.assertThat( d, succeeded(MatchesStructure( body=MatchesStructure( key=Equals(RSA_KEY_512.public_key()), contact=Equals(reg.body.contact), agreement=Equals(tos)), uri=Equals(u'https://example.org/acme/reg/1'), new_authzr_uri=Equals( u'https://example.org/acme/new-authz'), terms_of_service=Equals(tos), ))) def test_from_directory(self): """ :func:`~txacme.client.Client.from_url` constructs a client with a directory retrieved from the given URL. """ new_reg = u'https://example.org/acme/new-reg' sequence = RequestSequence( [(MatchesListwise([ Equals(b'GET'), Equals(u'https://example.org/acme/'), Always(), Always(), Always()]), (http.OK, {b'content-type': JSON_CONTENT_TYPE, b'replay-nonce': b64encode(b'Nonce')}, _json_dumps({ u'new-reg': new_reg, u'revoke-cert': u'https://example.org/acme/revoke-cert', u'new-authz': u'https://example.org/acme/new-authz', })))], self.expectThat) treq_client = HTTPClient( agent=RequestTraversalAgent( StringStubbingResource(sequence)), data_to_body_producer=_SynchronousProducer) with sequence.consume(self.fail): d = Client.from_url( reactor, URL.fromText(u'https://example.org/acme/'), key=RSA_KEY_512, alg=RS256, jws_client=JWSClient( treq_client, key=RSA_KEY_512, alg=RS256)) self.assertThat( d, succeeded( MatchesAll( AfterPreprocessing( lambda client: client.directory[messages.NewRegistration()], Equals(new_reg))))) def test_default_client(self): """ ``~txacme.client._default_client`` constructs a client if one was not provided. """ reactor = MemoryReactor() client = _default_client(None, reactor, RSA_KEY_512, RS384) self.assertThat(client, IsInstance(JWSClient)) # We should probably assert some stuff about the treq.HTTPClient, but # it's hard without doing awful mock stuff. def test_request_challenges(self): """ :meth:`~txacme.client.Client.request_challenges` creates a new authorization, and returns the authorization resource with a list of possible challenges to proceed with. 
""" name = u'example.com' identifier_json = {u'type': u'dns', u'value': name} identifier = messages.Identifier.from_json(identifier_json) challenges = [ {u'type': u'http-01', u'uri': u'https://example.org/acme/authz/1/0', u'token': u'IlirfxKKXAsHtmzK29Pj8A'}, {u'type': u'dns', u'uri': u'https://example.org/acme/authz/1/1', u'token': u'DGyRejmCefe7v4NfDGDKfA'}, ] sequence = RequestSequence( [_nonce_response( u'https://example.org/acme/new-authz', b'Nonce'), (MatchesListwise([ Equals(b'POST'), Equals(u'https://example.org/acme/new-authz'), Equals({}), ContainsDict({b'Content-Type': Equals([JSON_CONTENT_TYPE])}), on_jws(Equals({ u'resource': u'new-authz', u'identifier': identifier_json, }))]), (http.CREATED, {b'content-type': JSON_CONTENT_TYPE, b'replay-nonce': b64encode(b'Nonce2'), b'location': b'https://example.org/acme/authz/1', b'link': b';rel="next"', }, _json_dumps({ u'status': u'pending', u'identifier': identifier_json, u'challenges': challenges, u'combinations': [[0], [1]], })))], self.expectThat) client = self.useFixture( ClientFixture(sequence, key=RSA_KEY_512)).client with sequence.consume(self.fail): self.assertThat( client.request_challenges(identifier), succeeded(MatchesStructure( body=MatchesStructure( identifier=Equals(identifier), challenges=Equals( tuple(map( messages.ChallengeBody.from_json, challenges))), combinations=Equals(((0,), (1,))), status=Equals(messages.STATUS_PENDING)), new_cert_uri=Equals( u'https://example.org/acme/new-cert'), ))) @example(http.CREATED, http.FOUND) @given(s.sampled_from(sorted(http.RESPONSES)), s.sampled_from(sorted(http.RESPONSES))) def test_expect_response_wrong_code(self, expected, actual): """ ``_expect_response`` raises `~acme.errors.ClientError` if the response code does not match the expected code. """ assume(expected != actual) response = TestResponse(code=actual) with ExpectedException(errors.ClientError): Client._expect_response(response, expected) def test_authorization_missing_link(self): """ ``_parse_authorization`` raises `~acme.errors.ClientError` if the ``"next"`` link is missing. """ response = TestResponse() with ExpectedException(errors.ClientError, '"next" link missing'): Client._parse_authorization(response) def test_authorization_unexpected_identifier(self): """ ``_check_authorization`` raises `~acme.errors.UnexpectedUpdate` if the return identifier doesn't match. """ with ExpectedException(errors.UnexpectedUpdate): Client._check_authorization( messages.AuthorizationResource( body=messages.Authorization()), messages.Identifier( typ=messages.IDENTIFIER_FQDN, value=u'example.org')) @example(u'example.com') @given(ts.dns_names()) def test_fqdn_identifier(self, name): """ `~txacme.client.fqdn_identifier` constructs an `~acme.messages.Identifier` of the right type. """ self.assertThat( fqdn_identifier(name), MatchesStructure( typ=Equals(messages.IDENTIFIER_FQDN), value=Equals(name))) def test_answer_challenge(self): """ `~txacme.client.Client.answer_challenge` responds to a challenge and returns the updated challenge. 
""" key_authorization = u'blahblahblah' uri = u'https://example.org/acme/authz/1/0' sequence = RequestSequence( [_nonce_response( u'https://example.org/acme/authz/1/0', b'Nonce'), (MatchesListwise([ Equals(b'POST'), Equals(uri), Equals({}), ContainsDict({b'Content-Type': Equals([JSON_CONTENT_TYPE])}), on_jws(Equals({ u'resource': u'challenge', u'type': u'http-01', }))]), (http.OK, {b'content-type': JSON_CONTENT_TYPE, b'replay-nonce': b64encode(b'Nonce2'), b'link': b';rel="up"', }, _json_dumps({ u'uri': uri, u'type': u'http-01', u'status': u'processing', u'token': u'DGyRejmCefe7v4NfDGDKfA', })))], self.expectThat) client = self.useFixture( ClientFixture(sequence, key=RSA_KEY_512)).client with sequence.consume(self.fail): self.assertThat( client.answer_challenge( messages.ChallengeBody( uri=uri, chall=challenges.HTTP01(token=b'blahblah'), status=messages.STATUS_PENDING), challenges.HTTP01Response( key_authorization=key_authorization)), succeeded(MatchesStructure( body=MatchesStructure(), authzr_uri=Equals( u'https://example.org/acme/authz/1'), ))) def test_challenge_missing_link(self): """ ``_parse_challenge`` raises `~acme.errors.ClientError` if the ``"up"`` link is missing. """ response = TestResponse() with ExpectedException(errors.ClientError, '"up" link missing'): Client._parse_challenge(response) @example(URL.fromText(u'https://example.org/'), URL.fromText(u'https://example.com/')) @given(ts.urls(), ts.urls()) def test_challenge_unexpected_uri(self, url1, url2): """ ``_check_challenge`` raises `~acme.errors.UnexpectedUpdate` if the challenge does not have the expected URI. """ url1 = url1.asURI().asText() url2 = url2.asURI().asText() assume(url1 != url2) with ExpectedException(errors.UnexpectedUpdate): Client._check_challenge( messages.ChallengeResource( body=messages.ChallengeBody(chall=None, uri=url1)), messages.ChallengeBody(chall=None, uri=url2)) @example(now=1459184402., name=u'example.com', retry_after=60, date_string=False) @example(now=1459184402., name=u'example.org', retry_after=60, date_string=True) @given(now=s.floats(min_value=0., max_value=2147483648.), name=ts.dns_names(), retry_after=s.none() | s.integers(min_value=0, max_value=1000), date_string=s.booleans()) def test_poll(self, now, name, retry_after, date_string): """ `~txacme.client.Client.poll` retrieves the latest state of an authorization resource, as well as the minimum time to wait before polling the state again. 
""" if retry_after is None: retry_after_encoded = None retry_after = 5 elif date_string: retry_after_encoded = http.datetimeToString(retry_after + now) else: retry_after_encoded = u'{}'.format(retry_after).encode('ascii') identifier_json = {u'type': u'dns', u'value': name} identifier = messages.Identifier.from_json(identifier_json) challenges = [ {u'type': u'http-01', u'status': u'invalid', u'uri': u'https://example.org/acme/authz/1/0', u'token': u'IlirfxKKXAsHtmzK29Pj8A'}, {u'type': u'dns', u'status': u'pending', u'uri': u'https://example.org/acme/authz/1/1', u'token': u'DGyRejmCefe7v4NfDGDKfA'}, ] authzr = messages.AuthorizationResource( uri=u'https://example.org/acme/authz/1', body=messages.Authorization( identifier=identifier)) response_headers = { b'content-type': JSON_CONTENT_TYPE, b'replay-nonce': b64encode(b'Nonce2'), b'location': b'https://example.org/acme/authz/1', b'link': b';rel="next"', } if retry_after_encoded is not None: response_headers[b'retry-after'] = retry_after_encoded sequence = RequestSequence( [(MatchesListwise([ Equals(b'GET'), Equals(u'https://example.org/acme/authz/1'), Equals({}), Always(), Always()]), (http.OK, response_headers, _json_dumps({ u'status': u'invalid', u'identifier': identifier_json, u'challenges': challenges, u'combinations': [[0], [1]], })))], self.expectThat) fixture = self.useFixture( ClientFixture(sequence, key=RSA_KEY_512)) fixture.clock.rightNow = now client = fixture.client with sequence.consume(self.fail): self.assertThat( client.poll(authzr), succeeded(MatchesListwise([ MatchesStructure( body=MatchesStructure( identifier=Equals(identifier), challenges=Equals( tuple(map( messages.ChallengeBody.from_json, challenges))), combinations=Equals(((0,), (1,))), status=Equals(messages.STATUS_INVALID)), new_cert_uri=Equals( u'https://example.org/acme/new-cert')), Nearly(retry_after, 1.0), ]))) def test_tls_sni_01_no_singleton(self): """ If a suitable singleton challenge is not found, `.NoSupportedChallenges` is raised. """ challs = [ {u'type': u'http-01', u'uri': u'https://example.org/acme/authz/1/0', u'token': u'IlirfxKKXAsHtmzK29Pj8A'}, {u'type': u'dns', u'uri': u'https://example.org/acme/authz/1/1', u'token': u'DGyRejmCefe7v4NfDGDKfA'}, {u'type': u'tls-sni-01', u'uri': u'https://example.org/acme/authz/1/2', u'token': u'f8IfXqddYr8IJqYHSH6NpA'}, ] combinations = ((0, 2), (1, 2)) authzr = messages.AuthorizationResource( body=messages.Authorization( challenges=list(map( messages.ChallengeBody.from_json, challs)), combinations=combinations)) with ExpectedException(NoSupportedChallenges): _find_supported_challenge( authzr, [NullResponder(challenges.TLSSNI01.typ)]) def test_no_tls_sni_01(self): """ If no tls-sni-01 challenges are available, `.NoSupportedChallenges` is raised. """ challs = [ {u'type': u'http-01', u'uri': u'https://example.org/acme/authz/1/0', u'token': u'IlirfxKKXAsHtmzK29Pj8A'}, {u'type': u'dns', u'uri': u'https://example.org/acme/authz/1/1', u'token': u'DGyRejmCefe7v4NfDGDKfA'}, {u'type': u'tls-sni-01', u'uri': u'https://example.org/acme/authz/1/2', u'token': u'f8IfXqddYr8IJqYHSH6NpA'}, ] combinations = ((0,), (1,)) authzr = messages.AuthorizationResource( body=messages.Authorization( challenges=list(map( messages.ChallengeBody.from_json, challs)), combinations=combinations)) with ExpectedException(NoSupportedChallenges): _find_supported_challenge( authzr, [NullResponder(challenges.TLSSNI01.typ)]) def test_only_tls_sni_01(self): """ If a singleton tls-sni-01 challenge is available, it is returned. 
""" challs = list(map( messages.ChallengeBody.from_json, [{u'type': u'http-01', u'uri': u'https://example.org/acme/authz/1/0', u'token': u'IlirfxKKXAsHtmzK29Pj8A'}, {u'type': u'dns', u'uri': u'https://example.org/acme/authz/1/1', u'token': u'DGyRejmCefe7v4NfDGDKfA'}, {u'type': u'tls-sni-01', u'uri': u'https://example.org/acme/authz/1/2', u'token': u'f8IfXqddYr8IJqYHSH6NpA'}, ])) combinations = ((0,), (1,), (2,)) authzr = messages.AuthorizationResource( body=messages.Authorization( challenges=challs, combinations=combinations)) responder = NullResponder(challenges.TLSSNI01.typ) self.assertThat( _find_supported_challenge(authzr, [responder]), MatchesListwise([ Is(responder), MatchesAll( IsInstance(messages.ChallengeBody), MatchesStructure( chall=IsInstance(challenges.TLSSNI01)))])) def test_answer_challenge_function(self): """ The challenge is found in the responder after invoking `~txacme.client.answer_challenge`. """ recorded_challenges = set() responder = RecordingResponder(recorded_challenges, u'tls-sni-01') uri = u'https://example.org/acme/authz/1/1' challb = messages.ChallengeBody.from_json({ u'uri': uri, u'token': u'IlirfxKKXAsHtmzK29Pj8A', u'type': u'tls-sni-01', u'status': u'pending'}) identifier_json = {u'type': u'dns', u'value': u'example.com'} identifier = messages.Identifier.from_json(identifier_json) authzr = messages.AuthorizationResource( body=messages.Authorization( identifier=identifier, challenges=[challb], combinations=[[0]])) sequence = RequestSequence( [_nonce_response( u'https://example.org/acme/authz/1/1', b'Nonce'), (MatchesListwise([ Equals(b'POST'), Equals(uri), Equals({}), ContainsDict({b'Content-Type': Equals([JSON_CONTENT_TYPE])}), on_jws(Equals({ u'resource': u'challenge', u'type': u'tls-sni-01', }))]), (http.OK, {b'content-type': JSON_CONTENT_TYPE, b'replay-nonce': b64encode(b'Nonce2'), b'link': b';rel="up"', }, _json_dumps({ u'uri': uri, u'token': u'IlirfxKKXAsHtmzK29Pj8A', u'type': u'tls-sni-01', u'status': u'processing', })))], self.expectThat) client = self.useFixture( ClientFixture(sequence, key=RSA_KEY_512)).client with sequence.consume(self.fail): d = answer_challenge(authzr, client, [responder]) self.assertThat(d, succeeded(Always())) stop_responding = d.result self.assertThat( recorded_challenges, MatchesListwise([ IsInstance(challenges.TLSSNI01) ])) self.assertThat( stop_responding(), succeeded(Always())) self.assertThat(recorded_challenges, Equals(set())) def _make_poll_response(self, uri, identifier_json): """ Return a factory for a poll response. """ def rr(status, error=None): chall = { u'type': u'tls-sni-01', u'status': status, u'uri': uri + u'/0', u'token': u'IlirfxKKXAsHtmzK29Pj8A'} if error is not None: chall[u'error'] = error return ( MatchesListwise([ Equals(b'GET'), Equals(uri), Equals({}), Always(), Always()]), (http.ACCEPTED, {b'content-type': JSON_CONTENT_TYPE, b'replay-nonce': b64encode(b'nonce2'), b'location': uri.encode('ascii'), b'link': b';rel="next"'}, _json_dumps({ u'status': status, u'identifier': identifier_json, u'challenges': [chall], u'combinations': [[0]], }))) return rr @example(name=u'example.com') @given(name=ts.dns_names()) def test_poll_timeout(self, name): """ If the timeout is exceeded during polling, `.poll_until_valid` will fail with ``CancelledError``. 
""" identifier_json = {u'type': u'dns', u'value': name} uri = u'https://example.org/acme/authz/1' rr = self._make_poll_response(uri, identifier_json) sequence = RequestSequence( [rr(u'pending'), rr(u'pending'), rr(u'pending'), ], self.expectThat) clock = Clock() challb = messages.ChallengeBody.from_json({ u'uri': uri + u'/0', u'token': u'IlirfxKKXAsHtmzK29Pj8A', u'type': u'tls-sni-01', u'status': u'pending'}) authzr = messages.AuthorizationResource( uri=uri, body=messages.Authorization( identifier=messages.Identifier.from_json(identifier_json), challenges=[challb], combinations=[[0]])) client = self.useFixture( ClientFixture(sequence, key=RSA_KEY_512)).client with sequence.consume(self.fail): d = poll_until_valid(authzr, clock, client, timeout=14.) clock.pump([5, 5, 5]) self.assertThat( d, failed_with(IsInstance(CancelledError))) @example(name=u'example.com') @given(name=ts.dns_names()) def test_poll_invalid(self, name): """ If the authorization enters an invalid state while polling, `.poll_until_valid` will fail with `.AuthorizationFailed`. """ identifier_json = {u'type': u'dns', u'value': name} uri = u'https://example.org/acme/authz/1' rr = self._make_poll_response(uri, identifier_json) sequence = RequestSequence( [rr(u'pending'), rr(u'invalid', { u'type': u'urn:acme:error:connection', u'detail': u'Failed to connect'}), ], self.expectThat) clock = Clock() challb = messages.ChallengeBody.from_json({ u'uri': uri + u'/0', u'token': u'IlirfxKKXAsHtmzK29Pj8A', u'type': u'tls-sni-01', u'status': u'pending', }) authzr = messages.AuthorizationResource( uri=uri, body=messages.Authorization( identifier=messages.Identifier.from_json(identifier_json), challenges=[challb], combinations=[[0]])) client = self.useFixture( ClientFixture(sequence, key=RSA_KEY_512)).client with sequence.consume(self.fail): d = poll_until_valid(authzr, clock, client, timeout=14.) clock.pump([5, 5]) self.assertThat( d, failed_with(MatchesAll( IsInstance(AuthorizationFailed), MatchesStructure( status=Equals(messages.STATUS_INVALID), errors=Equals([ messages.Error( typ=u'urn:acme:error:connection', detail=u'Failed to connect', title=None)])), AfterPreprocessing( repr, StartsWith(u'AuthorizationFailed(;rel="up"'.format( issuer_url.asURI().asText()).encode('utf-8')}, cert_bytes)), ], self.expectThat) client = self.useFixture( ClientFixture(sequence, key=RSA_KEY_512)).client with sequence.consume(self.fail): self.assertThat( client.request_issuance( CertificateRequest( csr=csr_for_names([name], RSA_KEY_512_RAW))), succeeded(MatchesStructure( body=Equals(cert_bytes)))) def test_fetch_chain_empty(self): """ If a certificate has no issuer link, `.Client.fetch_chain` returns an empty chain. """ cert = messages.CertificateResource(cert_chain_uri=None) sequence = RequestSequence([], self.expectThat) client = self.useFixture( ClientFixture(sequence, key=RSA_KEY_512)).client with sequence.consume(self.fail): self.assertThat( client.fetch_chain(cert), succeeded(Equals([]))) def _make_cert_sequence(self, cert_urls): """ Build a sequence for fetching a list of certificates. 
""" return RequestSequence([ (MatchesListwise([ Equals(b'GET'), Equals(url), Equals({}), ContainsDict({b'Accept': Equals([DER_CONTENT_TYPE])}), Always()]), (http.OK, {b'content-type': DER_CONTENT_TYPE, b'location': url.encode('utf-8'), b'link': u'<{!s}>;rel="up"'.format( issuer_url).encode('utf-8') if issuer_url is not None else b''}, b'')) for url, issuer_url in cert_urls ], self.expectThat) @settings(deadline=None) @example([u'http://example.com/1', u'http://example.com/2']) @given(s.lists(s.integers() .map(lambda n: u'http://example.com/{}'.format(n)), min_size=1, max_size=10)) def test_fetch_chain_okay(self, cert_urls): """ A certificate chain that is shorter than the max length is returned. """ cert = messages.CertificateResource( uri=u'http://example.com/', cert_chain_uri=cert_urls[0]) urls = list(zip(cert_urls, cert_urls[1:] + [None])) sequence = self._make_cert_sequence(urls) client = self.useFixture( ClientFixture(sequence, key=RSA_KEY_512)).client with sequence.consume(self.fail): self.assertThat( client.fetch_chain(cert), succeeded( MatchesListwise([ MatchesStructure( uri=Equals(url), cert_chain_uri=Equals(issuer_url)) for url, issuer_url in urls]))) @settings(deadline=None) @example([u'http://example.com/{}'.format(n) for n in range(20)]) @given(s.lists(s.integers() .map(lambda n: u'http://example.com/{}'.format(n)), min_size=11)) def test_fetch_chain_too_long(self, cert_urls): """ A certificate chain that is too long fails with `~acme.errors.ClientError`. """ cert = messages.CertificateResource( uri=u'http://example.com/', cert_chain_uri=cert_urls[0]) sequence = self._make_cert_sequence( list(zip(cert_urls, cert_urls[1:]))[:10]) client = self.useFixture( ClientFixture(sequence, key=RSA_KEY_512)).client with sequence.consume(self.fail): self.assertThat( client.fetch_chain(cert), failed_with(IsInstance(errors.ClientError))) class JWSClientTests(TestCase): """ :class:`.JWSClient` implements JWS-signed requests over HTTP. """ def test_check_invalid_json(self): """ If a JSON response is expected, but a response is received with a non-JSON Content-Type, :exc:`~acme.errors.ClientError` is raised. """ self.assertThat( JWSClient._check_response( TestResponse(content_type=b'application/octet-stream')), failed_with(IsInstance(errors.ClientError))) def test_check_invalid_error_type(self): """ If an error response is received with a non-JSON-problem Content-Type, :exc:`~acme.errors.ClientError` is raised. """ self.assertThat( JWSClient._check_response( TestResponse( code=http.FORBIDDEN, content_type=b'application/octet-stream')), failed_with(IsInstance(errors.ClientError))) def test_check_invalid_error(self): """ If an error response is received but cannot be parsed, :exc:`~acme.errors.ServerError` is raised. """ self.assertThat( JWSClient._check_response( TestResponse( code=http.FORBIDDEN, content_type=JSON_ERROR_CONTENT_TYPE)), failed_with(IsInstance(ServerError))) def test_check_valid_error(self): """ If an error response is received but cannot be parsed, :exc:`~acme.errors.ClientError` is raised. """ self.assertThat( JWSClient._check_response( TestResponse( code=http.FORBIDDEN, content_type=JSON_ERROR_CONTENT_TYPE, json=lambda: succeed({ u'type': u'unauthorized', u'detail': u'blah blah blah'}))), failed_with( MatchesAll( IsInstance(ServerError), AfterPreprocessing(repr, StartsWith('ServerError'))))) def test_check_expected_bad_json(self): """ If a JSON response was expected, but could not be parsed, :exc:`~acme.errors.ClientError` is raised. 
""" self.assertThat( JWSClient._check_response( TestResponse(json=lambda: fail(ValueError()))), failed_with(IsInstance(errors.ClientError))) def test_missing_nonce(self): """ If the response from the server does not have a nonce, :exc:`~acme.errors.MissingNonce` is raised. """ client = JWSClient(None, None, None) with ExpectedException(errors.MissingNonce): client._add_nonce(TestResponse()) def test_bad_nonce(self): """ If the response from the server has an unparseable nonce, :exc:`~acme.errors.BadNonce` is raised. """ client = JWSClient(None, None, None) with ExpectedException(errors.BadNonce): client._add_nonce(TestResponse(nonce=b'a!_')) def test_already_nonce(self): """ No request is made if we already have a nonce. """ client = JWSClient(None, None, None) client._nonces.add(u'nonce') self.assertThat(client._get_nonce(b''), succeeded(Equals(u'nonce'))) class ExtraCoverageTests(TestCase): """ Tests to get coverage on some test helpers that we don't really want to maintain ourselves. """ def test_always_never(self): self.assertThat(Always(), AfterPreprocessing(str, Equals('Always()'))) self.assertThat(Never(), AfterPreprocessing(str, Equals('Never()'))) self.assertThat(None, Not(Never())) self.assertThat( Nearly(1.0, 2.0), AfterPreprocessing(str, Equals('Nearly(1.0, 2.0)'))) self.assertThat(2.0, Not(Nearly(1.0))) def test_unexpected_number_of_request_causes_failure(self): """ If there are no more expected requests, making a request causes a failure. """ async_failures = [] sequence = RequestSequence( [], async_failure_reporter=lambda *a: async_failures.append(a)) client = HTTPClient( agent=RequestTraversalAgent( StringStubbingResource(sequence)), data_to_body_producer=_SynchronousProducer) d = client.get('https://anything', data=b'what', headers={b'1': b'1'}) self.assertThat( d, succeeded(MatchesStructure(code=Equals(500)))) self.assertEqual(1, len(async_failures)) self.assertIn("No more requests expected, but request", async_failures[0][2]) # the expected requests have all been made self.assertTrue(sequence.consumed()) def test_consume_context_manager_fails_on_remaining_requests(self): """ If the ``consume`` context manager is used, if there are any remaining expecting requests, the test case will be failed. """ sequence = RequestSequence( [(Always(), (418, {}, b'body'))] * 2, async_failure_reporter=self.assertThat) client = HTTPClient( agent=RequestTraversalAgent( StringStubbingResource(sequence)), data_to_body_producer=_SynchronousProducer) consume_failures = [] with sequence.consume(sync_failure_reporter=consume_failures.append): self.assertThat( client.get('https://anything', data=b'what', headers={b'1': b'1'}), succeeded(Always())) self.assertEqual(1, len(consume_failures)) self.assertIn( "Not all expected requests were made. Still expecting:", consume_failures[0]) class LinkParsingTests(TestCase): """ ``_parse_header_links`` parses the links from a response with Link: header fields. This implementation is ... actually not very good, which is why there aren't many tests. .. seealso: RFC 5988 """ def test_rfc_example1(self): """ The first example from the RFC. 
""" self.assertThat( _parse_header_links( TestResponse( links=[b'; ' b'rel="previous"; ' b'title="previous chapter"'])), Equals({ u'previous': {u'rel': u'previous', u'title': u'previous chapter', u'url': u'http://example.com/TheBook/chapter2'} })) __all__ = ['ClientTests', 'ExtraCoverageTests', 'LinkParsingTests'] txacme-0.9.3/src/txacme/test/test_endpoint.py000066400000000000000000000166171364626325400213120ustar00rootroot00000000000000""" Tests for `txacme.endpoint`. """ from datetime import datetime from josepy.jwk import JWKRSA from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives.serialization import load_pem_private_key from fixtures import TempDir from testtools import ExpectedException, TestCase from testtools.matchers import ( Always, Equals, Is, IsInstance, MatchesAll, MatchesPredicate, MatchesStructure) from testtools.twistedsupport import succeeded from twisted.internet.defer import succeed from twisted.internet.interfaces import ( IListeningPort, IStreamServerEndpoint, IStreamServerEndpointStringParser) from twisted.internet.protocol import Factory from twisted.internet.task import Clock from twisted.plugin import IPlugin from twisted.plugins import txacme_endpoint from twisted.python.filepath import FilePath from twisted.python.url import URL from txsni.snimap import HostDirectoryMap from zope.interface import implementer from zope.interface.verify import verifyObject from txacme._endpoint_parser import _AcmeParser from txacme.endpoint import AutoTLSEndpoint, load_or_create_client_key from txacme.store import DirectoryStore from txacme.test.test_client import RSA_KEY_512 from txacme.testing import FakeClient, MemoryStore from txacme.urls import LETSENCRYPT_DIRECTORY, LETSENCRYPT_STAGING_DIRECTORY @implementer(IListeningPort) class DummyPort(object): """ Port implementation that does nothing. """ def stopListening(self): # noqa pass @implementer(IStreamServerEndpoint) class DummyEndpoint(object): """ Endpoint implementation that does nothing. """ def listen(self, factory): return succeed(DummyPort()) class EndpointTests(TestCase): """ Tests for `~txacme.endpoint.AutoTLSEndpoint`. """ def setUp(self): super(EndpointTests, self).setUp() clock = Clock() clock.rightNow = ( datetime.now() - datetime(1970, 1, 1)).total_seconds() client = FakeClient(RSA_KEY_512, clock) self.endpoint = AutoTLSEndpoint( reactor=clock, directory=URL.fromText(u'https://example.com/'), client_creator=lambda reactor, directory: succeed(client), cert_store=MemoryStore(), cert_mapping={}, sub_endpoint=DummyEndpoint()) def test_directory_url_type(self): """ `~txacme.endpoint.AutoTLSEndpoint` expects a ``twisted.python.url.URL`` instance for the ``directory`` argument. """ with ExpectedException(TypeError): AutoTLSEndpoint( reactor=Clock(), directory='/wrong/kind/of/directory', client_creator=None, cert_store=None, cert_mapping={}, sub_endpoint=DummyEndpoint()) def test_listen_starts_service(self): """ ``AutoTLSEndpoint.listen`` starts an ``AcmeIssuingService``. Stopping the port stops the service. """ factory = Factory() d = self.endpoint.listen(factory) self.assertThat( d, succeeded( MatchesPredicate( IListeningPort.providedBy, '%r does not provide IListeningPort'))) port = d.result self.assertThat( self.endpoint.service, MatchesStructure(running=Equals(True))) self.assertThat(port.stopListening(), succeeded(Always())) self.assertThat( self.endpoint.service, MatchesStructure(running=Equals(False))) class PluginTests(TestCase): """ Tests for the plugins. 
""" def test_le_parser(self): """ The ``le:`` parser uses the Let's Encrypt production directory, and provides the relevant interfaces. """ verifyObject( IPlugin, txacme_endpoint.le_parser) verifyObject( IStreamServerEndpointStringParser, txacme_endpoint.le_parser) self.assertThat( txacme_endpoint.le_parser, MatchesStructure( prefix=Equals('le'), directory=Equals(LETSENCRYPT_DIRECTORY))) def test_lets_parser(self): """ The ``lets:`` parser uses the Let's Encrypt staging directory, and provides the relevant interfaces. """ verifyObject( IPlugin, txacme_endpoint.lets_parser) verifyObject( IStreamServerEndpointStringParser, txacme_endpoint.lets_parser) self.assertThat( txacme_endpoint.lets_parser, MatchesStructure( prefix=Equals('lets'), directory=Equals(LETSENCRYPT_STAGING_DIRECTORY))) def test_parser(self): """ ``AcmeParser`` creates an endpoint with the specified ACME directory and directory store. """ directory = URL.fromText(u'https://example.com/acme') parser = _AcmeParser(u'prefix', directory) tempdir = self.useFixture(TempDir()).path temp_path = FilePath(tempdir) key_path = temp_path.child('client.key') reactor = object() self.assertThat( parser.parseStreamServer( reactor, tempdir, 'tcp', '443', timeout=0), MatchesAll( IsInstance(AutoTLSEndpoint), MatchesStructure( reactor=Is(reactor), directory=Equals(directory), cert_store=MatchesAll( IsInstance(DirectoryStore), MatchesStructure( path=Equals(temp_path))), cert_mapping=MatchesAll( IsInstance(HostDirectoryMap), MatchesStructure( directoryPath=Equals(temp_path))), sub_endpoint=MatchesPredicate( IStreamServerEndpoint.providedBy, '%r is not a stream server endpoint')))) self.assertThat(key_path.isfile(), Equals(True)) key_data = key_path.getContent() parser.parseStreamServer(reactor, tempdir, 'tcp', '443'), self.assertThat(key_path.getContent(), Equals(key_data)) class LoadClientKeyTests(TestCase): """ Tests for `~txacme.endpoint.load_or_create_client_key`. """ def test_create_key(self): """ `~txacme.endpoint.load_or_create_client_key` creates a new key if one does not exist. """ tempdir = self.useFixture(TempDir()).path temp_path = FilePath(tempdir) key_path = temp_path.child('client.key') self.assertThat(key_path.isfile(), Equals(False)) self.assertThat( load_or_create_client_key(temp_path), Equals(JWKRSA(key=load_pem_private_key( key_path.getContent(), password=None, backend=default_backend())))) def test_idempotent(self): """ Loading the key twice loads the same key the second time as was created the first time. """ tempdir = self.useFixture(TempDir()).path temp_path = FilePath(tempdir) key_path = temp_path.child('client.key') self.assertThat(key_path.isfile(), Equals(False)) key = load_or_create_client_key(temp_path) self.assertThat(load_or_create_client_key(temp_path), Equals(key)) __all__ = ['EndpointTests', 'PluginTests'] txacme-0.9.3/src/txacme/test/test_matchers.py000066400000000000000000000020051364626325400212620ustar00rootroot00000000000000from testtools import TestCase from testtools.tests.matchers.helpers import TestMatchersInterface from txacme.util import csr_for_names from txacme.test.matchers import ValidForName from txacme.test.test_client import RSA_KEY_512_RAW class ValidForNameTests(TestMatchersInterface, TestCase): """ `~txacme.test.matchers.ValidForName` matches if a CSR/cert is valid for the given name. 
""" matches_matcher = ValidForName(u'example.com') matches_matches = [ csr_for_names([u'example.com'], RSA_KEY_512_RAW), csr_for_names([u'example.invalid', u'example.com'], RSA_KEY_512_RAW), csr_for_names([u'example.com', u'example.invalid'], RSA_KEY_512_RAW), ] matches_mismatches = [ csr_for_names([u'example.org'], RSA_KEY_512_RAW), csr_for_names([u'example.net', u'example.info'], RSA_KEY_512_RAW), ] str_examples = [ ('ValidForName({!r})'.format(u'example.com'), ValidForName(u'example.com')), ] describe_examples = [] txacme-0.9.3/src/txacme/test/test_service.py000066400000000000000000000436541364626325400211330ustar00rootroot00000000000000import uuid from datetime import datetime, timedelta from operator import methodcaller from cryptography import x509 from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes, serialization from cryptography.x509.oid import NameOID from fixtures import Fixture from hypothesis import strategies as s from hypothesis import example, given from hypothesis.strategies import datetimes from pem import Certificate, RSAPrivateKey from testtools import run_test_with, TestCase from testtools.matchers import ( AfterPreprocessing, AllMatch, Always, Contains, Equals, GreaterThan, HasLength, Is, IsInstance, MatchesAny, MatchesDict, MatchesListwise, MatchesStructure, Not) from testtools.twistedsupport import ( AsynchronousDeferredRunTest, failed, flush_logged_errors, has_no_result, succeeded) from twisted.internet.defer import CancelledError, Deferred, fail, succeed from twisted.internet.task import Clock from twisted.python.failure import Failure from txacme.service import _default_panic, AcmeIssuingService from txacme.test import strategies as ts from txacme.test.test_client import ( failed_with, RecordingResponder, RSA_KEY_512, RSA_KEY_512_RAW) from txacme.testing import FakeClient, FakeClientController, MemoryStore def _generate_cert(server_name, not_valid_before, not_valid_after, key=RSA_KEY_512_RAW): """ Generate a self-signed certificate for test purposes. :param str server_name: The SAN the certificate should have. :param ~datetime.datetime not_valid_before: Valid from this moment. :param ~datetime.datetime not_valid_after: Expiry time. :param key: The private key. :rtype: `str` :return: The certificate in PEM format. """ common_name = ( u'san.too.long.invalid' if len(server_name) > 64 else server_name) name = x509.Name([ x509.NameAttribute(NameOID.COMMON_NAME, common_name)]) cert = ( x509.CertificateBuilder() .subject_name(name) .issuer_name(name) .not_valid_before(not_valid_before) .not_valid_after(not_valid_after) .serial_number(int(uuid.uuid4())) .public_key(key.public_key()) .add_extension( x509.SubjectAlternativeName([x509.DNSName(server_name)]), critical=False) .sign( private_key=key, algorithm=hashes.SHA256(), backend=default_backend()) ) return [ Certificate( cert.public_bytes(serialization.Encoding.PEM)), RSAPrivateKey( key.private_bytes( encoding=serialization.Encoding.PEM, format=serialization.PrivateFormat.TraditionalOpenSSL, encryption_algorithm=serialization.NoEncryption())), ] def _match_certificate(matcher): return MatchesAny( Not(IsInstance(Certificate)), AfterPreprocessing( lambda c: x509.load_pem_x509_certificate( c.as_bytes(), default_backend()), matcher)) class HangingClient(object): """ Test client that always hangs. """ def __getattr__(self, name): return lambda *a, **kw: Deferred() class FailingClient(object): """ Test client that always fails. 
""" def __getattr__(self, name): return lambda *a, **kw: fail(RuntimeError('Tried to do something')) class AcmeFixture(Fixture): """ A fixture for setting up an `~txacme.service.AcmeIssuingService`. """ def __init__(self, now=datetime(2000, 1, 1, 0, 0, 0), certs=None, panic_interval=None, panic=None, client=None, email=None): super(AcmeFixture, self).__init__() self.now = now self._certs = certs self._panic_interval = panic_interval self._panic = panic self._email = email self.acme_client = client self.controller = FakeClientController() def _setUp(self): self.cert_store = MemoryStore(self._certs) self.clock = Clock() self.clock.rightNow = ( self.now - datetime(1970, 1, 1)).total_seconds() if self.acme_client is None: acme_client = FakeClient( RSA_KEY_512, clock=self.clock, ca_key=RSA_KEY_512_RAW, controller=self.controller) else: acme_client = self.acme_client self.responder = RecordingResponder(set(), u'tls-sni-01') args = dict( cert_store=self.cert_store, client_creator=lambda: succeed(acme_client), clock=self.clock, responders=[self.responder], email=self._email, panic_interval=self._panic_interval, panic=self._panic, generate_key=lambda: RSA_KEY_512_RAW) self.service = AcmeIssuingService( **{k: v for k, v in args.items() if v is not None}) self.addCleanup( lambda: self.service.running and self.service.stopService()) @s.composite def panicing_cert(draw, now, panic): server_name = draw(ts.dns_names()) offset = timedelta(seconds=draw( s.integers( min_value=-1000, max_value=int(panic.total_seconds())))) return (server_name, _generate_cert( server_name, not_valid_before=now + offset - timedelta(seconds=1), not_valid_after=now + offset)) @s.composite def panicing_certs_fixture(draw): now = draw(datetimes( min_value=datetime(1971, 1, 1), max_value=datetime(2030, 1, 1))) panic = timedelta(seconds=draw( s.integers(min_value=60, max_value=60 * 60 * 24))) certs = dict( draw( s.lists( panicing_cert(now, panic), min_size=1, max_size=5, unique_by=lambda i: i[0]))) return AcmeFixture(now=now, panic_interval=panic, certs=certs) class AcmeIssuingServiceTests(TestCase): """ Tests for `txacme.service.AcmeIssuingService`. """ def test_when_certs_valid_no_certs(self): """ The deferred returned by ``when_certs_valid`` fires immediately if there are no certs in the store. """ service = self.useFixture(AcmeFixture()).service service.startService() self.assertThat( service.when_certs_valid(), succeeded(Is(None))) @example(now=datetime(2000, 1, 1, 0, 0, 0), certs=[(timedelta(seconds=60), u'example.com'), (timedelta(seconds=90), u'example.org')]) @given(now=datetimes( min_value=datetime(1971, 1, 1), max_value=datetime(2030, 1, 1)), certs=s.lists( s.tuples( s.integers(min_value=0, max_value=1000) .map(lambda s: timedelta(seconds=s)), ts.dns_names()))) def test_when_certs_valid_all_certs_valid(self, now, certs): """ The deferred returned by ``when_certs_valid`` fires immediately if none of the certs in the store are expired. 
""" certs = { server_name: _generate_cert( server_name, not_valid_before=now - timedelta(seconds=1), not_valid_after=now + offset) for offset, server_name in certs} with AcmeFixture(now=now, certs=certs) as fixture: service = fixture.service service.startService() self.assertThat( service.when_certs_valid(), succeeded(Is(None))) self.assertThat(fixture.responder.challenges, HasLength(0)) @given(fixture=panicing_certs_fixture()) def test_when_certs_valid_certs_expired(self, fixture): """ The deferred returned by ``when_certs_valid`` only fires once all panicing and expired certs have been renewed. """ with fixture: service = fixture.service d = service.when_certs_valid() self.assertThat(d, has_no_result()) service.startService() self.assertThat(d, succeeded(Is(None))) max_expiry = fixture.now + service.panic_interval self.assertThat( fixture.cert_store.as_dict(), succeeded(AfterPreprocessing( methodcaller('values'), AllMatch(AllMatch( _match_certificate( MatchesStructure( not_valid_after=GreaterThan(max_expiry)))))))) self.assertThat(fixture.responder.challenges, HasLength(0)) def test_time_marches_on(self): """ Any certs that have exceeded the panic or reissue intervals will be reissued at the next check. """ now = datetime(2000, 1, 1, 0, 0, 0) certs = { u'example.com': _generate_cert( u'example.com', not_valid_before=now - timedelta(seconds=1), not_valid_after=now + timedelta(days=31)), u'example.org': _generate_cert( u'example.org', not_valid_before=now - timedelta(seconds=1), not_valid_after=now + timedelta(days=32)), } with AcmeFixture(now=now, certs=certs) as fixture: fixture.service.startService() self.assertThat( fixture.service.when_certs_valid(), succeeded(Is(None))) self.assertThat( fixture.cert_store.as_dict(), succeeded(Equals(certs))) fixture.clock.advance(36 * 60 * 60) self.assertThat( fixture.cert_store.as_dict(), succeeded( MatchesDict({ u'example.com': Not(Equals(certs[u'example.com'])), u'example.org': Equals(certs[u'example.org']), }))) self.assertThat(fixture.responder.challenges, HasLength(0)) fixture.clock.advance(36 * 60 * 60) self.assertThat( fixture.cert_store.as_dict(), succeeded( MatchesDict({ u'example.com': Not(Equals(certs[u'example.com'])), u'example.org': Not(Equals(certs[u'example.org'])), }))) self.assertThat(fixture.responder.challenges, HasLength(0)) @run_test_with(AsynchronousDeferredRunTest) def test_errors(self): """ If a cert renewal fails within the panic interval, the panic callback is invoked; otherwise the error is logged normally. """ now = datetime(2000, 1, 1, 0, 0, 0) certs = { u'example.com': _generate_cert( u'example.com', not_valid_before=now - timedelta(seconds=1), not_valid_after=now + timedelta(days=31)), } panics = [] with AcmeFixture(now=now, certs=certs, panic=lambda *a: panics.append(a)) as fixture: fixture.service.startService() self.assertThat( fixture.service.when_certs_valid(), succeeded(Is(None))) self.assertThat(fixture.responder.challenges, HasLength(0)) fixture.controller.pause() fixture.clock.advance(36 * 60 * 60) # Resume the client.request_issuance deferred with an exception. fixture.controller.resume(Failure(Exception())) self.assertThat(flush_logged_errors(), HasLength(1)) self.assertThat(panics, Equals([])) self.assertThat(fixture.responder.challenges, HasLength(0)) fixture.controller.pause() fixture.clock.advance(15 * 24 * 60 * 60) # Resume the client.request_issuance deferred with an exception. 
fixture.controller.resume(Failure(Exception())) self.assertThat( panics, MatchesListwise([ MatchesListwise([IsInstance(Failure), Equals(u'example.com')]), ])) self.assertThat(fixture.responder.challenges, HasLength(0)) @run_test_with(AsynchronousDeferredRunTest) def test_timer_errors(self): """ If the timed check fails (for example, because registration fails), the error should be caught and logged. """ with AcmeFixture(client=FailingClient()) as fixture: fixture.service.startService() self.assertThat( fixture.service._check_certs(), succeeded(Always())) self.assertThat(flush_logged_errors(), HasLength(2)) def test_starting_stopping_cancellation(self): """ Test the starting and stopping behaviour. """ with AcmeFixture(client=HangingClient()) as fixture: d = fixture.service.when_certs_valid() self.assertThat(d, has_no_result()) fixture.service.startService() self.assertThat(d, has_no_result()) fixture.service.stopService() self.assertThat(d, failed(Always())) @run_test_with(AsynchronousDeferredRunTest) def test_default_panic(self): """ The default panic callback logs a message via ``twisted.logger``. """ try: 1 / 0 except BaseException: f = Failure() _default_panic(f, u'server_name') self.assertThat(flush_logged_errors(), Equals([f])) @example(u'example.com') @given(ts.dns_names()) def test_blank_cert(self, server_name): """ An empty certificate file will be treated like an expired certificate. """ with AcmeFixture(certs={server_name: []}) as fixture: fixture.service.startService() self.assertThat( fixture.service.when_certs_valid(), succeeded(Always())) self.assertThat( fixture.cert_store.as_dict(), succeeded( MatchesDict({server_name: Not(Equals([]))}))) self.assertThat(fixture.responder.challenges, HasLength(0)) @example(u'example.com') @given(ts.dns_names()) def test_issue_one_cert(self, server_name): """ ``issue_cert`` will (re)issue a single certificate unconditionally. """ with AcmeFixture() as fixture: fixture.service.startService() self.assertThat( fixture.cert_store.as_dict(), succeeded( Not(Contains(server_name)))) self.assertThat( fixture.service.issue_cert(server_name), succeeded(Always())) self.assertThat( fixture.cert_store.as_dict(), succeeded( MatchesDict({server_name: Not(Equals([]))}))) @example(u'example.com') @given(ts.dns_names()) def test_issue_concurrently(self, server_name): """ Invoking ``issue_cert`` multiple times concurrently for the same name will not start multiple issuing processes, only wait for the first process to complete. """ with AcmeFixture() as fixture: fixture.service.startService() self.assertThat( fixture.cert_store.as_dict(), succeeded( Not(Contains(server_name)))) fixture.controller.pause() d1 = fixture.service.issue_cert(server_name) self.assertThat(d1, has_no_result()) d2 = fixture.service.issue_cert(server_name) self.assertThat(d2, has_no_result()) self.assertThat(fixture.controller.count(), Equals(1)) fixture.controller.resume() self.assertThat(d1, succeeded(Always())) self.assertThat(d2, succeeded(Always())) self.assertThat( fixture.cert_store.as_dict(), succeeded( MatchesDict({server_name: Not(Equals([]))}))) @example(u'example.com') @given(ts.dns_names()) def test_cancellation(self, server_name): """ Cancelling the deferred returned by ``issue_cert`` cancels the actual issuing process. 
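        Roughly the behaviour being exercised (a sketch)::

            d = service.issue_cert(u'example.com')
            d.cancel()
            # d (and any other waiters for the same name) fails with
            # CancelledError, and no certificate is stored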
""" with AcmeFixture() as fixture: fixture.service.startService() self.assertThat( fixture.cert_store.as_dict(), succeeded( Not(Contains(server_name)))) fixture.controller.pause() d1 = fixture.service.issue_cert(server_name) self.assertThat(d1, has_no_result()) d2 = fixture.service.issue_cert(server_name) self.assertThat(d2, has_no_result()) self.assertThat(fixture.controller.count(), Equals(1)) d2.cancel() fixture.controller.resume() self.assertThat(d1, failed_with(IsInstance(CancelledError))) self.assertThat(d2, failed_with(IsInstance(CancelledError))) self.assertThat( fixture.cert_store.as_dict(), succeeded( Not(Contains(server_name)))) def test_registration_email(self): """ If we give our service an email address, that address will be used as a registration contact. """ # First the case with no email given. with AcmeFixture() as fixture: fixture.service.startService() self.assertThat(fixture.service._regr, MatchesStructure( body=MatchesStructure( key=Is(None), contact=Equals(())))) # Next, we give an email. with AcmeFixture(email=u'example@example.com') as fixture: fixture.service.startService() self.assertThat(fixture.service._regr, MatchesStructure( body=MatchesStructure( key=Is(None), contact=Equals((u'mailto:example@example.com',))))) __all__ = ['AcmeIssuingServiceTests'] txacme-0.9.3/src/txacme/test/test_store.py000066400000000000000000000106441364626325400206200ustar00rootroot00000000000000from operator import methodcaller import pem from fixtures import TempDir from hypothesis import example, given from testtools import TestCase from testtools.matchers import ( AfterPreprocessing, AllMatch, ContainsDict, Equals, Is, IsInstance) from testtools.twistedsupport import succeeded from twisted.python.compat import unicode from twisted.python.filepath import FilePath from txacme.store import DirectoryStore from txacme.test import strategies as ts from txacme.test.test_client import failed_with from txacme.testing import MemoryStore EXAMPLE_PEM_OBJECTS = [ pem.RSAPrivateKey( b'-----BEGIN RSA PRIVATE KEY-----\n' b'iq63EP+H3w==\n' b'-----END RSA PRIVATE KEY-----\n'), pem.Certificate( b'-----BEGIN CERTIFICATE-----\n' b'yns=\n' b'-----END CERTIFICATE-----\n'), pem.Certificate( b'-----BEGIN CERTIFICATE-----\n' b'pNaiqhAT\n' b'-----END CERTIFICATE-----\n'), ] EXAMPLE_PEM_OBJECTS2 = [ pem.RSAPrivateKey( b'-----BEGIN RSA PRIVATE KEY-----\n' b'fQ==\n' b'-----END RSA PRIVATE KEY-----\n'), pem.Certificate( b'-----BEGIN CERTIFICATE-----\n' b'xUg=\n' b'-----END CERTIFICATE-----\n'), ] class _StoreTestsMixin(object): """ Tests for `txacme.interfaces.ICertificateStore` implementations. """ @example(u'example.com', EXAMPLE_PEM_OBJECTS) @given(ts.dns_names(), ts.pem_objects()) def test_insert(self, server_name, pem_objects): """ Inserting an entry causes the same entry to be returned by ``get`` and ``as_dict``. """ self.assertThat( self.cert_store.store(server_name, pem_objects), succeeded(Is(None))) self.assertThat( self.cert_store.get(server_name), succeeded(Equals(pem_objects))) self.assertThat( self.cert_store.as_dict(), succeeded(ContainsDict( {server_name: Equals(pem_objects)}))) @example(u'example.com', EXAMPLE_PEM_OBJECTS, EXAMPLE_PEM_OBJECTS2) @given(ts.dns_names(), ts.pem_objects(), ts.pem_objects()) def test_insert_twice(self, server_name, pem_objects, pem_objects2): """ Inserting an entry a second time overwrites the first entry. 
""" self.assertThat( self.cert_store.store(server_name, pem_objects), succeeded(Is(None))) self.assertThat( self.cert_store.store(server_name, pem_objects2), succeeded(Is(None))) self.assertThat( self.cert_store.get(server_name), succeeded(Equals(pem_objects2))) self.assertThat( self.cert_store.as_dict(), succeeded(ContainsDict({server_name: Equals(pem_objects2)}))) @example(u'example.com') @given(ts.dns_names()) def test_get_missing(self, server_name): """ Getting a non-existent entry results in `KeyError`. """ self.assertThat( self.cert_store.get(server_name), failed_with(IsInstance(KeyError))) @example(u'example.com', EXAMPLE_PEM_OBJECTS) @given(ts.dns_names(), ts.pem_objects()) def test_unicode_keys(self, server_name, pem_objects): """ The keys of the dict returned by ``as_dict`` are ``unicode``. """ self.assertThat( self.cert_store.store(server_name, pem_objects), succeeded(Is(None))) self.assertThat( self.cert_store.as_dict(), succeeded(AfterPreprocessing( methodcaller('keys'), AllMatch(IsInstance(unicode))))) class DirectoryStoreTests(_StoreTestsMixin, TestCase): """ Tests for `txacme.store.DirectoryStore`. """ def setUp(self): super(DirectoryStoreTests, self).setUp() temp_dir = self.useFixture(TempDir()) self.cert_store = DirectoryStore(FilePath(temp_dir.path)) def test_filepath_mode(self): """ The given ``FilePath`` is always converted to text mode. """ store = DirectoryStore(FilePath(b'bytesbytesbytes')) self.assertThat(store.path.path, IsInstance(unicode)) class MemoryStoreTests(_StoreTestsMixin, TestCase): """ Tests for `txacme.testing.MemoryStore`. """ def setUp(self): super(MemoryStoreTests, self).setUp() self.cert_store = MemoryStore() __all__ = ['DirectoryStoreTests', 'MemoryStoreTests'] txacme-0.9.3/src/txacme/test/test_util.py000066400000000000000000000133311364626325400204350ustar00rootroot00000000000000from codecs import decode import attr from OpenSSL import crypto from acme import challenges from josepy.b64 import b64encode from josepy.errors import DeserializationError from cryptography import x509 from cryptography.hazmat.primitives import hashes from cryptography.hazmat.primitives.asymmetric import rsa from cryptography.x509.oid import NameOID from hypothesis import strategies as s from hypothesis import assume, example, given from service_identity.pyopenssl import verify_hostname from testtools import ExpectedException, TestCase from testtools.matchers import ( Equals, IsInstance, MatchesAll, MatchesStructure, Not) from txacme.test import strategies as ts from txacme.test.matchers import ValidForName from txacme.test.test_client import RSA_KEY_512, RSA_KEY_512_RAW from txacme.util import ( const, csr_for_names, decode_csr, encode_csr, generate_private_key, generate_tls_sni_01_cert) class GeneratePrivateKeyTests(TestCase): """ `.generate_private_key` generates private keys of various types using sensible parameters. """ @example(u'not-a-real-key-type') @given(s.text().filter(lambda t: t not in [u'rsa'])) def test_unknown_key_type(self, key_type): """ Passing an unknown key type results in :exc:`.ValueError`. """ with ExpectedException(ValueError): generate_private_key(key_type) def test_rsa_key(self): """ Passing ``u'rsa'`` results in an RSA private key. 
""" key1 = generate_private_key(u'rsa') self.assertThat(key1, IsInstance(rsa.RSAPrivateKey)) key2 = generate_private_key(u'rsa') self.assertThat(key2, IsInstance(rsa.RSAPrivateKey)) self.assertThat( key1.public_key().public_numbers(), Not(Equals(key2.public_key().public_numbers()))) @attr.s class NotAConnection(object): """ Pretend to be an ``OpenSSL.Connection`` object as far as ``service_identity`` cares. """ _cert = attr.ib() def get_peer_certificate(self): """ Return the certificate. """ return self._cert class GenerateCertTests(TestCase): """ `.generate_tls_sni_01_cert` generates a cert and key suitable for responding for the given challenge SAN. """ @example(token=b'BWYcfxzmOha7-7LoxziqPZIUr99BCz3BfbN9kzSFnrU') @given(token=s.binary(min_size=32, max_size=32).map(b64encode)) def test_cert_verifies(self, token): """ The certificates generated verify using ``acme.challenges.TLSSNI01Response.verify_cert``. """ ckey = RSA_KEY_512_RAW challenge = challenges.TLSSNI01(token=token) response = challenge.response(RSA_KEY_512) server_name = response.z_domain.decode('ascii') cert, pkey = generate_tls_sni_01_cert( server_name, _generate_private_key=lambda key_type: ckey) self.assertThat(cert, ValidForName(server_name)) ocert = crypto.X509.from_cryptography(cert) self.assertThat( decode(ocert.digest('sha256').replace(b':', b''), 'hex'), Equals(cert.fingerprint(hashes.SHA256()))) okey = crypto.PKey.from_cryptography_key(pkey) # TODO: Can we assert more here? self.assertThat(okey.bits(), Equals(pkey.key_size)) self.assertThat( response.verify_cert(ocert), Equals(True)) verify_hostname(NotAConnection(ocert), server_name) class CSRTests(TestCase): """ `~txacme.util.encode_csr` and `~txacme.util.decode_csr` serialize CSRs in JOSE Base64 DER encoding. """ @example(names=[u'example.com', u'example.org']) @given(names=s.lists(ts.dns_names(), min_size=1)) def test_roundtrip(self, names): """ The encoding roundtrips. """ assume(len(names[0]) <= 64) csr = csr_for_names(names, RSA_KEY_512_RAW) self.assertThat(decode_csr(encode_csr(csr)), Equals(csr)) def test_decode_garbage(self): """ If decoding fails, `~txacme.util.decode_csr` raises `~josepy.errors.DeserializationError`. """ with ExpectedException(DeserializationError): decode_csr(u'blah blah not a valid CSR') def test_empty_names_invalid(self): """ `~txacme.util.csr_for_names` raises `ValueError` if given an empty list of names. """ with ExpectedException(ValueError): csr_for_names([], RSA_KEY_512_RAW) @example(names=[u'example.com', u'example.org'], key=RSA_KEY_512_RAW) @given(names=s.lists(ts.dns_names(), min_size=1), key=s.just(RSA_KEY_512_RAW)) def test_valid_for_names(self, names, key): """ `~txacme.util.csr_for_names` returns a CSR that is actually valid for the given names. """ assume(len(names[0]) <= 64) self.assertThat( csr_for_names(names, key), MatchesAll(*[ValidForName(name) for name in names])) def test_common_name_too_long(self): """ If the first name provided is too long, `~txacme.util.csr_for_names` uses a dummy value for the common name. """ self.assertThat( csr_for_names([u'aaaa.' * 16], RSA_KEY_512_RAW), MatchesStructure( subject=Equals(x509.Name([ x509.NameAttribute( NameOID.COMMON_NAME, u'san.too.long.invalid')])))) class ConstTests(TestCase): """ `~txacme.util.const` returns a function that always returns a constant value. 
""" @given(s.integers()) def test_const(self, x): self.assertThat(const(x)(), Equals(x)) __all__ = [ 'GeneratePrivateKeyTests', 'GenerateCertTests', 'CSRTests', 'ConstTests'] txacme-0.9.3/src/txacme/testing.py000066400000000000000000000204451364626325400171230ustar00rootroot00000000000000""" Utilities for testing with txacme. """ from collections import OrderedDict from datetime import timedelta from uuid import uuid4 import attr from acme import challenges, messages from cryptography import x509 from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes, serialization from cryptography.x509.oid import ExtensionOID, NameOID from twisted.internet.defer import Deferred, fail, succeed from twisted.python.compat import unicode from zope.interface import implementer from txacme.interfaces import ICertificateStore, IResponder from txacme.util import clock_now, generate_private_key @attr.s class FakeClientController(object): """ Controls issuing for `FakeClient`. """ paused = attr.ib(default=False) _waiting = attr.ib(default=attr.Factory(list), init=False) def issue(self): """ Return a deferred that fires when we are ready to issue. """ if self.paused: d = Deferred() self._waiting.append(d) return d else: return succeed(None) def pause(self): """ Temporarily pause issuing. """ self.paused = True def resume(self, value=None): """ Resume issuing, allowing any pending issuances to proceed. :param value: An (optional) value with which pending deferreds will be called back. """ _waiting = self._waiting self._waiting = [] for d in _waiting: d.callback(value) def count(self): """ Count pending issuances. """ return len(self._waiting) class FakeClient(object): """ Provides the same API as `~txacme.client.Client`, but performs no network operations and issues certificates signed by its own fake CA. """ _challenge_types = [challenges.TLSSNI01] def __init__(self, key, clock, ca_key=None, controller=None): self.key = key self._clock = clock self._registered = False self._tos_agreed = None self._authorizations = {} self._challenges = {} self._ca_key = ca_key self._generate_ca_cert() self._paused = False self._waiting = [] if controller is None: controller = FakeClientController() self._controller = controller def _now(self): """ Get the current time. """ return clock_now(self._clock) def _generate_ca_cert(self): """ Generate a CA cert/key. 
""" if self._ca_key is None: self._ca_key = generate_private_key(u'rsa') self._ca_name = x509.Name([ x509.NameAttribute(NameOID.COMMON_NAME, u'ACME Snake Oil CA')]) self._ca_cert = ( x509.CertificateBuilder() .subject_name(self._ca_name) .issuer_name(self._ca_name) .not_valid_before(self._now() - timedelta(seconds=3600)) .not_valid_after(self._now() + timedelta(days=3650)) .public_key(self._ca_key.public_key()) .serial_number(int(uuid4())) .add_extension( x509.BasicConstraints(ca=True, path_length=0), critical=True) .add_extension( x509.SubjectKeyIdentifier.from_public_key( self._ca_key.public_key()), critical=False) .sign( private_key=self._ca_key, algorithm=hashes.SHA256(), backend=default_backend())) self._ca_aki = x509.AuthorityKeyIdentifier.from_issuer_public_key( self._ca_key.public_key()) def register(self, new_reg=None): self._registered = True if new_reg is None: new_reg = messages.NewRegistration() self.regr = messages.RegistrationResource( body=messages.Registration( contact=new_reg.contact, agreement=new_reg.agreement)) return succeed(self.regr) def agree_to_tos(self, regr): self._tos_agreed = True self.regr = self.regr.update( body=regr.body.update( agreement=regr.terms_of_service)) return succeed(self.regr) def request_challenges(self, identifier): self._authorizations[identifier] = challenges = OrderedDict() for chall_type in self._challenge_types: uuid = unicode(uuid4()) challb = messages.ChallengeBody( chall=chall_type(token=b'token'), uri=uuid, status=messages.STATUS_PENDING) challenges[chall_type] = uuid self._challenges[uuid] = challb return succeed( messages.AuthorizationResource( body=messages.Authorization( identifier=identifier, status=messages.STATUS_PENDING, challenges=[ self._challenges[u] for u in challenges.values()], combinations=[[n] for n in range(len(challenges))]))) def answer_challenge(self, challenge_body, response): challb = self._challenges[challenge_body.uri] challb = challb.update(status=messages.STATUS_VALID) self._challenges[challenge_body.uri] = challb return succeed(challb) def poll(self, authzr): challenges = [ self._challenges[u] for u in self._authorizations[authzr.body.identifier].values()] status = ( messages.STATUS_VALID if any(c.status == messages.STATUS_VALID for c in challenges) else messages.STATUS_PENDING) return succeed( (messages.AuthorizationResource( body=messages.Authorization( status=status, challenges=challenges, combinations=[[n] for n in range(len(challenges))])), 1.0)) def request_issuance(self, csr): csr = csr.csr # TODO: Only in Cryptography 1.3 # assert csr.is_signature_valid cert = ( x509.CertificateBuilder() .subject_name(csr.subject) .issuer_name(self._ca_name) .not_valid_before(self._now() - timedelta(seconds=3600)) .not_valid_after(self._now() + timedelta(days=90)) .serial_number(int(uuid4())) .public_key(csr.public_key()) .add_extension( csr.extensions.get_extension_for_oid( ExtensionOID.SUBJECT_ALTERNATIVE_NAME).value, critical=False) .add_extension( x509.SubjectKeyIdentifier.from_public_key(csr.public_key()), critical=False) .add_extension(self._ca_aki, critical=False) .sign( private_key=self._ca_key, algorithm=hashes.SHA256(), backend=default_backend())) cert_res = messages.CertificateResource( body=cert.public_bytes(encoding=serialization.Encoding.DER)) return self._controller.issue().addCallback(lambda _: cert_res) def fetch_chain(self, certr, max_length=10): return succeed([ messages.CertificateResource( body=self._ca_cert.public_bytes( encoding=serialization.Encoding.DER))]) @implementer(IResponder) @attr.s 
class NullResponder(object): """ A responder that does absolutely nothing. """ challenge_type = attr.ib() def start_responding(self, server_name, challenge, response): pass def stop_responding(self, server_name, challenge, response): pass @implementer(ICertificateStore) class MemoryStore(object): """ A certificate store that keeps certificates in memory only. """ def __init__(self, certs=None): if certs is None: self._store = {} else: self._store = dict(certs) def get(self, server_name): try: return succeed(self._store[server_name]) except KeyError: return fail() def store(self, server_name, pem_objects): self._store[server_name] = pem_objects return succeed(None) def as_dict(self): return succeed(self._store) __all__ = [ 'FakeClient', 'FakeClientController', 'MemoryStore', 'NullResponder'] txacme-0.9.3/src/txacme/urls.py000066400000000000000000000004641364626325400164320ustar00rootroot00000000000000from twisted.python.url import URL LETSENCRYPT_DIRECTORY = URL.fromText( u'https://acme-v01.api.letsencrypt.org/directory') LETSENCRYPT_STAGING_DIRECTORY = URL.fromText( u'https://acme-staging.api.letsencrypt.org/directory') __all__ = ['LETSENCRYPT_DIRECTORY', 'LETSENCRYPT_STAGING_DIRECTORY'] txacme-0.9.3/src/txacme/util.py000066400000000000000000000116141364626325400164210ustar00rootroot00000000000000""" Utility functions that may prove useful when writing an ACME client. """ import uuid from datetime import datetime, timedelta from functools import wraps from josepy.errors import DeserializationError from josepy.json_util import encode_b64jose, decode_b64jose from cryptography import x509 from cryptography.hazmat.backends import default_backend from cryptography.hazmat.primitives import hashes, serialization from cryptography.hazmat.primitives.asymmetric import rsa from cryptography.x509.oid import NameOID from twisted.internet.defer import maybeDeferred from twisted.python.url import URL def generate_private_key(key_type): """ Generate a random private key using sensible parameters. :param str key_type: The type of key to generate. One of: ``rsa``. """ if key_type == u'rsa': return rsa.generate_private_key( public_exponent=65537, key_size=2048, backend=default_backend()) raise ValueError(key_type) def generate_tls_sni_01_cert(server_name, key_type=u'rsa', _generate_private_key=None): """ Generate a certificate/key pair for responding to a tls-sni-01 challenge. :param str server_name: The SAN the certificate should have. :param str key_type: The type of key to generate; usually not necessary. :rtype: ``Tuple[`~cryptography.x509.Certificate`, PrivateKey]`` :return: A tuple of the certificate and private key. """ key = (_generate_private_key or generate_private_key)(key_type) name = x509.Name([ x509.NameAttribute(NameOID.COMMON_NAME, u'acme.invalid')]) cert = ( x509.CertificateBuilder() .subject_name(name) .issuer_name(name) .not_valid_before(datetime.now() - timedelta(seconds=3600)) .not_valid_after(datetime.now() + timedelta(seconds=3600)) .serial_number(int(uuid.uuid4())) .public_key(key.public_key()) .add_extension( x509.SubjectAlternativeName([x509.DNSName(server_name)]), critical=False) .sign( private_key=key, algorithm=hashes.SHA256(), backend=default_backend()) ) return (cert, key) def tap(f): """ "Tap" a Deferred callback chain with a function whose return value is ignored. """ @wraps(f) def _cb(res, *a, **kw): d = maybeDeferred(f, res, *a, **kw) d.addCallback(lambda ignored: res) return d return _cb def encode_csr(csr): """ Encode CSR as JOSE Base-64 DER. 
:param cryptography.x509.CertificateSigningRequest csr: The CSR. :rtype: str """ return encode_b64jose(csr.public_bytes(serialization.Encoding.DER)) def decode_csr(b64der): """ Decode JOSE Base-64 DER-encoded CSR. :param str b64der: The encoded CSR. :rtype: `cryptography.x509.CertificateSigningRequest` :return: The decoded CSR. """ try: return x509.load_der_x509_csr( decode_b64jose(b64der), default_backend()) except ValueError as error: raise DeserializationError(error) def csr_for_names(names, key): """ Generate a certificate signing request for the given names and private key. .. seealso:: `acme.client.Client.request_issuance` .. seealso:: `generate_private_key` :param ``List[str]``: One or more names (subjectAltName) for which to request a certificate. :param key: A Cryptography private key object. :rtype: `cryptography.x509.CertificateSigningRequest` :return: The certificate request message. """ if len(names) == 0: raise ValueError('Must have at least one name') if len(names[0]) > 64: common_name = u'san.too.long.invalid' else: common_name = names[0] return ( x509.CertificateSigningRequestBuilder() .subject_name(x509.Name([ x509.NameAttribute(NameOID.COMMON_NAME, common_name)])) .add_extension( x509.SubjectAlternativeName(list(map(x509.DNSName, names))), critical=False) .sign(key, hashes.SHA256(), default_backend())) def clock_now(clock): """ Get a datetime representing the current time. :param clock: An ``IReactorTime`` provider. :rtype: `~datetime.datetime` :return: A datetime representing the current time. """ return datetime.utcfromtimestamp(clock.seconds()) def check_directory_url_type(url): """ Check that ``url`` is a ``twisted.python.url.URL`` instance, raising `TypeError` if it isn't. """ if not isinstance(url, URL): raise TypeError( 'ACME directory URL should be a twisted.python.url.URL, ' 'got {!r} instead'.format(url)) def const(x): """ Return a constant function. """ return lambda: x __all__ = [ 'generate_private_key', 'generate_tls_sni_01_cert', 'encode_csr', 'decode_csr', 'csr_for_names', 'clock_now', 'check_directory_url_type', 'const', 'tap'] txacme-0.9.3/tox.ini000066400000000000000000000030211364626325400143260ustar00rootroot00000000000000[tox] envlist = coverage-clean,{py27,pypy,py35,py36,py37,pypy3}-{twlatest,twtrunk,twlowest}-{aclatest,acmaster}-alldeps,py27-twlatest,flake8,docs,coverage-report [testenv] setenv = PYTHONWARNINGS = default::DeprecationWarning HYPOTHESIS_PROFILE = coverage whitelist_externals = mkdir deps = .[test] alldeps: .[libcloud] acmaster: https://github.com/certbot/certbot/archive/master.zip#egg=acme&subdirectory=acme twlatest: Twisted[tls] twtrunk: https://github.com/twisted/twisted/archive/trunk.zip#egg=Twisted[tls] twlowest: Twisted[tls]==16.2.0 coverage commands = pip list mkdir -p {envtmpdir} coverage run --parallel-mode \ {envdir}/bin/trial --temp-directory={envtmpdir}/_trial_temp {posargs:txacme integration} [testenv:flake8] basepython = python3.6 deps = flake8 pep8-naming commands = flake8 src setup.py [testenv:coverage-clean] deps = coverage skip_install = true commands = coverage erase [testenv:coverage-report] deps = coverage diff_cover skip_install = true commands = coverage combine coverage report coverage xml -o {envtmpdir}/coverage.xml diff-cover {envtmpdir}/coverage.xml [testenv:docs] whitelist_externals = rm test cat changedir = docs deps = -rrequirements-doc.txt commands = rm -rf {toxinidir}/docs/api/ rm -f {envtmpdir}/errors sphinx-build -W -w {envtmpdir}/errors --keep-going \ -n -b html -d {envtmpdir}/doctrees . 
{envtmpdir}/html cat {envtmpdir}/errors test ! -s {envtmpdir}/errors txacme-0.9.3/versioneer.py000066400000000000000000002060031364626325400155530ustar00rootroot00000000000000 # Version: 0.18 """The Versioneer - like a rocketeer, but for versions. The Versioneer ============== * like a rocketeer, but for versions! * https://github.com/warner/python-versioneer * Brian Warner * License: Public Domain * Compatible With: python2.6, 2.7, 3.2, 3.3, 3.4, 3.5, 3.6, and pypy * [![Latest Version] (https://pypip.in/version/versioneer/badge.svg?style=flat) ](https://pypi.python.org/pypi/versioneer/) * [![Build Status] (https://travis-ci.org/warner/python-versioneer.png?branch=master) ](https://travis-ci.org/warner/python-versioneer) This is a tool for managing a recorded version number in distutils-based python projects. The goal is to remove the tedious and error-prone "update the embedded version string" step from your release process. Making a new release should be as easy as recording a new tag in your version-control system, and maybe making new tarballs. ## Quick Install * `pip install versioneer` to somewhere to your $PATH * add a `[versioneer]` section to your setup.cfg (see below) * run `versioneer install` in your source tree, commit the results ## Version Identifiers Source trees come from a variety of places: * a version-control system checkout (mostly used by developers) * a nightly tarball, produced by build automation * a snapshot tarball, produced by a web-based VCS browser, like github's "tarball from tag" feature * a release tarball, produced by "setup.py sdist", distributed through PyPI Within each source tree, the version identifier (either a string or a number, this tool is format-agnostic) can come from a variety of places: * ask the VCS tool itself, e.g. "git describe" (for checkouts), which knows about recent "tags" and an absolute revision-id * the name of the directory into which the tarball was unpacked * an expanded VCS keyword ($Id$, etc) * a `_version.py` created by some earlier build step For released software, the version identifier is closely related to a VCS tag. Some projects use tag names that include more than just the version string (e.g. "myproject-1.2" instead of just "1.2"), in which case the tool needs to strip the tag prefix to extract the version identifier. For unreleased software (between tags), the version identifier should provide enough information to help developers recreate the same tree, while also giving them an idea of roughly how old the tree is (after version 1.2, before version 1.3). Many VCS systems can report a description that captures this, for example `git describe --tags --dirty --always` reports things like "0.7-1-g574ab98-dirty" to indicate that the checkout is one revision past the 0.7 tag, has a unique revision id of "574ab98", and is "dirty" (it has uncommitted changes. The version identifier is used for multiple purposes: * to allow the module to self-identify its version: `myproject.__version__` * to choose a name and prefix for a 'setup.py sdist' tarball ## Theory of Operation Versioneer works by adding a special `_version.py` file into your source tree, where your `__init__.py` can import it. This `_version.py` knows how to dynamically ask the VCS tool for version information at import time. `_version.py` also contains `$Revision$` markers, and the installation process marks `_version.py` to have this marker rewritten with a tag name during the `git archive` command. 
As a result, generated tarballs will contain enough information to get the proper version. To allow `setup.py` to compute a version too, a `versioneer.py` is added to the top level of your source tree, next to `setup.py` and the `setup.cfg` that configures it. This overrides several distutils/setuptools commands to compute the version when invoked, and changes `setup.py build` and `setup.py sdist` to replace `_version.py` with a small static file that contains just the generated version data. ## Installation See [INSTALL.md](./INSTALL.md) for detailed installation instructions. ## Version-String Flavors Code which uses Versioneer can learn about its version string at runtime by importing `_version` from your main `__init__.py` file and running the `get_versions()` function. From the "outside" (e.g. in `setup.py`), you can import the top-level `versioneer.py` and run `get_versions()`. Both functions return a dictionary with different flavors of version information: * `['version']`: A condensed version string, rendered using the selected style. This is the most commonly used value for the project's version string. The default "pep440" style yields strings like `0.11`, `0.11+2.g1076c97`, or `0.11+2.g1076c97.dirty`. See the "Styles" section below for alternative styles. * `['full-revisionid']`: detailed revision identifier. For Git, this is the full SHA1 commit id, e.g. "1076c978a8d3cfc70f408fe5974aa6c092c949ac". * `['date']`: Date and time of the latest `HEAD` commit. For Git, it is the commit date in ISO 8601 format. This will be None if the date is not available. * `['dirty']`: a boolean, True if the tree has uncommitted changes. Note that this is only accurate if run in a VCS checkout, otherwise it is likely to be False or None * `['error']`: if the version string could not be computed, this will be set to a string describing the problem, otherwise it will be None. It may be useful to throw an exception in setup.py if this is set, to avoid e.g. creating tarballs with a version string of "unknown". Some variants are more useful than others. Including `full-revisionid` in a bug report should allow developers to reconstruct the exact code being tested (or indicate the presence of local changes that should be shared with the developers). `version` is suitable for display in an "about" box or a CLI `--version` output: it can be easily compared against release notes and lists of bugs fixed in various releases. The installer adds the following text to your `__init__.py` to place a basic version in `YOURPROJECT.__version__`: from ._version import get_versions __version__ = get_versions()['version'] del get_versions ## Styles The setup.cfg `style=` configuration controls how the VCS information is rendered into a version string. The default style, "pep440", produces a PEP440-compliant string, equal to the un-prefixed tag name for actual releases, and containing an additional "local version" section with more detail for in-between builds. For Git, this is TAG[+DISTANCE.gHEX[.dirty]] , using information from `git describe --tags --dirty --always`. For example "0.11+2.g1076c97.dirty" indicates that the tree is like the "1076c97" commit but has uncommitted changes (".dirty"), and that this commit is two revisions ("+2") beyond the "0.11" tag. For released software (exactly equal to a known tag), the identifier will only contain the stripped tag, e.g. "0.11". Other styles are available. See [details.md](details.md) in the Versioneer source tree for descriptions. 
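As a quick illustration of the flavors and styles described above (this snippet is a sketch, not part of Versioneer itself), a project that has already run `versioneer install` could inspect the returned dictionary roughly like this; it assumes it is executed from a project root that contains a configured setup.cfg:

    # Sketch only: run from the project root, next to setup.cfg and the
    # installed versioneer.py.
    import versioneer

    info = versioneer.get_versions()
    print(info["version"])          # e.g. "0.11+2.g1076c97.dirty"
    print(info["full-revisionid"])  # full SHA1 commit id, or None
    if info["error"] is not None:
        raise SystemExit("version could not be computed: " + info["error"])
    if info["dirty"]:
        print("working tree has uncommitted changes")
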
## Debugging Versioneer tries to avoid fatal errors: if something goes wrong, it will tend to return a version of "0+unknown". To investigate the problem, run `setup.py version`, which will run the version-lookup code in a verbose mode, and will display the full contents of `get_versions()` (including the `error` string, which may help identify what went wrong). ## Known Limitations Some situations are known to cause problems for Versioneer. This details the most significant ones. More can be found on Github [issues page](https://github.com/warner/python-versioneer/issues). ### Subprojects Versioneer has limited support for source trees in which `setup.py` is not in the root directory (e.g. `setup.py` and `.git/` are *not* siblings). The are two common reasons why `setup.py` might not be in the root: * Source trees which contain multiple subprojects, such as [Buildbot](https://github.com/buildbot/buildbot), which contains both "master" and "slave" subprojects, each with their own `setup.py`, `setup.cfg`, and `tox.ini`. Projects like these produce multiple PyPI distributions (and upload multiple independently-installable tarballs). * Source trees whose main purpose is to contain a C library, but which also provide bindings to Python (and perhaps other langauges) in subdirectories. Versioneer will look for `.git` in parent directories, and most operations should get the right version string. However `pip` and `setuptools` have bugs and implementation details which frequently cause `pip install .` from a subproject directory to fail to find a correct version string (so it usually defaults to `0+unknown`). `pip install --editable .` should work correctly. `setup.py install` might work too. Pip-8.1.1 is known to have this problem, but hopefully it will get fixed in some later version. [Bug #38](https://github.com/warner/python-versioneer/issues/38) is tracking this issue. The discussion in [PR #61](https://github.com/warner/python-versioneer/pull/61) describes the issue from the Versioneer side in more detail. [pip PR#3176](https://github.com/pypa/pip/pull/3176) and [pip PR#3615](https://github.com/pypa/pip/pull/3615) contain work to improve pip to let Versioneer work correctly. Versioneer-0.16 and earlier only looked for a `.git` directory next to the `setup.cfg`, so subprojects were completely unsupported with those releases. ### Editable installs with setuptools <= 18.5 `setup.py develop` and `pip install --editable .` allow you to install a project into a virtualenv once, then continue editing the source code (and test) without re-installing after every change. "Entry-point scripts" (`setup(entry_points={"console_scripts": ..})`) are a convenient way to specify executable scripts that should be installed along with the python package. These both work as expected when using modern setuptools. When using setuptools-18.5 or earlier, however, certain operations will cause `pkg_resources.DistributionNotFound` errors when running the entrypoint script, which must be resolved by re-installing the package. This happens when the install happens with one version, then the egg_info data is regenerated while a different version is checked out. Many setup.py commands cause egg_info to be rebuilt (including `sdist`, `wheel`, and installing into a different virtualenv), so this can be surprising. [Bug #83](https://github.com/warner/python-versioneer/issues/83) describes this one, but upgrading to a newer version of setuptools should probably resolve it. 
### Unicode version strings While Versioneer works (and is continually tested) with both Python 2 and Python 3, it is not entirely consistent with bytes-vs-unicode distinctions. Newer releases probably generate unicode version strings on py2. It's not clear that this is wrong, but it may be surprising for applications when then write these strings to a network connection or include them in bytes-oriented APIs like cryptographic checksums. [Bug #71](https://github.com/warner/python-versioneer/issues/71) investigates this question. ## Updating Versioneer To upgrade your project to a new release of Versioneer, do the following: * install the new Versioneer (`pip install -U versioneer` or equivalent) * edit `setup.cfg`, if necessary, to include any new configuration settings indicated by the release notes. See [UPGRADING](./UPGRADING.md) for details. * re-run `versioneer install` in your source tree, to replace `SRC/_version.py` * commit any changed files ## Future Directions This tool is designed to make it easily extended to other version-control systems: all VCS-specific components are in separate directories like src/git/ . The top-level `versioneer.py` script is assembled from these components by running make-versioneer.py . In the future, make-versioneer.py will take a VCS name as an argument, and will construct a version of `versioneer.py` that is specific to the given VCS. It might also take the configuration arguments that are currently provided manually during installation by editing setup.py . Alternatively, it might go the other direction and include code from all supported VCS systems, reducing the number of intermediate scripts. ## License To make Versioneer easier to embed, all its code is dedicated to the public domain. The `_version.py` that it creates is also in the public domain. Specifically, both are released under the Creative Commons "Public Domain Dedication" license (CC0-1.0), as described in https://creativecommons.org/publicdomain/zero/1.0/ . """ from __future__ import print_function try: import configparser except ImportError: import ConfigParser as configparser import errno import json import os import re import subprocess import sys class VersioneerConfig: """Container for Versioneer configuration parameters.""" def get_root(): """Get the project root directory. We require that all commands are run from the project root, i.e. the directory that contains setup.py, setup.cfg, and versioneer.py . """ root = os.path.realpath(os.path.abspath(os.getcwd())) setup_py = os.path.join(root, "setup.py") versioneer_py = os.path.join(root, "versioneer.py") if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): # allow 'python path/to/setup.py COMMAND' root = os.path.dirname(os.path.realpath(os.path.abspath(sys.argv[0]))) setup_py = os.path.join(root, "setup.py") versioneer_py = os.path.join(root, "versioneer.py") if not (os.path.exists(setup_py) or os.path.exists(versioneer_py)): err = ("Versioneer was unable to run the project root directory. " "Versioneer requires setup.py to be executed from " "its immediate directory (like 'python setup.py COMMAND'), " "or in a way that lets it use sys.argv[0] to find the root " "(like 'python path/to/setup.py COMMAND').") raise VersioneerBadRootError(err) try: # Certain runtime workflows (setup.py install/develop in a setuptools # tree) execute all dependencies in a single python process, so # "versioneer" may be imported multiple times, and python's shared # module-import table will cache the first one. 
So we can't use # os.path.dirname(__file__), as that will find whichever # versioneer.py was first imported, even in later projects. me = os.path.realpath(os.path.abspath(__file__)) me_dir = os.path.normcase(os.path.splitext(me)[0]) vsr_dir = os.path.normcase(os.path.splitext(versioneer_py)[0]) if me_dir != vsr_dir: print("Warning: build in %s is using versioneer.py from %s" % (os.path.dirname(me), versioneer_py)) except NameError: pass return root def get_config_from_root(root): """Read the project setup.cfg file to determine Versioneer config.""" # This might raise EnvironmentError (if setup.cfg is missing), or # configparser.NoSectionError (if it lacks a [versioneer] section), or # configparser.NoOptionError (if it lacks "VCS="). See the docstring at # the top of versioneer.py for instructions on writing your setup.cfg . setup_cfg = os.path.join(root, "setup.cfg") parser = configparser.SafeConfigParser() with open(setup_cfg, "r") as f: parser.readfp(f) VCS = parser.get("versioneer", "VCS") # mandatory def get(parser, name): if parser.has_option("versioneer", name): return parser.get("versioneer", name) return None cfg = VersioneerConfig() cfg.VCS = VCS cfg.style = get(parser, "style") or "" cfg.versionfile_source = get(parser, "versionfile_source") cfg.versionfile_build = get(parser, "versionfile_build") cfg.tag_prefix = get(parser, "tag_prefix") if cfg.tag_prefix in ("''", '""'): cfg.tag_prefix = "" cfg.parentdir_prefix = get(parser, "parentdir_prefix") cfg.verbose = get(parser, "verbose") return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" # these dictionaries contain VCS-specific tools LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator """Decorator to mark a method as the handler for a particular VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %s" % (commands,)) return None, None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) print("stdout was %s" % stdout) return None, p.returncode return stdout, p.returncode LONG_VERSION_PY['git'] = ''' # This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. 
Generated by # versioneer-0.18 (https://github.com/warner/python-versioneer) """Git implementation of _version.py.""" import errno import os import re import subprocess import sys def get_keywords(): """Get the keywords needed to look up the version information.""" # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). git_refnames = "%(DOLLAR)sFormat:%%d%(DOLLAR)s" git_full = "%(DOLLAR)sFormat:%%H%(DOLLAR)s" git_date = "%(DOLLAR)sFormat:%%ci%(DOLLAR)s" keywords = {"refnames": git_refnames, "full": git_full, "date": git_date} return keywords class VersioneerConfig: """Container for Versioneer configuration parameters.""" def get_config(): """Create, populate and return the VersioneerConfig() object.""" # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "%(STYLE)s" cfg.tag_prefix = "%(TAG_PREFIX)s" cfg.parentdir_prefix = "%(PARENTDIR_PREFIX)s" cfg.versionfile_source = "%(VERSIONFILE_SOURCE)s" cfg.verbose = False return cfg class NotThisMethod(Exception): """Exception raised if a method is not valid for the current scenario.""" LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator """Decorator to mark a method as the handler for a particular VCS.""" def decorate(f): """Store f in HANDLERS[vcs][method].""" if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False, env=None): """Call the given command(s).""" assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, env=env, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %%s" %% dispcmd) print(e) return None, None else: if verbose: print("unable to find command, tried %%s" %% (commands,)) return None, None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %%s (error)" %% dispcmd) print("stdout was %%s" %% stdout) return None, p.returncode return stdout, p.returncode def versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. 
We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} else: rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %%s but none started with prefix %%s" %% (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # git-2.2.0 added "%%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %%d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%%s', no digits" %% ",".join(refs - tags)) if verbose: print("likely tags: %%s" %% ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %%s" %% r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. """ GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %%s not under git control" %% root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%%s*" %% tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%%s'" %% describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%%s' doesn't start with prefix '%%s'" print(fmt %% (full_tag, tag_prefix)) pieces["error"] = ("tag '%%s' doesn't start with prefix '%%s'" %% (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%%ci", "HEAD"], cwd=root)[0].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." 
return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%%d.g%%s" %% (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%%d.g%%s" %% (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): """TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 0.post.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%%d" %% pieces["distance"] else: # exception #1 rendered = "0.post.dev%%d" %% pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%%s" %% pieces["short"] else: # exception #1 rendered = "0.post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%%s" %% pieces["short"] return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Eexceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%%d" %% pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%%d-g%%s" %% (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%%s'" %% style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")} def get_versions(): """Get version information or return default if unable to do so.""" # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree", "date": None} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None} ''' @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): """Extract version information from the given file.""" # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. 
keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) if line.strip().startswith("git_date ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["date"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): """Get version information from git keywords.""" if not keywords: raise NotThisMethod("no keywords at all, weird") date = keywords.get("date") if date is not None: # git-2.2.0 added "%cI", which expands to an ISO-8601 -compliant # datestamp. However we prefer "%ci" (which expands to an "ISO-8601 # -like" string, which we must then edit to make compliant), because # it's been around since git-1.5.3, and it's too difficult to # discover which version we're using, or to work around using an # older one. date = date.strip().replace(" ", "T", 1).replace(" ", "", 1) refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs - tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. "2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None, "date": date} # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags", "date": None} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): """Get version from 'git describe' in the root of the source tree. This only gets called if the git-archive 'subst' keywords were *not* expanded, and _version.py hasn't already been rewritten with a short version string, meaning we're inside a checked out source tree. 
""" GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] out, rc = run_command(GITS, ["rev-parse", "--git-dir"], cwd=root, hide_stderr=True) if rc != 0: if verbose: print("Directory %s not under git control" % root) raise NotThisMethod("'git rev-parse --git-dir' returned error") # if there is a tag matching tag_prefix, this yields TAG-NUM-gHEX[-dirty] # if there isn't one, this yields HEX[-dirty] (no NUM) describe_out, rc = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long", "--match", "%s*" % tag_prefix], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out, rc = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out, rc = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits # commit date: see ISO-8601 comment in git_versions_from_keywords() date = run_command(GITS, ["show", "-s", "--format=%ci", "HEAD"], cwd=root)[0].strip() pieces["date"] = date.strip().replace(" ", "T", 1).replace(" ", "", 1) return pieces def do_vcs_install(manifest_in, versionfile_source, ipy): """Git-specific installation logic for Versioneer. For Git, this means creating/changing .gitattributes to mark _version.py for export-subst keyword substitution. 
""" GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] files = [manifest_in, versionfile_source] if ipy: files.append(ipy) try: me = __file__ if me.endswith(".pyc") or me.endswith(".pyo"): me = os.path.splitext(me)[0] + ".py" versioneer_file = os.path.relpath(me) except NameError: versioneer_file = "versioneer.py" files.append(versioneer_file) present = False try: f = open(".gitattributes", "r") for line in f.readlines(): if line.strip().startswith(versionfile_source): if "export-subst" in line.strip().split()[1:]: present = True f.close() except EnvironmentError: pass if not present: f = open(".gitattributes", "a+") f.write("%s export-subst\n" % versionfile_source) f.close() files.append(".gitattributes") run_command(GITS, ["add", "--"] + files) def versions_from_parentdir(parentdir_prefix, root, verbose): """Try to determine the version from the parent directory name. Source tarballs conventionally unpack into a directory that includes both the project name and a version string. We will also support searching up two directory levels for an appropriately named parent directory """ rootdirs = [] for i in range(3): dirname = os.path.basename(root) if dirname.startswith(parentdir_prefix): return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None, "date": None} else: rootdirs.append(root) root = os.path.dirname(root) # up a level if verbose: print("Tried directories %s but none started with prefix %s" % (str(rootdirs), parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") SHORT_VERSION_PY = """ # This file was generated by 'versioneer.py' (0.18) from # revision-control system data, or from the parent directory name of an # unpacked source archive. Distribution tarballs contain a pre-generated copy # of this file. import json version_json = ''' %s ''' # END VERSION_JSON def get_versions(): return json.loads(version_json) """ def versions_from_file(filename): """Try to determine the version from _version.py if present.""" try: with open(filename) as f: contents = f.read() except EnvironmentError: raise NotThisMethod("unable to read _version.py") mo = re.search(r"version_json = '''\n(.*)''' # END VERSION_JSON", contents, re.M | re.S) if not mo: mo = re.search(r"version_json = '''\r\n(.*)''' # END VERSION_JSON", contents, re.M | re.S) if not mo: raise NotThisMethod("no version_json in _version.py") return json.loads(mo.group(1)) def write_to_version_file(filename, versions): """Write the given version number to the given _version.py file.""" os.unlink(filename) contents = json.dumps(versions, sort_keys=True, indent=1, separators=(",", ": ")) with open(filename, "w") as f: f.write(SHORT_VERSION_PY % contents) print("set %s to '%s'" % (filename, versions["version"])) def plus_or_dot(pieces): """Return a + if we don't already have one, else return a .""" if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): """Build up version string, with post-release "local version identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty Exceptions: 1: no tags. git_describe was just HEX. 
0+untagged.DISTANCE.gHEX[.dirty] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): """TAG[.post.devDISTANCE] -- No -dirty. Exceptions: 1: no tags. 0.post.devDISTANCE """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces): """TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that .dev0 sorts backwards (a dirty tree will appear "older" than the corresponding clean one), but you shouldn't be releasing software with -dirty anyways. Exceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%s" % pieces["short"] return rendered def render_pep440_old(pieces): """TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. Eexceptions: 1: no tags. 0.postDISTANCE[.dev0] """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): """TAG[-DISTANCE-gHEX][-dirty]. Like 'git describe --tags --dirty --always'. Exceptions: 1: no tags. HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): """TAG-DISTANCE-gHEX[-dirty]. Like 'git describe --tags --dirty --always -long'. The distance/hash is unconditional. Exceptions: 1: no tags. 
HEX[-dirty] (note: no 'g' prefix) """ if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): """Render the given version pieces into the requested style.""" if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"], "date": None} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%s'" % style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None, "date": pieces.get("date")} class VersioneerBadRootError(Exception): """The project root directory is unknown or missing key files.""" def get_versions(verbose=False): """Get the project version from whatever source is available. Returns dict with two keys: 'version' and 'full'. """ if "versioneer" in sys.modules: # see the discussion in cmdclass.py:get_cmdclass() del sys.modules["versioneer"] root = get_root() cfg = get_config_from_root(root) assert cfg.VCS is not None, "please set [versioneer]VCS= in setup.cfg" handlers = HANDLERS.get(cfg.VCS) assert handlers, "unrecognized VCS '%s'" % cfg.VCS verbose = verbose or cfg.verbose assert cfg.versionfile_source is not None, \ "please set versioneer.versionfile_source" assert cfg.tag_prefix is not None, "please set versioneer.tag_prefix" versionfile_abs = os.path.join(root, cfg.versionfile_source) # extract version from first of: _version.py, VCS command (e.g. 'git # describe'), parentdir. This is meant to work for developers using a # source checkout, for users of a tarball created by 'setup.py sdist', # and for users of a tarball/zipball created by 'git archive' or github's # download-from-tag feature or the equivalent in other VCSes. 
get_keywords_f = handlers.get("get_keywords") from_keywords_f = handlers.get("keywords") if get_keywords_f and from_keywords_f: try: keywords = get_keywords_f(versionfile_abs) ver = from_keywords_f(keywords, cfg.tag_prefix, verbose) if verbose: print("got version from expanded keyword %s" % ver) return ver except NotThisMethod: pass try: ver = versions_from_file(versionfile_abs) if verbose: print("got version from file %s %s" % (versionfile_abs, ver)) return ver except NotThisMethod: pass from_vcs_f = handlers.get("pieces_from_vcs") if from_vcs_f: try: pieces = from_vcs_f(cfg.tag_prefix, root, verbose) ver = render(pieces, cfg.style) if verbose: print("got version from VCS %s" % ver) return ver except NotThisMethod: pass try: if cfg.parentdir_prefix: ver = versions_from_parentdir(cfg.parentdir_prefix, root, verbose) if verbose: print("got version from parentdir %s" % ver) return ver except NotThisMethod: pass if verbose: print("unable to compute version") return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version", "date": None} def get_version(): """Get the short version string for this project.""" return get_versions()["version"] def get_cmdclass(): """Get the custom setuptools/distutils subclasses used by Versioneer.""" if "versioneer" in sys.modules: del sys.modules["versioneer"] # this fixes the "python setup.py develop" case (also 'install' and # 'easy_install .'), in which subdependencies of the main project are # built (using setup.py bdist_egg) in the same python process. Assume # a main project A and a dependency B, which use different versions # of Versioneer. A's setup.py imports A's Versioneer, leaving it in # sys.modules by the time B's setup.py is executed, causing B to run # with the wrong versioneer. Setuptools wraps the sub-dep builds in a # sandbox that restores sys.modules to it's pre-build state, so the # parent is protected against the child's "import versioneer". By # removing ourselves from sys.modules here, before the child build # happens, we protect the child from the parent's versioneer too. # Also see https://github.com/warner/python-versioneer/issues/52 cmds = {} # we add "version" to both distutils and setuptools from distutils.core import Command class cmd_version(Command): description = "report generated version string" user_options = [] boolean_options = [] def initialize_options(self): pass def finalize_options(self): pass def run(self): vers = get_versions(verbose=True) print("Version: %s" % vers["version"]) print(" full-revisionid: %s" % vers.get("full-revisionid")) print(" dirty: %s" % vers.get("dirty")) print(" date: %s" % vers.get("date")) if vers["error"]: print(" error: %s" % vers["error"]) cmds["version"] = cmd_version # we override "build_py" in both distutils and setuptools # # most invocation pathways end up running build_py: # distutils/build -> build_py # distutils/install -> distutils/build ->.. # setuptools/bdist_wheel -> distutils/install ->.. # setuptools/bdist_egg -> distutils/install_lib -> build_py # setuptools/install -> bdist_egg ->.. # setuptools/develop -> ? # pip install: # copies source tree to a tempdir before running egg_info/etc # if .git isn't copied too, 'git describe' will fail # then does setup.py bdist_wheel, or sometimes setup.py install # setup.py egg_info -> ? 
# we override different "build_py" commands for both environments if "setuptools" in sys.modules: from setuptools.command.build_py import build_py as _build_py else: from distutils.command.build_py import build_py as _build_py class cmd_build_py(_build_py): def run(self): root = get_root() cfg = get_config_from_root(root) versions = get_versions() _build_py.run(self) # now locate _version.py in the new build/ directory and replace # it with an updated value if cfg.versionfile_build: target_versionfile = os.path.join(self.build_lib, cfg.versionfile_build) print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) cmds["build_py"] = cmd_build_py if "cx_Freeze" in sys.modules: # cx_freeze enabled? from cx_Freeze.dist import build_exe as _build_exe # nczeczulin reports that py2exe won't like the pep440-style string # as FILEVERSION, but it can be used for PRODUCTVERSION, e.g. # setup(console=[{ # "version": versioneer.get_version().split("+", 1)[0], # FILEVERSION # "product_version": versioneer.get_version(), # ... class cmd_build_exe(_build_exe): def run(self): root = get_root() cfg = get_config_from_root(root) versions = get_versions() target_versionfile = cfg.versionfile_source print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) _build_exe.run(self) os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write(LONG % {"DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, }) cmds["build_exe"] = cmd_build_exe del cmds["build_py"] if 'py2exe' in sys.modules: # py2exe enabled? try: from py2exe.distutils_buildexe import py2exe as _py2exe # py3 except ImportError: from py2exe.build_exe import py2exe as _py2exe # py2 class cmd_py2exe(_py2exe): def run(self): root = get_root() cfg = get_config_from_root(root) versions = get_versions() target_versionfile = cfg.versionfile_source print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, versions) _py2exe.run(self) os.unlink(target_versionfile) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write(LONG % {"DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, }) cmds["py2exe"] = cmd_py2exe # we override different "sdist" commands for both environments if "setuptools" in sys.modules: from setuptools.command.sdist import sdist as _sdist else: from distutils.command.sdist import sdist as _sdist class cmd_sdist(_sdist): def run(self): versions = get_versions() self._versioneer_generated_versions = versions # unless we update this, the command will keep using the old # version self.distribution.metadata.version = versions["version"] return _sdist.run(self) def make_release_tree(self, base_dir, files): root = get_root() cfg = get_config_from_root(root) _sdist.make_release_tree(self, base_dir, files) # now locate _version.py in the new base_dir directory # (remembering that it may be a hardlink) and replace it with an # updated value target_versionfile = os.path.join(base_dir, cfg.versionfile_source) print("UPDATING %s" % target_versionfile) write_to_version_file(target_versionfile, self._versioneer_generated_versions) cmds["sdist"] = cmd_sdist return cmds CONFIG_ERROR = """ setup.cfg is missing the necessary Versioneer configuration. 
You need a section like: [versioneer] VCS = git style = pep440 versionfile_source = src/myproject/_version.py versionfile_build = myproject/_version.py tag_prefix = parentdir_prefix = myproject- You will also need to edit your setup.py to use the results: import versioneer setup(version=versioneer.get_version(), cmdclass=versioneer.get_cmdclass(), ...) Please read the docstring in ./versioneer.py for configuration instructions, edit setup.cfg, and re-run the installer or 'python versioneer.py setup'. """ SAMPLE_CONFIG = """ # See the docstring in versioneer.py for instructions. Note that you must # re-run 'versioneer.py setup' after changing this section, and commit the # resulting files. [versioneer] #VCS = git #style = pep440 #versionfile_source = #versionfile_build = #tag_prefix = #parentdir_prefix = """ INIT_PY_SNIPPET = """ from ._version import get_versions __version__ = get_versions()['version'] del get_versions """ def do_setup(): """Main VCS-independent setup function for installing Versioneer.""" root = get_root() try: cfg = get_config_from_root(root) except (EnvironmentError, configparser.NoSectionError, configparser.NoOptionError) as e: if isinstance(e, (EnvironmentError, configparser.NoSectionError)): print("Adding sample versioneer config to setup.cfg", file=sys.stderr) with open(os.path.join(root, "setup.cfg"), "a") as f: f.write(SAMPLE_CONFIG) print(CONFIG_ERROR, file=sys.stderr) return 1 print(" creating %s" % cfg.versionfile_source) with open(cfg.versionfile_source, "w") as f: LONG = LONG_VERSION_PY[cfg.VCS] f.write(LONG % {"DOLLAR": "$", "STYLE": cfg.style, "TAG_PREFIX": cfg.tag_prefix, "PARENTDIR_PREFIX": cfg.parentdir_prefix, "VERSIONFILE_SOURCE": cfg.versionfile_source, }) ipy = os.path.join(os.path.dirname(cfg.versionfile_source), "__init__.py") if os.path.exists(ipy): try: with open(ipy, "r") as f: old = f.read() except EnvironmentError: old = "" if INIT_PY_SNIPPET not in old: print(" appending to %s" % ipy) with open(ipy, "a") as f: f.write(INIT_PY_SNIPPET) else: print(" %s unmodified" % ipy) else: print(" %s doesn't exist, ok" % ipy) ipy = None # Make sure both the top-level "versioneer.py" and versionfile_source # (PKG/_version.py, used by runtime code) are in MANIFEST.in, so # they'll be copied into source distributions. Pip won't be able to # install the package without this. manifest_in = os.path.join(root, "MANIFEST.in") simple_includes = set() try: with open(manifest_in, "r") as f: for line in f: if line.startswith("include "): for include in line.split()[1:]: simple_includes.add(include) except EnvironmentError: pass # That doesn't cover everything MANIFEST.in can do # (http://docs.python.org/2/distutils/sourcedist.html#commands), so # it might give some false negatives. Appending redundant 'include' # lines is safe, though. if "versioneer.py" not in simple_includes: print(" appending 'versioneer.py' to MANIFEST.in") with open(manifest_in, "a") as f: f.write("include versioneer.py\n") else: print(" 'versioneer.py' already in MANIFEST.in") if cfg.versionfile_source not in simple_includes: print(" appending versionfile_source ('%s') to MANIFEST.in" % cfg.versionfile_source) with open(manifest_in, "a") as f: f.write("include %s\n" % cfg.versionfile_source) else: print(" versionfile_source already in MANIFEST.in") # Make VCS-specific changes. For git, this means creating/changing # .gitattributes to mark _version.py for export-subst keyword # substitution. 
do_vcs_install(manifest_in, cfg.versionfile_source, ipy) return 0 def scan_setup_py(): """Validate the contents of setup.py against Versioneer's expectations.""" found = set() setters = False errors = 0 with open("setup.py", "r") as f: for line in f.readlines(): if "import versioneer" in line: found.add("import") if "versioneer.get_cmdclass()" in line: found.add("cmdclass") if "versioneer.get_version()" in line: found.add("get_version") if "versioneer.VCS" in line: setters = True if "versioneer.versionfile_source" in line: setters = True if len(found) != 3: print("") print("Your setup.py appears to be missing some important items") print("(but I might be wrong). Please make sure it has something") print("roughly like the following:") print("") print(" import versioneer") print(" setup( version=versioneer.get_version(),") print(" cmdclass=versioneer.get_cmdclass(), ...)") print("") errors += 1 if setters: print("You should remove lines like 'versioneer.VCS = ' and") print("'versioneer.versionfile_source = ' . This configuration") print("now lives in setup.cfg, and should be removed from setup.py") print("") errors += 1 return errors if __name__ == "__main__": cmd = sys.argv[1] if cmd == "setup": errors = do_setup() errors += scan_setup_py() if errors: sys.exit(1)
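# ---------------------------------------------------------------------------
# Illustrative sketch, not part of versioneer.py or of txacme itself: a
# minimal setup.py of the shape that scan_setup_py() above checks for (it
# greps for "import versioneer", "versioneer.get_version()" and
# "versioneer.get_cmdclass()").  The project name and the "src/" package
# layout are hypothetical placeholders; a real project takes its values from
# its own setup.cfg [versioneer] section and places this code in a separate
# setup.py next to setup.cfg and versioneer.py.

import versioneer
from setuptools import find_packages, setup

setup(
    name="myproject",                    # hypothetical project name
    version=versioneer.get_version(),    # version string computed by Versioneer
    cmdclass=versioneer.get_cmdclass(),  # wraps build_py, sdist, version, etc.
    packages=find_packages("src"),
    package_dir={"": "src"},
)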