borgmatic/.bandit:

[bandit]
exclude=tests
skips=S105,S404

borgmatic/.dockerignore:

.git
.tox

borgmatic/.eleventy.js:

const pluginSyntaxHighlight = require("@11ty/eleventy-plugin-syntaxhighlight");
const codeClipboard = require("eleventy-plugin-code-clipboard");
const inclusiveLangPlugin = require("@11ty/eleventy-plugin-inclusive-language");
const navigationPlugin = require("@11ty/eleventy-navigation");

module.exports = function(eleventyConfig) {
  eleventyConfig.addPlugin(pluginSyntaxHighlight);
  eleventyConfig.addPlugin(inclusiveLangPlugin);
  eleventyConfig.addPlugin(navigationPlugin);
  eleventyConfig.addPlugin(codeClipboard);

  let markdownIt = require("markdown-it");
  let markdownItAnchor = require("markdown-it-anchor");
  let markdownItReplaceLink = require("markdown-it-replace-link");

  let markdownItOptions = {
    html: true,
    breaks: false,
    linkify: true,
    replaceLink: function (link, env) {
      if (process.env.NODE_ENV == "production") {
        return link;
      }

      return link.replace('https://torsion.org/', 'http://localhost:8080/');
    }
  };

  let markdownItAnchorOptions = {
    permalink: markdownItAnchor.permalink.headerLink()
  };

  eleventyConfig.setLibrary(
    "md",
    markdownIt(markdownItOptions)
      .use(markdownItAnchor, markdownItAnchorOptions)
      .use(markdownItReplaceLink)
      .use(codeClipboard.markdownItCopyButton)
  );

  eleventyConfig.addPassthroughCopy({"docs/static": "static"});
  eleventyConfig.setLiquidOptions({dynamicPartials: false});

  return {
    templateFormats: [
      "md",
      "txt"
    ],
  };
};

borgmatic/.gitea/issue_template/bug_template.yaml:

name: "Bug or question/support"
about: "For filing a bug or getting support"
body:
  - type: textarea
    id: problem
    attributes:
      label: What I'm trying to do and why
    validations:
      required: true
  - type: textarea
    id: repro_steps
    attributes:
      label: Steps to reproduce
      description: Include (sanitized) borgmatic configuration files if applicable.
    validations:
      required: false
  - type: textarea
    id: actual_behavior
    attributes:
      label: Actual behavior
      description: Include (sanitized) `--verbosity 2` output if applicable.
    validations:
      required: false
  - type: textarea
    id: expected_behavior
    attributes:
      label: Expected behavior
    validations:
      required: false
  - type: textarea
    id: notes
    attributes:
      label: Other notes / implementation ideas
    validations:
      required: false
  - type: input
    id: borgmatic_version
    attributes:
      label: borgmatic version
      description: Use `sudo borgmatic --version` or `sudo pip show borgmatic | grep ^Version`
    validations:
      required: false
  - type: input
    id: borgmatic_install_method
    attributes:
      label: borgmatic installation method
      description: e.g., pip install, Debian package, container, etc.
    validations:
      required: false
  - type: input
    id: borg_version
    attributes:
      label: Borg version
      description: Use `sudo borg --version`
    validations:
      required: false
  - type: input
    id: python_version
    attributes:
      label: Python version
      description: Use `python3 --version`
    validations:
      required: false
  - type: input
    id: database_version
    attributes:
      label: Database version (if applicable)
      description: Use `psql --version` / `mysql --version` / `mongodump --version` / `sqlite3 --version`
    validations:
      required: false
  - type: input
    id: operating_system_version
    attributes:
      label: Operating system and version
      description: On Linux, use `cat /etc/os-release`
    validations:
      required: false

borgmatic/.gitea/issue_template/config.yaml:

blank_issues_enabled: true

borgmatic/.gitea/issue_template/feature_template.yaml:

name: "Feature"
about: "For filing a feature request or idea"
body:
  - type: textarea
    id: request
    attributes:
      label: What I'd like to do and why
    validations:
      required: true
  - type: textarea
    id: notes
    attributes:
      label: Other notes / implementation ideas
    validations:
      required: false

borgmatic/.gitea/pull_request_template.md:

---
name: "Pull Request"
about: "Pull Request"
---

borgmatic/.gitea/workflows/build.yaml:

name: build
run-name: ${{ gitea.actor }} is building

on:
  push:
    branches: [main]
  pull_request:
    branches: [main]

jobs:
  test:
    runs-on: host
    steps:
      - uses: actions/checkout@v4
      - run: scripts/run-end-to-end-tests

  docs:
    needs: [test]
    runs-on: host
    if: gitea.event_name == 'push'
    env:
      IMAGE_NAME: projects.torsion.org/borgmatic-collective/borgmatic:docs
    steps:
      - uses: actions/checkout@v4
      - run: podman login --username "$USERNAME" --password "$PASSWORD" projects.torsion.org
        env:
          USERNAME: "${{ secrets.REGISTRY_USERNAME }}"
          PASSWORD: "${{ secrets.REGISTRY_PASSWORD }}"
      - run: podman build --tag "$IMAGE_NAME" --file docs/Dockerfile --storage-opt "overlay.mount_program=/usr/bin/fuse-overlayfs" .
      - run: podman push "$IMAGE_NAME"
      - run: scripts/export-docs-from-image
      - run: curl --user "${{ secrets.REGISTRY_USERNAME }}:${{ secrets.REGISTRY_PASSWORD }}" --upload-file borgmatic-docs.tar.gz https://projects.torsion.org/api/packages/borgmatic-collective/generic/borgmatic-docs/$(head --lines=1 NEWS)/borgmatic-docs.tar.gz

borgmatic/.github/pull_request_template.md:

## Hold up, GitHub users

Thanks for your contribution! Unfortunately, we don't use GitHub pull requests to manage code contributions to this repository (and GitHub doesn't have any way to disable pull requests entirely). Instead, please see:

https://torsion.org/borgmatic/#contributing

... which provides full instructions on how to submit pull requests. You can even use your GitHub account to login.
borgmatic/.gitignore:

*.egg-info
*.pyc
*.swp
.cache
.coverage*
.pytest_cache
.tox
__pycache__
build/
dist/
pip-wheel-metadata/

borgmatic/AUTHORS:

Dan Helfman: Main developer
Alexander Görtz: Python 3 compatibility
Florian Lindner: Logging rewrite
Henning Schroeder: Copy editing
Johannes Feichtner: Support for user hooks
Michele Lazzeri: Custom archive names
Nick Whyte: Support prefix filtering for archive consistency checks
newtonne: Read encryption password from external file
Robin `ypid` Schneider: Support additional options of Borg and add validate-borgmatic-config command
Scott Squires: Custom archive names
Thomas LÉVEIL: Support for a keep_minutely prune option. Support for the --json option

And many others! See the output of "git log".

borgmatic/LICENSE:

GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007

Copyright (C) 2007 Free Software Foundation, Inc.

Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed.

Preamble

The GNU General Public License is a free, copyleft license for software and other kinds of works.

The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too.

When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things.

To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others.

For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights.

Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it.

For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions.

Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so.
This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users.

Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free.

The precise terms and conditions for copying, distribution and modification follow.

TERMS AND CONDITIONS

0. Definitions.

"This License" refers to version 3 of the GNU General Public License.

"Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks.

"The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations.

To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work.

A "covered work" means either the unmodified Program or a work based on the Program.

To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well.

To "convey" a work means any kind of propagation that enables other parties to make or receive copies. Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying.

An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion.

1. Source Code.

The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work.

A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. 
When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures.

4. Conveying Verbatim Copies.

You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program.

You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee.

5. Conveying Modified Source Versions.

You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions:

  a) The work must carry prominent notices stating that you modified it, and giving a relevant date.

  b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices".

  c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it.

  d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so.

A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate.

6. Conveying Non-Source Forms.

You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways:

  a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange.
  b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge.

  c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b.

  d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements.

  e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d.

A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work.

A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product.

"Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made.
If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM).

The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network.

Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying.

7. Additional Terms.

"Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions.

When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms:

  a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or

  b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or

  c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or

  d) Limiting the use for publicity purposes of names of licensors or authors of the material; or

  e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or

  f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors.

All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying.

If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms.

Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way.

8. Termination.

You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11).

However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation.

Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice.

Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10.

9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or run a copy of the Program. Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so.

10. Automatic Licensing of Downstream Recipients.

Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License.

An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts.

You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it.

11. Patents.

A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. The work thus licensed is called the contributor's "contributor version".

A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License.

Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version.

In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party.
If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. "Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid.

If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it.

A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007.

Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law.

12. No Surrender of Others' Freedom.

If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program.

13. Use with the GNU Affero General Public License.

Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work.
The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such.

14. Revised Versions of this License.

The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns.

Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation.

If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program.

Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version.

15. Disclaimer of Warranty.

THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION.

16. Limitation of Liability.

IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES.

17. Interpretation of Sections 15 and 16.

If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee.

END OF TERMS AND CONDITIONS

How to Apply These Terms to Your New Programs

If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms.

To do so, attach the following notices to the program.
It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found.

    {one line to give the program's name and a brief idea of what it does.}
    Copyright (C) {year} {name of author}

    This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version.

    This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.

    You should have received a copy of the GNU General Public License along with this program. If not, see <https://www.gnu.org/licenses/>.

Also add information on how to contact you by electronic and paper mail.

If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode:

    {project} Copyright (C) {year} {fullname}
    This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
    This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details.

The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box".

You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see <https://www.gnu.org/licenses/>.

The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read <https://www.gnu.org/philosophy/why-not-lgpl.html>.

borgmatic/MANIFEST.in:

# This file only applies to the source dist tarball, not the built wheel.
include borgmatic/config/schema.yaml
graft docs
graft sample
graft scripts
graft tests
global-exclude *.py[co]

borgmatic/NEWS:

2.0.11
 * #957: Document borgmatic's limitations around parallelism—both its own and Borg's. See the documentation for more information: https://torsion.org/borgmatic/how-to/make-per-application-backups/#limitations
 * #1165: Fix for when the systemd service directories (RuntimeDirectory and StateDirectory) each contain multiple paths.
 * #1168: Fix for the "list", "info", and "delete" options in "extra_borg_options" being ignored when "--archive" is omitted with Borg 1.x.
 * #1169: Fix for a regression in the ZFS, LVM, and Btrfs hooks in which partial excludes of snapshot paths were ignored.
 * #1170: Fix for an inconsistent log level for Borg's last output line before exiting.
 * #1172: Add an "environment" option to the Sentry monitoring hook.
 * #1176: Fix the "--repository" flag not applying to command hooks.
 * Add a "rename" option to "extra_borg_options" to support passing arbitrary flags to "borg rename" (see the example below).
 * Add documentation on patterns and excludes: https://torsion.org/borgmatic/reference/configuration/patterns-and-excludes/
 * Drop support for Python 3.9, which has been end-of-lifed.
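For illustration, here is a minimal sketch of the new "rename" option in a configuration file. The "extra_borg_options" and "rename" names come from the entries above; the top-level placement and the sample flag are assumptions, so consult the configuration reference for the authoritative schema:

    extra_borg_options:
        # Appended to "borg rename" invocations. "--umask" is just an
        # illustrative Borg common flag, not a recommendation.
        rename: "--umask 0077"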
2.0.10
 * #427: Expand the "extra_borg_options" option to support passing arbitrary Borg flags to every Borg sub-command that borgmatic uses. As part of this, deprecate the "init" option under "extra_borg_options" in favor of "repo_create".
 * #942: Factor reference material out of the documentation how-to guides. This means there's now a whole reference section in the docs! Check it out: https://torsion.org/borgmatic/
 * #973: For the MariaDB and MySQL database hooks, add a "skip_names" option to ignore particular databases when dumping "all" (see the example below).
 * #1150: Fix for a runtime directory error when the "create" action is used with the "--log-json" flag.
 * #1150: Fix for a runtime directory error when the configured patterns contain a global exclude.
 * #1161: Fix a traceback (TypeError) in the "check" action with Python 3.14.
 * #1166: Add a "borg_key_file" option for setting the Borg repository key file path.
 * Add documentation search.
 * Change the URL of the local documentation development server to be more like the production URL.
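As a hedged sketch of the "skip_names" option from #973 above: the option name and purpose come from the entry, while the exact nesting under a database entry is an assumption; see the database hook documentation for the real schema:

    mariadb_databases:
        - name: all
          # Hypothetical system databases to omit when dumping "all".
          skip_names:
              - information_schema
              - performance_schema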
2.0.9
 * #1105: More accurately collect Btrfs subvolumes to snapshot. As part of this, the Btrfs hook no longer uses "findmnt" and the "findmnt_command" option is deprecated.
 * #1123: Add loading of systemd credentials even when running borgmatic outside of a systemd service.
 * #1149: Add support for Python 3.14.
 * #1149: Include automated tests in the source dist tarball uploaded to PyPI.
 * #1151: Fix snapshotting in the ZFS, Btrfs, and LVM hooks to play nicely with the Borg 1.4+ "slashdot" hack within source directory paths.
 * #1152: Fix a regression in the Loki monitoring hook in which log messages weren't sending.
 * #1156: Fix snapshotting in the ZFS, Btrfs, and LVM hooks to snapshot both parent and child volumes/filesystems instead of just the parent. As part of this fix, borgmatic no longer deduplicates patterns except for those containing the borgmatic runtime directory.
 * Fix a traceback (TypeError) regression in the "spot" check when the "local_path" option isn't set.

2.0.8
 * #1114: Document systemd configuration changes for the ZFS filesystem hook.
 * #1116: Add dumping of database containers via their container names, handy for backing up database containers from the host. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#database-client-on-the-host
 * #1116: Add optional database labels to make it easier to find your dumps within a Borg archive.
 * #1118: Fix a bug in which Borg hangs during database backup when different filesystems are in use.
 * #1122: To prevent the user from inadvertently excluding the "bootstrap" action's manifest, always error and exit when the borgmatic runtime directory overlaps with the configured excludes.
 * #1125: Clarify documentation about ZFS, Btrfs, and LVM snapshotting when a separate filesystem is mounted in the source directory. (Spoiler: The separate filesystem doesn't get included in the snapshot.)
 * #1126: Create LVM snapshots as read-write to avoid an error when snapshotting ext4 filesystems with orphaned files that need recovery.
 * #1133: Fix the "spot" check to include borgmatic configuration files that were backed up to support the "bootstrap" action.
 * #1136: For all database hooks, record metadata about the dumps contained within an archive.
 * #1139: Set "borgmatic" as the user agent when connecting to monitoring services.
 * #1146: Fix a broken "create" action and "--archive latest" flag when multiple archives share the same name with Borg 2.
 * Treat configuration file permissions issues as errors instead of warnings.
 * When running tests, use Ruff for faster and more comprehensive code linting and formatting, replacing Flake8, Black, isort, etc.
 * Switch from pipx to uv for installing development tools, and add tox-uv for speeding up test environment creation. See the developer documentation for more information: https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/

2.0.7
 * #1032: Fix a bug in which a Borg archive gets created even when a database hook fails.
 * #1043: Support Btrfs subvolume paths in "source_directories" even when the subvolume is mounted elsewhere.
 * #1048: Ignore Btrfs subvolumes whose read-only status can't be determined.
 * #1083: Add "debug_passphrase"/"display_passphrase" options and a "{unixtime}" placeholder in support of Borg 2 features.
 * #1099: Clarify documentation on command hooks order of execution.
 * #1100: Fix a bug in which "borg --version" failing isn't considered a "fail" state in a command hook.
 * #1108: Fix a bug in which quoted "extra_borg_options" values containing spaces are passed to Borg incorrectly.
 * #1108: Add a "--comment" flag to the "create" action for creating an archive with a comment.
 * Use the Bandit security analysis tool when running tests.
 * SECURITY: Add timeouts to all monitoring hooks to prevent hangs on network requests, e.g. due to a compromised monitoring server holding requests open.
 * SECURITY: For the "spot" check, use a more secure source of randomness when selecting paths to check.

2.0.6
 * #1068: Fix a warning from LVM about leaked file descriptors.
 * #1086: Fix for the "spot" check breaking when the "--progress" flag is used.
 * #1089: Fix for the "spot" check erroring when a checksum command errors.
 * #1091: Fix for the "config generate" action generating invalid configuration when upgrading deprecated command hooks.
 * #1093: Fix for the LVM hook erroring when the "--dry-run" flag is used.
 * #1094: Fix incorrect documentation about customizing Borg exit codes: https://torsion.org/borgmatic/docs/how-to/customize-warnings-and-errors/
 * #1095: Fix for the "spot" check's "xxh64sum_command" option erroring on commands containing spaces.
 * Add support for Borg 2's "s3:" and "b2:" repository URLs, so you can back up to S3 or B2 cloud storage services even without using Rclone.
 * During the "spot" check, truncate log messages containing many file paths.

2.0.5
 * #1033: Add a "password_transport" option to the MariaDB and MySQL database hooks for customizing how borgmatic transmits passwords to the database client.
 * #1078: Add "keep_3monthly" and "keep_13weekly" options for customizing "prune" action archive retention (see the example below).
 * #1078: Add a "use_chunks_archive" option for controlling whether Borg uses its chunks cache directory.
 * #1078: For the "compact" action, pass "--dry-run" through to Borg.
 * #1085: Fix a regression in which the default monitoring verbosity is 0 (warnings only) instead of 1 (info about steps borgmatic is taking). This prevented logs from showing up in monitoring services like Healthchecks unless you had an explicit monitoring verbosity set.
 * Move Mastodon social hosting from Fosstodon to FLOSS.social: https://floss.social/@borgmatic
 * The borgmatic project no longer accepts pull requests on GitHub. But see https://torsion.org/borgmatic/#contributing for how you can still submit pull requests. You can even use your GitHub account to login.
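A minimal sketch of the retention options from #1078 above. The option names come from the entry; their placement alongside the existing "keep_daily"-style prune options is an assumption:

    keep_daily: 7
    # Quarterly retention, with quarters defined as 13 weeks or as 3
    # calendar months respectively (Borg 1.4+ style rules).
    keep_13weekly: 4
    keep_3monthly: 4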
2.0.4
 * #1072: Fix path rewriting for non-root patterns in the ZFS, Btrfs, and LVM hooks.
 * #1073: Clarify the documentation about when an "after: error" command hook runs and how it differs from other hooks: https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/
 * #1075: Fix an incorrect warning about Borg placeholders being unsupported in a command hook.
 * #1080: If the exact same "everything" command hook is present in multiple configuration files, only run it once.

2.0.3
 * #1065: Fix a regression in monitoring hooks in which an error pinged the finish state instead of the fail state.
 * #1066: Add a "states" option to command hooks, so you can optionally skip an "after" hook if borgmatic encounters an error.
 * #1071: Fix an error in the LVM hook when removing a snapshot directory.

2.0.2
 * #1035: Document potential performance issues and workarounds with the ZFS, Btrfs, and LVM hooks: https://torsion.org/borgmatic/docs/how-to/snapshot-your-filesystems/
 * #1053: Display a nicer error message when the "recreate" action encounters an archive that already exists.
 * #1059: Fix a regression in which soft failure exit codes in command hooks were not respected.
 * #1060: Fix action command hooks getting run too many times when multiple borgmatic actions are executed (implicitly or explicitly).
 * #1060: Don't run action command hooks for actions listed in the "skip_actions" option.
 * #1062: Fix a regression that broke environment variable interpolation.
 * #1063: List the configured "when" action names in the log entries for command hooks.

2.0.1
 * #1057: Fix argument parsing to avoid using Python 3.12+ string features. Now borgmatic will work with Python 3.9, 3.10, and 3.11 again.

2.0.0
 * TL;DR: More flexible, completely revamped command hooks. All configuration options settable on the command-line. New configuration options for many command-line flags (including verbosity!). New "key import" and "recreate" actions. Almost everything is backwards compatible—but mind those deprecation warnings!
 * #262: Add a "default_actions" option that supports disabling default actions when borgmatic is run without any command-line arguments.
 * #303: Deprecate the "--override" flag in favor of direct command-line flags for every borgmatic configuration option. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#configuration-overrides
 * #303: Add configuration options that serve as defaults for some (but not all) command-line action flags. For example, each entry in "repositories:" now has an "encryption" option that applies to the "repo-create" action, serving as a default for the "--encryption" flag. See the documentation for more information: https://torsion.org/borgmatic/docs/reference/configuration/
 * #345: Add a "key import" action to import a repository key from backup.
 * #422: Add home directory expansion to file-based and KeePassXC credential hooks.
 * #610: Add a "recreate" action for recreating archives, for instance for retroactively excluding particular files from existing archives.
 * #790, #821: Deprecate all "before_*", "after_*" and "on_error" command hooks in favor of more flexible "commands:" (see the sketch below). See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/
 * #790: BREAKING: For both new and deprecated command hooks, run a configured "after" hook even if an error occurs first. This allows you to perform cleanup steps that correspond to "before" preparation commands—even when something goes wrong.
 * #790: BREAKING: Run all command hooks (both new and deprecated) respecting the "working_directory" option if configured, meaning that hook commands are run in that directory.
 * #793: Add configuration options for all verbosity and logging flags, so you don't have to set them on the command-line.
 * #836: Add a custom command option for the SQLite hook.
 * #837: Add custom command options for the MongoDB hook.
 * #1010: When using Borg 2, don't pass the "--stats" flag to "borg prune".
 * #1020: Document a database use case involving a temporary database client container: https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#database-containers
 * #1037: Fix an error with the "extract" action when both a remote repository and a "working_directory" are used.
 * #1044: Fix an error in the systemd credential hook when the credential name contains a "." character.
 * #1047: Add "key-file" and "yubikey" options to the KeePassXC credential hook.
 * #1048: Fix a "no such file or directory" error in ZFS, Btrfs, and LVM hooks with nested directories that reside on separate devices/filesystems.
 * #1050: Fix a failure in the "spot" check when the archive contains a symlink.
 * #1051: Add configuration filename to the "Successfully ran configuration file" log message.
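To make the revamped command hooks concrete, here is a hedged sketch of the "commands:" syntax from #790/#821 above. The "commands", "before", "after", "when", and "states" names all appear in these release notes; the exact structure shown is inferred, so consult the hook documentation for the full schema:

    commands:
        - before: action
          when: [create]
          run:
              - echo "Preparing to create an archive."
        - after: action
          when: [create]
          # Assumed: only run this hook if the action finished successfully.
          states: [finish]
          run:
              - echo "Archive created."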
1.9.14
 * #409: With the PagerDuty monitoring hook, send borgmatic logs to PagerDuty so they show up in the incident UI. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#pagerduty-hook
 * #936: Clarify Zabbix monitoring hook documentation about creating items: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#zabbix-hook
 * #1017: Fix a regression in which some MariaDB/MySQL passwords were not escaped correctly.
 * #1021: Fix a regression in which the "exclude_patterns" option didn't expand "~" (the user's home directory). This fix means that all "patterns" and "patterns_from" also now expand "~".
 * #1023: Fix an error in the Btrfs hook when attempting to snapshot a read-only subvolume. Now, read-only subvolumes are ignored since Btrfs can't actually snapshot them.

1.9.13
 * #975: Add a "compression" option to the PostgreSQL database hook.
 * #1001: Fix a ZFS error during snapshot cleanup.
 * #1003: In the Zabbix monitoring hook, support Zabbix 7.2's authentication changes.
 * #1009: Send database passwords to MariaDB and MySQL via anonymous pipe, which is more secure than using an environment variable.
 * #1013: Send database passwords to MongoDB via anonymous pipe, which is more secure than using "--password" on the command-line!
 * #1015: When ctrl-C is pressed, more strongly encourage Borg to actually exit.
 * Add a "verify_tls" option to the Uptime Kuma monitoring hook for disabling TLS verification.
 * Add "tls" options to the MariaDB and MySQL database hooks to enable or disable TLS encryption between client and server.

1.9.12
 * #1005: Fix the credential hooks to avoid using Python 3.12+ string features. Now borgmatic will work with Python 3.9, 3.10, and 3.11 again.

1.9.11
 * #795: Add credential loading from file, KeePassXC, and Docker/Podman secrets (see the example below). See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/
 * #996: Fix the "create" action to omit the repository label prefix from Borg's output when databases are enabled.
 * #998: Send the "encryption_passphrase" option to Borg via an anonymous pipe, which is more secure than using an environment variable.
 * #999: Fix a runtime directory error from a conflict between "extra_borg_options" and special file detection.
 * #1001: For the ZFS, Btrfs, and LVM hooks, only make snapshots for root patterns that come from a borgmatic configuration option (e.g. "source_directories")—not from other hooks within borgmatic.
 * #1001: Fix a ZFS/LVM error due to colliding snapshot mount points for nested datasets or logical volumes.
 * #1001: Don't try to snapshot ZFS datasets that have the "canmount=off" property.
 * Fix another error in the Btrfs hook when a subvolume mounted at "/" is configured in borgmatic's source directories.
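As a rough illustration of the credential loading from #795 above: borgmatic documents a "{credential ...}" placeholder syntax (the 1.9.10 notes below cover the systemd variant). The field order and path shown here are assumptions, so follow the linked documentation:

    # Assumed syntax: read the passphrase from a file instead of
    # embedding it in the configuration.
    encryption_passphrase: "{credential file /etc/borgmatic/passphrase}"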
See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/ * #996: Fix the "create" action to omit the repository label prefix from Borg's output when databases are enabled. * #998: Send the "encryption_passphrase" option to Borg via an anonymous pipe, which is more secure than using an environment variable. * #999: Fix a runtime directory error from a conflict between "extra_borg_options" and special file detection. * #1001: For the ZFS, Btrfs, and LVM hooks, only make snapshots for root patterns that come from a borgmatic configuration option (e.g. "source_directories")—not from other hooks within borgmatic. * #1001: Fix a ZFS/LVM error due to colliding snapshot mount points for nested datasets or logical volumes. * #1001: Don't try to snapshot ZFS datasets that have the "canmount=off" property. * Fix another error in the Btrfs hook when a subvolume mounted at "/" is configured in borgmatic's source directories. 1.9.10 * #966: Add a "{credential ...}" syntax for loading systemd credentials into borgmatic configuration files. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/ * #987: Fix a "list" action error when the "encryption_passcommand" option is set. * #987: When both "encryption_passcommand" and "encryption_passphrase" are configured, prefer "encryption_passphrase" even if it's an empty value. * #988: With the "max_duration" option or the "--max-duration" flag, run the archives and repository checks separately so they don't interfere with one another. Previously, borgmatic refused to run checks in this situation. * #989: Fix the log message code to avoid using Python 3.10+ logging features. Now borgmatic will work with Python 3.9 again. * Capture and delay any log records produced before logging is fully configured, so early log records don't get lost. * Add support for Python 3.13. 1.9.9 * #635: Log the repository path or label on every relevant log message, not just some logs. * #961: When the "encryption_passcommand" option is set, call the command once from borgmatic to collect the encryption passphrase and then pass it to Borg multiple times. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/ * #981: Fix a "spot" check file count delta error. * #982: Fix for borgmatic "exclude_patterns" and "exclude_from" recursing into excluded subdirectories. * #983: Fix the Btrfs hook to support subvolumes with names like "@home" different from their mount points. * #985: Change the default value for the "--original-hostname" flag from "localhost" to no host specified. This way, the "restore" action works without a hostname if there's a single matching database dump. 1.9.8 * #979: Fix root patterns so they don't have an invalid "sh:" prefix before getting passed to Borg. * Expand the recent contributors documentation section to include ticket submitters—not just code contributors—because there are multiple ways to contribute to the project! See: https://torsion.org/borgmatic/#recent-contributors 1.9.7 * #855: Add a Sentry monitoring hook. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#sentry-hook * #968: Fix for a "spot" check error when a filename in the most recent archive contains a newline. * #970: Fix for an error when there's a blank line in the configured patterns or excludes. * #971: Fix for "exclude_from" files being completely ignored. 
* #977: Fix for "exclude_patterns" and "exclude_from" not supporting explicit pattern styles (e.g., "sh:" or "re:"). 1.9.6 * #959: Fix an error in the Btrfs hook when a subvolume mounted at "/" is configured in borgmatic's source directories. * #960: Fix for archives storing relative source directory paths such that they contain the working directory. * #960: Fix the "spot" check to support relative source directory paths. * #962: For the ZFS, Btrfs, and LVM hooks, perform path rewriting for excludes and patterns in addition to the existing source directories rewriting. * #962: Under the hood, merge all configured source directories, excludes, and patterns into a unified temporary patterns file for passing to Borg. The borgmatic configuration options remain unchanged. * #962: For the LVM hook, add support for nested logical volumes. * #965: Fix a borgmatic runtime directory error when running the "spot" check with a database hook enabled. * #969: Fix the "restore" action to work on database dumps without a port when a default port is present in configuration. * Fix the "spot" check to no longer consider pipe files within an archive for file comparisons. * Fix the "spot" check to have a nicer error when there are no source paths to compare. * Fix auto-excluding of special files (when databases are configured) to support relative source directory paths. * Drop support for Python 3.8, which has been end-of-lifed. 1.9.5 * #418: Back up and restore databases that have the same name but with different ports, hostnames, or hooks. * #947: To avoid a hang in the database hooks, error and exit when the borgmatic runtime directory overlaps with the configured excludes. * #954: Fix a findmnt command error in the Btrfs hook by switching to parsing JSON output. * #956: Fix the printing of a color reset code even when color is disabled. * #958: Drop colorama as a library dependency. * When the ZFS, Btrfs, or LVM hooks aren't configured, don't try to clean up snapshots for them. 1.9.4 * #80 (beta): Add an LVM hook for snapshotting and backing up LVM logical volumes. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/snapshot-your-filesystems/ * #251 (beta): Add a Btrfs hook for snapshotting and backing up Btrfs subvolumes. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/snapshot-your-filesystems/ * #926: Fix a library error when running within a PyInstaller bundle. * #950: Fix a snapshot unmount error in the ZFS hook when using nested datasets. * Update the ZFS hook to discover and snapshot ZFS datasets even if they are parent/grandparent directories of your source directories. * Reorganize data source and monitoring hooks to make developing new hooks easier. 1.9.3 * #261 (beta): Add a ZFS hook for snapshotting and backing up ZFS datasets. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/snapshot-your-filesystems/ * Remove any temporary copies of the manifest file created in support of the "bootstrap" action. * Deprecate the "store_config_files" option at the global scope and move it under the "bootstrap" hook. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/extract-a-backup/#extract-the-configuration-files-used-to-create-an-archive * Require the runtime directory to be an absolute path. * Add a "--deleted" flag to the "repo-list" action for listing deleted archives that haven't yet been compacted (Borg 2 only).
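As a usage sketch of that "--deleted" flag (the repository path here is hypothetical):

    borgmatic repo-list --deleted --repository repo.borg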
* Promote the "spot" check from a beta feature to stable. 1.9.2 * #441: Apply the "umask" option to all relevant actions, not just some of them. * #722: Remove the restriction that the "extract" and "mount" actions must match a single repository. Now they work more like other actions, where each repository is applied in turn. * #932: Fix the missing build backend setting in pyproject.toml to allow Fedora builds. * #934: Update the logic that probes for the borgmatic streaming database dump, bootstrap metadata, and check state directories to support more platforms and use cases. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#runtime-directory * #934: Add the "RuntimeDirectory" and "StateDirectory" options to the sample systemd service file to support the new runtime and state directory logic. * #939: Fix borgmatic ignoring the "BORG_RELOCATED_REPO_ACCESS_IS_OK" and "BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK" environment variables. * Add a Pushover monitoring hook. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#pushover-hook 1.9.1 * #928: Fix the user runtime directory location on macOS (and possibly Cygwin). * #930: Fix an error with the sample systemd service when no credentials are configured. * #931: Fix an error when implicitly upgrading the check state directory from ~/.borgmatic to ~/.local/state/borgmatic across filesystems. 1.9.0 * #609: Fix the glob expansion of "source_directories" values to respect the "working_directory" option. * #609: BREAKING: Apply the "working_directory" option to all actions, not just "create". This includes repository paths, destination paths, mount points, etc. * #562: Deprecate the "borgmatic_source_directory" option in favor of "user_runtime_directory" and "user_state_directory". * #562: BREAKING: Move the default borgmatic streaming database dump and bootstrap metadata directory from ~/.borgmatic to /run/user/$UID/borgmatic, which is more XDG-compliant. You can override this location with the new "user_runtime_directory" option. Existing archives with database dumps at the old location are still restorable. * #562, #638: Move the default check state directory from ~/.borgmatic to ~/.local/state/borgmatic. This is more XDG-compliant and also prevents these state files from getting backed up (unless you explicitly include them). You can override this location with the new "user_state_directory" option. After the first time you run the "check" action with borgmatic 1.9.0, you can safely delete the ~/.borgmatic directory. * #838: BREAKING: With Borg 1.4+, store database dumps and bootstrap metadata in a "/borgmatic" directory within a backup archive, so the path doesn't depend on the current user. This means that you can now back up as one user and restore or bootstrap as another user, among other use cases. * #902: Add loading of encrypted systemd credentials. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/#using-systemd-service-credentials * #911: Add a "key change-passphrase" action to change the passphrase protecting a repository key. * #914: Fix a confusing apparent hang when the repository location changes, and instead show a helpful error message. * #915: BREAKING: Rename repository actions like "rcreate" to more explicit names like "repo-create" for compatibility with recent changes in Borg 2.0.0b10.
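To illustrate that rename (a sketch; the encryption value is just an example):

    borgmatic rcreate --encryption repokey-aes-ocb       # old action name
    borgmatic repo-create --encryption repokey-aes-ocb   # new action name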
* #918: BREAKING: When databases are configured, don't auto-enable the "one_file_system" option, as existing auto-excludes of special files should be sufficient to prevent Borg from hanging. But if this change causes problems for you, you can always enable "one_file_system" explicitly. * #919: Clarify the command-line help for the "--config" flag. * #919: Document a policy for versioning and breaking changes: https://torsion.org/borgmatic/docs/how-to/upgrade/#versioning-and-breaking-changes * #921: BREAKING: Change soft failure command hooks to skip only the current repository rather than all repositories in the configuration file. * #922: Replace setup.py (Python packaging metadata) with the more modern pyproject.toml. * When using Borg 2, default the "archive_name_format" option to just "{hostname}", as Borg 2 does not require unique archive names; identical archive names form a common "series" that can be targeted together. See the Borg 2 documentation for more information: https://borgbackup.readthedocs.io/en/2.0.0b13/changes.html#borg-1-2-x-1-4-x-to-borg-2-0 * Add support for Borg 2's "rclone:" repository URLs, so you can back up to 70+ cloud storage services whether or not they support Borg explicitly. * Add support for Borg 2's "sftp://" repository URLs. * Update the "--match-archives" and "--archive" flags to support Borg 2 series names or archive hashes. * Add a "--match-archives" flag to the "prune" action. * Add "--local-path" and "--remote-path" flags to the "config bootstrap" action for setting the Borg executable paths used for bootstrapping. * Add a "--user-runtime-directory" flag to the "config bootstrap" action for helping borgmatic locate the bootstrap metadata stored in an archive. * Add a Zabbix monitoring hook. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#zabbix-hook * Add a tarball of borgmatic's HTML documentation to the packages on the project page. 1.8.14 * #896: Fix an error in borgmatic rcreate/init on an empty repository directory with Borg 1.4. * #898: Add glob ("*") support to the "--repository" flag. Just quote any values containing globs so your shell doesn't interpret them. * #899: Fix for a "bad character" Borg error in which the "spot" check fed Borg an invalid pattern. * #900: Fix for a potential traceback (TypeError) during the handling of another error. * #904: Clarify the configuration reference about the "spot" check options: https://torsion.org/borgmatic/docs/reference/configuration/ * #905: Fix the "source_directories_must_exist" option to work with relative "source_directories" paths when a "working_directory" is set. * #906: Add documentation details for how to run custom database dump commands using binaries from running containers: https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#database-containers * Fix a regression in which the "color" option had no effect. * Add a recent contributors section to the documentation, because credit where credit's due! See: https://torsion.org/borgmatic/#recent-contributors 1.8.13 * #298: Add "delete" and "rdelete" actions to delete archives or entire repositories. * #785: Add an "only_run_on" option to consistency checks so you can limit a check to running on particular days of the week. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/deal-with-very-large-backups/#check-days * #885: Add an Uptime Kuma monitoring hook.
See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#uptime-kuma-hook * #886: Fix a PagerDuty hook traceback with Python < 3.10. * #889: Fix the Healthchecks ping body size limit, restoring it to the documented 100,000 bytes. 1.8.12 * #817: Add a "--max-duration" flag to the "check" action and a "max_duration" option to the repository check configuration. This tells Borg to interrupt a repository check after a certain duration. * #860: Fix interaction between environment variable interpolation in constants and shell escaping. * #863: When color output is disabled (explicitly or implicitly), don't prefix each log line with the log level. * #865: Add an "upload_buffer_size" option to set the size of the upload buffer used in the "create" action. * #866: Fix "Argument list too long" error in the "spot" check when checking hundreds of thousands of files at once. * #874: Add the configured repository label as "repository_label" to the interpolated variables passed to before/after command hooks. * #881: Fix "Unrecognized argument" error when the same value is used with different command-line flags. * In the "spot" check, don't try to hash symlinked directories. 1.8.11 * #815: Add optional Healthchecks auto-provisioning via "create_slug" option. * #851: Fix lack of file extraction when using "extract --strip-components all" on a path with a leading slash. * #854: Fix a traceback when the "data" consistency check is used. * #857: Fix a traceback with "check --only spot" when the "spot" check is unconfigured. 1.8.10 * #656 (beta): Add a "spot" consistency check that compares file counts and contents between your source files and the latest archive, ensuring they fall within configured tolerances. This can catch problems like incorrect excludes, inadvertent deletes, files changed by malware, etc. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/deal-with-very-large-backups/#spot-check * #779: When "--match-archives *" is used with the "check" action, don't skip Borg's orphaned objects check. * #842: When a command hook exits with a soft failure, ping the log and finish states for any configured monitoring hooks. * #843: Add documentation link to Loki dashboard for borgmatic: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#loki-hook * #847: Fix "--json" error when Borg includes non-JSON warnings in JSON output. * #848: SECURITY: Mask the password when logging a MongoDB dump or restore command. * Fix handling of the NO_COLOR environment variable to ignore an empty value. * Add documentation about backing up containerized databases by configuring borgmatic to exec into a container to run a dump command: https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#database-containers 1.8.9 * #311: Add custom dump/restore command options for MySQL and MariaDB. * #811: Add an "access_token" option to the ntfy monitoring hook for authenticating without username/password. * #827: When the "--json" flag is given, suppress console escape codes so as not to interfere with JSON output. * #829: Fix "--override" values containing deprecated section headers not actually overriding configuration options under deprecated section headers. * #835: Add support for the NO_COLOR environment variable. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/set-up-backups/#colored-output * #839: Add log sending for the Apprise logging hook, enabled by default.
See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#apprise-hook * #839: Document a potentially breaking shell quoting edge case within error hooks: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#error-hooks * #840: When running the "rcreate" action and the repository already exists but with a different encryption mode than requested, error. * Switch from Drone to Gitea Actions for continuous integration. * Rename scripts/run-end-to-end-dev-tests to scripts/run-end-to-end-tests and use it in both dev and CI for better dev-CI parity. * Clarify documentation about restoring a database: borgmatic does not create the database upon restore. 1.8.8 * #370: For the PostgreSQL hook, pass the "PGSSLMODE" environment variable through to Borg when the database's configuration omits the "ssl_mode" option. * #818: Allow the "--repository" flag to match across multiple configuration files. * #820: Fix broken repository detection in the "rcreate" action with Borg 1.4. The issue did not occur with other versions of Borg. * #822: Fix broken escaping logic in the PostgreSQL hook's "pg_dump_command" option. * SECURITY: Prevent additional shell injection attacks within the PostgreSQL hook. 1.8.7 * #736: Store included configuration files within each backup archive in support of the "config bootstrap" action. Previously, only top-level configuration files were stored. * #798: Elevate specific Borg warnings to errors or squash errors to warnings. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/customize-warnings-and-errors/ * #810: SECURITY: Prevent shell injection attacks within the PostgreSQL hook, the MongoDB hook, the SQLite hook, the "borgmatic borg" action, and command hook variable/constant interpolation. * #814: Fix a traceback when providing an invalid "--override" value for a list option. 1.8.6 * #767: Add an "--ssh-command" flag to the "config bootstrap" action for setting a custom SSH command, as no configuration is available (including the "ssh_command" option) until bootstrapping completes. * #794: Fix a traceback when the "repositories" option contains both strings and key/value pairs. * #800: Add configured repository labels to the JSON output for all actions. * #802: The "check --force" flag now runs checks even if "check" is in "skip_actions". * #804: Validate the configured action names in the "skip_actions" option. * #807: Stream SQLite databases directly to Borg instead of dumping to an intermediate file. * When logging commands that borgmatic executes, log the environment variables that borgmatic sets for those commands. (But don't log their values, since they often contain passwords.) 1.8.5 * #701: Add a "skip_actions" option to skip running particular actions, handy for append-only or checkless configurations. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/set-up-backups/#skipping-actions * #701: Deprecate the "disabled" value for the "checks" option in favor of the new "skip_actions" option. * #745: Constants now apply to included configuration, not just the file doing the includes. As a side effect of this change, constants no longer apply to option names and only substitute into configuration values. * #779: Add a "--match-archives" flag to the "check" action for selecting the archives to check, overriding the existing "archive_name_format" and "match_archives" options in configuration.
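For instance (a sketch with a hypothetical archive name pattern):

    borgmatic check --match-archives 'sh:myhost-*'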
* #779: Only parse "--override" values as complex data types when they're for options of those types. * #782: Fix environment variable interpolation within configured repository paths. * #782: Add configuration constant overriding via the existing "--override" flag. * #783: Upgrade ruamel.yaml dependency to support version 0.18.x. * #784: Drop support for Python 3.7, which has been end-of-lifed. 1.8.4 * #715: Add a monitoring hook for sending backup status to a variety of monitoring services via the Apprise library. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#apprise-hook * #748: When an archive filter causes no matching archives for the "rlist" or "info" actions, warn the user and suggest how to remove the filter. * #768: Fix a traceback when an invalid command-line flag or action is used. * #771: Fix normalization of deprecated sections ("location:", "storage:", "hooks:", etc.) to support empty sections without erroring. * #774: Disallow the "--dry-run" flag with the "borg" action, as borgmatic can't guarantee the Borg command won't have side effects. 1.8.3 * #665: BREAKING: Simplify logging logic as follows: Syslog verbosity is now disabled by default, but setting the "--syslog-verbosity" flag enables it regardless of whether you're at an interactive console. Additionally, "--log-file-verbosity" and "--monitoring-verbosity" now default to 1 (info about steps borgmatic is taking) instead of 0. And both syslog logging and file logging can be enabled simultaneously. * #743: Add a monitoring hook for sending backup status and logs to Grafana Loki. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#loki-hook * #753: When "archive_name_format" is not set, filter archives using the default archive name format. * #754: Fix error handling to log command output as one record per line instead of truncating too-long output and swallowing the end of some Borg error messages. * #757: Update documentation so "sudo borgmatic" works for pipx borgmatic installations. * #761: Fix for borgmatic not stopping Borg immediately when the user presses ctrl-C. * Update documentation to recommend installing/upgrading borgmatic with pipx instead of pip. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/set-up-backups/#installation https://torsion.org/borgmatic/docs/how-to/upgrade/#upgrading-borgmatic 1.8.2 * #345: Add "key export" action to export a copy of the repository key for safekeeping in case the original goes missing or gets damaged. * #727: Add a MariaDB database hook that uses native MariaDB commands instead of the deprecated MySQL ones. Be aware though that any existing backups made with the "mysql_databases:" hook are only restorable with a "mysql_databases:" configuration. * #738: Fix for potential data loss (data not getting restored) in which the database "restore" action didn't actually restore anything and indicated success anyway. * Remove the deprecated use of the MongoDB hook's "--db" flag for database restoration. 
* Add source code reference documentation for getting oriented with the borgmatic code as a developer: https://torsion.org/borgmatic/docs/reference/source-code/ 1.8.1 * #326: Add documentation for restoring a database to an alternate host: https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#restore-to-an-alternate-host * #697: Add documentation for "bootstrap" action: https://torsion.org/borgmatic/docs/how-to/extract-a-backup/#extract-the-configuration-files-used-to-create-an-archive * #725: Add "store_config_files" option for disabling the automatic backup of configuration files used by the "config bootstrap" action. * #728: Fix for "prune" action error when using the "keep_exclude_tags" option. * #730: Fix for Borg's interactive prompt on the "check --repair" action automatically getting answered "NO" even when the "check_i_know_what_i_am_doing" option isn't set. * #732: Include multiple configuration files with a single "!include". See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#multiple-merge-includes * #734: Omit "--glob-archives" or "--match-archives" Borg flag when its value would be "*" (meaning all archives). 1.8.0 * #575: BREAKING: For the "borgmatic borg" action, instead of implicitly injecting repository/archive into the resulting Borg command-line, pass repository to Borg via an environment variable and make archive available for explicit use in your commands. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/run-arbitrary-borg-commands/ * #719: Fix an error when running "borg key export" through borgmatic. * #720: Fix an error when dumping a database and the "exclude_nodump" option is set. * #724: Add "check_i_know_what_i_am_doing" option to bypass Borg confirmation prompt when running "check --repair". * When merging two configuration files, error gracefully if the two files do not adhere to the same format. * #721: Remove configuration sections ("location:", "storage:", "hooks:", etc.), while still keeping deprecated support for them. Now, all options are at the same level, and you don't need to worry about commenting/uncommenting section headers when you change an option (if you remove your sections first). * #721: BREAKING: The retention prefix and the consistency prefix can no longer have different values (unless one is not set). * #721: BREAKING: The storage umask and the hooks umask can no longer have different values (unless one is not set). * BREAKING: Flags like "--config" that previously took multiple values now need to be given once per value, e.g. "--config first.yaml --config second.yaml" instead of "--config first.yaml second.yaml". This prevents argument parsing errors on ambiguous commands. * BREAKING: Remove the deprecated (and silently ignored) "--successful" flag on the "list" action, as newer versions of Borg list successful (non-checkpoint) archives by default. * All deprecated configuration option values now generate warning logs. * Remove the deprecated (and non-functional) "--excludes" flag in favor of excludes within configuration. * Fix an error when logging too-long command output during error handling. Now, long command output is truncated before logging. 1.7.15 * #326: Add configuration options and command-line flags for backing up a database from one location while restoring it somewhere else. * #399: Add a documentation troubleshooting note for MySQL/MariaDB authentication errors. 
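As a configuration sketch for the #326 entry above (hostnames are hypothetical, and the "restore_hostname" option name follows the database documentation), a PostgreSQL database might be dumped from one host and restored to another:

    hooks:
        postgresql_databases:
            - name: users
              hostname: production.example.org
              restore_hostname: staging.example.org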
* #529: Remove upgrade-borgmatic-config command for upgrading borgmatic 1.1.0 INI-style configuration. * #529: Deprecate generate-borgmatic-config in favor of new "config generate" action. * #529: Deprecate validate-borgmatic-config in favor of new "config validate" action. * #697, #712, #716: Extract borgmatic configuration from backup via new "config bootstrap" action—even when borgmatic has no configuration yet! * #669: Add sample systemd user service for running borgmatic as a non-root user. * #711, #713: Fix an error when "data" check time files are accessed without getting upgraded first. 1.7.14 * #484: Add a new verbosity level (-2) to disable output entirely (for console, syslog, log file, or monitoring), so not even errors are shown. * #688: Tweak archive check probing logic to use the newest timestamp found when multiple exist. * #659: Add Borg 2 date-based matching flags to various actions for archive selection. * #703: Fix an error when loading the configuration schema on Fedora Linux. * #704: Fix "check" action error when repository and archive checks are configured but the archive check gets skipped due to the configured frequency. * #706: Fix "--archive latest" on "list" and "info" actions that only worked on the first of multiple configured repositories. 1.7.13 * #375: Restore particular PostgreSQL schemas from a database dump via "borgmatic restore --schema" flag. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#restore-particular-schemas * #678: Fix error from PostgreSQL when dumping a database with a "format" of "plain". * #678: Fix PostgreSQL hook to support "psql_command" and "pg_restore_command" options containing commands with arguments. * #678: Fix calls to psql in PostgreSQL hook to ignore "~/.psqlrc", whose settings can break database dumping. * #680: Add support for logging each log line as a JSON object via global "--log-json" flag. * #682: Fix "source_directories_must_exist" option to expand globs and tildes in source directories. * #684: Rename "master" development branch to "main" to use more inclusive language. You'll need to update your development checkouts accordingly. * #686: Add fish shell completion script so you can tab-complete on the borgmatic command-line. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/set-up-backups/#shell-completion * #687: Fix borgmatic error when not finding the configuration schema for certain "pip install --editable" development installs. * #688: Fix archive checks being skipped even when particular archives haven't been checked recently. This occurred when using multiple borgmatic configuration files with different "archive_name_format"s, for instance. * #691: Fix error in "borgmatic restore" action when the configured repository path is relative instead of absolute. * #694: Run "borgmatic borg" action without capturing output so interactive prompts and flags like "--progress" still work. 1.7.12 * #413: Add "log_file" context to command hooks so your scripts can consume the borgmatic log file. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/ * #666, #670: Fix error when running the "info" action with the "--match-archives" or "--archive" flags. Also fix the "--match-archives"/"--archive" flags to correctly override the "match_archives" configuration option for the "transfer", "list", "rlist", and "info" actions. 
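As a sketch of the #413 "log_file" context above (the command is hypothetical, and "{log_file}" is only set when borgmatic runs with a log file):

    hooks:
        after_backup:
            - grep --ignore-case warning {log_file}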
* #668: Fix error when running the "prune" action with both "archive_name_format" and "prefix" options set. * #672: Selectively shallow merge certain mappings or sequences when including configuration files. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#shallow-merge * #672: Selectively omit list values when including configuration files. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#list-merge * #673: View the results of configuration file merging via "validate-borgmatic-config --show" flag. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#debugging-includes * Add optional support for running end-to-end tests and building documentation with rootless Podman instead of Docker. 1.7.11 * #479, #588: BREAKING: Automatically use the "archive_name_format" option to filter which archives get used for borgmatic actions that operate on multiple archives. Override this behavior with the new "match_archives" option in the storage section. This change is "breaking" in that it silently changes which archives get considered for "rlist", "prune", "check", etc. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#archive-naming * #479, #588: The "prefix" options have been deprecated in favor of the new "archive_name_format" auto-matching behavior and the "match_archives" option. * #658: Add "--log-file-format" flag for customizing the log message format. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/inspect-your-backups/#logging-to-file * #662: Fix regression in which the "check_repositories" option failed to match repositories. * #663: Fix regression in which the "transfer" action produced a traceback. * Add spellchecking of source code during test runs. 1.7.10 * #396: When a database command errors, display and log the error message instead of swallowing it. * #501: Optionally error if a source directory does not exist via "source_directories_must_exist" option in borgmatic's location configuration. * #576: Add support for "file://" paths within "repositories" option. * #612: Define and use custom constants in borgmatic configuration files. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#constant-interpolation * #618: Add support for BORG_FILES_CACHE_TTL environment variable via "borg_files_cache_ttl" option in borgmatic's storage configuration. * #623: Fix confusing message when an error occurs running actions for a configuration file. * #635: Add optional repository labels so you can select a repository via "--repository yourlabel" at the command-line. See the configuration reference for more information: https://torsion.org/borgmatic/docs/reference/configuration/ * #649: Add documentation on backing up a database running in a container: https://torsion.org/borgmatic/docs/how-to/backup-your-databases/#database-containers * #655: Fix error when databases are configured and a source directory doesn't exist. * Add code style plugins to enforce use of Python f-strings and prevent single-letter variables. To join in the pedantry, refresh your test environment with "tox --recreate". * Rename scripts/run-full-dev-tests to scripts/run-end-to-end-dev-tests and make it run end-to-end tests only. 
Continue using tox to run unit and integration tests. 1.7.9 * #295: Add a SQLite database dump/restore hook. * #304: Change the default action order when no actions are specified on the command-line to: "create", "prune", "compact", "check". If you'd like to retain the old ordering ("prune" and "compact" first), then specify actions explicitly on the command-line. * #304: Run any command-line actions in the order specified instead of using a fixed ordering. * #564: Add "--repository" flag to all actions where it makes sense, so you can run borgmatic on a single configured repository instead of all of them. * #628: Add a Healthchecks "log" state to send borgmatic logs to Healthchecks without signalling success or failure. * #647: Add "--strip-components all" feature on the "extract" action to remove leading path components of files you extract. Must be used with the "--path" flag. * Add support for Python 3.11. 1.7.8 * #620: With the "create" action and the "--list" ("--files") flag, only show excluded files at verbosity 2. * #621: Add optional authentication to the ntfy monitoring hook. * With the "create" action, only one of "--list" ("--files") and "--progress" flags can be used. This lines up with the new behavior in Borg 2.0.0b5. * Internally support new Borg 2.0.0b5 "--filter" status characters / item flags for the "create" action. * Fix the "create" action with the "--dry-run" flag querying for databases when a PostgreSQL/MySQL "all" database is configured. Now, these queries are skipped due to the dry run. * Add "--repository" flag to the "rcreate" action to optionally select one configured repository to create. * Add "--progress" flag to the "transfer" action, new in Borg 2.0.0b5. * Add "checkpoint_volume" configuration option to create checkpoints every specified number of bytes during a long-running backup, new in Borg 2.0.0b5. 1.7.7 * #642: Add MySQL database hook "add_drop_database" configuration option to control whether dumped MySQL databases get dropped right before restore. * #643: Fix for potential data loss (data not getting backed up) when dumping large "directory" format PostgreSQL/MongoDB databases. Prior to the fix, these dumps would not finish writing to disk before Borg consumed them. Now, the dumping process completes before Borg starts. This only applies to "directory" format databases; other formats still stream to Borg without using temporary disk space. * Fix MongoDB "directory" format to work with mongodump/mongorestore without error. Prior to this fix, only the "archive" format worked. 1.7.6 * #393, #438, #560: Optionally dump "all" PostgreSQL/MySQL databases to separate files instead of one combined dump file, allowing more convenient restores of individual databases. You can enable this by specifying the database dump "format" option when the database is named "all". * #602: Fix logs that interfere with JSON output by making warnings go to stderr instead of stdout. * #622: Fix traceback when include merging configuration files on ARM64. * #629: Skip warning about excluded special files when no special files have been excluded. * #630: Add configuration options for database command customization: "list_options", "restore_options", and "analyze_options" for PostgreSQL, "restore_options" for MySQL, and "restore_options" for MongoDB. 1.7.5 * #311: Override PostgreSQL dump/restore commands via configuration options. * #604: Fix traceback when a configuration section is present but lacking any options.
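A configuration sketch for the #311 entry above (the paths are hypothetical), using this era's sectioned format:

    hooks:
        postgresql_databases:
            - name: users
              pg_dump_command: /usr/local/bin/pg_dump
              pg_restore_command: /usr/local/bin/pg_restore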
* #607: Clarify documentation examples for include merging and deep merging. * #611: Fix "data" consistency check to support "check_last" and consistency "prefix" options. * #613: Clarify documentation about multiple repositories and separate configuration files. 1.7.4 * #596: Fix special file detection erroring when broken symlinks are encountered. * #597, #598: Fix regression in which "check" action errored on certain systems ("Cannot determine Borg repository ID"). 1.7.3 * #357: Add "break-lock" action for removing any repository and cache locks leftover from Borg aborting. * #360: To prevent Borg hangs, unconditionally delete stale named pipes before dumping databases. * #587: When database hooks are enabled, auto-exclude special files from a "create" action to prevent Borg from hanging. You can override/prevent this behavior by explicitly setting the "read_special" option to true. * #587: Warn when ignoring a configured "read_special" value of false, as true is needed when database hooks are enabled. * #589: Update sample systemd service file to allow system "idle" (e.g. a video monitor turning off) while borgmatic is running. * #590: Fix for potential data loss (data not getting backed up) when the "patterns_from" option was used with "source_directories" (or the "~/.borgmatic" path existed, which got injected into "source_directories" implicitly). The fix is for borgmatic to convert "source_directories" into patterns whenever "patterns_from" is used, working around a Borg bug: https://github.com/borgbackup/borg/issues/6994 * #590: In "borgmatic create --list" output, display which files get excluded from the backup due to patterns or excludes. * #591: Add support for Borg 2's "--match-archives" flag. This replaces "--glob-archives", which borgmatic now treats as an alias for "--match-archives". But note that the two flags have slightly different syntax. See the Borg 2 changelog for more information: https://borgbackup.readthedocs.io/en/2.0.0b3/changes.html#version-2-0-0b3-2022-10-02 * Fix for "borgmatic --archive latest" not finding the latest archive when a verbosity is set. 1.7.2 * #577: Fix regression in which "borgmatic info --archive ..." showed repository info instead of archive info with Borg 1. * #582: Fix hang when database hooks are enabled and "patterns" contains a parent directory of "~/.borgmatic". 1.7.1 * #542: Make the "source_directories" option optional. This is useful for "check"-only setups or using "patterns" exclusively. * #574: Fix for potential data loss (data not getting backed up) when the "patterns" option was used with "source_directories" (or the "~/.borgmatic" path existed, which got injected into "source_directories" implicitly). The fix is for borgmatic to convert "source_directories" into patterns whenever "patterns" is used, working around a Borg bug: https://github.com/borgbackup/borg/issues/6994 1.7.0 * #463: Add "before_actions" and "after_actions" command hooks that run before/after all the actions for each repository. These new hooks are a good place to run per-repository steps like mounting/unmounting a remote filesystem. * #463: Update documentation to cover per-repository configurations: https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/ * #557: Support for Borg 2 while still working with Borg 1. This includes new borgmatic actions like "rcreate" (replaces "init"), "rlist" (list archives in repository), "rinfo" (show repository info), and "transfer" (for upgrading Borg repositories). 
For the most part, borgmatic tries to smooth over differences between Borg 1 and 2 to make your upgrade process easier. However, there are still a few cases where Borg made breaking changes. See the Borg 2.0 changelog for more information: https://www.borgbackup.org/releases/borg-2.0.html * #557: If you install Borg 2, you'll need to manually upgrade your existing Borg 1 repositories before use. Note that Borg 2 stable is not yet released as of this borgmatic release, so don't use Borg 2 for production until it is! See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/upgrade/#upgrading-borg * #557: Rename several configuration options to match Borg 2: "remote_rate_limit" is now "upload_rate_limit", "numeric_owner" is "numeric_ids", and "bsd_flags" is "flags". borgmatic still works with the old options. * #557: Remote repository paths without the "ssh://" syntax are deprecated but still supported for now. Remote repository paths containing "~" are deprecated in borgmatic and no longer work in Borg 2. * #557: Omitting the "--archive" flag on the "list" action is deprecated when using Borg 2. Use the new "rlist" action instead. * #557: The "--dry-run" flag can now be used with the "rcreate"/"init" action. * #565: Fix handling of "repository" and "data" consistency checks to prevent invalid Borg flags. * #566: Modify "mount" and "extract" actions to require the "--repository" flag when multiple repositories are configured. * #571: BREAKING: Remove old-style command-line action flags like "--create", "--list", etc. If you're already using actions like "create" and "list" instead, this change should not affect you. * #571: BREAKING: Rename "--files" flag on "prune" action to "--list", as it lists archives, not files. * #571: Add "--list" as alias for "--files" flag on "create" and "export-tar" actions. * Add support for disabling TLS verification in Healthchecks monitoring hook with "verify_tls" option. 1.6.6 * #559: Update documentation about configuring multiple consistency checks or multiple databases. * #560: Fix all database hooks to error when the requested database to restore isn't present in the Borg archive. * #561: Fix command-line "--override" flag to continue supporting old configuration file formats. * #563: Fix traceback with "create" action and "--json" flag when a database hook is configured. 1.6.5 * #553: Fix logging to include the full traceback when Borg experiences an internal error, not just the first few lines. * #554: Fix all monitoring hooks to warn if the server returns an HTTP 4xx error. This can happen with Healthchecks, for instance, when using an invalid ping URL. * #555: Fix environment variable plumbing so options like "encryption_passphrase" and "encryption_passcommand" in one configuration file aren't used for other configuration files. 1.6.4 * #546, #382: Keep your repository passphrases and database passwords outside of borgmatic's configuration file with environment variable interpolation. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/provide-your-passwords/ 1.6.3 * #541: Add "borgmatic list --find" flag for searching for files across multiple archives, useful for hunting down that file you accidentally deleted so you can extract it. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/inspect-your-backups/#searching-for-a-file * #543: Add a monitoring hook for sending push notifications via ntfy.
See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#ntfy-hook * Fix Bash completion script to no longer alter your shell's settings (complain about unset variables or error on pipe failures). * Deprecate "borgmatic list --successful" flag, as listing only non-checkpoint (successful) archives is now the default in newer versions of Borg. 1.6.2 * #523: Reduce the default consistency check frequency and support configuring the frequency independently for each check. Also add "borgmatic check --force" flag to ignore configured frequencies. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/deal-with-very-large-backups/#check-frequency * #536: Fix generate-borgmatic-config to support more complex schema changes like the new Healthchecks configuration options when the "--source" flag is used. * #538: Add support for "borgmatic borg debug" command. * #539: Add "generate-borgmatic-config --overwrite" flag to replace an existing destination file. * Add Bash completion script so you can tab-complete the borgmatic command-line. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/set-up-backups/#shell-completion 1.6.1 * #294: Add Healthchecks monitoring hook "ping_body_limit" option to configure how many bytes of logs to send to the Healthchecks server. * #402: Remove the error when "archive_name_format" is specified but a retention prefix isn't. * #420: Warn when an unsupported variable is used in a hook command. * #439: Change connection failures for monitoring hooks (Healthchecks, Cronitor, PagerDuty, and Cronhub) to be warnings instead of errors. This way, the monitoring system failing does not block backups. * #460: Add Healthchecks monitoring hook "send_logs" option to enable/disable sending borgmatic logs to the Healthchecks server. * #525: Add Healthchecks monitoring hook "states" option to only enable pinging for particular monitoring states (start, finish, fail). * #528: Improve the error message when a configuration override contains an invalid value. * #531: BREAKING: When deep merging common configuration, merge colliding list values by appending them. Previously, one list replaced the other. * #532: When a configuration include is a relative path, load it from either the current working directory or from the directory containing the file doing the including. Previously, only the working directory was used. * Add a randomized delay to the sample systemd timer to spread out the load on a server. * Change the configuration format for borgmatic monitoring hooks (Healthchecks, Cronitor, PagerDuty, and Cronhub) to specify the ping URL / integration key as a named option. The intent is to support additional options (some in this release). This change is backwards-compatible. * Add emojis to documentation table of contents to make it easier to find particular how-to and reference guides at a glance. 1.6.0 * #381: BREAKING: Greatly simplify configuration file reuse by deep merging when including common configuration. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#include-merging * #473: BREAKING: Instead of executing "before" command hooks before all borgmatic actions run (and "after" hooks after), execute these hooks right before/after the corresponding action. E.g., "before_check" now runs immediately before the "check" action. This better supports running timing-sensitive tasks like pausing containers. 
Side effect: before/after command hooks now run once for each configured repository instead of once per configuration file. Additionally, the "repositories" interpolated variable has been changed to "repository", containing the path to the current repository for the hook. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/ * #513: Add mention of sudo's "secure_path" option to borgmatic installation documentation. * #515: Fix "borgmatic borg key ..." to pass parameters to Borg in the correct order. * #516: Fix handling of TERM signal to exit borgmatic, not just forward the signal to Borg. * #517: Fix borgmatic exit code (so it's zero) when initial Borg calls fail but later retries succeed. * Change Healthchecks logs truncation size from 10k bytes to 100k bytes, corresponding to that same change on Healthchecks.io. 1.5.24 * #431: Add "working_directory" option to support source directories with relative paths. * #444: When loading a configuration file that is unreadable due to file permissions, warn instead of erroring. This supports running borgmatic as a non-root user with configuration in ~/.config even if there is an unreadable global configuration file in /etc. * #469: Add "repositories" context to "before_*" and "after_*" command action hooks. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/ * #486: Fix handling of "patterns_from" and "exclude_from" options to error instead of warning when referencing unreadable files and "create" action is run. * #507: Fix Borg usage error in the "compact" action when running "borgmatic --dry-run". Now, skip "compact" entirely during a dry run. 1.5.23 * #394: Compact repository segments and free space with new "borgmatic compact" action. Borg 1.2+ only. Also run "compact" by default when no actions are specified, as "prune" in Borg 1.2 no longer frees up space unless "compact" is run. * #394: When using the "atime", "bsd_flags", "numeric_owner", or "remote_rate_limit" options, tailor the flags passed to Borg depending on the Borg version. * #480, #482: Fix traceback when a YAML validation error occurs. 1.5.22 * #288: Add database dump hook for MongoDB. * #470: Move mysqldump options to the beginning of the command due to MySQL bug 30994. * #471: When command-line configuration override produces a parse error, error cleanly instead of tracebacking. * #476: Fix unicode error when restoring particular MySQL databases. * Drop support for Python 3.6, which has been end-of-lifed. * Add support for Python 3.10. 1.5.21 * #28: Optionally retry failing backups via "retries" and "retry_wait" configuration options. * #306: Add "list_options" MySQL configuration option for passing additional arguments to MySQL list command. * #459: Add support for old version (2.x) of jsonschema library. 1.5.20 * Re-release with correct version without dev0 tag. 1.5.19 * #387: Fix error when configured source directories are not present on the filesystem at the time of backup. Now, Borg will complain, but the backup will still continue. * #455: Mention changing borgmatic path in cron documentation. * Update sample systemd service file with more granular read-only filesystem settings. * Move Gitea and GitHub hosting from a personal namespace to an organization for better collaboration with related projects. * 1k ★s on GitHub! 1.5.18 * #389: Fix "message too long" error when logging to rsyslog. 
* #440: Fix traceback that can occur when dumping a database. 1.5.17 * #437: Fix error when configuration file contains "umask" option. * Remove test dependency on vim and /dev/urandom. 1.5.16 * #379: Suppress console output in sample crontab and systemd service files. * #407: Fix syslog logging on FreeBSD. * #430: Fix hang when restoring a PostgreSQL "tar" format database dump. * Better error messages! Switch the library used for validating configuration files (from pykwalify to jsonschema). * Link borgmatic Ansible role from installation documentation: https://torsion.org/borgmatic/docs/how-to/set-up-backups/#other-ways-to-install 1.5.15 * #419: Document use case of running backups conditionally based on laptop power level: https://torsion.org/borgmatic/docs/how-to/backup-to-a-removable-drive-or-an-intermittent-server/ * #425: Run arbitrary Borg commands with new "borgmatic borg" action. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/run-arbitrary-borg-commands/ 1.5.14 * #390: Add link to Hetzner storage offering from the documentation. * #398: Clarify canonical home of borgmatic in documentation. * #406: Clarify in the documentation that spaces in path names should not be backslash-escaped. * #423: Fix error handling to error loudly when Borg gets killed due to running out of memory! * Fix build so as not to attempt to build and push documentation for a non-main branch. * "Fix" build failure with Alpine Edge by switching from Edge to Alpine 3.13. * Move #borgmatic IRC channel from Freenode to Libera Chat due to Freenode takeover drama. IRC connection info: https://torsion.org/borgmatic/#issues 1.5.13 * #373: Document that passphrase is used for Borg keyfile encryption, not just repokey encryption. * #404: Add support for ruamel.yaml 0.17.x YAML parsing library. * Update systemd service example to return a permission error when a system call isn't permitted (instead of terminating borgmatic outright). * Drop support for Python 3.5, which has been end-of-lifed. * Add support for Python 3.9. * Update versions of test dependencies (test_requirements.txt and test containers). * Only support black code formatter on Python 3.8+. New black dependencies make installation difficult on older versions of Python. * Replace "improve this documentation" form with link to support and ticket tracker. 1.5.12 * Fix for previous release with incorrect version suffix in setup.py. No other changes. 1.5.11 * #341: Add "temporary_directory" option for changing Borg's temporary directory. * #352: Lock down systemd security settings in sample systemd service file. * #355: Fix traceback when a database hook value is null in a configuration file. * #361: Merge override values when specifying the "--override" flag multiple times. The previous behavior was to take the value of the last "--override" flag only. * #367: Fix traceback when upgrading old INI-style configuration with upgrade-borgmatic-config. * #368: Fix signal forwarding from borgmatic to Borg resulting in recursion traceback. * #369: Document support for Borg placeholders in repository names. 1.5.10 * #347: Add hooks that run for the "extract" action: "before_extract" and "after_extract". * #350: Fix traceback when a configuration directory is non-readable due to directory permissions. * Add documentation navigation links on left side of all documentation pages.
* Clarify documentation on configuration overrides, specifically the portion about list syntax: http://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#configuration-overrides * Clarify documentation overview of monitoring options: http://torsion.org/borgmatic/docs/how-to/monitor-your-backups/ 1.5.9 * #300: Add "borgmatic export-tar" action to export an archive to a tar-formatted file or stream. * #339: Fix for intermittent timing-related test failure of logging function. * Clarify database documentation about excluding named pipes and character/block devices to prevent hangs. * Add documentation on how to make backups redundant with multiple repositories: https://torsion.org/borgmatic/docs/how-to/make-backups-redundant/ 1.5.8 * #336: Fix for traceback when running Cronitor, Cronhub, and PagerDuty monitor hooks. 1.5.7 * #327: Fix broken pass-through of BORG_* environment variables to Borg. * #328: Fix duplicate logging to Healthchecks and send "after_*" hooks output to Healthchecks. * #331: Add SSL support to PostgreSQL database configuration. * #333: Fix for potential data loss (data not getting backed up) when borgmatic omitted configured source directories in certain situations. Specifically, this occurred when two source directories on different filesystems were related by parentage (e.g. "/foo" and "/foo/bar/baz") and the one_file_system option was enabled. * Update documentation code fragments theme to better match the rest of the page. * Improve configuration reference documentation readability via more aggressive word-wrapping in configuration schema descriptions. 1.5.6 * #292: Allow before_backup and similar hooks to exit with a soft failure without altering the monitoring status on Healthchecks or other providers. Support this by waiting to ping monitoring services with a "start" status until after before_* hooks finish. Failures in before_* hooks still trigger a monitoring "fail" status. * #316: Fix hang when a stale database dump named pipe from an aborted borgmatic run remains on disk. * #323: Fix for certain configuration options like ssh_command impacting Borg invocations for separate configuration files. * #324: Add "borgmatic extract --strip-components" flag to remove leading path components when extracting an archive. * Tweak comment indentation in generated configuration file for clarity. * Link to Borgmacator GNOME AppIndicator from monitoring documentation. 1.5.5 * #314: Fix regression in support for PostgreSQL's "directory" dump format. Unlike other dump formats, the "directory" dump format does not stream directly to/from Borg. * #315: Fix enabled database hooks to implicitly set one_file_system configuration option to true. This prevents Borg from reading devices like /dev/zero and hanging. * #316: Fix hang when streaming a database dump to Borg with implicit duplicate source directories by deduplicating them first. * #319: Fix error message when there are no MySQL databases to dump for "all" databases. * Improve documentation around the installation process. Specifically, document making borgmatic commands runnable via the system PATH and offering a global install option. 1.5.4 * #310: Fix legitimate database dump command errors (exit code 1) not being treated as errors by borgmatic. * For database dumps, replace the named pipe on every borgmatic run. This prevents hangs on stale pipes left over from previous runs. * Fix error handling to handle more edge cases when executing commands.
1.5.3 * #258: Stream database dumps and restores directly to/from Borg without using any additional filesystem space. This feature is automatic, and works even on restores from archives made with previous versions of borgmatic. * #293: Documentation on macOS launchd permissions issues with work-around for Full Disk Access. * Remove "borgmatic restore --progress" flag, as it now conflicts with streaming database restores. 1.5.2 * #301: Fix MySQL restore error on "all" database dump by excluding system tables. * Fix PostgreSQL restore error on "all" database dump by using "psql" for the restore instead of "pg_restore". 1.5.1 * #289: Tired of looking up the latest successful archive name in order to pass it to borgmatic actions? Me too. Now you can specify "--archive latest" to all actions that accept an archive flag. * #290: Fix the "--stats" and "--files" flags so that they yield output at verbosity 0. * Reduce the default verbosity of borgmatic logs sent to Healthchecks monitoring hook. Now, it's warnings and errors only. You can increase the verbosity via the "--monitoring-verbosity" flag. * Add security policy documentation in SECURITY.md. 1.5.0 * #245: Monitor backups with PagerDuty hook integration. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#pagerduty-hook * #255: Add per-action hooks: "before_prune", "after_prune", "before_check", and "after_check". * #274: Add ~/.config/borgmatic.d as another configuration directory default. * #277: Customize Healthchecks log level via borgmatic "--monitoring-verbosity" flag. * #280: Change "exclude_if_present" option to support multiple filenames that indicate a directory should be excluded from backups, rather than just a single filename. * #284: Backup to a removable drive or intermittent server via "soft failure" feature. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/backup-to-a-removable-drive-or-an-intermittent-server/ * #287: View consistency check progress via "--progress" flag for "check" action. * For "create" and "prune" actions, no longer list files or show detailed stats at any verbosities by default. You can opt back in with "--files" or "--stats" flags. * For "list" and "info" actions, show repository names even at verbosity 0. 1.4.22 * #276, #285: Disable colored output when "--json" flag is used, so as to produce valid JSON output. * After a backup of a database dump in directory format, properly remove the dump directory. * In "borgmatic --help", don't expand $HOME in listing of default "--config" paths. 1.4.21 * #268: Override particular configuration options from the command-line via "--override" flag. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/make-per-application-backups/#configuration-overrides * #270: Only trigger "on_error" hooks and monitoring failures for "prune", "create", and "check" actions, and not for other actions. * When pruning with verbosity level 1, list pruned and kept archives. Previously, this information was only shown at verbosity level 2. 1.4.20 * Fix repository probing during "borgmatic init" to respect verbosity flag and remote_path option. * #249: Update Healthchecks/Cronitor/Cronhub monitoring integrations to fire for "check" and "prune" actions, not just "create". 1.4.19 * #259: Optionally change the internal database dump path via "borgmatic_source_directory" option in location configuration section. 
* #271: Support piping "borgmatic list" output to grep by logging certain log levels to console stdout and others to stderr. * Retain colored output when piping or redirecting in an interactive terminal. * Add end-to-end tests for database dump and restore. These are run on developer machines with Docker Compose for approximate parity with continuous integration tests. 1.4.18 * Fix "--repository" flag to accept relative paths. * Fix "borgmatic umount" so it only runs Borg once instead of once per repository / configuration file. * #253: Mount whole repositories via "borgmatic mount" without any "--archive" flag. * #269: Filter listed paths via "borgmatic list --path" flag. 1.4.17 * #235: Pass extra options directly to particular Borg commands, handy for Borg options that borgmatic does not yet support natively. Use "extra_borg_options" in the storage configuration section. * #266: Attempt to repair any inconsistencies found during a consistency check via "borgmatic check --repair" flag. 1.4.16 * #256: Fix for "before_backup" hook not triggering an error when the command contains "borg" and has an exit code of 1. * #257: Fix for garbled Borg file listing when using "borgmatic create --progress" with verbosity level 1 or 2. * #260: Fix for missing Healthchecks monitoring payload or HTTP 500 due to incorrect unicode encoding. 1.4.15 * Fix for database dump removal incorrectly skipping some database dumps. * #123: Support for mounting an archive as a FUSE filesystem via "borgmatic mount" action, and unmounting via "borgmatic umount". See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/extract-a-backup/#mount-a-filesystem 1.4.14 * Show summary log errors regardless of verbosity level, and log the "summary:" header with a log level based on the contained summary logs. 1.4.13 * Show full error logs at "--verbosity 0" so you can see command output without upping the verbosity level. 1.4.12 * #247: With "borgmatic check", consider Borg warnings as errors. * Dial back the display of inline error logs a bit, so failed command output doesn't appear multiple times in the logs (well, except for the summary). 1.4.11 * #241: When using the Healthchecks monitoring hook, include borgmatic logs in the payloads for completion and failure pings. * With --verbosity level 1 or 2, show error logs both inline when they occur and in the summary logs at the bottom. With lower verbosity levels, suppress the summary and show error logs when they occur. 1.4.10 * #246: Fix for "borgmatic restore" showing success and incorrectly extracting archive files, even when no databases are configured to restore. As this can overwrite files from the archive and lead to data loss, please upgrade to get the fix before using "borgmatic restore". * Reopen the file given by "--log-file" flag if an external program rotates the log file while borgmatic is running. 1.4.9 * #228: Database dump hooks for MySQL/MariaDB, so you can easily dump your databases before backups run. * #243: Fix repository does not exist error with "borgmatic extract" when repository is remote. 1.4.8 * Monitor backups with Cronhub hook integration. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronhub-hook * Fix Healthchecks/Cronitor hooks to skip actions when the borgmatic "--dry-run" flag is used. 1.4.7 * #238: In documentation, clarify when Healthchecks/Cronitor hooks fire in relation to other hooks. 
* #239: Upgrade your borgmatic configuration to get new options and comments via "generate-borgmatic-config --source". See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/upgrade/#upgrading-your-configuration 1.4.6 * Verbosity level "-1" for even quieter output: Errors only (#236). 1.4.5 * Log to file instead of syslog via command-line "--log-file" flag (#233). 1.4.4 * #234: Support for Borg --keep-exclude-tags and --exclude-nodump options. 1.4.3 * Monitor backups with Cronitor hook integration. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#cronitor-hook 1.4.2 * Extract files to a particular directory via "borgmatic extract --destination" flag. * Rename "borgmatic extract --restore-path" flag to "--path" to reduce confusion with the separate "borgmatic restore" action. Any uses of "--restore-path" will continue working. 1.4.1 * #229: Restore backed up PostgreSQL databases via "borgmatic restore" action. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/backup-your-databases/ * Documentation on how to develop borgmatic's documentation: https://torsion.org/borgmatic/docs/how-to/develop-on-borgmatic/#documentation-development 1.4.0 * #225: Database dump hooks for PostgreSQL, so you can easily dump your databases before backups run. * #230: Rename "borgmatic list --pattern-from" flag to "--patterns-from" to match Borg. 1.3.26 * #224: Fix "borgmatic list --successful" with a slightly better heuristic for listing successful (non-checkpoint) archives. 1.3.25 * #223: Dead man's switch to detect when backups start failing silently, implemented via healthchecks.io hook integration. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/#healthchecks-hook * Documentation on monitoring and alerting options for borgmatic backups: https://torsion.org/borgmatic/docs/how-to/monitor-your-backups/ * Automatically rewrite links when developing on documentation locally. 1.3.24 * #86: Add "borgmatic list --successful" flag to only list successful (non-checkpoint) archives. * Add a suggestion form to all documentation pages, so users can submit ideas for improving the documentation. * Update documentation link to community Arch Linux borgmatic package. 1.3.23 * #174: More detailed error alerting via runtime context available in "on_error" hook. 1.3.22 * #144: When backups to one of several repositories fails, keep backing up to the other repositories and report errors afterwards. 1.3.21 * #192: User-defined hooks for global setup or cleanup that run before/after all actions. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/add-preparation-and-cleanup-steps-to-backups/ 1.3.20 * #205: More robust sample systemd service: boot delay, network dependency, lowered CPU/IO priority, etc. * #221: Fix "borgmatic create --progress" output so that it updates on the console in real-time. 1.3.19 * #219: Fix visibility of "borgmatic prune --stats" output. 1.3.18 * #220: Fix regression of argument parsing for default actions. 1.3.17 * #217: Fix error with "borgmatic check --only" command-line flag with "extract" consistency check. 1.3.16 * #210: Support for Borg check --verify-data flag via borgmatic "data" consistency check. * #210: Override configured consistency checks via "borgmatic check --only" command-line flag. 
* When generating sample configuration with generate-borgmatic-config, add a space after each "#" comment indicator. 1.3.15 * #208: Fix for traceback when the "checks" option has an empty value. * #209: Bypass Borg error about a moved repository via "relocated_repo_access_is_ok" option in borgmatic storage configuration section. * #213: Reorder arguments passed to Borg to fix duplicate directories when using Borg patterns. * #214: Fix for hook erroring with exit code 1 not being interpreted as an error. 1.3.14 * #204: Do not treat Borg warnings (exit code 1) as failures. * When validating configuration files, require strings instead of allowing any scalar type. 1.3.13 * #199: Add note to documentation about using spaces instead of tabs for indentation, as YAML does not allow tabs. * #203: Fix compatibility with ruamel.yaml 0.16.x. * If a "prefix" option in borgmatic's configuration has an empty value (blank or ""), then disable default prefix. 1.3.12 * Only log to syslog when run from a non-interactive console (e.g. a cron job). * Remove unicode byte order mark from syslog output so it doesn't show up as a literal in rsyslog output. See discussion on #197. 1.3.11 * #193: Pass through several "borg list" and "borg info" flags like --short, --format, --sort-by, --first, --last, etc. via borgmatic command-line flags. * Add borgmatic info --repository and --archive command-line flags to display info for individual repositories or archives. * Support for Borg --noatime, --noctime, and --nobirthtime flags via corresponding options in borgmatic configuration location section. 1.3.10 * #198: Fix for Borg create error output not showing up at borgmatic verbosity level zero. 1.3.9 * #195: Switch to command-line actions as more traditional sub-commands, e.g. "borgmatic create", "borgmatic prune", etc. However, the classic dashed options like "--create" still work! 1.3.8 * #191: Disable console color via "color" option in borgmatic configuration output section. 1.3.7 * #196: Fix for unclear error message for invalid YAML merge include. * #197: Don't color syslog output. * Change default syslog verbosity to show errors only. 1.3.6 * #53: Log to syslog in addition to existing console logging. Add --syslog-verbosity flag to customize the log level. See the documentation for more information: https://torsion.org/borgmatic/docs/how-to/inspect-your-backups/ * #178: Look for .yml configuration file extension in addition to .yaml. * #189: Set umask used when executing hooks via "umask" option in borgmatic hooks section. * Remove Python cache files before each Tox run. * Add #borgmatic Freenode IRC channel to documentation. * Add Borg/borgmatic hosting providers section to documentation. * Add files for building documentation into a Docker image for web serving. * Upgrade project build server from Drone 0.8 to 1.1. * Build borgmatic documentation during continuous integration. * We're nearly at 500 ★s on GitHub. We can do this! 1.3.5 * #153: Support for various Borg directory environment variables (BORG_CONFIG_DIR, BORG_CACHE_DIR, etc.) via options in borgmatic's storage configuration. * #177: Fix for regression with missing verbose log entries. 1.3.4 * Part of #125: Color borgmatic (but not Borg) output when using an interactive terminal. * #166: Run tests for all installed versions of Python. * #168: Update README with continuous integration badge. * #169: Automatically sort Python imports in code. * Document installing borgmatic with pip install --user instead of a system Python install. 
* Get more reproducible builds by pinning the versions of pip and tox used to run tests. * Factor out build/test configuration from tox.ini file. 1.3.3 * Add validate-borgmatic-config command, useful for validating borgmatic config generated by configuration management or even edited by hand. 1.3.2 * #160: Fix for hooks executing when using --dry-run. Now hooks are skipped during a dry run. 1.3.1 * #155: Fix for invalid JSON output when using multiple borgmatic configuration files. * #157: Fix for seemingly random filename ordering when running through a directory of configuration files. * Fix for empty JSON output when using --create --json. * Now capturing Borg output only when --json flag is used. Previously, borgmatic delayed Borg output even without the --json flag. 1.3.0 * #148: Configuration file includes and merging via "!include" tag to support reuse of common options across configuration files. 1.2.18 * #147: Support for Borg create/extract --numeric-owner flag via "numeric_owner" option in borgmatic's location section. 1.2.17 * #140: List the files within an archive via --list --archive option. 1.2.16 * #119: Include a sample borgmatic configuration file in the documentation. * #123: Support for Borg archive restoration via borgmatic --extract command-line flag. * Refactor documentation into multiple separate pages for clarity and findability. * Organize options within command-line help into logical groups. * Exclude tests from distribution packages. 1.2.15 * #127: Remove date echo from schema example, as it's not a substitute for real logging. * #132: Leave exclude_patterns glob expansion to Borg, since doing it in borgmatic leads to confusing behavior. * #136: Handle and format validation errors raised during argument parsing. * #138: Allow use of --stats flag when --create or --prune flags are implied. 1.2.14 * #103: When generating sample configuration with generate-borgmatic-config, document the defaults for each option. * #116: When running multiple configuration files, attempt all configuration files even if one of them errors. Log a summary of results at the end. * Add borgmatic --version command-line flag to get the current installed version number. 1.2.13 * #100: Support for --stats command-line flag independent of --verbosity. * #117: With borgmatic --init command-line flag, proceed without erroring if a repository already exists. 1.2.12 * #110: Support for Borg repository initialization via borgmatic --init command-line flag. * #111: Update Borg create --filter values so a dry run lists files to back up. * #113: Update README with link to a new/forked Docker image. * Prevent deprecated --excludes command-line option from being used. * Refactor README a bit to flow better for first-time users. * Update README with a few additional borgmatic packages (Debian and Ubuntu). 1.2.11 * #108: Support for Borg create --progress via borgmatic command-line flag. 1.2.10 * #105: Support for Borg --chunker-params create option via "chunker_params" option in borgmatic's storage section. 1.2.9 * #102: Fix for syntax error that occurred in Python 3.5 and below. * Make automated tests support running in Python 3.5. 1.2.8 * #73: Enable consistency checks for only certain repositories via "check_repositories" option in borgmatic's consistency configuration. Handy for large repositories that take forever to check. * Include link to issue tracker within various command output. * Run continuous integration tests on a matrix of Python and Borg versions. 
1.2.7 * #98: Support for Borg --keep-secondly prune option. * Use Black code formatter and Flake8 code checker as part of running automated tests. * Add an end-to-end automated test that actually integrates with Borg. * Set up continuous integration for borgmatic automated tests on projects.evoworx.org. 1.2.6 * Fix generated configuration to also include a "keep_daily" value so pruning works out of the box. 1.2.5 * #57: When generating sample configuration with generate-borgmatic-config, comment out all optional configuration so as to streamline the initial configuration process. 1.2.4 * Fix for archive checking traceback due to parameter mismatch. 1.2.3 * #64, #90, #92: Rewrite of logging system. Now verbosity flags passed to Borg are derived from borgmatic's log level. Note that the output of borgmatic might slightly change. * Part of #80: Support for Borg create --read-special via "read_special" option in borgmatic's location configuration. * #87: Support for Borg create --checkpoint-interval via "checkpoint_interval" option in borgmatic's storage configuration. * #88: Fix declared pykwalify compatibility version range in setup.py to prevent use of ancient versions of pykwalify with large version numbers. * #89: Pass --show-rc option to Borg when at highest verbosity level. * #94: Support for Borg --json option via borgmatic command-line to --create archives. 1.2.2 * #85: Fix compatibility issue between pykwalify and ruamel.yaml 0.15.52, which manifested in borgmatic as a pykwalify RuleError. 1.2.1 * Skip before/after backup hooks when only doing --prune, --check, --list, and/or --info. * #71: Support for XDG_CONFIG_HOME environment variable for specifying alternate user ~/.config/ path. * #74, #83: Support for Borg --json option via borgmatic command-line to --list archives or show archive --info in JSON format, ideal for programmatic consumption. * #38, #76: Upgrade ruamel.yaml compatibility version range and fix support for Python 3.7. * #77: Skip non-"*.yaml" config filenames in /etc/borgmatic.d/ so as not to parse backup files, editor swap files, etc. * #81: Document user-defined hooks run before/after backup, or on error. * Add code style guidelines to the documentation. 1.2.0 * #61: Support for Borg --list option via borgmatic command-line to list all archives. * #61: Support for Borg --info option via borgmatic command-line to display summary information. * #62: Update README to mention other ways of installing borgmatic. * Support for Borg --prefix option for consistency checks via "prefix" option in borgmatic's consistency configuration. * Add introductory screencast link to documentation. * #59: Ignore "check_last" and consistency "prefix" when "archives" not in consistency checks. * #60: Add "Persistent" flag to systemd timer example. * #63: Support for Borg --nobsdflags option to skip recording bsdflags (e.g. NODUMP, IMMUTABLE) in archive. * #69: Support for Borg prune --umask option using value of existing "umask" option in borgmatic's storage configuration. * Update tox.ini to only assume Python 3.x instead of Python 3.4 specifically. * Add ~/.config/borgmatic/config.yaml to default configuration path probing. * Document how to develop on and contribute to borgmatic. 1.1.15 * Support for Borg BORG_PASSCOMMAND environment variable to read a password from an external file. * Fix for Borg create error when using borgmatic's --dry-run and --verbosity options together. 
Work-around for behavior introduced in Borg 1.1.3: https://github.com/borgbackup/borg/issues/3298 * #55: Fix for missing tags/releases on Gitea and GitHub project hosting. * #56: Support for Borg --lock-wait option for the maximum wait for a repository/cache lock. * #58: Support for using tilde in exclude_patterns to reference home directory. 1.1.14 * #49: Fix for typo in --patterns-from option. * #47: Support for Borg --dry-run option via borgmatic command-line. 1.1.13 * #54: Fix for incorrect consistency check flags passed to Borg when all three checks ("repository", "archives", and "extract") are specified in borgmatic configuration. * #48: Add "local_path" to configuration for specifying an alternative Borg executable path. * #49: Support for Borg experimental --patterns-from and --patterns options for specifying mixed includes/excludes. * Moved issue tracker from Taiga to integrated Gitea tracker at https://projects.torsion.org/borgmatic-collective/borgmatic/issues 1.1.12 * #46: Declare dependency on pykwalify 1.6 or above, as older versions yield "Unknown key: version" rule errors. * Support for Borg --keep-minutely prune option. 1.1.11 * #26: Add "ssh_command" to configuration for specifying a custom SSH command or options. * Fix for incorrect /etc/borgmatic.d/ configuration path probing on macOS. This problem manifested as an error on startup: "[Errno 2] No such file or directory: '/etc/borgmatic.d'". 1.1.10 * Pass several Unix signals through to child processes like Borg. This means that Borg now properly shuts down if borgmatic is terminated (e.g. due to a system suspend). * #30: Support for using tilde in repository paths to reference home directory. * #43: Support for Borg --files-cache option for setting the files cache operation mode. * #45: Support for Borg --remote-ratelimit option for limiting upload rate. * Log invoked Borg commands when at highest verbosity level. 1.1.9 * #17, #39: Support for user-defined hooks before/after backup, or on error. * #34: Improve clarity of logging spew at high verbosity levels. * #30: Support for using tilde in source directory path to reference home directory. * Require "prefix" in retention section when "archive_name_format" is set. This is to avoid accidental pruning of archives with a different archive name format. For similar reasons, default "prefix" to "{hostname}-" if not specified. * Convert main source repository from Mercurial to Git. * Update dead links to Borg documentation. 1.1.8 * #40: Fix to make /etc/borgmatic/config.yaml optional rather than required when using the default config paths. 1.1.7 * #29: Add "archive_name_format" to configuration for customizing archive names. * Fix for traceback when "exclude_from" value is empty in configuration file. * When pruning, make highest verbosity level list archives kept and pruned. * Clarification of Python 3 pip usage in documentation. 1.1.6 * #13, #36: Support for Borg --exclude-from, --exclude-caches, and --exclude-if-present options. 1.1.5 * #35: New "extract" consistency check that performs a dry-run extraction of the most recent archive. 1.1.4 * #18: Added command-line flags for performing a borgmatic run with only pruning, creating, or checking enabled. This supports use cases like running consistency checks from a different cron job with a different frequency, or running pruning with a different verbosity level. 1.1.3 * #15: Support for running multiple config files in /etc/borgmatic.d/ from a single borgmatic run. 
 * Fix for generate-borgmatic-config writing config with invalid one_file_system value.

1.1.2
 * #33: Fix for passing check_last as integer to subprocess when calling Borg.

1.1.1
 * Part of #33: Fix for upgrade-borgmatic-config converting check_last option as a string instead of
   an integer.
 * Fix for upgrade-borgmatic-config erroring when consistency checks option is not present.

1.1.0
 * Switched config file format to YAML. Run upgrade-borgmatic-config to upgrade.
 * Added generate-borgmatic-config command for initial config creation.
 * Dropped Python 2 support. Now Python 3 only.
 * #19: Fix for README mention of sample files not included in package.
 * #23: Sample files for triggering borgmatic from a systemd timer.
 * Support for backing up to multiple repositories.
 * To free up space, now pruning backups prior to creating a new backup.
 * Enabled test coverage output during tox runs.
 * Added logo.

1.0.3
 * #22: Fix for verbosity flag not actually causing verbose output.

1.0.2
 * #21: Fix for traceback when remote_path option is missing.

1.0.1
 * #20: Support for Borg's --remote-path option to use an alternate Borg executable. See
   sample/config.

1.0.0
 * Attic is no longer supported, as there hasn't been any recent development on it. Dropping Attic
   support will allow faster iteration on Borg-specific features. If you're still using Attic, this
   is a good time to switch to Borg!
 * Project renamed from atticmatic to borgmatic. See the borgmatic README for information on
   upgrading.

0.1.8
 * Fix for handling of spaces in source_directories which resulted in backing up everything.
 * Fix for broken links to Borg documentation.
 * At verbosity zero, suppressing Borg check stderr spew about "Checking segments".
 * Support for Borg --one-file-system.
 * Support for Borg create --umask.
 * Support for file globs in source_directories.

0.1.7
 * #12: Fixed parsing of punctuation in configuration file.
 * Better error message when configuration file is missing.

0.1.6
 * #10: New configuration option for the encryption passphrase.
 * #11: Support for Borg's new archive compression feature.

0.1.5
 * Changes to support release on PyPI. Now pip installable by name!

0.1.4
 * Adding test that setup.py version matches release version.

0.1.3
 * #2: Add support for "borg check --last N" to Borg backend.

0.1.2
 * As a convenience to new users, allow a missing default excludes file.
 * New issue tracker, linked from documentation.

0.1.1
 * Adding borgmatic cron example, and updating documentation to refer to it.

0.1.0
 * New "borgmatic" command to support Borg backup software, a fork of Attic.

0.0.7
 * Flag for multiple levels of verbosity: some, and lots.
 * Improved mocking of Python builtins in unit tests.

0.0.6
 * New configuration section for customizing which Attic consistency checks run, if any.

0.0.5
 * Fixed regression with --verbose output being buffered. This means dropping the helpful error
   message introduced in 0.0.4.

0.0.4
 * Now using tox to run tests against multiple versions of Python in one go.
 * Helpful error message about how to create a repository if one is missing.
 * Troubleshooting section with steps to deal with broken pipes.
 * Nosetests config file (setup.cfg) with defaults.

0.0.3
 * After pruning, run attic's consistency checks on all archives.
 * Integration tests for argument parsing.
 * Documentation updates about repository encryption.

0.0.2
 * Configuration support for additional attic prune flags: keep_within, keep_hourly, keep_yearly,
   and prefix.

0.0.1
 * Initial release.
borgmatic/README.md000066400000000000000000000221501510202216200142730ustar00rootroot00000000000000---
title: borgmatic
permalink: index.html
---

borgmatic logo

borgmatic is simple, configuration-driven backup software for servers and workstations. Protect your files with client-side encryption. Back up your databases too. Monitor it all with integrated third-party services.

The canonical home of borgmatic is at https://torsion.org/borgmatic/

Here's an example configuration file:

```yaml
# List of source directories to back up.
source_directories:
    - /home
    - /etc

# Paths of local or remote repositories to back up to.
repositories:
    - path: ssh://k8pDxu32@k8pDxu32.repo.borgbase.com/./repo
      label: borgbase
    - path: /var/lib/backups/local.borg
      label: local

# Retention policy for how many backups to keep.
keep_daily: 7
keep_weekly: 4
keep_monthly: 6

# List of checks to run to validate your backups.
checks:
    - name: repository
    - name: archives
      frequency: 2 weeks

# Custom preparation scripts to run.
commands:
    - before: action
      when: [create]
      run: [prepare-for-backup.sh]

# Databases to dump and include in backups.
postgresql_databases:
    - name: users

# Third-party services to notify you if backups aren't happening.
healthchecks:
    ping_url: https://hc-ping.com/be067061-cf96-4412-8eae-62b0c50d6a8c
```

borgmatic is powered by [Borg Backup](https://www.borgbackup.org/).

## Integrations

### Data

PostgreSQL MySQL MariaDB MongoDB SQLite OpenZFS Btrfs LVM rclone BorgBase

### Monitoring

Healthchecks Uptime Kuma Cronitor Cronhub PagerDuty Pushover ntfy Loki Apprise Zabbix Sentry

### Credentials

Sentry Docker Podman

## Getting started

Your first step is to [install and configure borgmatic](https://torsion.org/borgmatic/how-to/set-up-backups/).

For additional documentation, check out the links above (left panel on wide screens) for borgmatic how-to and reference guides.

## Hosting providers

Need somewhere to store your encrypted off-site backups? The following hosting providers include specific support for Borg/borgmatic—and fund borgmatic development and hosting when you use these referral links to sign up:
  • BorgBase: Borg hosting service with support for monitoring, 2FA, and append-only repos
  • Hetzner: A "storage box" that includes support for Borg
Additionally, rsync.net has a compatible storage offering, but does not fund borgmatic development or hosting. ## Support and contributing ### Issues Are you experiencing an issue with borgmatic? Or do you have an idea for a feature enhancement? Head on over to our [issue tracker](https://projects.torsion.org/borgmatic-collective/borgmatic/issues). In order to create a new issue or add a comment, you'll need to [register](https://projects.torsion.org/user/sign_up?invite_code=borgmatic) first. If you prefer to use an existing GitHub account, you can skip account creation and [login directly](https://projects.torsion.org/user/login). Also see the [security policy](https://torsion.org/borgmatic/security-policy/) for any security issues. ### Social Follow borgmatic on Mastodon. ### Chat To chat with borgmatic developers or users, check out the `#borgmatic` IRC channel on Libera Chat, either via web chat or a native IRC client. If you don't get a response right away, please hang around a while—or file a ticket instead. ### Other Other questions or comments? Contact [witten@torsion.org](mailto:witten@torsion.org). ### Contributing borgmatic [source code is available](https://projects.torsion.org/borgmatic-collective/borgmatic) and also has a read-only mirror on [GitHub](https://github.com/borgmatic-collective/borgmatic) for convenience. borgmatic is licensed under the GNU General Public License version 3 or any later version. If you'd like to contribute to borgmatic development, please feel free to submit a [pull request](https://projects.torsion.org/borgmatic-collective/borgmatic/pulls) or open an [issue](https://projects.torsion.org/borgmatic-collective/borgmatic/issues) to discuss your idea. Note that you'll need to [register](https://projects.torsion.org/user/sign_up?invite_code=borgmatic) first. In general, contributions are very welcome. We don't bite! Also, please check out the [borgmatic development how-to](https://torsion.org/borgmatic/how-to/develop-on-borgmatic/) for info on cloning source code, running tests, etc. ### Recent contributors Thanks to all borgmatic contributors! There are multiple ways to contribute to this project, so the following includes those who have fixed bugs, contributed features, *or* filed tickets. {% include borgmatic/contributors.html %}borgmatic/SECURITY.md000066400000000000000000000012221510202216200146020ustar00rootroot00000000000000--- title: Security policy permalink: security-policy/index.html --- ## Supported versions While we want to hear about security vulnerabilities in all versions of borgmatic, security fixes are only made to the most recently released version. It's not practical for our small volunteer effort to maintain multiple release branches and put out separate security patches for each. ## Reporting a vulnerability If you find a security vulnerability, please [file a ticket](https://torsion.org/borgmatic/#issues) or [send email directly](mailto:witten@torsion.org) as appropriate. You should expect to hear back within a few days at most and generally sooner. 
borgmatic/borgmatic/000077500000000000000000000000001510202216200147635ustar00rootroot00000000000000borgmatic/borgmatic/__init__.py000066400000000000000000000000001510202216200170620ustar00rootroot00000000000000borgmatic/borgmatic/actions/000077500000000000000000000000001510202216200164235ustar00rootroot00000000000000borgmatic/borgmatic/actions/__init__.py000066400000000000000000000000001510202216200205220ustar00rootroot00000000000000borgmatic/borgmatic/actions/arguments.py000066400000000000000000000005071510202216200210040ustar00rootroot00000000000000import argparse def update_arguments(arguments, **updates): ''' Given an argparse.Namespace instance of command-line arguments and one or more keyword argument updates to perform, return a copy of the arguments with those updates applied. ''' return argparse.Namespace(**dict(vars(arguments), **updates)) borgmatic/borgmatic/actions/borg.py000066400000000000000000000015541510202216200177330ustar00rootroot00000000000000import logging import borgmatic.borg.borg import borgmatic.borg.repo_list logger = logging.getLogger(__name__) def run_borg( repository, config, local_borg_version, borg_arguments, global_arguments, local_path, remote_path, ): ''' Run the "borg" action for the given repository. ''' logger.info('Running arbitrary Borg command') archive_name = borgmatic.borg.repo_list.resolve_archive_name( repository['path'], borg_arguments.archive, config, local_borg_version, global_arguments, local_path, remote_path, ) borgmatic.borg.borg.run_arbitrary_borg( repository['path'], config, local_borg_version, options=borg_arguments.options, archive=archive_name, local_path=local_path, remote_path=remote_path, ) borgmatic/borgmatic/actions/break_lock.py000066400000000000000000000011101510202216200210620ustar00rootroot00000000000000import logging import borgmatic.borg.break_lock logger = logging.getLogger(__name__) def run_break_lock( repository, config, local_borg_version, break_lock_arguments, global_arguments, local_path, remote_path, ): ''' Run the "break-lock" action for the given repository. ''' logger.info('Breaking repository and cache locks') borgmatic.borg.break_lock.break_lock( repository['path'], config, local_borg_version, global_arguments, local_path=local_path, remote_path=remote_path, ) borgmatic/borgmatic/actions/change_passphrase.py000066400000000000000000000012261510202216200224540ustar00rootroot00000000000000import logging import borgmatic.borg.change_passphrase logger = logging.getLogger(__name__) def run_change_passphrase( repository, config, local_borg_version, change_passphrase_arguments, global_arguments, local_path, remote_path, ): ''' Run the "key change-passphrase" action for the given repository. 
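    For example (an illustrative invocation; the repository path here is hypothetical), this is
    the handler that ultimately services a command line along the lines of:

        borgmatic key change-passphrase --repository /var/lib/backups/local.borg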
''' logger.info('Changing repository passphrase') borgmatic.borg.change_passphrase.change_passphrase( repository['path'], config, local_borg_version, change_passphrase_arguments, global_arguments, local_path=local_path, remote_path=remote_path, ) borgmatic/borgmatic/actions/check.py000066400000000000000000000674001510202216200200610ustar00rootroot00000000000000import calendar import contextlib import datetime import hashlib import itertools import logging import os import pathlib import random import shlex import shutil import textwrap import borgmatic.actions.config.bootstrap import borgmatic.actions.pattern import borgmatic.borg.check import borgmatic.borg.create import borgmatic.borg.environment import borgmatic.borg.extract import borgmatic.borg.list import borgmatic.borg.pattern import borgmatic.borg.repo_list import borgmatic.borg.state import borgmatic.config.paths import borgmatic.execute import borgmatic.hooks.command DEFAULT_CHECKS = ( {'name': 'repository', 'frequency': '1 month'}, {'name': 'archives', 'frequency': '1 month'}, ) logger = logging.getLogger(__name__) def parse_checks(config, only_checks=None): ''' Given a configuration dict with a "checks" sequence of dicts and an optional list of override checks, return a tuple of named checks to run. For example, given a config of: {'checks': ({'name': 'repository'}, {'name': 'archives'})} This will be returned as: ('repository', 'archives') If no "checks" option is present in the config, return the DEFAULT_CHECKS. If a checks value has a name of "disabled", return an empty tuple, meaning that no checks should be run. ''' checks = only_checks or tuple( check_config['name'] for check_config in (config.get('checks', None) or DEFAULT_CHECKS) ) checks = tuple(check.lower() for check in checks) if 'disabled' in checks: logger.warning( 'The "disabled" value for the "checks" option is deprecated and will be removed from a future release; use "skip_actions" instead', ) if len(checks) > 1: logger.warning( 'Multiple checks are configured, but one of them is "disabled"; not running any checks', ) return () return checks def parse_frequency(frequency): ''' Given a frequency string with a number and a unit of time, return a corresponding datetime.timedelta instance or None if the frequency is None or "always". For instance, given "3 weeks", return datetime.timedelta(weeks=3) Raise ValueError if the given frequency cannot be parsed. ''' if not frequency: return None frequency = frequency.strip().lower() if frequency == 'always': return None try: number, time_unit = frequency.split(' ') number = int(number) except ValueError: raise ValueError(f"Could not parse consistency check frequency '{frequency}'") if not time_unit.endswith('s'): time_unit += 's' if time_unit == 'months': number *= 30 time_unit = 'days' elif time_unit == 'years': number *= 365 time_unit = 'days' try: return datetime.timedelta(**{time_unit: number}) except TypeError: raise ValueError(f"Could not parse consistency check frequency '{frequency}'") WEEKDAY_DAYS = calendar.day_name[0:5] WEEKEND_DAYS = calendar.day_name[5:7] def filter_checks_on_frequency( config, borg_repository_id, checks, force, archives_check_id=None, datetime_now=datetime.datetime.now, ): ''' Given a configuration dict with a "checks" sequence of dicts, a Borg repository ID, a sequence of checks, whether to force checks to run, and an ID for the archives check potentially being run (if any), filter down those checks based on the configured "frequency" for each check as compared to its check time file. 
In other words, a check whose check time file's timestamp is too new (based on the configured frequency) will get cut from the returned sequence of checks. Example: config = { 'checks': [ { 'name': 'archives', 'frequency': '2 weeks', }, ] } When this function is called with that config and "archives" in checks, "archives" will get filtered out of the returned result if its check time file is newer than 2 weeks old, indicating that it's not yet time to run that check again. Raise ValueError if a frequency cannot be parsed. ''' if not checks: return checks filtered_checks = list(checks) if force: return tuple(filtered_checks) for check_config in config.get('checks', DEFAULT_CHECKS): check = check_config['name'] if checks and check not in checks: continue only_run_on = check_config.get('only_run_on') if only_run_on: # Use a dict instead of a set to preserve ordering. days = dict.fromkeys(only_run_on) if 'weekday' in days: days = { **dict.fromkeys(day for day in days if day != 'weekday'), **dict.fromkeys(WEEKDAY_DAYS), } if 'weekend' in days: days = { **dict.fromkeys(day for day in days if day != 'weekend'), **dict.fromkeys(WEEKEND_DAYS), } if calendar.day_name[datetime_now().weekday()] not in days: logger.info( f"Skipping {check} check due to day of the week; check only runs on {'/'.join(day.title() for day in days)} (use --force to check anyway)", ) filtered_checks.remove(check) continue frequency_delta = parse_frequency(check_config.get('frequency')) if not frequency_delta: continue check_time = probe_for_check_time(config, borg_repository_id, check, archives_check_id) if not check_time: continue # If we've not yet reached the time when the frequency dictates we're ready for another # check, skip this check. if datetime_now() < check_time + frequency_delta: remaining = check_time + frequency_delta - datetime_now() logger.info( f'Skipping {check} check due to configured frequency; {remaining} until next check (use --force to check anyway)', ) filtered_checks.remove(check) return tuple(filtered_checks) def make_archives_check_id(archive_filter_flags): ''' Given a sequence of flags to filter archives, return a unique hash corresponding to those particular flags. If there are no flags, return None. ''' if not archive_filter_flags: return None return hashlib.sha256(' '.join(archive_filter_flags).encode()).hexdigest() def make_check_time_path(config, borg_repository_id, check_type, archives_check_id=None): ''' Given a configuration dict, a Borg repository ID, the name of a check type ("repository", "archives", etc.), and a unique hash of the archives filter flags, return a path for recording that check's time (the time of that check last occurring). ''' borgmatic_state_directory = borgmatic.config.paths.get_borgmatic_state_directory(config) if check_type in {'archives', 'data'}: return os.path.join( borgmatic_state_directory, 'checks', borg_repository_id, check_type, archives_check_id if archives_check_id else 'all', ) return os.path.join( borgmatic_state_directory, 'checks', borg_repository_id, check_type, ) def write_check_time(path): # pragma: no cover ''' Record a check time of now as the modification time of the given path. ''' logger.debug(f'Writing check time at {path}') os.makedirs(os.path.dirname(path), mode=0o700, exist_ok=True) pathlib.Path(path).touch(mode=0o600) def read_check_time(path): ''' Return the check time based on the modification time of the given path. Return None if the path doesn't exist. 
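    For example (an illustrative path and return value; the real path depends on the configured
    state directory, and the timestamp on when the check actually last ran):

        >>> read_check_time('/root/.local/state/borgmatic/checks/1234abcd/repository')  # doctest: +SKIP
        datetime.datetime(2025, 1, 1, 12, 0)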
''' logger.debug(f'Reading check time from {path}') try: return datetime.datetime.fromtimestamp(os.stat(path).st_mtime) # noqa: DTZ006 except FileNotFoundError: return None def probe_for_check_time(config, borg_repository_id, check, archives_check_id): ''' Given a configuration dict, a Borg repository ID, the name of a check type ("repository", "archives", etc.), and a unique hash of the archives filter flags, return the corresponding check time or None if such a check time does not exist. When the check type is "archives" or "data", this function probes two different paths to find the check time, e.g.: ~/.borgmatic/checks/1234567890/archives/9876543210 ~/.borgmatic/checks/1234567890/archives/all ... and returns the maximum modification time of the files found (if any). The first path represents a more specific archives check time (a check on a subset of archives), and the second is a fallback to the last "all" archives check. For other check types, this function reads from a single check time path, e.g.: ~/.borgmatic/checks/1234567890/repository ''' check_times = ( read_check_time(group[0]) for group in itertools.groupby( ( make_check_time_path(config, borg_repository_id, check, archives_check_id), make_check_time_path(config, borg_repository_id, check), ), ) ) try: return max(check_time for check_time in check_times if check_time) except ValueError: return None def upgrade_check_times(config, borg_repository_id): ''' Given a configuration dict and a Borg repository ID, upgrade any corresponding check times on disk from old-style paths to new-style paths. One upgrade performed is moving the checks directory from: {borgmatic_source_directory}/checks (e.g., ~/.borgmatic/checks) to: {borgmatic_state_directory}/checks (e.g. ~/.local/state/borgmatic) Another upgrade is renaming an archive or data check path that looks like: {borgmatic_state_directory}/checks/1234567890/archives to: {borgmatic_state_directory}/checks/1234567890/archives/all ''' borgmatic_source_checks_path = os.path.join( borgmatic.config.paths.get_borgmatic_source_directory(config), 'checks', ) borgmatic_state_path = borgmatic.config.paths.get_borgmatic_state_directory(config) borgmatic_state_checks_path = os.path.join(borgmatic_state_path, 'checks') if os.path.exists(borgmatic_source_checks_path) and not os.path.exists( borgmatic_state_checks_path, ): logger.debug( f'Upgrading archives check times directory from {borgmatic_source_checks_path} to {borgmatic_state_checks_path}', ) os.makedirs(borgmatic_state_path, mode=0o700, exist_ok=True) shutil.move(borgmatic_source_checks_path, borgmatic_state_checks_path) for check_type in ('archives', 'data'): new_path = make_check_time_path(config, borg_repository_id, check_type, 'all') old_path = os.path.dirname(new_path) temporary_path = f'{old_path}.temp' if not os.path.isfile(old_path) and not os.path.isfile(temporary_path): continue logger.debug(f'Upgrading archives check time file from {old_path} to {new_path}') with contextlib.suppress(FileNotFoundError): shutil.move(old_path, temporary_path) os.mkdir(old_path) shutil.move(temporary_path, new_path) def collect_spot_check_source_paths( repository, config, local_borg_version, global_arguments, local_path, remote_path, borgmatic_runtime_directory, bootstrap_config_paths, ): ''' Given a repository configuration dict, a configuration dict, the local Borg version, global arguments as an argparse.Namespace instance, the local Borg path, the remote Borg path, and the bootstrap configuration paths as read from an archive's manifest, 
collect the source paths that Borg would use in an actual create (but only include files). As part of this, include the bootstrap configuration paths, so that any configuration files included in the archive to support bootstrapping are also spot checked. ''' stream_processes = any( borgmatic.hooks.dispatch.call_hooks( 'use_streaming', config, borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE, ).values(), ) working_directory = borgmatic.config.paths.get_working_directory(config) (create_flags, create_positional_arguments, _) = borgmatic.borg.create.make_base_create_command( dry_run=True, repository_path=repository['path'], # Omit "progress" because it interferes with "list_details". config=dict( {option: value for option, value in config.items() if option != 'progress'}, list_details=True, ), patterns=borgmatic.actions.pattern.process_patterns( borgmatic.actions.pattern.collect_patterns(config) + tuple( borgmatic.borg.pattern.Pattern( config_path, source=borgmatic.borg.pattern.Pattern_source.INTERNAL, ) for config_path in bootstrap_config_paths ), config, working_directory, ), local_borg_version=local_borg_version, global_arguments=global_arguments, borgmatic_runtime_directory=borgmatic_runtime_directory, local_path=local_path, remote_path=remote_path, stream_processes=stream_processes, ) working_directory = borgmatic.config.paths.get_working_directory(config) paths_output = borgmatic.execute.execute_command_and_capture_output( create_flags + create_positional_arguments, capture_stderr=True, environment=borgmatic.borg.environment.make_environment(config), working_directory=working_directory, borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) paths = tuple( path_line.split(' ', 1)[1] for path_line in paths_output.splitlines() if path_line and path_line.startswith(('- ', '+ ')) ) return tuple( path for path in paths if os.path.isfile(os.path.join(working_directory or '', path)) ) BORG_DIRECTORY_FILE_TYPE = 'd' BORG_PIPE_FILE_TYPE = 'p' def collect_spot_check_archive_paths( repository, archive, config, local_borg_version, global_arguments, local_path, remote_path, borgmatic_runtime_directory, ): ''' Given a repository configuration dict, the name of the latest archive, a configuration dict, the local Borg version, global arguments as an argparse.Namespace instance, the local Borg path, the remote Borg path, and the borgmatic runtime directory, collect the paths from the given archive (but only include files and symlinks and exclude borgmatic runtime directories). These paths do not have a leading slash, as that's how Borg stores them. As a result, we don't know whether they came from absolute or relative source directories. 
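    As an illustrative sketch of the filtering (hypothetical "{type} {path}" listing lines; only
    the 'd' and 'p' type characters are excluded, per BORG_DIRECTORY_FILE_TYPE and
    BORG_PIPE_FILE_TYPE below, so the other type characters shown here are just examples of
    non-excluded entries):

        d etc               <- excluded (directory)
        p run/some.fifo     <- excluded (named pipe)
        - etc/passwd        <- included (regular file)
        l etc/localtime     <- included (symlink)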
''' borgmatic_source_directory = borgmatic.config.paths.get_borgmatic_source_directory(config) return tuple( path for line in borgmatic.borg.list.capture_archive_listing( repository['path'], archive, config, local_borg_version, global_arguments, path_format='{type} {path}{NUL}', local_path=local_path, remote_path=remote_path, ) for (file_type, path) in (line.split(' ', 1),) if file_type not in {BORG_DIRECTORY_FILE_TYPE, BORG_PIPE_FILE_TYPE} if pathlib.Path('borgmatic') not in pathlib.Path(path).parents if pathlib.Path(borgmatic_source_directory.lstrip(os.path.sep)) not in pathlib.Path(path).parents if pathlib.Path(borgmatic_runtime_directory.lstrip(os.path.sep)) not in pathlib.Path(path).parents ) SAMPLE_PATHS_SUBSET_COUNT = 5000 def compare_spot_check_hashes( repository, archive, config, local_borg_version, global_arguments, local_path, remote_path, source_paths, ): ''' Given a repository configuration dict, the name of the latest archive, a configuration dict, the local Borg version, global arguments as an argparse.Namespace instance, the local Borg path, the remote Borg path, and spot check source paths, compare the hashes for a sampling of the source paths with hashes from corresponding paths in the given archive. Return a sequence of the paths that fail that hash comparison. ''' # Based on the configured sample percentage, come up with a list of random sample files from the # source directories. spot_check_config = next(check for check in config['checks'] if check['name'] == 'spot') sample_count = max( int(len(source_paths) * (min(spot_check_config['data_sample_percentage'], 100) / 100)), 1, ) source_sample_paths = tuple(random.SystemRandom().sample(source_paths, sample_count)) working_directory = borgmatic.config.paths.get_working_directory(config) hashable_source_sample_path = { source_path for source_path in source_sample_paths for full_source_path in (os.path.join(working_directory or '', source_path),) if os.path.exists(full_source_path) if not os.path.islink(full_source_path) } logger.debug( f'Sampling {sample_count} source paths (~{spot_check_config["data_sample_percentage"]}%) for spot check', ) source_sample_paths_iterator = iter(source_sample_paths) source_hashes = {} archive_hashes = {} # Only hash a few thousand files at a time (a subset of the total paths) to avoid an "Argument # list too long" shell error. while True: # Hash each file in the sample paths (if it exists). source_sample_paths_subset = tuple( itertools.islice(source_sample_paths_iterator, SAMPLE_PATHS_SUBSET_COUNT), ) if not source_sample_paths_subset: break hash_output = borgmatic.execute.execute_command_and_capture_output( tuple( shlex.quote(part) for part in shlex.split(spot_check_config.get('xxh64sum_command', 'xxh64sum')) ) + tuple( path for path in source_sample_paths_subset if path in hashable_source_sample_path ), working_directory=working_directory, ) source_hashes.update( **dict( (reversed(line.split(' ', 1)) for line in hash_output.splitlines()), # Represent non-existent files as having empty hashes so the comparison below still # works. Same thing for filesystem links, since Borg produces empty archive hashes # for them. **{ path: '' for path in source_sample_paths_subset if path not in hashable_source_sample_path }, ), ) # Get the hash for each file in the archive. 
archive_hashes.update( **dict( reversed(line.split(' ', 1)) for line in borgmatic.borg.list.capture_archive_listing( repository['path'], archive, config, local_borg_version, global_arguments, list_paths=source_sample_paths_subset, path_format='{xxh64} {path}{NUL}', local_path=local_path, remote_path=remote_path, ) if line ), ) # Compare the source hashes with the archive hashes to see how many match. failing_paths = [] for path, source_hash in source_hashes.items(): archive_hash = archive_hashes.get(path.lstrip(os.path.sep)) if archive_hash is not None and archive_hash == source_hash: continue failing_paths.append(path) return tuple(failing_paths) MAX_SPOT_CHECK_PATHS_LENGTH = 1000 def spot_check( repository, config, local_borg_version, global_arguments, local_path, remote_path, borgmatic_runtime_directory, ): ''' Given a repository dict, a loaded configuration dict, the local Borg version, global arguments as an argparse.Namespace instance, the local Borg path, the remote Borg path, and the borgmatic runtime directory, perform a spot check for the latest archive in the given repository. A spot check compares file counts and also the hashes for a random sampling of source files on disk to those stored in the latest archive. If any differences are beyond configured tolerances, then the check fails. ''' logger.debug('Running spot check') try: spot_check_config = next( check for check in config.get('checks', ()) if check.get('name') == 'spot' ) except StopIteration: raise ValueError('Cannot run spot check because it is unconfigured') if spot_check_config['data_tolerance_percentage'] > spot_check_config['data_sample_percentage']: raise ValueError( 'The data_tolerance_percentage must be less than or equal to the data_sample_percentage', ) archive = borgmatic.borg.repo_list.resolve_archive_name( repository['path'], 'latest', config, local_borg_version, global_arguments, local_path, remote_path, ) logger.debug(f'Using archive {archive} for spot check') source_paths = collect_spot_check_source_paths( repository, config, local_borg_version, global_arguments, local_path, remote_path, borgmatic_runtime_directory, bootstrap_config_paths=borgmatic.actions.config.bootstrap.load_config_paths_from_archive( repository['path'], archive, config, local_borg_version, global_arguments, borgmatic_runtime_directory, ), ) logger.debug(f'{len(source_paths)} total source paths for spot check') archive_paths = collect_spot_check_archive_paths( repository, archive, config, local_borg_version, global_arguments, local_path, remote_path, borgmatic_runtime_directory, ) logger.debug(f'{len(archive_paths)} total archive paths for spot check') if len(source_paths) == 0: truncated_archive_paths = textwrap.shorten( ', '.join(set(archive_paths)) or 'none', width=MAX_SPOT_CHECK_PATHS_LENGTH, placeholder=' ...', ) logger.debug(f'Paths in latest archive but not source paths: {truncated_archive_paths}') raise ValueError( 'Spot check failed: There are no source paths to compare against the archive', ) # Calculate the percentage delta between the source paths count and the archive paths count, and # compare that delta to the configured count tolerance percentage. 
count_delta_percentage = abs(len(source_paths) - len(archive_paths)) / len(source_paths) * 100 if count_delta_percentage > spot_check_config['count_tolerance_percentage']: rootless_source_paths = {path.lstrip(os.path.sep) for path in source_paths} truncated_exclusive_source_paths = textwrap.shorten( ', '.join(rootless_source_paths - set(archive_paths)) or 'none', width=MAX_SPOT_CHECK_PATHS_LENGTH, placeholder=' ...', ) logger.debug( f'Paths in source paths but not latest archive: {truncated_exclusive_source_paths}', ) truncated_exclusive_archive_paths = textwrap.shorten( ', '.join(set(archive_paths) - rootless_source_paths) or 'none', width=MAX_SPOT_CHECK_PATHS_LENGTH, placeholder=' ...', ) logger.debug( f'Paths in latest archive but not source paths: {truncated_exclusive_archive_paths}', ) raise ValueError( f'Spot check failed: {count_delta_percentage:.2f}% file count delta between source paths and latest archive (tolerance is {spot_check_config["count_tolerance_percentage"]}%)', ) failing_paths = compare_spot_check_hashes( repository, archive, config, local_borg_version, global_arguments, local_path, remote_path, source_paths, ) # Error if the percentage of failing hashes exceeds the configured tolerance percentage. logger.debug(f'{len(failing_paths)} non-matching spot check hashes') data_tolerance_percentage = spot_check_config['data_tolerance_percentage'] failing_percentage = (len(failing_paths) / len(source_paths)) * 100 if failing_percentage > data_tolerance_percentage: truncated_failing_paths = textwrap.shorten( ', '.join(failing_paths), width=MAX_SPOT_CHECK_PATHS_LENGTH, placeholder=' ...', ) logger.debug( f'Source paths with data not matching the latest archive: {truncated_failing_paths}', ) raise ValueError( f'Spot check failed: {failing_percentage:.2f}% of source paths with data not matching the latest archive (tolerance is {data_tolerance_percentage}%)', ) logger.info( f'Spot check passed with a {count_delta_percentage:.2f}% file count delta and a {failing_percentage:.2f}% file data delta', ) def run_check( config_filename, repository, config, local_borg_version, check_arguments, global_arguments, local_path, remote_path, ): ''' Run the "check" action for the given repository. Raise ValueError if the Borg repository ID cannot be determined. 
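    At a high level (paraphrasing the implementation below), this:

      1. Determines the repository ID and upgrades any old-style check time files on disk.
      2. Filters the configured checks down by their configured frequencies.
      3. Delegates the "repository", "archives", and "data" checks to Borg, runs the "extract"
         and "spot" checks itself, and records a check time for each check that runs.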
''' logger.info('Running consistency checks') repository_id = borgmatic.borg.check.get_repository_id( repository['path'], config, local_borg_version, global_arguments, local_path=local_path, remote_path=remote_path, ) upgrade_check_times(config, repository_id) configured_checks = parse_checks(config, check_arguments.only_checks) archive_filter_flags = borgmatic.borg.check.make_archive_filter_flags( local_borg_version, config, configured_checks, check_arguments, ) archives_check_id = make_archives_check_id(archive_filter_flags) checks = filter_checks_on_frequency( config, repository_id, configured_checks, check_arguments.force, archives_check_id, ) borg_specific_checks = set(checks).intersection({'repository', 'archives', 'data'}) if borg_specific_checks: borgmatic.borg.check.check_archives( repository['path'], config, local_borg_version, check_arguments, global_arguments, borg_specific_checks, archive_filter_flags, local_path=local_path, remote_path=remote_path, ) for check in borg_specific_checks: write_check_time(make_check_time_path(config, repository_id, check, archives_check_id)) if 'extract' in checks: borgmatic.borg.extract.extract_last_archive_dry_run( config, local_borg_version, global_arguments, repository['path'], config.get('lock_wait'), local_path, remote_path, ) write_check_time(make_check_time_path(config, repository_id, 'extract')) if 'spot' in checks: with borgmatic.config.paths.Runtime_directory(config) as borgmatic_runtime_directory: spot_check( repository, config, local_borg_version, global_arguments, local_path, remote_path, borgmatic_runtime_directory, ) write_check_time(make_check_time_path(config, repository_id, 'spot')) borgmatic/borgmatic/actions/compact.py000066400000000000000000000017741510202216200204340ustar00rootroot00000000000000import logging import borgmatic.borg.compact import borgmatic.borg.feature import borgmatic.hooks.command logger = logging.getLogger(__name__) def run_compact( config_filename, repository, config, local_borg_version, compact_arguments, global_arguments, dry_run_label, local_path, remote_path, ): ''' Run the "compact" action for the given repository. ''' if borgmatic.borg.feature.available(borgmatic.borg.feature.Feature.COMPACT, local_borg_version): logger.info(f'Compacting segments{dry_run_label}') borgmatic.borg.compact.compact_segments( global_arguments.dry_run, repository['path'], config, local_borg_version, global_arguments, local_path=local_path, remote_path=remote_path, cleanup_commits=compact_arguments.cleanup_commits, ) else: # pragma: nocover logger.info('Skipping compact (only available/needed in Borg 1.2+)') borgmatic/borgmatic/actions/config/000077500000000000000000000000001510202216200176705ustar00rootroot00000000000000borgmatic/borgmatic/actions/config/__init__.py000066400000000000000000000000001510202216200217670ustar00rootroot00000000000000borgmatic/borgmatic/actions/config/bootstrap.py000066400000000000000000000120661510202216200222640ustar00rootroot00000000000000import json import logging import os import borgmatic.borg.extract import borgmatic.borg.repo_list import borgmatic.config.paths import borgmatic.hooks.command logger = logging.getLogger(__name__) def make_bootstrap_config(bootstrap_arguments): ''' Given the bootstrap arguments as an argparse.Namespace, return a corresponding config dict. 
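
    For example, given a namespace with these hypothetical values:

        bootstrap_arguments.borgmatic_source_directory == '/root/.borgmatic'
        bootstrap_arguments.local_path == 'borg'
        bootstrap_arguments.remote_path == None
        bootstrap_arguments.ssh_command == None
        bootstrap_arguments.user_runtime_directory == None

    the returned dict looks like:

        {
            'borgmatic_source_directory': '/root/.borgmatic',
            'local_path': 'borg',
            'remote_path': None,
            'relocated_repo_access_is_ok': True,
            'ssh_command': None,
            'user_runtime_directory': None,
        }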
''' return { 'borgmatic_source_directory': bootstrap_arguments.borgmatic_source_directory, 'local_path': bootstrap_arguments.local_path, 'remote_path': bootstrap_arguments.remote_path, # In case the repo has been moved or is accessed from a different path at the point of # bootstrapping. 'relocated_repo_access_is_ok': True, 'ssh_command': bootstrap_arguments.ssh_command, 'user_runtime_directory': bootstrap_arguments.user_runtime_directory, } def load_config_paths_from_archive( repository_path, archive_name, config, local_borg_version, global_arguments, borgmatic_runtime_directory, ): ''' Given a repository path, an archive name, a configuration dict, the local Borg version, the global arguments as an argparse.Namespace, and the borgmatic runtime directory, return the config paths from the manifest.json file in the borgmatic source directory or runtime directory within the repository archive. Raise ValueError if the manifest JSON is missing, can't be decoded, or doesn't contain the expected configuration path data. ''' # Probe for the manifest file in multiple locations, as the default location has moved to the # borgmatic runtime directory (which gets stored as just "/borgmatic" with Borg 1.4+). But we # still want to support reading the manifest from previously created archives as well. for base_directory in ( 'borgmatic', borgmatic.config.paths.make_runtime_directory_glob(borgmatic_runtime_directory), borgmatic.config.paths.get_borgmatic_source_directory(config), ): borgmatic_manifest_path = 'sh:' + os.path.join( base_directory, 'bootstrap', 'manifest.json', ) extract_process = borgmatic.borg.extract.extract_archive( global_arguments.dry_run, repository_path, archive_name, [borgmatic_manifest_path], config, local_borg_version, global_arguments, local_path=config.get('local_path', 'borg'), remote_path=config.get('remote_path'), extract_to_stdout=True, ) manifest_json = extract_process.stdout.read() if manifest_json: break else: raise ValueError( 'Cannot read configuration paths from archive due to missing archive or bootstrap manifest', ) try: manifest_data = json.loads(manifest_json) except json.JSONDecodeError as error: raise ValueError( f'Cannot read configuration paths from archive due to invalid bootstrap manifest JSON: {error}', ) try: return manifest_data['config_paths'] except KeyError: raise ValueError( 'Cannot read configuration paths from archive due to invalid bootstrap manifest', ) def run_bootstrap(bootstrap_arguments, global_arguments, local_borg_version): ''' Run the "bootstrap" action for the given repository. Raise ValueError if the bootstrap configuration could not be loaded. Raise CalledProcessError or OSError if Borg could not be run. 
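
    The bootstrap manifest read by load_config_paths_from_archive() above is expected to be JSON
    of this shape (the path value here is hypothetical):

        {"config_paths": ["/etc/borgmatic/config.yaml"]}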
    '''
    config = make_bootstrap_config(bootstrap_arguments)
    archive_name = borgmatic.borg.repo_list.resolve_archive_name(
        bootstrap_arguments.repository,
        bootstrap_arguments.archive,
        config,
        local_borg_version,
        global_arguments,
        local_path=bootstrap_arguments.local_path,
        remote_path=bootstrap_arguments.remote_path,
    )

    with borgmatic.config.paths.Runtime_directory(config) as borgmatic_runtime_directory:
        manifest_config_paths = load_config_paths_from_archive(
            bootstrap_arguments.repository,
            archive_name,
            config,
            local_borg_version,
            global_arguments,
            borgmatic_runtime_directory,
        )

    logger.info(f"Bootstrapping config paths: {', '.join(manifest_config_paths)}")

    borgmatic.borg.extract.extract_archive(
        global_arguments.dry_run,
        bootstrap_arguments.repository,
        archive_name,
        [config_path.lstrip(os.path.sep) for config_path in manifest_config_paths],
        # Only add progress here and not in the extract_archive() call above, because progress
        # conflicts with extract_to_stdout.
        dict(config, progress=bootstrap_arguments.progress or False),
        local_borg_version,
        global_arguments,
        local_path=bootstrap_arguments.local_path,
        remote_path=bootstrap_arguments.remote_path,
        extract_to_stdout=False,
        destination_path=bootstrap_arguments.destination,
        strip_components=bootstrap_arguments.strip_components,
    )
borgmatic/borgmatic/actions/config/generate.py000066400000000000000000000035551510202216200220440ustar00rootroot00000000000000import logging

import borgmatic.config.generate
import borgmatic.config.validate
import borgmatic.logger

logger = logging.getLogger(__name__)


def run_generate(generate_arguments, global_arguments):
    '''
    Given the generate arguments and the global arguments, each as an argparse.Namespace instance,
    run the "generate" action.

    Raise FileExistsError if a file already exists at the destination path and the generate
    arguments do not have overwrite set.
    '''
    borgmatic.logger.add_custom_log_levels()
    dry_run_label = ' (dry run; not actually writing anything)' if global_arguments.dry_run else ''

    logger.answer(
        f'Generating configuration files within: {generate_arguments.destination_path}{dry_run_label}'
        if generate_arguments.split
        else f'Generating a configuration file at: {generate_arguments.destination_path}{dry_run_label}'
    )

    borgmatic.config.generate.generate_sample_configuration(
        global_arguments.dry_run,
        generate_arguments.source_filename,
        generate_arguments.destination_path,
        borgmatic.config.validate.schema_filename(),
        overwrite=generate_arguments.overwrite,
        split=generate_arguments.split,
    )

    if generate_arguments.source_filename:
        logger.answer(
            f'''
Merged in the contents of configuration file at: {generate_arguments.source_filename}'''
        )

        if not generate_arguments.split:
            logger.answer(
                f'''To review the changes made, run:

   diff --unified {generate_arguments.source_filename} {generate_arguments.destination_path}''',
            )

    logger.answer(
        '''
This includes all available configuration options with example values, the few required options as
indicated. Please edit the file to suit your needs.

If you ever need help: https://torsion.org/borgmatic/#issues''',
    )
borgmatic/borgmatic/actions/config/validate.py000066400000000000000000000015161510202216200220360ustar00rootroot00000000000000import logging

import borgmatic.config.generate
import borgmatic.logger

logger = logging.getLogger(__name__)


def run_validate(validate_arguments, configs):
    '''
    Given the validate arguments as an argparse.Namespace instance and a dict of configuration
    filename to corresponding parsed configuration, run the "validate" action.
Most of the validation is actually performed implicitly by the standard borgmatic configuration loading machinery prior to here, so this function mainly exists to support additional validate flags like "--show". ''' borgmatic.logger.add_custom_log_levels() if validate_arguments.show: for config in configs.values(): if len(configs) > 1: logger.answer('---') logger.answer(borgmatic.config.generate.render_configuration(config)) borgmatic/borgmatic/actions/create.py000066400000000000000000000144211510202216200202420ustar00rootroot00000000000000import logging import borgmatic.actions.json import borgmatic.borg.create import borgmatic.borg.feature import borgmatic.borg.rename import borgmatic.borg.repo_list import borgmatic.config.paths import borgmatic.hooks.dispatch from borgmatic.actions import pattern logger = logging.getLogger(__name__) def run_create( config_filename, repository, config, config_paths, local_borg_version, create_arguments, global_arguments, dry_run_label, local_path, remote_path, ): ''' Run the "create" action for the given repository. If create_arguments.json is True, yield the JSON output from creating the archive. ''' if config.get('list_details') and config.get('progress'): raise ValueError( 'With the create action, only one of --list/--files/list_details and --progress/progress can be used.', ) if config.get('list_details') and create_arguments.json: raise ValueError( 'With the create action, only one of --list/--files/list_details and --json can be used.', ) logger.info(f'Creating archive{dry_run_label}') working_directory = borgmatic.config.paths.get_working_directory(config) with borgmatic.config.paths.Runtime_directory(config) as borgmatic_runtime_directory: patterns = pattern.process_patterns( pattern.collect_patterns(config), config, working_directory, borgmatic_runtime_directory, ) borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured( 'remove_data_source_dumps', config, borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE, borgmatic_runtime_directory, patterns, global_arguments.dry_run, ) active_dumps = borgmatic.hooks.dispatch.call_hooks( 'dump_data_sources', config, borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE, config_paths, borgmatic_runtime_directory, patterns, global_arguments.dry_run, ) # Process the patterns again in case any data source hooks updated them. Without this step, # we could end up with duplicate paths that cause Borg to hang when it tries to read from # the same named pipe twice. patterns = pattern.process_patterns( patterns, config, working_directory, borgmatic_runtime_directory, skip_expand_paths=config_paths, ) stream_processes = [process for processes in active_dumps.values() for process in processes] # If we have stream processes, we first create an archive with .checkpoint suffix. This is # to make sure we only create a real archive if all the streaming processes completed # successfully (create_archive will fail if a streaming process fails, but the archive might # have already been created at this point). 
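        # For instance (hypothetical archive name), a backup with database dumps is first created
        # as "myhost-2024-01-01T00:00:00.checkpoint" and only renamed to
        # "myhost-2024-01-01T00:00:00" by rename_checkpoint_archive() below once every dump
        # process has exited successfully.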
        use_checkpoint = bool(stream_processes)
        json_output = borgmatic.borg.create.create_archive(
            global_arguments.dry_run,
            repository['path'],
            config,
            patterns,
            local_borg_version,
            global_arguments,
            borgmatic_runtime_directory,
            archive_suffix='.checkpoint' if use_checkpoint else '',
            local_path=local_path,
            remote_path=remote_path,
            json=create_arguments.json,
            comment=create_arguments.comment,
            stream_processes=stream_processes,
        )

        if use_checkpoint:
            rename_checkpoint_archive(
                repository['path'],
                global_arguments,
                config,
                local_borg_version,
                local_path,
                remote_path,
            )

        if json_output:
            output = borgmatic.actions.json.parse_json(json_output, repository.get('label'))

            if use_checkpoint:
                # Patch the archive name and ID in the JSON output to reflect the rename above.
                renamed_archive = borgmatic.borg.repo_list.get_latest_archive(
                    repository['path'],
                    config,
                    local_borg_version,
                    global_arguments,
                    local_path,
                    remote_path,
                )
                output['archive']['name'] = renamed_archive['name']
                output['archive']['id'] = renamed_archive['id']

            yield output

        borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured(
            'remove_data_source_dumps',
            config,
            borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE,
            borgmatic_runtime_directory,
            patterns,
            global_arguments.dry_run,
        )


def rename_checkpoint_archive(
    repository_path,
    global_arguments,
    config,
    local_borg_version,
    local_path,
    remote_path,
):
    '''
    Rename the latest archive so that it no longer has a '.checkpoint' suffix.

    Raise ValueError if there is no latest archive or if the latest archive does not have a
    '.checkpoint' suffix.

    Implementation note: There is no reliable way to get the name of the just-created archive, so
    we resort to listing the archives and picking the latest one. The same applies to retrieving
    the ID of the renamed archive.
    '''
    archive = borgmatic.borg.repo_list.get_latest_archive(
        repository_path,
        config,
        local_borg_version,
        global_arguments,
        local_path,
        remote_path,
        consider_checkpoints=True,
    )
    archive_name = archive['name']

    if not archive_name.endswith('.checkpoint'):
        raise ValueError(f'Latest archive did not have a .checkpoint suffix. Got: {archive_name}')

    new_archive_name = archive_name.removesuffix('.checkpoint')
    logger.info(f'Renaming archive {archive_name} -> {new_archive_name}')

    borgmatic.borg.rename.rename_archive(
        repository_path,
        (
            archive['id']
            if borgmatic.borg.feature.available(
                borgmatic.borg.feature.Feature.ARCHIVE_SERIES, local_borg_version
            )
            else archive['name']
        ),
        new_archive_name,
        global_arguments.dry_run,
        config,
        local_borg_version,
        local_path,
        remote_path,
    )
borgmatic/borgmatic/actions/delete.py000066400000000000000000000020711510202216200202370ustar00rootroot00000000000000import logging

import borgmatic.actions.arguments
import borgmatic.borg.delete
import borgmatic.borg.repo_delete
import borgmatic.borg.repo_list

logger = logging.getLogger(__name__)


def run_delete(
    repository,
    config,
    local_borg_version,
    delete_arguments,
    global_arguments,
    local_path,
    remote_path,
):
    '''
    Run the "delete" action for the given repository and archive(s).
''' logger.answer('Deleting archives') archive_name = ( borgmatic.borg.repo_list.resolve_archive_name( repository['path'], delete_arguments.archive, config, local_borg_version, global_arguments, local_path, remote_path, ) if delete_arguments.archive else None ) borgmatic.borg.delete.delete_archives( repository, config, local_borg_version, borgmatic.actions.arguments.update_arguments(delete_arguments, archive=archive_name), global_arguments, local_path, remote_path, ) borgmatic/borgmatic/actions/export_key.py000066400000000000000000000011231510202216200211630ustar00rootroot00000000000000import logging import borgmatic.borg.export_key logger = logging.getLogger(__name__) def run_export_key( repository, config, local_borg_version, export_arguments, global_arguments, local_path, remote_path, ): ''' Run the "key export" action for the given repository. ''' logger.info('Exporting repository key') borgmatic.borg.export_key.export_key( repository['path'], config, local_borg_version, export_arguments, global_arguments, local_path=local_path, remote_path=remote_path, ) borgmatic/borgmatic/actions/export_tar.py000066400000000000000000000021751510202216200211710ustar00rootroot00000000000000import logging import borgmatic.borg.export_tar import borgmatic.borg.repo_list logger = logging.getLogger(__name__) def run_export_tar( repository, config, local_borg_version, export_tar_arguments, global_arguments, local_path, remote_path, ): ''' Run the "export-tar" action for the given repository. ''' logger.info(f'Exporting archive {export_tar_arguments.archive} as tar file') borgmatic.borg.export_tar.export_tar_archive( global_arguments.dry_run, repository['path'], borgmatic.borg.repo_list.resolve_archive_name( repository['path'], export_tar_arguments.archive, config, local_borg_version, global_arguments, local_path, remote_path, ), export_tar_arguments.paths, export_tar_arguments.destination, config, local_borg_version, global_arguments, local_path=local_path, remote_path=remote_path, tar_filter=export_tar_arguments.tar_filter, strip_components=export_tar_arguments.strip_components, ) borgmatic/borgmatic/actions/extract.py000066400000000000000000000021421510202216200204460ustar00rootroot00000000000000import logging import borgmatic.borg.extract import borgmatic.borg.repo_list import borgmatic.hooks.command logger = logging.getLogger(__name__) def run_extract( config_filename, repository, config, local_borg_version, extract_arguments, global_arguments, local_path, remote_path, ): ''' Run the "extract" action for the given repository. ''' logger.info(f'Extracting archive {extract_arguments.archive}') borgmatic.borg.extract.extract_archive( global_arguments.dry_run, repository['path'], borgmatic.borg.repo_list.resolve_archive_name( repository['path'], extract_arguments.archive, config, local_borg_version, global_arguments, local_path, remote_path, ), extract_arguments.paths, config, local_borg_version, global_arguments, local_path=local_path, remote_path=remote_path, destination_path=extract_arguments.destination, strip_components=extract_arguments.strip_components, ) borgmatic/borgmatic/actions/import_key.py000066400000000000000000000011231510202216200211540ustar00rootroot00000000000000import logging import borgmatic.borg.import_key logger = logging.getLogger(__name__) def run_import_key( repository, config, local_borg_version, import_arguments, global_arguments, local_path, remote_path, ): ''' Run the "key import" action for the given repository. 
    '''
    logger.info('Importing repository key')
    borgmatic.borg.import_key.import_key(
        repository['path'],
        config,
        local_borg_version,
        import_arguments,
        global_arguments,
        local_path=local_path,
        remote_path=remote_path,
    )
borgmatic/borgmatic/actions/info.py000066400000000000000000000023261510202216200177330ustar00rootroot00000000000000import logging

import borgmatic.actions.arguments
import borgmatic.actions.json
import borgmatic.borg.info
import borgmatic.borg.repo_list

logger = logging.getLogger(__name__)


def run_info(
    repository,
    config,
    local_borg_version,
    info_arguments,
    global_arguments,
    local_path,
    remote_path,
):
    '''
    Run the "info" action for the given repository and archive.

    If info_arguments.json is True, yield the JSON output from the info for the archive.
    '''
    if not info_arguments.json:
        logger.answer('Displaying archive summary information')

    archive_name = borgmatic.borg.repo_list.resolve_archive_name(
        repository['path'],
        info_arguments.archive,
        config,
        local_borg_version,
        global_arguments,
        local_path,
        remote_path,
    )
    json_output = borgmatic.borg.info.display_archives_info(
        repository['path'],
        config,
        local_borg_version,
        borgmatic.actions.arguments.update_arguments(info_arguments, archive=archive_name),
        global_arguments,
        local_path,
        remote_path,
    )

    if json_output:
        yield borgmatic.actions.json.parse_json(json_output, repository.get('label'))
borgmatic/borgmatic/actions/json.py000066400000000000000000000015131510202216200177460ustar00rootroot00000000000000import json
import logging

logger = logging.getLogger(__name__)


def parse_json(borg_json_output, label):
    '''
    Given a Borg JSON output string, parse it as JSON into a dict. Inject the given borgmatic
    repository label into it and return the dict.

    Raise JSONDecodeError if the JSON output cannot be parsed.
    '''
    lines = borg_json_output.splitlines()
    start_line_index = 0

    # Scan forward to find the first line starting with "{" and assume that's where the JSON
    # starts.
    for line_index, line in enumerate(lines):
        if line.startswith('{'):
            start_line_index = line_index
            break

    json_data = json.loads('\n'.join(lines[start_line_index:]))

    if 'repository' not in json_data:
        return json_data

    json_data['repository']['label'] = label or ''

    return json_data
borgmatic/borgmatic/actions/list.py000066400000000000000000000024751510202216200177560ustar00rootroot00000000000000import logging

import borgmatic.actions.arguments
import borgmatic.actions.json
import borgmatic.borg.list
import borgmatic.borg.repo_list

logger = logging.getLogger(__name__)


def run_list(
    repository,
    config,
    local_borg_version,
    list_arguments,
    global_arguments,
    local_path,
    remote_path,
):
    '''
    Run the "list" action for the given repository and archive.

    If list_arguments.json is True, yield the JSON output from listing the archive.
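
    Each yielded dict is Borg's parsed JSON output. When that output includes a "repository" key,
    borgmatic.actions.json.parse_json() injects the configured repository label into it, along
    these lines (hypothetical values):

        {'repository': {'location': '/mnt/backups/repo.borg', 'label': 'myrepo'}, ...}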
''' if not list_arguments.json: if list_arguments.find_paths: # pragma: no cover logger.answer('Searching archives') elif not list_arguments.archive: # pragma: no cover logger.answer('Listing archives') archive_name = borgmatic.borg.repo_list.resolve_archive_name( repository['path'], list_arguments.archive, config, local_borg_version, global_arguments, local_path, remote_path, ) json_output = borgmatic.borg.list.list_archive( repository['path'], config, local_borg_version, borgmatic.actions.arguments.update_arguments(list_arguments, archive=archive_name), global_arguments, local_path, remote_path, ) if json_output: yield borgmatic.actions.json.parse_json(json_output, repository.get('label')) borgmatic/borgmatic/actions/mount.py000066400000000000000000000017521510202216200201440ustar00rootroot00000000000000import logging import borgmatic.borg.mount import borgmatic.borg.repo_list logger = logging.getLogger(__name__) def run_mount( repository, config, local_borg_version, mount_arguments, global_arguments, local_path, remote_path, ): ''' Run the "mount" action for the given repository. ''' if mount_arguments.archive: logger.info(f'Mounting archive {mount_arguments.archive}') else: # pragma: nocover logger.info('Mounting repository') borgmatic.borg.mount.mount_archive( repository['path'], borgmatic.borg.repo_list.resolve_archive_name( repository['path'], mount_arguments.archive, config, local_borg_version, global_arguments, local_path, remote_path, ), mount_arguments, config, local_borg_version, global_arguments, local_path=local_path, remote_path=remote_path, ) borgmatic/borgmatic/actions/pattern.py000066400000000000000000000320351510202216200204550ustar00rootroot00000000000000import glob import itertools import logging import os import pathlib import borgmatic.borg.pattern logger = logging.getLogger(__name__) def parse_pattern(pattern_line, default_style=borgmatic.borg.pattern.Pattern_style.NONE): ''' Given a Borg pattern as a string, parse it into a borgmatic.borg.pattern.Pattern instance and return it. ''' try: (pattern_type, remainder) = pattern_line.split(' ', maxsplit=1) except ValueError: raise ValueError(f'Invalid pattern: {pattern_line}') try: (parsed_pattern_style, path) = remainder.split(':', maxsplit=1) pattern_style = borgmatic.borg.pattern.Pattern_style(parsed_pattern_style) except ValueError: pattern_style = default_style path = remainder return borgmatic.borg.pattern.Pattern( path, borgmatic.borg.pattern.Pattern_type(pattern_type), borgmatic.borg.pattern.Pattern_style(pattern_style), source=borgmatic.borg.pattern.Pattern_source.CONFIG, ) def collect_patterns(config): ''' Given a configuration dict, produce a single sequence of patterns comprised of the configured source directories, patterns, excludes, pattern files, and exclude files. The idea is that Borg has all these different ways of specifying includes, excludes, source directories, etc., but we'd like to collapse them all down to one common format (patterns) for ease of manipulation within borgmatic. 
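
    As a sketch of that collapsing (hypothetical configuration values), a config of:

        {'source_directories': ['/home'], 'exclude_patterns': ['/home/*/.cache']}

    becomes roughly:

        (
            Pattern('/home', Pattern_type.ROOT, source=Pattern_source.CONFIG),
            Pattern('/home/*/.cache', Pattern_type.NO_RECURSE, Pattern_style.FNMATCH,
                    source=Pattern_source.CONFIG),
        )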
''' try: return ( tuple( borgmatic.borg.pattern.Pattern( source_directory, source=borgmatic.borg.pattern.Pattern_source.CONFIG, ) for source_directory in config.get('source_directories', ()) ) + tuple( parse_pattern(pattern_line.strip()) for pattern_line in config.get('patterns', ()) if not pattern_line.lstrip().startswith('#') if pattern_line.strip() ) + tuple( parse_pattern( f'{borgmatic.borg.pattern.Pattern_type.NO_RECURSE.value} {exclude_line.strip()}', borgmatic.borg.pattern.Pattern_style.FNMATCH, ) for exclude_line in config.get('exclude_patterns', ()) ) + tuple( parse_pattern(pattern_line.strip()) for filename in config.get('patterns_from', ()) for pattern_line in open(filename, encoding='utf-8').readlines() if not pattern_line.lstrip().startswith('#') if pattern_line.strip() ) + tuple( parse_pattern( f'{borgmatic.borg.pattern.Pattern_type.NO_RECURSE.value} {exclude_line.strip()}', borgmatic.borg.pattern.Pattern_style.FNMATCH, ) for filename in config.get('exclude_from', ()) for exclude_line in open(filename, encoding='utf-8').readlines() if not exclude_line.lstrip().startswith('#') if exclude_line.strip() ) ) except (FileNotFoundError, OSError) as error: logger.debug(error) raise ValueError(f'Cannot read patterns_from/exclude_from file: {error.filename}') def expand_directory(directory, working_directory): ''' Given a directory path, expand any tilde (representing a user's home directory) and any globs therein. Return a list of one or more resulting paths. Take into account the given working directory so that relative paths are supported. ''' expanded_directory = os.path.expanduser(directory) # This would be a lot easier to do with glob(..., root_dir=working_directory), but root_dir is # only available in Python 3.10+. normalized_directory = os.path.join(working_directory or '', expanded_directory) glob_paths = glob.glob(normalized_directory) if not glob_paths: return [expanded_directory] working_directory_prefix = os.path.join(working_directory or '', '') return [ ( glob_path # If these are equal, that means we didn't add any working directory prefix above. if normalized_directory == expanded_directory # Remove the working directory prefix added above in order to make glob() work. We # can't use os.path.relpath() here because it collapses any use of Borg's slashdot hack. else glob_path.removeprefix(working_directory_prefix) ) for glob_path in glob_paths ] def expand_patterns(patterns, working_directory=None, skip_paths=None): ''' Given a sequence of borgmatic.borg.pattern.Pattern instances and an optional working directory, expand tildes and globs in each root pattern and expand just tildes in each non-root pattern. The idea is that non-root patterns may be regular expressions or other pattern styles containing "*" that borgmatic should not expand as a shell glob. Return all the resulting patterns as a tuple. If a set of paths are given to skip, then don't expand any patterns matching them. 
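
    For instance (hypothetical paths), a root pattern of '~/app/logs*' might expand into separate
    root patterns for '/home/user/app/logs' and '/home/user/app/logs.old', while a non-root
    fnmatch pattern of '~/app/*.tmp' only has its tilde expanded, becoming '/home/user/app/*.tmp'.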
    '''
    if patterns is None:
        return ()

    return tuple(
        itertools.chain.from_iterable(
            (
                (
                    borgmatic.borg.pattern.Pattern(
                        expanded_path,
                        pattern.type,
                        pattern.style,
                        pattern.device,
                        pattern.source,
                    )
                    for expanded_path in expand_directory(pattern.path, working_directory)
                )
                if pattern.type == borgmatic.borg.pattern.Pattern_type.ROOT
                and pattern.path not in (skip_paths or ())
                else (
                    borgmatic.borg.pattern.Pattern(
                        os.path.expanduser(pattern.path),
                        pattern.type,
                        pattern.style,
                        pattern.device,
                        pattern.source,
                    ),
                )
            )
            for pattern in patterns
        ),
    )


def get_existent_path_or_parent(path):
    '''
    Given a path, return it if it exists. Otherwise, return the longest parent directory of the
    path that exists. Return None if none of these paths exist.

    This is used below for finding an existent path prefix of a pattern's path, which is necessary
    if the path contains globs or other special characters that we don't want to try to interpret
    (because we want to leave that responsibility to Borg).
    '''
    if path.startswith('/e2e/'):
        return None

    try:
        return next(
            candidate_path
            for candidate_path in (
                path,
                *tuple(str(parent) for parent in pathlib.PurePath(path).parents),
            )
            if os.path.exists(candidate_path)
        )
    except StopIteration:
        return None


def device_map_patterns(patterns, working_directory=None):
    '''
    Given a sequence of borgmatic.borg.pattern.Pattern instances and an optional working directory,
    determine the identifier for the device on which the pattern's path resides—or None if the path
    doesn't exist or is from a non-root pattern. Return an updated sequence of patterns with the
    device field populated. But if the device field is already set, don't bother setting it again.

    This is handy for determining whether two different pattern paths are on the same filesystem
    (have the same device identifier).

    This function only considers the start of a pattern's path—from the start of the path up until
    there's a path component with a glob or other non-literal character. If there are no such
    characters, the whole path is considered. The rationale is that it's not feasible for borgmatic
    to interpret Borg's patterns to see which actual files (and therefore devices) they map to. So
    for instance, a pattern with a path of "/var/log/*/data" would end up with its device set to
    the device of "/var/log"—ignoring the "/*/data" part due to that glob.

    The one exception is that if a regular expression pattern path starts with "^", that will get
    stripped off for purposes of determining its device.
    '''
    return tuple(
        borgmatic.borg.pattern.Pattern(
            pattern.path,
            pattern.type,
            pattern.style,
            device=pattern.device or (os.stat(existent_path).st_dev if existent_path else None),
            source=pattern.source,
        )
        for pattern in patterns
        for existent_path in (
            get_existent_path_or_parent(
                os.path.join(working_directory or '', pattern.path.lstrip('^')),
            ),
        )
    )


def deduplicate_runtime_directory_patterns(patterns, config, borgmatic_runtime_directory=None):
    '''
    Given a sequence of borgmatic.borg.pattern.Pattern instances, the borgmatic runtime directory,
    and a configuration dict, return them without any duplicate root child patterns that contain
    the runtime directory. For instance, if two root patterns are given with paths "/foo" and
    "/foo/bar", and the runtime directory is "/foo/bar", return just the "/foo" pattern. Non-root
    patterns and patterns not containing the runtime directory are passed through without
    modification.

    One exception to deduplication is if two paths are on different filesystems (devices) and
    "one_file_system" is True in the given configuration.
In that case, the paths won't get deduplicated, because Borg won't cross filesystem boundaries when "one_file_system" is True. The idea is that if Borg is given a root parent pattern containing the borgmatic runtime directory, then Borg doesn't also need to be given child patterns, because it will naturally spider the contents of the parent pattern's path. Additionally, there are cases where Borg coming across the same file twice will result in duplicate reads and even hangs, e.g. when a database hook in the borgmatic runtime directory is using a named pipe for streaming database dumps to Borg. This deduplication is limited to the borgmatic runtime directory (where borgmatic's named pipes exist), because there are other legitimate use cases for parent and child patterns to both exist in patterns. For instance, with some snapshotted filesystems, snapshots don't traverse from a parent filesystem to a child and therefore both need to remain in patterns. And for the case of named pipes outside of the borgmatic runtime directory, there is code elsewhere (in the "create" action) that auto-excludes special files to prevent Borg hangs. ''' if borgmatic_runtime_directory is None: return patterns deduplicated = {} # Use just the keys as an ordered set. for pattern in patterns: if pattern.type != borgmatic.borg.pattern.Pattern_type.ROOT: deduplicated[pattern] = True continue parents = pathlib.PurePath(pattern.path).parents # If another directory in the given list is a parent of current directory (even n levels up) # and both are on the same filesystem (or one_file_system is not set), then the current # directory is a duplicate. for other_pattern in patterns: if other_pattern.type != borgmatic.borg.pattern.Pattern_type.ROOT: continue if any( pathlib.PurePath(other_pattern.path) == parent and pathlib.PurePosixPath(other_pattern.path) in pathlib.PurePath(borgmatic_runtime_directory).parents and pattern.device is not None and ( other_pattern.device == pattern.device or config.get('one_file_system') is not True ) for parent in parents ): break else: deduplicated[pattern] = True return tuple(deduplicated.keys()) def process_patterns( patterns, config, working_directory, borgmatic_runtime_directory=None, skip_expand_paths=None ): ''' Given a sequence of Borg patterns, a configuration dict, a configured working directory, the borgmatic runtime directory, and a sequence of paths to skip path expansion for, expand and deduplicate any "root" patterns, returning the resulting root and non-root patterns as a list. If the borgmatic runtime directory is None, then don't deduplicate patterns. Deduplication is really only necessary for the "create" action when the runtime directory might contain named pipes for database dumps. ''' skip_paths = set(skip_expand_paths or ()) return list( deduplicate_runtime_directory_patterns( device_map_patterns( expand_patterns( patterns, working_directory=working_directory, skip_paths=skip_paths, ), ), config, borgmatic_runtime_directory, ), ) borgmatic/borgmatic/actions/prune.py000066400000000000000000000012621510202216200201270ustar00rootroot00000000000000import logging import borgmatic.borg.prune import borgmatic.hooks.command logger = logging.getLogger(__name__) def run_prune( config_filename, repository, config, local_borg_version, prune_arguments, global_arguments, dry_run_label, local_path, remote_path, ): ''' Run the "prune" action for the given repository. 
    '''
    logger.info(f'Pruning archives{dry_run_label}')
    borgmatic.borg.prune.prune_archives(
        global_arguments.dry_run,
        repository['path'],
        config,
        local_borg_version,
        prune_arguments,
        global_arguments,
        local_path=local_path,
        remote_path=remote_path,
    )
borgmatic/borgmatic/actions/recreate.py000066400000000000000000000046651510202216200206020ustar00rootroot00000000000000import logging
import subprocess

import borgmatic.borg.info
import borgmatic.borg.recreate
import borgmatic.borg.repo_list
import borgmatic.config.paths
from borgmatic.actions.pattern import collect_patterns, process_patterns

logger = logging.getLogger(__name__)

BORG_EXIT_CODE_ARCHIVE_ALREADY_EXISTS = 30


def run_recreate(
    repository,
    config,
    local_borg_version,
    recreate_arguments,
    global_arguments,
    local_path,
    remote_path,
):
    '''
    Run the "recreate" action for the given repository.
    '''
    if recreate_arguments.archive:
        logger.answer(f'Recreating archive {recreate_arguments.archive}')
    else:
        logger.answer('Recreating repository')

    # Collect and process patterns.
    processed_patterns = process_patterns(
        collect_patterns(config),
        config,
        borgmatic.config.paths.get_working_directory(config),
    )

    archive = borgmatic.borg.repo_list.resolve_archive_name(
        repository['path'],
        recreate_arguments.archive,
        config,
        local_borg_version,
        global_arguments,
        local_path,
        remote_path,
    )

    if archive and archive.endswith('.recreate'):
        if recreate_arguments.archive == 'latest':
            raise ValueError(
                f'The latest archive "{archive}" is leftover from a prior recreate. Delete it first or select a different archive.',
            )

        raise ValueError(
            f'The archive "{recreate_arguments.archive}" is leftover from a prior recreate. Select a different archive.',
        )

    try:
        borgmatic.borg.recreate.recreate_archive(
            repository['path'],
            archive,
            config,
            local_borg_version,
            recreate_arguments,
            global_arguments,
            local_path=local_path,
            remote_path=remote_path,
            patterns=processed_patterns,
        )
    except subprocess.CalledProcessError as error:
        if error.returncode == BORG_EXIT_CODE_ARCHIVE_ALREADY_EXISTS:
            if recreate_arguments.target:
                raise ValueError(
                    f'The archive "{recreate_arguments.target}" already exists. Delete it first or set a different target archive name.',
                )

            if archive:
                raise ValueError(
                    f'The archive "{archive}.recreate" is leftover from a prior recreate. Delete it first or select a different archive.',
                )

        raise
borgmatic/borgmatic/actions/repo_create.py000066400000000000000000000030421510202216200212640ustar00rootroot00000000000000import logging

import borgmatic.borg.repo_create

logger = logging.getLogger(__name__)


def run_repo_create(
    repository,
    config,
    local_borg_version,
    repo_create_arguments,
    global_arguments,
    local_path,
    remote_path,
):
    '''
    Run the "repo-create" action for the given repository.
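
    The encryption mode comes from either the --encryption flag or the repository's "encryption"
    option. For example (hypothetical values), with a configured repository entry of:

        {'path': 'ssh://user@host/./repo.borg', 'encryption': 'repokey-blake2'}

    the repository can be created without passing --encryption at all.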
''' logger.info('Creating repository') encryption_mode = repo_create_arguments.encryption_mode or repository.get('encryption') if not encryption_mode: raise ValueError( 'With the repo-create action, either the --encryption flag or the repository encryption option is required.', ) borgmatic.borg.repo_create.create_repository( global_arguments.dry_run, repository['path'], config, local_borg_version, global_arguments, encryption_mode, repo_create_arguments.source_repository, repo_create_arguments.copy_crypt_key, ( repository.get('append_only') if repo_create_arguments.append_only is None else repo_create_arguments.append_only ), ( repository.get('storage_quota') if repo_create_arguments.storage_quota is None else repo_create_arguments.storage_quota ), ( repository.get('make_parent_directories') if repo_create_arguments.make_parent_directories is None else repo_create_arguments.make_parent_directories ), local_path=local_path, remote_path=remote_path, ) borgmatic/borgmatic/actions/repo_delete.py000066400000000000000000000012171510202216200212650ustar00rootroot00000000000000import logging import borgmatic.borg.repo_delete logger = logging.getLogger(__name__) def run_repo_delete( repository, config, local_borg_version, repo_delete_arguments, global_arguments, local_path, remote_path, ): ''' Run the "repo-delete" action for the given repository. ''' logger.answer( 'Deleting repository' + (' cache' if repo_delete_arguments.cache_only else ''), ) borgmatic.borg.repo_delete.delete_repository( repository, config, local_borg_version, repo_delete_arguments, global_arguments, local_path, remote_path, ) borgmatic/borgmatic/actions/repo_info.py000066400000000000000000000016741510202216200207650ustar00rootroot00000000000000import logging import borgmatic.actions.json import borgmatic.borg.repo_info logger = logging.getLogger(__name__) def run_repo_info( repository, config, local_borg_version, repo_info_arguments, global_arguments, local_path, remote_path, ): ''' Run the "repo-info" action for the given repository. If repo_info_arguments.json is True, yield the JSON output from the info for the repository. ''' if not repo_info_arguments.json: logger.answer('Displaying repository summary information') json_output = borgmatic.borg.repo_info.display_repository_info( repository['path'], config, local_borg_version, repo_info_arguments=repo_info_arguments, global_arguments=global_arguments, local_path=local_path, remote_path=remote_path, ) if json_output: yield borgmatic.actions.json.parse_json(json_output, repository.get('label')) borgmatic/borgmatic/actions/repo_list.py000066400000000000000000000016301510202216200207750ustar00rootroot00000000000000import logging import borgmatic.actions.json import borgmatic.borg.repo_list logger = logging.getLogger(__name__) def run_repo_list( repository, config, local_borg_version, repo_list_arguments, global_arguments, local_path, remote_path, ): ''' Run the "repo-list" action for the given repository. If repo_list_arguments.json is True, yield the JSON output from listing the repository. 
    '''
    if not repo_list_arguments.json:
        logger.answer('Listing repository')

    json_output = borgmatic.borg.repo_list.list_repository(
        repository['path'],
        config,
        local_borg_version,
        repo_list_arguments=repo_list_arguments,
        global_arguments=global_arguments,
        local_path=local_path,
        remote_path=remote_path,
    )

    if json_output:
        yield borgmatic.actions.json.parse_json(json_output, repository.get('label'))
borgmatic/borgmatic/actions/restore.py000066400000000000000000000530031510202216200204610ustar00rootroot00000000000000import collections
import logging
import os
import pathlib
import shutil
import tempfile

import borgmatic.actions.pattern
import borgmatic.borg.extract
import borgmatic.borg.list
import borgmatic.borg.mount
import borgmatic.borg.repo_list
import borgmatic.config.paths
import borgmatic.hooks.data_source.dump
import borgmatic.hooks.dispatch

logger = logging.getLogger(__name__)


UNSPECIFIED = object()


Dump = collections.namedtuple(
    'Dump',
    ('hook_name', 'data_source_name', 'hostname', 'port', 'label', 'container'),
    defaults=(None, None, None, None),
)


def dumps_match(first, second, default_port=None):
    '''
    Compare two Dump instances for equality while supporting a field value of UNSPECIFIED, which
    indicates that the field should match any value. If a default port is given, then consider any
    dump having that port to match with a dump having a None port.
    '''
    # A label acts as a unique ID of sorts; if the labels match, ignore the host, container, and
    # port fields.
    if first.label not in {None, UNSPECIFIED} and first.label == second.label:
        field_list = ('hook_name', 'data_source_name')
    else:
        field_list = Dump._fields

    for field_name in field_list:
        first_value = getattr(first, field_name)
        second_value = getattr(second, field_name)

        if default_port is not None and field_name == 'port':
            if first_value == default_port and second_value is None:
                continue

            if second_value == default_port and first_value is None:
                continue

        if first_value == UNSPECIFIED or second_value == UNSPECIFIED:  # noqa: PLR1714
            continue

        if first_value != second_value:
            return False

    return True


def render_dump_metadata(dump):
    '''
    Given a Dump instance, make a display string describing it for use in log messages.
    '''
    label = dump.label or UNSPECIFIED
    name = 'unspecified' if dump.data_source_name is UNSPECIFIED else dump.data_source_name
    host = dump.container or dump.hostname or UNSPECIFIED
    port = None if dump.port is UNSPECIFIED else dump.port

    if label is not UNSPECIFIED:
        metadata = f'{name}@{label}'
    elif port:
        metadata = f'{name}@:{port}' if host is UNSPECIFIED else f'{name}@{host}:{port}'
    else:
        metadata = f'{name}' if host is UNSPECIFIED else f'{name}@{host}'

    if dump.hook_name not in {None, UNSPECIFIED}:
        return f'{metadata} ({dump.hook_name})'

    return metadata


def get_configured_data_source(config, restore_dump):
    '''
    Search in the given configuration dict for dumps corresponding to the given dump to restore. If
    there are multiple matches, error.

    Return the found data source as a data source configuration dict or None if not found.
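
    For example (hypothetical values), given a config containing:

        {'postgresql_databases': [{'name': 'users', 'hostname': 'db.example.com'}]}

    and a restore_dump of Dump('postgresql_databases', 'users', 'db.example.com', None), this
    returns the {'name': 'users', 'hostname': 'db.example.com'} dict.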
''' try: hooks_to_search = {restore_dump.hook_name: config[restore_dump.hook_name]} except KeyError: return None matching_dumps = tuple( hook_data_source for (hook_name, hook_config) in hooks_to_search.items() for hook_data_source in hook_config for default_port in ( borgmatic.hooks.dispatch.call_hook( function_name='get_default_port', config=config, hook_name=hook_name, ), ) if dumps_match( Dump( hook_name, hook_data_source.get('name'), hook_data_source.get('hostname'), hook_data_source.get('port'), hook_data_source.get('label') or UNSPECIFIED, hook_data_source.get('container'), ), restore_dump, default_port, ) ) if not matching_dumps: return None if len(matching_dumps) > 1: raise ValueError( f'Cannot restore data source {render_dump_metadata(restore_dump)} because there are multiple matching data sources configured', ) return matching_dumps[0] def strip_path_prefix_from_extracted_dump_destination( destination_path, borgmatic_runtime_directory, ): ''' Directory-format dump files get extracted into a temporary directory containing a path prefix that depends how the files were stored in the archive. So, given the destination path where the dump was extracted and the borgmatic runtime directory, move the dump files such that the restore doesn't have to deal with that varying path prefix. For instance, if the dump was extracted to: /run/user/0/borgmatic/tmp1234/borgmatic/postgresql_databases/test/... or: /run/user/0/borgmatic/tmp1234/root/.borgmatic/postgresql_databases/test/... then this function moves it to: /run/user/0/borgmatic/postgresql_databases/test/... ''' for subdirectory_path, _, _ in os.walk(destination_path): databases_directory = os.path.basename(subdirectory_path) if not databases_directory.endswith('_databases'): continue shutil.move( subdirectory_path, os.path.join(borgmatic_runtime_directory, databases_directory), ) break def restore_single_dump( repository, config, local_borg_version, global_arguments, local_path, remote_path, archive_name, hook_name, data_source, connection_params, borgmatic_runtime_directory, ): ''' Given (among other things) an archive name, a data source hook name, the hostname, port, username/password as connection params, and a configured data source configuration dict, restore that data source from the archive. ''' dump_metadata = render_dump_metadata( Dump( hook_name, data_source['name'], data_source.get('hostname'), data_source.get('port'), data_source.get('label') or UNSPECIFIED, data_source.get('container'), ), ) logger.info(f'Restoring data source {dump_metadata}') dump_patterns = borgmatic.hooks.dispatch.call_hooks( 'make_data_source_dump_patterns', config, borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE, borgmatic_runtime_directory, data_source['name'], )[hook_name.split('_databases', 1)[0]] destination_path = ( tempfile.mkdtemp(dir=borgmatic_runtime_directory) if data_source.get('format') == 'directory' else None ) try: # Kick off a single data source extract. If using a directory format, extract to a temporary # directory. Otherwise extract the single dump file to stdout. 
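        # For example (hypothetical dump), a directory-format PostgreSQL dump is extracted into a
        # tempfile.mkdtemp() directory under the borgmatic runtime directory and restored from
        # there, whereas a single-file dump streams through extract_process.stdout directly into
        # the restore performed by the hook.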
extract_process = borgmatic.borg.extract.extract_archive( dry_run=global_arguments.dry_run, repository=repository['path'], archive=archive_name, paths=[ borgmatic.hooks.data_source.dump.convert_glob_patterns_to_borg_pattern( dump_patterns, ), ], config=config, local_borg_version=local_borg_version, global_arguments=global_arguments, local_path=local_path, remote_path=remote_path, destination_path=destination_path, # A directory format dump isn't a single file, and therefore can't extract # to stdout. In this case, the extract_process return value is None. extract_to_stdout=bool(data_source.get('format') != 'directory'), ) if destination_path and not global_arguments.dry_run: strip_path_prefix_from_extracted_dump_destination( destination_path, borgmatic_runtime_directory, ) finally: if destination_path and not global_arguments.dry_run: shutil.rmtree(destination_path, ignore_errors=True) # Run a single data source restore, consuming the extract stdout (if any). borgmatic.hooks.dispatch.call_hook( function_name='restore_data_source_dump', config=config, hook_name=hook_name, data_source=data_source, dry_run=global_arguments.dry_run, extract_process=extract_process, connection_params=connection_params, borgmatic_runtime_directory=borgmatic_runtime_directory, ) def collect_dumps_from_archive( repository, archive, config, local_borg_version, global_arguments, local_path, remote_path, borgmatic_runtime_directory, ): ''' Given a local or remote repository path, a resolved archive name, a configuration dict, the local Borg version, global arguments an argparse.Namespace, local and remote Borg paths, and the borgmatic runtime directory, query the archive for the names of data sources dumps it contains and return them as a set of Dump instances. ''' dumps_from_archive = set() # There is (at most) one dump metadata file per data source hook. Load each. for dumps_metadata_path in borgmatic.borg.list.capture_archive_listing( repository, archive, config, local_borg_version, global_arguments, list_paths=[ 'sh:' + borgmatic.hooks.data_source.dump.make_data_source_dump_path( base_directory, '*_databases/dumps.json', ) # Probe for dump metadata files in multiple locations, as the default location is # "/borgmatic/*_databases/dumps.json" with Borg 1.4+, but instead begins with the # borgmatic runtime directory for older versions of Borg. for base_directory in ( 'borgmatic', borgmatic.config.paths.make_runtime_directory_glob(borgmatic_runtime_directory), ) ], local_path=local_path, remote_path=remote_path, ): if not dumps_metadata_path: continue dumps_from_archive.update( set( borgmatic.hooks.data_source.dump.parse_data_source_dumps_metadata( borgmatic.borg.extract.extract_archive( global_arguments.dry_run, repository, archive, [dumps_metadata_path], config, local_borg_version, global_arguments, local_path=local_path, remote_path=remote_path, extract_to_stdout=True, ) .stdout.read() .decode(), dumps_metadata_path, ) ) ) # If we've successfully loaded any dumps metadata, we're done. if dumps_from_archive: logger.debug('Collecting database dumps from archive data source dumps metadata files') return dumps_from_archive # No dumps metadata files were found, so for backwards compatibility, fall back to parsing the # paths of dumps found in the archive to get their respective dump metadata. 
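    # As a sketch of this fallback parsing (hypothetical archive path): a dump stored at
    # "borgmatic/postgresql_databases/localhost/users" yields
    # Dump('postgresql_databases', 'users', hostname=None, port=None), since a hostname of
    # "localhost" is normalized to None below.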
logger.debug('Collecting database dumps from archive data source dump paths (fallback)') borgmatic_source_directory = str( pathlib.Path(borgmatic.config.paths.get_borgmatic_source_directory(config)), ) # Probe for the data source dumps in multiple locations, as the default location has moved to # the borgmatic runtime directory (which gets stored as just "/borgmatic" with Borg 1.4+). But # we still want to support reading dumps from previously created archives as well. dump_paths = borgmatic.borg.list.capture_archive_listing( repository, archive, config, local_borg_version, global_arguments, list_paths=[ 'sh:' + borgmatic.hooks.data_source.dump.make_data_source_dump_path( base_directory, '*_databases/*/*', ) for base_directory in ( 'borgmatic', borgmatic.config.paths.make_runtime_directory_glob(borgmatic_runtime_directory), borgmatic_source_directory.lstrip('/'), ) ], local_path=local_path, remote_path=remote_path, ) for dump_path in dump_paths: if not dump_path: continue # Probe to find the base directory that's at the start of the dump path. for base_directory in ( 'borgmatic', borgmatic_runtime_directory, borgmatic_source_directory, ): try: (hook_name, host_and_port, data_source_name) = dump_path.split( base_directory + os.path.sep, 1, )[1].split(os.path.sep)[0:3] except (ValueError, IndexError): continue parts = host_and_port.split(':', 1) if len(parts) == 1: parts += (None,) (hostname, port) = parts try: port = int(port) except (ValueError, TypeError): port = None dumps_from_archive.add( Dump( hook_name, data_source_name, None if hostname == 'localhost' else hostname, port ) ) # We've successfully parsed the dump path, so need to probe any further. break else: logger.warning( f'Ignoring invalid data source dump path "{dump_path}" in archive {archive}', ) return dumps_from_archive def get_dumps_to_restore(restore_arguments, dumps_from_archive): ''' Given restore arguments as an argparse.Namespace instance indicating which dumps to restore and a set of Dump instances representing the dumps found in an archive, return a set of specific Dump instances from the archive to restore. As part of this, replace any Dump having a data source name of "all" with multiple named Dump instances as appropriate. Raise ValueError if any of the requested data source names cannot be found in the archive or if there are multiple archive dump matches for a given requested dump. ''' requested_dumps = ( { Dump( hook_name=( ( restore_arguments.hook if restore_arguments.hook.endswith('_databases') else f'{restore_arguments.hook}_databases' ) if restore_arguments.hook else UNSPECIFIED ), data_source_name=name, hostname=restore_arguments.original_hostname or UNSPECIFIED, port=restore_arguments.original_port, label=restore_arguments.original_label or UNSPECIFIED, container=restore_arguments.original_container or UNSPECIFIED, ) for name in restore_arguments.data_sources or (UNSPECIFIED,) } if restore_arguments.hook or restore_arguments.data_sources or restore_arguments.original_hostname or restore_arguments.original_port or restore_arguments.original_label or restore_arguments.original_container else { Dump( hook_name=UNSPECIFIED, data_source_name='all', hostname=UNSPECIFIED, port=UNSPECIFIED, label=UNSPECIFIED, container=UNSPECIFIED, ), } ) missing_dumps = set() dumps_to_restore = set() # If there's a requested "all" dump, add every dump from the archive to the dumps to restore. 
if any(dump for dump in requested_dumps if dump.data_source_name == 'all'): dumps_to_restore.update(dumps_from_archive) # If any archive dump matches a requested dump, add the archive dump to the dumps to restore. for requested_dump in requested_dumps: if requested_dump.data_source_name == 'all': continue matching_dumps = tuple( archive_dump for archive_dump in dumps_from_archive if dumps_match(requested_dump, archive_dump) ) if len(matching_dumps) == 0: missing_dumps.add(requested_dump) elif len(matching_dumps) == 1: dumps_to_restore.add(matching_dumps[0]) else: raise ValueError( f'Cannot restore data source {render_dump_metadata(requested_dump)} because there are multiple matching dumps in the archive. Try adding flags to disambiguate.', ) if missing_dumps: rendered_dumps = ', '.join( f'{render_dump_metadata(dump)}' for dump in sorted(missing_dumps) ) raise ValueError( f"Cannot restore data source dump{'s' if len(missing_dumps) > 1 else ''} {rendered_dumps} missing from archive", ) return dumps_to_restore def ensure_requested_dumps_restored(dumps_to_restore, dumps_actually_restored): ''' Given a set of requested dumps to restore and a set of dumps actually restored, raise ValueError if any requested dumps to restore weren't restored, indicating that they were missing from the configuration. ''' if not dumps_actually_restored: raise ValueError('No data source dumps were found to restore') missing_dumps = sorted( dumps_to_restore - dumps_actually_restored, key=lambda dump: dump.data_source_name, ) if missing_dumps: rendered_dumps = ', '.join(f'{render_dump_metadata(dump)}' for dump in missing_dumps) raise ValueError( f"Cannot restore data source{'s' if len(missing_dumps) > 1 else ''} {rendered_dumps} missing from borgmatic's configuration", ) def run_restore( repository, config, local_borg_version, restore_arguments, global_arguments, local_path, remote_path, ): ''' Run the "restore" action for the given repository. Raise ValueError if a configured data source could not be found to restore or there's no matching dump in the archive. ''' logger.info(f'Restoring data sources from archive {restore_arguments.archive}') working_directory = borgmatic.config.paths.get_working_directory(config) with borgmatic.config.paths.Runtime_directory(config) as borgmatic_runtime_directory: patterns = borgmatic.actions.pattern.process_patterns( borgmatic.actions.pattern.collect_patterns(config), config, working_directory, ) borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured( 'remove_data_source_dumps', config, borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE, borgmatic_runtime_directory, patterns, global_arguments.dry_run, ) archive_name = borgmatic.borg.repo_list.resolve_archive_name( repository['path'], restore_arguments.archive, config, local_borg_version, global_arguments, local_path, remote_path, ) dumps_from_archive = collect_dumps_from_archive( repository['path'], archive_name, config, local_borg_version, global_arguments, local_path, remote_path, borgmatic_runtime_directory, ) dumps_to_restore = get_dumps_to_restore(restore_arguments, dumps_from_archive) dumps_actually_restored = set() connection_params = { 'container': restore_arguments.container, 'hostname': restore_arguments.hostname, 'port': restore_arguments.port, 'username': restore_arguments.username, 'password': restore_arguments.password, 'restore_path': restore_arguments.restore_path, } # Restore each dump. 
for restore_dump in dumps_to_restore: found_data_source = get_configured_data_source( config, restore_dump, ) # For a dump that wasn't found via an exact match in the configuration, try to fallback # to an "all" data source. if not found_data_source: found_data_source = get_configured_data_source( config, Dump( restore_dump.hook_name, 'all', restore_dump.hostname, restore_dump.port, restore_dump.label, restore_dump.container, ), ) if not found_data_source: continue found_data_source = dict(found_data_source) found_data_source['name'] = restore_dump.data_source_name dumps_actually_restored.add(restore_dump) restore_single_dump( repository, config, local_borg_version, global_arguments, local_path, remote_path, archive_name, restore_dump.hook_name, dict(found_data_source, schemas=restore_arguments.schemas), connection_params, borgmatic_runtime_directory, ) borgmatic.hooks.dispatch.call_hooks_even_if_unconfigured( 'remove_data_source_dumps', config, borgmatic.hooks.dispatch.Hook_type.DATA_SOURCE, borgmatic_runtime_directory, patterns, global_arguments.dry_run, ) ensure_requested_dumps_restored(dumps_to_restore, dumps_actually_restored) borgmatic/borgmatic/actions/transfer.py000066400000000000000000000015351510202216200206250ustar00rootroot00000000000000import logging import borgmatic.borg.transfer logger = logging.getLogger(__name__) def run_transfer( repository, config, local_borg_version, transfer_arguments, global_arguments, local_path, remote_path, ): ''' Run the "transfer" action for the given repository. ''' if transfer_arguments.archive and config.get('match_archives'): raise ValueError( 'With the transfer action, only one of --archive and --match-archives/match_archives can be used.', ) logger.info('Transferring archives to repository') borgmatic.borg.transfer.transfer_archives( global_arguments.dry_run, repository['path'], config, local_borg_version, transfer_arguments, global_arguments, local_path=local_path, remote_path=remote_path, ) borgmatic/borgmatic/borg/000077500000000000000000000000001510202216200157145ustar00rootroot00000000000000borgmatic/borgmatic/borg/__init__.py000066400000000000000000000000001510202216200200130ustar00rootroot00000000000000borgmatic/borgmatic/borg/borg.py000066400000000000000000000047441510202216200172300ustar00rootroot00000000000000import logging import shlex import borgmatic.commands.arguments import borgmatic.config.paths import borgmatic.logger from borgmatic.borg import environment, flags from borgmatic.execute import DO_NOT_CAPTURE, execute_command logger = logging.getLogger(__name__) BORG_SUBCOMMANDS_WITH_SUBCOMMANDS = {'key', 'debug'} def run_arbitrary_borg( repository_path, config, local_borg_version, options, archive=None, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a configuration dict, the local Borg version, a sequence of arbitrary command-line Borg options, and an optional archive name, run an arbitrary Borg command, passing in REPOSITORY and ARCHIVE environment variables for optional use in the command. ''' borgmatic.logger.add_custom_log_levels() lock_wait = config.get('lock_wait', None) try: options = options[1:] if options[0] == '--' else options # Borg commands like "key" have a sub-command ("export", etc.) that must follow it. 
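        # For example (hypothetical invocations): options of ('key', 'export', '--paper') split
        # into borg_command=('key', 'export') and command_options=('--paper',), while
        # ('list', '--short') splits into borg_command=('list',) and command_options=('--short',).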
command_options_start_index = 2 if options[0] in BORG_SUBCOMMANDS_WITH_SUBCOMMANDS else 1 borg_command = tuple(options[:command_options_start_index]) command_options = tuple(options[command_options_start_index:]) if borg_command and borg_command[0] in borgmatic.commands.arguments.ACTION_ALIASES: logger.warning( f"Borg's {borg_command[0]} subcommand is supported natively by borgmatic. Try this instead: borgmatic {borg_command[0]}", ) except IndexError: borg_command = () command_options = () full_command = ( (local_path,) + borg_command + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + flags.make_flags('remote-path', remote_path) + flags.make_flags('lock-wait', lock_wait) + command_options ) return execute_command( # noqa: S604 tuple(shlex.quote(part) for part in full_command), output_file=DO_NOT_CAPTURE, shell=True, environment=dict( (environment.make_environment(config) or {}), BORG_REPO=repository_path, ARCHIVE=archive if archive else '', ), working_directory=borgmatic.config.paths.get_working_directory(config), borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) borgmatic/borgmatic/borg/break_lock.py000066400000000000000000000032111510202216200203570ustar00rootroot00000000000000import logging import shlex import borgmatic.config.paths from borgmatic.borg import environment, flags from borgmatic.execute import execute_command logger = logging.getLogger(__name__) def break_lock( repository_path, config, local_borg_version, global_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a configuration dict, the local Borg version, an argparse.Namespace of global arguments, and optional local and remote Borg paths, break any repository and cache locks leftover from Borg aborting. ''' umask = config.get('umask', None) lock_wait = config.get('lock_wait', None) extra_borg_options = config.get('extra_borg_options', {}).get('break_lock', '') full_command = ( (local_path, 'break-lock') + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + (('--log-json',) if config.get('log_json') else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + flags.make_repository_flags(repository_path, local_borg_version) ) execute_command( full_command, environment=environment.make_environment(config), working_directory=borgmatic.config.paths.get_working_directory(config), borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) borgmatic/borgmatic/borg/change_passphrase.py000066400000000000000000000047771510202216200217630ustar00rootroot00000000000000import logging import shlex import borgmatic.config.paths import borgmatic.execute import borgmatic.logger from borgmatic.borg import environment, flags logger = logging.getLogger(__name__) def change_passphrase( repository_path, config, local_borg_version, change_passphrase_arguments, global_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a configuration dict, the local Borg version, change passphrase arguments, and optional local and remote Borg paths, change the repository passphrase based on an interactive prompt. 
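For example, the assembled command resembles the following (an illustrative sketch; the exact flags depend on configuration and the Borg version):

    borg key change-passphrase --repo /path/to/repo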
''' borgmatic.logger.add_custom_log_levels() umask = config.get('umask', None) lock_wait = config.get('lock_wait', None) extra_borg_options = config.get('extra_borg_options', {}).get('key_change_passphrase', '') full_command = ( (local_path, 'key', 'change-passphrase') + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + (('--log-json',) if config.get('log_json') else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + flags.make_repository_flags( repository_path, local_borg_version, ) ) if global_arguments.dry_run: logger.info('Skipping passphrase change (dry run)') return # If the original passphrase is set programmatically, then Borg won't prompt for a new one! So # don't give Borg any passphrase, and it'll ask the user for both old and new ones. config_without_passphrase = { option_name: value for (option_name, value) in config.items() if option_name not in {'encryption_passphrase', 'encryption_passcommand'} } borgmatic.execute.execute_command( full_command, output_file=borgmatic.execute.DO_NOT_CAPTURE, output_log_level=logging.ANSWER, environment=environment.make_environment(config_without_passphrase), working_directory=borgmatic.config.paths.get_working_directory(config), borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) logger.answer( f"{repository_path}: Don't forget to update your encryption_passphrase option (if needed)", ) borgmatic/borgmatic/borg/check.py000066400000000000000000000155571510202216200173560ustar00rootroot00000000000000import argparse import json import logging import shlex import borgmatic.config.paths from borgmatic.borg import environment, feature, flags, repo_info from borgmatic.execute import DO_NOT_CAPTURE, execute_command logger = logging.getLogger(__name__) def make_archive_filter_flags(local_borg_version, config, checks, check_arguments): ''' Given the local Borg version, a configuration dict, a parsed sequence of checks, and check arguments as an argparse.Namespace instance, transform the checks into a tuple of command-line flags for filtering archives in a check command. If "check_last" is set in the configuration and "archives" is in checks, then include a "--last" flag. And if "prefix" is set in the configuration and "archives" is in checks, then include a "--match-archives" flag. ''' check_last = config.get('check_last', None) prefix = config.get('prefix') if 'archives' in checks or 'data' in checks: return (('--last', str(check_last)) if check_last else ()) + ( ( ('--match-archives', f'sh:{prefix}*') if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version) else ('--glob-archives', f'{prefix}*') ) if prefix else ( flags.make_match_archives_flags( config.get('match_archives'), config.get('archive_name_format'), local_borg_version, ) ) ) if check_last: logger.warning( 'Ignoring check_last option, as "archives" or "data" are not in consistency checks', ) if prefix: logger.warning( 'Ignoring consistency prefix option, as "archives" or "data" are not in consistency checks', ) return () def make_check_name_flags(checks, archive_filter_flags): ''' Given a parsed set of checks and a sequence of flags to filter archives, transform the checks into a tuple of command-line check flags.
For example, given parsed checks of: ('repository',) This will be returned as: ('--repository-only',) However, if both "repository" and "archives" are in checks, then omit the "only" flags from the returned flags because Borg does both checks by default. Note that a "data" check only works along with an "archives" check. ''' data_flags = ('--verify-data',) if 'data' in checks else () common_flags = (archive_filter_flags if 'archives' in checks else ()) + data_flags if {'repository', 'archives'}.issubset(checks): return common_flags return ( tuple(f'--{check}-only' for check in checks if check in {'repository', 'archives'}) + common_flags ) def get_repository_id( repository_path, config, local_borg_version, global_arguments, local_path, remote_path, ): ''' Given a local or remote repository path, a configuration dict, the local Borg version, global arguments, and local/remote commands to run, return the corresponding Borg repository ID. Raise ValueError if the Borg repository ID cannot be determined. ''' try: return json.loads( repo_info.display_repository_info( repository_path, config, local_borg_version, argparse.Namespace(json=True), global_arguments, local_path, remote_path, ), )['repository']['id'] except (json.JSONDecodeError, KeyError): raise ValueError(f'Cannot determine Borg repository ID for {repository_path}') def check_archives( repository_path, config, local_borg_version, check_arguments, global_arguments, checks, archive_filter_flags, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a configuration dict, the local Borg version, check arguments as an argparse.Namespace instance, global arguments, a set of named Borg checks to run (some combination "repository", "archives", and/or "data"), archive filter flags, and local/remote commands to run, check the contained Borg archives for consistency. ''' lock_wait = config.get('lock_wait') extra_borg_options = config.get('extra_borg_options', {}).get('check', '') verbosity_flags = () if logger.isEnabledFor(logging.INFO): verbosity_flags = ('--info',) if logger.isEnabledFor(logging.DEBUG): verbosity_flags = ('--debug', '--show-rc') try: repository_check_config = next( check for check in config.get('checks', ()) if check.get('name') == 'repository' ) except StopIteration: repository_check_config = {} max_duration = check_arguments.max_duration or repository_check_config.get('max_duration') umask = config.get('umask') borg_exit_codes = config.get('borg_exit_codes') working_directory = borgmatic.config.paths.get_working_directory(config) if 'data' in checks: checks.add('archives') grouped_checks = (checks,) # If max_duration is set, then archives and repository checks need to be run separately, as Borg # doesn't support --max-duration along with an archives check.
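# (Illustrative aside: the grouping below behaves like this hypothetical helper,
# which peels the repository check off into its own pass.)
#
#     def sketch_group_checks(checks, max_duration):
#         if max_duration and 'archives' in checks and 'repository' in checks:
#             return (checks - {'repository'}, {'repository'})
#         return (checks,)
#
#     sketch_group_checks({'repository', 'archives'}, max_duration=3600)
#     # -> ({'archives'}, {'repository'})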
if max_duration and 'archives' in checks and 'repository' in checks: checks.remove('repository') grouped_checks = (checks, {'repository'}) for checks_subset in grouped_checks: full_command = ( (local_path, 'check') + (('--repair',) if check_arguments.repair else ()) + ( ('--max-duration', str(max_duration)) if max_duration and 'repository' in checks_subset else () ) + make_check_name_flags(checks_subset, archive_filter_flags) + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + (('--log-json',) if config.get('log_json') else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + verbosity_flags + (('--progress',) if config.get('progress') else ()) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + flags.make_repository_flags(repository_path, local_borg_version) ) execute_command( full_command, # The Borg repair option triggers an interactive prompt, which won't work when output is # captured. And progress messes with the terminal directly. output_file=( DO_NOT_CAPTURE if check_arguments.repair or config.get('progress') else None ), environment=environment.make_environment(config), working_directory=working_directory, borg_local_path=local_path, borg_exit_codes=borg_exit_codes, ) borgmatic/borgmatic/borg/compact.py000066400000000000000000000042631510202216200177210ustar00rootroot00000000000000import logging import shlex import borgmatic.config.paths from borgmatic.borg import environment, feature, flags from borgmatic.execute import execute_command logger = logging.getLogger(__name__) def compact_segments( dry_run, repository_path, config, local_borg_version, global_arguments, local_path='borg', remote_path=None, cleanup_commits=False, ): ''' Given dry-run flag, a local or remote repository path, a configuration dict, and the local Borg version, compact the segments in a repository. 
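The assembled command resembles the following (an illustrative sketch; the exact flags depend on configuration and the Borg version):

    borg compact --threshold 10 --progress /path/to/repo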
''' umask = config.get('umask', None) lock_wait = config.get('lock_wait', None) extra_borg_options = config.get('extra_borg_options', {}).get('compact', '') threshold = config.get('compact_threshold') full_command = ( (local_path, 'compact') + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + (('--log-json',) if config.get('log_json') else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--progress',) if config.get('progress') else ()) + (('--cleanup-commits',) if cleanup_commits else ()) + (('--threshold', str(threshold)) if threshold else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + ( ('--dry-run',) if dry_run and feature.available(feature.Feature.DRY_RUN_COMPACT, local_borg_version) else () ) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + flags.make_repository_flags(repository_path, local_borg_version) ) if dry_run and not feature.available(feature.Feature.DRY_RUN_COMPACT, local_borg_version): logger.info('Skipping compact (dry run)') return execute_command( full_command, output_log_level=logging.INFO, environment=environment.make_environment(config), working_directory=borgmatic.config.paths.get_working_directory(config), borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) borgmatic/borgmatic/borg/create.py000066400000000000000000000356701510202216200175440ustar00rootroot00000000000000import logging import os import pathlib import shlex import stat import textwrap import borgmatic.borg.pattern import borgmatic.config.paths import borgmatic.logger from borgmatic.borg import environment, feature, flags from borgmatic.execute import ( DO_NOT_CAPTURE, execute_command, execute_command_and_capture_output, execute_command_with_processes, ) logger = logging.getLogger(__name__) def special_file(path, working_directory=None): ''' Return whether the given path is a special file (character device, block device, or named pipe / FIFO). If a working directory is given, take it into account when making the full path to check. ''' try: mode = os.stat(os.path.join(working_directory or '', path)).st_mode except (FileNotFoundError, OSError): return False return stat.S_ISCHR(mode) or stat.S_ISBLK(mode) or stat.S_ISFIFO(mode) def any_parent_directories(path, candidate_parents): ''' Return whether any of the given candidate parent directories are an actual parent of the given path. This includes grandparents, etc. ''' for parent in candidate_parents: if pathlib.PurePosixPath(parent) in pathlib.PurePath(path).parents: return True return False def validate_planned_backup_paths( dry_run, create_command, config, patterns, local_path, working_directory, borgmatic_runtime_directory, ): ''' Given a dry-run flag, a Borg create command as a tuple, a configuration dict, a local Borg path, a working directory, and the borgmatic runtime directory, perform a "borg create --dry-run" to determine whether Borg's planned paths to include in a backup look good. Specifically, if the given runtime directory exists, validate that it will be included in a backup and hasn't been excluded. Raise ValueError if the runtime directory has been excluded via "exclude_patterns" or similar, because any features that rely on the runtime directory getting backed up will break. For instance, without the runtime directory, Borg can't consume any database dumps and borgmatic may hang waiting for them to be consumed. 
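As a hypothetical example, a configuration containing the following would trigger that ValueError, assuming the runtime directory lives at /run/borgmatic:

    exclude_patterns:
        - /run/borgmatic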
''' # Omit "--exclude-nodump" from the Borg dry run command, because that flag causes Borg to open # files including any named pipe we've created. And omit "--filter" because that can break the # paths output parsing below such that path lines no longer start with the expected "- ". paths_output = execute_command_and_capture_output( ( *flags.omit_flag_and_value( flags.omit_flag( flags.omit_flag(create_command, '--exclude-nodump'), '--log-json', ), '--filter', ), '--dry-run', '--list', ), capture_stderr=True, working_directory=working_directory, environment=environment.make_environment(config), borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) # These are all the individual files that Borg is planning to backup as determined by the Borg # create dry run above. paths = tuple( path_line.split(' ', 1)[1] for path_line in paths_output.split('\n') if path_line and path_line.startswith(('- ', '+ ')) ) # These are the subset of output paths contained within the borgmatic runtime directory. paths_inside_runtime_directory = { path for path in paths if any_parent_directories(path, (borgmatic_runtime_directory,)) } # If the runtime directory isn't present in the source patterns, then we shouldn't expect it to # be in the paths output from the Borg dry run. runtime_directory_present_in_patterns = any( pattern for pattern in patterns if any_parent_directories(pattern.path, (borgmatic_runtime_directory,)) if pattern.type == borgmatic.borg.pattern.Pattern_type.ROOT ) # If no paths to backup are inside the runtime directory, it must've been excluded. if ( not paths_inside_runtime_directory and runtime_directory_present_in_patterns and not dry_run and os.path.exists(borgmatic_runtime_directory) ): raise ValueError( f'The runtime directory {os.path.normpath(borgmatic_runtime_directory)} overlaps with the configured excludes or patterns with excludes. Please ensure the runtime directory is not excluded.', ) return tuple(path for path in paths if path not in paths_inside_runtime_directory) MAX_SPECIAL_FILE_PATHS_LENGTH = 1000 def make_base_create_command( dry_run, repository_path, config, patterns, local_borg_version, global_arguments, borgmatic_runtime_directory, archive_suffix='', local_path='borg', remote_path=None, json=False, comment=None, stream_processes=None, ): ''' Given verbosity/dry-run flags, a local or remote repository path, a configuration dict, a sequence of patterns as borgmatic.borg.pattern.Pattern instances, the local Borg version, global arguments as an argparse.Namespace instance, the borgmatic runtime directory, a string suffix to add to the archive name, the local Borg path, the remote Borg path, whether to output JSON, comment text to add to the created archive, and a sequence of processes streaming data to Borg, return a tuple of (base Borg create command flags, Borg create command positional arguments, open pattern file handle). 
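The return value resembles the following (an illustrative sketch; the flags and the hypothetical paths shown vary with configuration and Borg version):

    (
        ('borg', 'create', '--patterns-from', '/run/user/0/borgmatic/patterns', ...),
        ('/path/to/repo::{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}',),
        <open patterns file handle>,
    )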
''' if config.get('source_directories_must_exist', False): borgmatic.borg.pattern.check_all_root_patterns_exist(patterns) patterns_file = borgmatic.borg.pattern.write_patterns_file( patterns, borgmatic_runtime_directory, ) checkpoint_interval = config.get('checkpoint_interval', None) checkpoint_volume = config.get('checkpoint_volume', None) chunker_params = config.get('chunker_params', None) compression = config.get('compression', None) upload_rate_limit = config.get('upload_rate_limit', None) upload_buffer_size = config.get('upload_buffer_size', None) umask = config.get('umask', None) lock_wait = config.get('lock_wait', None) list_filter_flags = flags.make_list_filter_flags(local_borg_version, dry_run) files_cache = config.get('files_cache') archive_name_format = ( config.get('archive_name_format', flags.get_default_archive_name_format(local_borg_version)) + archive_suffix ) extra_borg_options = config.get('extra_borg_options', {}).get('create', '') if feature.available(feature.Feature.ATIME, local_borg_version): atime_flags = ('--atime',) if config.get('atime') is True else () else: atime_flags = ('--noatime',) if config.get('atime') is False else () if feature.available(feature.Feature.NOFLAGS, local_borg_version): noflags_flags = ('--noflags',) if config.get('flags') is False else () else: noflags_flags = ('--nobsdflags',) if config.get('flags') is False else () if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version): numeric_ids_flags = ('--numeric-ids',) if config.get('numeric_ids') else () else: numeric_ids_flags = ('--numeric-owner',) if config.get('numeric_ids') else () if feature.available(feature.Feature.UPLOAD_RATELIMIT, local_borg_version): upload_ratelimit_flags = ( ('--upload-ratelimit', str(upload_rate_limit)) if upload_rate_limit else () ) else: upload_ratelimit_flags = ( ('--remote-ratelimit', str(upload_rate_limit)) if upload_rate_limit else () ) create_flags = ( tuple(local_path.split(' ')) + ('create',) + (('--patterns-from', patterns_file.name) if patterns_file else ()) + flags.make_exclude_flags(config) + (('--comment', comment) if comment else ()) + (('--checkpoint-interval', str(checkpoint_interval)) if checkpoint_interval else ()) + (('--checkpoint-volume', str(checkpoint_volume)) if checkpoint_volume else ()) + (('--chunker-params', chunker_params) if chunker_params else ()) + (('--compression', compression) if compression else ()) + upload_ratelimit_flags + (('--upload-buffer', str(upload_buffer_size)) if upload_buffer_size else ()) + (('--one-file-system',) if config.get('one_file_system') else ()) + numeric_ids_flags + atime_flags + (('--noctime',) if config.get('ctime') is False else ()) + (('--nobirthtime',) if config.get('birthtime') is False else ()) + (('--read-special',) if config.get('read_special') or stream_processes else ()) + noflags_flags + (('--files-cache', files_cache) if files_cache else ()) + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + (('--log-json',) if config.get('log_json') else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + ( ('--list', '--filter', list_filter_flags) if config.get('list_details') and not json and not config.get('progress') else () ) + (('--dry-run',) if dry_run else ()) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) ) create_positional_arguments = flags.make_repository_archive_flags( repository_path, archive_name_format, local_borg_version, ) working_directory = 
borgmatic.config.paths.get_working_directory(config) logger.debug('Checking file paths Borg plans to include') planned_backup_paths = validate_planned_backup_paths( dry_run, create_flags + create_positional_arguments, config, patterns, local_path, working_directory, borgmatic_runtime_directory=borgmatic_runtime_directory, ) # If database hooks are enabled (as indicated by streaming processes), exclude files that might # cause Borg to hang. But skip this if the user has explicitly set the "read_special" to True. if stream_processes and not config.get('read_special'): logger.warning( 'Ignoring configured "read_special" value of false, as true is needed for database hooks.', ) special_file_paths = tuple( path for path in planned_backup_paths if special_file(path, working_directory) ) if special_file_paths: truncated_special_file_paths = textwrap.shorten( ', '.join(special_file_paths), width=MAX_SPECIAL_FILE_PATHS_LENGTH, placeholder=' ...', ) logger.warning( f'Excluding special files to prevent Borg from hanging: {truncated_special_file_paths}', ) patterns_file = borgmatic.borg.pattern.write_patterns_file( tuple( borgmatic.borg.pattern.Pattern( special_file_path, borgmatic.borg.pattern.Pattern_type.NO_RECURSE, borgmatic.borg.pattern.Pattern_style.FNMATCH, source=borgmatic.borg.pattern.Pattern_source.INTERNAL, ) for special_file_path in special_file_paths ), borgmatic_runtime_directory, patterns_file=patterns_file, ) if '--patterns-from' not in create_flags: create_flags += ('--patterns-from', patterns_file.name) return (create_flags, create_positional_arguments, patterns_file) def create_archive( dry_run, repository_path, config, patterns, local_borg_version, global_arguments, borgmatic_runtime_directory, archive_suffix='', local_path='borg', remote_path=None, json=False, comment=None, stream_processes=None, ): ''' Given verbosity/dry-run flags, a local or remote repository path, a configuration dict, a sequence of loaded configuration paths, the local Borg version, global arguments as an argparse.Namespace instance, the borgmatic runtime directory, a string suffix to add to the archive name, the local Borg path, the remote Borg path, whether to output JSON, and comment text to add to the created archive, and a sequence of processes streaming data to Borg, create a Borg archive and return Borg's JSON output (if any). If a sequence of stream processes is given (instances of subprocess.Popen), then execute the create command while also triggering the given processes to produce output. ''' borgmatic.logger.add_custom_log_levels() working_directory = borgmatic.config.paths.get_working_directory(config) (create_flags, create_positional_arguments, _) = make_base_create_command( dry_run, repository_path, config, patterns, local_borg_version, global_arguments, borgmatic_runtime_directory, archive_suffix, local_path, remote_path, json, comment, stream_processes, ) if json: output_log_level = None elif config.get('list_details') or (config.get('statistics') and not dry_run): output_log_level = logging.ANSWER else: output_log_level = logging.INFO # The progress output isn't compatible with captured and logged output, as progress messes with # the terminal directly. 
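# (Illustrative aside: the output-level selection above amounts to this hypothetical
# helper. ANSWER is borgmatic's custom log level; treating it as a plain constant
# here is an assumption.)
#
#     def sketch_output_log_level(json, list_details, statistics, dry_run):
#         if json:
#             return None  # Capture the JSON output instead of logging it.
#         if list_details or (statistics and not dry_run):
#             return logging.ANSWER
#         return logging.INFO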
output_file = DO_NOT_CAPTURE if config.get('progress') else None create_flags += ( (('--info',) if logger.getEffectiveLevel() == logging.INFO and not json else ()) + (('--stats',) if config.get('statistics') and not json and not dry_run else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) and not json else ()) + (('--progress',) if config.get('progress') else ()) + (('--json',) if json else ()) ) borg_exit_codes = config.get('borg_exit_codes') if stream_processes: return execute_command_with_processes( create_flags + create_positional_arguments, stream_processes, output_log_level, output_file, working_directory=working_directory, environment=environment.make_environment(config), borg_local_path=local_path, borg_exit_codes=borg_exit_codes, ) if output_log_level is None: return execute_command_and_capture_output( create_flags + create_positional_arguments, working_directory=working_directory, environment=environment.make_environment(config), borg_local_path=local_path, borg_exit_codes=borg_exit_codes, ) execute_command( create_flags + create_positional_arguments, output_log_level, output_file, working_directory=working_directory, environment=environment.make_environment(config), borg_local_path=local_path, borg_exit_codes=borg_exit_codes, ) return None borgmatic/borgmatic/borg/delete.py000066400000000000000000000116771510202216200175440ustar00rootroot00000000000000import argparse import logging import shlex import borgmatic.borg.environment import borgmatic.borg.feature import borgmatic.borg.flags import borgmatic.borg.repo_delete import borgmatic.config.paths import borgmatic.execute logger = logging.getLogger(__name__) FORCE_HARDER_FLAG_COUNT = 2 def make_delete_command( repository, config, local_borg_version, delete_arguments, global_arguments, local_path, remote_path, ): ''' Given a local or remote repository dict, a configuration dict, the local Borg version, the arguments to the delete action as an argparse.Namespace, and global arguments, return a command as a tuple to delete archives from the repository. ''' extra_borg_options = config.get('extra_borg_options', {}).get('delete', '') return ( (local_path, 'delete') + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + borgmatic.borg.flags.make_flags('dry-run', global_arguments.dry_run) + borgmatic.borg.flags.make_flags('remote-path', remote_path) + borgmatic.borg.flags.make_flags('umask', config.get('umask')) + borgmatic.borg.flags.make_flags('log-json', config.get('log_json')) + borgmatic.borg.flags.make_flags('lock-wait', config.get('lock_wait')) + borgmatic.borg.flags.make_flags('list', config.get('list_details')) + ( ( ('--force',) + (('--force',) if delete_arguments.force >= FORCE_HARDER_FLAG_COUNT else ()) ) if delete_arguments.force else () ) # Ignore match_archives and archive_name_format options from configuration, so the user has # to be explicit on the command-line about the archives they want to delete. 
+ borgmatic.borg.flags.make_match_archives_flags( delete_arguments.match_archives or delete_arguments.archive, archive_name_format=None, local_borg_version=local_borg_version, default_archive_name_format='*', ) + (('--stats',) if config.get('statistics') else ()) + borgmatic.borg.flags.make_flags_from_arguments( delete_arguments, excludes=( 'list_details', 'statistics', 'force', 'match_archives', 'archive', 'repository', ), ) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + borgmatic.borg.flags.make_repository_flags(repository['path'], local_borg_version) ) ARCHIVE_RELATED_ARGUMENT_NAMES = ( 'archive', 'match_archives', 'first', 'last', 'oldest', 'newest', 'older', 'newer', ) def delete_archives( repository, config, local_borg_version, delete_arguments, global_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository dict, a configuration dict, the local Borg version, the arguments to the delete action as an argparse.Namespace, global arguments as an argparse.Namespace, and local and remote Borg paths, delete the selected archives from the repository. If no archives are selected, then delete the entire repository. ''' borgmatic.logger.add_custom_log_levels() if not any( getattr(delete_arguments, argument_name, None) for argument_name in ARCHIVE_RELATED_ARGUMENT_NAMES ): if borgmatic.borg.feature.available( borgmatic.borg.feature.Feature.REPO_DELETE, local_borg_version, ): logger.warning( 'Deleting an entire repository with the delete action is deprecated when using Borg 2.x+. Use the repo-delete action instead.', ) repo_delete_arguments = argparse.Namespace( repository=repository['path'], list_details=delete_arguments.list_details, force=delete_arguments.force, cache_only=delete_arguments.cache_only, keep_security_info=delete_arguments.keep_security_info, ) borgmatic.borg.repo_delete.delete_repository( repository, config, local_borg_version, repo_delete_arguments, global_arguments, local_path, remote_path, ) return command = make_delete_command( repository, config, local_borg_version, delete_arguments, global_arguments, local_path, remote_path, ) borgmatic.execute.execute_command( command, output_log_level=logging.ANSWER, environment=borgmatic.borg.environment.make_environment(config), working_directory=borgmatic.config.paths.get_working_directory(config), borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) borgmatic/borgmatic/borg/environment.py000066400000000000000000000105551510202216200206400ustar00rootroot00000000000000import os import borgmatic.borg.passcommand import borgmatic.hooks.credential.parse OPTION_TO_ENVIRONMENT_VARIABLE = { 'borg_base_directory': 'BORG_BASE_DIR', 'borg_config_directory': 'BORG_CONFIG_DIR', 'borg_cache_directory': 'BORG_CACHE_DIR', 'borg_files_cache_ttl': 'BORG_FILES_CACHE_TTL', 'borg_security_directory': 'BORG_SECURITY_DIR', 'borg_keys_directory': 'BORG_KEYS_DIR', 'borg_key_file': 'BORG_KEY_FILE', 'ssh_command': 'BORG_RSH', 'temporary_directory': 'TMPDIR', } DEFAULT_BOOL_OPTION_TO_UNCONDITIONAL_ENVIRONMENT_VARIABLE = { 'check_i_know_what_i_am_doing': 'BORG_CHECK_I_KNOW_WHAT_I_AM_DOING', } DEFAULT_BOOL_OPTION_TO_ENVIRONMENT_VARIABLE = { 'debug_passphrase': 'BORG_DEBUG_PASSPHRASE', 'display_passphrase': 'BORG_DISPLAY_PASSPHRASE', 'relocated_repo_access_is_ok': 'BORG_RELOCATED_REPO_ACCESS_IS_OK', 'unknown_unencrypted_repo_access_is_ok': 'BORG_UNKNOWN_UNENCRYPTED_REPO_ACCESS_IS_OK', 'use_chunks_archive': 'BORG_USE_CHUNKS_ARCHIVE', } def make_environment(config): ''' Given a 
borgmatic configuration dict, convert it to a Borg environment variable dict, merge it with a copy of the current environment variables, and return the result. Do not reuse this environment across multiple Borg invocations, because it can include references to resources like anonymous pipes for passphrases—which can only be consumed once. Here's how native Borg precedence works for a few of the environment variables: 1. BORG_PASSPHRASE, if set, is used first. 2. BORG_PASSCOMMAND is used only if BORG_PASSPHRASE isn't set. 3. BORG_PASSPHRASE_FD is used only if neither of the above are set. In borgmatic, we want to simulate this precedence order, but there are some additional complications. First, values can come from either configuration or from environment variables set outside borgmatic; configured options should take precedence. Second, when borgmatic gets a passphrase—directly from configuration or indirectly via a credential hook or a passcommand—we want to pass that passphrase to Borg via an anonymous pipe (+ BORG_PASSPHRASE_FD), since that's more secure than using an environment variable (BORG_PASSPHRASE). ''' environment = dict(os.environ) for option_name, environment_variable_name in OPTION_TO_ENVIRONMENT_VARIABLE.items(): value = config.get(option_name) if value is not None: environment[environment_variable_name] = str(value) if 'encryption_passphrase' in config: environment.pop('BORG_PASSPHRASE', None) environment.pop('BORG_PASSCOMMAND', None) if 'encryption_passcommand' in config: environment.pop('BORG_PASSCOMMAND', None) passphrase = borgmatic.hooks.credential.parse.resolve_credential( config.get('encryption_passphrase'), config, ) if passphrase is None: passphrase = borgmatic.borg.passcommand.get_passphrase_from_passcommand(config) # If there's a passphrase (from configuration, from a configured credential, or from a # configured passcommand), send it to Borg via an anonymous pipe. if passphrase is not None: read_file_descriptor, write_file_descriptor = os.pipe() os.write(write_file_descriptor, passphrase.encode('utf-8')) os.close(write_file_descriptor) # This plus subprocess.Popen(..., close_fds=False) in execute.py is necessary for the Borg # child process to inherit the file descriptor. os.set_inheritable(read_file_descriptor, True) environment['BORG_PASSPHRASE_FD'] = str(read_file_descriptor) for ( option_name, environment_variable_name, ) in DEFAULT_BOOL_OPTION_TO_ENVIRONMENT_VARIABLE.items(): if os.environ.get(environment_variable_name) is None: value = config.get(option_name) environment[environment_variable_name] = 'YES' if value else 'NO' for ( option_name, environment_variable_name, ) in DEFAULT_BOOL_OPTION_TO_UNCONDITIONAL_ENVIRONMENT_VARIABLE.items(): value = config.get(option_name) if value is not None: environment[environment_variable_name] = 'YES' if value else 'NO' # On Borg 1.4.0a1+, take advantage of more specific exit codes. No effect on # older versions of Borg. 
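# (Illustrative aside, not borgmatic's own code: the anonymous-pipe technique used
# above for BORG_PASSPHRASE_FD looks like this in miniature; the SECRET_FD name is
# hypothetical.)
#
#     read_fd, write_fd = os.pipe()
#     os.write(write_fd, b'hunter2')
#     os.close(write_fd)
#     os.set_inheritable(read_fd, True)
#     subprocess.run(
#         ('sh', '-c', 'cat /dev/fd/"$SECRET_FD"'),
#         env=dict(os.environ, SECRET_FD=str(read_fd)),
#         close_fds=False,
#     )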
environment['BORG_EXIT_CODES'] = 'modern' return environment borgmatic/borgmatic/borg/export_key.py000066400000000000000000000053221510202216200204610ustar00rootroot00000000000000import logging import os import shlex import borgmatic.config.paths import borgmatic.logger from borgmatic.borg import environment, flags from borgmatic.execute import DO_NOT_CAPTURE, execute_command logger = logging.getLogger(__name__) def export_key( repository_path, config, local_borg_version, export_arguments, global_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a configuration dict, the local Borg version, export arguments, and optional local and remote Borg paths, export the repository key to the destination path indicated in the export arguments. If the destination path is empty or "-", then print the key to stdout instead of to a file. Raise FileExistsError if a path is given but it already exists on disk. ''' borgmatic.logger.add_custom_log_levels() umask = config.get('umask', None) lock_wait = config.get('lock_wait', None) working_directory = borgmatic.config.paths.get_working_directory(config) extra_borg_options = config.get('extra_borg_options', {}).get('key_export', '') if export_arguments.path and export_arguments.path != '-': if os.path.exists(os.path.join(working_directory or '', export_arguments.path)): raise FileExistsError( f'Destination path {export_arguments.path} already exists. Aborting.', ) output_file = None else: output_file = DO_NOT_CAPTURE full_command = ( (local_path, 'key', 'export') + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + (('--log-json',) if config.get('log_json') else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + flags.make_flags('paper', export_arguments.paper) + flags.make_flags('qr-html', export_arguments.qr_html) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + flags.make_repository_flags( repository_path, local_borg_version, ) + ((export_arguments.path,) if output_file is None else ()) ) if global_arguments.dry_run: logger.info('Skipping key export (dry run)') return execute_command( full_command, output_file=output_file, output_log_level=logging.ANSWER, environment=environment.make_environment(config), working_directory=working_directory, borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) borgmatic/borgmatic/borg/export_tar.py000066400000000000000000000054471510202216200204670ustar00rootroot00000000000000import logging import shlex import borgmatic.config.paths import borgmatic.logger from borgmatic.borg import environment, flags from borgmatic.execute import DO_NOT_CAPTURE, execute_command logger = logging.getLogger(__name__) def export_tar_archive( dry_run, repository_path, archive, paths, destination_path, config, local_borg_version, global_arguments, local_path='borg', remote_path=None, tar_filter=None, strip_components=None, ): ''' Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to export from the archive, a destination path to export to, a configuration dict, the local Borg version, optional local and remote Borg paths, an optional filter program, whether to include per-file details, and an optional number of path components to strip, export the archive into the given destination path as a tar-formatted 
file. If the destination path is "-", then stream the output to stdout instead of to a file. ''' borgmatic.logger.add_custom_log_levels() umask = config.get('umask', None) lock_wait = config.get('lock_wait', None) extra_borg_options = config.get('extra_borg_options', {}).get('export_tar', '') full_command = ( (local_path, 'export-tar') + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + (('--log-json',) if config.get('log_json') else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--list',) if config.get('list_details') else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + (('--dry-run',) if dry_run else ()) + (('--tar-filter', tar_filter) if tar_filter else ()) + (('--strip-components', str(strip_components)) if strip_components else ()) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + flags.make_repository_archive_flags( repository_path, archive, local_borg_version, ) + (destination_path,) + (tuple(paths) if paths else ()) ) output_log_level = logging.ANSWER if config.get('list_details') else logging.INFO if dry_run: logger.info('Skipping export to tar file (dry run)') return execute_command( full_command, output_file=DO_NOT_CAPTURE if destination_path == '-' else None, output_log_level=output_log_level, environment=environment.make_environment(config), working_directory=borgmatic.config.paths.get_working_directory(config), borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) borgmatic/borgmatic/borg/extract.py000066400000000000000000000156231510202216200177470ustar00rootroot00000000000000import logging import os import shlex import subprocess import borgmatic.config.paths import borgmatic.config.validate from borgmatic.borg import environment, feature, flags, repo_list from borgmatic.execute import DO_NOT_CAPTURE, execute_command logger = logging.getLogger(__name__) def extract_last_archive_dry_run( config, local_borg_version, global_arguments, repository_path, lock_wait=None, local_path='borg', remote_path=None, ): ''' Perform an extraction dry-run of the most recent archive. If there are no archives, skip the dry-run. ''' extra_borg_options = config.get('extra_borg_options', {}).get('extract', '') verbosity_flags = () if logger.isEnabledFor(logging.DEBUG): verbosity_flags = ('--debug', '--show-rc') elif logger.isEnabledFor(logging.INFO): verbosity_flags = ('--info',) try: last_archive_name = repo_list.resolve_archive_name( repository_path, 'latest', config, local_borg_version, global_arguments, local_path, remote_path, ) except ValueError: logger.warning('No archives found. 
Skipping extract consistency check.') return list_flag = ('--list',) if logger.isEnabledFor(logging.DEBUG) else () full_extract_command = ( (local_path, 'extract', '--dry-run') + (('--remote-path', remote_path) if remote_path else ()) + (('--log-json',) if config.get('log_json') else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + verbosity_flags + list_flag + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + flags.make_repository_archive_flags( repository_path, last_archive_name, local_borg_version, ) ) execute_command( full_extract_command, environment=environment.make_environment(config), working_directory=borgmatic.config.paths.get_working_directory(config), borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) def extract_archive( dry_run, repository, archive, paths, config, local_borg_version, global_arguments, local_path='borg', remote_path=None, destination_path=None, strip_components=None, extract_to_stdout=False, ): ''' Given a dry-run flag, a local or remote repository path, an archive name, zero or more paths to restore from the archive, the local Borg version string, an argparse.Namespace of global arguments, a configuration dict, optional local and remote Borg paths, and an optional destination path to extract to, extract the archive into the current directory. If extract to stdout is True, then start the extraction streaming to stdout, and return that extract process as an instance of subprocess.Popen. ''' umask = config.get('umask', None) lock_wait = config.get('lock_wait', None) extra_borg_options = config.get('extra_borg_options', {}).get('extract', '') if config.get('progress') and extract_to_stdout: raise ValueError('progress and extract to stdout cannot both be set') if feature.available(feature.Feature.NUMERIC_IDS, local_borg_version): numeric_ids_flags = ('--numeric-ids',) if config.get('numeric_ids') else () else: numeric_ids_flags = ('--numeric-owner',) if config.get('numeric_ids') else () if strip_components == 'all': if not paths: raise ValueError('The --strip-components flag with "all" requires at least one --path') # Calculate the maximum number of leading path components of the given paths. "if piece" # ignores empty path components, e.g. those resulting from a leading slash. And the "- 1" # is so this doesn't count the final path component, e.g. the filename itself. strip_components = max( 0, *( len(tuple(piece for piece in path.split(os.path.sep) if piece)) - 1 for path in paths ), ) working_directory = borgmatic.config.paths.get_working_directory(config) full_command = ( (local_path, 'extract') + (('--remote-path', remote_path) if remote_path else ()) + numeric_ids_flags + (('--umask', str(umask)) if umask else ()) + (('--log-json',) if config.get('log_json') else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--list', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + (('--dry-run',) if dry_run else ()) + (('--strip-components', str(strip_components)) if strip_components else ()) + (('--progress',) if config.get('progress') else ()) + (('--stdout',) if extract_to_stdout else ()) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + flags.make_repository_archive_flags( # Make the repository path absolute so the destination directory used below via changing # the working directory doesn't prevent Borg from finding the repo. 
But also apply the # user's configured working directory (if any) to the repo path. borgmatic.config.validate.normalize_repository_path(repository, working_directory), archive, local_borg_version, ) + (tuple(paths) if paths else ()) ) borg_exit_codes = config.get('borg_exit_codes') full_destination_path = ( os.path.join(working_directory or '', destination_path) if destination_path else None ) # The progress output isn't compatible with captured and logged output, as progress messes with # the terminal directly. if config.get('progress'): return execute_command( full_command, output_file=DO_NOT_CAPTURE, environment=environment.make_environment(config), working_directory=full_destination_path, borg_local_path=local_path, borg_exit_codes=borg_exit_codes, ) return None if extract_to_stdout: return execute_command( full_command, output_file=subprocess.PIPE, run_to_completion=False, environment=environment.make_environment(config), working_directory=full_destination_path, borg_local_path=local_path, borg_exit_codes=borg_exit_codes, ) # Don't give Borg local path so as to error on warnings, as "borg extract" only gives a warning # if the restore paths don't exist in the archive. execute_command( full_command, environment=environment.make_environment(config), working_directory=full_destination_path, borg_local_path=local_path, borg_exit_codes=borg_exit_codes, ) return None borgmatic/borgmatic/borg/feature.py000066400000000000000000000034241510202216200177240ustar00rootroot00000000000000from enum import Enum from packaging.version import parse class Feature(Enum): COMPACT = 1 ATIME = 2 NOFLAGS = 3 NUMERIC_IDS = 4 UPLOAD_RATELIMIT = 5 SEPARATE_REPOSITORY_ARCHIVE = 6 REPO_CREATE = 7 REPO_LIST = 8 REPO_INFO = 9 REPO_DELETE = 10 MATCH_ARCHIVES = 11 EXCLUDED_FILES_MINUS = 12 ARCHIVE_SERIES = 13 NO_PRUNE_STATS = 14 DRY_RUN_COMPACT = 15 FEATURE_TO_MINIMUM_BORG_VERSION = { Feature.COMPACT: parse('1.2.0a2'), # borg compact Feature.ATIME: parse('1.2.0a7'), # borg create --atime Feature.NOFLAGS: parse('1.2.0a8'), # borg create --noflags Feature.NUMERIC_IDS: parse('1.2.0b3'), # borg create/extract/mount --numeric-ids Feature.UPLOAD_RATELIMIT: parse('1.2.0b3'), # borg create --upload-ratelimit Feature.SEPARATE_REPOSITORY_ARCHIVE: parse('2.0.0a2'), # --repo with separate archive Feature.REPO_CREATE: parse('2.0.0a2'), # borg repo-create Feature.REPO_LIST: parse('2.0.0a2'), # borg repo-list Feature.REPO_INFO: parse('2.0.0a2'), # borg repo-info Feature.REPO_DELETE: parse('2.0.0a2'), # borg repo-delete Feature.MATCH_ARCHIVES: parse('2.0.0b3'), # borg --match-archives Feature.EXCLUDED_FILES_MINUS: parse('2.0.0b5'), # --list --filter uses "-" for excludes Feature.ARCHIVE_SERIES: parse('2.0.0b11'), # identically named archives form a series Feature.NO_PRUNE_STATS: parse('2.0.0b10'), # prune --stats is not available Feature.DRY_RUN_COMPACT: parse('1.2.9'), # borg compact --dry-run support } def available(feature, borg_version): ''' Given a Borg Feature constant and a Borg version string, return whether that feature is available in that version of Borg. ''' return FEATURE_TO_MINIMUM_BORG_VERSION[feature] <= parse(borg_version) borgmatic/borgmatic/borg/flags.py000066400000000000000000000177211510202216200173720ustar00rootroot00000000000000import itertools import json import logging import re from borgmatic.borg import feature logger = logging.getLogger(__name__) def make_flags(name, value): ''' Given a flag name and its value, return it formatted as Borg-compatible flags. 
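For example (illustrative):

    make_flags('log-json', True)  ->  ('--log-json',)
    make_flags('lock_wait', 5)    ->  ('--lock-wait', '5')
    make_flags('umask', None)     ->  ()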
''' if not value: return () flag = f"--{name.replace('_', '-')}" if value is True: return (flag,) return (flag, str(value)) def make_flags_from_arguments(arguments, excludes=()): ''' Given borgmatic command-line arguments as an instance of argparse.Namespace, and optionally a list of named arguments to exclude, generate and return the corresponding Borg command-line flags as a tuple. ''' return tuple( itertools.chain.from_iterable( make_flags(name, value=getattr(arguments, name)) for name in sorted(vars(arguments)) if name not in excludes and not name.startswith('_') ), ) def make_repository_flags(repository_path, local_borg_version): ''' Given the path of a Borg repository and the local Borg version, return Borg-version-appropriate command-line flags (as a tuple) for selecting that repository. ''' return ( ('--repo',) if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version) else () ) + (repository_path,) ARCHIVE_HASH_PATTERN = re.compile(r'[0-9a-fA-F]{8,}$') def make_repository_archive_flags(repository_path, archive, local_borg_version): ''' Given the path of a Borg repository, an archive name or pattern, and the local Borg version, return Borg-version-appropriate command-line flags (as a tuple) for selecting that repository and archive. ''' return ( ( '--repo', repository_path, ( f'aid:{archive}' if feature.available(feature.Feature.ARCHIVE_SERIES, local_borg_version) and ARCHIVE_HASH_PATTERN.match(archive) and not archive.startswith('aid:') else archive ), ) if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version) else (f'{repository_path}::{archive}',) ) DEFAULT_ARCHIVE_NAME_FORMAT_WITHOUT_SERIES = '{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}' DEFAULT_ARCHIVE_NAME_FORMAT_WITH_SERIES = '{hostname}' def get_default_archive_name_format(local_borg_version): ''' Given the local Borg version, return the corresponding default archive name format. ''' if feature.available(feature.Feature.ARCHIVE_SERIES, local_borg_version): return DEFAULT_ARCHIVE_NAME_FORMAT_WITH_SERIES return DEFAULT_ARCHIVE_NAME_FORMAT_WITHOUT_SERIES def make_match_archives_flags( # noqa: PLR0911 match_archives, archive_name_format, local_borg_version, default_archive_name_format=None, ): ''' Return match archives flags based on the given match archives value, if any. If it isn't set, return match archives flags to match archives created with the given (or default) archive name format. This is done by replacing certain archive name format placeholders for ephemeral data (like "{now}") with globs. 
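For example (illustrative), given no match_archives value, an archive_name_format of '{hostname}-{now}', and a Borg version supporting --match-archives, this returns:

    ('--match-archives', 'sh:{hostname}-*')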
''' if match_archives: if match_archives in {'*', 're:.*', 'sh:*'}: return () if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version): if ( feature.available(feature.Feature.ARCHIVE_SERIES, local_borg_version) and ARCHIVE_HASH_PATTERN.match(match_archives) and not match_archives.startswith('aid:') ): return ('--match-archives', f'aid:{match_archives}') return ('--match-archives', match_archives) return ('--glob-archives', re.sub(r'^sh:', '', match_archives)) derived_match_archives = re.sub( r'\{(now|utcnow|pid)([:%\w\.-]*)\}', '*', archive_name_format or default_archive_name_format or get_default_archive_name_format(local_borg_version), ) if derived_match_archives == '*': return () if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version): return ('--match-archives', f'sh:{derived_match_archives}') return ('--glob-archives', f'{derived_match_archives}') def warn_for_aggressive_archive_flags(json_command, json_output): ''' Given a JSON archives command and the resulting JSON string output from running it, parse the JSON and warn if the command used an archive flag but the output indicates zero archives were found. ''' archive_flags_used = {'--glob-archives', '--match-archives'}.intersection(set(json_command)) if not archive_flags_used: return try: if len(json.loads(json_output)['archives']) == 0: logger.warning('An archive filter was applied, but no matching archives were found.') logger.warning( 'Try adding --match-archives "*" or adjusting archive_name_format/match_archives in configuration.', ) except json.JSONDecodeError as error: logger.debug(f'Cannot parse JSON output from archive command: {error}') except (TypeError, KeyError): logger.debug('Cannot parse JSON output from archive command: No "archives" key found') def omit_flag(arguments, flag): ''' Given a sequence of Borg command-line arguments, return them with the given (valueless) flag omitted. For instance, if the flag is "--flag" and arguments is: ('borg', 'create', '--flag', '--other-flag') ... then return: ('borg', 'create', '--other-flag') ''' return tuple(argument for argument in arguments if argument != flag) def omit_flag_and_value(arguments, flag): ''' Given a sequence of Borg command-line arguments, return them with the given flag and its corresponding value omitted. For instance, if the flag is "--flag" and arguments is: ('borg', 'create', '--flag', 'value', '--other-flag') ... or: ('borg', 'create', '--flag=value', '--other-flag') ... then return: ('borg', 'create', '--other-flag') ''' # This works by zipping together a list of overlapping pairwise arguments. E.g., ('one', 'two', # 'three', 'four') becomes ((None, 'one'), ('one, 'two'), ('two', 'three'), ('three', 'four')). # This makes it easy to "look back" at the previous arguments so we can exclude both a flag and # its value. return tuple( argument for (previous_argument, argument) in zip((None, *arguments), arguments) if flag not in {previous_argument, argument} if not argument.startswith(f'{flag}=') ) def make_exclude_flags(config): ''' Given a configuration dict with various exclude options, return the corresponding Borg flags as a tuple. 
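For example (illustrative):

    make_exclude_flags({'exclude_caches': True, 'exclude_if_present': ['.nobackup']})
    ->  ('--exclude-caches', '--exclude-if-present', '.nobackup')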
''' caches_flag = ('--exclude-caches',) if config.get('exclude_caches') else () if_present_flags = tuple( itertools.chain.from_iterable( ('--exclude-if-present', if_present) for if_present in config.get('exclude_if_present', ()) ), ) keep_exclude_tags_flags = ('--keep-exclude-tags',) if config.get('keep_exclude_tags') else () exclude_nodump_flags = ('--exclude-nodump',) if config.get('exclude_nodump') else () return caches_flag + if_present_flags + keep_exclude_tags_flags + exclude_nodump_flags def make_list_filter_flags(local_borg_version, dry_run): ''' Given the local Borg version and whether this is a dry run, return the corresponding flags for passing to "--list --filter". The general idea is that excludes are shown for a dry run or when the verbosity is debug. ''' base_flags = 'AME' show_excludes = logger.isEnabledFor(logging.DEBUG) if feature.available(feature.Feature.EXCLUDED_FILES_MINUS, local_borg_version): if show_excludes or dry_run: return f'{base_flags}+-' return base_flags if show_excludes: return f'{base_flags}x-' return f'{base_flags}-' borgmatic/borgmatic/borg/import_key.py000066400000000000000000000047741510202216200204640ustar00rootroot00000000000000import logging import os import shlex import borgmatic.config.paths import borgmatic.logger from borgmatic.borg import environment, flags from borgmatic.execute import DO_NOT_CAPTURE, execute_command logger = logging.getLogger(__name__) def import_key( repository_path, config, local_borg_version, import_arguments, global_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a configuration dict, the local Borg version, import arguments, and optional local and remote Borg paths, import the repository key from the path indicated in the import arguments. If the path is empty or "-", then read the key from stdin. Raise ValueError if the path is given and it does not exist. ''' umask = config.get('umask', None) lock_wait = config.get('lock_wait', None) working_directory = borgmatic.config.paths.get_working_directory(config) extra_borg_options = config.get('extra_borg_options', {}).get('key_import', '') if import_arguments.path and import_arguments.path != '-': if not os.path.exists(os.path.join(working_directory or '', import_arguments.path)): raise ValueError(f'Path {import_arguments.path} does not exist. 
Aborting.') input_file = None else: input_file = DO_NOT_CAPTURE full_command = ( (local_path, 'key', 'import') + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + (('--log-json',) if config.get('log_json') else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + flags.make_flags('paper', import_arguments.paper) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + flags.make_repository_flags( repository_path, local_borg_version, ) + ((import_arguments.path,) if input_file is None else ()) ) if global_arguments.dry_run: logger.info('Skipping key import (dry run)') return execute_command( full_command, input_file=input_file, output_log_level=logging.INFO, environment=environment.make_environment(config), working_directory=working_directory, borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) borgmatic/borgmatic/borg/info.py000066400000000000000000000100041510202216200172140ustar00rootroot00000000000000import argparse import logging import shlex import borgmatic.config.paths import borgmatic.logger from borgmatic.borg import environment, feature, flags from borgmatic.execute import execute_command, execute_command_and_capture_output logger = logging.getLogger(__name__) def make_info_command( repository_path, config, local_borg_version, info_arguments, global_arguments, local_path, remote_path, ): ''' Given a local or remote repository path, a configuration dict, the local Borg version, the arguments to the info action as an argparse.Namespace, and global arguments, return a command as a tuple to display summary information for archives in the repository. ''' extra_borg_options = config.get('extra_borg_options', {}).get('info', '') return ( (local_path, 'info') + ( ('--info',) if logger.getEffectiveLevel() == logging.INFO and not info_arguments.json else () ) + ( ('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) and not info_arguments.json else () ) + flags.make_flags('remote-path', remote_path) + flags.make_flags('umask', config.get('umask')) + flags.make_flags('log-json', config.get('log_json')) + flags.make_flags('lock-wait', config.get('lock_wait')) + ( ( flags.make_flags('match-archives', f'sh:{info_arguments.prefix}*') if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version) else flags.make_flags('glob-archives', f'{info_arguments.prefix}*') ) if info_arguments.prefix else ( flags.make_match_archives_flags( info_arguments.archive or config.get('match_archives'), config.get('archive_name_format'), local_borg_version, ) ) ) + flags.make_flags_from_arguments( info_arguments, excludes=('repository', 'archive', 'prefix', 'match_archives'), ) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + flags.make_repository_flags(repository_path, local_borg_version) ) def display_archives_info( repository_path, config, local_borg_version, info_arguments, global_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a configuration dict, the local Borg version, the arguments to the info action as an argparse.Namespace, and global arguments, display summary information for Borg archives in the repository or return JSON summary information. 
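Internally this runs Borg twice: once with --json to capture output for the aggressive-archive-flag warning, and, unless JSON output was requested, once more to log the human-readable summary. The JSON invocation resembles (an illustrative sketch):

    borg info --json --repo /path/to/repo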
''' borgmatic.logger.add_custom_log_levels() main_command = make_info_command( repository_path, config, local_borg_version, info_arguments, global_arguments, local_path, remote_path, ) json_command = make_info_command( repository_path, config, local_borg_version, argparse.Namespace(**dict(info_arguments.__dict__, json=True)), global_arguments, local_path, remote_path, ) borg_exit_codes = config.get('borg_exit_codes') working_directory = borgmatic.config.paths.get_working_directory(config) json_info = execute_command_and_capture_output( json_command, environment=environment.make_environment(config), working_directory=working_directory, borg_local_path=local_path, borg_exit_codes=borg_exit_codes, ) if info_arguments.json: return json_info flags.warn_for_aggressive_archive_flags(json_command, json_info) execute_command( main_command, output_log_level=logging.ANSWER, environment=environment.make_environment(config), working_directory=working_directory, borg_local_path=local_path, borg_exit_codes=borg_exit_codes, ) return None borgmatic/borgmatic/borg/list.py000066400000000000000000000221461510202216200172460ustar00rootroot00000000000000import argparse import copy import logging import re import shlex import borgmatic.config.paths import borgmatic.logger from borgmatic.borg import environment, feature, flags, repo_list from borgmatic.execute import execute_command, execute_command_and_capture_output logger = logging.getLogger(__name__) ARCHIVE_FILTER_FLAGS_MOVED_TO_REPO_LIST = ('prefix', 'match_archives', 'sort_by', 'first', 'last') MAKE_FLAGS_EXCLUDES = ( 'repository', 'archive', 'paths', 'find_paths', *ARCHIVE_FILTER_FLAGS_MOVED_TO_REPO_LIST, ) def make_list_command( repository_path, config, local_borg_version, list_arguments, global_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a configuration dict, the arguments to the list action, and local and remote Borg paths, return a command as a tuple to list archives or paths within an archive. ''' extra_borg_options = config.get('extra_borg_options', {}).get('list', '') return ( (local_path, 'list') + ( ('--info',) if logger.getEffectiveLevel() == logging.INFO and not list_arguments.json else () ) + ( ('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) and not list_arguments.json else () ) + flags.make_flags('remote-path', remote_path) + flags.make_flags('umask', config.get('umask')) + flags.make_flags('log-json', config.get('log_json')) + flags.make_flags('lock-wait', config.get('lock_wait')) + flags.make_flags_from_arguments(list_arguments, excludes=MAKE_FLAGS_EXCLUDES) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + ( flags.make_repository_archive_flags( repository_path, list_arguments.archive, local_borg_version, ) if list_arguments.archive else flags.make_repository_flags(repository_path, local_borg_version) ) + (tuple(list_arguments.paths) if list_arguments.paths else ()) ) def make_find_paths(find_paths): ''' Given a sequence of path fragments or patterns as passed to `--find`, transform all path fragments into glob patterns. Pass through existing patterns untouched. For example, given find_paths of: ['foo.txt', 'pp:root/somedir'] ... 
transform that into: ['sh:**/*foo.txt*/**', 'pp:root/somedir'] ''' if not find_paths: return () return tuple( ( find_path if re.compile(r'([-!+RrPp] )|(\w\w:)').match(find_path) else f'sh:**/*{find_path}*/**' ) for find_path in find_paths ) def capture_archive_listing( repository_path, archive, config, local_borg_version, global_arguments, list_paths=None, path_format=None, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, an archive name, a configuration dict, the local Borg version, global arguments as an argparse.Namespace, the archive paths (or Borg patterns) in which to list files, the Borg path format to use for the output, and local and remote Borg paths, capture the output of listing that archive and return it as a list of file paths. ''' return tuple( execute_command_and_capture_output( make_list_command( repository_path, config, local_borg_version, argparse.Namespace( repository=repository_path, archive=archive, paths=list(list_paths) if list_paths else None, find_paths=None, json=None, format=path_format or '{path}{NUL}', ), global_arguments, local_path, remote_path, ), environment=environment.make_environment(config), working_directory=borgmatic.config.paths.get_working_directory(config), borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) .strip('\0') .split('\0'), ) def list_archive( repository_path, config, local_borg_version, list_arguments, global_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a configuration dict, the local Borg version, the arguments to the list action as an argparse.Namespace, global arguments as an argparse.Namespace, and local and remote Borg paths, display the output of listing the files of a Borg archive (or return JSON output). If list_arguments.find_paths are given, list the files by searching across multiple archives. If neither find_paths nor archive name are given, instead list the archives in the given repository. ''' borgmatic.logger.add_custom_log_levels() if not list_arguments.archive and not list_arguments.find_paths: if feature.available(feature.Feature.REPO_LIST, local_borg_version): logger.warning( 'Omitting the --archive flag on the list action is deprecated when using Borg 2.x+. Use the repo-list action instead.', ) repo_list_arguments = argparse.Namespace( repository=repository_path, short=list_arguments.short, format=list_arguments.format, json=list_arguments.json, prefix=list_arguments.prefix, match_archives=list_arguments.match_archives, sort_by=list_arguments.sort_by, first=list_arguments.first, last=list_arguments.last, ) return repo_list.list_repository( repository_path, config, local_borg_version, repo_list_arguments, global_arguments, local_path, remote_path, ) if list_arguments.archive: for name in ARCHIVE_FILTER_FLAGS_MOVED_TO_REPO_LIST: if getattr(list_arguments, name, None): logger.warning( f"The --{name.replace('_', '-')} flag on the list action is ignored when using the --archive flag.", ) if list_arguments.json: raise ValueError( 'The --json flag on the list action is not supported when using the --archive/--find flags.', ) borg_exit_codes = config.get('borg_exit_codes') # If there are any paths to find (and there's not a single archive already selected), start by # getting a list of archives to search. 
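    # Illustrative walk-through (not part of the original source): for an invocation like
    # "borgmatic list --find foo.txt" with no --archive flag, find_paths is ('foo.txt',) here, so
    # the branch below first asks Borg for the repository's archive names and then lists each
    # archive, filtered by the glob patterns that make_find_paths() produces.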
if list_arguments.find_paths and not list_arguments.archive: repo_list_arguments = argparse.Namespace( repository=repository_path, short=True, format=None, json=None, prefix=list_arguments.prefix, match_archives=list_arguments.match_archives, sort_by=list_arguments.sort_by, first=list_arguments.first, last=list_arguments.last, ) # Ask Borg to list archives. Capture its output for use below. archive_lines = tuple( execute_command_and_capture_output( repo_list.make_repo_list_command( repository_path, config, local_borg_version, repo_list_arguments, global_arguments, local_path, remote_path, ), environment=environment.make_environment(config), working_directory=borgmatic.config.paths.get_working_directory(config), borg_local_path=local_path, borg_exit_codes=borg_exit_codes, ) .strip('\n') .splitlines(), ) else: archive_lines = (list_arguments.archive,) # For each archive listed by Borg, run list on the contents of that archive. for archive in archive_lines: logger.answer(f'Listing archive {archive}') archive_arguments = copy.copy(list_arguments) archive_arguments.archive = archive # This list call is to show the files in a single archive, not list multiple archives. So # blank out any archive filtering flags. They'll break anyway in Borg 2. for name in ARCHIVE_FILTER_FLAGS_MOVED_TO_REPO_LIST: setattr(archive_arguments, name, None) main_command = make_list_command( repository_path, config, local_borg_version, archive_arguments, global_arguments, local_path, remote_path, ) + make_find_paths(list_arguments.find_paths) execute_command( main_command, output_log_level=logging.ANSWER, environment=environment.make_environment(config), working_directory=borgmatic.config.paths.get_working_directory(config), borg_local_path=local_path, borg_exit_codes=borg_exit_codes, ) return None borgmatic/borgmatic/borg/mount.py000066400000000000000000000064071510202216200174370ustar00rootroot00000000000000import logging import shlex import borgmatic.config.paths from borgmatic.borg import environment, feature, flags from borgmatic.execute import DO_NOT_CAPTURE, execute_command logger = logging.getLogger(__name__) def mount_archive( repository_path, archive, mount_arguments, config, local_borg_version, global_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, an optional archive name, a filesystem mount point, zero or more paths to mount from the archive, extra Borg mount options, a storage configuration dict, the local Borg version, global arguments as an argparse.Namespace instance, and optional local and remote Borg paths, mount the archive onto the mount point. 
''' umask = config.get('umask', None) lock_wait = config.get('lock_wait', None) extra_borg_options = config.get('extra_borg_options', {}).get('mount', '') full_command = ( (local_path, 'mount') + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + (('--log-json',) if config.get('log_json') else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + flags.make_flags_from_arguments( mount_arguments, excludes=('repository', 'archive', 'mount_point', 'paths', 'options'), ) + (('-o', mount_arguments.options) if mount_arguments.options else ()) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + ( ( flags.make_repository_flags(repository_path, local_borg_version) + ( ('--match-archives', archive) if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version) else ('--glob-archives', archive) ) ) if feature.available(feature.Feature.SEPARATE_REPOSITORY_ARCHIVE, local_borg_version) else ( flags.make_repository_archive_flags(repository_path, archive, local_borg_version) if archive else flags.make_repository_flags(repository_path, local_borg_version) ) ) + (mount_arguments.mount_point,) + (tuple(mount_arguments.paths) if mount_arguments.paths else ()) ) working_directory = borgmatic.config.paths.get_working_directory(config) # Don't capture the output when foreground mode is used so that ctrl-C can work properly. if mount_arguments.foreground: execute_command( full_command, output_file=DO_NOT_CAPTURE, environment=environment.make_environment(config), working_directory=working_directory, borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) return execute_command( full_command, environment=environment.make_environment(config), working_directory=working_directory, borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) borgmatic/borgmatic/borg/passcommand.py000066400000000000000000000024231510202216200205740ustar00rootroot00000000000000import functools import logging import shlex import borgmatic.config.paths import borgmatic.execute logger = logging.getLogger(__name__) @functools.cache def run_passcommand(passcommand, working_directory): ''' Run the given passcommand using the given working directory and return the passphrase produced by the command. Cache the results so that the passcommand only needs to run—and potentially prompt the user—once per borgmatic invocation. ''' return borgmatic.execute.execute_command_and_capture_output( shlex.split(passcommand), working_directory=working_directory, ) def get_passphrase_from_passcommand(config): ''' Given the configuration dict, call the configured passcommand to produce and return an encryption passphrase. In effect, we're doing an end-run around Borg by invoking its passcommand ourselves. This allows us to pass the resulting passphrase to multiple different Borg invocations without the user having to be prompted multiple times. If no passcommand is configured, then return None. 
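
    For example (an illustrative sketch; the path is hypothetical), given a configuration
    containing:

        encryption_passcommand: cat /etc/borgmatic/passphrase

    ... the command runs at most once, and its output is reused across multiple Borg invocations
    within the same borgmatic run, because run_passcommand() above caches its results.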
''' passcommand = config.get('encryption_passcommand') if not passcommand: return None return run_passcommand(passcommand, borgmatic.config.paths.get_working_directory(config)) borgmatic/borgmatic/borg/pattern.py000066400000000000000000000060531510202216200177470ustar00rootroot00000000000000import collections import enum import logging import os import tempfile logger = logging.getLogger(__name__) # See https://borgbackup.readthedocs.io/en/stable/usage/help.html#borg-help-patterns class Pattern_type(enum.Enum): ROOT = 'R' # A ROOT pattern always has a NONE pattern style. PATTERN_STYLE = 'P' EXCLUDE = '-' NO_RECURSE = '!' INCLUDE = '+' class Pattern_style(enum.Enum): NONE = '' FNMATCH = 'fm' SHELL = 'sh' REGULAR_EXPRESSION = 're' PATH_PREFIX = 'pp' PATH_FULL_MATCH = 'pf' class Pattern_source(enum.Enum): ''' Where the pattern came from within borgmatic. This is important because certain use cases (like filesystem snapshotting) only want to consider patterns that the user actually put in a configuration file and not patterns from other sources. ''' # The pattern is from a borgmatic configuration option, e.g. listed in "source_directories". CONFIG = 'config' # The pattern is generated internally within borgmatic, e.g. for special file excludes. INTERNAL = 'internal' # The pattern originates from within a borgmatic hook, e.g. a database hook that adds its dump # directory. HOOK = 'hook' Pattern = collections.namedtuple( 'Pattern', ('path', 'type', 'style', 'device', 'source'), defaults=( Pattern_type.ROOT, Pattern_style.NONE, None, Pattern_source.HOOK, ), ) def write_patterns_file(patterns, borgmatic_runtime_directory, patterns_file=None): ''' Given a sequence of patterns as Pattern instances, write them to a named temporary file in the given borgmatic runtime directory and return the file object so it can continue to exist on disk as long as the caller needs it. If an optional open pattern file is given, append to it instead of making a new temporary file. Return None if no patterns are provided. ''' if not patterns: return None if patterns_file is None: patterns_file = tempfile.NamedTemporaryFile( 'w', dir=borgmatic_runtime_directory, encoding='utf-8' ) operation_name = 'Writing' else: patterns_file.write('\n') operation_name = 'Appending' patterns_output = '\n'.join( f'{pattern.type.value} {pattern.style.value}{":" if pattern.style.value else ""}{pattern.path}' for pattern in patterns ) logger.debug(f'{operation_name} patterns to {patterns_file.name}:\n{patterns_output}') patterns_file.write(patterns_output) patterns_file.flush() return patterns_file def check_all_root_patterns_exist(patterns): ''' Given a sequence of Pattern instances, check that all root pattern paths exist. If any don't, raise an exception. 
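
    For instance (hypothetical values for illustration), if the patterns include a root pattern
    for /home and /home doesn't exist on disk, the raised ValueError names /home as missing.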
''' missing_paths = [ pattern.path for pattern in patterns if pattern.type == Pattern_type.ROOT if not os.path.exists(pattern.path) ] if missing_paths: raise ValueError( f"Source directories or root pattern paths do not exist: {', '.join(missing_paths)}", ) borgmatic/borgmatic/borg/prune.py000066400000000000000000000072761510202216200174330ustar00rootroot00000000000000import logging import shlex import borgmatic.config.paths import borgmatic.logger from borgmatic.borg import environment, feature, flags from borgmatic.execute import execute_command logger = logging.getLogger(__name__) def make_prune_flags(config, prune_arguments, local_borg_version): ''' Given a configuration dict mapping from option name to value, prune arguments as an argparse.Namespace instance, and the local Borg version, produce a corresponding sequence of command-line flags. For example, given a retention config of: {'keep_weekly': 4, 'keep_monthly': 6} This will be returned as an iterable of: ( ('--keep-weekly', '4'), ('--keep-monthly', '6'), ) ''' flag_pairs = ( ('--' + option_name.replace('_', '-'), str(value)) for option_name, value in config.items() if option_name.startswith('keep_') and option_name != 'keep_exclude_tags' ) prefix = config.get('prefix') return tuple(element for pair in flag_pairs for element in pair) + ( ( ('--match-archives', f'sh:{prefix}*') if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version) else ('--glob-archives', f'{prefix}*') ) if prefix else ( flags.make_match_archives_flags( config.get('match_archives'), config.get('archive_name_format'), local_borg_version, ) ) ) def prune_archives( dry_run, repository_path, config, local_borg_version, prune_arguments, global_arguments, local_path='borg', remote_path=None, ): ''' Given dry-run flag, a local or remote repository path, and a configuration dict, prune Borg archives according to the retention policy specified in that configuration. 
''' borgmatic.logger.add_custom_log_levels() umask = config.get('umask', None) lock_wait = config.get('lock_wait', None) extra_borg_options = config.get('extra_borg_options', {}).get('prune', '') full_command = ( (local_path, 'prune') + make_prune_flags(config, prune_arguments, local_borg_version) + (('--remote-path', remote_path) if remote_path else ()) + (('--umask', str(umask)) if umask else ()) + (('--log-json',) if config.get('log_json') else ()) + (('--lock-wait', str(lock_wait)) if lock_wait else ()) + ( ('--stats',) if config.get('statistics') and not dry_run and not feature.available(feature.Feature.NO_PRUNE_STATS, local_borg_version) else () ) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + flags.make_flags_from_arguments( prune_arguments, excludes=('repository', 'match_archives', 'statistics', 'list_details'), ) + (('--list',) if config.get('list_details') else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + (('--dry-run',) if dry_run else ()) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + flags.make_repository_flags(repository_path, local_borg_version) ) if config.get('statistics') or config.get('list_details'): output_log_level = logging.ANSWER else: output_log_level = logging.INFO execute_command( full_command, output_log_level=output_log_level, environment=environment.make_environment(config), working_directory=borgmatic.config.paths.get_working_directory(config), borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) borgmatic/borgmatic/borg/recreate.py000066400000000000000000000100551510202216200200610ustar00rootroot00000000000000import logging import shlex import borgmatic.borg.environment import borgmatic.borg.feature import borgmatic.config.paths import borgmatic.execute from borgmatic.borg import flags from borgmatic.borg.pattern import write_patterns_file logger = logging.getLogger(__name__) def recreate_archive( repository, archive, config, local_borg_version, recreate_arguments, global_arguments, local_path, remote_path=None, patterns=None, ): ''' Given a local or remote repository path, an archive name, a configuration dict, the local Borg version string, an argparse.Namespace of recreate arguments, an argparse.Namespace of global arguments, optional local and remote Borg paths, executes the recreate command with the given arguments. ''' lock_wait = config.get('lock_wait', None) exclude_flags = flags.make_exclude_flags(config) compression = config.get('compression', None) chunker_params = config.get('chunker_params', None) extra_borg_options = config.get('extra_borg_options', {}).get('recreate', '') # Available recompress MODES: "if-different", "always", "never" (default) recompress = config.get('recompress', None) # Write patterns to a temporary file and use that file with --patterns-from. 
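    # As a hypothetical illustration (not from the original source): patterns for a root of /home
    # plus an exclude of /home/*/.cache get persisted by write_patterns_file() as "R /home" and
    # "- /home/*/.cache" lines, and the temporary file's name is then handed to Borg via
    # --patterns-from below.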
    patterns_file = write_patterns_file(
        patterns,
        borgmatic.config.paths.get_working_directory(config),
    )

    recreate_command = (
        (local_path, 'recreate')
        + (('--remote-path', remote_path) if remote_path else ())
        + (('--log-json',) if config.get('log_json') else ())
        + (('--lock-wait', str(lock_wait)) if lock_wait is not None else ())
        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
        + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
        + (('--patterns-from', patterns_file.name) if patterns_file else ())
        + (
            (
                '--list',
                '--filter',
                flags.make_list_filter_flags(local_borg_version, global_arguments.dry_run),
            )
            if config.get('list_details')
            else ()
        )
        # Flag --target works only for a single archive.
        + (('--target', recreate_arguments.target) if recreate_arguments.target and archive else ())
        + (
            ('--comment', shlex.quote(recreate_arguments.comment))
            if recreate_arguments.comment
            else ()
        )
        + (('--timestamp', recreate_arguments.timestamp) if recreate_arguments.timestamp else ())
        + (('--compression', compression) if compression else ())
        + (('--chunker-params', chunker_params) if chunker_params else ())
        + (('--recompress', recompress) if recompress else ())
        + exclude_flags
        + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ())
        + (
            (
                flags.make_repository_flags(repository, local_borg_version)
                + flags.make_match_archives_flags(
                    archive or config.get('match_archives'),
                    config.get('archive_name_format'),
                    local_borg_version,
                )
            )
            if borgmatic.borg.feature.available(
                borgmatic.borg.feature.Feature.SEPARATE_REPOSITORY_ARCHIVE,
                local_borg_version,
            )
            else (
                flags.make_repository_archive_flags(repository, archive, local_borg_version)
                if archive
                else flags.make_repository_flags(repository, local_borg_version)
            )
        )
    )

    if global_arguments.dry_run:
        logger.info('Skipping the archive recreation (dry run)')
        return

    borgmatic.execute.execute_command(
        full_command=recreate_command,
        output_log_level=logging.INFO,
        environment=borgmatic.borg.environment.make_environment(config),
        working_directory=borgmatic.config.paths.get_working_directory(config),
        borg_local_path=local_path,
        borg_exit_codes=config.get('borg_exit_codes'),
    )
borgmatic/borgmatic/borg/rename.py000066400000000000000000000036221510202216200175400ustar00rootroot00000000000000import logging
import shlex

import borgmatic.borg.environment
import borgmatic.borg.flags
import borgmatic.config.paths
import borgmatic.execute

logger = logging.getLogger(__name__)


def make_rename_command(
    dry_run,
    repository_name,
    old_archive_name,
    new_archive_name,
    config,
    local_borg_version,
    local_path,
    remote_path,
):
    '''
    Given a dry-run flag, a repository name, the old and new archive names, a configuration dict,
    the local Borg version, and local and remote Borg paths, return a command as a tuple to rename
    the given archive.
    '''
    extra_borg_options = config.get('extra_borg_options', {}).get('rename', '')

    return (
        (local_path, 'rename')
        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
        + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ())
        + borgmatic.borg.flags.make_flags('dry-run', dry_run)
        + borgmatic.borg.flags.make_flags('remote-path', remote_path)
        + borgmatic.borg.flags.make_flags('umask', config.get('umask'))
        + borgmatic.borg.flags.make_flags('log-json', config.get('log_json'))
        + borgmatic.borg.flags.make_flags('lock-wait', config.get('lock_wait'))
        + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ())
        + borgmatic.borg.flags.make_repository_archive_flags(
            repository_name,
            old_archive_name,
            local_borg_version,
        )
        + (new_archive_name,)
    )


def rename_archive(
    repository_name,
    old_archive_name,
    new_archive_name,
    dry_run,
    config,
    local_borg_version,
    local_path,
    remote_path,
):
    '''
    Given a repository name, the old and new archive names, a dry-run flag, a configuration dict,
    the local Borg version, and local and remote Borg paths, rename the given archive.
    '''
    command = make_rename_command(
        dry_run,
        repository_name,
        old_archive_name,
        new_archive_name,
        config,
        local_borg_version,
        local_path,
        remote_path,
    )

    borgmatic.execute.execute_command(
        command,
        output_log_level=logging.INFO,
        environment=borgmatic.borg.environment.make_environment(config),
        working_directory=borgmatic.config.paths.get_working_directory(config),
        borg_local_path=local_path,
        borg_exit_codes=config.get('borg_exit_codes'),
    )
borgmatic/borgmatic/borg/repo_create.py000066400000000000000000000105131510202216200205560ustar00rootroot00000000000000import argparse
import json
import logging
import shlex
import subprocess

import borgmatic.config.paths
from borgmatic.borg import environment, feature, flags, repo_info
from borgmatic.execute import DO_NOT_CAPTURE, execute_command

logger = logging.getLogger(__name__)

REPO_INFO_REPOSITORY_NOT_FOUND_EXIT_CODES = {2, 13, 15}


def create_repository(
    dry_run,
    repository_path,
    config,
    local_borg_version,
    global_arguments,
    encryption_mode,
    source_repository=None,
    copy_crypt_key=False,
    append_only=None,
    storage_quota=None,
    make_parent_directories=False,
    local_path='borg',
    remote_path=None,
):
    '''
    Given a dry-run flag, a local or remote repository path, a configuration dict, the local Borg
    version, a Borg encryption mode, the path to another repo whose key material should be reused,
    whether the repository should be append-only, and the storage quota to use, create the
    repository. If the repository already exists, then log and skip creation.

    Raise ValueError if the requested encryption mode does not match that of the repository. Raise
    json.decoder.JSONDecodeError if the "borg info" JSON output cannot be decoded. Raise
    subprocess.CalledProcessError if "borg info" returns an error exit code.
    '''
    try:
        info_data = json.loads(
            repo_info.display_repository_info(
                repository_path,
                config,
                local_borg_version,
                argparse.Namespace(json=True),
                global_arguments,
                local_path,
                remote_path,
            ),
        )
        repository_encryption_mode = info_data.get('encryption', {}).get('mode')

        if repository_encryption_mode != encryption_mode:
            raise ValueError(
                f'Requested encryption mode "{encryption_mode}" does not match existing repository encryption mode "{repository_encryption_mode}"',
            )

        logger.info('Repository already exists. Skipping creation.')
        return
    except subprocess.CalledProcessError as error:
        if error.returncode not in REPO_INFO_REPOSITORY_NOT_FOUND_EXIT_CODES:
            raise

    lock_wait = config.get('lock_wait')
    umask = config.get('umask')

    extra_borg_options_from_init = config.get('extra_borg_options', {}).get('init', '')
    extra_borg_options = config.get('extra_borg_options', {}).get('repo_create', '')

    if extra_borg_options_from_init:
        logger.warning(
            'The "init" option in "extra_borg_options" is deprecated and will be removed from a future release; use "repo_create" instead.'
        )

    repo_create_command = (
        (local_path,)
        + (
            ('repo-create',)
            if feature.available(feature.Feature.REPO_CREATE, local_borg_version)
            else ('init',)
        )
        + (('--encryption', encryption_mode) if encryption_mode else ())
        + (('--other-repo', source_repository) if source_repository else ())
        + (('--copy-crypt-key',) if copy_crypt_key else ())
        + (('--append-only',) if append_only else ())
        + (('--storage-quota', storage_quota) if storage_quota else ())
        + (('--make-parent-dirs',) if make_parent_directories else ())
        + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ())
        + (('--debug',) if logger.isEnabledFor(logging.DEBUG) else ())
        + (('--log-json',) if config.get('log_json') else ())
        + (('--lock-wait', str(lock_wait)) if lock_wait else ())
        + (('--remote-path', remote_path) if remote_path else ())
        + (('--umask', str(umask)) if umask else ())
        + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ())
        + (tuple(shlex.split(extra_borg_options_from_init)) if extra_borg_options_from_init else ())
        + flags.make_repository_flags(repository_path, local_borg_version)
    )

    if dry_run:
        logger.info('Skipping repository creation (dry run)')
        return

    # Do not capture output here, so as to support interactive prompts.
    execute_command(
        repo_create_command,
        output_file=DO_NOT_CAPTURE,
        environment=environment.make_environment(config),
        working_directory=borgmatic.config.paths.get_working_directory(config),
        borg_local_path=local_path,
        borg_exit_codes=config.get('borg_exit_codes'),
    )
borgmatic/borgmatic/borg/repo_delete.py000066400000000000000000000073251510202216200205630ustar00rootroot00000000000000import logging
import shlex

import borgmatic.borg.environment
import borgmatic.borg.feature
import borgmatic.borg.flags
import borgmatic.config.paths
import borgmatic.execute
import borgmatic.logger

logger = logging.getLogger(__name__)

FORCE_HARDER_FLAG_COUNT = 2


def make_repo_delete_command(
    repository,
    config,
    local_borg_version,
    repo_delete_arguments,
    global_arguments,
    local_path,
    remote_path,
):
    '''
    Given a local or remote repository dict, a configuration dict, the local Borg version, the
    arguments to the repo_delete action as an argparse.Namespace, and global arguments, return a
    command as a tuple to repo_delete the entire repository.
''' extra_borg_options = config.get('extra_borg_options', {}).get( 'repo_delete' if borgmatic.borg.feature.available( borgmatic.borg.feature.Feature.REPO_DELETE, local_borg_version ) else 'delete', '', ) return ( (local_path,) + ( ('repo-delete',) if borgmatic.borg.feature.available( borgmatic.borg.feature.Feature.REPO_DELETE, local_borg_version, ) else ('delete',) ) + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + borgmatic.borg.flags.make_flags('dry-run', global_arguments.dry_run) + borgmatic.borg.flags.make_flags('remote-path', remote_path) + borgmatic.borg.flags.make_flags('umask', config.get('umask')) + borgmatic.borg.flags.make_flags('log-json', config.get('log_json')) + borgmatic.borg.flags.make_flags('lock-wait', config.get('lock_wait')) + borgmatic.borg.flags.make_flags('list', config.get('list_details')) + ( ( ('--force',) + (('--force',) if repo_delete_arguments.force >= FORCE_HARDER_FLAG_COUNT else ()) ) if repo_delete_arguments.force else () ) + borgmatic.borg.flags.make_flags_from_arguments( repo_delete_arguments, excludes=('list_details', 'force', 'repository'), ) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + borgmatic.borg.flags.make_repository_flags(repository['path'], local_borg_version) ) def delete_repository( repository, config, local_borg_version, repo_delete_arguments, global_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository dict, a configuration dict, the local Borg version, the arguments to the repo_delete action as an argparse.Namespace, global arguments as an argparse.Namespace, and local and remote Borg paths, repo_delete the entire repository. ''' borgmatic.logger.add_custom_log_levels() command = make_repo_delete_command( repository, config, local_borg_version, repo_delete_arguments, global_arguments, local_path, remote_path, ) borgmatic.execute.execute_command( command, output_log_level=logging.ANSWER, # Don't capture output when Borg is expected to prompt for interactive confirmation, or the # prompt won't work. output_file=( None if repo_delete_arguments.force or repo_delete_arguments.cache_only else borgmatic.execute.DO_NOT_CAPTURE ), environment=borgmatic.borg.environment.make_environment(config), working_directory=borgmatic.config.paths.get_working_directory(config), borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) borgmatic/borgmatic/borg/repo_info.py000066400000000000000000000052251510202216200202520ustar00rootroot00000000000000import logging import shlex import borgmatic.config.paths import borgmatic.logger from borgmatic.borg import environment, feature, flags from borgmatic.execute import execute_command, execute_command_and_capture_output logger = logging.getLogger(__name__) def display_repository_info( repository_path, config, local_borg_version, repo_info_arguments, global_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a configuration dict, the local Borg version, the arguments to the repo_info action, and global arguments as an argparse.Namespace, display summary information for the Borg repository or return JSON summary information. 
''' borgmatic.logger.add_custom_log_levels() lock_wait = config.get('lock_wait', None) extra_borg_options = config.get('extra_borg_options', {}).get( 'repo_info' if feature.available(feature.Feature.REPO_INFO, local_borg_version) else 'info', '', ) full_command = ( (local_path,) + ( ('repo-info',) if feature.available(feature.Feature.REPO_INFO, local_borg_version) else ('info',) ) + ( ('--info',) if logger.getEffectiveLevel() == logging.INFO and not repo_info_arguments.json else () ) + ( ('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) and not repo_info_arguments.json else () ) + flags.make_flags('remote-path', remote_path) + flags.make_flags('umask', config.get('umask')) + flags.make_flags('log-json', config.get('log_json')) + flags.make_flags('lock-wait', lock_wait) + (('--json',) if repo_info_arguments.json else ()) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + flags.make_repository_flags(repository_path, local_borg_version) ) working_directory = borgmatic.config.paths.get_working_directory(config) borg_exit_codes = config.get('borg_exit_codes') if repo_info_arguments.json: return execute_command_and_capture_output( full_command, environment=environment.make_environment(config), working_directory=working_directory, borg_local_path=local_path, borg_exit_codes=borg_exit_codes, ) execute_command( full_command, output_log_level=logging.ANSWER, environment=environment.make_environment(config), working_directory=working_directory, borg_local_path=local_path, borg_exit_codes=borg_exit_codes, ) return None borgmatic/borgmatic/borg/repo_list.py000066400000000000000000000170361510202216200202750ustar00rootroot00000000000000import argparse import json import logging import shlex import borgmatic.config.paths import borgmatic.logger from borgmatic.borg import environment, feature, flags from borgmatic.execute import execute_command, execute_command_and_capture_output logger = logging.getLogger(__name__) def resolve_archive_name( repository_path, archive, config, local_borg_version, global_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, an archive name, a configuration dict, the local Borg version, global arguments as an argparse.Namespace, a local Borg path, and a remote Borg path, return the archive name. But if the archive name is "latest", then instead introspect the repository for the latest archive and return its name or ID, depending on whether the version of Borg in use supports archive series—different archives that share the same name but have unique IDs. Raise ValueError if "latest" is given but there are no archives in the repository. ''' if archive != 'latest': return archive latest_archive = get_latest_archive( repository_path, config, local_borg_version, global_arguments, local_path=local_path, remote_path=remote_path, ) return ( latest_archive['id'] if feature.available(feature.Feature.ARCHIVE_SERIES, local_borg_version) else latest_archive['name'] ) def get_latest_archive( repository_path, config, local_borg_version, global_arguments, local_path='borg', remote_path=None, consider_checkpoints=False, ): ''' Returns a dict with information about the latest archive of a repository. Raises ValueError if there are no archives in the repository. 
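
    As an illustrative sketch (keys abridged, values hypothetical), the returned dict is a single
    archive entry from Borg's JSON listing, along the lines of:

        {'name': 'myhost-2025-01-01T00:00:00', 'id': 'abc123...'}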
''' extra_borg_options = config.get('extra_borg_options', {}).get( 'repo_list' if feature.available(feature.Feature.REPO_LIST, local_borg_version) else 'list', '', ) full_command = ( local_path, ( 'repo-list' if feature.available(feature.Feature.REPO_LIST, local_borg_version) else 'list' ), *flags.make_flags('remote-path', remote_path), *flags.make_flags('umask', config.get('umask')), *flags.make_flags('log-json', config.get('log_json')), *flags.make_flags('lock-wait', config.get('lock_wait')), *( flags.make_flags('consider-checkpoints', consider_checkpoints) if not feature.available(feature.Feature.REPO_LIST, local_borg_version) else () ), *flags.make_flags('last', 1), '--json', *(tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()), *flags.make_repository_flags(repository_path, local_borg_version), ) json_output = execute_command_and_capture_output( full_command, environment=environment.make_environment(config), working_directory=borgmatic.config.paths.get_working_directory(config), borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) archives = json.loads(json_output)['archives'] try: latest_archive = archives[-1] except IndexError: raise ValueError('No archives found in the repository') logger.debug(f'Latest archive is {latest_archive["name"]} ({latest_archive["id"]})') return latest_archive MAKE_FLAGS_EXCLUDES = ('repository', 'prefix', 'match_archives') def make_repo_list_command( repository_path, config, local_borg_version, repo_list_arguments, global_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a configuration dict, the local Borg version, the arguments to the repo_list action, global arguments as an argparse.Namespace instance, and local and remote Borg paths, return a command as a tuple to list archives with a repository. 
''' extra_borg_options = config.get('extra_borg_options', {}).get( 'repo_list' if feature.available(feature.Feature.REPO_LIST, local_borg_version) else 'list', '', ) return ( ( local_path, ( 'repo-list' if feature.available(feature.Feature.REPO_LIST, local_borg_version) else 'list' ), ) + ( ('--info',) if logger.getEffectiveLevel() == logging.INFO and not repo_list_arguments.json else () ) + ( ('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) and not repo_list_arguments.json else () ) + flags.make_flags('remote-path', remote_path) + flags.make_flags('umask', config.get('umask')) + flags.make_flags('log-json', config.get('log_json')) + flags.make_flags('lock-wait', config.get('lock_wait')) + ( ( flags.make_flags('match-archives', f'sh:{repo_list_arguments.prefix}*') if feature.available(feature.Feature.MATCH_ARCHIVES, local_borg_version) else flags.make_flags('glob-archives', f'{repo_list_arguments.prefix}*') ) if repo_list_arguments.prefix else ( flags.make_match_archives_flags( config.get('match_archives'), config.get('archive_name_format'), local_borg_version, ) ) ) + flags.make_flags_from_arguments(repo_list_arguments, excludes=MAKE_FLAGS_EXCLUDES) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + flags.make_repository_flags(repository_path, local_borg_version) ) def list_repository( repository_path, config, local_borg_version, repo_list_arguments, global_arguments, local_path='borg', remote_path=None, ): ''' Given a local or remote repository path, a configuration dict, the local Borg version, the arguments to the list action, global arguments as an argparse.Namespace instance, and local and remote Borg paths, display the output of listing Borg archives in the given repository (or return JSON output). ''' borgmatic.logger.add_custom_log_levels() main_command = make_repo_list_command( repository_path, config, local_borg_version, repo_list_arguments, global_arguments, local_path, remote_path, ) json_command = make_repo_list_command( repository_path, config, local_borg_version, argparse.Namespace(**dict(repo_list_arguments.__dict__, json=True)), global_arguments, local_path, remote_path, ) working_directory = borgmatic.config.paths.get_working_directory(config) borg_exit_codes = config.get('borg_exit_codes') json_listing = execute_command_and_capture_output( json_command, environment=environment.make_environment(config), working_directory=working_directory, borg_local_path=local_path, borg_exit_codes=borg_exit_codes, ) if repo_list_arguments.json: return json_listing flags.warn_for_aggressive_archive_flags(json_command, json_listing) execute_command( main_command, output_log_level=logging.ANSWER, environment=environment.make_environment(config), working_directory=working_directory, borg_local_path=local_path, borg_exit_codes=borg_exit_codes, ) return None borgmatic/borgmatic/borg/state.py000066400000000000000000000000641510202216200174060ustar00rootroot00000000000000DEFAULT_BORGMATIC_SOURCE_DIRECTORY = '~/.borgmatic' borgmatic/borgmatic/borg/transfer.py000066400000000000000000000051071510202216200201150ustar00rootroot00000000000000import logging import shlex import borgmatic.config.paths import borgmatic.logger from borgmatic.borg import environment, flags from borgmatic.execute import DO_NOT_CAPTURE, execute_command logger = logging.getLogger(__name__) def transfer_archives( dry_run, repository_path, config, local_borg_version, transfer_arguments, global_arguments, local_path='borg', remote_path=None, ): ''' Given a dry-run flag, a local or remote 
repository path, a configuration dict, the local Borg version, the arguments to the transfer action, and global arguments as an argparse.Namespace instance, transfer archives to the given repository. ''' borgmatic.logger.add_custom_log_levels() extra_borg_options = config.get('extra_borg_options', {}).get('transfer', '') full_command = ( (local_path, 'transfer') + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + flags.make_flags('remote-path', remote_path) + flags.make_flags('umask', config.get('umask')) + flags.make_flags('log-json', config.get('log_json')) + flags.make_flags('lock-wait', config.get('lock_wait')) + flags.make_flags('progress', config.get('progress')) + ( flags.make_flags_from_arguments( transfer_arguments, excludes=( 'repository', 'source_repository', 'archive', 'match_archives', 'progress', ), ) or ( flags.make_match_archives_flags( transfer_arguments.archive or config.get('match_archives'), config.get('archive_name_format'), local_borg_version, ) ) ) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + flags.make_repository_flags(repository_path, local_borg_version) + flags.make_flags('other-repo', transfer_arguments.source_repository) + flags.make_flags('dry-run', dry_run) ) return execute_command( full_command, output_log_level=logging.ANSWER, output_file=DO_NOT_CAPTURE if config.get('progress') else None, environment=environment.make_environment(config), working_directory=borgmatic.config.paths.get_working_directory(config), borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) borgmatic/borgmatic/borg/umount.py000066400000000000000000000017161510202216200176220ustar00rootroot00000000000000import logging import shlex import borgmatic.config.paths from borgmatic.execute import execute_command logger = logging.getLogger(__name__) def unmount_archive(config, mount_point, local_path='borg'): ''' Given a mounted filesystem mount point, and an optional local Borg paths, umount the filesystem from the mount point. ''' extra_borg_options = config.get('extra_borg_options', {}).get('umount', '') full_command = ( (local_path, 'umount') + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) + (tuple(shlex.split(extra_borg_options)) if extra_borg_options else ()) + (mount_point,) ) execute_command( full_command, working_directory=borgmatic.config.paths.get_working_directory(config), borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) borgmatic/borgmatic/borg/version.py000066400000000000000000000022031510202216200177500ustar00rootroot00000000000000import logging import borgmatic.config.paths from borgmatic.borg import environment from borgmatic.execute import execute_command_and_capture_output logger = logging.getLogger(__name__) def local_borg_version(config, local_path='borg'): ''' Given a configuration dict and a local Borg executable path, return a version string for it. Raise OSError or CalledProcessError if there is a problem running Borg. Raise ValueError if the version cannot be parsed. 
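
    For example, "borg --version" output along the lines of "borg 1.4.1" (version number
    hypothetical) parses to a return value of "1.4.1", since the second space-separated field of
    the output is returned.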
''' full_command = ( (local_path, '--version') + (('--info',) if logger.getEffectiveLevel() == logging.INFO else ()) + (('--debug', '--show-rc') if logger.isEnabledFor(logging.DEBUG) else ()) ) output = execute_command_and_capture_output( full_command, environment=environment.make_environment(config), working_directory=borgmatic.config.paths.get_working_directory(config), borg_local_path=local_path, borg_exit_codes=config.get('borg_exit_codes'), ) try: return output.split(' ')[1].strip() except IndexError: raise ValueError('Could not parse Borg version string') borgmatic/borgmatic/commands/000077500000000000000000000000001510202216200165645ustar00rootroot00000000000000borgmatic/borgmatic/commands/__init__.py000066400000000000000000000000001510202216200206630ustar00rootroot00000000000000borgmatic/borgmatic/commands/arguments.py000066400000000000000000002273211510202216200211520ustar00rootroot00000000000000import collections import io import itertools import re import sys from argparse import ArgumentParser import ruamel.yaml import borgmatic.config.schema from borgmatic.config import collect ACTION_ALIASES = { 'repo-create': ['rcreate', 'init', '-I'], 'prune': ['-p'], 'compact': [], 'create': ['-C'], 'check': ['-k'], 'config': [], 'delete': [], 'extract': ['-x'], 'export-tar': [], 'mount': ['-m'], 'umount': ['-u'], 'restore': ['-r'], 'repo-delete': ['rdelete'], 'repo-list': ['rlist'], 'list': ['-l'], 'repo-info': ['rinfo'], 'info': ['-i'], 'transfer': [], 'break-lock': [], 'key': [], 'borg': [], 'recreate': [], } def get_subaction_parsers(action_parser): ''' Given an argparse.ArgumentParser instance, lookup the subactions in it and return a dict from subaction name to subaction parser. ''' if not action_parser._subparsers: return {} return { subaction_name: subaction_parser for group_action in action_parser._subparsers._group_actions for subaction_name, subaction_parser in group_action.choices.items() } def get_subactions_for_actions(action_parsers): ''' Given a dict from action name to an argparse.ArgumentParser instance, make a map from action name to the names of contained sub-actions. ''' return { action: tuple( subaction_name for group_action in action_parser._subparsers._group_actions for subaction_name in group_action.choices ) for action, action_parser in action_parsers.items() if action_parser._subparsers } def omit_values_colliding_with_action_names(unparsed_arguments, parsed_arguments): ''' Given unparsed arguments as a sequence of strings and a dict from action name to parsed argparse.Namespace arguments, return the string arguments with any values omitted that happen to be the same as the name of a borgmatic action. This prevents, for instance, "check --only extract" from triggering the "extract" action. 
    '''
    remaining_arguments = list(unparsed_arguments)

    for parsed in parsed_arguments.values():
        for value in vars(parsed).values():
            if isinstance(value, str):
                if value in ACTION_ALIASES and value in remaining_arguments:
                    remaining_arguments.remove(value)
            elif isinstance(value, list):
                for item in value:
                    if item in ACTION_ALIASES and item in remaining_arguments:
                        remaining_arguments.remove(item)

    return tuple(remaining_arguments)


def parse_and_record_action_arguments(
    unparsed_arguments,
    parsed_arguments,
    action_parser,
    action_name,
    canonical_name=None,
):
    '''
    Given unparsed arguments as a sequence of strings, parsed arguments as a dict from action name
    to parsed argparse.Namespace, a parser to parse with, an action name, and an optional canonical
    action name (in case the action name is an alias), parse the arguments and return a tuple of
    any remaining string arguments that were not parsed. Also record the parsed argparse.Namespace
    by setting it into the given parsed arguments. If no parsing occurs because the given action
    doesn't apply to the given unparsed arguments, return the unparsed arguments unchanged.
    '''
    filtered_arguments = omit_values_colliding_with_action_names(
        unparsed_arguments,
        parsed_arguments,
    )

    if action_name not in filtered_arguments:
        return tuple(unparsed_arguments)

    parsed, remaining = action_parser.parse_known_args(filtered_arguments)
    parsed_arguments[canonical_name or action_name] = parsed

    # Special case: If this is a "borg" action, greedily consume all arguments after (+1) the
    # "borg" argument.
    if action_name == 'borg':
        borg_options_index = remaining.index('borg') + 1
        parsed_arguments['borg'].options = remaining[borg_options_index:]
        remaining = remaining[:borg_options_index]

    return tuple(argument for argument in remaining if argument != action_name)


def argument_is_flag(argument):
    '''
    Return True if the given argument looks like a flag, e.g. '--some-flag', as opposed to a
    non-flag value.
    '''
    return isinstance(argument, str) and argument.startswith('--')


def group_arguments_with_values(arguments):
    '''
    Given a sequence of arguments, return a sequence of tuples where each one contains either a
    single argument (such as for a stand-alone flag) or a flag argument and its corresponding
    value.

    For instance, given the following arguments sequence as input:

        ('--foo', '--bar', '33', '--baz')

    ... return the following output:

        (('--foo',), ('--bar', '33'), ('--baz',))
    '''
    grouped_arguments = []
    index = 0

    while index < len(arguments):
        this_argument = arguments[index]

        try:
            next_argument = arguments[index + 1]
        except IndexError:
            grouped_arguments.append((this_argument,))
            break

        if (
            argument_is_flag(this_argument)
            and not argument_is_flag(next_argument)
            and next_argument not in ACTION_ALIASES
        ):
            grouped_arguments.append((this_argument, next_argument))
            index += 2
            continue

        grouped_arguments.append((this_argument,))
        index += 1

    return tuple(grouped_arguments)


def get_unparsable_arguments(remaining_action_arguments):
    '''
    Given a sequence of argument tuples (one per action parser that parsed arguments), determine
    the remaining arguments that no action parsers have consumed.
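
    For instance (an illustrative example, not from the original source), if every action parser
    left '--bogus' among its remaining arguments, then ('--bogus',) is returned; but a flag
    consumed by even one action parser is omitted, because the all(...) check below only keeps
    argument groups present in every parser's leftovers.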
''' if not remaining_action_arguments: return () grouped_action_arguments = tuple( group_arguments_with_values(action_arguments) for action_arguments in remaining_action_arguments ) return tuple( itertools.chain.from_iterable( argument_group for argument_group in dict.fromkeys( itertools.chain.from_iterable(grouped_action_arguments), ) if all( argument_group in action_arguments for action_arguments in grouped_action_arguments ) ), ) def parse_arguments_for_actions(unparsed_arguments, action_parsers, global_parser): ''' Given a sequence of arguments, a dict from action name to argparse.ArgumentParser instance, and the global parser as a argparse.ArgumentParser instance, give each requested action's parser a shot at parsing all arguments. This allows common arguments like "--repository" to be shared across multiple action parsers. Return the result as a tuple of: (a dict mapping from action name to an argparse.Namespace of parsed arguments, a tuple of argument tuples where each is the remaining arguments not claimed by any action parser). ''' arguments = collections.OrderedDict() help_requested = bool('--help' in unparsed_arguments or '-h' in unparsed_arguments) remaining_action_arguments = [] alias_to_action_name = { alias: action_name for action_name, aliases in ACTION_ALIASES.items() for alias in aliases } # If the "borg" action is used, skip all other action parsers. This avoids confusion like # "borg list" triggering borgmatic's own list action. if 'borg' in unparsed_arguments: action_parsers = {'borg': action_parsers['borg']} # Ask each action parser, one by one, to parse arguments. for argument in unparsed_arguments: action_name = argument canonical_name = alias_to_action_name.get(action_name, action_name) action_parser = action_parsers.get(action_name) if not action_parser: continue subaction_parsers = get_subaction_parsers(action_parser) # But first parse with subaction parsers, if any. if subaction_parsers: subactions_parsed = False for subaction_name, subaction_parser in subaction_parsers.items(): remaining_action_arguments.append( tuple( argument for argument in parse_and_record_action_arguments( unparsed_arguments, arguments, subaction_parser, subaction_name, ) if argument != action_name ), ) if subaction_name in arguments: subactions_parsed = True if not subactions_parsed: if help_requested: action_parser.print_help() sys.exit(0) else: raise ValueError( f"Missing sub-action after {action_name} action. Expected one of: {', '.join(get_subactions_for_actions(action_parsers)[action_name])}", ) # Otherwise, parse with the main action parser. else: remaining_action_arguments.append( parse_and_record_action_arguments( unparsed_arguments, arguments, action_parser, action_name, canonical_name, ), ) # If no actions were explicitly requested, assume defaults. 
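    # Illustrative example (not from the original source): a bare "borgmatic --verbosity 1"
    # invocation parses no action names, so each default action below gets appended to the
    # unparsed arguments and parsed as though the user had typed
    # "borgmatic create prune compact check".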
if not arguments and not help_requested: for default_action_name in ('create', 'prune', 'compact', 'check'): default_action_parser = action_parsers[default_action_name] remaining_action_arguments.append( parse_and_record_action_arguments( (*unparsed_arguments, default_action_name), arguments, default_action_parser, default_action_name, ), ) arguments['global'], remaining = global_parser.parse_known_args(unparsed_arguments) remaining_action_arguments.append(remaining) return ( arguments, tuple(remaining_action_arguments) if arguments else unparsed_arguments, ) OMITTED_FLAG_NAMES = {'match-archives', 'progress', 'statistics', 'list-details'} def make_argument_description(schema, flag_name): ''' Given a configuration schema dict and a flag name for it, extend the schema's description with an example or additional information as appropriate based on its type. Return the updated description for use in a command-line argument. ''' description = schema.get('description') schema_type = schema.get('type') example = schema.get('example') pieces = [description] if description else [] if '[0]' in flag_name: pieces.append( ' To specify a different list element, replace the "[0]" with another array index ("[1]", "[2]", etc.).', ) if example and schema_type in ('array', 'object'): # noqa: PLR6201 example_buffer = io.StringIO() yaml = ruamel.yaml.YAML(typ='safe') yaml.default_flow_style = True yaml.dump(example, example_buffer) pieces.append(f'Example value: "{example_buffer.getvalue().strip()}"') return ' '.join(pieces).replace('%', '%%') def add_array_element_arguments(arguments_group, unparsed_arguments, flag_name): r''' Given an argparse._ArgumentGroup instance, a sequence of unparsed argument strings, and a dotted flag name, add command-line array element flags that correspond to the given unparsed arguments. Here's the background. We want to support flags that can have arbitrary indices like: --foo.bar[1].baz But argparse doesn't support that natively because the index can be an arbitrary number. We won't let that stop us though, will we? If the current flag name has an array component in it (e.g. a name with "[0]"), then make a pattern that would match the flag name regardless of the number that's in it. The idea is that we want to look for unparsed arguments that appear like the flag name, but instead of "[0]" they have, say, "[1]" or "[123]". Next, we check each unparsed argument against that pattern. If one of them matches, add an argument flag for it to the argument parser group. Example: Let's say flag_name is: --foo.bar[0].baz ... then the regular expression pattern will be: ^--foo\.bar\[\d+\]\.baz ... and, if that matches an unparsed argument of: --foo.bar[1].baz ... then an argument flag will get added equal to that unparsed argument. And so the unparsed argument will match it when parsing is performed! In this manner, we're using the actual user CLI input to inform what exact flags we support. ''' if '[0]' not in flag_name or not unparsed_arguments or '--help' in unparsed_arguments: return pattern = re.compile('^--' + flag_name.replace('[0]', r'\[\d+\]').replace('.', r'\.') + '$') try: # Find an existing list index flag (and its action) corresponding to the given flag name. (argument_action, existing_flag_name) = next( (action, action_flag_name) for action in arguments_group._group_actions for action_flag_name in action.option_strings if pattern.match(action_flag_name) if f'--{flag_name}'.startswith(action_flag_name) ) # Based on the type of the action (e.g. 
argparse._StoreTrueAction), look up the corresponding # action registry name (e.g., "store_true") to pass to add_argument(action=...) below. action_registry_name = next( registry_name for registry_name, action_type in arguments_group._registries['action'].items() # Not using isinstance() here because we only want an exact match—no parent classes. if type(argument_action) is action_type ) except StopIteration: return for unparsed in unparsed_arguments: unparsed_flag_name = unparsed.split('=', 1)[0] destination_name = unparsed_flag_name.lstrip('-').replace('-', '_') if not pattern.match(unparsed_flag_name) or unparsed_flag_name == existing_flag_name: continue if action_registry_name in {'store_true', 'store_false'}: arguments_group.add_argument( unparsed_flag_name, action=action_registry_name, default=argument_action.default, dest=destination_name, required=argument_action.nargs, ) else: arguments_group.add_argument( unparsed_flag_name, action=action_registry_name, choices=argument_action.choices, default=argument_action.default, dest=destination_name, nargs=argument_action.nargs, required=argument_action.nargs, type=argument_action.type, ) def add_arguments_from_schema(arguments_group, schema, unparsed_arguments, names=None): # noqa: PLR0912 ''' Given an argparse._ArgumentGroup instance, a configuration schema dict, and a sequence of unparsed argument strings, convert the entire schema into corresponding command-line flags and add them to the arguments group. For instance, given a schema of: { 'type': 'object', 'properties': { 'foo': { 'type': 'object', 'properties': { 'bar': {'type': 'integer'} } } } } ... the following flag will be added to the arguments group: --foo.bar If "foo" is instead an array of objects, both of the following will get added: --foo --foo[0].bar And if names are also passed in, they are considered to be the name components of an option (e.g. "foo" and "bar") and are used to construct a resulting flag. Bail if the schema is not a dict. ''' if names is None: names = () if not isinstance(schema, dict): return schema_type = schema.get('type') # If this option has multiple types, just use the first one (that isn't "null"). if isinstance(schema_type, list): try: schema_type = next(single_type for single_type in schema_type if single_type != 'null') except StopIteration: raise ValueError(f'Unknown type in configuration schema: {schema_type}') # If this is an "object" type, recurse for each child option ("property"). if schema_type == 'object': properties = schema.get('properties') # If there are child properties, recurse for each one. But if there are no child properties, # fall through so that a flag gets added below for the (empty) object. if properties: for name, child in properties.items(): add_arguments_from_schema( arguments_group, child, unparsed_arguments, (*names, name), ) return # If this is an "array" type, recurse for each items type child option. Don't return yet so that # a flag also gets added below for the array itself. if schema_type == 'array': items = schema.get('items', {}) properties = borgmatic.config.schema.get_properties(items) if properties: for name, child in properties.items(): add_arguments_from_schema( arguments_group, child, unparsed_arguments, (*names[:-1], f'{names[-1]}[0]', name), ) # If there aren't any children, then this is an array of scalars. Recurse accordingly. 
else: add_arguments_from_schema( arguments_group, items, unparsed_arguments, (*names[:-1], f'{names[-1]}[0]'), ) flag_name = '.'.join(names).replace('_', '-') # Certain options already have corresponding flags on individual actions (like "create # --progress"), so don't bother adding them to the global flags. if not flag_name or flag_name in OMITTED_FLAG_NAMES: return metavar = names[-1].upper() description = make_argument_description(schema, flag_name) # The object=str and array=str given here is to support specifying an object or an array as a # YAML string on the command-line. argument_type = borgmatic.config.schema.parse_type(schema_type, object=str, array=str) # As a UX nicety, add separate true and false flags for boolean options. if schema_type == 'boolean': arguments_group.add_argument( f'--{flag_name}', action='store_true', default=None, help=description, ) if names[-1].startswith('no_'): no_flag_name = '.'.join((*names[:-1], names[-1][len('no_') :])).replace('_', '-') else: no_flag_name = '.'.join((*names[:-1], 'no-' + names[-1])).replace('_', '-') arguments_group.add_argument( f'--{no_flag_name}', dest=flag_name.replace('-', '_'), action='store_false', default=None, help=f'Set the --{flag_name} value to false.', ) elif flag_name == 'verbosity': arguments_group.add_argument( '-v', '--verbosity', type=argument_type, metavar=metavar, help=description, ) else: arguments_group.add_argument( f'--{flag_name}', type=argument_type, metavar=metavar, help=description, ) add_array_element_arguments(arguments_group, unparsed_arguments, flag_name) def make_parsers(schema, unparsed_arguments): # noqa: PLR0915 ''' Given a configuration schema dict and unparsed arguments as a sequence of strings, build a global arguments parser, individual action parsers, and a combined parser containing both. Return them as a tuple. The global parser is useful for parsing just global arguments while ignoring actions, and the combined parser is handy for displaying help that includes everything: global flags, a list of actions, etc. ''' config_paths = collect.get_default_config_paths(expand_home=True) unexpanded_config_paths = collect.get_default_config_paths(expand_home=False) # Using allow_abbrev=False here prevents the global parser from erroring about "ambiguous" # options like --encryption. Such options are intended for an action parser rather than the # global parser, and so we don't want to error on them here. global_parser = ArgumentParser(allow_abbrev=False, add_help=False) global_group = global_parser.add_argument_group('global arguments') global_group.add_argument( '-c', '--config', dest='config_paths', action='append', help=f"Configuration filename or directory, can specify flag multiple times, defaults to: -c {' -c '.join(unexpanded_config_paths)}", ) global_group.add_argument( '-n', '--dry-run', dest='dry_run', action='store_true', help='Go through the motions, but do not actually write to any repositories', ) global_group.add_argument( '--override', metavar='OPTION.SUBOPTION=VALUE', dest='overrides', action='append', help='Deprecated. 
Configuration file option to override with specified value, see documentation for overriding list or key/value options, can specify flag multiple times', ) global_group.add_argument( '--no-environment-interpolation', dest='resolve_env', action='store_false', help='Do not resolve environment variables in configuration files', ) global_group.add_argument( '--bash-completion', default=False, action='store_true', help='Show bash completion script and exit', ) global_group.add_argument( '--fish-completion', default=False, action='store_true', help='Show fish completion script and exit', ) global_group.add_argument( '--version', dest='version', default=False, action='store_true', help='Display installed version number of borgmatic and exit', ) add_arguments_from_schema(global_group, schema, unparsed_arguments) global_plus_action_parser = ArgumentParser( description=''' Simple, configuration-driven backup software for servers and workstations. If no actions are given, then borgmatic defaults to: create, prune, compact, and check. ''', parents=[global_parser], ) action_parsers = global_plus_action_parser.add_subparsers( title='actions', metavar='', help='Specify zero or more actions. Defaults to create, prune, compact, and check. Use --help with action for details:', ) repo_create_parser = action_parsers.add_parser( 'repo-create', aliases=ACTION_ALIASES['repo-create'], help='Create a new, empty Borg repository (also known as "init")', description='Create a new, empty Borg repository (also known as "init")', add_help=False, ) repo_create_group = repo_create_parser.add_argument_group('repo-create arguments') repo_create_group.add_argument( '-e', '--encryption', dest='encryption_mode', help='Borg repository encryption mode', ) repo_create_group.add_argument( '--source-repository', '--other-repo', metavar='KEY_REPOSITORY', help='Path to an existing Borg repository whose key material should be reused [Borg 2.x+ only]', ) repo_create_group.add_argument( '--repository', help='Path of the new repository to create (must be already specified in a borgmatic configuration file), defaults to the configured repository if there is only one, quoted globs supported', ) repo_create_group.add_argument( '--copy-crypt-key', action='store_true', help='Copy the crypt key used for authenticated encryption from the source repository, defaults to a new random key [Borg 2.x+ only]', ) repo_create_group.add_argument( '--append-only', default=None, action='store_true', help='Create an append-only repository', ) repo_create_group.add_argument( '--storage-quota', help='Create a repository with a fixed storage quota', ) repo_create_group.add_argument( '--make-parent-dirs', dest='make_parent_directories', default=None, action='store_true', help='Create any missing parent directories of the repository directory [Borg 1.x only]', ) repo_create_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit', ) transfer_parser = action_parsers.add_parser( 'transfer', aliases=ACTION_ALIASES['transfer'], help='Transfer archives from one repository to another, optionally upgrading the transferred data [Borg 2.0+ only]', description='Transfer archives from one repository to another, optionally upgrading the transferred data [Borg 2.0+ only]', add_help=False, ) transfer_group = transfer_parser.add_argument_group('transfer arguments') transfer_group.add_argument( '--repository', help='Path of existing destination repository to transfer archives to, defaults to the configured repository if there is only one, quoted 
globs supported', ) transfer_group.add_argument( '--source-repository', help='Path of existing source repository to transfer archives from', required=True, ) transfer_group.add_argument( '--archive', help='Name or hash of a single archive to transfer (or "latest"), defaults to transferring all archives', ) transfer_group.add_argument( '--upgrader', help='Upgrader type used to convert the transferred data, e.g. "From12To20" to upgrade data from Borg 1.2 to 2.0 format, defaults to no conversion', ) transfer_group.add_argument( '--progress', default=None, action='store_true', help='Display progress as each archive is transferred', ) transfer_group.add_argument( '-a', '--match-archives', '--glob-archives', metavar='PATTERN', help='Only transfer archives with names, hashes, or series matching this pattern', ) transfer_group.add_argument( '--sort-by', metavar='KEYS', help='Comma-separated list of sorting keys', ) transfer_group.add_argument( '--first', metavar='N', help='Only transfer first N archives after other filters are applied', ) transfer_group.add_argument( '--last', metavar='N', help='Only transfer last N archives after other filters are applied', ) transfer_group.add_argument( '--oldest', metavar='TIMESPAN', help='Transfer archives within a specified time range starting from the timestamp of the oldest archive (e.g. 7d or 12m) [Borg 2.x+ only]', ) transfer_group.add_argument( '--newest', metavar='TIMESPAN', help='Transfer archives within a time range that ends at timestamp of the newest archive and starts a specified time range ago (e.g. 7d or 12m) [Borg 2.x+ only]', ) transfer_group.add_argument( '--older', metavar='TIMESPAN', help='Transfer archives that are older than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]', ) transfer_group.add_argument( '--newer', metavar='TIMESPAN', help='Transfer archives that are newer than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]', ) transfer_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit', ) prune_parser = action_parsers.add_parser( 'prune', aliases=ACTION_ALIASES['prune'], help='Prune archives according to the retention policy (with Borg 1.2+, you must run compact afterwards to actually free space)', description='Prune archives according to the retention policy (with Borg 1.2+, you must run compact afterwards to actually free space)', add_help=False, ) prune_group = prune_parser.add_argument_group('prune arguments') prune_group.add_argument( '--repository', help='Path of specific existing repository to prune (must be already specified in a borgmatic configuration file), quoted globs supported', ) prune_group.add_argument( '-a', '--match-archives', '--glob-archives', metavar='PATTERN', help='When pruning, only consider archives with names, hashes, or series matching this pattern', ) prune_group.add_argument( '--stats', dest='statistics', default=None, action='store_true', help='Display statistics of the pruned archive [Borg 1 only]', ) prune_group.add_argument( '--list', dest='list_details', default=None, action='store_true', help='List archives kept/pruned', ) prune_group.add_argument( '--oldest', metavar='TIMESPAN', help='Prune archives within a specified time range starting from the timestamp of the oldest archive (e.g. 
7d or 12m) [Borg 2.x+ only]', ) prune_group.add_argument( '--newest', metavar='TIMESPAN', help='Prune archives within a time range that ends at timestamp of the newest archive and starts a specified time range ago (e.g. 7d or 12m) [Borg 2.x+ only]', ) prune_group.add_argument( '--older', metavar='TIMESPAN', help='Prune archives that are older than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]', ) prune_group.add_argument( '--newer', metavar='TIMESPAN', help='Prune archives that are newer than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]', ) prune_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') compact_parser = action_parsers.add_parser( 'compact', aliases=ACTION_ALIASES['compact'], help='Compact segments to free space [Borg 1.2+, borgmatic 1.5.23+ only]', description='Compact segments to free space [Borg 1.2+, borgmatic 1.5.23+ only]', add_help=False, ) compact_group = compact_parser.add_argument_group('compact arguments') compact_group.add_argument( '--repository', help='Path of specific existing repository to compact (must be already specified in a borgmatic configuration file), quoted globs supported', ) compact_group.add_argument( '--progress', default=None, action='store_true', help='Display progress as each segment is compacted', ) compact_group.add_argument( '--cleanup-commits', dest='cleanup_commits', default=False, action='store_true', help='Cleanup commit-only 17-byte segment files left behind by Borg 1.1 [flag in Borg 1.2 only]', ) compact_group.add_argument( '--threshold', type=int, dest='compact_threshold', help='Minimum saved space percentage threshold for compacting a segment, defaults to 10', ) compact_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit', ) create_parser = action_parsers.add_parser( 'create', aliases=ACTION_ALIASES['create'], help='Create an archive (actually perform a backup)', description='Create an archive (actually perform a backup)', add_help=False, ) create_group = create_parser.add_argument_group('create arguments') create_group.add_argument( '--repository', help='Path of specific existing repository to backup to (must be already specified in a borgmatic configuration file), quoted globs supported', ) create_group.add_argument( '--progress', default=None, action='store_true', help='Display progress for each file as it is backed up', ) create_group.add_argument( '--stats', dest='statistics', default=None, action='store_true', help='Display statistics of archive', ) create_group.add_argument( '--list', '--files', dest='list_details', default=None, action='store_true', help='Show per-file details', ) create_group.add_argument( '--json', dest='json', default=False, action='store_true', help='Output results as JSON', ) create_group.add_argument( '--comment', metavar='COMMENT', help='Add a comment text to the archive', ) create_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') check_parser = action_parsers.add_parser( 'check', aliases=ACTION_ALIASES['check'], help='Check archives for consistency', description='Check archives for consistency', add_help=False, ) check_group = check_parser.add_argument_group('check arguments') check_group.add_argument( '--repository', help='Path of specific existing repository to check (must be already specified in a borgmatic configuration file), quoted globs supported', ) check_group.add_argument( '--progress', default=None, 
action='store_true', help='Display progress for each file as it is checked', ) check_group.add_argument( '--repair', dest='repair', default=False, action='store_true', help='Attempt to repair any inconsistencies found (for interactive use)', ) check_group.add_argument( '--max-duration', metavar='SECONDS', help='How long to check the repository before interrupting the check, defaults to no interruption', ) check_group.add_argument( '-a', '--match-archives', '--glob-archives', metavar='PATTERN', help='Only check archives with names, hashes, or series matching this pattern', ) check_group.add_argument( '--only', metavar='CHECK', choices=('repository', 'archives', 'data', 'extract', 'spot'), dest='only_checks', action='append', help='Run a particular consistency check (repository, archives, data, extract, or spot) instead of configured checks (subject to configured frequency, can specify flag multiple times)', ) check_group.add_argument( '--force', default=False, action='store_true', help='Ignore configured check frequencies and run checks unconditionally', ) check_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') delete_parser = action_parsers.add_parser( 'delete', aliases=ACTION_ALIASES['delete'], help='Delete an archive from a repository or delete an entire repository (with Borg 1.2+, you must run compact afterwards to actually free space)', description='Delete an archive from a repository or delete an entire repository (with Borg 1.2+, you must run compact afterwards to actually free space)', add_help=False, ) delete_group = delete_parser.add_argument_group('delete arguments') delete_group.add_argument( '--repository', help='Path of repository to delete or delete archives from, defaults to the configured repository if there is only one, quoted globs supported', ) delete_group.add_argument( '--archive', help='Archive name, hash, or series to delete', ) delete_group.add_argument( '--list', dest='list_details', default=None, action='store_true', help='Show details for the deleted archives', ) delete_group.add_argument( '--stats', dest='statistics', default=None, action='store_true', help='Display statistics for the deleted archives', ) delete_group.add_argument( '--cache-only', action='store_true', help='Delete only the local cache for the given repository', ) delete_group.add_argument( '--force', action='count', help='Force deletion of corrupted archives, can be given twice if once does not work', ) delete_group.add_argument( '--keep-security-info', action='store_true', help='Do not delete the local security info when deleting a repository', ) delete_group.add_argument( '--save-space', action='store_true', help='Work slower, but using less space [Not supported in Borg 2.x+]', ) delete_group.add_argument( '--checkpoint-interval', type=int, metavar='SECONDS', help='Write a checkpoint at the given interval, defaults to 1800 seconds (30 minutes)', ) delete_group.add_argument( '-a', '--match-archives', '--glob-archives', metavar='PATTERN', help='Only delete archives with names, hashes, or series matching this pattern', ) delete_group.add_argument( '--sort-by', metavar='KEYS', help='Comma-separated list of sorting keys', ) delete_group.add_argument( '--first', metavar='N', help='Delete first N archives after other filters are applied', ) delete_group.add_argument( '--last', metavar='N', help='Delete last N archives after other filters are applied', ) delete_group.add_argument( '--oldest', metavar='TIMESPAN', help='Delete archives within a specified time 
range starting from the timestamp of the oldest archive (e.g. 7d or 12m) [Borg 2.x+ only]', ) delete_group.add_argument( '--newest', metavar='TIMESPAN', help='Delete archives within a time range that ends at timestamp of the newest archive and starts a specified time range ago (e.g. 7d or 12m) [Borg 2.x+ only]', ) delete_group.add_argument( '--older', metavar='TIMESPAN', help='Delete archives that are older than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]', ) delete_group.add_argument( '--newer', metavar='TIMESPAN', help='Delete archives that are newer than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]', ) delete_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') extract_parser = action_parsers.add_parser( 'extract', aliases=ACTION_ALIASES['extract'], help='Extract files from a named archive to the current directory', description='Extract a named archive to the current directory', add_help=False, ) extract_group = extract_parser.add_argument_group('extract arguments') extract_group.add_argument( '--repository', help='Path of repository to extract, defaults to the configured repository if there is only one, quoted globs supported', ) extract_group.add_argument( '--archive', help='Name or hash of a single archive to extract (or "latest")', required=True, ) extract_group.add_argument( '--path', '--restore-path', metavar='PATH', dest='paths', action='append', help='Path to extract from archive, can specify flag multiple times, defaults to the entire archive', ) extract_group.add_argument( '--destination', metavar='PATH', dest='destination', help='Directory to extract files into, defaults to the current directory', ) extract_group.add_argument( '--strip-components', type=lambda number: number if number == 'all' else int(number), metavar='NUMBER', help='Number of leading path components to remove from each extracted path or "all" to strip all leading path components. Skip paths with fewer elements', ) extract_group.add_argument( '--progress', default=None, action='store_true', help='Display progress for each file as it is extracted', ) extract_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit', ) config_parser = action_parsers.add_parser( 'config', aliases=ACTION_ALIASES['config'], help='Perform configuration file related operations', description='Perform configuration file related operations', add_help=False, ) config_group = config_parser.add_argument_group('config arguments') config_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') config_parsers = config_parser.add_subparsers( title='config sub-actions', ) config_bootstrap_parser = config_parsers.add_parser( 'bootstrap', help='Extract the borgmatic configuration files from a named archive', description='Extract the borgmatic configuration files from a named archive', add_help=False, ) config_bootstrap_group = config_bootstrap_parser.add_argument_group( 'config bootstrap arguments', ) config_bootstrap_group.add_argument( '--repository', help='Path of repository to extract config files from, quoted globs supported', required=True, ) config_bootstrap_group.add_argument( '--local-path', help='Alternate Borg local executable. Defaults to "borg"', default='borg', ) config_bootstrap_group.add_argument( '--remote-path', help='Alternate Borg remote executable. 
Defaults to "borg"', default='borg', ) config_bootstrap_group.add_argument( '--user-runtime-directory', help='Path used for temporary runtime data like bootstrap metadata. Defaults to $XDG_RUNTIME_DIR or $TMPDIR or $TEMP or /var/run/$UID', ) config_bootstrap_group.add_argument( '--borgmatic-source-directory', help='Deprecated. Path formerly used for temporary runtime data like bootstrap metadata. Defaults to ~/.borgmatic', ) config_bootstrap_group.add_argument( '--archive', help='Name or hash of a single archive to extract config files from, defaults to "latest"', default='latest', ) config_bootstrap_group.add_argument( '--destination', metavar='PATH', dest='destination', help='Directory to extract config files into, defaults to /', default='/', ) config_bootstrap_group.add_argument( '--strip-components', type=lambda number: number if number == 'all' else int(number), metavar='NUMBER', help='Number of leading path components to remove from each extracted path or "all" to strip all leading path components. Skip paths with fewer elements', ) config_bootstrap_group.add_argument( '--progress', default=None, action='store_true', help='Display progress for each file as it is extracted', ) config_bootstrap_group.add_argument( '--ssh-command', metavar='COMMAND', help='Command to use instead of "ssh"', ) config_bootstrap_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit', ) config_generate_parser = config_parsers.add_parser( 'generate', help='Generate a sample borgmatic configuration file', description='Generate a sample borgmatic configuration file', add_help=False, ) config_generate_group = config_generate_parser.add_argument_group('config generate arguments') config_generate_group.add_argument( '-s', '--source', dest='source_filename', help='Optional configuration file to merge into the generated configuration, useful for upgrading your configuration', ) config_generate_group.add_argument( '-d', '--destination', dest='destination_path', default=config_paths[0], help=f'Destination configuration file (or directory if using --split), default: {unexpanded_config_paths[0]}', ) config_generate_group.add_argument( '--overwrite', default=False, action='store_true', help='Whether to overwrite any existing destination file, defaults to false', ) config_generate_group.add_argument( '--split', action='store_true', help='Assuming the destination is a directory instead of a file, split the configuration into separate files within it, one per option, useful for documentation', ) config_generate_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit', ) config_validate_parser = config_parsers.add_parser( 'validate', help='Validate borgmatic configuration files specified with --config (see borgmatic --help)', description='Validate borgmatic configuration files specified with --config (see borgmatic --help)', add_help=False, ) config_validate_group = config_validate_parser.add_argument_group('config validate arguments') config_validate_group.add_argument( '-s', '--show', action='store_true', help='Show the validated configuration after all include merging has occurred', ) config_validate_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit', ) export_tar_parser = action_parsers.add_parser( 'export-tar', aliases=ACTION_ALIASES['export-tar'], help='Export an archive to a tar-formatted file or stream', description='Export an archive to a tar-formatted file or stream', add_help=False, ) export_tar_group = 
export_tar_parser.add_argument_group('export-tar arguments') export_tar_group.add_argument( '--repository', help='Path of repository to export from, defaults to the configured repository if there is only one, quoted globs supported', ) export_tar_group.add_argument( '--archive', help='Name or hash of a single archive to export (or "latest")', required=True, ) export_tar_group.add_argument( '--path', metavar='PATH', dest='paths', action='append', help='Path to export from archive, can specify flag multiple times, defaults to the entire archive', ) export_tar_group.add_argument( '--destination', metavar='PATH', dest='destination', help='Path to destination export tar file, or "-" for stdout (but be careful about dirtying output with --verbosity or --list)', required=True, ) export_tar_group.add_argument( '--tar-filter', help='Name of filter program to pipe data through', ) export_tar_group.add_argument( '--list', '--files', dest='list_details', default=None, action='store_true', help='Show per-file details', ) export_tar_group.add_argument( '--strip-components', type=int, metavar='NUMBER', dest='strip_components', help='Number of leading path components to remove from each exported path. Skip paths with fewer elements', ) export_tar_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit', ) mount_parser = action_parsers.add_parser( 'mount', aliases=ACTION_ALIASES['mount'], help='Mount files from a named archive as a FUSE filesystem', description='Mount a named archive as a FUSE filesystem', add_help=False, ) mount_group = mount_parser.add_argument_group('mount arguments') mount_group.add_argument( '--repository', help='Path of repository to use, defaults to the configured repository if there is only one, quoted globs supported', ) mount_group.add_argument( '--archive', help='Name or hash of a single archive to mount (or "latest")', ) mount_group.add_argument( '--mount-point', metavar='PATH', dest='mount_point', help='Path where filesystem is to be mounted', required=True, ) mount_group.add_argument( '--path', metavar='PATH', dest='paths', action='append', help='Path to mount from archive, can specify multiple times, defaults to the entire archive', ) mount_group.add_argument( '--foreground', dest='foreground', default=False, action='store_true', help='Stay in foreground until ctrl-C is pressed', ) mount_group.add_argument( '--first', metavar='N', help='Mount first N archives after other filters are applied', ) mount_group.add_argument( '--last', metavar='N', help='Mount last N archives after other filters are applied', ) mount_group.add_argument( '--oldest', metavar='TIMESPAN', help='Mount archives within a specified time range starting from the timestamp of the oldest archive (e.g. 7d or 12m) [Borg 2.x+ only]', ) mount_group.add_argument( '--newest', metavar='TIMESPAN', help='Mount archives within a time range that ends at timestamp of the newest archive and starts a specified time range ago (e.g. 7d or 12m) [Borg 2.x+ only]', ) mount_group.add_argument( '--older', metavar='TIMESPAN', help='Mount archives that are older than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]', ) mount_group.add_argument( '--newer', metavar='TIMESPAN', help='Mount archives that are newer than the specified time range (e.g. 
7d or 12m) from the current time [Borg 2.x+ only]', ) mount_group.add_argument('--options', dest='options', help='Extra Borg mount options') mount_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') umount_parser = action_parsers.add_parser( 'umount', aliases=ACTION_ALIASES['umount'], help='Unmount a FUSE filesystem that was mounted with "borgmatic mount"', description='Unmount a mounted FUSE filesystem', add_help=False, ) umount_group = umount_parser.add_argument_group('umount arguments') umount_group.add_argument( '--mount-point', metavar='PATH', dest='mount_point', help='Path of filesystem to unmount', required=True, ) umount_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') repo_delete_parser = action_parsers.add_parser( 'repo-delete', aliases=ACTION_ALIASES['repo-delete'], help='Delete an entire repository (with Borg 1.2+, you must run compact afterwards to actually free space)', description='Delete an entire repository (with Borg 1.2+, you must run compact afterwards to actually free space)', add_help=False, ) repo_delete_group = repo_delete_parser.add_argument_group('delete arguments') repo_delete_group.add_argument( '--repository', help='Path of repository to delete, defaults to the configured repository if there is only one, quoted globs supported', ) repo_delete_group.add_argument( '--list', dest='list_details', default=None, action='store_true', help='Show details for the archives in the given repository', ) repo_delete_group.add_argument( '--force', action='count', help='Force deletion of corrupted archives, can be given twice if once does not work', ) repo_delete_group.add_argument( '--cache-only', action='store_true', help='Delete only the local cache for the given repository', ) repo_delete_group.add_argument( '--keep-security-info', action='store_true', help='Do not delete the local security info when deleting a repository', ) repo_delete_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit', ) restore_parser = action_parsers.add_parser( 'restore', aliases=ACTION_ALIASES['restore'], help='Restore data source (e.g. database) dumps from a named archive', description='Restore data source (e.g. database) dumps from a named archive. (To extract files instead, use "borgmatic extract".)', add_help=False, ) restore_group = restore_parser.add_argument_group('restore arguments') restore_group.add_argument( '--repository', help='Path of repository to restore from, defaults to the configured repository if there is only one, quoted globs supported', ) restore_group.add_argument( '--archive', help='Name or hash of a single archive to restore from (or "latest")', required=True, ) restore_group.add_argument( '--data-source', '--database', metavar='NAME', dest='data_sources', action='append', help="Name of data source (e.g. database) to restore from the archive, must be defined in borgmatic's configuration, can specify the flag multiple times, defaults to all data sources in the archive", ) restore_group.add_argument( '--schema', metavar='NAME', dest='schemas', action='append', help='Name of schema to restore from the data source, can specify flag multiple times, defaults to all schemas. Schemas are only supported for PostgreSQL and MongoDB databases', ) restore_group.add_argument( '--hostname', help='Database hostname to restore to. 
Defaults to the "restore_hostname" option in borgmatic\'s configuration', ) restore_group.add_argument( '--port', help='Database port to restore to. Defaults to the "restore_port" option in borgmatic\'s configuration', ) restore_group.add_argument( '--container', help='Container to restore to. Defaults to the "restore_container" option in borgmatic\'s configuration', ) restore_group.add_argument( '--username', help='Username with which to connect to the database. Defaults to the "restore_username" option in borgmatic\'s configuration', ) restore_group.add_argument( '--password', help='Password with which to connect to the restore database. Defaults to the "restore_password" option in borgmatic\'s configuration', ) restore_group.add_argument( '--restore-path', help='Path to restore SQLite database dumps to. Defaults to the "restore_path" option in borgmatic\'s configuration', ) restore_group.add_argument( '--original-label', help='The label where the dump to restore came from, only necessary if you need to disambiguate dumps', ) restore_group.add_argument( '--original-hostname', help='The hostname where the dump to restore came from, only necessary if you need to disambiguate dumps', ) restore_group.add_argument( '--original-container', help='The container where the dump to restore came from, only necessary if you need to disambiguate dumps', ) restore_group.add_argument( '--original-port', type=int, help="The port where the dump to restore came from (if that port is in borgmatic's configuration), only necessary if you need to disambiguate dumps", ) restore_group.add_argument( '--hook', help='The name of the data source hook for the dump to restore, only necessary if you need to disambiguate dumps', ) restore_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit', ) repo_list_parser = action_parsers.add_parser( 'repo-list', aliases=ACTION_ALIASES['repo-list'], help='List repository', description='List the archives in a repository', add_help=False, ) repo_list_group = repo_list_parser.add_argument_group('repo-list arguments') repo_list_group.add_argument( '--repository', help='Path of repository to list, defaults to the configured repositories, quoted globs supported', ) repo_list_group.add_argument( '--short', default=False, action='store_true', help='Output only archive names', ) repo_list_group.add_argument('--format', help='Format for archive listing') repo_list_group.add_argument( '--json', default=False, action='store_true', help='Output results as JSON', ) repo_list_group.add_argument( '-P', '--prefix', help='Deprecated. Only list archive names starting with this prefix', ) repo_list_group.add_argument( '-a', '--match-archives', '--glob-archives', metavar='PATTERN', help='Only list archive names, hashes, or series matching this pattern', ) repo_list_group.add_argument( '--sort-by', metavar='KEYS', help='Comma-separated list of sorting keys', ) repo_list_group.add_argument( '--first', metavar='N', help='List first N archives after other filters are applied', ) repo_list_group.add_argument( '--last', metavar='N', help='List last N archives after other filters are applied', ) repo_list_group.add_argument( '--oldest', metavar='TIMESPAN', help='List archives within a specified time range starting from the timestamp of the oldest archive (e.g. 
7d or 12m) [Borg 2.x+ only]', ) repo_list_group.add_argument( '--newest', metavar='TIMESPAN', help='List archives within a time range that ends at timestamp of the newest archive and starts a specified time range ago (e.g. 7d or 12m) [Borg 2.x+ only]', ) repo_list_group.add_argument( '--older', metavar='TIMESPAN', help='List archives that are older than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]', ) repo_list_group.add_argument( '--newer', metavar='TIMESPAN', help='List archives that are newer than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]', ) repo_list_group.add_argument( '--deleted', default=False, action='store_true', help="List only deleted archives that haven't yet been compacted [Borg 2.x+ only]", ) repo_list_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit', ) list_parser = action_parsers.add_parser( 'list', aliases=ACTION_ALIASES['list'], help='List archive', description='List the files in an archive or search for a file across archives', add_help=False, ) list_group = list_parser.add_argument_group('list arguments') list_group.add_argument( '--repository', help='Path of repository containing archive to list, defaults to the configured repositories, quoted globs supported', ) list_group.add_argument( '--archive', help='Name or hash of a single archive to list (or "latest")', ) list_group.add_argument( '--path', metavar='PATH', dest='paths', action='append', help='Path or pattern to list from a single selected archive (via "--archive"), can specify flag multiple times, defaults to listing the entire archive', ) list_group.add_argument( '--find', metavar='PATH', dest='find_paths', action='append', help='Partial path or pattern to search for and list across multiple archives, can specify flag multiple times', ) list_group.add_argument( '--short', default=False, action='store_true', help='Output only path names', ) list_group.add_argument('--format', help='Format for file listing') list_group.add_argument( '--json', default=False, action='store_true', help='Output results as JSON', ) list_group.add_argument( '-P', '--prefix', help='Deprecated. 
Only list archive names starting with this prefix', ) list_group.add_argument( '-a', '--match-archives', '--glob-archives', metavar='PATTERN', help='Only list archive names matching this pattern', ) list_group.add_argument( '--sort-by', metavar='KEYS', help='Comma-separated list of sorting keys', ) list_group.add_argument( '--first', metavar='N', help='List first N archives after other filters are applied', ) list_group.add_argument( '--last', metavar='N', help='List last N archives after other filters are applied', ) list_group.add_argument( '-e', '--exclude', metavar='PATTERN', help='Exclude paths matching the pattern', ) list_group.add_argument( '--exclude-from', metavar='FILENAME', help='Exclude paths from exclude file, one per line', ) list_group.add_argument('--pattern', help='Include or exclude paths matching a pattern') list_group.add_argument( '--patterns-from', metavar='FILENAME', help='Include or exclude paths matching patterns from pattern file, one per line', ) list_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') repo_info_parser = action_parsers.add_parser( 'repo-info', aliases=ACTION_ALIASES['repo-info'], help='Show repository summary information such as disk space used', description='Show repository summary information such as disk space used', add_help=False, ) repo_info_group = repo_info_parser.add_argument_group('repo-info arguments') repo_info_group.add_argument( '--repository', help='Path of repository to show info for, defaults to the configured repository if there is only one, quoted globs supported', ) repo_info_group.add_argument( '--json', dest='json', default=False, action='store_true', help='Output results as JSON', ) repo_info_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit', ) info_parser = action_parsers.add_parser( 'info', aliases=ACTION_ALIASES['info'], help='Show archive summary information such as disk space used', description='Show archive summary information such as disk space used', add_help=False, ) info_group = info_parser.add_argument_group('info arguments') info_group.add_argument( '--repository', help='Path of repository containing archive to show info for, defaults to the configured repository if there is only one, quoted globs supported', ) info_group.add_argument( '--archive', help='Archive name, hash, or series to show info for (or "latest")', ) info_group.add_argument( '--json', dest='json', default=False, action='store_true', help='Output results as JSON', ) info_group.add_argument( '-P', '--prefix', help='Deprecated. Only show info for archive names starting with this prefix', ) info_group.add_argument( '-a', '--match-archives', '--glob-archives', metavar='PATTERN', help='Only show info for archive names, hashes, or series matching this pattern', ) info_group.add_argument( '--sort-by', metavar='KEYS', help='Comma-separated list of sorting keys', ) info_group.add_argument( '--first', metavar='N', help='Show info for first N archives after other filters are applied', ) info_group.add_argument( '--last', metavar='N', help='Show info for last N archives after other filters are applied', ) info_group.add_argument( '--oldest', metavar='TIMESPAN', help='Show info for archives within a specified time range starting from the timestamp of the oldest archive (e.g. 
7d or 12m) [Borg 2.x+ only]', ) info_group.add_argument( '--newest', metavar='TIMESPAN', help='Show info for archives within a time range that ends at timestamp of the newest archive and starts a specified time range ago (e.g. 7d or 12m) [Borg 2.x+ only]', ) info_group.add_argument( '--older', metavar='TIMESPAN', help='Show info for archives that are older than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]', ) info_group.add_argument( '--newer', metavar='TIMESPAN', help='Show info for archives that are newer than the specified time range (e.g. 7d or 12m) from the current time [Borg 2.x+ only]', ) info_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') break_lock_parser = action_parsers.add_parser( 'break-lock', aliases=ACTION_ALIASES['break-lock'], help='Break the repository and cache locks left behind by Borg aborting', description='Break Borg repository and cache locks left behind by Borg aborting', add_help=False, ) break_lock_group = break_lock_parser.add_argument_group('break-lock arguments') break_lock_group.add_argument( '--repository', help='Path of repository to break the lock for, defaults to the configured repository if there is only one, quoted globs supported', ) break_lock_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit', ) key_parser = action_parsers.add_parser( 'key', aliases=ACTION_ALIASES['key'], help='Perform repository key related operations', description='Perform repository key related operations', add_help=False, ) key_group = key_parser.add_argument_group('key arguments') key_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') key_parsers = key_parser.add_subparsers( title='key sub-actions', ) key_export_parser = key_parsers.add_parser( 'export', help='Export a copy of the repository key for safekeeping in case the original goes missing or gets damaged', description='Export a copy of the repository key for safekeeping in case the original goes missing or gets damaged', add_help=False, ) key_export_group = key_export_parser.add_argument_group('key export arguments') key_export_group.add_argument( '--paper', action='store_true', help='Export the key in a text format suitable for printing and later manual entry', ) key_export_group.add_argument( '--qr-html', action='store_true', help='Export the key in an HTML format suitable for printing and later manual entry or QR code scanning', ) key_export_group.add_argument( '--repository', help='Path of repository to export the key for, defaults to the configured repository if there is only one, quoted globs supported', ) key_export_group.add_argument( '--path', metavar='PATH', help='Path to export the key to, defaults to stdout (but be careful about dirtying the output with --verbosity)', ) key_export_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit', ) key_import_parser = key_parsers.add_parser( 'import', help='Import a copy of the repository key from backup', description='Import a copy of the repository key from backup', add_help=False, ) key_import_group = key_import_parser.add_argument_group('key import arguments') key_import_group.add_argument( '--paper', action='store_true', help='Import interactively from a backup done with --paper', ) key_import_group.add_argument( '--repository', help='Path of repository to import the key from, defaults to the configured repository if there is only one, quoted globs supported', ) 
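# A usage sketch for these key sub-actions, assuming a single configured repository (the key backup path # here is hypothetical): "borgmatic key export --path /root/repo-key.backup" writes the key out, and # "borgmatic key import --path /root/repo-key.backup" loads it back in.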
key_import_group.add_argument( '--path', metavar='PATH', help='Path to import the key from backup, defaults to stdin', ) key_import_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit', ) key_change_passphrase_parser = key_parsers.add_parser( 'change-passphrase', help='Change the passphrase protecting the repository key', description='Change the passphrase protecting the repository key', add_help=False, ) key_change_passphrase_group = key_change_passphrase_parser.add_argument_group( 'key change-passphrase arguments', ) key_change_passphrase_group.add_argument( '--repository', help='Path of repository to change the passphrase for, defaults to the configured repository if there is only one, quoted globs supported', ) key_change_passphrase_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit', ) recreate_parser = action_parsers.add_parser( 'recreate', aliases=ACTION_ALIASES['recreate'], help='Recreate an archive in a repository (with Borg 1.2+, you must run compact afterwards to actually free space)', description='Recreate an archive in a repository (with Borg 1.2+, you must run compact afterwards to actually free space)', add_help=False, ) recreate_group = recreate_parser.add_argument_group('recreate arguments') recreate_group.add_argument( '--repository', help='Path of repository containing archive to recreate, defaults to the configured repository if there is only one, quoted globs supported', ) recreate_group.add_argument( '--archive', help='Archive name, hash, or series to recreate', ) recreate_group.add_argument( '--list', dest='list_details', default=None, action='store_true', help='Show per-file details', ) recreate_group.add_argument( '--target', metavar='TARGET', help='Create a new archive from the specified archive (via --archive), without replacing it', ) recreate_group.add_argument( '--comment', metavar='COMMENT', help='Add a comment text to the archive or, if an archive is not provided, to all matching archives', ) recreate_group.add_argument( '--timestamp', metavar='TIMESTAMP', help='Manually override the archive creation date/time (UTC)', ) recreate_group.add_argument( '-a', '--match-archives', '--glob-archives', dest='match_archives', metavar='PATTERN', help='Only consider archive names, hashes, or series matching this pattern [Borg 2.x+ only]', ) recreate_group.add_argument( '-h', '--help', action='help', help='Show this help message and exit', ) borg_parser = action_parsers.add_parser( 'borg', aliases=ACTION_ALIASES['borg'], help='Run an arbitrary Borg command', description="Run an arbitrary Borg command based on borgmatic's configuration", add_help=False, ) borg_group = borg_parser.add_argument_group('borg arguments') borg_group.add_argument( '--repository', help='Path of repository to pass to Borg, defaults to the configured repositories, quoted globs supported', ) borg_group.add_argument( '--archive', help='Archive name, hash, or series to pass to Borg (or "latest")', ) borg_group.add_argument( '--', metavar='OPTION', dest='options', nargs='+', help='Options to pass to Borg, command first ("create", "list", etc). "--" is optional. 
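For example: "borgmatic borg list --short".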
To specify the repository or the archive, you must use --repository or --archive instead of providing them here.', ) borg_group.add_argument('-h', '--help', action='help', help='Show this help message and exit') return global_parser, action_parsers, global_plus_action_parser HIGHLANDER_ACTION_ARGUMENTS_COUNT = 2 # 1 for "global" + 1 for the action def parse_arguments(schema, *unparsed_arguments): ''' Given a configuration schema dict and the command-line arguments with which this script was invoked, as a sequence of unparsed argument strings, parse the arguments and return them as a dict mapping from action name (or "global") to an argparse.Namespace instance. Raise ValueError if the arguments cannot be parsed. Raise SystemExit with an error code of 0 if "--help" was requested. ''' global_parser, action_parsers, global_plus_action_parser = make_parsers( schema, unparsed_arguments, ) arguments, remaining_action_arguments = parse_arguments_for_actions( unparsed_arguments, action_parsers.choices, global_parser, ) if not arguments['global'].config_paths: arguments['global'].config_paths = collect.get_default_config_paths(expand_home=True) for action_name in ('bootstrap', 'generate', 'validate'): if action_name in arguments and len(arguments) > HIGHLANDER_ACTION_ARGUMENTS_COUNT: raise ValueError( f'The {action_name} action cannot be combined with other actions. Please run it separately.', ) unknown_arguments = get_unparsable_arguments(remaining_action_arguments) if unknown_arguments: if '--help' in unknown_arguments or '-h' in unknown_arguments: global_plus_action_parser.print_help() sys.exit(0) global_plus_action_parser.print_usage() raise ValueError( f"Unrecognized argument{'s' if len(unknown_arguments) > 1 else ''}: {' '.join(unknown_arguments)}", ) if ( ('list' in arguments and 'repo-info' in arguments and arguments['list'].json) # noqa: PLR0916 or ('list' in arguments and 'info' in arguments and arguments['list'].json) or ('repo-info' in arguments and 'info' in arguments and arguments['repo-info'].json) ): raise ValueError('With the --json flag, multiple actions cannot be used together.') if 'list' in arguments and (arguments['list'].prefix and arguments['list'].match_archives): raise ValueError( 'With the list action, only one of --prefix or --match-archives flags can be used.', ) if 'repo-list' in arguments and ( arguments['repo-list'].prefix and arguments['repo-list'].match_archives ): raise ValueError( 'With the repo-list action, only one of --prefix or --match-archives flags can be used.', ) if 'info' in arguments and ( # noqa: PLR0916 (arguments['info'].archive and arguments['info'].prefix) or (arguments['info'].archive and arguments['info'].match_archives) or (arguments['info'].prefix and arguments['info'].match_archives) ): raise ValueError( 'With the info action, only one of --archive, --prefix, or --match-archives flags can be used.', ) if 'borg' in arguments and arguments['global'].dry_run: raise ValueError('With the borg action, --dry-run is not supported.') return arguments borgmatic/borgmatic/commands/borgmatic.py000066400000000000000000001270741510202216200211140ustar00rootroot00000000000000import collections import importlib.metadata import json import logging import os import sys import time from queue import Queue from subprocess import CalledProcessError import ruamel.yaml import borgmatic.actions.borg import borgmatic.actions.break_lock import borgmatic.actions.change_passphrase import borgmatic.actions.check import borgmatic.actions.compact import
borgmatic.actions.config.bootstrap import borgmatic.actions.config.generate import borgmatic.actions.config.validate import borgmatic.actions.create import borgmatic.actions.delete import borgmatic.actions.export_key import borgmatic.actions.export_tar import borgmatic.actions.extract import borgmatic.actions.import_key import borgmatic.actions.info import borgmatic.actions.list import borgmatic.actions.mount import borgmatic.actions.prune import borgmatic.actions.recreate import borgmatic.actions.repo_create import borgmatic.actions.repo_delete import borgmatic.actions.repo_info import borgmatic.actions.repo_list import borgmatic.actions.restore import borgmatic.actions.transfer import borgmatic.commands.completion.bash import borgmatic.commands.completion.fish import borgmatic.config.load import borgmatic.config.paths from borgmatic.borg import umount as borg_umount from borgmatic.borg import version as borg_version from borgmatic.commands.arguments import parse_arguments from borgmatic.config import checks, collect, validate from borgmatic.hooks import command, dispatch from borgmatic.hooks.monitoring import monitor from borgmatic.logger import ( DISABLED, Log_prefix, add_custom_log_levels, configure_delayed_logging, configure_logging, should_do_markup, ) from borgmatic.signals import configure_signals from borgmatic.verbosity import get_verbosity, verbosity_to_log_level logger = logging.getLogger(__name__) def get_skip_actions(config, arguments): ''' Given a configuration dict and command-line arguments as a dict from action name to argparse.Namespace, return a list of the configured action names to skip. Omit "check" from this list though if "check --force" is part of the command-line arguments. ''' skip_actions = config.get('skip_actions', []) if 'check' in arguments and arguments['check'].force: return [action for action in skip_actions if action != 'check'] return skip_actions class Monitoring_hooks: ''' A Python context manager for pinging monitoring hooks for the start state before the wrapped code runs, and for the log and finish (or fail) states after it completes. Also responsible for initializing/destroying the monitoring hooks. Example use as a context manager: with Monitoring_hooks(config_filename, config, arguments, global_arguments): do_stuff() ''' def __init__(self, config_filename, config, arguments, global_arguments): ''' Given a configuration filename, a configuration dict, command-line arguments as a dict from action name to argparse.Namespace, and global arguments as an argparse.Namespace, save relevant data points for use below. ''' using_primary_action = {'create', 'prune', 'compact', 'check'}.intersection(arguments) self.config_filename = config_filename self.config = config self.dry_run = global_arguments.dry_run self.monitoring_log_level = verbosity_to_log_level( get_verbosity({config_filename: config}, 'monitoring_verbosity'), ) self.monitoring_hooks_are_activated = ( using_primary_action and self.monitoring_log_level != DISABLED ) def __enter__(self): ''' If monitoring hooks are enabled and a primary action is in use, initialize monitoring hooks and ping them for the "start" state.
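Raise ValueError if pinging a monitor fails.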
''' if not self.monitoring_hooks_are_activated: return dispatch.call_hooks( 'initialize_monitor', self.config, dispatch.Hook_type.MONITORING, self.config_filename, self.monitoring_log_level, self.dry_run, ) try: dispatch.call_hooks( 'ping_monitor', self.config, dispatch.Hook_type.MONITORING, self.config_filename, monitor.State.START, self.monitoring_log_level, self.dry_run, ) except (OSError, CalledProcessError) as error: raise ValueError(f'Error pinging monitor: {error}') def __exit__(self, exception_type, exception, traceback): ''' If monitoring hooks are enabled and a primary action is in use, ping monitoring hooks for the "log" state and also the "finish" or "fail" states (depending on whether there's an exception). Lastly, destroy monitoring hooks. ''' if not self.monitoring_hooks_are_activated: return # Send logs irrespective of error. try: dispatch.call_hooks( 'ping_monitor', self.config, dispatch.Hook_type.MONITORING, self.config_filename, monitor.State.LOG, self.monitoring_log_level, self.dry_run, ) except (OSError, CalledProcessError) as error: raise ValueError(f'Error pinging monitor: {error}') try: dispatch.call_hooks( 'ping_monitor', self.config, dispatch.Hook_type.MONITORING, self.config_filename, monitor.State.FAIL if exception else monitor.State.FINISH, self.monitoring_log_level, self.dry_run, ) except (OSError, CalledProcessError) as error: # If the wrapped code errored, prefer raising that exception, as it's probably more # important than a monitor failing to ping. if exception: return raise ValueError(f'Error pinging monitor: {error}') dispatch.call_hooks( 'destroy_monitor', self.config, dispatch.Hook_type.MONITORING, self.monitoring_log_level, self.dry_run, ) def run_configuration(config_filename, config, config_paths, arguments): # noqa: PLR0912, PLR0915 ''' Given a config filename, the corresponding parsed config dict, a sequence of loaded configuration paths, and command-line arguments as a dict from subparser name to a namespace of parsed arguments, execute the defined create, prune, compact, check, and/or other actions. 
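Each action is run against every configured repository in turn, and a repository that errors is retried up to the configured number of "retries" before its error is reported.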
Yield a combination of: * JSON output strings from successfully executing any actions that produce JSON * logging.LogRecord instances containing errors from any actions or backup hooks that fail ''' global_arguments = arguments['global'] local_path = config.get('local_path', 'borg') remote_path = config.get('remote_path') retries = config.get('retries', 0) retry_wait = config.get('retry_wait', 0) repo_queue = Queue() encountered_error = None error_repository = None skip_actions = get_skip_actions(config, arguments) if skip_actions: logger.debug( f"Skipping {'/'.join(skip_actions)} action{'s' if len(skip_actions) > 1 else ''} due to configured skip_actions", ) try: # noqa: PLR1702 with ( Monitoring_hooks(config_filename, config, arguments, global_arguments), borgmatic.hooks.command.Before_after_hooks( command_hooks=config.get('commands'), before_after='configuration', umask=config.get('umask'), working_directory=borgmatic.config.paths.get_working_directory(config), dry_run=global_arguments.dry_run, action_names=arguments.keys(), configuration_filename=config_filename, log_file=config.get('log_file', ''), ), ): try: local_borg_version = borg_version.local_borg_version(config, local_path) logger.debug(f'Borg {local_borg_version}') except (OSError, CalledProcessError, ValueError) as error: yield from log_error_records( f'{config_filename}: Error getting local Borg version', error, ) raise for repo in config['repositories']: repo_queue.put( (repo, 0), ) while not repo_queue.empty(): repository, retry_num = repo_queue.get() with Log_prefix(repository.get('label', repository['path'])): logger.debug('Running actions for repository') timeout = retry_num * retry_wait if timeout: logger.warning(f'Sleeping {timeout}s before next retry') time.sleep(timeout) try: yield from run_actions( arguments=arguments, config_filename=config_filename, config=config, config_paths=config_paths, local_path=local_path, remote_path=remote_path, local_borg_version=local_borg_version, repository=repository, ) except (OSError, CalledProcessError, ValueError) as error: if retry_num < retries: repo_queue.put( (repository, retry_num + 1), ) tuple( # Consume the generator so as to trigger logging. log_error_records( 'Error running actions for repository', error, levelno=logging.WARNING, log_command_error_output=True, ), ) logger.warning(f'Retrying... attempt {retry_num + 1}/{retries}') continue if command.considered_soft_failure(error): continue yield from log_error_records( 'Error running actions for repository', error, ) encountered_error = error error_repository = repository # Re-raise any error, so that the Monitoring_hooks context manager wrapping this # code can see the error and act accordingly. Do this here rather than as soon as # the error is encountered so that an error with one repository doesn't prevent # other repositories from running. 
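# In other words, any error is stashed while the remaining repositories run, and the most recent one is # re-raised here once the repository queue has drained.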
if encountered_error: raise encountered_error except (OSError, CalledProcessError, ValueError) as error: yield from log_error_records('Error running configuration', error) encountered_error = error if not encountered_error: return try: command.execute_hooks( command.filter_hooks( config.get('commands'), after='error', action_names=arguments.keys(), state_names=['fail'], ), config.get('umask'), borgmatic.config.paths.get_working_directory(config), global_arguments.dry_run, configuration_filename=config_filename, log_file=config.get('log_file', ''), repository=error_repository.get('path', '') if error_repository else '', repository_label=error_repository.get('label', '') if error_repository else '', error=encountered_error, output=getattr(encountered_error, 'output', ''), ) except (OSError, CalledProcessError) as error: if command.considered_soft_failure(error): return yield from log_error_records(f'{config_filename}: Error running after error hook', error) def run_actions( # noqa: PLR0912, PLR0915 *, arguments, config_filename, config, config_paths, local_path, remote_path, local_borg_version, repository, ): ''' Given parsed command-line arguments as a dict from subparser name to an argparse.Namespace of parsed arguments, the configuration filename, a configuration dict, a sequence of loaded configuration paths, local and remote paths to Borg, a local Borg version string, and a repository dict, run all actions from the command-line arguments on the given repository. Yield JSON output strings from executing any actions that produce JSON. Raise OSError or subprocess.CalledProcessError if an error occurs running a command for an action or a hook. Raise ValueError if the arguments or configuration passed to an action are invalid. ''' add_custom_log_levels() repository_path = os.path.expanduser(repository['path']) global_arguments = arguments['global'] dry_run_label = ' (dry run; not making any changes)' if global_arguments.dry_run else '' hook_context = { 'configuration_filename': config_filename, 'repository_label': repository.get('label', ''), 'log_file': config.get('log_file', ''), # Deprecated: For backwards compatibility with borgmatic < 1.6.0.
'repositories': ','.join([repo['path'] for repo in config['repositories']]), 'repository': repository_path, } skip_actions = set(get_skip_actions(config, arguments)) requested_repository = next( ( repository for action_arguments in arguments.values() for repository in (getattr(action_arguments, 'repository', None),) if repository is not None ), None, ) if requested_repository and not borgmatic.config.validate.repositories_match( repository, requested_repository, ): logger.debug('Skipping actions because the requested --repository does not match') return with borgmatic.hooks.command.Before_after_hooks( command_hooks=config.get('commands'), before_after='repository', umask=config.get('umask'), working_directory=borgmatic.config.paths.get_working_directory(config), dry_run=global_arguments.dry_run, action_names=arguments.keys(), **hook_context, ): for action_name, action_arguments in arguments.items(): if action_name == 'global' or action_name in skip_actions: continue with borgmatic.hooks.command.Before_after_hooks( command_hooks=config.get('commands'), before_after='action', umask=config.get('umask'), working_directory=borgmatic.config.paths.get_working_directory(config), dry_run=global_arguments.dry_run, action_names=(action_name,), **hook_context, ): if action_name == 'repo-create': borgmatic.actions.repo_create.run_repo_create( repository, config, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'transfer': borgmatic.actions.transfer.run_transfer( repository, config, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'create': yield from borgmatic.actions.create.run_create( config_filename, repository, config, config_paths, local_borg_version, action_arguments, global_arguments, dry_run_label, local_path, remote_path, ) elif action_name == 'recreate': borgmatic.actions.recreate.run_recreate( repository, config, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'prune': borgmatic.actions.prune.run_prune( config_filename, repository, config, local_borg_version, action_arguments, global_arguments, dry_run_label, local_path, remote_path, ) elif action_name == 'compact': borgmatic.actions.compact.run_compact( config_filename, repository, config, local_borg_version, action_arguments, global_arguments, dry_run_label, local_path, remote_path, ) elif action_name == 'check': if checks.repository_enabled_for_checks(repository, config): borgmatic.actions.check.run_check( config_filename, repository, config, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'extract': borgmatic.actions.extract.run_extract( config_filename, repository, config, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'export-tar': borgmatic.actions.export_tar.run_export_tar( repository, config, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'mount': borgmatic.actions.mount.run_mount( repository, config, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'restore': borgmatic.actions.restore.run_restore( repository, config, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'repo-list': yield from borgmatic.actions.repo_list.run_repo_list( repository, config, local_borg_version, action_arguments, 
global_arguments, local_path, remote_path, ) elif action_name == 'list': yield from borgmatic.actions.list.run_list( repository, config, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'repo-info': yield from borgmatic.actions.repo_info.run_repo_info( repository, config, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'info': yield from borgmatic.actions.info.run_info( repository, config, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'break-lock': borgmatic.actions.break_lock.run_break_lock( repository, config, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'export': borgmatic.actions.export_key.run_export_key( repository, config, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'import': borgmatic.actions.import_key.run_import_key( repository, config, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'change-passphrase': borgmatic.actions.change_passphrase.run_change_passphrase( repository, config, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'delete': borgmatic.actions.delete.run_delete( repository, config, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'repo-delete': borgmatic.actions.repo_delete.run_repo_delete( repository, config, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) elif action_name == 'borg': borgmatic.actions.borg.run_borg( repository, config, local_borg_version, action_arguments, global_arguments, local_path, remote_path, ) def load_configurations(config_filenames, arguments, overrides=None, resolve_env=True): ''' Given a sequence of configuration filenames, arguments as a dict from action name to argparse.Namespace, a sequence of configuration file override strings in the form of "option.suboption=value", and whether to resolve environment variables, load and validate each configuration file. Return the results as a tuple of: dict of configuration filename to corresponding parsed configuration, a sequence of paths for all loaded configuration files (including includes), and a sequence of logging.LogRecord instances containing any parse errors. Log records are returned here instead of being logged directly because logging isn't yet initialized at this point! (Although with the Delayed_logging_handler now in place, maybe this approach could change.) ''' # Dict mapping from config filename to corresponding parsed config dict. configs = collections.OrderedDict() config_paths = set() logs = [] # Parse and load each configuration file. 
for config_filename in config_filenames: logs.extend( [ logging.makeLogRecord( dict( levelno=logging.DEBUG, levelname='DEBUG', msg=f'{config_filename}: Loading configuration file', ), ), ], ) try: configs[config_filename], paths, parse_logs = validate.parse_configuration( config_filename, validate.schema_filename(), arguments, overrides, resolve_env, ) config_paths.update(paths) logs.extend(parse_logs) except PermissionError: logs.extend( [ logging.makeLogRecord( dict( levelno=logging.CRITICAL, levelname='CRITICAL', msg=f'{config_filename}: Insufficient permissions to read configuration file', ), ), ], ) except (ValueError, OSError, validate.Validation_error) as error: logs.extend( [ logging.makeLogRecord( dict( levelno=logging.CRITICAL, levelname='CRITICAL', msg=f'{config_filename}: Error parsing configuration file', ), ), logging.makeLogRecord( dict(levelno=logging.CRITICAL, levelname='CRITICAL', msg=str(error)), ), ], ) return (configs, sorted(config_paths), logs) def log_record(suppress_log=False, **kwargs): ''' Create a log record based on the given makeLogRecord() arguments, one of which must be named "levelno". Log the record (unless suppress log is set) and return it. ''' record = logging.makeLogRecord(kwargs) if suppress_log: return record logger.handle(record) return record BORG_REPOSITORY_ACCESS_ABORTED_EXIT_CODE = 62 def log_error_records( message, error=None, levelno=logging.CRITICAL, log_command_error_output=False, ): ''' Given error message text, an optional exception object, an optional log level, and whether to log the error output of a CalledProcessError (if any), log error summary information and also yield it as a series of logging.LogRecord instances. Note that because the logs are yielded as a generator, logs won't get logged unless you consume the generator output. ''' level_name = logging._levelToName[levelno] if not error: yield log_record(levelno=levelno, levelname=level_name, msg=str(message)) return try: raise error except CalledProcessError as called_process_error: yield log_record(levelno=levelno, levelname=level_name, msg=str(message)) if called_process_error.output: try: output = called_process_error.output.decode('utf-8') except (UnicodeDecodeError, AttributeError): output = called_process_error.output # Suppress these logs for now and save the error output for the log summary at the end. # Log a separate record per line, as some errors can be really verbose and overflow the # per-record size limits imposed by some logging backends. for output_line in output.splitlines(): yield log_record( levelno=levelno, levelname=level_name, msg=output_line, suppress_log=True, ) yield log_record(levelno=levelno, levelname=level_name, msg=str(called_process_error)) if called_process_error.returncode == BORG_REPOSITORY_ACCESS_ABORTED_EXIT_CODE: yield log_record( levelno=levelno, levelname=level_name, msg='\nTo work around this, set either the "relocated_repo_access_is_ok" or "unknown_unencrypted_repo_access_is_ok" option to "true", as appropriate.', ) except (ValueError, OSError) as other_error: yield log_record(levelno=levelno, levelname=level_name, msg=str(message)) yield log_record(levelno=levelno, levelname=level_name, msg=str(other_error)) except: # noqa: E722, S110 # Raising above only as a means of determining the error type. Swallow the exception here # because we don't want the exception to propagate out of this function. pass def get_local_path(configs): ''' Arbitrarily return the local path from the first configuration dict. Default to "borg" if not set. 
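For example, given configs of {'/etc/borgmatic/config.yaml': {'local_path': '/usr/local/bin/borg'}}, this returns '/usr/local/bin/borg'.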
''' return next(iter(configs.values())).get('local_path', 'borg') def collect_highlander_action_summary_logs(configs, arguments, configuration_parse_errors): ''' Given a dict of configuration filename to corresponding parsed configuration, parsed command-line arguments as a dict from subparser name to a parsed namespace of arguments, and whether any configuration files encountered errors during parsing, run a highlander action specified in the arguments, if any, and yield a series of logging.LogRecord instances containing summary information. A highlander action is an action that cannot coexist with other actions on the borgmatic command-line, and borgmatic exits after processing such an action. ''' add_custom_log_levels() if 'bootstrap' in arguments: try: # No configuration file is needed for bootstrap. local_borg_version = borg_version.local_borg_version( {}, arguments['bootstrap'].local_path, ) except (OSError, CalledProcessError, ValueError) as error: yield from log_error_records('Error getting local Borg version', error) return try: borgmatic.actions.config.bootstrap.run_bootstrap( arguments['bootstrap'], arguments['global'], local_borg_version, ) yield logging.makeLogRecord( dict( levelno=logging.ANSWER, levelname='ANSWER', msg='Bootstrap successful', ), ) except ( CalledProcessError, ValueError, OSError, ) as error: yield from log_error_records(error) return if 'generate' in arguments: try: borgmatic.actions.config.generate.run_generate( arguments['generate'], arguments['global'], ) yield logging.makeLogRecord( dict( levelno=logging.ANSWER, levelname='ANSWER', msg='Generate successful', ), ) except ( CalledProcessError, ValueError, OSError, ) as error: yield from log_error_records(error) return if 'validate' in arguments: if configuration_parse_errors: yield logging.makeLogRecord( dict( levelno=logging.CRITICAL, levelname='CRITICAL', msg='Configuration validation failed', ), ) return try: borgmatic.actions.config.validate.run_validate(arguments['validate'], configs) yield logging.makeLogRecord( dict( levelno=logging.ANSWER, levelname='ANSWER', msg='All configuration files are valid', ), ) except ( CalledProcessError, ValueError, OSError, ) as error: yield from log_error_records(error) return def collect_configuration_run_summary_logs(configs, config_paths, arguments, log_file_path): # noqa: PLR0912 ''' Given a dict of configuration filename to corresponding parsed configuration, a sequence of loaded configuration paths, parsed command-line arguments as a dict from subparser name to a parsed namespace of arguments, and the path of a log file (if any), run each configuration file and yield a series of logging.LogRecord instances containing summary information about each run. As a side effect of running through these configuration files, output their JSON results, if any, to stdout. ''' # Run cross-file validation checks. 
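# For example, a command like "borgmatic list --repository backup.borg" is only valid if "backup.borg" matches a repository in at least one loaded configuration file.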
repository = None for action_arguments in arguments.values(): if hasattr(action_arguments, 'repository'): repository = action_arguments.repository break try: validate.guard_configuration_contains_repository(repository, configs) except ValueError as error: yield from log_error_records(str(error)) return if not configs: yield from log_error_records( f"{' '.join(arguments['global'].config_paths)}: No valid configuration files found", ) return try: seen_command_hooks = [] for config_filename, config in configs.items(): command_hooks = command.filter_hooks( tuple( command_hook for command_hook in config.get('commands', ()) if command_hook not in seen_command_hooks ), before='everything', action_names=arguments.keys(), ) if command_hooks: command.execute_hooks( command_hooks, config.get('umask'), borgmatic.config.paths.get_working_directory(config), arguments['global'].dry_run, configuration_filename=config_filename, log_file=log_file_path or '', ) seen_command_hooks += list(command_hooks) except (CalledProcessError, ValueError, OSError) as error: yield from log_error_records('Error running before everything hook', error) return # Execute the actions corresponding to each configuration file. json_results = [] encountered_error = False for config_filename, config in configs.items(): with Log_prefix(config_filename): results = list(run_configuration(config_filename, config, config_paths, arguments)) error_logs = tuple( result for result in results if isinstance(result, logging.LogRecord) ) if error_logs: encountered_error = True yield from log_error_records('An error occurred') yield from error_logs else: yield logging.makeLogRecord( dict( levelno=logging.INFO, levelname='INFO', msg=f'{config_filename}: Successfully ran configuration file', ), ) if results: json_results.extend(results) if 'umount' in arguments: logger.info(f"Unmounting mount point {arguments['umount'].mount_point}") try: borg_umount.unmount_archive( config, mount_point=arguments['umount'].mount_point, local_path=get_local_path(configs), ) except (CalledProcessError, OSError) as error: encountered_error = True yield from log_error_records('Error unmounting mount point', error) if json_results: sys.stdout.write(json.dumps(json_results)) try: seen_command_hooks = [] for config_filename, config in configs.items(): command_hooks = command.filter_hooks( tuple( command_hook for command_hook in config.get('commands', ()) if command_hook not in seen_command_hooks ), after='everything', action_names=arguments.keys(), state_names=['fail' if encountered_error else 'finish'], ) if command_hooks: command.execute_hooks( command_hooks, config.get('umask'), borgmatic.config.paths.get_working_directory(config), arguments['global'].dry_run, configuration_filename=config_filename, log_file=log_file_path or '', ) seen_command_hooks += list(command_hooks) except (CalledProcessError, ValueError, OSError) as error: yield from log_error_records('Error running after everything hook', error) def exit_with_help_link(): # pragma: no cover ''' Display a link to get help and exit with an error code. ''' logger.critical('') logger.critical('Need some help? https://torsion.org/borgmatic/#issues') sys.exit(1) def check_and_show_help_on_no_args(configs): ''' Given a dict of configuration filename to corresponding parsed configuration, check if the borgmatic command is run without any arguments. If the configuration option "default_actions" is set to False, show the help message. Otherwise, trigger the default backup behavior. 
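For example, if every loaded configuration file sets "default_actions: false", then a bare "borgmatic" invocation shows the help message and exits instead of running a backup.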
''' if len(sys.argv) == 1: # No arguments provided default_actions = any(config.get('default_actions', True) for config in configs.values()) if not default_actions: parse_arguments({}, '--help') sys.exit(0) def get_singular_option_value(configs, option_name): ''' Given a dict of configuration filename to corresponding parsed configuration, return the value of the given option from the configuration files or None if it's not set. Log and exit if there are conflicting values for the option across the configuration files. ''' distinct_values = { value for config in configs.values() for value in (config.get(option_name),) if value is not None } if len(distinct_values) > 1: configure_logging(logging.CRITICAL) joined_values = ', '.join(str(value) for value in distinct_values) logger.critical( f'The {option_name} option has conflicting values across configuration files: {joined_values}', ) exit_with_help_link() try: return next(iter(distinct_values)) except StopIteration: return None def main(extra_summary_logs=()): # pragma: no cover configure_signals() configure_delayed_logging() schema_filename = validate.schema_filename() try: schema = borgmatic.config.load.load_configuration(schema_filename) except (ruamel.yaml.error.YAMLError, RecursionError) as error: configure_logging(logging.CRITICAL) logger.critical(error) exit_with_help_link() try: arguments = parse_arguments(schema, *sys.argv[1:]) except ValueError as error: configure_logging(logging.CRITICAL) logger.critical(error) exit_with_help_link() except SystemExit as error: if error.code == 0: raise error configure_logging(logging.CRITICAL) logger.critical(f"Error parsing arguments: {' '.join(sys.argv)}") exit_with_help_link() global_arguments = arguments['global'] if global_arguments.version: print(importlib.metadata.version('borgmatic')) # noqa: T201 sys.exit(0) if global_arguments.bash_completion: print(borgmatic.commands.completion.bash.bash_completion()) # noqa: T201 sys.exit(0) if global_arguments.fish_completion: print(borgmatic.commands.completion.fish.fish_completion()) # noqa: T201 sys.exit(0) config_filenames = tuple(collect.collect_config_filenames(global_arguments.config_paths)) configs, config_paths, parse_logs = load_configurations( config_filenames, arguments, global_arguments.overrides, resolve_env=global_arguments.resolve_env and not arguments.get('validate'), ) # Use the helper function to check and show help on no arguments, passing the preloaded configs. check_and_show_help_on_no_args(configs) configuration_parse_errors = ( (max(log.levelno for log in parse_logs) >= logging.CRITICAL) if parse_logs else False ) any_json_flags = any( getattr(sub_arguments, 'json', False) for sub_arguments in arguments.values() ) log_file_path = get_singular_option_value(configs, 'log_file') try: configure_logging( verbosity_to_log_level(get_verbosity(configs, 'verbosity')), verbosity_to_log_level(get_verbosity(configs, 'syslog_verbosity')), verbosity_to_log_level(get_verbosity(configs, 'log_file_verbosity')), verbosity_to_log_level(get_verbosity(configs, 'monitoring_verbosity')), log_file_path, get_singular_option_value(configs, 'log_file_format'), color_enabled=should_do_markup(configs, any_json_flags), ) except (FileNotFoundError, PermissionError) as error: configure_logging(logging.CRITICAL) logger.critical(f'Error configuring logging: {error}') exit_with_help_link() summary_logs = ( list(extra_summary_logs) + parse_logs + ( list( collect_highlander_action_summary_logs( configs, arguments, configuration_parse_errors, ), ) or list(
collect_configuration_run_summary_logs( configs, config_paths, arguments, log_file_path, ), ) ) ) summary_logs_max_level = max(log.levelno for log in summary_logs) for message in ('', 'summary:'): log_record( levelno=summary_logs_max_level, levelname=logging.getLevelName(summary_logs_max_level), msg=message, ) for log in summary_logs: logger.handle(log) if summary_logs_max_level >= logging.CRITICAL: exit_with_help_link() borgmatic/borgmatic/commands/completion/000077500000000000000000000000001510202216200207355ustar00rootroot00000000000000borgmatic/borgmatic/commands/completion/__init__.py000066400000000000000000000000001510202216200230340ustar00rootroot00000000000000borgmatic/borgmatic/commands/completion/actions.py000066400000000000000000000025351510202216200227540ustar00rootroot00000000000000import borgmatic.commands.arguments def upgrade_message(language: str, upgrade_command: str, completion_file: str): return f''' Your {language} completions script is from a different version of borgmatic than is currently installed. Please upgrade your script so your completions match the command-line flags in your installed borgmatic! Try this to upgrade: {upgrade_command} source {completion_file} ''' def available_actions(subparsers, current_action=None): ''' Given subparsers as an argparse._SubParsersAction instance and a current action name (if any), return the actions names that can follow the current action on a command-line. This takes into account which sub-actions that the current action supports. For instance, if "bootstrap" is a sub-action for "config", then "bootstrap" should be able to follow a current action of "config" but not "list". ''' action_to_subactions = borgmatic.commands.arguments.get_subactions_for_actions( subparsers.choices, ) current_subactions = action_to_subactions.get(current_action) if current_subactions: return current_subactions all_subactions = { subaction for subactions in action_to_subactions.values() for subaction in subactions } return tuple(action for action in subparsers.choices if action not in all_subactions) borgmatic/borgmatic/commands/completion/bash.py000066400000000000000000000052351510202216200222310ustar00rootroot00000000000000import borgmatic.commands.arguments import borgmatic.commands.completion.actions import borgmatic.commands.completion.flag import borgmatic.config.validate def parser_flags(parser): ''' Given an argparse.ArgumentParser instance, return its argument flags in a space-separated string. ''' return ' '.join( flag_variant for action in parser._actions for flag_name in action.option_strings for flag_variant in borgmatic.commands.completion.flag.variants(flag_name) ) def bash_completion(): ''' Return a bash completion script for the borgmatic command. Produce this by introspecting borgmatic's command-line argument parsers. ''' ( _, action_parsers, global_plus_action_parser, ) = borgmatic.commands.arguments.make_parsers( schema=borgmatic.config.validate.load_schema(borgmatic.config.validate.schema_filename()), unparsed_arguments=(), ) global_flags = parser_flags(global_plus_action_parser) # Avert your eyes. 
# fmt: off return '\n'.join( ( 'check_version() {', ' local this_script="$(cat "$BASH_SOURCE" 2> /dev/null)"', ' local installed_script="$(borgmatic --bash-completion 2> /dev/null)"', ' if [ "$this_script" != "$installed_script" ] && [ "$installed_script" != "" ];' f''' then cat << EOF\n{borgmatic.commands.completion.actions.upgrade_message( 'bash', 'sudo sh -c "borgmatic --bash-completion > $BASH_SOURCE"', '$BASH_SOURCE', )}\nEOF''', ' fi', '}', 'complete_borgmatic() {', *tuple( ''' if [[ " ${COMP_WORDS[*]} " =~ " %s " ]]; then COMPREPLY=($(compgen -W "%s %s %s" -- "${COMP_WORDS[COMP_CWORD]}")) return 0 fi''' # noqa: UP031 % ( action, parser_flags(action_parser), ' '.join( borgmatic.commands.completion.actions.available_actions(action_parsers, action), ), global_flags, ) for action, action_parser in reversed(action_parsers.choices.items()) ), ' COMPREPLY=($(compgen -W "%s %s" -- "${COMP_WORDS[COMP_CWORD]}"))' # noqa: UP031 % ( ' '.join(borgmatic.commands.completion.actions.available_actions(action_parsers)), global_flags, ), ' (check_version &)', '}', '\ncomplete -o bashdefault -o default -F complete_borgmatic borgmatic', ), ) borgmatic/borgmatic/commands/completion/fish.py000066400000000000000000000154431510202216200222470ustar00rootroot00000000000000import shlex from argparse import Action from textwrap import dedent import borgmatic.commands.arguments import borgmatic.commands.completion.actions import borgmatic.config.validate def has_file_options(action: Action): ''' Given an argparse.Action instance, return True if it takes a file argument. ''' return action.metavar in { 'FILENAME', 'PATH', } or action.dest in {'config_paths'} def has_choice_options(action: Action): ''' Given an argparse.Action instance, return True if it takes one of a predefined set of arguments. ''' return action.choices is not None def has_unknown_required_param_options(action: Action): ''' A catch-all for options that take a required parameter, but we don't know what the parameter is. This should be used last. These are actions that take something like a glob, a list of numbers, or a string. Actions that match this pattern should not show the normal arguments, because those are unlikely to be valid. ''' return ( action.required is True or action.nargs in { '+', '*', } or action.metavar in {'PATTERN', 'KEYS', 'N'} or (action.type is not None and action.default is None) ) def has_exact_options(action: Action): return ( has_file_options(action) or has_choice_options(action) or has_unknown_required_param_options(action) ) def exact_options_completion(action: Action): ''' Given an argparse.Action instance, return a completion invocation that forces file completions, options completion, or just that some value follow the action, if the action takes such an argument and was the last action on the command line prior to the cursor. Otherwise, return an empty string. 
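For example (a hypothetical action), an action with option_strings of ['--path'] and a metavar of 'PATH' produces: complete -c borgmatic -Fr -n "__borgmatic_current_arg --path"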
''' if not has_exact_options(action): return '' args = ' '.join(action.option_strings) if has_file_options(action): return f'''\ncomplete -c borgmatic -Fr -n "__borgmatic_current_arg {args}"''' if has_choice_options(action): return f'''\ncomplete -c borgmatic -f -a '{' '.join(map(str, action.choices))}' -n "__borgmatic_current_arg {args}"''' if has_unknown_required_param_options(action): return f'''\ncomplete -c borgmatic -x -n "__borgmatic_current_arg {args}"''' raise ValueError( f'Unexpected action: {action} passes has_exact_options but has no choices produced', ) def dedent_strip_as_tuple(string: str): ''' Dedent a string, then strip it to avoid requiring your first line to have content, then return a tuple of the string. Makes it easier to write multiline strings for completions when you join them with a tuple. ''' return (dedent(string).strip('\n'),) def fish_completion(): ''' Return a fish completion script for the borgmatic command. Produce this by introspecting borgmatic's command-line argument parsers. ''' ( _, action_parsers, global_plus_action_parser, ) = borgmatic.commands.arguments.make_parsers( schema=borgmatic.config.validate.load_schema(borgmatic.config.validate.schema_filename()), unparsed_arguments=(), ) all_action_parsers = ' '.join(action for action in action_parsers.choices) exact_option_args = tuple( ' '.join(action.option_strings) for action_parser in action_parsers.choices.values() for action in action_parser._actions if has_exact_options(action) ) + tuple( ' '.join(action.option_strings) for action in global_plus_action_parser._actions if len(action.option_strings) > 0 if has_exact_options(action) ) # Avert your eyes. # fmt: off return '\n'.join(( *dedent_strip_as_tuple( f''' function __borgmatic_check_version set -fx this_filename (status current-filename) fish -c ' if test -f "$this_filename" set this_script (cat $this_filename 2> /dev/null) set installed_script (borgmatic --fish-completion 2> /dev/null) if [ "$this_script" != "$installed_script" ] && [ "$installed_script" != "" ] echo "{borgmatic.commands.completion.actions.upgrade_message( 'fish', 'borgmatic --fish-completion | sudo tee $this_filename', '$this_filename', )}" end end ' & end __borgmatic_check_version function __borgmatic_current_arg --description 'Check if any of the given arguments are the last on the command line before the cursor' set -l all_args (commandline -poc) # premature optimization to avoid iterating all args if there aren't enough # to have a last arg beyond borgmatic if [ (count $all_args) -lt 2 ] return 1 end for arg in $argv if [ "$arg" = "$all_args[-1]" ] return 0 end end return 1 end set --local action_parser_condition "not __fish_seen_subcommand_from {all_action_parsers}" set --local exact_option_condition "not __borgmatic_current_arg {' '.join(exact_option_args)}" ''', ), '\n# action_parser completions', *tuple( f'''complete -c borgmatic -f -n "$action_parser_condition" -n "$exact_option_condition" -a '{action_name}' -d {shlex.quote(action_parser.description)}''' for action_name, action_parser in action_parsers.choices.items() ), '\n# global flags', *tuple( # -n is checked in order, so put faster / more likely to be true checks first f'''complete -c borgmatic -f -n "$exact_option_condition" -a '{' '.join(action.option_strings)}' -d {shlex.quote(action.help)}{exact_options_completion(action)}''' for action in global_plus_action_parser._actions # ignore the noargs action, as this is an impossible completion for fish if len(action.option_strings) > 0 if 'Deprecated' not in 
action.help ), '\n# action_parser flags', *tuple( f'''complete -c borgmatic -f -n "$exact_option_condition" -a '{' '.join(action.option_strings)}' -d {shlex.quote(action.help)} -n "__fish_seen_subcommand_from {action_name}"{exact_options_completion(action)}''' for action_name, action_parser in action_parsers.choices.items() for action in action_parser._actions if 'Deprecated' not in (action.help or ()) ), )) borgmatic/borgmatic/commands/completion/flag.py000066400000000000000000000006321510202216200222210ustar00rootroot00000000000000def variants(flag_name): ''' Given a flag name as a string, yield it and any variations that should be complete-able as well. For instance, for a string like "--foo[0].bar", yield "--foo[0].bar", "--foo[1].bar", ..., "--foo[9].bar". ''' if '[0]' in flag_name: for index in range(10): yield flag_name.replace('[0]', f'[{index}]') return yield flag_name borgmatic/borgmatic/commands/generate_config.py000066400000000000000000000007511510202216200222600ustar00rootroot00000000000000import logging import sys import borgmatic.commands.borgmatic def main(): warning_log = logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg='generate-borgmatic-config is deprecated and will be removed from a future release. Please use "borgmatic config generate" instead.', ), ) sys.argv = ['borgmatic', 'config', 'generate', *sys.argv[1:]] borgmatic.commands.borgmatic.main([warning_log]) borgmatic/borgmatic/commands/validate_config.py000066400000000000000000000007511510202216200222570ustar00rootroot00000000000000import logging import sys import borgmatic.commands.borgmatic def main(): warning_log = logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg='validate-borgmatic-config is deprecated and will be removed from a future release. Please use "borgmatic config validate" instead.', ), ) sys.argv = ['borgmatic', 'config', 'validate', *sys.argv[1:]] borgmatic.commands.borgmatic.main([warning_log]) borgmatic/borgmatic/config/000077500000000000000000000000001510202216200162305ustar00rootroot00000000000000borgmatic/borgmatic/config/__init__.py000066400000000000000000000000001510202216200203270ustar00rootroot00000000000000borgmatic/borgmatic/config/arguments.py000066400000000000000000000132251510202216200206120ustar00rootroot00000000000000import io import re import ruamel.yaml import borgmatic.config.schema LIST_INDEX_KEY_PATTERN = re.compile(r'^(?P<list_name>[a-zA-Z_-]+)\[(?P<index>\d+)\]$') def set_values(config, keys, value): ''' Given a configuration dict, a sequence of parsed key strings, and a string value, descend into the configuration hierarchy based on the given keys and set the value into the right place. For example, consider these keys: ('foo', 'bar', 'baz') This looks up "foo" in the given configuration dict. And within that, it looks up "bar". And then within that, it looks up "baz" and sets it to the given value. Another example: ('mylist[0]', 'foo') This looks for the zeroth element of "mylist" in the given configuration. And within that, it looks up "foo" and sets it to the given value. ''' if not keys: return first_key = keys[0] # Support "mylist[0]" list index syntax.
match = LIST_INDEX_KEY_PATTERN.match(first_key) if match: list_key = match.group('list_name') list_index = int(match.group('index')) try: if len(keys) == 1: config[list_key][list_index] = value return if list_key not in config: config[list_key] = [] set_values(config[list_key][list_index], keys[1:], value) except (IndexError, KeyError): raise ValueError(f'Argument list index {first_key} is out of range') return if len(keys) == 1: config[first_key] = value return if first_key not in config: config[first_key] = {} set_values(config[first_key], keys[1:], value) def type_for_option(schema, option_keys): ''' Given a configuration schema dict and a sequence of keys identifying a potentially nested option, e.g. ('extra_borg_options', 'create'), return the schema type of that option as a string. Return None if the option or its type cannot be found in the schema. ''' option_schema = schema for key in option_keys: # Support "name[0]"-style list index syntax. match = LIST_INDEX_KEY_PATTERN.match(key) properties = borgmatic.config.schema.get_properties(option_schema) try: if match: option_schema = properties[match.group('list_name')]['items'] else: option_schema = properties[key] except KeyError: return None try: return option_schema['type'] except KeyError: return None def convert_value_type(value, option_type): ''' Given a string value and its schema type as a string, determine its logical type (string, boolean, integer, etc.), and return it converted to that type. If the destination option type is a string, then leave the value as-is so that special characters in it don't get interpreted as YAML during conversion. And if the source value isn't a string, return it as-is. Raise ruamel.yaml.error.YAMLError if there's a parse issue with the YAML. Raise ValueError if the parsed value doesn't match the option type. ''' if not isinstance(value, str): return value if option_type == 'string': return value try: parsed_value = ruamel.yaml.YAML(typ='safe').load(io.StringIO(value)) except ruamel.yaml.error.YAMLError as error: raise ValueError(f'Argument value "{value}" is invalid: {error.problem}') if not isinstance(parsed_value, borgmatic.config.schema.parse_type(option_type)): raise ValueError(f'Argument value "{value}" is not of the expected type: {option_type}') return parsed_value def prepare_arguments_for_config(global_arguments, schema): ''' Given global arguments as an argparse.Namespace and a configuration schema dict, parse each argument that corresponds to an option in the schema and return a sequence of tuples (keys, values) for that option, where keys is a sequence of strings. For instance, given the following arguments: argparse.Namespace(**{'my_option.sub_option': 'value1', 'other_option': 'value2'}) ... return this: ( (('my_option', 'sub_option'), 'value1'), (('other_option',), 'value2'), ) ''' prepared_values = [] for argument_name, value in global_arguments.__dict__.items(): if value is None: continue keys = tuple(argument_name.split('.')) option_type = type_for_option(schema, keys) # The argument doesn't correspond to any option in the schema, or it is a complex argument, so ignore it. # It's probably a flag that borgmatic has on the command-line but not in configuration. 
if option_type in {'object', None}: continue prepared_values.append( ( keys, convert_value_type(value, option_type), ), ) return tuple(prepared_values) def apply_arguments_to_config(config, schema, arguments): ''' Given a configuration dict, a corresponding configuration schema dict, and arguments as a dict from action name to argparse.Namespace, set those given argument values into their corresponding configuration options in the configuration dict. This supports argument flags of the form "--foo.bar.baz" where each dotted component is a nested configuration object. Additionally, flags like "--foo.bar[0].baz" are supported to update a list element in the configuration. ''' for action_arguments in arguments.values(): for keys, value in prepare_arguments_for_config(action_arguments, schema): set_values(config, keys, value) borgmatic/borgmatic/config/checks.py000066400000000000000000000005021510202216200200370ustar00rootroot00000000000000def repository_enabled_for_checks(repository, config): ''' Given a repository name and a configuration dict, return whether the repository is enabled to have consistency checks run. ''' if not config.get('check_repositories'): return True return repository in config['check_repositories'] borgmatic/borgmatic/config/collect.py000066400000000000000000000042121510202216200202260ustar00rootroot00000000000000import os def get_default_config_paths(expand_home=True): ''' Based on the value of the XDG_CONFIG_HOME and HOME environment variables, return a list of default configuration paths. This includes both system-wide configuration and configuration in the current user's home directory. Don't expand the home directory ($HOME) if the expand home flag is False. ''' user_config_directory = os.getenv('XDG_CONFIG_HOME') or os.path.join('$HOME', '.config') if expand_home: user_config_directory = os.path.expandvars(user_config_directory) return [ '/etc/borgmatic/config.yaml', '/etc/borgmatic.d', os.path.join(user_config_directory, 'borgmatic/config.yaml'), os.path.join(user_config_directory, 'borgmatic.d'), ] def collect_config_filenames(config_paths): ''' Given a sequence of config paths, both filenames and directories, resolve that to an iterable of absolute file paths. Accomplish this by listing any given directories, looking for contained config files (ending with the ".yaml" or ".yml" extension). This is non-recursive, so any directories within the given directories are ignored. Return paths even if they don't exist on disk, so the user can find out about missing configuration paths. However, skip a default config path if it's missing, so the user doesn't have to create a default config path unless they need it. ''' real_default_config_paths = set(map(os.path.realpath, get_default_config_paths())) for path in config_paths: exists = os.path.exists(path) if os.path.realpath(path) in real_default_config_paths and not exists: continue if not os.path.isdir(path) or not exists: yield os.path.abspath(path) continue if not os.access(path, os.R_OK): continue for filename in sorted(os.listdir(path)): full_filename = os.path.join(path, filename) matching_filetype = full_filename.endswith(('.yaml', '.yml')) if matching_filetype and not os.path.isdir(full_filename): yield os.path.abspath(full_filename) borgmatic/borgmatic/config/constants.py000066400000000000000000000043541510202216200206240ustar00rootroot00000000000000import contextlib import shlex def coerce_scalar(value): ''' Given a configuration value, coerce it to an integer or a boolean as appropriate and return the result.
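For example, coerce_scalar('5') returns the integer 5, coerce_scalar('true') returns True, and coerce_scalar('hello') returns 'hello' unchanged.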
''' with contextlib.suppress(TypeError, ValueError): return int(value) try: return { 'true': True, 'True': True, 'false': False, 'False': False, }.get(value, value) except TypeError: # e.g. for an unhashable type return value def apply_constants(value, constants, shell_escape=False): ''' Given a configuration value (bool, dict, int, list, or string) and a dict of named constants, replace any configuration string values of the form "{constant}" (or containing it) with the value of the correspondingly named key from the constants. Recurse as necessary into nested configuration to find values to replace. For instance, if a configuration value contains "{foo}", replace it with the value of the "foo" key found within the configuration's "constants". If shell escape is True, then escape the constant's value before applying it. Return the configuration value and modify the original. ''' if not value or not constants: return value if isinstance(value, str): for constant_name, constant_value in constants.items(): value = value.replace( '{' + constant_name + '}', shlex.quote(str(constant_value)) if shell_escape else str(constant_value), ) # Support constants within non-string scalars by coercing the value to its appropriate type. value = coerce_scalar(value) elif isinstance(value, list): for index, list_value in enumerate(value): value[index] = apply_constants(list_value, constants, shell_escape) elif isinstance(value, dict): for option_name, option_value in value.items(): value[option_name] = apply_constants( option_value, constants, shell_escape=( shell_escape or option_name.startswith(('before_', 'after_')) or option_name == 'on_error' ), ) return value borgmatic/borgmatic/config/environment.py000066400000000000000000000030461510202216200211510ustar00rootroot00000000000000import os import re VARIABLE_PATTERN = re.compile( r'(?P<escape>\\)?(?P<variable>\$\{(?P<name>[A-Za-z0-9_]+)((:?-)(?P<default>[^}]+))?\})', ) def resolve_string(matcher): ''' Given a matcher containing a name and an optional default value, get the value from the environment. Raise ValueError if the variable is not defined in the environment and no default value is provided. ''' if matcher.group('escape') is not None: # In the case of an escaped environment variable, unescape it. return matcher.group('variable') # Resolve the environment variable. name, default = matcher.group('name'), matcher.group('default') out = os.getenv(name, default=default) if out is None: raise ValueError(f'Cannot find variable {name} in environment') return out def resolve_env_variables(item): ''' Resolves variables like ${FOO} from the given configuration with values from the process environment. Supported formats: * ${FOO} will return the FOO env variable * ${FOO-bar} or ${FOO:-bar} will return the FOO env variable if it exists, else "bar" Raise ValueError if any variable is missing from the environment and no default value is provided.
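For example, with FOO=bar in the environment, resolve_env_variables('${FOO}/baz') returns 'bar/baz', resolve_env_variables('${MISSING:-fallback}') returns 'fallback', and an escaped '\${FOO}' passes through as '${FOO}' unresolved.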
''' if isinstance(item, str): return VARIABLE_PATTERN.sub(resolve_string, item) if isinstance(item, list): for index, subitem in enumerate(item): item[index] = resolve_env_variables(subitem) if isinstance(item, dict): for key, value in item.items(): item[key] = resolve_env_variables(value) return item borgmatic/borgmatic/config/generate.py000066400000000000000000000335611510202216200204040ustar00rootroot00000000000000import collections import contextlib import io import os import re import ruamel.yaml import borgmatic.config.schema from borgmatic.config import load, normalize INDENT = 4 SEQUENCE_INDENT = 2 def insert_newline_before_comment(config, field_name): ''' Using some ruamel.yaml black magic, insert a blank line in the config right before the given field and its comments. ''' config.ca.items[field_name][1].insert( 0, ruamel.yaml.tokens.CommentToken('\n', ruamel.yaml.error.CommentMark(0), None), ) SCALAR_SCHEMA_TYPES = {'string', 'boolean', 'integer', 'number'} def schema_to_sample_configuration(schema, source_config=None, level=0, parent_is_sequence=False): ''' Given a loaded configuration schema and a source configuration, generate and return sample config for the schema. Include comments for each option based on the schema "description". If a source config is given, walk it alongside the given schema so that both can be taken into account when commenting out particular options in add_comments_to_configuration_object(). ''' schema_type = schema.get('type') example = schema.get('example') if borgmatic.config.schema.compare_types(schema_type, {'array'}): config = ruamel.yaml.comments.CommentedSeq( example if borgmatic.config.schema.compare_types( schema['items'].get('type'), SCALAR_SCHEMA_TYPES, ) else [ schema_to_sample_configuration( schema['items'], source_config, level, parent_is_sequence=True, ), ], ) add_comments_to_configuration_sequence(config, schema, indent=(level * INDENT)) elif borgmatic.config.schema.compare_types(schema_type, {'object'}): if source_config and isinstance(source_config, list) and isinstance(source_config[0], dict): source_config = source_config[0] config = ( ruamel.yaml.comments.CommentedMap( [ ( field_name, schema_to_sample_configuration( sub_schema, (source_config or {}).get(field_name, {}), level + 1, ), ) for field_name, sub_schema in borgmatic.config.schema.get_properties( schema, ).items() ], ) or example ) indent = (level * INDENT) + (SEQUENCE_INDENT if parent_is_sequence else 0) add_comments_to_configuration_object( config, schema, source_config, indent=indent, skip_first_field=parent_is_sequence, ) elif borgmatic.config.schema.compare_types(schema_type, SCALAR_SCHEMA_TYPES, match=all): return example else: raise ValueError(f'Schema at level {level} is unsupported: {schema}') return config def comment_out_line(line): # If it's already commented out (or empty), there's nothing further to do! stripped_line = line.lstrip() if not stripped_line or stripped_line.startswith('#'): return line # Comment out the names of optional options, inserting the '#' after any indent for aesthetics. matches = re.match(r'(\s*)', line) indent_spaces = matches.group(0) if matches else '' count_indent_spaces = len(indent_spaces) return '# '.join((indent_spaces, line[count_indent_spaces:])) def transform_optional_configuration(rendered_config, comment_out=True): ''' Post-process a rendered configuration string to comment out optional key/values, as determined by a sentinel in the comment before each key.
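For example, a rendered option preceded by the sentinel, like "# COMMENT_OUT" followed by "log_file: /var/log/borgmatic.log", comes out as "# log_file: /var/log/borgmatic.log" with the sentinel line itself dropped.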
The idea is that the pre-commented configuration prevents the user from having to comment out a bunch of configuration they don't care about to get to a minimal viable configuration file. Ideally ruamel.yaml would support commenting out keys during configuration generation, but it's not terribly easy to accomplish that way. If comment_out is False, then just strip the comment sentinel without actually commenting anything out. ''' lines = [] optional = False indent_characters = None indent_characters_at_sentinel = None for line in rendered_config.split('\n'): indent_characters = len(line) - len(line.lstrip()) # Upon encountering an optional configuration option, comment out lines until the next blank # line. if line.strip().startswith(f'# {COMMENTED_OUT_SENTINEL}'): if comment_out is False: continue optional = True indent_characters_at_sentinel = indent_characters continue # Hit a blank line or dedented, so reset commenting. if not line.strip() or ( indent_characters_at_sentinel is not None and indent_characters < indent_characters_at_sentinel ): optional = False indent_characters_at_sentinel = None lines.append(comment_out_line(line) if optional else line) return '\n'.join(lines) def render_configuration(config): ''' Given a config data structure of nested OrderedDicts, render the config as YAML and return it. ''' dumper = ruamel.yaml.YAML(typ='rt') dumper.indent(mapping=INDENT, sequence=INDENT + SEQUENCE_INDENT, offset=INDENT) rendered = io.StringIO() dumper.dump(config, rendered) return rendered.getvalue() def write_configuration(config_filename, rendered_config, mode=0o600, overwrite=False): ''' Given a target config filename and rendered config YAML, write it out to file. Create any containing directories as needed. But if the file already exists and overwrite is False, abort before writing anything. ''' if not overwrite and os.path.exists(config_filename): raise FileExistsError( f'{config_filename} already exists. Aborting. Use --overwrite to replace the file.', ) with contextlib.suppress(FileExistsError, FileNotFoundError): os.makedirs(os.path.dirname(config_filename), mode=0o700) with open(config_filename, 'w', encoding='utf-8') as config_file: config_file.write(rendered_config) os.chmod(config_filename, mode) def add_comments_to_configuration_sequence(config, schema, indent=0): ''' If the given config sequence's items are of type "object", then mine the schema for the description of the object's first item, and slap that atop the sequence. Indent the comment the given number of characters. Doing this for sequences of maps results in nice comments that look like: ``` things: # First key description. Added by this function. - key: foo # Second key description. Added by add_comments_to_configuration_object(). other: bar ``` ''' if schema['items'].get('type') != 'object': return for field_name in config[0]: field_schema = borgmatic.config.schema.get_properties(schema['items']).get(field_name, {}) description = field_schema.get('description') # No description to use? Skip it. if not field_schema or not description: return config[0].yaml_set_start_comment(description, indent=indent) # We only want the first key's description here, as the rest of the keys get commented by # add_comments_to_configuration_object().
return DEFAULT_KEYS = {'source_directories', 'repositories', 'keep_daily'} COMMENTED_OUT_SENTINEL = 'COMMENT_OUT' def add_comments_to_configuration_object( config, schema, source_config=None, indent=0, skip_first_field=False, ): ''' Using descriptions from a schema as a source, add those descriptions as comments to the given configuration dict, putting them before each field. Indent the comment the given number of characters. If skip_first_field is True, omit the comment for the initial field. This is useful for sequences, where the comment for the first field goes before the sequence itself. Also add a sentinel for commenting out options that are neither in DEFAULT_KEYS nor in the given source configuration dict. The idea is that any options used in the source configuration should stay active in the generated configuration. ''' for index, field_name in enumerate(config.keys()): if skip_first_field and index == 0: continue field_schema = borgmatic.config.schema.get_properties(schema).get(field_name, {}) description = field_schema.get('description', '').strip() # If this isn't a default key, add an indicator to the comment, flagging it to be commented # out from the sample configuration. This sentinel is consumed by downstream processing that # does the actual commenting out. if field_name not in DEFAULT_KEYS and ( source_config is None or field_name not in source_config ): description = ( f'{description}\n{COMMENTED_OUT_SENTINEL}' if description else COMMENTED_OUT_SENTINEL ) # No description to use? Skip it. if not field_schema or not description: # pragma: no cover continue config.yaml_set_comment_before_after_key(key=field_name, before=description, indent=indent) if index > 0: insert_newline_before_comment(config, field_name) RUAMEL_YAML_COMMENTS_INDEX = 1 def merge_source_configuration_into_destination(destination_config, source_config): ''' Deep merge the given source configuration dict into the destination configuration CommentedMap, favoring values from the source when there are collisions. The purpose of this is to upgrade configuration files from old versions of borgmatic by adding new configuration keys and comments. ''' if not source_config: return destination_config if not destination_config or not isinstance(source_config, collections.abc.Mapping): return source_config for field_name, source_value in source_config.items(): # This is a mapping. Recurse for this key/value. if isinstance(source_value, collections.abc.Mapping): destination_config[field_name] = merge_source_configuration_into_destination( destination_config[field_name], source_value, ) continue # This is a sequence. Recurse for each item in it. if isinstance(source_value, collections.abc.Sequence) and not isinstance(source_value, str): destination_value = destination_config[field_name] destination_config[field_name] = ruamel.yaml.comments.CommentedSeq( [ merge_source_configuration_into_destination( destination_value[index] if index < len(destination_value) else None, source_item, ) for index, source_item in enumerate(source_value) ], ) continue # This is some sort of scalar. Set it into the destination.
destination_config[field_name] = source_value return destination_config def generate_sample_configuration( dry_run, source_filename, destination_path, schema_filename, overwrite=False, split=False, ): ''' Given an optional source configuration filename, a required destination configuration path, the path to a schema filename in a YAML rendition of the JSON Schema format, whether to overwrite a destination file, and whether to split the configuration into multiple files (one per option) in the assumed destination directory, write out sample configuration file(s) based on that schema. If a source filename is provided, merge the parsed contents of that configuration into the generated configuration. ''' schema = ruamel.yaml.YAML(typ='safe').load(open(schema_filename, encoding='utf-8')) source_config = None if source_filename: source_config = load.load_configuration(source_filename) normalize.normalize(source_filename, source_config) # The borgmatic.config.normalize.normalize() function tacks on an empty "bootstrap" if # needed, so the hook gets used by default. But we don't want it to end up in the generated # config unless the user has set it explicitly, as an empty "bootstrap:" won't validate. if source_config and source_config.get('bootstrap') == {}: del source_config['bootstrap'] destination_config = merge_source_configuration_into_destination( schema_to_sample_configuration(schema, source_config), source_config, ) if dry_run: return if split: if os.path.exists(destination_path) and not os.path.isdir(destination_path): raise ValueError('With the --split flag, the destination path must be a directory') os.makedirs(destination_path, exist_ok=True) for option_name, option_config in destination_config.items(): write_configuration( os.path.join(destination_path, f'{option_name}.yaml'), transform_optional_configuration( render_configuration({option_name: option_config}), comment_out=False, ), overwrite=overwrite, ) return if os.path.exists(destination_path) and not os.path.isfile(destination_path): raise ValueError('Without the --split flag, the destination path must be a file') write_configuration( destination_path, transform_optional_configuration( render_configuration(destination_config), comment_out=True ), overwrite=overwrite, ) borgmatic/borgmatic/config/load.py000066400000000000000000000363531510202216200175330ustar00rootroot00000000000000import functools import itertools import logging import operator import os import ruamel.yaml logger = logging.getLogger(__name__) def probe_and_include_file(filename, include_directories, config_paths): ''' Given a filename to include, a list of include directories to search for matching files, and a set of configuration paths, probe for the file, load it, and return the loaded configuration as a data structure of nested dicts, lists, etc. Add the filename to the given configuration paths. Raise FileNotFoundError if the included file was not found. 
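For example (hypothetical paths), probe_and_include_file('common.yaml', ['/working/dir', '/etc/borgmatic.d'], config_paths) loads whichever of /working/dir/common.yaml or /etc/borgmatic.d/common.yaml exists.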
''' expanded_filename = os.path.expanduser(filename) if os.path.isabs(expanded_filename): return load_configuration(expanded_filename, config_paths) candidate_filenames = { os.path.join(directory, expanded_filename) for directory in include_directories } for candidate_filename in candidate_filenames: if os.path.exists(candidate_filename): return load_configuration(candidate_filename, config_paths) raise FileNotFoundError( f'Could not find include {filename} at {" or ".join(candidate_filenames)}', ) def include_configuration(loader, filename_node, include_directory, config_paths): ''' Given a ruamel.yaml.loader.Loader, a ruamel.yaml.nodes.ScalarNode containing the included filename (or a list containing multiple such filenames), an include directory path to search for matching files, and a set of configuration paths, load the given YAML filenames (ignoring the given loader so we can use our own) and return their contents as a data structure of nested dicts, lists, etc. Add the names of included files to the given configuration paths. If the given filename node's value is a scalar string, then the return value will be a single value. But if the given node value is a list, then the return value will be a list of values, one per loaded configuration file. If a filename is relative, probe for it within: 1. the current working directory and 2. the given include directory. Raise FileNotFoundError if an included file was not found. ''' include_directories = [os.getcwd(), os.path.abspath(include_directory)] if isinstance(filename_node.value, str): return probe_and_include_file(filename_node.value, include_directories, config_paths) if ( isinstance(filename_node.value, list) and len(filename_node.value) and isinstance(filename_node.value[0], ruamel.yaml.nodes.ScalarNode) ): # Reversing the values ensures the correct ordering if these includes are subsequently # merged together. return [ probe_and_include_file(node.value, include_directories, config_paths) for node in reversed(filename_node.value) ] raise ValueError( 'The value given for the !include tag is invalid; use a single filename or a list of filenames instead', ) def raise_retain_node_error(loader, node): ''' Given a ruamel.yaml.loader.Loader and a YAML node, raise an error about "!retain" usage. Raise ValueError if a mapping or sequence node is given, as that indicates that "!retain" was used in a configuration file without a merge. In configuration files with a merge, mapping and sequence nodes with "!retain" tags are handled by deep_merge_nodes() below. Also raise ValueError if a scalar node is given, as "!retain" is not supported on scalar nodes. ''' if isinstance(node, (ruamel.yaml.nodes.MappingNode, ruamel.yaml.nodes.SequenceNode)): raise ValueError( 'The !retain tag may only be used within a configuration file containing a merged !include tag.', ) raise ValueError('The !retain tag may only be used on a mapping or list.') def raise_omit_node_error(loader, node): ''' Given a ruamel.yaml.loader.Loader and a YAML node, raise an error about "!omit" usage. Raise ValueError unconditionally, as an "!omit" node here indicates it was used in a configuration file without a merge. In configuration files with a merge, nodes with "!omit" tags are handled by deep_merge_nodes() below.
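    For reference, valid "!omit" usage within a file that merges an include looks something
    like this (a sketch; the filename and paths are hypothetical):

        <<: !include common.yaml
        source_directories:
            - !omit /home
            - /var/www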
''' raise ValueError( 'The !omit tag may only be used on a scalar (e.g., string) or list element within a configuration file containing a merged !include tag.', ) class Include_constructor(ruamel.yaml.SafeConstructor): ''' A YAML "constructor" (a ruamel.yaml concept) that supports a custom "!include" tag for including separate YAML configuration files. Example syntax: `option: !include common.yaml` ''' def __init__( self, preserve_quotes=None, loader=None, include_directory=None, config_paths=None, ): super().__init__(preserve_quotes, loader) self.add_constructor( '!include', functools.partial( include_configuration, include_directory=include_directory, config_paths=config_paths, ), ) # These are catch-all error handlers for tags that don't get applied and removed by # deep_merge_nodes() below. self.add_constructor('!retain', raise_retain_node_error) self.add_constructor('!omit', raise_omit_node_error) def flatten_mapping(self, node): ''' Support the special case of deep merging included configuration into an existing mapping using the YAML '<<' merge key. Example syntax: ``` option: sub_option: 1 <<: !include common.yaml ``` These includes are deep merged into the current configuration file. For instance, in this example, any "option" with sub-options in common.yaml will get merged into the corresponding "option" with sub-options in the example configuration file. ''' representer = ruamel.yaml.representer.SafeRepresenter() for index, (key_node, value_node) in enumerate(node.value): if key_node.tag == 'tag:yaml.org,2002:merge' and value_node.tag == '!include': # Replace the merge include with a sequence of included configuration nodes ready # for merging. The construct_object() call here triggers include_configuration() # among other constructors. node.value[index] = ( key_node, representer.represent_data(self.construct_object(value_node)), ) # This super().flatten_mapping() call actually performs "<<" merges. super().flatten_mapping(node) node.value = deep_merge_nodes(node.value) def load_configuration(filename, config_paths=None): ''' Load the given configuration file and return its contents as a data structure of nested dicts and lists. Add the filename to the given configuration paths set, and also add any included configuration filenames. Raise ruamel.yaml.error.YAMLError if something goes wrong parsing the YAML, or RecursionError if there are too many recursive includes. ''' if config_paths is None: config_paths = set() # Use an embedded derived class for the include constructor so as to capture the include # directory and configuration paths values. (functools.partial doesn't work for this use case # because yaml.Constructor has to be an actual class.) class Include_constructor_with_extras(Include_constructor): def __init__(self, preserve_quotes=None, loader=None): super().__init__( preserve_quotes, loader, include_directory=os.path.dirname(filename), config_paths=config_paths, ) yaml = ruamel.yaml.YAML(typ='safe') yaml.Constructor = Include_constructor_with_extras config_paths.add(filename) with open(filename, encoding='utf-8') as file: return yaml.load(file.read()) def filter_omitted_nodes(nodes, values): ''' Given a nested borgmatic configuration data structure as a list of tuples in the form of: [ ( ruamel.yaml.nodes.ScalarNode as a key, ruamel.yaml.nodes.MappingNode or other Node as a value, ), ... ] ... and a combined list of all values for those nodes, return a filtered list of the values, omitting any that have an "!omit" tag (or with a value matching such nodes). 
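    For instance (hypothetical values), if one of the given value nodes is tagged "!omit" and
    holds the value "/home", then every value node whose value is "/home" gets dropped from the
    returned list.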
But if only a single node is given, bail and return the given values unfiltered, as "!omit" only applies when there are merge includes (and therefore multiple nodes). ''' if len(nodes) <= 1: return values omitted_values = tuple(node.value for node in values if node.tag == '!omit') return [node for node in values if node.value not in omitted_values] def merge_values(nodes): ''' Given a nested borgmatic configuration data structure as a list of tuples in the form of: [ ( ruamel.yaml.nodes.ScalarNode as a key, ruamel.yaml.nodes.MappingNode or other Node as a value, ), ... ] ... merge its sequence or mapping node values and return the result. For sequence nodes, this means appending together its contained lists. For mapping nodes, it means merging its contained dicts. ''' return functools.reduce(operator.add, (value.value for key, value in nodes)) def deep_merge_nodes(nodes): ''' Given a nested borgmatic configuration data structure as a list of tuples in the form of: [ ( ruamel.yaml.nodes.ScalarNode as a key, ruamel.yaml.nodes.MappingNode or other Node as a value, ), ... ] ... deep merge any node values corresponding to duplicate keys and return the result. The purpose of merging like this is to support, for instance, merging one borgmatic configuration file into another for reuse, such that a configuration option with sub-options does not completely replace the corresponding option in a merged file. If there are colliding keys with scalar values (e.g., integers or strings), the last of the values wins. For instance, given node values of: [ ( ScalarNode(tag='tag:yaml.org,2002:str', value='option'), MappingNode(tag='tag:yaml.org,2002:map', value=[ ( ScalarNode(tag='tag:yaml.org,2002:str', value='sub_option1'), ScalarNode(tag='tag:yaml.org,2002:int', value='1') ), ( ScalarNode(tag='tag:yaml.org,2002:str', value='sub_option2'), ScalarNode(tag='tag:yaml.org,2002:int', value='2') ), ]), ), ( ScalarNode(tag='tag:yaml.org,2002:str', value='option'), MappingNode(tag='tag:yaml.org,2002:map', value=[ ( ScalarNode(tag='tag:yaml.org,2002:str', value='sub_option2'), ScalarNode(tag='tag:yaml.org,2002:int', value='5') ), ]), ), ] ... the returned result would be: [ ( ScalarNode(tag='tag:yaml.org,2002:str', value='option'), MappingNode(tag='tag:yaml.org,2002:map', value=[ ( ScalarNode(tag='tag:yaml.org,2002:str', value='sub_option1'), ScalarNode(tag='tag:yaml.org,2002:int', value='1') ), ( ScalarNode(tag='tag:yaml.org,2002:str', value='sub_option2'), ScalarNode(tag='tag:yaml.org,2002:int', value='5') ), ]), ), ] This function supports multi-way merging, meaning that if the same option name exists three or more times (at the same scope level), all of those instances get merged together. If a mapping or sequence node has a YAML "!retain" tag, then that node is not merged. Raise ValueError if a merge is implied using multiple incompatible types. ''' merged_nodes = [] def get_node_key_name(node): return node[0].value # Bucket the nodes by their keys. Then merge all of the values sharing the same key. for key_name, grouped_nodes in itertools.groupby( sorted(nodes, key=get_node_key_name), get_node_key_name, ): grouped_nodes = list(grouped_nodes) # noqa: PLW2901 # The merged node inherits its attributes from the final node in the group. 
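        # For example, when three merged configuration files all define "option:" at this scope,
        # grouped_nodes holds three (key, value) pairs here, and the comments and source
        # positions of the last pair carry over to the merged result.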
(last_node_key, last_node_value) = grouped_nodes[-1] value_types = {type(value) for (_, value) in grouped_nodes} if len(value_types) > 1: raise ValueError( f'Incompatible types found when trying to merge "{key_name}:" values across configuration files: {", ".join(value_type.id for value_type in value_types)}', ) # If we're dealing with MappingNodes, recurse and merge its values as well. if ruamel.yaml.nodes.MappingNode in value_types: # A "!retain" tag says to skip deep merging for this node. Replace the tag so # downstream schema validation doesn't break on our application-specific tag. if last_node_value.tag == '!retain' and len(grouped_nodes) > 1: last_node_value.tag = 'tag:yaml.org,2002:map' merged_nodes.append((last_node_key, last_node_value)) else: merged_nodes.append( ( last_node_key, ruamel.yaml.nodes.MappingNode( tag=last_node_value.tag, value=deep_merge_nodes(merge_values(grouped_nodes)), start_mark=last_node_value.start_mark, end_mark=last_node_value.end_mark, flow_style=last_node_value.flow_style, comment=last_node_value.comment, anchor=last_node_value.anchor, ), ), ) continue # If we're dealing with SequenceNodes, merge by appending sequences together. if ruamel.yaml.nodes.SequenceNode in value_types: if last_node_value.tag == '!retain' and len(grouped_nodes) > 1: last_node_value.tag = 'tag:yaml.org,2002:seq' merged_nodes.append((last_node_key, last_node_value)) else: merged_nodes.append( ( last_node_key, ruamel.yaml.nodes.SequenceNode( tag=last_node_value.tag, value=filter_omitted_nodes(grouped_nodes, merge_values(grouped_nodes)), start_mark=last_node_value.start_mark, end_mark=last_node_value.end_mark, flow_style=last_node_value.flow_style, comment=last_node_value.comment, anchor=last_node_value.anchor, ), ), ) continue merged_nodes.append((last_node_key, last_node_value)) return merged_nodes borgmatic/borgmatic/config/normalize.py000066400000000000000000000370311510202216200206060ustar00rootroot00000000000000import logging import os def normalize_sections(config_filename, config): ''' Given a configuration filename and a configuration dict of its loaded contents, airlift any options out of sections ("location:", etc.) to the global scope and delete those sections. Return any log message warnings produced based on the normalization performed. Raise ValueError if the "prefix" option is set to different values in the "location" and "consistency" sections. ''' try: location = config.get('location') or {} except AttributeError: raise ValueError('Configuration does not contain any options') storage = config.get('storage') or {} consistency = config.get('consistency') or {} hooks = config.get('hooks') or {} if ( location.get('prefix') and consistency.get('prefix') and location.get('prefix') != consistency.get('prefix') ): raise ValueError( 'The retention prefix and the consistency prefix cannot have different values (unless one is not set).', ) if storage.get('umask') and hooks.get('umask') and storage.get('umask') != hooks.get('umask'): raise ValueError( 'The storage umask and the hooks umask cannot have different values (unless one is not set).', ) any_section_upgraded = False # Move any options from deprecated sections into the global scope.
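    # A hedged illustration with hypothetical options: {'location': {'source_directories':
    # ['/home']}, 'retention': {'keep_daily': 7}} gets rewritten by the loop below to the
    # equivalent global-scope form {'source_directories': ['/home'], 'keep_daily': 7}.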
for section_name in ('location', 'storage', 'retention', 'consistency', 'output', 'hooks'): section_config = config.get(section_name) if section_config is not None: any_section_upgraded = True del config[section_name] config.update(section_config) if any_section_upgraded: return [ logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: Configuration sections (like location:, storage:, retention:, consistency:, and hooks:) are deprecated and support will be removed from a future release. To prepare for this, move your options out of sections to the global scope.', ), ), ] return [] def make_command_hook_deprecation_log(config_filename, option_name): # pragma: no cover ''' Given a configuration filename and the name of a configuration option, return a deprecation warning log for it. ''' return logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: {option_name} is deprecated and support will be removed from a future release. Use commands: instead.', ), ) def normalize_commands(config_filename, config): ''' Given a configuration filename and a configuration dict, transform any "before_*"- and "after_*"-style command hooks into "commands:". ''' logs = [] # Normalize "before_actions" and "after_actions". for preposition in ('before', 'after'): option_name = f'{preposition}_actions' commands = config.pop(option_name, None) if commands: logs.append(make_command_hook_deprecation_log(config_filename, option_name)) config.setdefault('commands', []).append( { preposition: 'repository', 'run': commands, }, ) # Normalize "before_backup", "before_prune", "after_backup", "after_prune", etc. for action_name in ('create', 'prune', 'compact', 'check', 'extract'): for preposition in ('before', 'after'): option_name = f'{preposition}_{"backup" if action_name == "create" else action_name}' commands = config.pop(option_name, None) if not commands: continue logs.append(make_command_hook_deprecation_log(config_filename, option_name)) config.setdefault('commands', []).append( { preposition: 'action', 'when': [action_name], 'run': commands, }, ) # Normalize "on_error". commands = config.pop('on_error', None) if commands: logs.append(make_command_hook_deprecation_log(config_filename, 'on_error')) config.setdefault('commands', []).append( { 'after': 'error', 'when': ['create', 'prune', 'compact', 'check'], 'run': commands, }, ) # Normalize "before_everything" and "after_everything". for preposition in ('before', 'after'): option_name = f'{preposition}_everything' commands = config.pop(option_name, None) if commands: logs.append(make_command_hook_deprecation_log(config_filename, option_name)) config.setdefault('commands', []).append( { preposition: 'everything', 'when': ['create'], 'run': commands, }, ) return logs def normalize(config_filename, config): # noqa: PLR0912, PLR0915 ''' Given a configuration filename and a configuration dict of its loaded contents, apply particular hard-coded rules to normalize the configuration to adhere to the current schema. Return any log message warnings produced based on the normalization performed. Raise ValueError if the configuration cannot be normalized.
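    As one concrete example of these rules (hypothetical value), a deprecated string value like
    exclude_if_present: .nobackup is upgraded to the list form exclude_if_present:
    ['.nobackup'], and a corresponding deprecation warning log is returned to the caller.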
''' logs = normalize_sections(config_filename, config) logs += normalize_commands(config_filename, config) if config.get('borgmatic_source_directory'): logs.append( logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: The borgmatic_source_directory option is deprecated and will be removed from a future release. Use user_runtime_directory and user_state_directory instead.', ), ), ) # Upgrade exclude_if_present from a string to a list. exclude_if_present = config.get('exclude_if_present') if isinstance(exclude_if_present, str): logs.append( logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: The exclude_if_present option now expects a list value. String values for this option are deprecated and support will be removed from a future release.', ), ), ) config['exclude_if_present'] = [exclude_if_present] # Unconditionally set the bootstrap hook so that it's enabled by default and config files get # stored in each Borg archive. config.setdefault('bootstrap', {}) # Move store_config_files from the global scope to the bootstrap hook. store_config_files = config.get('store_config_files') if store_config_files is not None: logs.append( logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: The store_config_files option has moved under the bootstrap hook. Specifying store_config_files at the global scope is deprecated and support will be removed from a future release.', ), ), ) del config['store_config_files'] config['bootstrap']['store_config_files'] = store_config_files # Upgrade various monitoring hooks from a string to a dict. healthchecks = config.get('healthchecks') if isinstance(healthchecks, str): logs.append( logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: The healthchecks hook now expects a key/value pair with "ping_url" as a key. String values for this option are deprecated and support will be removed from a future release.', ), ), ) config['healthchecks'] = {'ping_url': healthchecks} cronitor = config.get('cronitor') if isinstance(cronitor, str): logs.append( logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: The cronitor hook now expects key/value pairs. String values for this option are deprecated and support will be removed from a future release.', ), ), ) config['cronitor'] = {'ping_url': cronitor} pagerduty = config.get('pagerduty') if isinstance(pagerduty, str): logs.append( logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: The pagerduty hook now expects key/value pairs. String values for this option are deprecated and support will be removed from a future release.', ), ), ) config['pagerduty'] = {'integration_key': pagerduty} cronhub = config.get('cronhub') if isinstance(cronhub, str): logs.append( logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: The cronhub hook now expects key/value pairs. String values for this option are deprecated and support will be removed from a future release.', ), ), ) config['cronhub'] = {'ping_url': cronhub} # Upgrade consistency checks from a list of strings to a list of dicts.
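    # For example, checks: ['repository', 'archives'] becomes
    # checks: [{'name': 'repository'}, {'name': 'archives'}].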
checks = config.get('checks') if isinstance(checks, list) and len(checks) and isinstance(checks[0], str): logs.append( logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: The checks option now expects a list of key/value pairs. Lists of strings for this option are deprecated and support will be removed from a future release.', ), ), ) config['checks'] = [{'name': check_type} for check_type in checks] # Rename various configuration options. numeric_owner = config.pop('numeric_owner', None) if numeric_owner is not None: logs.append( logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: The numeric_owner option has been renamed to numeric_ids. numeric_owner is deprecated and support will be removed from a future release.', ), ), ) config['numeric_ids'] = numeric_owner bsd_flags = config.pop('bsd_flags', None) if bsd_flags is not None: logs.append( logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: The bsd_flags option has been renamed to flags. bsd_flags is deprecated and support will be removed from a future release.', ), ), ) config['flags'] = bsd_flags remote_rate_limit = config.pop('remote_rate_limit', None) if remote_rate_limit is not None: logs.append( logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: The remote_rate_limit option has been renamed to upload_rate_limit. remote_rate_limit is deprecated and support will be removed from a future release.', ), ), ) config['upload_rate_limit'] = remote_rate_limit # Upgrade remote repositories to ssh:// syntax, required in Borg 2. repositories = config.get('repositories') if repositories: if any(isinstance(repository, str) for repository in repositories): logs.append( logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: The repositories option now expects a list of key/value pairs. Lists of strings for this option are deprecated and support will be removed from a future release.', ), ), ) config['repositories'] = [ {'path': repository} if isinstance(repository, str) else repository for repository in repositories ] repositories = config['repositories'] config['repositories'] = [] for repository_dict in repositories: repository_path = repository_dict.get('path') if repository_path is None: continue if '~' in repository_path: logs.append( logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: Repository paths containing "~" are deprecated in borgmatic and support will be removed from a future release.', ), ), ) if ':' in repository_path: if repository_path.startswith('file://'): updated_repository_path = os.path.abspath( repository_path.partition('file://')[-1], ) config['repositories'].append( dict( repository_dict, path=updated_repository_path, ), ) elif repository_path.startswith(('ssh://', 'sftp://', 'rclone:', 's3:', 'b2:')): config['repositories'].append(repository_dict) else: rewritten_repository_path = f"ssh://{repository_path.replace(':~', '/~').replace(':/', '/').replace(':', '/./')}" logs.append( logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: Remote repository paths without ssh://, sftp://, rclone:, s3:, or b2: syntax are deprecated and support will be removed from a future release.
Interpreting "{repository_path}" as "{rewritten_repository_path}"', ), ), ) config['repositories'].append( dict( repository_dict, path=rewritten_repository_path, ), ) else: config['repositories'].append(repository_dict) if config.get('prefix'): logs.append( logging.makeLogRecord( dict( levelno=logging.WARNING, levelname='WARNING', msg=f'{config_filename}: The prefix option is deprecated and support will be removed from a future release. Use archive_name_format or match_archives instead.', ), ), ) return logs borgmatic/borgmatic/config/override.py000066400000000000000000000112151510202216200204210ustar00rootroot00000000000000import io import logging import ruamel.yaml logger = logging.getLogger(__name__) def set_values(config, keys, value): ''' Given a hierarchy of configuration dicts, a sequence of parsed key strings, and a string value, descend into the hierarchy based on the keys to set the value into the right place. ''' if not keys: return first_key = keys[0] if len(keys) == 1: if isinstance(config, list): raise ValueError( 'When overriding a list option, the value must use list syntax (e.g., "[foo, bar]" or "[{key: value}]" as appropriate)', ) config[first_key] = value return if first_key not in config: config[first_key] = {} set_values(config[first_key], keys[1:], value) def convert_value_type(value, option_type): ''' Given a string value and its schema type as a string, determine its logical type (string, boolean, integer, etc.), and return it converted to that type. If the option type is a string, leave the value as a string so that special characters in it don't get interpreted as YAML during conversion. Raise ruamel.yaml.error.YAMLError if there's a parse issue with the YAML. ''' if option_type == 'string': return value return ruamel.yaml.YAML(typ='safe').load(io.StringIO(value)) LEGACY_SECTION_NAMES = {'location', 'storage', 'retention', 'consistency', 'output', 'hooks'} def strip_section_names(parsed_override_key): ''' Given a parsed override key as a tuple of option and suboption names, strip out any initial legacy section names, since configuration file normalization also strips them out. ''' if parsed_override_key[0] in LEGACY_SECTION_NAMES: return parsed_override_key[1:] return parsed_override_key def type_for_option(schema, option_keys): ''' Given a configuration schema and a sequence of keys identifying an option, e.g. ('extra_borg_options', 'init'), return the schema type of that option as a string. Return None if the option or its type cannot be found in the schema. ''' option_schema = schema try: for key in option_keys: option_schema = option_schema['properties'][key] except KeyError: return None try: return option_schema['type'] except KeyError: return None def parse_overrides(raw_overrides, schema): ''' Given a sequence of configuration file override strings in the form of "option.suboption=value" and a configuration schema dict, parse and return a sequence of tuples (keys, values), where keys is a sequence of strings. For instance, given the following raw overrides: ['my_option.suboption=value1', 'other_option=value2'] ... return this: ( (('my_option', 'suboption'), 'value1'), (('other_option'), 'value2'), ) Raise ValueError if an override can't be parsed. 
''' if not raw_overrides: return () parsed_overrides = [] try: for raw_override in raw_overrides: raw_keys, value = raw_override.split('=', 1) keys = tuple(raw_keys.split('.')) option_type = type_for_option(schema, keys) parsed_overrides.append( ( keys, convert_value_type(value, option_type), ), ) except ValueError: raise ValueError( f"Invalid override '{raw_override}'. Make sure you use the form: OPTION=VALUE or OPTION.SUBOPTION=VALUE", ) except ruamel.yaml.error.YAMLError as error: raise ValueError(f"Invalid override '{raw_override}': {error.problem}") return tuple(parsed_overrides) def apply_overrides(config, schema, raw_overrides): ''' Given a configuration dict, a corresponding configuration schema dict, and a sequence of configuration file override strings in the form of "option.suboption=value", parse each override and set it into the configuration dict. Set the overrides into the configuration both with and without deprecated section names (if used), so that the overrides work regardless of whether the configuration is also using deprecated section names. ''' overrides = parse_overrides(raw_overrides, schema) if overrides: logger.warning( "The --override flag is deprecated and will be removed from a future release. Instead, use a command-line flag corresponding to the configuration option you'd like to set.", ) for keys, value in overrides: set_values(config, keys, value) set_values(config, strip_section_names(keys), value) borgmatic/borgmatic/config/paths.py000066400000000000000000000161351510202216200177270ustar00rootroot00000000000000import contextlib import logging import os import tempfile from enum import Enum logger = logging.getLogger(__name__) class Systemd_directories(Enum): RUNTIME_DIRECTORY = 0 STATE_DIRECTORY = 1 CACHE_DIRECTORY = 2 LOGS_DIRECTORY = 3 CONFIGURATION_DIRECTORY = 4 def expand_user_in_path(path): ''' Given a directory path, expand any tildes in it. ''' try: return os.path.expanduser(path or '') or None except TypeError: return None def resolve_systemd_directory(directory): ''' Given a systemd directory environment variable enum, read the value if set and return the first configured directory. ''' separator = ':' paths = os.environ.get(directory.name) return paths.split(separator)[0] if paths else None def get_working_directory(config): # pragma: no cover ''' Given a configuration dict, get the working directory from it, expanding any tildes. ''' return expand_user_in_path(config.get('working_directory')) def get_borgmatic_source_directory(config): ''' Given a configuration dict, get the (deprecated) borgmatic source directory, expanding any tildes. Defaults to ~/.borgmatic. ''' return expand_user_in_path(config.get('borgmatic_source_directory') or '~/.borgmatic') TEMPORARY_DIRECTORY_PREFIX = 'borgmatic-' def replace_temporary_subdirectory_with_glob( path, temporary_directory_prefix=TEMPORARY_DIRECTORY_PREFIX, ): ''' Given an absolute temporary directory path and an optional temporary directory prefix, look for a subdirectory within it starting with the temporary directory prefix (or a default) and replace it with an appropriate glob. For instance, given: /tmp/borgmatic-aet8kn93/borgmatic ... replace it with: /tmp/borgmatic-*/borgmatic This is useful for finding previous temporary directories from prior borgmatic runs. 
''' return os.path.join( '/', *( ( f'{temporary_directory_prefix}*' if subdirectory.startswith(temporary_directory_prefix) else subdirectory ) for subdirectory in path.split(os.path.sep) ), ) class Runtime_directory: ''' A Python context manager for creating and cleaning up the borgmatic runtime directory used for storing temporary runtime data like streaming database dumps and bootstrap metadata. Example use as a context manager: with borgmatic.config.paths.Runtime_directory(config) as borgmatic_runtime_directory: do_something_with(borgmatic_runtime_directory) For the scope of that "with" statement, the runtime directory is available. Afterwards, it automatically gets cleaned up as necessary. ''' def __init__(self, config): ''' Given a configuration dict determine the borgmatic runtime directory, creating a secure, temporary directory within it if necessary. Defaults to $XDG_RUNTIME_DIR/./borgmatic or $RUNTIME_DIRECTORY/./borgmatic or $TMPDIR/borgmatic-[random]/./borgmatic or $TEMP/borgmatic-[random]/./borgmatic or /tmp/borgmatic-[random]/./borgmatic where "[random]" is a randomly generated string intended to avoid path collisions. If XDG_RUNTIME_DIR or RUNTIME_DIRECTORY is set and already ends in "/borgmatic", then don't tack on a second "/borgmatic" path component. The "/./" is taking advantage of a Borg feature such that the part of the path before the "/./" does not get stored in the file path within an archive. That way, the path of the runtime directory can change without leaving database dumps within an archive inaccessible. ''' runtime_directory = ( config.get('user_runtime_directory') or os.environ.get('XDG_RUNTIME_DIR') # Set by PAM on Linux. or resolve_systemd_directory( Systemd_directories.RUNTIME_DIRECTORY ) # Set by systemd if configured. ) if runtime_directory: if not runtime_directory.startswith(os.path.sep): raise ValueError('The runtime directory must be an absolute path') self.temporary_directory = None else: base_directory = ( os.environ.get('TMPDIR') or os.environ.get('TEMP') or '/tmp' # noqa: S108 ) if not base_directory.startswith(os.path.sep): raise ValueError('The temporary directory must be an absolute path') os.makedirs(base_directory, mode=0o700, exist_ok=True) self.temporary_directory = tempfile.TemporaryDirectory( prefix=TEMPORARY_DIRECTORY_PREFIX, dir=base_directory, ) runtime_directory = self.temporary_directory.name (base_path, final_directory) = os.path.split(runtime_directory.rstrip(os.path.sep)) self.runtime_path = expand_user_in_path( os.path.join( base_path if final_directory == 'borgmatic' else runtime_directory, '.', # Borg 1.4+ "slashdot" hack. 'borgmatic', ), ) os.makedirs(self.runtime_path, mode=0o700, exist_ok=True) logger.debug(f'Using runtime directory {os.path.normpath(self.runtime_path)}') def __enter__(self): ''' Return the borgmatic runtime path as a string. ''' return self.runtime_path def __exit__(self, exception_type, exception, traceback): ''' Delete any temporary directory that was created as part of initialization. ''' if self.temporary_directory: # The cleanup() call errors if, for instance, there's still a # mounted filesystem within the temporary directory. There's # nothing we can do about that here, so swallow the error. 
with contextlib.suppress(OSError): self.temporary_directory.cleanup() def make_runtime_directory_glob(borgmatic_runtime_directory): ''' Given a borgmatic runtime directory path, make a glob that would match that path, specifically replacing any randomly generated temporary subdirectory with "*" since such a directory's name changes on every borgmatic run. ''' return os.path.join( *( '*' if subdirectory.startswith(TEMPORARY_DIRECTORY_PREFIX) else subdirectory for subdirectory in os.path.normpath(borgmatic_runtime_directory).split(os.path.sep) ), ) def get_borgmatic_state_directory(config): ''' Given a configuration dict, get the borgmatic state directory used for storing borgmatic state files like records of when checks last ran. Defaults to $XDG_STATE_HOME/borgmatic or ~/.local/state/./borgmatic. ''' return expand_user_in_path( os.path.join( config.get('user_state_directory') or os.environ.get('XDG_STATE_HOME') or resolve_systemd_directory( Systemd_directories.STATE_DIRECTORY ) # Set by systemd if configured. or '~/.local/state', 'borgmatic', ), ) borgmatic/borgmatic/config/schema.py000066400000000000000000000041621510202216200200450ustar00rootroot00000000000000import decimal import itertools def get_properties(schema): ''' Given a schema dict, return its properties. But if it's got sub-schemas with multiple different potential properties, return their merged properties instead (interleaved so the first properties of each sub-schema come first). The idea is that the user should see all possible options even if they're not all possible together. ''' if 'oneOf' in schema: return dict( item for item in itertools.chain( *itertools.zip_longest( *[sub_schema['properties'].items() for sub_schema in schema['oneOf']], ), ) if item is not None ) return schema.get('properties', {}) SCHEMA_TYPE_TO_PYTHON_TYPE = { 'array': list, 'boolean': bool, 'integer': int, 'number': decimal.Decimal, 'object': dict, 'string': str, } def parse_type(schema_type, **overrides): ''' Given a schema type as a string, return the corresponding Python type. If any overrides are given in the form of a schema type string to a Python type, then override the default type mapping with them. Raise ValueError if the schema type is unknown. ''' try: return dict( SCHEMA_TYPE_TO_PYTHON_TYPE, **overrides, )[schema_type] except KeyError: raise ValueError(f'Unknown type in configuration schema: {schema_type}') def compare_types(schema_type, target_types, match=any): ''' Given a schema type as a string or a list of strings (representing multiple types) and a set of target type strings, return whether the schema type matches the target types. If the schema type is a list of strings, use the given match function (such as any or all) to compare elements. For instance, if match is given as all, then every element of the schema_type list must be in the target types. ''' if isinstance(schema_type, list): return match(element_schema_type in target_types for element_schema_type in schema_type) return schema_type in target_types borgmatic/borgmatic/config/schema.yaml000066400000000000000000004243331510202216200203630ustar00rootroot00000000000000type: object required: - repositories additionalProperties: false properties: constants: type: object description: | Constants to use in the configuration file. Within option values, all occurrences of the constant name in curly braces will be replaced with the constant value.
For example, if you have a constant named "app_name" with the value "myapp", then the string "{app_name}" will be replaced with "myapp" in the configuration file. example: app_name: myapp user: myuser source_directories: type: array items: type: string description: | List of source directories and files to back up. Globs and tildes are expanded. Do not backslash spaces in path names. Be aware that by default, Borg treats missing source directories as warnings rather than errors. If you'd like to change that behavior, see https://torsion.org/borgmatic/how-to/customize-warnings-and-errors/ or the "source_directories_must_exist" option. example: - /home - /etc - /var/log/syslog* - /home/user/path with spaces source_directories_must_exist: type: boolean description: | If true, then source directories (and root pattern paths) must exist. If they don't, an error is raised. Defaults to false. example: true repositories: type: array items: type: object required: - path additionalProperties: false properties: path: type: string description: The local path or Borg URL of the repository. example: ssh://user@backupserver/./sourcehostname.borg label: type: string description: | An optional label for the repository, used in logging and to make selecting the repository easier on the command-line. example: backupserver encryption: type: string description: | The encryption mode with which to create the repository, only used for the repo-create action. To see the available encryption modes, run "borg init --help" with Borg 1 or "borg repo-create --help" with Borg 2. example: repokey-blake2 append_only: type: boolean description: | Whether the repository should be created append-only, only used for the repo-create action. Defaults to false. example: true storage_quota: type: string description: | The storage quota with which to create the repository, only used for the repo-create action. Defaults to no quota. example: 5G make_parent_directories: type: boolean description: | Whether any missing parent directories of the repository path should be created, only used for the repo-create action. Defaults to false. (This option is supported for Borg 1.x only.) example: true description: | A required list of local or remote repositories with paths and optional labels (which can be used with the --repository flag to select a repository). Tildes are expanded. Multiple repositories are backed up to in sequence. Borg placeholders can be used. See the output of "borg help placeholders" for details. See ssh_command for SSH options like identity file or port. If systemd service is used, then add local repository paths in the systemd service file to the ReadWritePaths list. example: - path: ssh://user@backupserver/./sourcehostname.borg label: backupserver - path: /mnt/backup label: local working_directory: type: string description: | Working directory to use when running actions, useful for backing up using relative source directory paths. Does not currently apply to borgmatic configuration file paths or includes. Tildes are expanded. See http://borgbackup.readthedocs.io/en/stable/usage/create.html for details. Defaults to not set. example: /path/to/working/directory one_file_system: type: boolean description: | Stay in same file system; do not cross mount points beyond the given source directories. Defaults to false. example: true numeric_ids: type: boolean description: | Only store/extract numeric user and group identifiers. Defaults to false. example: true atime: type: boolean description: | Store atime into archive. 
Defaults to true in Borg < 1.2, false in Borg 1.2+. example: false ctime: type: boolean description: Store ctime into archive. Defaults to true. example: false birthtime: type: boolean description: | Store birthtime (creation date) into archive. Defaults to true. example: false read_special: type: boolean description: | Use Borg's --read-special flag to allow backup of block and other special devices. Use with caution, as it will lead to problems if used when backing up special devices such as /dev/zero. Defaults to false. But when a database hook is used, the setting here is ignored and read_special is considered true. example: true flags: type: boolean description: | Record filesystem flags (e.g. NODUMP, IMMUTABLE) in archive. Defaults to true. example: false files_cache: type: string description: | Mode in which to operate the files cache. See http://borgbackup.readthedocs.io/en/stable/usage/create.html for details. Defaults to "ctime,size,inode". example: ctime,size,inode local_path: type: string description: | Alternate Borg local executable. Defaults to "borg". example: borg1 remote_path: type: string description: | Alternate Borg remote executable. Defaults to "borg". example: borg1 patterns: type: array items: type: string description: | Any paths matching these patterns are included/excluded from backups. Recursion root patterns ("R ...") are effectively the same as "source_directories"; they tell Borg which paths to backup (modulo any excludes). Globs are expanded. (Tildes are not.) See the output of "borg help patterns" for more details. Quote any value if it contains leading punctuation, so it parses correctly. example: - 'R /' - '- /home/*/.cache' - '+ /home/susan' - '- /home/*' patterns_from: type: array items: type: string description: | Read include/exclude patterns from one or more separate named files, one pattern per line. See the output of "borg help patterns" for more details. example: - /etc/borgmatic/patterns exclude_patterns: type: array items: type: string description: | Any paths matching these patterns are excluded from backups. Globs and tildes are expanded. Note that a glob pattern must either start with a glob or be an absolute path. Do not backslash spaces in path names. See the output of "borg help patterns" for more details. example: - '*.pyc' - /home/*/.cache - '*/.vim*.tmp' - /etc/ssl - /home/user/path with spaces exclude_from: type: array items: type: string description: | Read exclude patterns from one or more separate named files, one pattern per line. See the output of "borg help patterns" for more details. example: - /etc/borgmatic/excludes exclude_caches: type: boolean description: | Exclude directories that contain a CACHEDIR.TAG file. See http://www.brynosaurus.com/cachedir/spec.html for details. Defaults to false. example: true exclude_if_present: type: array items: type: string description: | Exclude directories that contain a file with the given filenames. Defaults to not set. example: - .nobackup keep_exclude_tags: type: boolean description: | If true, the exclude_if_present filename is included in backups. Defaults to false, meaning that the exclude_if_present filename is omitted from backups. example: true exclude_nodump: type: boolean description: | Exclude files with the NODUMP flag. Defaults to false. (This option is supported for Borg 1.x only.) example: true borgmatic_source_directory: type: string description: | Deprecated. Only used for locating database dumps and bootstrap metadata within backup archives created prior to deprecation. 
Replaced by user_runtime_directory and user_state_directory. Defaults to ~/.borgmatic example: /tmp/borgmatic user_runtime_directory: type: string description: | Path for storing temporary runtime data like streaming database dumps and bootstrap metadata. borgmatic automatically creates and uses a "borgmatic" subdirectory here. Defaults to $XDG_RUNTIME_DIR or $TMPDIR or $TEMP or /run/user/$UID. example: /run/user/1001 user_state_directory: type: string description: | Path for storing borgmatic state files like records of when checks last ran. borgmatic automatically creates and uses a "borgmatic" subdirectory here. If you change this option, borgmatic must create the check records again (and therefore re-run checks). Defaults to $XDG_STATE_HOME or ~/.local/state. example: /var/lib/borgmatic encryption_passcommand: type: string description: | The standard output of this command is used to unlock the encryption key. Only use on repositories that were initialized with passcommand/repokey/keyfile encryption. Note that if both encryption_passcommand and encryption_passphrase are set, then encryption_passphrase takes precedence. This can also be used to access encrypted systemd service credentials. Defaults to not set. For more details, see: https://torsion.org/borgmatic/how-to/provide-your-passwords/ example: "secret-tool lookup borg-repository repo-name" encryption_passphrase: type: string description: | Passphrase to unlock the encryption key with. Only use on repositories that were initialized with passphrase/repokey/keyfile encryption. Quote the value if it contains punctuation, so it parses correctly. And backslash any quote or backslash literals as well. Defaults to not set. Supports the "{credential ...}" syntax. example: "!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~" checkpoint_interval: type: integer description: | Number of seconds between each checkpoint during a long-running backup. See https://borgbackup.readthedocs.io/en/stable/faq.html for details. Defaults to checkpoints every 1800 seconds (30 minutes). example: 1800 checkpoint_volume: type: integer description: | Number of backed up bytes between each checkpoint during a long-running backup. Only supported with Borg 2+. See https://borgbackup.readthedocs.io/en/stable/faq.html for details. Defaults to only time-based checkpointing (see "checkpoint_interval") instead of volume-based checkpointing. example: 1048576 chunker_params: type: string description: | Specify the parameters passed to the chunker (CHUNK_MIN_EXP, CHUNK_MAX_EXP, HASH_MASK_BITS, HASH_WINDOW_SIZE). See https://borgbackup.readthedocs.io/en/stable/internals.html for details. Defaults to "19,23,21,4095". example: 19,23,21,4095 compression: type: string description: | Type of compression to use when creating archives. (Compression level can be added separated with a comma, like "zstd,7".) See http://borgbackup.readthedocs.io/en/stable/usage/create.html for details. Defaults to "lz4". example: lz4 recompress: type: string enum: ['if-different', 'always', 'never'] description: | Mode for recompressing data chunks according to MODE. Possible modes are: * "if-different": Recompress if the current compression is with a different compression algorithm. * "always": Recompress even if the current compression is with the same compression algorithm. Use this to change the compression level. * "never": Do not recompress. Use this option to explicitly prevent recompression. See https://borgbackup.readthedocs.io/en/stable/usage/recreate.html for details. Defaults to "never". 
example: if-different upload_rate_limit: type: integer description: | Remote network upload rate limit in kiBytes/second. Defaults to unlimited. example: 100 upload_buffer_size: type: integer description: | Size of network upload buffer in MiB. Defaults to no buffer. example: 160 retries: type: integer description: | Number of times to retry a failing backup before giving up. Defaults to 0 (i.e., does not attempt retry). example: 3 retry_wait: type: integer description: | Wait time between retries (in seconds) to allow transient issues to pass. Increases after each retry by that same wait time as a form of backoff. Defaults to 0 (no wait). example: 10 temporary_directory: type: string description: | Directory where temporary Borg files are stored. Defaults to $TMPDIR. See "Resource Usage" at https://borgbackup.readthedocs.io/en/stable/usage/general.html for details. example: /path/to/tmpdir ssh_command: type: string description: | Command to use instead of "ssh". This can be used to specify ssh options. Defaults to not set. example: ssh -i /path/to/private/key borg_base_directory: type: string description: | Base path used for various Borg directories. Defaults to $HOME, ~$USER, or ~. example: /path/to/base borg_config_directory: type: string description: | Path for Borg configuration files. Defaults to $borg_base_directory/.config/borg example: /path/to/base/config borg_cache_directory: type: string description: | Path for Borg cache files. Defaults to $borg_base_directory/.cache/borg example: /path/to/base/cache use_chunks_archive: type: boolean description: | Enables or disables the use of chunks.archive.d for faster cache resyncs in Borg. If true, value is set to "yes" (default) else it's set to "no", reducing disk usage but slowing resyncs. example: true borg_files_cache_ttl: type: integer description: | Maximum time to live (ttl) for entries in the Borg files cache. example: 20 borg_security_directory: type: string description: | Path for Borg security and encryption nonce files. Defaults to $borg_config_directory/security example: /path/to/base/config/security borg_keys_directory: type: string description: | Path for Borg encryption key files. Defaults to $borg_config_directory/keys example: /path/to/base/config/keys borg_key_file: type: string description: | Path for the Borg repository key file, for use with a repository created with "keyfile" encryption. example: /path/to/base/config/keyfile borg_exit_codes: type: array items: type: object required: ['code', 'treat_as'] additionalProperties: false properties: code: type: integer not: {enum: [0]} description: | The exit code for an existing Borg warning or error. example: 100 treat_as: type: string enum: ['error', 'warning'] description: | Whether to consider the exit code as an error or as a warning in borgmatic. example: error description: | A list of Borg exit codes that should be elevated to errors or squashed to warnings as indicated. By default, Borg error exit codes (2 to 99) are treated as errors while warning exit codes (1 and 100+) are treated as warnings. Exit codes other than 1 and 2 are only present in Borg 1.4.0+. example: - code: 13 treat_as: warning - code: 100 treat_as: error umask: type: integer description: | Umask used for when executing Borg or calling hooks. Defaults to 0077 for Borg or the umask that borgmatic is run with for hooks. example: 0077 lock_wait: type: integer description: | Maximum seconds to wait for acquiring a repository/cache lock. Defaults to 1. 
example: 5 archive_name_format: type: string description: | Name of the archive to create. Borg placeholders can be used. See the output of "borg help placeholders" for details. Defaults to "{hostname}-{now:%Y-%m-%dT%H:%M:%S.%f}" with Borg 1 and "{hostname}" with Borg 2, as Borg 2 does not require unique archive names; identical archive names form a common "series" that can be targeted together. When running actions like repo-list, info, or check, borgmatic automatically tries to match only archives created with this name format. example: "{hostname}-documents-{now}" match_archives: type: string description: | A Borg pattern for filtering down the archives used by borgmatic actions that operate on multiple archives. For Borg 1.x, use a shell pattern here and see the output of "borg help placeholders" for details. For Borg 2.x, see the output of "borg help match-archives". If match_archives is not specified, borgmatic defaults to deriving the match_archives value from archive_name_format. example: "sh:{hostname}-*" relocated_repo_access_is_ok: type: boolean description: | Bypass Borg error about a repository that has been moved. Defaults to false. example: true unknown_unencrypted_repo_access_is_ok: type: boolean description: | Bypass Borg error about a previously unknown unencrypted repository. Defaults to false. example: true debug_passphrase: type: boolean description: | When set true, display debugging information that includes passphrases used and passphrase related environment variables set. Defaults to false. example: true display_passphrase: type: boolean description: | When set true, always shows passphrase and its hex UTF-8 byte sequence. Defaults to false. example: true check_i_know_what_i_am_doing: type: boolean description: | Bypass Borg confirmation about check with repair option. Defaults to false and an interactive prompt from Borg. example: true extra_borg_options: type: object additionalProperties: false properties: break_lock: type: string description: | Extra command-line options to pass to "borg break-lock". example: "--extra-option" check: type: string description: | Extra command-line options to pass to "borg check". example: "--extra-option" compact: type: string description: | Extra command-line options to pass to "borg compact". example: "--extra-option" create: type: string description: | Extra command-line options to pass to "borg create". example: "--extra-option" delete: type: string description: | Extra command-line options to pass to "borg delete". example: "--extra-option" export_tar: type: string description: | Extra command-line options to pass to "borg export-tar". example: "--extra-option" extract: type: string description: | Extra command-line options to pass to "borg extract". example: "--extra-option" key_export: type: string description: | Extra command-line options to pass to "borg key export". example: "--extra-option" key_import: type: string description: | Extra command-line options to pass to "borg key import". example: "--extra-option" key_change_passphrase: type: string description: | Extra command-line options to pass to "borg key change-passphrase". example: "--extra-option" info: type: string description: | Extra command-line options to pass to "borg info". example: "--extra-option" init: type: string description: | Deprecated. Use "repo_create" instead. Extra command-line options to pass to "borg init" / "borg repo-create". example: "--extra-option" list: type: string description: | Extra command-line options to pass to "borg list". 
example: "--extra-option" mount: type: string description: | Extra command-line options to pass to "borg mount". example: "--extra-option" prune: type: string description: | Extra command-line options to pass to "borg prune". example: "--extra-option" recreate: type: string description: | Extra command-line options to pass to "borg recreate". example: "--extra-option" rename: type: string description: | Extra command-line options to pass to "borg rename". example: "--extra-option" repo_create: type: string description: | Extra command-line options to pass to "borg init" / "borg repo-create". example: "--extra-option" repo_delete: type: string description: | Extra command-line options to pass to "borg repo-delete". example: "--extra-option" repo_info: type: string description: | Extra command-line options to pass to "borg repo-info". example: "--extra-option" repo_list: type: string description: | Extra command-line options to pass to "borg repo-list". example: "--extra-option" transfer: type: string description: | Extra command-line options to pass to "borg transfer". example: "--extra-option" umount: type: string description: | Extra command-line options to pass to "borg umount". example: "--extra-option" description: | Additional options to pass directly to particular Borg commands, handy for Borg options that borgmatic does not yet support natively. Note that borgmatic does not perform any validation on these options. Running borgmatic with "--verbosity 2" shows the exact Borg command-line invocation. keep_within: type: string description: | Keep all archives within this time interval. See "skip_actions" for disabling pruning altogether. example: 3H keep_secondly: type: integer description: Number of secondly archives to keep. example: 60 keep_minutely: type: integer description: Number of minutely archives to keep. example: 60 keep_hourly: type: integer description: Number of hourly archives to keep. example: 24 keep_daily: type: integer description: Number of daily archives to keep. example: 7 keep_weekly: type: integer description: Number of weekly archives to keep. example: 4 keep_monthly: type: integer description: Number of monthly archives to keep. example: 6 keep_yearly: type: integer description: Number of yearly archives to keep. example: 1 keep_13weekly: type: integer description: Number of quarterly archives to keep (13 week strategy). example: 13 keep_3monthly: type: integer description: Number of quarterly archives to keep (3 month strategy). example: 3 prefix: type: string description: | Deprecated. When pruning or checking archives, only consider archive names starting with this prefix. Borg placeholders can be used. See the output of "borg help placeholders" for details. If a prefix is not specified, borgmatic defaults to matching archives based on the archive_name_format (see above). example: sourcehostname compact_threshold: type: integer description: | Minimum saved space percentage threshold for compacting a segment, defaults to 10. example: 20 checks: type: array items: type: object oneOf: - required: [name] additionalProperties: false properties: name: type: string enum: - archives - data - extract - disabled description: | Name of the consistency check to run: * "repository" checks the consistency of the repository. * "archives" checks all of the archives. * "data" verifies the integrity of the data within the archives and implies the "archives" check as well. 
* "spot" checks that some percentage of source files are found in the most recent archive (with identical contents). * "extract" does an extraction dry-run of the most recent archive. * See "skip_actions" for disabling checks altogether. example: spot frequency: type: string description: | How frequently to run this type of consistency check (as a best effort). The value is a number followed by a unit of time. E.g., "2 weeks" to run this consistency check no more than every two weeks for a given repository or "1 month" to run it no more than monthly. Defaults to "always": running this check every time checks are run. example: 2 weeks only_run_on: type: array items: type: string description: | After the "frequency" duration has elapsed, only run this check if the current day of the week matches one of these values (the name of a day of the week in the current locale). "weekday" and "weekend" are also accepted. Defaults to running the check on any day of the week. example: - Saturday - Sunday - required: [name] additionalProperties: false properties: name: type: string enum: - repository description: | Name of the consistency check to run: * "repository" checks the consistency of the repository. * "archives" checks all of the archives. * "data" verifies the integrity of the data within the archives and implies the "archives" check as well. * "spot" checks that some percentage of source files are found in the most recent archive (with identical contents). * "extract" does an extraction dry-run of the most recent archive. * See "skip_actions" for disabling checks altogether. example: spot frequency: type: string description: | How frequently to run this type of consistency check (as a best effort). The value is a number followed by a unit of time. E.g., "2 weeks" to run this consistency check no more than every two weeks for a given repository or "1 month" to run it no more than monthly. Defaults to "always": running this check every time checks are run. example: 2 weeks only_run_on: type: array items: type: string description: | After the "frequency" duration has elapsed, only run this check if the current day of the week matches one of these values (the name of a day of the week in the current locale). "weekday" and "weekend" are also accepted. Defaults to running the check on any day of the week. example: - Saturday - Sunday max_duration: type: integer description: | How many seconds to check the repository before interrupting the check. Useful for splitting a long-running repository check into multiple partial checks. Defaults to no interruption. Only applies to the "repository" check, does not check the repository index and is not compatible with the "--repair" flag. example: 3600 - required: - name - count_tolerance_percentage - data_sample_percentage - data_tolerance_percentage additionalProperties: false properties: name: type: string enum: - spot description: | Name of the consistency check to run: * "repository" checks the consistency of the repository. * "archives" checks all of the archives. * "data" verifies the integrity of the data within the archives and implies the "archives" check as well. * "spot" checks that some percentage of source files are found in the most recent archive (with identical contents). * "extract" does an extraction dry-run of the most recent archive. * See "skip_actions" for disabling checks altogether. example: repository frequency: type: string description: | How frequently to run this type of consistency check (as a best effort). 
The value is a number followed by a unit of time. E.g., "2 weeks" to run this consistency check no more than every two weeks for a given repository or "1 month" to run it no more than monthly. Defaults to "always": running this check every time checks are run. example: 2 weeks only_run_on: type: array items: type: string description: | After the "frequency" duration has elapsed, only run this check if the current day of the week matches one of these values (the name of a day of the week in the current locale). "weekday" and "weekend" are also accepted. Defaults to running the check on any day of the week. example: - Saturday - Sunday count_tolerance_percentage: type: number description: | The percentage delta between the source directories file count and the most recent backup archive file count that is allowed before the entire consistency check fails. This can catch problems like incorrect excludes, inadvertent deletes, etc. Required (and only valid) for the "spot" check. example: 10 data_sample_percentage: type: number description: | The percentage of total files in the source directories to randomly sample and compare to their corresponding files in the most recent backup archive. Required (and only valid) for the "spot" check. example: 1 data_tolerance_percentage: type: number description: | The percentage of total files in the source directories that can fail a spot check comparison without failing the entire consistency check. This can catch problems like source files that have been bulk-changed by malware, backups that have been tampered with, etc. The value must be lower than or equal to the "data_sample_percentage". Required (and only valid) for the "spot" check. example: 0.5 xxh64sum_command: type: string description: | Command to use instead of "xxh64sum" to hash source files, usually found in an OS package named "xxhash". Do not substitute with a different hash type (SHA, MD5, etc.) or the check will never succeed. Only valid for the "spot" check. example: /usr/local/bin/xxh64sum description: | List of one or more consistency checks to run on a periodic basis (if "frequency" is set) or every time borgmatic runs checks (if "frequency" is omitted). example: - name: archives frequency: 2 weeks - name: repository check_repositories: type: array items: type: string description: | Paths or labels for a subset of the configured "repositories" (see above) on which to run consistency checks. Handy in case some of your repositories are very large, and so running consistency checks on them would take too long. Defaults to running consistency checks on all configured repositories. example: - user@backupserver:sourcehostname.borg check_last: type: integer description: | Restrict the number of checked archives to the last n. Applies only to the "archives" check. Defaults to checking all archives. example: 3 color: type: boolean description: | Apply color to console output. Defaults to true. example: false verbosity: type: integer enum: - -2 - -1 - 0 - 1 - 2 description: | Display verbose output to the console: -2 (disabled), -1 (errors only), 0 (warnings and responses to actions, the default), 1 (info about steps borgmatic is taking), or 2 (debug). example: 2 syslog_verbosity: type: integer enum: - -2 - -1 - 0 - 1 - 2 description: | Log verbose output to syslog: -2 (disabled, the default), -1 (errors only), 0 (warnings and responses to actions), 1 (info about steps borgmatic is taking), or 2 (debug).
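# Illustrative sketch (YAML comments only): a hypothetical "spot" check using
# the tolerance options documented above. With data_sample_percentage: 1, a
# source tree of 100,000 files yields roughly 1,000 sampled files, and
# data_tolerance_percentage: 0.5 allows up to 500 files (0.5% of the total)
# among that sample to fail comparison before the whole check fails. Note that
# data_tolerance_percentage must not exceed data_sample_percentage:
#
#     checks:
#         - name: spot
#           count_tolerance_percentage: 10
#           data_sample_percentage: 1
#           data_tolerance_percentage: 0.5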
example: 2 log_file_verbosity: type: integer enum: - -2 - -1 - 0 - 1 - 2 description: | Log verbose output to file: -2 (disabled), -1 (errors only), 0 (warnings and responses to actions), 1 (info about steps borgmatic is taking, the default), or 2 (debug). example: 2 log_file: type: string description: | Write log messages to the file at this path. example: /var/log/borgmatic/logfile.txt log_file_format: type: string description: | Python format string used for log messages written to the log file. example: "[{asctime}] {levelname}: {prefix}{message}" monitoring_verbosity: type: integer enum: - -2 - -1 - 0 - 1 - 2 description: | When a monitoring integration supporting logging is configured, log verbose output to it: -2 (disabled), -1 (errors only), 0 (warnings and responses to actions), 1 (info about steps borgmatic is taking, the default), or 2 (debug). example: 2 log_json: type: boolean description: | Write Borg log messages and console output as one JSON object per log line instead of formatted text. Defaults to false. example: true progress: type: boolean description: | Display progress as each file or archive is processed when running supported actions. Corresponds to the "--progress" flag on those actions. Defaults to false. example: true statistics: type: boolean description: | Display statistics for an archive when running supported actions. Corresponds to the "--stats" flag on those actions. Defaults to false. example: true list_details: type: boolean description: | Display details for each file or archive as it is processed when running supported actions. Corresponds to the "--list" flag on those actions. Defaults to false. example: true default_actions: type: boolean description: | Whether to apply default actions (create, prune, compact, and check) when no arguments are supplied to the borgmatic command. If set to false, borgmatic displays the help message instead. example: true skip_actions: type: array items: type: string enum: - repo-create - transfer - prune - compact - create - recreate - check - delete - extract - config - export-tar - mount - umount - repo-delete - restore - repo-list - list - repo-info - info - break-lock - key - borg description: | List of one or more actions to skip running for this configuration file, even if specified on the command-line (explicitly or implicitly). This is handy for append-only configurations where you never want to run "compact" or for checkless configurations where you want to skip "check". Defaults to not skipping any actions. example: - compact before_actions: type: array items: type: string description: | Deprecated. Use "commands:" instead. List of one or more shell commands or scripts to execute before all the actions for each repository. example: - "echo Starting actions." before_backup: type: array items: type: string description: | Deprecated. Use "commands:" instead. List of one or more shell commands or scripts to execute before creating a backup, run once per repository. example: - "echo Starting a backup." before_prune: type: array items: type: string description: | Deprecated. Use "commands:" instead. List of one or more shell commands or scripts to execute before pruning, run once per repository. example: - "echo Starting pruning." before_compact: type: array items: type: string description: | Deprecated. Use "commands:" instead. List of one or more shell commands or scripts to execute before compaction, run once per repository. example: - "echo Starting compaction."
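# Illustrative sketch (YAML comments only): for a hypothetical append-only
# repository configuration, "skip_actions" as documented above prevents
# "prune" and "compact" from ever running, even when borgmatic is invoked
# with its default actions:
#
#     skip_actions:
#         - prune
#         - compact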
before_check: type: array items: type: string description: | Deprecated. Use "commands:" instead. List of one or more shell commands or scripts to execute before consistency checks, run once per repository. example: - "echo Starting checks." before_extract: type: array items: type: string description: | Deprecated. Use "commands:" instead. List of one or more shell commands or scripts to execute before extracting a backup, run once per repository. example: - "echo Starting extracting." after_backup: type: array items: type: string description: | Deprecated. Use "commands:" instead. List of one or more shell commands or scripts to execute after creating a backup, run once per repository. example: - "echo Finished a backup." after_compact: type: array items: type: string description: | Deprecated. Use "commands:" instead. List of one or more shell commands or scripts to execute after compaction, run once per repository. example: - "echo Finished compaction." after_prune: type: array items: type: string description: | Deprecated. Use "commands:" instead. List of one or more shell commands or scripts to execute after pruning, run once per repository. example: - "echo Finished pruning." after_check: type: array items: type: string description: | Deprecated. Use "commands:" instead. List of one or more shell commands or scripts to execute after consistency checks, run once per repository. example: - "echo Finished checks." after_extract: type: array items: type: string description: | Deprecated. Use "commands:" instead. List of one or more shell commands or scripts to execute after extracting a backup, run once per repository. example: - "echo Finished extracting." after_actions: type: array items: type: string description: | Deprecated. Use "commands:" instead. List of one or more shell commands or scripts to execute after all actions for each repository. example: - "echo Finished actions." on_error: type: array items: type: string description: | Deprecated. Use "commands:" instead. List of one or more shell commands or scripts to execute when an exception occurs during a "create", "prune", "compact", or "check" action or an associated before/after hook. example: - "echo Error during create/prune/compact/check." before_everything: type: array items: type: string description: | Deprecated. Use "commands:" instead. List of one or more shell commands or scripts to execute before running all actions (if one of them is "create"). These are collected from all configuration files and then run once before all of them (prior to all actions). example: - "echo Starting actions." after_everything: type: array items: type: string description: | Deprecated. Use "commands:" instead. List of one or more shell commands or scripts to execute after running all actions (if one of them is "create"). These are collected from all configuration files and then run once after all of them (after any action). example: - "echo Completed actions." commands: type: array items: type: object oneOf: - required: [before, run] additionalProperties: false properties: before: type: string enum: - action - repository - configuration - everything description: | Name for the point in borgmatic's execution that the commands should be run before (required if "after" isn't set): * "action" runs before each action for each repository. * "repository" runs before all actions for each repository. * "configuration" runs before all actions and repositories in the current configuration file. * "everything" runs before all configuration files. 
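# Illustrative sketch (YAML comments only): migrating the deprecated
# "before_backup" option above to the "commands:" syntax being described
# here. Both run the (hypothetical) echo command once per repository before
# the "create" action:
#
#     before_backup:
#         - "echo Starting a backup."
#
# ... becomes:
#
#     commands:
#         - before: action
#           when: [create]
#           run:
#               - "echo Starting a backup."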
example: action when: type: array items: type: string enum: - repo-create - transfer - prune - compact - create - recreate - check - delete - extract - config - export-tar - mount - umount - repo-delete - restore - repo-list - list - repo-info - info - break-lock - key - borg description: | List of actions for which the commands will be run. Defaults to running for all actions. example: [create, prune, compact, check] run: type: array items: type: string description: | List of one or more shell commands or scripts to run when this command hook is triggered. Required. example: - "echo Doing stuff." - required: [after, run] additionalProperties: false properties: after: type: string enum: - action - repository - configuration - everything - error description: | Name for the point in borgmatic's execution that the commands should be run after (required if "before" isn't set): * "action" runs after each action for each repository. * "repository" runs after all actions for each repository. * "configuration" runs after all actions and repositories in the current configuration file. * "everything" runs after all configuration files. * "error" runs after an error occurs. example: action when: type: array items: type: string enum: - repo-create - transfer - prune - compact - create - recreate - check - delete - extract - config - export-tar - mount - umount - repo-delete - restore - repo-list - list - repo-info - info - break-lock - key - borg description: | Only trigger the hook when borgmatic is run with particular actions listed here. Defaults to running for all actions. example: [create, prune, compact, check] states: type: array items: type: string enum: - finish - fail description: | Only trigger the hook if borgmatic encounters one of the states (execution results) listed here, where: * "finish": No errors occurred. * "fail": An error occurred. This state is evaluated only for the scope of the configured "action", "repository", etc., rather than for the entire borgmatic run. Only available for "after" hooks. Defaults to running the hook for all states. example: - finish run: type: array items: type: string description: | List of one or more shell commands or scripts to run when this command hook is triggered. Required. example: - "echo Doing stuff." description: | List of one or more command hooks to execute, triggered at particular points during borgmatic's execution. For each command hook, specify one of "before" or "after", not both. example: - before: action when: [create] run: [echo Backing up.] bootstrap: type: object additionalProperties: false properties: store_config_files: type: boolean description: | Store configuration files used to create a backup inside the backup itself. Defaults to true. Changing this to false prevents "borgmatic bootstrap" from extracting configuration files from the backup. example: false description: | Support for the "borgmatic bootstrap" action, used to extract borgmatic configuration files from a backup archive. postgresql_databases: type: array items: type: object required: ['name'] additionalProperties: false properties: name: type: string description: | Database name (required if using this hook). Or "all" to dump all databases on the host. (Also set the "format" to dump each database to a separate file instead of one combined file.) Note that using this database hook implicitly enables read_special (see above) to support dump and restore streaming. example: users label: type: string description: | Label to identify the database dump in the backup. 
example: my_backup_label container: type: string description: | Container name/id to connect to. When specified the hostname is ignored. Requires docker/podman CLI. example: debian_stable restore_container: type: string description: | Container name/id to restore to. Defaults to the "container" option. example: restore_container hostname: type: string description: | Database hostname to connect to. Defaults to connecting via local Unix socket. example: database.example.org restore_hostname: type: string description: | Database hostname to restore to. Defaults to the "hostname" option. example: database.example.org port: type: integer description: Port to connect to. Defaults to 5432. example: 5433 restore_port: type: integer description: | Port to restore to. Defaults to the "port" option. example: 5433 username: type: string description: | Username with which to connect to the database. Defaults to the username of the current user. You probably want to specify the "postgres" superuser here when the database name is "all". Supports the "{credential ...}" syntax. example: dbuser restore_username: type: string description: | Username with which to restore the database. Defaults to the "username" option. Supports the "{credential ...}" syntax. example: dbuser password: type: string description: | Password with which to connect to the database. Omitting a password will only work if PostgreSQL is configured to trust the configured username without a password or you create a ~/.pgpass file. Supports the "{credential ...}" syntax. example: trustsome1 restore_password: type: string description: | Password with which to connect to the restore database. Defaults to the "password" option. Supports the "{credential ...}" syntax. example: trustsome1 no_owner: type: boolean description: | Do not output commands to set ownership of objects to match the original database. By default, pg_dump and pg_restore issue ALTER OWNER or SET SESSION AUTHORIZATION statements to set ownership of created schema elements. These statements will fail unless the initial connection to the database is made by a superuser. example: true format: type: string enum: ['plain', 'custom', 'directory', 'tar'] description: | Database dump output format. One of "plain", "custom", "directory", or "tar". Defaults to "custom" (unlike raw pg_dump) for a single database. Or, when database name is "all" and format is blank, dumps all databases to a single file. But if a format is specified with an "all" database name, dumps each database to a separate file of that format, allowing more convenient restores of individual databases. See the pg_dump documentation for more about formats. example: directory compression: type: ["string", "integer"] description: | Database dump compression level (integer) or method ("gzip", "lz4", "zstd", or "none") and optional colon-separated detail. Defaults to moderate "gzip" for "custom" and "directory" formats and no compression for the "plain" format. Compression is not supported for the "tar" format. Be aware that Borg does its own compression as well, so you may not need it in both places. example: none ssl_mode: type: string enum: ['disable', 'allow', 'prefer', 'require', 'verify-ca', 'verify-full'] description: | SSL mode to use to connect to the database server. One of "disable", "allow", "prefer", "require", "verify-ca" or "verify-full". Defaults to "disable". example: require ssl_cert: type: string description: | Path to a client certificate. 
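# Illustrative sketch (YAML comments only): a hypothetical PostgreSQL dump
# configuration combining several of the connection and format options
# documented above; the hostname, credentials, and port are placeholder
# values:
#
#     postgresql_databases:
#         - name: users
#           hostname: database.example.org
#           port: 5433
#           username: dbuser
#           password: trustsome1
#           format: directory
#           ssl_mode: require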
example: "/root/.postgresql/postgresql.crt" ssl_key: type: string description: | Path to a private client key. example: "/root/.postgresql/postgresql.key" ssl_root_cert: type: string description: | Path to a root certificate containing a list of trusted certificate authorities. example: "/root/.postgresql/root.crt" ssl_crl: type: string description: | Path to a certificate revocation list. example: "/root/.postgresql/root.crl" pg_dump_command: type: string description: | Command to use instead of "pg_dump" or "pg_dumpall". This can be used to run a specific pg_dump version (e.g., one inside a running container). If you run it from within a container, make sure to mount the path in the "user_runtime_directory" option from the host into the container at the same location. Defaults to "pg_dump" for single database dump or "pg_dumpall" to dump all databases. example: docker exec my_pg_container pg_dump pg_restore_command: type: string description: | Command to use instead of "pg_restore". This can be used to run a specific pg_restore version (e.g., one inside a running container). Defaults to "pg_restore". example: docker exec my_pg_container pg_restore psql_command: type: string description: | Command to use instead of "psql". This can be used to run a specific psql version (e.g., one inside a running container). Defaults to "psql". example: docker exec my_pg_container psql options: type: string description: | Additional pg_dump/pg_dumpall options to pass directly to the dump command, without performing any validation on them. See pg_dump documentation for details. example: --role=someone list_options: type: string description: | Additional psql options to pass directly to the psql command that lists available databases, without performing any validation on them. See psql documentation for details. example: --role=someone restore_options: type: string description: | Additional pg_restore/psql options to pass directly to the restore command, without performing any validation on them. See pg_restore/psql documentation for details. example: --role=someone analyze_options: type: string description: | Additional psql options to pass directly to the analyze command run after a restore, without performing any validation on them. See psql documentation for details. example: --role=someone description: | List of one or more PostgreSQL databases to dump before creating a backup, run once per configuration file. The database dumps are added to your source directories at runtime and streamed directly to Borg. Requires pg_dump/pg_dumpall/pg_restore commands. See https://www.postgresql.org/docs/current/app-pgdump.html and https://www.postgresql.org/docs/current/libpq-ssl.html for details. example: - name: users hostname: database.example.org mariadb_databases: type: array items: type: object required: ['name'] additionalProperties: false properties: name: type: string description: | Database name (required if using this hook). Or "all" to dump all databases on the host. Note that using this database hook implicitly enables read_special (see above) to support dump and restore streaming. example: users skip_names: type: array items: type: string description: | Database names to skip when dumping "all" databases. Ignored when the database name is not "all". example: - cache label: type: string description: | Label to identify the database dump in the backup. example: my_backup_label container: type: string description: | Container name/id to connect to. When specified the hostname is ignored. 
Requires docker/podman CLI. example: debian_stable restore_container: type: string description: | Container name/id to restore to. Defaults to the "container" option. example: restore_container hostname: type: string description: | Database hostname to connect to. Defaults to connecting via local Unix socket. example: database.example.org restore_hostname: type: string description: | Database hostname to restore to. Defaults to the "hostname" option. example: database.example.org port: type: integer description: Port to connect to. Defaults to 3306. example: 3307 restore_port: type: integer description: | Port to restore to. Defaults to the "port" option. example: 5433 username: type: string description: | Username with which to connect to the database. Defaults to the username of the current user. Supports the "{credential ...}" syntax. example: dbuser restore_username: type: string description: | Username with which to restore the database. Defaults to the "username" option. Supports the "{credential ...}" syntax. example: dbuser password: type: string description: | Password with which to connect to the database. Omitting a password will only work if MariaDB is configured to trust the configured username without a password. Supports the "{credential ...}" syntax. example: trustsome1 restore_password: type: string description: | Password with which to connect to the restore database. Defaults to the "password" option. Supports the "{credential ...}" syntax. example: trustsome1 password_transport: type: string enum: - pipe - environment description: | How to transmit database passwords from borgmatic to the MariaDB client, one of: * "pipe": Securely transmit passwords via anonymous pipe. Only works if the database client is on the same host as borgmatic. (The server can be somewhere else.) This is the default value. * "environment": Transmit passwords via environment variable. Potentially less secure than a pipe, but necessary when the database client is elsewhere, e.g. when "mariadb_dump_command" is configured to "exec" into a container and run a client there. tls: type: boolean description: | Whether to TLS-encrypt data transmitted between the client and server. The default varies based on the MariaDB version. example: false restore_tls: type: boolean description: | Whether to TLS-encrypt data transmitted between the client and restore server. The default varies based on the MariaDB version. example: false mariadb_dump_command: type: string description: | Command to use instead of "mariadb-dump". This can be used to run a specific mariadb_dump version (e.g., one inside a running container). If you run it from within a container, make sure to mount the path in the "user_runtime_directory" option from the host into the container at the same location. Defaults to "mariadb-dump". example: docker exec mariadb_container mariadb-dump mariadb_command: type: string description: | Command to run instead of "mariadb". This can be used to run a specific mariadb version (e.g., one inside a running container). Defaults to "mariadb". example: docker exec mariadb_container mariadb format: type: string enum: ['sql'] description: | Database dump output format. Currently only "sql" is supported. Defaults to "sql" for a single database. Or, when database name is "all" and format is blank, dumps all databases to a single file. But if a format is specified with an "all" database name, dumps each database to a separate file of that format, allowing more convenient restores of individual databases. 
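# Illustrative sketch (YAML comments only): per the "password_transport"
# description above, when "mariadb_dump_command" execs into a container
# (hypothetical name below), the database client runs outside borgmatic's
# host process, so the default pipe transport cannot reach it and the
# environment transport is needed instead:
#
#     mariadb_databases:
#         - name: users
#           password: trustsome1
#           password_transport: environment
#           mariadb_dump_command: docker exec my_mariadb_container mariadb-dump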
example: directory add_drop_database: type: boolean description: | Use the "--add-drop-database" flag with mariadb-dump, causing the database to be dropped right before restore. Defaults to true. example: false options: type: string description: | Additional mariadb-dump options to pass directly to the dump command, without performing any validation on them. See mariadb-dump documentation for details. example: --skip-comments list_options: type: string description: | Additional options to pass directly to the mariadb command that lists available databases, without performing any validation on them. See mariadb command documentation for details. example: --defaults-extra-file=mariadb.cnf restore_options: type: string description: | Additional options to pass directly to the mariadb command that restores database dumps, without performing any validation on them. See mariadb command documentation for details. example: --defaults-extra-file=mariadb.cnf description: | List of one or more MariaDB databases to dump before creating a backup, run once per configuration file. The database dumps are added to your source directories at runtime and streamed directly to Borg. Requires mariadb-dump/mariadb commands. See https://mariadb.com/kb/en/library/mysqldump/ for details. example: - name: users hostname: database.example.org mysql_databases: type: array items: type: object required: ['name'] additionalProperties: false properties: name: type: string description: | Database name (required if using this hook). Or "all" to dump all databases on the host. Note that using this database hook implicitly enables read_special (see above) to support dump and restore streaming. example: users skip_names: type: array items: type: string description: | Database names to skip when dumping "all" databases. Ignored when the database name is not "all". example: - cache label: type: string description: | Label to identify the database dump in the backup. example: my_backup_label container: type: string description: | Container name/id to connect to. When specified the hostname is ignored. Requires docker/podman CLI. example: debian_stable restore_container: type: string description: | Container name/id to restore to. Defaults to the "container" option. example: restore_container hostname: type: string description: | Database hostname to connect to. Defaults to connecting via local Unix socket. example: database.example.org restore_hostname: type: string description: | Database hostname to restore to. Defaults to the "hostname" option. example: database.example.org port: type: integer description: Port to connect to. Defaults to 3306. example: 3307 restore_port: type: integer description: | Port to restore to. Defaults to the "port" option. example: 5433 username: type: string description: | Username with which to connect to the database. Defaults to the username of the current user. Supports the "{credential ...}" syntax. example: dbuser restore_username: type: string description: | Username with which to restore the database. Defaults to the "username" option. Supports the "{credential ...}" syntax. example: dbuser password: type: string description: | Password with which to connect to the database. Omitting a password will only work if MySQL is configured to trust the configured username without a password. Supports the "{credential ...}" syntax. example: trustsome1 restore_password: type: string description: | Password with which to connect to the restore database. Defaults to the "password" option. 
Supports the "{credential ...}" syntax. example: trustsome1 password_transport: type: string enum: - pipe - environment description: | How to transmit database passwords from borgmatic to the MySQL client, one of: * "pipe": Securely transmit passwords via anonymous pipe. Only works if the database client is on the same host as borgmatic. (The server can be somewhere else.) This is the default value. * "environment": Transmit passwords via environment variable. Potentially less secure than a pipe, but necessary when the database client is elsewhere, e.g. when "mysql_dump_command" is configured to "exec" into a container and run a client there. tls: type: boolean description: | Whether to TLS-encrypt data transmitted between the client and server. The default varies based on the MySQL installation. example: false restore_tls: type: boolean description: | Whether to TLS-encrypt data transmitted between the client and restore server. The default varies based on the MySQL installation. example: false mysql_dump_command: type: string description: | Command to use instead of "mysqldump". This can be used to run a specific mysql_dump version (e.g., one inside a running container). If you run it from within a container, make sure to mount the path in the "user_runtime_directory" option from the host into the container at the same location. Defaults to "mysqldump". example: docker exec mysql_container mysqldump mysql_command: type: string description: | Command to run instead of "mysql". This can be used to run a specific mysql version (e.g., one inside a running container). Defaults to "mysql". example: docker exec mysql_container mysql format: type: string enum: ['sql'] description: | Database dump output format. Currently only "sql" is supported. Defaults to "sql" for a single database. Or, when database name is "all" and format is blank, dumps all databases to a single file. But if a format is specified with an "all" database name, dumps each database to a separate file of that format, allowing more convenient restores of individual databases. example: directory add_drop_database: type: boolean description: | Use the "--add-drop-database" flag with mysqldump, causing the database to be dropped right before restore. Defaults to true. example: false options: type: string description: | Additional mysqldump options to pass directly to the dump command, without performing any validation on them. See mysqldump documentation for details. example: --skip-comments list_options: type: string description: | Additional options to pass directly to the mysql command that lists available databases, without performing any validation on them. See mysql command documentation for details. example: --defaults-extra-file=my.cnf restore_options: type: string description: | Additional options to pass directly to the mysql command that restores database dumps, without performing any validation on them. See mysql command documentation for details. example: --defaults-extra-file=my.cnf description: | List of one or more MySQL databases to dump before creating a backup, run once per configuration file. The database dumps are added to your source directories at runtime and streamed directly to Borg. Requires mysqldump/mysql commands. See https://dev.mysql.com/doc/refman/8.0/en/mysqldump.html for details. 
example: - name: users hostname: database.example.org sqlite_databases: type: array items: type: object required: ['path','name'] additionalProperties: false properties: name: type: string description: | This is used to tag the database dump file with a name. It is not the path to the database file itself. The name "all" has no special meaning for SQLite databases. example: users path: type: string description: | Path to the SQLite database file to dump. If relative, it is relative to the current working directory. Note that using this database hook implicitly enables read_special (see above) to support dump and restore streaming. example: /var/lib/sqlite/users.db label: type: string description: | Label to identify the database dump in the backup. example: my_backup_label restore_path: type: string description: | Path to the SQLite database file to restore to. Defaults to the "path" option. example: /var/lib/sqlite/users.db sqlite_command: type: string description: | Command to use instead of "sqlite3". This can be used to run a specific sqlite3 version (e.g., one inside a running container). If you run it from within a container, make sure to mount the path in the "user_runtime_directory" option from the host into the container at the same location. Defaults to "sqlite3". example: docker exec sqlite_container sqlite3 sqlite_restore_command: type: string description: | Command to run when restoring a database instead of "sqlite3". This can be used to run a specific sqlite3 version (e.g., one inside a running container). Defaults to "sqlite3". example: docker exec sqlite_container sqlite3 description: | List of one or more SQLite databases to dump before creating a backup, run once per configuration file. The database dumps are added to your source directories at runtime and streamed directly to Borg. Requires the sqlite3 command. See https://sqlite.org/cli.html for details. example: - name: users path: /var/lib/db.sqlite mongodb_databases: type: array items: type: object required: ['name'] additionalProperties: false properties: name: type: string description: | Database name (required if using this hook). Or "all" to dump all databases on the host. Note that using this database hook implicitly enables read_special (see above) to support dump and restore streaming. example: users label: type: string description: | Label to identify the database dump in the backup. example: my_backup_label container: type: string description: | Container name/id to connect to. When specified the hostname is ignored. Requires docker/podman CLI. example: debian_stable restore_container: type: string description: | Container name/id to restore to. Defaults to the "container" option. example: restore_container hostname: type: string description: | Database hostname to connect to. Defaults to connecting to localhost. example: database.example.org restore_hostname: type: string description: | Database hostname to restore to. Defaults to the "hostname" option. example: database.example.org port: type: integer description: Port to connect to. Defaults to 27017. example: 27018 restore_port: type: integer description: | Port to restore to. Defaults to the "port" option. example: 5433 username: type: string description: | Username with which to connect to the database. Skip it if no authentication is needed. Supports the "{credential ...}" syntax. example: dbuser restore_username: type: string description: | Username with which to restore the database. Defaults to the "username" option. 
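# Illustrative sketch (YAML comments only): a hypothetical SQLite dump
# configuration per the options documented above. "name" is only a label for
# the dump file, while "path" points at the actual database; the restore
# target below is a placeholder:
#
#     sqlite_databases:
#         - name: users
#           path: /var/lib/sqlite/users.db
#           restore_path: /var/lib/sqlite/users.restored.db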
Supports the "{credential ...}" syntax. example: dbuser password: type: string description: | Password with which to connect to the database. Skip it if no authentication is needed. Supports the "{credential ...}" syntax. example: trustsome1 restore_password: type: string description: | Password with which to connect to the restore database. Defaults to the "password" option. Supports the "{credential ...}" syntax. example: trustsome1 authentication_database: type: string description: | Authentication database where the specified username exists. If no authentication database is specified, the database provided in "name" is used. If "name" is "all", the "admin" database is used. example: admin format: type: string enum: ['archive', 'directory'] description: | Database dump output format. One of "archive", or "directory". Defaults to "archive". See mongodump documentation for details. Note that format is ignored when the database name is "all". example: directory options: type: string description: | Additional mongodump options to pass directly to the dump command, without performing any validation on them. See mongodump documentation for details. example: --dumpDbUsersAndRoles restore_options: type: string description: | Additional mongorestore options to pass directly to the dump command, without performing any validation on them. See mongorestore documentation for details. example: --restoreDbUsersAndRoles mongodump_command: type: string description: | Command to use instead of "mongodump". This can be used to run a specific mongodump version (e.g., one inside a running container). If you run it from within a container, make sure to mount the path in the "user_runtime_directory" option from the host into the container at the same location. Defaults to "mongodump". example: docker exec mongodb_container mongodump mongorestore_command: type: string description: | Command to run when restoring a database instead of "mongorestore". This can be used to run a specific mongorestore version (e.g., one inside a running container). Defaults to "mongorestore". example: docker exec mongodb_container mongorestore description: | List of one or more MongoDB databases to dump before creating a backup, run once per configuration file. The database dumps are added to your source directories at runtime and streamed directly to Borg. Requires mongodump/mongorestore commands. See https://docs.mongodb.com/database-tools/mongodump/ and https://docs.mongodb.com/database-tools/mongorestore/ for details. example: - name: users hostname: database.example.org ntfy: type: object required: ['topic'] additionalProperties: false properties: topic: type: string description: | The topic to publish to. See https://ntfy.sh/docs/publish/ for details. example: topic server: type: string description: | The address of your self-hosted ntfy.sh instance. example: https://ntfy.your-domain.com username: type: string description: | The username used for authentication. Supports the "{credential ...}" syntax. example: testuser password: type: string description: | The password used for authentication. Supports the "{credential ...}" syntax. example: fakepassword access_token: type: string description: | An ntfy access token to authenticate with instead of username/password. Supports the "{credential ...}" syntax. example: tk_AgQdq7mVBoFD37zQVN29RhuMzNIz2 start: type: object additionalProperties: false properties: title: type: string description: | The title of the message. example: Ping! 
message: type: string description: | The message body to publish. example: Your backups have started. priority: type: string description: | The priority to set. example: min tags: type: string description: | Tags to attach to the message. example: borgmatic finish: type: object additionalProperties: false properties: title: type: string description: | The title of the message. example: Ping! message: type: string description: | The message body to publish. example: Your backups have finished. priority: type: string description: | The priority to set. example: min tags: type: string description: | Tags to attach to the message. example: borgmatic,+1 fail: type: object additionalProperties: false properties: title: type: string description: | The title of the message. example: Ping! message: type: string description: | The message body to publish. example: Your backups have failed. priority: type: string description: | The priority to set. example: max tags: type: string description: | Tags to attach to the message. example: borgmatic,-1,skull states: type: array items: type: string enum: - start - finish - fail uniqueItems: true description: | List of one or more monitoring states to ping for: "start", "finish", and/or "fail". Defaults to pinging for failure only. example: - start - finish pushover: type: object required: ['token', 'user'] additionalProperties: false properties: token: type: string description: | Your application's API token. Supports the "{credential ...}" syntax. example: 7ms6TXHpTokTou2P6x4SodDeentHRa user: type: string description: | Your user/group key (or that of your target user), viewable when logged into your dashboard: often referred to as USER_KEY in Pushover documentation and code examples. Supports the "{credential ...}" syntax. example: hwRwoWsXMBWwgrSecfa9EfPey55WSN start: type: object additionalProperties: false properties: message: type: string description: | Message to be sent to the user or group. If omitted, the default is the name of the state. example: A backup job has started. priority: type: integer description: | A value of -2, -1, 0 (default), 1 or 2 that indicates the message priority. example: 0 expire: type: integer description: | How many seconds your notification will continue to be retried (every "retry" seconds). Defaults to 600. This setting only applies to priority 2 notifications. example: 600 retry: type: integer description: | The retry parameter specifies how often (in seconds) the Pushover servers will send the same notification to the user. Defaults to 30. This setting only applies to priority 2 notifications. example: 30 device: type: string description: | The name of one of your devices to send just to that device instead of all devices. example: pixel8 html: type: boolean description: | Set to true to enable HTML parsing of the message. Set to false for plain text. example: true sound: type: string description: | The name of a supported sound to override your default sound choice. All options can be found here: https://pushover.net/api#sounds example: bike title: type: string description: | Your message's title, otherwise your app's name is used. example: A backup job has started. ttl: type: integer description: | The number of seconds that the message will live, before being deleted automatically. The ttl parameter is ignored for messages with a priority value of 2. example: 3600 url: type: string description: | A supplementary URL to show with your message. example: https://pushover.net/apps/xxxxx-borgbackup url_title: type: string description: | A title for the URL specified as the url parameter, otherwise just the URL is shown. example: Pushover Link finish: type: object additionalProperties: false properties: message: type: string description: | Message to be sent to the user or group. If omitted, the default is the name of the state. example: A backup job has finished. priority: type: integer description: | A value of -2, -1, 0 (default), 1 or 2 that indicates the message priority. example: 0 expire: type: integer description: | How many seconds your notification will continue to be retried (every "retry" seconds). Defaults to 600. This setting only applies to priority 2 notifications. example: 600 retry: type: integer description: | The retry parameter specifies how often (in seconds) the Pushover servers will send the same notification to the user. Defaults to 30. This setting only applies to priority 2 notifications. example: 30 device: type: string description: | The name of one of your devices to send just to that device instead of all devices. example: pixel8 html: type: boolean description: | Set to true to enable HTML parsing of the message. Set to false for plain text. example: true sound: type: string description: | The name of a supported sound to override your default sound choice. All options can be found here: https://pushover.net/api#sounds example: bike title: type: string description: | Your message's title, otherwise your app's name is used. example: A backup job has started. ttl: type: integer description: | The number of seconds that the message will live, before being deleted automatically. The ttl parameter is ignored for messages with a priority value of 2. example: 3600 url: type: string description: | A supplementary URL to show with your message. example: https://pushover.net/apps/xxxxx-borgbackup url_title: type: string description: | A title for the URL specified as the url parameter, otherwise just the URL is shown. example: Pushover Link fail: type: object additionalProperties: false properties: message: type: string description: | Message to be sent to the user or group. If omitted, the default is the name of the state. example: A backup job has failed. priority: type: integer description: | A value of -2, -1, 0 (default), 1 or 2 that indicates the message priority. example: 0 expire: type: integer description: | How many seconds your notification will continue to be retried (every "retry" seconds). Defaults to 600. This setting only applies to priority 2 notifications. example: 600 retry: type: integer description: | The retry parameter specifies how often (in seconds) the Pushover servers will send the same notification to the user. Defaults to 30. This setting only applies to priority 2 notifications. example: 30 device: type: string description: | The name of one of your devices to send just to that device instead of all devices. example: pixel8 html: type: boolean description: | Set to true to enable HTML parsing of the message. Set to false for plain text. example: true sound: type: string description: | The name of a supported sound to override your default sound choice. All options can be found here: https://pushover.net/api#sounds example: bike title: type: string description: | Your message's title, otherwise your app's name is used. example: A backup job has started. ttl: type: integer description: | The number of seconds that the message will live, before being deleted automatically. The ttl parameter is ignored for messages with a priority value of 2. example: 3600 url: type: string description: | A supplementary URL to show with your message. example: https://pushover.net/apps/xxxxx-borgbackup url_title: type: string description: | A title for the URL specified as the url parameter, otherwise just the URL is shown. example: Pushover Link states: type: array items: type: string enum: - start - finish - fail uniqueItems: true description: | List of one or more monitoring states to ping for: "start", "finish", and/or "fail". Defaults to pinging for failure only. example: - start - finish zabbix: type: object additionalProperties: false required: - server properties: itemid: type: integer description: | The ID of the Zabbix item used for collecting data. Unique across the entire Zabbix system. example: 55105 host: type: string description: | Host name where the item is stored. Required if "itemid" is not set. example: borg-server key: type: string description: | Key of the host where the item is stored. Required if "itemid" is not set. example: borg.status server: type: string description: | The API endpoint URL of your Zabbix instance, usually ending with "/api_jsonrpc.php". Required. example: https://zabbix.your-domain.com username: type: string description: | The username used for authentication. Not needed if using an API key. Supports the "{credential ...}" syntax. example: testuser password: type: string description: | The password used for authentication. Not needed if using an API key. Supports the "{credential ...}" syntax. example: fakepassword api_key: type: string description: | The API key used for authentication. Not needed if using a username/password. Supports the "{credential ...}" syntax. example: fakekey start: type: object additionalProperties: false properties: value: type: ["integer", "string"] description: | The value to set the item to on start. example: STARTED finish: type: object additionalProperties: false properties: value: type: ["integer", "string"] description: | The value to set the item to on finish. example: FINISH fail: type: object additionalProperties: false properties: value: type: ["integer", "string"] description: | The value to set the item to on fail. example: ERROR states: type: array items: type: string enum: - start - finish - fail uniqueItems: true description: | List of one or more monitoring states to ping for: "start", "finish", and/or "fail". Defaults to pinging for failure only. example: - start - finish apprise: type: object required: ['services'] additionalProperties: false properties: services: type: array items: type: object additionalProperties: false required: - url - label properties: url: type: string description: URL of this Apprise service. example: "gotify://hostname/token" label: type: string description: | Label used in borgmatic logs for this Apprise service. example: gotify description: | A list of Apprise services to publish to with URLs and labels. The labels are used for logging. A full list of services and their configuration can be found at https://github.com/caronc/apprise/wiki. example: - url: "kodi://user@hostname" label: kodi - url: "line://Token@User" label: line send_logs: type: boolean description: | Send borgmatic logs to Apprise services as part of the "finish", "fail", and "log" states. Defaults to true. example: false logs_size_limit: type: integer description: | Number of bytes of borgmatic logs to send to Apprise services. Set to 0 to send all logs and disable this truncation.
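# Illustrative sketch (YAML comments only): a hypothetical Pushover
# configuration using the options documented above. Priority 2 (emergency)
# failure messages keep retrying every "retry" seconds until acknowledged or
# until "expire" seconds elapse; the token and user key are placeholder
# values:
#
#     pushover:
#         token: 7ms6TXHpTokTou2P6x4SodDeentHRa
#         user: hwRwoWsXMBWwgrSecfa9EfPey55WSN
#         fail:
#             priority: 2
#             expire: 600
#             retry: 30
#         states:
#             - fail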
Defaults to 1500. example: 100000 start: type: object required: ['body'] additionalProperties: false properties: title: type: string description: | Specify the message title. If left unspecified, no title is sent. example: Ping! body: type: string description: | Specify the message body. example: Starting backup process. finish: type: object required: ['body'] additionalProperties: false properties: title: type: string description: | Specify the message title. If left unspecified, no title is sent. example: Ping! body: type: string description: | Specify the message body. example: Backups successfully made. fail: type: object required: ['body'] additionalProperties: false properties: title: type: string description: | Specify the message title. If left unspecified, no title is sent. example: Ping! body: type: string description: | Specify the message body. example: Your backups have failed. log: type: object required: ['body'] additionalProperties: false properties: title: type: string description: | Specify the message title. If left unspecified, no title is sent. example: Ping! body: type: string description: | Specify the message body. example: Here is some info about your backups. states: type: array items: type: string enum: - start - finish - fail - log uniqueItems: true description: | List of one or more monitoring states to ping for: "start", "finish", "fail", and/or "log". Defaults to pinging for failure only. For each selected state, corresponding configuration for the message title and body should be given. If any is left unspecified, a generic message is emitted instead. example: - start - finish healthchecks: type: object required: ['ping_url'] additionalProperties: false properties: ping_url: type: string description: | Healthchecks ping URL or UUID to notify when a backup begins, ends, errors, or to send only logs. example: https://hc-ping.com/your-uuid-here verify_tls: type: boolean description: | Verify the TLS certificate of the ping URL host. Defaults to true. example: false send_logs: type: boolean description: | Send borgmatic logs to Healthchecks as part of the "finish", "fail", and "log" states. Defaults to true. example: false ping_body_limit: type: integer description: | Number of bytes of borgmatic logs to send to Healthchecks, ideally the same as PING_BODY_LIMIT configured on the Healthchecks server. Set to 0 to send all logs and disable this truncation. Defaults to 100000. example: 200000 states: type: array items: type: string enum: - start - finish - fail - log uniqueItems: true description: | List of one or more monitoring states to ping for: "start", "finish", "fail", and/or "log". Defaults to pinging for all states. example: - finish create_slug: type: boolean description: | Create the check if it does not exist. Only works with the slug URL scheme (https://hc-ping.com/<ping-key>/<slug> as opposed to https://hc-ping.com/<uuid>). Defaults to false. example: true description: | Configuration for a monitoring integration with Healthchecks. Create an account at https://healthchecks.io (or self-host Healthchecks) if you'd like to use this service. See borgmatic monitoring documentation for details. uptime_kuma: type: object required: ['push_url'] additionalProperties: false properties: push_url: type: string description: | Uptime Kuma push URL without query string (do not include the question mark or anything after it).
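# Illustrative sketch (YAML comments only): a hypothetical Healthchecks
# configuration per the options documented above, capping the log bytes sent
# to match a self-hosted server's PING_BODY_LIMIT; the ping URL is a
# placeholder:
#
#     healthchecks:
#         ping_url: https://hc-ping.com/your-uuid-here
#         ping_body_limit: 100000
#         states:
#             - start
#             - finish
#             - fail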
example: https://example.uptime.kuma/api/push/abcd1234 states: type: array items: type: string enum: - start - finish - fail uniqueItems: true description: | List of one or more monitoring states to push for: "start", "finish", and/or "fail". Defaults to pushing for all states. example: - start - finish - fail verify_tls: type: boolean description: | Verify the TLS certificate of the push URL host. Defaults to true. example: false description: | Configuration for a monitoring integration with Uptime Kuma using the Push monitor type. See more information here: https://uptime.kuma.pet cronitor: type: object required: ['ping_url'] additionalProperties: false properties: ping_url: type: string description: | Cronitor ping URL to notify when a backup begins, ends, or errors. example: https://cronitor.link/d3x0c1 description: | Configuration for a monitoring integration with Cronitor. Create an account at https://cronitor.io if you'd like to use this service. See borgmatic monitoring documentation for details. pagerduty: type: object required: ['integration_key'] additionalProperties: false properties: integration_key: type: string description: | PagerDuty integration key used to notify PagerDuty when a backup errors. Supports the "{credential ...}" syntax. example: a177cad45bd374409f78906a810a3074 send_logs: type: boolean description: | Send borgmatic logs to PagerDuty when a backup errors. Defaults to true. example: false description: | Configuration for a monitoring integration with PagerDuty. Create an account at https://www.pagerduty.com if you'd like to use this service. See borgmatic monitoring documentation for details. cronhub: type: object required: ['ping_url'] additionalProperties: false properties: ping_url: type: string description: | Cronhub ping URL to notify when a backup begins, ends, or errors. example: https://cronhub.io/ping/1f5e3410-254c-5587 description: | Configuration for a monitoring integration with Cronhub. Create an account at https://cronhub.io if you'd like to use this service. See borgmatic monitoring documentation for details. loki: type: object required: ['url', 'labels'] additionalProperties: false properties: url: type: string description: | Grafana loki log URL to notify when a backup begins, ends, or fails. example: "http://localhost:3100/loki/api/v1/push" labels: type: object additionalProperties: type: string description: | Allows setting custom labels for the logging stream. At least one label is required. "__hostname" gets replaced by the machine hostname automatically. "__config" gets replaced by the name of the configuration file. "__config_path" gets replaced by the full path of the configuration file. example: app: "borgmatic" config: "__config" hostname: "__hostname" description: | Configuration for a monitoring integration with Grafana Loki. You can send the logs to a self-hosted instance or create an account at https://grafana.com/auth/sign-up/create-user. See borgmatic monitoring documentation for details. sentry: type: object required: ['data_source_name_url', 'monitor_slug'] additionalProperties: false properties: data_source_name_url: type: string description: | Sentry Data Source Name (DSN) URL, associated with a particular Sentry project. Used to construct a cron URL, notified when a backup begins, ends, or errors. example: https://5f80ec@o294220.ingest.us.sentry.io/203069 monitor_slug: type: string description: | Sentry monitor slug, associated with a particular Sentry project monitor. 
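# Illustrative sketch (YAML comments only): a hypothetical Grafana Loki stream
# using the placeholder labels documented above; "__hostname" and "__config"
# are expanded by borgmatic at runtime:
#
#     loki:
#         url: "http://localhost:3100/loki/api/v1/push"
#         labels:
#             app: borgmatic
#             hostname: "__hostname"
#             config: "__config"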
Used along with the data source name URL to construct a cron URL. example: mymonitor environment: type: string description: | Sentry monitor environment used in the call to Sentry. If not set, the Sentry default is used. example: production states: type: array items: type: string enum: - start - finish - fail uniqueItems: true description: | List of one or more monitoring states to ping for: "start", "finish", and/or "fail". Defaults to pinging for all states. example: - start - finish description: | Configuration for a monitoring integration with Sentry. You can use a self-hosted instance via https://develop.sentry.dev/self-hosted/ or create a cloud-hosted account at https://sentry.io. See borgmatic monitoring documentation for details. zfs: type: ["object", "null"] additionalProperties: false properties: zfs_command: type: string description: | Command to use instead of "zfs". example: /usr/local/bin/zfs mount_command: type: string description: | Command to use instead of "mount". example: /usr/local/bin/mount umount_command: type: string description: | Command to use instead of "umount". example: /usr/local/bin/umount description: | Configuration for integration with the ZFS filesystem. btrfs: type: ["object", "null"] additionalProperties: false properties: btrfs_command: type: string description: | Command to use instead of "btrfs". example: /usr/local/bin/btrfs findmnt_command: type: string description: | Deprecated and unused. Was the command to use instead of "findmnt". example: /usr/local/bin/findmnt description: | Configuration for integration with the Btrfs filesystem. lvm: type: ["object", "null"] additionalProperties: false properties: snapshot_size: type: string description: | Size to allocate for each snapshot taken, including the units to use for that size. Defaults to "10%ORIGIN" (10% of the size of logical volume being snapshotted). See the lvcreate "--size" and "--extents" documentation for more information: https://www.man7.org/linux/man-pages/man8/lvcreate.8.html example: 5GB lvcreate_command: type: string description: | Command to use instead of "lvcreate". example: /usr/local/bin/lvcreate lvremove_command: type: string description: | Command to use instead of "lvremove". example: /usr/local/bin/lvremove lvs_command: type: string description: | Command to use instead of "lvs". example: /usr/local/bin/lvs lsblk_command: type: string description: | Command to use instead of "lsblk". example: /usr/local/bin/lsblk mount_command: type: string description: | Command to use instead of "mount". example: /usr/local/bin/mount umount_command: type: string description: | Command to use instead of "umount". example: /usr/local/bin/umount description: | Configuration for integration with Linux LVM (Logical Volume Manager). systemd: type: object additionalProperties: false properties: systemd_creds_command: type: string description: | Command to use instead of "systemd-creds". Only used as a fallback when borgmatic is run outside of a systemd service. example: /usr/local/bin/systemd-creds encrypted_credentials_directory: type: string description: | Directory containing encrypted credentials for "systemd-creds" to use instead of "/etc/credstore.encrypted". example: /path/to/credstore.encrypted description: | Configuration for integration with systemd credentials. container: type: object additionalProperties: false properties: secrets_directory: type: string description: | Secrets directory to use instead of "/run/secrets". 
example: /path/to/secrets description: | Configuration for integration with Docker or Podman secrets. keepassxc: type: object additionalProperties: false properties: keepassxc_cli_command: type: string description: | Command to use instead of "keepassxc-cli". example: /usr/local/bin/keepassxc-cli key_file: type: string description: | Path to a key file for unlocking the KeePassXC database. example: /path/to/keyfile yubikey: type: string description: | YubiKey slot and optional serial number used to access the KeePassXC database. The format is "SLOT[:SERIAL]", where: * SLOT is the YubiKey slot number (e.g., `1` or `2`). * SERIAL (optional) is the YubiKey's serial number (e.g., `7370001`). example: "1:7370001" description: | Configuration for integration with the KeePassXC password manager. borgmatic/borgmatic/config/validate.py000066400000000000000000000175221510202216200204020ustar00rootroot00000000000000import fnmatch import os import jsonschema import ruamel.yaml import borgmatic.config.arguments from borgmatic.config import constants, environment, load, normalize, override def schema_filename(): ''' Path to the installed YAML configuration schema file, used to validate and parse the configuration. Raise FileNotFoundError when the schema path does not exist. ''' schema_path = os.path.join(os.path.dirname(borgmatic.config.__file__), 'schema.yaml') with open(schema_path, encoding='utf-8'): return schema_path def load_schema(schema_path): # pragma: no cover ''' Given a schema filename path, load the schema and return it as a dict. Raise Validation_error if the schema could not be parsed. ''' try: return load.load_configuration(schema_path) except (ruamel.yaml.error.YAMLError, RecursionError) as error: raise Validation_error(schema_path, (str(error),)) def format_json_error_path_element(path_element): ''' Given a path element into a JSON data structure, format it for display as a string. ''' if isinstance(path_element, int): return str(f'[{path_element}]') return str(f'.{path_element}') def format_json_error(error): ''' Given an instance of jsonschema.exceptions.ValidationError, format it for display as a string. ''' if not error.path: return f'At the top level: {error.message}' formatted_path = ''.join(format_json_error_path_element(element) for element in error.path) return f"At '{formatted_path.lstrip('.')}': {error.message}" class Validation_error(ValueError): ''' A collection of error messages generated when attempting to validate a particular configuration file. ''' def __init__(self, config_filename, errors): ''' Given a configuration filename path and a sequence of string error messages, create a Validation_error. ''' self.config_filename = config_filename self.errors = errors def __str__(self): ''' Render a validation error as a user-facing string. ''' return ( f'An error occurred while parsing a configuration file at {self.config_filename}:\n' + '\n'.join(error for error in self.errors) ) def apply_logical_validation(config_filename, parsed_configuration): ''' Given a parsed and schematically valid configuration as a data structure of nested dicts (see below), run through any additional logical validation checks. If there are any such validation problems, raise a Validation_error.
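For instance, a configuration that lists a repository in "check_repositories" without also declaring it in "repositories" fails this validation, even though it is schematically valid.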
''' repositories = parsed_configuration.get('repositories') check_repositories = parsed_configuration.get('check_repositories', []) for repository in check_repositories: if not any( repositories_match(repository, config_repository) for config_repository in repositories ): raise Validation_error( config_filename, (f'Unknown repository in "check_repositories": {repository}',), ) def parse_configuration( config_filename, schema_filename, arguments, overrides=None, resolve_env=True, ): ''' Given the path to a config filename in YAML format, the path to a schema filename in a YAML rendition of JSON Schema format, arguments as dict from action name to argparse.Namespace, a sequence of configuration file override strings in the form of "option.suboption=value", and whether to resolve environment variables, return the parsed configuration as a data structure of nested dicts and lists corresponding to the schema. Example return value. Example return value: { 'source_directories': ['/home', '/etc'], 'repository': 'hostname.borg', 'keep_daily': 7, 'checks': ['repository', 'archives'], } Also return a set of loaded configuration paths and a sequence of logging.LogRecord instances containing any warnings about the configuration. Raise FileNotFoundError if the file does not exist, PermissionError if the user does not have permissions to read the file, or Validation_error if the config does not match the schema. ''' config_paths = set() try: config = load.load_configuration(config_filename, config_paths) schema = load.load_configuration(schema_filename) except (ruamel.yaml.error.YAMLError, RecursionError) as error: raise Validation_error(config_filename, (str(error),)) borgmatic.config.arguments.apply_arguments_to_config(config, schema, arguments) override.apply_overrides(config, schema, overrides) constants.apply_constants(config, config.get('constants') if config else {}) if resolve_env: environment.resolve_env_variables(config) logs = normalize.normalize(config_filename, config) try: validator = jsonschema.Draft7Validator(schema) except AttributeError: # pragma: no cover validator = jsonschema.Draft4Validator(schema) validation_errors = tuple(validator.iter_errors(config)) if validation_errors: raise Validation_error( config_filename, tuple(format_json_error(error) for error in validation_errors), ) apply_logical_validation(config_filename, config) return config, config_paths, logs def normalize_repository_path(repository, base=None): ''' Given a repository path, return the absolute path of it (for local repositories). Optionally, use a base path for resolving relative paths, e.g. to the configured working directory. ''' # A colon in the repository could mean that it's either a file:// URL or a remote repository. # If it's a remote repository, we don't want to normalize it. If it's a file:// URL, we do. if ':' not in repository: return ( os.path.abspath(os.path.join(base, repository)) if base else os.path.abspath(repository) ) if repository.startswith('file://'): local_path = repository.partition('file://')[-1] return ( os.path.abspath(os.path.join(base, local_path)) if base else os.path.abspath(local_path) ) return repository def glob_match(first, second): ''' Given two strings, return whether the first matches the second. Globs are supported. 
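For instance, glob_match('/home', '/home') and glob_match('/mnt/*', '/mnt/backups') both return True, while glob_match(None, '/home') returns False.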
''' if first is None or second is None: return False return fnmatch.fnmatch(first, second) or fnmatch.fnmatch(second, first) def repositories_match(first, second): ''' Given two repository dicts with keys "path" (relative and/or absolute), and "label", two repository paths as strings, or a mix of the two formats, return whether they match. Globs are supported. ''' if isinstance(first, str): first = {'path': first, 'label': first} if isinstance(second, str): second = {'path': second, 'label': second} return glob_match(first.get('label'), second.get('label')) or glob_match( normalize_repository_path(first.get('path')), normalize_repository_path(second.get('path')), ) def guard_configuration_contains_repository(repository, configurations): ''' Given a repository path and a dict mapping from config filename to corresponding parsed config dict, ensure that the repository is declared at least once in all of the configurations. If no repository is given, skip this check. Raise ValueError if the repository is not found in any configurations. ''' if not repository: return count = len( tuple( config_repository for config in configurations.values() for config_repository in config['repositories'] if repositories_match(config_repository, repository) ), ) if count == 0: raise ValueError(f'Repository "{repository}" not found in configuration files') borgmatic/borgmatic/execute.py000066400000000000000000000435621510202216200170110ustar00rootroot00000000000000import collections import enum import logging import select import subprocess import textwrap import borgmatic.logger logger = logging.getLogger(__name__) ERROR_OUTPUT_MAX_LINE_COUNT = 25 BORG_ERROR_EXIT_CODE_START = 2 BORG_ERROR_EXIT_CODE_END = 99 class Exit_status(enum.Enum): STILL_RUNNING = 1 SUCCESS = 2 WARNING = 3 ERROR = 4 def interpret_exit_code(command, exit_code, borg_local_path=None, borg_exit_codes=None): ''' Return an Exit_status value (e.g. SUCCESS, ERROR, or WARNING) based on interpreting the given exit code. If a Borg local path is given and matches the process' command, then interpret the exit code based on Borg's documented exit code semantics. And if Borg exit codes are given as a sequence of exit code configuration dicts, then take those configured preferences into account. ''' if exit_code is None: return Exit_status.STILL_RUNNING if exit_code == 0: return Exit_status.SUCCESS if borg_local_path and command[0] == borg_local_path: # First try looking for the exit code in the borg_exit_codes configuration. for entry in borg_exit_codes or (): if entry.get('code') == exit_code: treat_as = entry.get('treat_as') if treat_as == 'error': logger.error( f'Treating exit code {exit_code} as an error, as per configuration', ) return Exit_status.ERROR if treat_as == 'warning': logger.warning( f'Treating exit code {exit_code} as a warning, as per configuration', ) return Exit_status.WARNING # If the exit code doesn't have explicit configuration, then fall back to the default Borg # behavior. return ( Exit_status.ERROR if ( exit_code < 0 or ( exit_code >= BORG_ERROR_EXIT_CODE_START and exit_code <= BORG_ERROR_EXIT_CODE_END ) ) else Exit_status.WARNING ) return Exit_status.ERROR def command_for_process(process): ''' Given a process as an instance of subprocess.Popen, return the command string that was used to invoke it. 
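For instance, a process invoked with ('borg', 'list', '--json') produces the string 'borg list --json'.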
''' return process.args if isinstance(process.args, str) else ' '.join(process.args) def output_buffer_for_process(process, exclude_stdouts): ''' Given a process as an instance of subprocess.Popen and a sequence of stdouts to exclude, return either the process's stdout or stderr. The idea is that if stdout is excluded for a process, we still have stderr to log. ''' return process.stderr if process.stdout in exclude_stdouts else process.stdout def append_last_lines(last_lines, captured_output, line, output_log_level): ''' Given a rolling list of last lines, a list of captured output, a line to append, and an output log level, append the line to the last lines and (if necessary) the captured output. Then log the line at the requested output log level. ''' last_lines.append(line) if len(last_lines) > ERROR_OUTPUT_MAX_LINE_COUNT: last_lines.pop(0) if output_log_level is None: captured_output.append(line) else: logger.log(output_log_level, line) def log_outputs(processes, exclude_stdouts, output_log_level, borg_local_path, borg_exit_codes): # noqa: PLR0912 ''' Given a sequence of subprocess.Popen() instances for multiple processes, log the output for each process with the requested log level. Additionally, raise a CalledProcessError if a process exits with an error (or a warning for exit code 1, if that process does not match the Borg local path). If output log level is None, then instead of logging, capture output for each process and return it as a dict from the process to its output. Use the given Borg local path and exit code configuration to decide what's an error and what's a warning. For simplicity, it's assumed that the output buffer for each process is its stdout. But if any stdouts are given to exclude, then for any matching processes, log from their stderr instead. Note that stdout for a process can be None if output is intentionally not captured. In which case it won't be logged. ''' # Map from output buffer to sequence of last lines. buffer_last_lines = collections.defaultdict(list) process_for_output_buffer = { output_buffer_for_process(process, exclude_stdouts): process for process in processes if process.stdout or process.stderr } output_buffers = list(process_for_output_buffer.keys()) captured_outputs = collections.defaultdict(list) still_running = True # Log output for each process until they all exit. while True: # noqa: PLR1702 if output_buffers: (ready_buffers, _, _) = select.select(output_buffers, [], []) for ready_buffer in ready_buffers: ready_process = process_for_output_buffer.get(ready_buffer) # The "ready" process has exited, but it might be a pipe destination with other # processes (pipe sources) waiting to be read from. So as a measure to prevent # hangs, vent all processes when one exits. if ready_process and ready_process.poll() is not None: for other_process in processes: if ( other_process.poll() is None and other_process.stdout and other_process.stdout not in output_buffers ): # Add the process's output to output_buffers to ensure it'll get read. output_buffers.append(other_process.stdout) while True: line = ready_buffer.readline().rstrip().decode() if not line or not ready_process: break # Keep the last few lines of output in case the process errors, and we need the # output for the exception below. 
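# Only the most recent ERROR_OUTPUT_MAX_LINE_COUNT lines are retained;
# append_last_lines() discards older ones as new output arrives.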
append_last_lines( buffer_last_lines[ready_buffer], captured_outputs[ready_process], line, output_log_level, ) if not still_running: break still_running = False for process in processes: exit_code = process.poll() if output_buffers else process.wait() if exit_code is None: still_running = True command = process.args.split(' ') if isinstance(process.args, str) else process.args continue command = process.args.split(' ') if isinstance(process.args, str) else process.args exit_status = interpret_exit_code(command, exit_code, borg_local_path, borg_exit_codes) if exit_status in {Exit_status.ERROR, Exit_status.WARNING}: # If an error occurs, include its output in the raised exception so that we don't # inadvertently hide error output. output_buffer = output_buffer_for_process(process, exclude_stdouts) last_lines = buffer_last_lines[output_buffer] if output_buffer else [] # Collect any straggling output lines that came in since we last gathered output. while output_buffer: # pragma: no cover line = output_buffer.readline().rstrip().decode() if not line: break append_last_lines( last_lines, captured_outputs[process], line, output_log_level, ) if len(last_lines) == ERROR_OUTPUT_MAX_LINE_COUNT: last_lines.insert(0, '...') # Something has gone wrong. So vent each process' output buffer to prevent it from # hanging. And then kill the process. for other_process in processes: if other_process.poll() is None: other_process.stdout.read(0) other_process.kill() if exit_status == Exit_status.ERROR: raise subprocess.CalledProcessError( exit_code, command_for_process(process), '\n'.join(last_lines), ) still_running = False break if captured_outputs: return { process: '\n'.join(output_lines) for process, output_lines in captured_outputs.items() } return None SECRET_COMMAND_FLAG_NAMES = {'--password'} def mask_command_secrets(full_command): ''' Given a command as a sequence, mask secret values for flags like "--password" in preparation for logging. ''' masked_command = [] previous_piece = None for piece in full_command: masked_command.append('***' if previous_piece in SECRET_COMMAND_FLAG_NAMES else piece) previous_piece = piece return tuple(masked_command) MAX_LOGGED_COMMAND_LENGTH = 1000 PREFIXES_OF_ENVIRONMENT_VARIABLES_TO_LOG = ('BORG_', 'PG', 'MARIADB_', 'MYSQL_') def log_command(full_command, input_file=None, output_file=None, environment=None): ''' Log the given command (a sequence of command/argument strings), along with its input/output file paths and extra environment variables (with omitted values in case they contain passwords). ''' logger.debug( textwrap.shorten( ' '.join( tuple( f'{key}=***' for key in (environment or {}) if any( key.startswith(prefix) for prefix in PREFIXES_OF_ENVIRONMENT_VARIABLES_TO_LOG ) ) + mask_command_secrets(full_command), ), width=MAX_LOGGED_COMMAND_LENGTH, placeholder=' ...', ) + (f" < {getattr(input_file, 'name', input_file)}" if input_file else '') + (f" > {getattr(output_file, 'name', output_file)}" if output_file else ''), ) # A sentinel passed as an output file to execute_command() to indicate that the command's output # should be allowed to flow through to stdout without being captured for logging. Useful for # commands with interactive prompts or those that mess directly with the console. 
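# Hypothetical example: execute_command(full_command, output_file=DO_NOT_CAPTURE) runs the
# command with its output flowing straight to the console instead of being captured or logged.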
DO_NOT_CAPTURE = object() def execute_command( full_command, output_log_level=logging.INFO, output_file=None, input_file=None, shell=False, environment=None, working_directory=None, borg_local_path=None, borg_exit_codes=None, run_to_completion=True, close_fds=False, # Necessary for passing credentials via anonymous pipe. ): ''' Execute the given command (a sequence of command/argument strings) and log its output at the given log level. If an open output file object is given, then write stdout to the file and only log stderr. If an open input file object is given, then read stdin from the file. If shell is True, execute the command within a shell. If an environment variables dict is given, then pass it into the command. If a working directory is given, use that as the present working directory when running the command. If a Borg local path is given, and the command matches it (regardless of arguments), treat exit code 1 as a warning instead of an error. But if Borg exit codes are given as a sequence of exit code configuration dicts, then use that configuration to decide what's an error and what's a warning. If run to completion is False, then return the process for the command without executing it to completion. Raise subprocesses.CalledProcessError if an error occurs while running the command. ''' log_command(full_command, input_file, output_file, environment) do_not_capture = bool(output_file is DO_NOT_CAPTURE) command = ' '.join(full_command) if shell else full_command process = subprocess.Popen( # noqa: S603 command, stdin=input_file, stdout=None if do_not_capture else (output_file or subprocess.PIPE), stderr=None if do_not_capture else (subprocess.PIPE if output_file else subprocess.STDOUT), shell=shell, env=environment, cwd=working_directory, close_fds=close_fds, ) if not run_to_completion: return process with borgmatic.logger.Log_prefix(None): # Log command output without any prefix. log_outputs( (process,), (input_file, output_file), output_log_level, borg_local_path, borg_exit_codes, ) return None def execute_command_and_capture_output( full_command, input_file=None, capture_stderr=False, shell=False, environment=None, working_directory=None, borg_local_path=None, borg_exit_codes=None, close_fds=False, # Necessary for passing credentials via anonymous pipe. ): ''' Execute the given command (a sequence of command/argument strings), capturing and returning its output (stdout). If an input file descriptor is given, then pipe it to the command's stdin. If capture stderr is True, then capture and return stderr in addition to stdout. If shell is True, execute the command within a shell. If an environment variables dict is given, then pass it into the command. If a working directory is given, use that as the present working directory when running the command. If a Borg local path is given, and the command matches it (regardless of arguments), treat exit code 1 as a warning instead of an error. But if Borg exit codes are given as a sequence of exit code configuration dicts, then use that configuration to decide what's an error and what's a warning. Raise subprocesses.CalledProcessError if an error occurs while running the command. 
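Example use (a minimal sketch):

    version = execute_command_and_capture_output(('borg', '--version'))

... which returns Borg's version string, trailing newline included.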
''' log_command(full_command, input_file, environment=environment) command = ' '.join(full_command) if shell else full_command try: output = subprocess.check_output( # noqa: S603 command, stdin=input_file, stderr=subprocess.STDOUT if capture_stderr else None, shell=shell, env=environment, cwd=working_directory, close_fds=close_fds, ) except subprocess.CalledProcessError as error: if ( interpret_exit_code(command, error.returncode, borg_local_path, borg_exit_codes) == Exit_status.ERROR ): raise output = error.output return output.decode() if output is not None else None def execute_command_with_processes( full_command, processes, output_log_level=logging.INFO, output_file=None, input_file=None, shell=False, environment=None, working_directory=None, borg_local_path=None, borg_exit_codes=None, close_fds=False, # Necessary for passing credentials via anonymous pipe. ): ''' Execute the given command (a sequence of command/argument strings) and log its output at the given log level. Simultaneously, continue to poll one or more active processes so that they run as well. This is useful, for instance, for processes that are streaming output to a named pipe that the given command is consuming from. If an open output file object is given, then write stdout to the file and only log stderr. But if output log level is None, instead suppress logging and return the captured output for (only) the given command. If an open input file object is given, then read stdin from the file. If shell is True, execute the command within a shell. If an environment variables dict is given, then pass it into the command. If a working directory is given, use that as the present working directory when running the command. If a Borg local path is given, then for any matching command or process (regardless of arguments), treat exit code 1 as a warning instead of an error. But if Borg exit codes are given as a sequence of exit code configuration dicts, then use that configuration to decide what's an error and what's a warning. Raise subprocesses.CalledProcessError if an error occurs while running the command or in the upstream process. ''' log_command(full_command, input_file, output_file, environment) do_not_capture = bool(output_file is DO_NOT_CAPTURE) command = ' '.join(full_command) if shell else full_command try: command_process = subprocess.Popen( # noqa: S603 command, stdin=input_file, stdout=None if do_not_capture else (output_file or subprocess.PIPE), stderr=( None if do_not_capture else (subprocess.PIPE if output_file else subprocess.STDOUT) ), shell=shell, env=environment, cwd=working_directory, close_fds=close_fds, ) except (subprocess.CalledProcessError, OSError): # Something has gone wrong. So vent each process' output buffer to prevent it from hanging. # And then kill the process. for process in processes: if process.poll() is None: process.stdout.read(0) process.kill() raise with borgmatic.logger.Log_prefix(None): # Log command output without any prefix. 
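Example use (a minimal sketch; dump_process is a hypothetical subprocess.Popen instance streaming a database dump to a named pipe):

    execute_command_with_processes(
        ('borg', 'create', '--stats'),
        (dump_process,),
    )

This logs Borg's output while continuing to poll dump_process, so the named pipe it writes to doesn't fill up and block it.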
captured_outputs = log_outputs( (*processes, command_process), (input_file, output_file), output_log_level, borg_local_path, borg_exit_codes, ) if output_log_level is None: return captured_outputs.get(command_process) return None borgmatic/borgmatic/hooks/000077500000000000000000000000001510202216200161065ustar00rootroot00000000000000borgmatic/borgmatic/hooks/__init__.py000066400000000000000000000000001510202216200202050ustar00rootroot00000000000000borgmatic/borgmatic/hooks/command.py000066400000000000000000000227431510202216200201060ustar00rootroot00000000000000import functools import logging import os import re import shlex import subprocess import sys import borgmatic.execute import borgmatic.logger logger = logging.getLogger(__name__) SOFT_FAIL_EXIT_CODE = 75 BORG_PLACEHOLDERS = { '{hostname}', '{fqdn}', '{reverse-fqdn}', '{now}', '{utcnow}', '{unixtime}', '{user}', '{pid}', '{borgversion}', '{borgmajor}', '{borgminor}', '{borgpatch}', } def interpolate_context(hook_description, command, context): ''' Given a config filename, a hook description, a single hook command, and a dict of context names/values, interpolate the values by "{name}" into the command and return the result. ''' for name, value in context.items(): command = command.replace(f'{{{name}}}', shlex.quote(str(value))) for unsupported_variable in re.findall(r'\{\w+\}', command): # Warn about variables unknown to borgmatic, but don't warn if the variable name happens to # be a Borg placeholder, as Borg should hopefully consume it. if unsupported_variable not in BORG_PLACEHOLDERS: logger.warning( f'Variable "{unsupported_variable}" is not supported in the {hook_description} hook', ) return command def make_environment(current_environment, sys_module=sys): ''' Given the existing system environment as a map from environment variable name to value, return a copy of it, augmented with any extra environment variables that should be used when running command hooks. ''' environment = dict(current_environment) # Detect whether we're running within a PyInstaller bundle. If so, set or clear LD_LIBRARY_PATH # based on the value of LD_LIBRARY_PATH_ORIG. This prevents library version information errors. if getattr(sys_module, 'frozen', False) and hasattr(sys_module, '_MEIPASS'): environment['LD_LIBRARY_PATH'] = environment.get('LD_LIBRARY_PATH_ORIG', '') return environment def filter_hooks(command_hooks, before=None, after=None, action_names=None, state_names=None): ''' Given a sequence of command hook dicts from configuration and one or more filters (before name, after name, a sequence of action names, and/or a sequence of execution result state names), filter down the command hooks to just the ones that match the given filters. ''' return tuple( hook_config for hook_config in command_hooks or () for config_action_names in (hook_config.get('when'),) for config_state_names in (hook_config.get('states'),) if before is None or hook_config.get('before') == before if after is None or hook_config.get('after') == after if action_names is None or config_action_names is None or set(config_action_names or ()).intersection(set(action_names)) if state_names is None or config_state_names is None or set(config_state_names or ()).intersection(set(state_names)) ) def execute_hooks(command_hooks, umask, working_directory, dry_run, **context): # noqa: PLR0912 ''' Given a sequence of command hook dicts from configuration, a umask to execute with (or None), a working directory to execute with, and whether this is a dry run, run the commands for each hook. 
Or don't run them if this is a dry run. The context contains optional values interpolated by name into the hook commands. Raise ValueError if the umask cannot be parsed or a hook is invalid. Raise subprocesses.CalledProcessError if an error occurs in a hook. ''' borgmatic.logger.add_custom_log_levels() dry_run_label = ' (dry run; not actually running hooks)' if dry_run else '' for hook_config in command_hooks: commands = hook_config.get('run') when_description = ( f"{'/'.join(hook_config.get('when'))} " if hook_config.get('when') else '' ) if 'before' in hook_config: description = f'before {when_description}{hook_config.get("before")}' elif 'after' in hook_config: description = f'after {when_description}{hook_config.get("after")}' else: raise ValueError(f'Invalid hook configuration: {hook_config}') if not commands: logger.debug(f'No commands to run for {description} hook') continue commands = [interpolate_context(description, command, context) for command in commands] if len(commands) == 1: logger.info(f'Running {description} command hook{dry_run_label}') else: logger.info( f'Running {len(commands)} commands for {description} hook{dry_run_label}', ) if umask: parsed_umask = int(str(umask), 8) logger.debug(f'Setting hook umask to {oct(parsed_umask)}') original_umask = os.umask(parsed_umask) else: original_umask = None try: for command in commands: if dry_run: continue borgmatic.execute.execute_command( # noqa: S604 [command], output_log_level=( logging.ERROR if hook_config.get('after') == 'error' else logging.ANSWER ), shell=True, environment=make_environment(os.environ), working_directory=working_directory, ) finally: if original_umask: os.umask(original_umask) class Before_after_hooks: ''' A Python context manager for executing command hooks both before and after the wrapped code. Example use as a context manager: with borgmatic.hooks.command.Before_after_hooks( command_hooks=config.get('commands'), before_after='do_stuff', umask=config.get('umask'), dry_run=dry_run, action_names=['create'], ): do() some() stuff() With that context manager in place, "before" command hooks execute before the wrapped code runs, and "after" command hooks execute after the wrapped code completes. ''' def __init__( self, command_hooks, before_after, umask, working_directory, dry_run, action_names=None, **context, ): ''' Given a sequence of command hook configuration dicts, the before/after name, a umask to run commands with, a working directory to run commands with, a dry run flag, a sequence of action names, and any context for the executed commands, save those data points for use below. ''' self.command_hooks = command_hooks self.before_after = before_after self.umask = umask self.working_directory = working_directory self.dry_run = dry_run self.action_names = action_names self.context = context def __enter__(self): ''' Run the configured "before" command hooks that match the initialized data points. ''' try: execute_hooks( borgmatic.hooks.command.filter_hooks( self.command_hooks, before=self.before_after, action_names=self.action_names, ), self.umask, self.working_directory, self.dry_run, **self.context, ) except (OSError, subprocess.CalledProcessError) as error: if considered_soft_failure(error): raise # Trigger the after hook manually, since raising here will prevent it from being run # otherwise. 
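# Passing the exception details here makes __exit__() select "after" hooks configured
# for the "fail" state rather than for "finish".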
self.__exit__(exception_type=type(error), exception=error, traceback=None) raise ValueError(f'Error running before {self.before_after} hook: {error}') def __exit__(self, exception_type, exception, traceback): ''' Run the configured "after" command hooks that match the initialized data points. ''' try: execute_hooks( borgmatic.hooks.command.filter_hooks( self.command_hooks, after=self.before_after, action_names=self.action_names, state_names=['fail' if exception_type else 'finish'], ), self.umask, self.working_directory, self.dry_run, **self.context, ) except (OSError, subprocess.CalledProcessError) as error: if considered_soft_failure(error): raise raise ValueError(f'Error running after {self.before_after} hook: {error}') @functools.cache def considered_soft_failure(error): ''' Given a configuration filename and an exception object, return whether the exception object represents a subprocess.CalledProcessError with a return code of SOFT_FAIL_EXIT_CODE. If so, that indicates that the error is a "soft failure", and should not result in an error. The results of this function are cached so that it can be called multiple times without logging multiple times. ''' exit_code = getattr(error, 'returncode', None) if exit_code is None: return False if exit_code == SOFT_FAIL_EXIT_CODE: logger.info( f'Command hook exited with soft failure exit code ({SOFT_FAIL_EXIT_CODE}); skipping remaining repository actions', ) return True return False borgmatic/borgmatic/hooks/credential/000077500000000000000000000000001510202216200202205ustar00rootroot00000000000000borgmatic/borgmatic/hooks/credential/__init__.py000066400000000000000000000000001510202216200223170ustar00rootroot00000000000000borgmatic/borgmatic/hooks/credential/container.py000066400000000000000000000026551510202216200225640ustar00rootroot00000000000000import logging import os import re logger = logging.getLogger(__name__) SECRET_NAME_PATTERN = re.compile(r'^\w+$') DEFAULT_SECRETS_DIRECTORY = '/run/secrets' def load_credential(hook_config, config, credential_parameters): ''' Given the hook configuration dict, the configuration dict, and a credential parameters tuple containing a secret name to load, read the secret from the corresponding container secrets file and return it. Raise ValueError if the credential parameters is not one element, the secret name is invalid, or the secret file cannot be read. ''' try: (secret_name,) = credential_parameters except ValueError: name = ' '.join(credential_parameters) raise ValueError(f'Cannot load invalid secret name: "{name}"') if not SECRET_NAME_PATTERN.match(secret_name): raise ValueError(f'Cannot load invalid secret name: "{secret_name}"') try: with open( os.path.join( config.get('working_directory', ''), (hook_config or {}).get('secrets_directory', DEFAULT_SECRETS_DIRECTORY), secret_name, ), encoding='utf-8', ) as secret_file: return secret_file.read().rstrip(os.linesep) except (FileNotFoundError, OSError) as error: logger.warning(error) raise ValueError(f'Cannot load secret "{secret_name}" from file: {error.filename}') borgmatic/borgmatic/hooks/credential/file.py000066400000000000000000000021171510202216200215120ustar00rootroot00000000000000import logging import os logger = logging.getLogger(__name__) def load_credential(hook_config, config, credential_parameters): ''' Given the hook configuration dict, the configuration dict, and a credential parameters tuple containing a credential path to load, load the credential from file and return it. 
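For example, a configuration value of "{credential file /etc/borgmatic/password.txt}" (a hypothetical path) results in credential_parameters of ('/etc/borgmatic/password.txt',), and the contents of that file are returned with any trailing line ending removed.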
Raise ValueError if the credential parameters is not one element or the secret file cannot be read. ''' try: (credential_path,) = credential_parameters except ValueError: name = ' '.join(credential_parameters) raise ValueError(f'Cannot load invalid credential: "{name}"') expanded_credential_path = os.path.expanduser(credential_path) try: with open( os.path.join(config.get('working_directory', ''), expanded_credential_path), encoding='utf-8', ) as credential_file: return credential_file.read().rstrip(os.linesep) except (FileNotFoundError, OSError) as error: logger.warning(error) raise ValueError(f'Cannot load credential file: {error.filename}') borgmatic/borgmatic/hooks/credential/keepassxc.py000066400000000000000000000031111510202216200225540ustar00rootroot00000000000000import logging import os import shlex import borgmatic.execute logger = logging.getLogger(__name__) def load_credential(hook_config, config, credential_parameters): ''' Given the hook configuration dict, the configuration dict, and a credential parameters tuple containing a KeePassXC database path and an attribute name to load, run keepassxc-cli to fetch the corresponding KeePassXC credential and return it. Raise ValueError if keepassxc-cli can't retrieve the credential. ''' try: (database_path, attribute_name) = credential_parameters except ValueError: raise ValueError(f'Invalid KeePassXC credential parameters: {credential_parameters}') expanded_database_path = os.path.expanduser(database_path) if not os.path.exists(expanded_database_path): raise ValueError(f'KeePassXC database path does not exist: {database_path}') # Build the keepassxc-cli command. command = ( tuple(shlex.split((hook_config or {}).get('keepassxc_cli_command', 'keepassxc-cli'))) + ('show', '--show-protected', '--attributes', 'Password') + ( ('--key-file', hook_config['key_file']) if hook_config and hook_config.get('key_file') else () ) + ( ('--yubikey', hook_config['yubikey']) if hook_config and hook_config.get('yubikey') else () ) + (expanded_database_path, attribute_name) # Ensure database and entry are last. ) return borgmatic.execute.execute_command_and_capture_output(command).rstrip(os.linesep) borgmatic/borgmatic/hooks/credential/parse.py000066400000000000000000000101401510202216200217000ustar00rootroot00000000000000import functools import re import shlex import borgmatic.hooks.dispatch IS_A_HOOK = False class Hash_adapter: ''' A Hash_adapter instance wraps an unhashable object and pretends it's hashable. This is intended for passing to a @functools.cache-decorated function to prevent it from complaining that an argument is unhashable. It should only be used for arguments that you don't want to actually impact the cache hashing, because Hash_adapter doesn't actually hash the object's contents. Example usage: @functools.cache def func(a, b): print(a, b.actual_value) return a func(5, Hash_adapter({1: 2, 3: 4})) # Calls func(), prints, and returns. func(5, Hash_adapter({1: 2, 3: 4})) # Hits the cache and just returns the value. func(5, Hash_adapter({5: 6, 7: 8})) # Also uses cache, since the Hash_adapter is ignored. In the above function, the "b" value is one that has been wrapped with Hash_adappter, and therefore "b.actual_value" is necessary to access the original value. 
''' def __init__(self, actual_value): self.actual_value = actual_value def __eq__(self, other): return True def __hash__(self): return 0 UNHASHABLE_TYPES = (dict, list, set) def cache_ignoring_unhashable_arguments(function): ''' A function decorator that caches calls to the decorated function but ignores any unhashable arguments when performing cache lookups. This is intended to be a drop-in replacement for functools.cache. Example usage: @cache_ignoring_unhashable_arguments def func(a, b): print(a, b) return a func(5, {1: 2, 3: 4}) # Calls func(), prints, and returns. func(5, {1: 2, 3: 4}) # Hits the cache and just returns the value. func(5, {5: 6, 7: 8}) # Also uses cache, since the unhashable value (the dict) is ignored. ''' @functools.cache def cached_function(*args, **kwargs): return function( *(arg.actual_value if isinstance(arg, Hash_adapter) else arg for arg in args), **{ key: value.actual_value if isinstance(value, Hash_adapter) else value for (key, value) in kwargs.items() }, ) @functools.wraps(function) def wrapper_function(*args, **kwargs): return cached_function( *(Hash_adapter(arg) if isinstance(arg, UNHASHABLE_TYPES) else arg for arg in args), **{ key: Hash_adapter(value) if isinstance(value, UNHASHABLE_TYPES) else value for (key, value) in kwargs.items() }, ) wrapper_function.cache_clear = cached_function.cache_clear return wrapper_function CREDENTIAL_PATTERN = re.compile(r'\{credential( +(?P.*))?\}') @cache_ignoring_unhashable_arguments def resolve_credential(value, config): ''' Given a configuration value containing a string like "{credential hookname credentialname}" and a configuration dict, resolve the credential by calling the relevant hook to get the actual credential value. If the given value does not actually contain a credential tag, then return it unchanged. Cache the value (ignoring the config for purposes of caching), so repeated calls to this function don't need to load the credential repeatedly. Raise ValueError if the config could not be parsed or the credential could not be loaded. ''' if value is None: return value matcher = CREDENTIAL_PATTERN.match(value) if not matcher: return value hook_and_parameters = matcher.group('hook_and_parameters') if not hook_and_parameters: raise ValueError(f'Cannot load credential with invalid syntax "{value}"') (hook_name, *credential_parameters) = shlex.split(hook_and_parameters) if not credential_parameters: raise ValueError(f'Cannot load credential with invalid syntax "{value}"') return borgmatic.hooks.dispatch.call_hook( 'load_credential', config, hook_name, tuple(credential_parameters), ) borgmatic/borgmatic/hooks/credential/systemd.py000066400000000000000000000041251510202216200222640ustar00rootroot00000000000000import logging import os import re import shlex import borgmatic.execute logger = logging.getLogger(__name__) CREDENTIAL_NAME_PATTERN = re.compile(r'^[\w.-]+$') def load_credential(hook_config, config, credential_parameters): ''' Given the hook configuration dict, the configuration dict, and a credential parameters tuple containing a credential name to load, read the credential from the corresponding systemd credential file and return it. Raise ValueError if the systemd CREDENTIALS_DIRECTORY environment variable is not set, the credential name is invalid, or the credential file cannot be read. 
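For example, a configuration value of "{credential systemd borgmatic.pw}" loads and returns the credential named "borgmatic.pw".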
''' try: (credential_name,) = credential_parameters except ValueError: name = ' '.join(credential_parameters) raise ValueError(f'Cannot load invalid credential name: "{name}"') if not CREDENTIAL_NAME_PATTERN.match(credential_name): raise ValueError(f'Cannot load invalid credential name "{credential_name}"') credentials_directory = os.environ.get('CREDENTIALS_DIRECTORY') if not credentials_directory: logger.debug( f'Falling back to loading credential "{credential_name}" via systemd-creds because the systemd CREDENTIALS_DIRECTORY environment variable is not set' ) command = ( *shlex.split((hook_config or {}).get('systemd_creds_command', 'systemd-creds')), 'decrypt', os.path.join( (hook_config or {}).get( 'encrypted_credentials_directory', '/etc/credstore.encrypted' ), credential_name, ), ) return borgmatic.execute.execute_command_and_capture_output(command).rstrip(os.linesep) try: with open( os.path.join(credentials_directory, credential_name), encoding='utf-8' ) as credential_file: return credential_file.read().rstrip(os.linesep) except (FileNotFoundError, OSError) as error: logger.warning(error) raise ValueError(f'Cannot load credential "{credential_name}" from file: {error.filename}') borgmatic/borgmatic/hooks/data_source/000077500000000000000000000000001510202216200203775ustar00rootroot00000000000000borgmatic/borgmatic/hooks/data_source/__init__.py000066400000000000000000000000001510202216200224760ustar00rootroot00000000000000borgmatic/borgmatic/hooks/data_source/bootstrap.py000066400000000000000000000100401510202216200227610ustar00rootroot00000000000000import contextlib import glob import importlib import json import logging import os import borgmatic.borg.pattern import borgmatic.config.paths import borgmatic.hooks.data_source.config logger = logging.getLogger(__name__) def use_streaming(hook_config, config): # pragma: no cover ''' Return whether dump streaming is used for this hook. (Spoiler: It isn't.) ''' return False def dump_data_sources( hook_config, config, config_paths, borgmatic_runtime_directory, patterns, dry_run, ): ''' Given a bootstrap configuration dict, a configuration dict, the borgmatic configuration file paths, the borgmatic runtime directory, the configured patterns, and whether this is a dry run, create a borgmatic manifest file to store the paths of the configuration files used to create the archive. But skip this if the bootstrap store_config_files option is False or if this is a dry run. Return an empty sequence, since there are no ongoing dump processes from this hook. 
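The manifest is a small JSON file written to "bootstrap/manifest.json" within the borgmatic runtime directory, of the form (values here are illustrative): {"borgmatic_version": "2.0.0", "config_paths": ["/etc/borgmatic/config.yaml"]}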
''' if hook_config and hook_config.get('store_config_files') is False: return [] borgmatic_manifest_path = os.path.join( borgmatic_runtime_directory, 'bootstrap', 'manifest.json', ) if dry_run: return [] os.makedirs(os.path.dirname(borgmatic_manifest_path), exist_ok=True) with open(borgmatic_manifest_path, 'w', encoding='utf-8') as manifest_file: json.dump( { 'borgmatic_version': importlib.metadata.version('borgmatic'), 'config_paths': config_paths, }, manifest_file, ) borgmatic.hooks.data_source.config.inject_pattern( patterns, borgmatic.borg.pattern.Pattern( os.path.join(borgmatic_runtime_directory, 'bootstrap'), source=borgmatic.borg.pattern.Pattern_source.HOOK, ), ) for config_path in config_paths: borgmatic.hooks.data_source.config.inject_pattern( patterns, borgmatic.borg.pattern.Pattern( config_path, source=borgmatic.borg.pattern.Pattern_source.HOOK, ), ) return [] def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, patterns, dry_run): ''' Given a bootstrap configuration dict, a configuration dict, the borgmatic runtime directory, the configured patterns, and whether this is a dry run, then remove the manifest file created above. If this is a dry run, then don't actually remove anything. ''' dry_run_label = ' (dry run; not actually removing anything)' if dry_run else '' manifest_glob = os.path.join( borgmatic.config.paths.replace_temporary_subdirectory_with_glob( os.path.normpath(borgmatic_runtime_directory), ), 'bootstrap', ) logger.debug( f'Looking for bootstrap manifest files to remove in {manifest_glob}{dry_run_label}', ) for manifest_directory in glob.glob(manifest_glob): manifest_file_path = os.path.join(manifest_directory, 'manifest.json') logger.debug(f'Removing bootstrap manifest at {manifest_file_path}{dry_run_label}') if dry_run: continue with contextlib.suppress(FileNotFoundError): os.remove(manifest_file_path) with contextlib.suppress(FileNotFoundError): os.rmdir(manifest_directory) def make_data_source_dump_patterns( hook_config, config, borgmatic_runtime_directory, name=None, ): # pragma: no cover ''' Restores are implemented via the separate, purpose-specific "bootstrap" action rather than the generic "restore". ''' return () def restore_data_source_dump( hook_config, config, data_source, dry_run, extract_process, connection_params, borgmatic_runtime_directory, ): # pragma: no cover ''' Restores are implemented via the separate, purpose-specific "bootstrap" action rather than the generic "restore". ''' raise NotImplementedError() borgmatic/borgmatic/hooks/data_source/btrfs.py000066400000000000000000000364041510202216200221000ustar00rootroot00000000000000import collections import functools import glob import logging import os import pathlib import shutil import subprocess import borgmatic.borg.pattern import borgmatic.config.paths import borgmatic.execute import borgmatic.hooks.data_source.config import borgmatic.hooks.data_source.snapshot logger = logging.getLogger(__name__) def use_streaming(hook_config, config): # pragma: no cover ''' Return whether dump streaming is used for this hook. (Spoiler: It isn't.) ''' return False @functools.cache def path_is_a_subvolume(btrfs_command, path): ''' Given a btrfs command and a path, return whether the path is a Btrfs subvolume. Return False if the btrfs command errors, which probably indicates there isn't a containing Btrfs subvolume for the given path. As a performance optimization, multiple calls to this function with the same arguments are cached. 
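(This amounts to running "btrfs subvolume show" on the path and treating a non-zero exit status as "not a subvolume".)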
''' try: borgmatic.execute.execute_command( ( *btrfs_command.split(' '), 'subvolume', 'show', path, ), output_log_level=None, close_fds=True, ) # An error from the command (probably) indicates that the path is not actually a subvolume. except subprocess.CalledProcessError: return False return True @functools.cache def get_subvolume_property(btrfs_command, subvolume_path, property_name): ''' Given a btrfs command, a subvolume path, and a property name to lookup, return the value of the corresponding property. Raise subprocess.CalledProcessError if the btrfs command errors. As a performance optimization, multiple calls to this function with the same arguments are cached. ''' output = borgmatic.execute.execute_command_and_capture_output( ( *btrfs_command.split(' '), 'property', 'get', '-t', # Type. 'subvol', subvolume_path, property_name, ), close_fds=True, ) try: value = output.strip().split('=')[1] except IndexError: raise ValueError(f'Invalid {btrfs_command} property output') return { 'true': True, 'false': False, }.get(value, value) def get_containing_subvolume_path(btrfs_command, path): ''' Given a btrfs command and a path, return the subvolume path that contains the given path (or is the same as the path). If there is no such subvolume path or the containing subvolume is read-only, return None. ''' # Probe the given pattern's path and all of its parents, grandparents, etc. to try to find a # Btrfs subvolume. for candidate_path in ( path, *tuple(str(ancestor) for ancestor in pathlib.PurePath(path).parents), ): if not path_is_a_subvolume(btrfs_command, candidate_path): continue try: if get_subvolume_property(btrfs_command, candidate_path, 'ro'): logger.debug(f'Ignoring Btrfs subvolume {candidate_path} because it is read-only') return None logger.debug(f'Path {candidate_path} is a Btrfs subvolume') return candidate_path except subprocess.CalledProcessError as error: logger.debug( f'Error determining read-only status of Btrfs subvolume {candidate_path}: {error}', ) return None return None def get_all_subvolume_paths(btrfs_command, patterns): ''' Given a btrfs command and a sequence of patterns, get the sorted paths for all Btrfs subvolumes containing those patterns. ''' return tuple( sorted( { subvolume_path for pattern in patterns if pattern.type == borgmatic.borg.pattern.Pattern_type.ROOT if pattern.source == borgmatic.borg.pattern.Pattern_source.CONFIG for subvolume_path in (get_containing_subvolume_path(btrfs_command, pattern.path),) if subvolume_path } ), ) Subvolume = collections.namedtuple('Subvolume', ('path', 'contained_patterns'), defaults=((),)) def get_subvolumes(btrfs_command, patterns): ''' Given a Btrfs command to run and a sequence of configured patterns, find the intersection between the current Btrfs filesystem and subvolume paths and the paths of any patterns. The idea is that these pattern paths represent the requested subvolumes to snapshot. Only include subvolumes that contain at least one root pattern sourced from borgmatic configuration (as opposed to generated elsewhere in borgmatic). Return the result as a sequence of matching Subvolume instances. ''' candidate_patterns = set(patterns or ()) subvolumes = [] # For each subvolume path, match it against the given patterns to find the subvolumes to # backup. Sort the subvolumes from longest to shortest mount points, so longer subvolumes get # a whack at the candidate pattern piñata before their parents do. (Patterns are consumed during # this process, so no two subvolumes end up with the same contained patterns.) 
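# For example, given subvolumes at /mnt and /mnt/data, /mnt/data is visited first, so a
# pattern under /mnt/data gets attributed to it rather than to its parent /mnt.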
for subvolume_path in reversed(get_all_subvolume_paths(btrfs_command, patterns)): subvolumes.extend( Subvolume(subvolume_path, contained_patterns) for contained_patterns in ( borgmatic.hooks.data_source.snapshot.get_contained_patterns( subvolume_path, candidate_patterns, ), ) if any( pattern.type == borgmatic.borg.pattern.Pattern_type.ROOT and pattern.source == borgmatic.borg.pattern.Pattern_source.CONFIG for pattern in contained_patterns ) ) return tuple(sorted(subvolumes, key=lambda subvolume: subvolume.path)) BORGMATIC_SNAPSHOT_PREFIX = '.borgmatic-snapshot-' def make_snapshot_path(subvolume_path): ''' Given the path to a subvolume, make a corresponding snapshot path for it. ''' return os.path.join( subvolume_path, f'{BORGMATIC_SNAPSHOT_PREFIX}{os.getpid()}', # Included so that the snapshot ends up in the Borg archive at the "original" subvolume path. ) + subvolume_path.rstrip(os.path.sep) def make_snapshot_exclude_pattern(subvolume_path): # pragma: no cover ''' Given the path to a subvolume, make a corresponding exclude pattern for its embedded snapshot path. This is to work around a quirk of Btrfs: If you make a snapshot path as a child directory of a subvolume, then the snapshot's own initial directory component shows up as an empty directory within the snapshot itself. For instance, if you have a Btrfs subvolume at /mnt and make a snapshot of it at: /mnt/.borgmatic-snapshot-1234/mnt ... then the snapshot itself will have an empty directory at: /mnt/.borgmatic-snapshot-1234/mnt/.borgmatic-snapshot-1234 So to prevent that from ending up in the Borg archive, this function produces an exclude pattern to exclude that path. ''' snapshot_directory = f'{BORGMATIC_SNAPSHOT_PREFIX}{os.getpid()}' return borgmatic.borg.pattern.Pattern( os.path.join( subvolume_path, snapshot_directory, subvolume_path.lstrip(os.path.sep), snapshot_directory, ), borgmatic.borg.pattern.Pattern_type.NO_RECURSE, borgmatic.borg.pattern.Pattern_style.FNMATCH, source=borgmatic.borg.pattern.Pattern_source.HOOK, ) def make_borg_snapshot_pattern(subvolume_path, pattern): ''' Given the path to a subvolume and a pattern as a borgmatic.borg.pattern.Pattern instance whose path is inside the subvolume, return a new Pattern with its path rewritten to be in a snapshot path intended for giving to Borg. Move any initial caret in a regular expression pattern path to the beginning, so as not to break the regular expression. ''' initial_caret = ( '^' if pattern.style == borgmatic.borg.pattern.Pattern_style.REGULAR_EXPRESSION and pattern.path.startswith('^') else '' ) rewritten_path = initial_caret + os.path.join( subvolume_path, f'{BORGMATIC_SNAPSHOT_PREFIX}{os.getpid()}', # Use the Borg 1.4+ "slashdot" hack to prevent the snapshot path prefix from getting # included in the archive—but only if there's not already a slashdot hack present in the # pattern. ('' if f'{os.path.sep}.{os.path.sep}' in pattern.path else '.'), # Included so that the source directory ends up in the Borg archive at its "original" path. pattern.path.lstrip('^').lstrip(os.path.sep), ) return borgmatic.borg.pattern.Pattern( rewritten_path, pattern.type, pattern.style, pattern.device, source=borgmatic.borg.pattern.Pattern_source.HOOK, ) def snapshot_subvolume(btrfs_command, subvolume_path, snapshot_path): # pragma: no cover ''' Given a Btrfs command to run, the path to a subvolume, and the path for a snapshot, create a new Btrfs snapshot of the subvolume. 
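(Equivalent to creating the snapshot path's parent directory and then running "btrfs subvolume snapshot -r" with the subvolume path and snapshot path as arguments.)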
''' os.makedirs(os.path.dirname(snapshot_path), mode=0o700, exist_ok=True) borgmatic.execute.execute_command( ( *btrfs_command.split(' '), 'subvolume', 'snapshot', '-r', # Read-only. subvolume_path, snapshot_path, ), output_log_level=logging.DEBUG, close_fds=True, ) def dump_data_sources( hook_config, config, config_paths, borgmatic_runtime_directory, patterns, dry_run, ): ''' Given a Btrfs configuration dict, a configuration dict, the borgmatic configuration file paths, the borgmatic runtime directory, the configured patterns, and whether this is a dry run, auto-detect and snapshot any Btrfs subvolume paths listed in the given patterns. Also update those patterns, replacing subvolume paths with corresponding snapshot directories so they get stored in the Borg archive instead. Return an empty sequence, since there are no ongoing dump processes from this hook. If this is a dry run, then don't actually snapshot anything. ''' dry_run_label = ' (dry run; not actually snapshotting anything)' if dry_run else '' logger.info(f'Snapshotting Btrfs subvolumes{dry_run_label}') if 'findmnt_command' in hook_config: logger.warning( 'The Btrfs "findmnt_command" option is deprecated and will be removed from a future release; findmnt is no longer used', ) # Based on the configured patterns, determine Btrfs subvolumes to backup. Only consider those # patterns that came from actual user configuration (as opposed to, say, other hooks). btrfs_command = hook_config.get('btrfs_command', 'btrfs') subvolumes = get_subvolumes(btrfs_command, patterns) if not subvolumes: logger.warning(f'No Btrfs subvolumes found to snapshot{dry_run_label}') # Snapshot each subvolume, rewriting patterns to use their snapshot paths. for subvolume in subvolumes: logger.debug(f'Creating Btrfs snapshot for {subvolume.path} subvolume') snapshot_path = make_snapshot_path(subvolume.path) if dry_run: continue snapshot_subvolume(btrfs_command, subvolume.path, snapshot_path) last_contained_pattern_index = borgmatic.hooks.data_source.config.get_last_pattern_index( patterns, subvolume.contained_patterns ) for pattern in subvolume.contained_patterns: snapshot_pattern = make_borg_snapshot_pattern(subvolume.path, pattern) borgmatic.hooks.data_source.config.replace_pattern( patterns, pattern, snapshot_pattern, last_contained_pattern_index ) borgmatic.hooks.data_source.config.inject_pattern( patterns, make_snapshot_exclude_pattern(subvolume.path) ) return [] def delete_snapshot(btrfs_command, snapshot_path): # pragma: no cover ''' Given a Btrfs command to run and the name of a snapshot path, delete it. ''' borgmatic.execute.execute_command( ( *btrfs_command.split(' '), 'subvolume', 'delete', snapshot_path, ), output_log_level=logging.DEBUG, close_fds=True, ) def remove_data_source_dumps(hook_config, config, borgmatic_runtime_directory, patterns, dry_run): ''' Given a Btrfs configuration dict, a configuration dict, the borgmatic runtime directory, the configured patterns, and whether this is a dry run, delete any Btrfs snapshots created by borgmatic. If this is a dry run or Btrfs isn't configured in borgmatic's configuration, then don't actually remove anything. 
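Snapshots are located by globbing for the paths produced by make_snapshot_path() above, with the process ID portion replaced by a wildcard.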
''' if hook_config is None: return dry_run_label = ' (dry run; not actually removing anything)' if dry_run else '' btrfs_command = hook_config.get('btrfs_command', 'btrfs') try: all_subvolumes = get_subvolumes(btrfs_command, patterns) except FileNotFoundError as error: logger.debug(f'Could not find "{error.filename}" command') return except subprocess.CalledProcessError as error: logger.debug(error) return # Reversing the sorted subvolumes ensures that we remove longer paths of child subvolumes before # the shorter paths of parent subvolumes. for subvolume in reversed(all_subvolumes): subvolume_snapshots_glob = borgmatic.config.paths.replace_temporary_subdirectory_with_glob( os.path.normpath(make_snapshot_path(subvolume.path)), temporary_directory_prefix=BORGMATIC_SNAPSHOT_PREFIX, ) logger.debug( f'Looking for snapshots to remove in {subvolume_snapshots_glob}{dry_run_label}', ) for snapshot_path in glob.glob(subvolume_snapshots_glob): if not os.path.isdir(snapshot_path): continue logger.debug(f'Deleting Btrfs snapshot {snapshot_path}{dry_run_label}') if dry_run: continue try: delete_snapshot(btrfs_command, snapshot_path) except FileNotFoundError: logger.debug(f'Could not find "{btrfs_command}" command') return except subprocess.CalledProcessError as error: logger.debug(error) return # Remove the snapshot parent directory if it still exists. (It might not exist if the # snapshot was for "/".) snapshot_parent_dir = snapshot_path.rsplit(subvolume.path, 1)[0] if os.path.isdir(snapshot_parent_dir): shutil.rmtree(snapshot_parent_dir) def make_data_source_dump_patterns( hook_config, config, borgmatic_runtime_directory, name=None, ): # pragma: no cover ''' Restores aren't implemented, because stored files can be extracted directly with "extract". ''' return () def restore_data_source_dump( hook_config, config, data_source, dry_run, extract_process, connection_params, borgmatic_runtime_directory, ): # pragma: no cover ''' Restores aren't implemented, because stored files can be extracted directly with "extract". ''' raise NotImplementedError() borgmatic/borgmatic/hooks/data_source/config.py000066400000000000000000000172221510202216200222220ustar00rootroot00000000000000import contextlib import json import logging import shutil import subprocess import borgmatic.borg.pattern from borgmatic.execute import execute_command_and_capture_output IS_A_HOOK = False logger = logging.getLogger(__name__) def resolve_database_option(option, data_source, connection_params=None, restore=False): ''' Resolves a database option from the given data source configuration dict and connection parameters dict. If restore is set to True it will consider the `restore_