././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1705763123.7210603 ostree-push-1.2.0/0000775000175100017510000000000014552760464012137 5ustar00dandan././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1705762176.0 ostree-push-1.2.0/.flake80000664000175100017510000000035014552756600013305 0ustar00dandan[flake8] # Check python scripts without .py suffix filename = *.py, ./scripts/ostree-receive-shell, ./tests/dumpenv, ./tests/ostree-push, ./tests/ostree-receive # Exclude virtualenv. extend-exclude = .venv,venv ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1670088793.0 ostree-push-1.2.0/COPYING0000664000175100017510000004325414342704131013164 0ustar00dandan GNU GENERAL PUBLIC LICENSE Version 2, June 1991 Copyright (C) 1989, 1991 Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. The precise terms and conditions for copying, distribution and modification follow. 
GNU GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. 1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. 
Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. 4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. 6. 
Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 9. The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. 10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. 
For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. Also add information on how to contact you by electronic and paper mail. If the program is interactive, make it output a short notice like this when it starts in an interactive mode: Gnomovision version 69, Copyright (C) year name of author Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program. 
You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names:

  Yoyodyne, Inc., hereby disclaims all copyright interest in the program
  `Gnomovision' (which makes passes at compilers) written by James Hacker.

  <signature of Ty Coon>, 1 April 1989
  Ty Coon, President of Vice

This General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License.

ostree-push-1.2.0/MANIFEST.in
include NEWS
include TODO
include ostree-receive.conf
include .flake8
include pyproject.toml
include pytest.ini
include tox.ini
include tests/*.py
include tests/dumpenv
include tests/ostree-push
include tests/ostree-receive
graft tests/data

ostree-push-1.2.0/NEWS

# 1.2.0 (2024-01-20)

* ostree-push now waits for the SSH master process to exit and kills it if necessary. This should help in situations where the process is slow to exit and its temporary socket is deleted before that happens. Thanks to Emanuele Aina for the fix.

* ostree-receive has gained support for pre-0.1.1 ostree-push clients. This allows deployments using the old protocol to upgrade ostree-push on clients and servers independently. The ostree-receive entry point on the server will now detect if the client is attempting to push using the old or new process and dispatch accordingly. Moving forward, ostree-push will attempt to push to a versioned ostree-receive on the server to ensure it's using a compatible version. The old protocol support will be removed in a later version after deployments have been given time to migrate to the new tunneled pull process. Thanks to Emanuele Aina for diagnosing the problem and implementing a compatibility scheme. See #11 for details.

# 1.1.0 (2022-12-02)

ostree-receive now supports optional per-repository configuration. This is useful if you have repositories that require different settings such as the key IDs to sign commits with.

# 1.0.1 (2022-10-27)

ostree-receive now supports ostree ed25519 signing and verification. See the example ostree-receive.conf for details on the configuration. Thanks to Ryan Gonzalez for the contribution!

# 1.0.0 (2022-06-30)

ostree-receive now supports GPG verification of received commits. It now supports all the features I need from it, so let's call it 1.0.0!

# 0.1.1 (2022-04-06)

* An alternate repository root can be configured for ostree-receive.
* ostree-receive now supports GPG signing of commits and repo metadata.
* A repo update hook can now be configured to customize the post-commit process in ostree-receive.

# 0.1.0 (2022-03-04)

ostree-receive now reads configuration files for options. See the example ostree-receive.conf for details on the supported options.

# 0.0.1 (2021-05-27)

This is a rewrite of ostree-push and ostree-receive roughly as we've been using it in Endless for several years.
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1705763123.7210603 ostree-push-1.2.0/PKG-INFO0000644000175100017510000001363514552760464013242 0ustar00dandanMetadata-Version: 2.1 Name: ostree-push Version: 1.2.0 Summary: Push and receive OSTree commits Home-page: https://github.com/dbnicholson/ostree-push Author: Dan Nicholson Author-email: dbn@endlessos.org Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 3.9 Classifier: Development Status :: 5 - Production/Stable Classifier: License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+) Classifier: Operating System :: POSIX Classifier: Topic :: Software Development :: Build Tools Classifier: Topic :: System :: Archiving :: Mirroring Classifier: Topic :: System :: Archiving :: Packaging Classifier: Topic :: System :: Software Distribution Requires-Python: >=3.7 Description-Content-Type: text/markdown License-File: COPYING Requires-Dist: PyGObject Requires-Dist: PyYAML # ostree-push ## Background `ostree-push` uses `ssh` to push commits from a local OSTree repo to a remote OSTree repo. This is to fill a gap where currently you can only pull commits in core ostree. To publish commits to a remote repository, you either have to `pull` from the local repo to the remote repo or use an out of band mechanism like `rsync`. Both approaches have significant limitations. To pull over the network, only http is supported. So, in addition to having to login on the remote machine and run `ostree pull`, the local repository needs to be served over http. This means your build machine needs to be an http server with appropriate configuration in addition to simply making commits. This pushes the builds to be done on the public repository server, which prevents reasonable separation of duties and makes multiarch repositories impossible. Using `rsync` for publishing has some major benefits since only updated objects are published. However, it has no concept of the OSTree object store or refs structures. There are a few problems deriving from this issue. First, objects are published in sort order, but this means that objects can be published before their children. In the most extreme case, a commit object could be published before it's complete. The remote repo would assume this commit object was valid even though some children might be missing. Second, the refs might get updated before the commit objects are in place. If a client pulls while `rsync` is publishing, it may attempt to pull an incomplete or entirely missing commit. Finally, `rsync` will push the objects directly into the store rather than using a staging directory like `pull` or `commit` do. If `rsync` is interrupted, it could leave partial objects in the store. `ostree-push` tries to offer functionality like `git` where commits can be pushed over `ssh` to avoid these issues. ## Operation When `ostree-push` is started, it first starts a local HTTP server providing the contents of the local ostree repo. It then connects to the remote host with `ssh` and tunnels the HTTP server port through the SSH connection. Finally, it runs `ostree-receive` on the remote host with the URL of the tunneled HTTP server. `ostree-receive` then creates a temporary remote using this URL and pulls the desired refs from it. 
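The flow can be sketched with nothing but the Python standard library. The snippet below is illustrative only, not the actual implementation: the port numbers, paths, user and ref are placeholders, and the real tools negotiate an ephemeral port, multiplex the SSH connection and clean up the tunnel.

```
# Rough illustration of the ostree-push flow (ports, paths, user and ref are placeholders).
import functools
import http.server
import multiprocessing
import subprocess

def serve_repo(repo_path):
    # Serve the local repo over HTTP on the loopback interface only.
    handler = functools.partial(
        http.server.SimpleHTTPRequestHandler, directory=repo_path)
    http.server.ThreadingHTTPServer(('127.0.0.1', 8000), handler).serve_forever()

server = multiprocessing.Process(target=serve_repo, args=('/path/to/local/repo',))
server.start()
try:
    # Reverse-forward the local HTTP port into the remote host, then run
    # ostree-receive there against the tunneled URL.
    subprocess.run(
        ['ssh', '-R', '9000:127.0.0.1:8000', 'user@remotehost',
         'ostree-receive', '/path/to/remote/repo', 'http://127.0.0.1:9000', 'main'],
        check=True,
    )
finally:
    server.terminate()
```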
In essence, `ostree-push` and `ostree-receive` coordinate to pull from the local repo to a remote repo while avoiding the limitations described above. Namely, no HTTP server needs to be running and no port needs to be exposed on the local host. Both resources are created temporarily and only exposed to the remote host through the secure SSH connection. ## Installation Use `pip` to install the `otpush` package and the `ostree-push` and `ostree-receive` scripts. From a git checkout, run: ``` pip install . ``` If `ostree-receive` is not in a default `PATH` location, it may not be located when run in the environment spawned by the SSH server. As a workaround, make a symbolic link in a standard location: ``` sudo ln -s /path/to/ostree-receive /usr/bin/ostree-receive ``` In order to restrict SSH usage to only running `ostree-receive`, the `ostree-receive-shell` script can be used as a login shell. This way someone with SSH access to the remote machine cannot run arbitrary commands as the user owning the repositories. To use it, set the login shell of the repo owner to `ostree-receive-shell`: ``` sudo chsh -s /path/to/ostree-receive-shell ``` `ostree-receive-shell` will also append the directory it's installed in to `PATH` to allow `ostree-receive` to be found in non-standard locations. In that scenario, the symbolic link to `ostree-receive` described above is not needed. Both `ostree-push` and `ostree-receive` require the OSTree GObject Introspection bindings. Typically these would be installed from the host distro. On Debian systems the package is `gir1.2-ostree-1.0` while on RedHat systems they are in the `ostree-libs` package. `ostree-push` relies on the connection sharing and port forwarding features of OpenSSH and is unlikely to work with another SSH client. Similarly, `ostree-receive` has only be tested with the OpenSSH server, but it might work correctly with other SSH servers. ## Configuration `ostree-receive` can be configured from YAML formatted files. It will load `~/.config/ostree/ostree-receive.conf` and `/etc/ostree/ostree-receive.conf` or a file specified in the `OSTREE_RECEIVE_CONF` environment variable. See the example [`ostree-receive.conf`](ostree-receive.conf) file for available options. ## Testing A test suite is provided using [pytest][pytest]. Most of the time simply running `pytest` from a git checkout will run it correctly. [tox][tox] can also be used to automate running the test suite in a prepared Python environment. In addition to the `ostree-push` dependencies, many of the tests depend on using OpenSSH `sshd` locally. On both Debian and RedHat systems this is available in the `openssh-server` package. [pytest]: https://docs.pytest.org/en/stable/ [tox]: https://tox.readthedocs.io/en/stable/ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1670088793.0 ostree-push-1.2.0/README.md0000664000175100017510000001176714342704131013414 0ustar00dandan# ostree-push ## Background `ostree-push` uses `ssh` to push commits from a local OSTree repo to a remote OSTree repo. This is to fill a gap where currently you can only pull commits in core ostree. To publish commits to a remote repository, you either have to `pull` from the local repo to the remote repo or use an out of band mechanism like `rsync`. Both approaches have significant limitations. To pull over the network, only http is supported. So, in addition to having to login on the remote machine and run `ostree pull`, the local repository needs to be served over http. 
This means your build machine needs to be an http server with appropriate configuration in addition to simply making commits. This pushes the builds to be done on the public repository server, which prevents reasonable separation of duties and makes multiarch repositories impossible. Using `rsync` for publishing has some major benefits since only updated objects are published. However, it has no concept of the OSTree object store or refs structures. There are a few problems deriving from this issue. First, objects are published in sort order, but this means that objects can be published before their children. In the most extreme case, a commit object could be published before it's complete. The remote repo would assume this commit object was valid even though some children might be missing. Second, the refs might get updated before the commit objects are in place. If a client pulls while `rsync` is publishing, it may attempt to pull an incomplete or entirely missing commit. Finally, `rsync` will push the objects directly into the store rather than using a staging directory like `pull` or `commit` do. If `rsync` is interrupted, it could leave partial objects in the store. `ostree-push` tries to offer functionality like `git` where commits can be pushed over `ssh` to avoid these issues. ## Operation When `ostree-push` is started, it first starts a local HTTP server providing the contents of the local ostree repo. It then connects to the remote host with `ssh` and tunnels the HTTP server port through the SSH connection. Finally, it runs `ostree-receive` on the remote host with the URL of the tunneled HTTP server. `ostree-receive` then creates a temporary remote using this URL and pulls the desired refs from it. In essence, `ostree-push` and `ostree-receive` coordinate to pull from the local repo to a remote repo while avoiding the limitations described above. Namely, no HTTP server needs to be running and no port needs to be exposed on the local host. Both resources are created temporarily and only exposed to the remote host through the secure SSH connection. ## Installation Use `pip` to install the `otpush` package and the `ostree-push` and `ostree-receive` scripts. From a git checkout, run: ``` pip install . ``` If `ostree-receive` is not in a default `PATH` location, it may not be located when run in the environment spawned by the SSH server. As a workaround, make a symbolic link in a standard location: ``` sudo ln -s /path/to/ostree-receive /usr/bin/ostree-receive ``` In order to restrict SSH usage to only running `ostree-receive`, the `ostree-receive-shell` script can be used as a login shell. This way someone with SSH access to the remote machine cannot run arbitrary commands as the user owning the repositories. To use it, set the login shell of the repo owner to `ostree-receive-shell`: ``` sudo chsh -s /path/to/ostree-receive-shell ``` `ostree-receive-shell` will also append the directory it's installed in to `PATH` to allow `ostree-receive` to be found in non-standard locations. In that scenario, the symbolic link to `ostree-receive` described above is not needed. Both `ostree-push` and `ostree-receive` require the OSTree GObject Introspection bindings. Typically these would be installed from the host distro. On Debian systems the package is `gir1.2-ostree-1.0` while on RedHat systems they are in the `ostree-libs` package. `ostree-push` relies on the connection sharing and port forwarding features of OpenSSH and is unlikely to work with another SSH client. 
Similarly, `ostree-receive` has only been tested with the OpenSSH server, but it might work correctly with other SSH servers.

## Configuration

`ostree-receive` can be configured from YAML formatted files. It will load `~/.config/ostree/ostree-receive.conf` and `/etc/ostree/ostree-receive.conf` or a file specified in the `OSTREE_RECEIVE_CONF` environment variable. See the example [`ostree-receive.conf`](ostree-receive.conf) file for available options.

## Testing

A test suite is provided using [pytest][pytest]. Most of the time simply running `pytest` from a git checkout will run it correctly. [tox][tox] can also be used to automate running the test suite in a prepared Python environment.

In addition to the `ostree-push` dependencies, many of the tests depend on using OpenSSH `sshd` locally. On both Debian and RedHat systems this is available in the `openssh-server` package.

[pytest]: https://docs.pytest.org/en/stable/
[tox]: https://tox.readthedocs.io/en/stable/

ostree-push-1.2.0/TODO

* There's no local repository locking. If another process writes to the repository while the HTTP server is active, it might serve the wrong contents. If the OSTree repo locking API is made public, the local repo should be locked while the push is happening. In practice this shouldn't be an issue since you're unlikely to be making commits into a shared repo.

ostree-push-1.2.0/ostree-receive.conf

# Example ostree-receive.conf file. All supported options with defaults are
# shown below. Install to ~/.config/ostree/ostree-receive.conf or
# /etc/ostree/ostree-receive.conf.

# Specify a repo root directory. When null or '', any repo path is allowed and
# paths are resolved relative to the current working directory. This is
# typically the user's home directory.
#root: null

# GPG key IDs for signing received commits and repo metadata.
#gpg_sign: []

# GnuPG home directory for loading GPG signing keys.
#gpg_homedir: null

# Whether to verify received commits with GPG.
#gpg_verify: no

# GPG keyring for verifying received commits. If null or '', keyrings at
# ~/.config/ostree/ostree-receive-trustedkeys.gpg or
# /etc/ostree/ostree-receive-trustedkeys.gpg will be used. OSTree will also
# use the global trusted keyrings in /usr/share/ostree/trusted.gpg.d.
#gpg_trustedkeys: null

# Signature implementation to use for OSTree's alternative, non-GPG signing
# system (used by the sign_* keys below). Also affects the format of the
# "keyfiles" in the subsequent settings.
# For ed25519 keys, the keyfiles described below should be files consisting of a
# series of base64-encoded keys, one key per line.
#sign_type: ed25519

# Keyfiles containing private keys for signing received commits and repo
# metadata, using the signing system set in sign_type.
#sign_keyfiles: []

# Whether to verify received commits with the signing system set in sign_type.
#sign_verify: no

# Keyfile containing public keys for verifying received commits, using the
# signing system set in sign_type. If null or '', the keyfile at
# ~/.config/ostree/ostree-receive-trustedkeyfile.SIGNTYPE or
# /etc/ostree/ostree-receive-trustedkeyfile.SIGNTYPE, where SIGNTYPE is the
# value of sign_type, will be used.
# For ed25519 signatures, OSTree will also use the global trusted keyfiles # /usr/share/ostree/trusted.ed25519 and /etc/ostree/trusted.ed25519, as well as # the keyfiles located within the directories # /usr/share/ostree/trusted.ed25519.d and /etc/ostree/trusted.ed25519.d. #sign_trustedkeyfile: null # Update the repo metadata after receiving commits. #update: yes # Program to run after new commits have been made. The program will be # executed with the environment variable OSTREE_RECEIVE_REPO set to the # absolute path of the OSTree repository and the environment variable # OSTREE_RECEIVE_REFS set to the set of refs received separated by # whitespace. #update_hook: null # Optional per-repository configuration settings. All of the above settings # except for root can be set and will override the global value. The value is a # map of repository path to map of settings. The repository path can be # relative or absolute. If root is specified, relative paths are resolved below # it. # # For example: # # repos: # foo: # gpg_sign: ['76543210'] # /path/to/bar: # update: no # #repos: {} # Set the log level. See https://docs.python.org/3/library/logging.html#levels # for the list of log levels. #log_level: INFO # Force receiving commits even if nothing changed or the remote commits are # not newer than the current commits. #force: no # Only show what would be done without making any commits. #dry_run: no ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1705763123.7210603 ostree-push-1.2.0/ostree_push.egg-info/0000775000175100017510000000000014552760464016171 5ustar00dandan././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1705763123.0 ostree-push-1.2.0/ostree_push.egg-info/PKG-INFO0000644000175100017510000001363514552760463017273 0ustar00dandanMetadata-Version: 2.1 Name: ostree-push Version: 1.2.0 Summary: Push and receive OSTree commits Home-page: https://github.com/dbnicholson/ostree-push Author: Dan Nicholson Author-email: dbn@endlessos.org Classifier: Programming Language :: Python :: 3 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 3.9 Classifier: Development Status :: 5 - Production/Stable Classifier: License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+) Classifier: Operating System :: POSIX Classifier: Topic :: Software Development :: Build Tools Classifier: Topic :: System :: Archiving :: Mirroring Classifier: Topic :: System :: Archiving :: Packaging Classifier: Topic :: System :: Software Distribution Requires-Python: >=3.7 Description-Content-Type: text/markdown License-File: COPYING Requires-Dist: PyGObject Requires-Dist: PyYAML # ostree-push ## Background `ostree-push` uses `ssh` to push commits from a local OSTree repo to a remote OSTree repo. This is to fill a gap where currently you can only pull commits in core ostree. To publish commits to a remote repository, you either have to `pull` from the local repo to the remote repo or use an out of band mechanism like `rsync`. Both approaches have significant limitations. To pull over the network, only http is supported. So, in addition to having to login on the remote machine and run `ostree pull`, the local repository needs to be served over http. This means your build machine needs to be an http server with appropriate configuration in addition to simply making commits. 
This pushes the builds to be done on the public repository server, which prevents reasonable separation of duties and makes multiarch repositories impossible. Using `rsync` for publishing has some major benefits since only updated objects are published. However, it has no concept of the OSTree object store or refs structures. There are a few problems deriving from this issue. First, objects are published in sort order, but this means that objects can be published before their children. In the most extreme case, a commit object could be published before it's complete. The remote repo would assume this commit object was valid even though some children might be missing. Second, the refs might get updated before the commit objects are in place. If a client pulls while `rsync` is publishing, it may attempt to pull an incomplete or entirely missing commit. Finally, `rsync` will push the objects directly into the store rather than using a staging directory like `pull` or `commit` do. If `rsync` is interrupted, it could leave partial objects in the store. `ostree-push` tries to offer functionality like `git` where commits can be pushed over `ssh` to avoid these issues. ## Operation When `ostree-push` is started, it first starts a local HTTP server providing the contents of the local ostree repo. It then connects to the remote host with `ssh` and tunnels the HTTP server port through the SSH connection. Finally, it runs `ostree-receive` on the remote host with the URL of the tunneled HTTP server. `ostree-receive` then creates a temporary remote using this URL and pulls the desired refs from it. In essence, `ostree-push` and `ostree-receive` coordinate to pull from the local repo to a remote repo while avoiding the limitations described above. Namely, no HTTP server needs to be running and no port needs to be exposed on the local host. Both resources are created temporarily and only exposed to the remote host through the secure SSH connection. ## Installation Use `pip` to install the `otpush` package and the `ostree-push` and `ostree-receive` scripts. From a git checkout, run: ``` pip install . ``` If `ostree-receive` is not in a default `PATH` location, it may not be located when run in the environment spawned by the SSH server. As a workaround, make a symbolic link in a standard location: ``` sudo ln -s /path/to/ostree-receive /usr/bin/ostree-receive ``` In order to restrict SSH usage to only running `ostree-receive`, the `ostree-receive-shell` script can be used as a login shell. This way someone with SSH access to the remote machine cannot run arbitrary commands as the user owning the repositories. To use it, set the login shell of the repo owner to `ostree-receive-shell`: ``` sudo chsh -s /path/to/ostree-receive-shell ``` `ostree-receive-shell` will also append the directory it's installed in to `PATH` to allow `ostree-receive` to be found in non-standard locations. In that scenario, the symbolic link to `ostree-receive` described above is not needed. Both `ostree-push` and `ostree-receive` require the OSTree GObject Introspection bindings. Typically these would be installed from the host distro. On Debian systems the package is `gir1.2-ostree-1.0` while on RedHat systems they are in the `ostree-libs` package. `ostree-push` relies on the connection sharing and port forwarding features of OpenSSH and is unlikely to work with another SSH client. Similarly, `ostree-receive` has only be tested with the OpenSSH server, but it might work correctly with other SSH servers. 
## Configuration `ostree-receive` can be configured from YAML formatted files. It will load `~/.config/ostree/ostree-receive.conf` and `/etc/ostree/ostree-receive.conf` or a file specified in the `OSTREE_RECEIVE_CONF` environment variable. See the example [`ostree-receive.conf`](ostree-receive.conf) file for available options. ## Testing A test suite is provided using [pytest][pytest]. Most of the time simply running `pytest` from a git checkout will run it correctly. [tox][tox] can also be used to automate running the test suite in a prepared Python environment. In addition to the `ostree-push` dependencies, many of the tests depend on using OpenSSH `sshd` locally. On both Debian and RedHat systems this is available in the `openssh-server` package. [pytest]: https://docs.pytest.org/en/stable/ [tox]: https://tox.readthedocs.io/en/stable/ ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1705763123.0 ostree-push-1.2.0/ostree_push.egg-info/SOURCES.txt0000664000175100017510000000153514552760463020060 0ustar00dandan.flake8 COPYING MANIFEST.in NEWS README.md TODO ostree-receive.conf pyproject.toml pytest.ini setup.cfg setup.py tox.ini ostree_push.egg-info/PKG-INFO ostree_push.egg-info/SOURCES.txt ostree_push.egg-info/dependency_links.txt ostree_push.egg-info/entry_points.txt ostree_push.egg-info/requires.txt ostree_push.egg-info/top_level.txt otpush/__init__.py otpush/push.py otpush/receive.py otpush/receive_legacy.py scripts/ostree-receive-shell tests/__init__.py tests/conftest.py tests/dumpenv tests/ostree-push tests/ostree-receive tests/test_full.py tests/test_push.py tests/test_receive.py tests/test_receive_legacy.py tests/test_receive_shell.py tests/test_sshd.py tests/util.py tests/data/host_rsa_key tests/data/host_rsa_key.pub tests/data/id_rsa tests/data/id_rsa.pub tests/data/pgp-key.asc tests/data/pgp-pub.asc tests/data/pgp-pub.gpg tests/data/sshd_config././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1705763123.0 ostree-push-1.2.0/ostree_push.egg-info/dependency_links.txt0000664000175100017510000000000114552760463022236 0ustar00dandan ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1705763123.0 ostree-push-1.2.0/ostree_push.egg-info/entry_points.txt0000664000175100017510000000026214552760463021466 0ustar00dandan[console_scripts] ostree-push = otpush.push:main ostree-receive = otpush.receive:compat_main ostree-receive-0 = otpush.receive_legacy:main ostree-receive-1 = otpush.receive:main ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1705763123.0 ostree-push-1.2.0/ostree_push.egg-info/requires.txt0000664000175100017510000000002114552760463020561 0ustar00dandanPyGObject PyYAML ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1705763123.0 ostree-push-1.2.0/ostree_push.egg-info/top_level.txt0000664000175100017510000000000714552760463020717 0ustar00dandanotpush ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1705763123.7210603 ostree-push-1.2.0/otpush/0000775000175100017510000000000014552760464013461 5ustar00dandan././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1705763062.0 ostree-push-1.2.0/otpush/__init__.py0000664000175100017510000000006014552760366015567 0ustar00dandanVERSION = '1.2.0' MAJOR = VERSION.split('.')[0] ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1705762176.0 
ostree-push-1.2.0/otpush/push.py0000775000175100017510000005117314552756600015021 0ustar00dandan#!/usr/bin/python3 # ostree-push - Push local ostree repo to remote server over SSH # Copyright (C) 2017 Endless Mobile, Inc. # Copyright (C) 2021 Endless OS Foundation LLC # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. """Push local ostree repo to remote server over SSH ostree-push allows coherently publishing an ostree repo to a remote server. It works by opening an SSH connection to the remote server and initiating the ostree-receive service to pull from an HTTP server on the local host. This has a distinct advantage over using rsync where files can be pushed in the wrong order and there's no ability to push a subset of the refs in the local repo. ostree-push will start an HTTP server and tunnel its port to the remote server. This allows publishing from a host that is not running an HTTP server and avoids any firewalls between the local and remote hosts. In either case, ostree-receive must be installed on the remote host to pull from the tunnelled HTTP server. """ from . import VERSION from argparse import Action, ArgumentError, ArgumentParser from collections import namedtuple import gi from http.server import SimpleHTTPRequestHandler import logging import multiprocessing import os import queue import shlex import subprocess from tempfile import TemporaryDirectory import threading import time from urllib.parse import urlparse try: from http.server import ThreadingHTTPServer except ImportError: from http.server import HTTPServer from socketserver import ThreadingMixIn class ThreadingHTTPServer(ThreadingMixIn, HTTPServer): daemon_threads = True gi.require_version('OSTree', '1.0') from gi.repository import Gio, OSTree # noqa: E402 logger = logging.getLogger(__name__) # Timeout in seconds when waiting for ports or sockets RESOURCE_TIMEOUT = 60 # Default remote commands to attempt. MAJOR = VERSION.split('.')[0] DEFAULT_COMMANDS = (f'ostree-receive-{MAJOR}', 'ostree-receive') class OTPushError(Exception): """Exceptions from ostree-push""" pass class OSTreeRequestHandler(SimpleHTTPRequestHandler): """SimpleHTTPRequestHandler with logging""" def log_message(self, format, *args): logger.debug("%s: %s - - [%s] %s", threading.current_thread().name, self.address_string(), self.log_date_time_string(), format % args) class RepoServer: """HTTP server for repo Start an HTTP server running at the repository path. The server listens on an ephemeral port on the loopback address. The timeout parameter controls how long to wait for the server process to send back its URL. When used as a context manager, the server is stopped when the context closes. 
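Example (illustrative; the repository path is a placeholder):

        with RepoServer('/path/to/repo') as server:
            print(server.url)  # e.g. http://127.0.0.1:<ephemeral port>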
""" def __init__(self, path, timeout=RESOURCE_TIMEOUT): self.path = path self.timeout = timeout self.proc = None self.address = None self.url = None if not os.path.isdir(path): raise ValueError('{} is not a directory'.format(path)) def __enter__(self): self.start() return self def __exit__(self, exc_type, exc_value, traceback): self.stop() def _run_server(self, path, queue): # FIXME: When python 3.7 is the minimum, use the # SimpleHTTPRequestHandler directory parameter with # functools.partial instead of changing directory. os.chdir(path) server = ThreadingHTTPServer(('127.0.0.1', 0), OSTreeRequestHandler) queue.put(server.server_address) server.serve_forever() def start(self): """Start an HTTP server for the repo The server is started in a separate process which send back the address it is bound to. If the address is not received by the instance's timeout value, an exception is raised. """ addr_queue = multiprocessing.Queue() self.proc = multiprocessing.Process( target=self._run_server, args=(self.path, addr_queue) ) self.proc.start() if not self.proc.is_alive(): raise OTPushError(f'HTTP process {self.proc.pid} exited') try: self.address = addr_queue.get(True, self.timeout) except queue.Empty: raise OTPushError('HTTP process did not write port to queue') \ from None self.url = f'http://{self.address[0]}:{self.address[1]}' logger.info('Serving %s on %s from process %d', self.path, self.url, self.proc.pid) def stop(self): """Stop the HTTP server""" if self.proc is not None: if self.proc.is_alive(): logger.debug('Stopping HTTP server process %d', self.proc.pid) self.proc.terminate() self.proc = None class SSHMultiplexer: """SSH multiplexer for connecting with a remote host The remote host and a path to a non-existent socket. """ def __init__(self, host, socket, ssh_options=None, user=None, port=None): self.host = host self.user = user self.port = port self.socket = socket self.ssh_options = ssh_options self.master_proc = None def __enter__(self): self.start() return self def __exit__(self, exc_type, exc_value, traceback): self.stop() def __del__(self): self.stop() def start(self): """Start an SSH master connection Run an SSH master connection to host in the background. 
""" if self.master_proc is not None: raise OTPushError( f'SSH master process already running in {self.master_proc.pid}' ) if os.path.exists(self.socket): raise OTPushError(f'Socket {self.socket} already exists') # Create the socket file if necessary # Options used: # -N: Don't execute a remote command # -M: Puts the client in master mode for connection sharing # -S: Specify the location of the control socket master_cmd = ['ssh', '-N', '-M', '-S', self.socket] if self.port: master_cmd += ['-p', str(self.port)] if self.ssh_options: master_cmd += self.ssh_options if self.user: master_cmd.append(f'{self.user}@{self.host}') else: master_cmd.append(self.host) logger.debug('Starting SSH master process %s', ' '.join(map(shlex.quote, master_cmd))) self.master_proc = subprocess.Popen(master_cmd) # Loop until the socket shows up timeout = 0 while timeout < RESOURCE_TIMEOUT: if self.master_proc.poll() is not None: raise OTPushError( f'SSH master process {self.master_proc.pid} exited') if os.path.exists(self.socket): break timeout += 0.5 time.sleep(0.5) if timeout >= RESOURCE_TIMEOUT: raise OTPushError( f'SSH control socket {self.socket} does not exist') def wait_for_exit(self, timeout): try: self.master_proc.wait(timeout) return True except subprocess.TimeoutExpired: return False def stop(self): wait_timeout = 20 if self.master_proc is not None: if self.master_proc.poll() is None: logger.debug('Stopping SSH master process %d', self.master_proc.pid) self.master_proc.terminate() if self.wait_for_exit(wait_timeout): return self.master_proc.kill() if self.wait_for_exit(wait_timeout): return logger.error('Failed to stop the SSH master process %d', self.master_proc.pid) self.master_proc = None def forward_port(self, port): """Forward a local port over an SSH master connection Forward a local port to host over the SSH master connection. The remote port is returned. """ if self.master_proc is None: raise OTPushError('SSH master process not running') # Options used: # -S: Specify the location of the control socket # -O: Makes the client print the forwarded port on stdout # -R: Forward the local port to the remote. Use 0 as the remote port # so it binds one dynamically. forward_cmd = [ 'ssh', '-S', self.socket, '-O', 'forward', '-R', f'0:127.0.0.1:{port}' ] if self.ssh_options: forward_cmd += self.ssh_options forward_cmd.append(self.host) logger.debug('Forwarding HTTP port with %s', ' '.join(map(shlex.quote, forward_cmd))) out = subprocess.check_output(forward_cmd) # Pass the output through int() so that any whitespace is stripped # and we know a port number was returned. Anything else will raise a # ValueError. return int(out) def run(self, cmd): """Run a command on the remote host using the master connection""" if self.master_proc is None: raise OTPushError('SSH master process not running') run_cmd = ['ssh', '-S', self.socket] if self.ssh_options: run_cmd += self.ssh_options run_cmd += [self.host] + cmd logger.debug('Executing ' + ' '.join(map(shlex.quote, cmd))) subprocess.check_call(run_cmd) def push_refs(local_repo, dest, refs=None, ssh_options=None, commands=None, dry_run=False): """Run ostree-receive on remote with a tunneled HTTP server Start a local HTTP server and tunnel its port to the remote host. Use this tunneled HTTP server as the URL for ostree_receive(). The remote command to be run is specied as an iterable of strings in the commands argument. If multiple commands are specified, each command is attempted until one is found on the remote host. 
""" local_repo_path = local_repo.get_path().get_path() # If refs were specified, make sure they exist before bothering with # the remote connection if refs: list_refs_flags = OSTree.RepoListRefsExtFlags.EXCLUDE_REMOTES try: # EXCLUDE_MIRRORS only available since ostree 2019.2 list_refs_flags |= OSTree.RepoListRefsExtFlags.EXCLUDE_MIRRORS except AttributeError: pass _, local_refs = local_repo.list_refs_ext(None, list_refs_flags) missing_refs = sorted(set(refs) - local_refs.keys()) if missing_refs: raise OTPushError( f'Refs {" ".join(missing_refs)} not found in {local_repo_path}' ) if not commands: commands = DEFAULT_COMMANDS summary = os.path.join(local_repo_path, 'summary') update_summary = False if not os.path.exists(summary): logger.debug('%s does not exist, regenerating', summary) update_summary = True else: # OSTree updates the mtime of the repo directory when refs have # been updated, so if that's newer than the summary, it needs to # be regenerated. repo_mtime = os.path.getmtime(local_repo_path) summary_mtime = os.path.getmtime(summary) if summary_mtime < repo_mtime: logger.debug('Repo %s has been modified more recently than %s, ' 'regenerating', local_repo_path, summary) update_summary = True if update_summary: logger.info('Regenerating summary file') local_repo.regenerate_summary() with RepoServer(local_repo_path) as http_server: http_port = http_server.address[1] with TemporaryDirectory(prefix='ostree-push-') as tempdir: socket_path = os.path.join(tempdir, 'socket') # Start an SSH master, forward the HTTP port to the remote # server and run ostree-receive there with SSHMultiplexer(dest.host, socket_path, ssh_options, user=dest.user, port=dest.port) as ssh: remote_port = ssh.forward_port(http_port) logger.info('Connected local HTTP port %d to remote port %d', http_port, remote_port) remote_url = f'http://127.0.0.1:{remote_port}' for command in commands: cmd = shlex.split(command) if dry_run: cmd.append('-n') cmd += [dest.repo, remote_url] if refs is not None: cmd += refs logger.debug('Remote command: %s', cmd) try: ssh.run(cmd) break except subprocess.CalledProcessError as err: # Ignore command not found errors to try the # next command. if err.returncode != 127: raise logger.debug( f'Command {cmd[0]} not found on remote host' ) else: # None of the commands were found. cmd_names = " ".join( [shlex.split(cmd)[0] for cmd in commands] ) raise OTPushError( f'Could not find commands {cmd_names} on server' ) PushDest = namedtuple('PushDest', ('host', 'repo', 'user', 'port')) def parse_dest(dest): """Parse the push destination into host and repo Allowed destinations are: * [user@]host:path/to/repo * ssh://[user@]host[:port]/path/to/repo """ # A bit of care is needed because urlparse parses host:repo into a # scheme and path but user@host:repo into just a path. parts = urlparse(dest) if parts.scheme and parts.netloc: # ssh:// URL if parts.scheme != 'ssh': raise ValueError( f'Destination scheme "{parts.scheme}" not allowed') if not parts.path: raise ValueError('Destintion repo missing') return PushDest(host=parts.hostname, user=parts.username, port=parts.port, repo=parts.path) else: # scp form. There should be at least 1 : to separate the host # and repo. host, sep, repo = dest.partition(':') if not sep: raise ValueError('Destination not in form "host:repo"') if not host: raise ValueError('Destination host missing') if not repo: raise ValueError('Destination repo missing') # Try to split user@ from the host. 
user = None tmpuser, sep, tmphost = host.partition('@') if sep: if not tmpuser or not tmphost: raise ValueError(f'Invalid destination host {host}') host = tmphost user = tmpuser return PushDest(host=host, user=user, repo=repo, port=None) class DestArgAction(Action): """Action to set push destination""" def __init__(self, option_strings, dest, nargs=None, default=None, **kwargs): if nargs is not None: raise ValueError('nargs not allowed') if default is not None: raise ValueError('default not allowed') super().__init__(option_strings, dest, **kwargs) def __call__(self, parser, namespace, values, option_string): dest = parse_dest(values) setattr(namespace, self.dest, dest) class SSHOptAction(Action): """Action to collect ssh option verbatim""" # Options that can be specified multiple times MULTI_OPTS = {'-o'} def __init__(self, option_strings, dest, nargs=None, default=None, **kwargs): if option_strings is None: raise ValueError('option strings are required') if nargs is not None: raise ValueError('nargs not allowed') if default is not None: raise ValueError('default not allowed') dest = 'ssh_options' default = [] super().__init__(option_strings, dest, default=default, **kwargs) def __call__(self, parser, namespace, values, option_string): logger.debug('Parsing SSH option %r, %r, %r', namespace, option_string, values) # A bit of care is needed in case parse_args() is called # multiple times. The default list needs to be copied so it's # not updated and the set of options seen needs to be reset. ssh_opts = getattr(namespace, self.dest) if ssh_opts is self.default: ssh_opts = ssh_opts.copy() self._single_opts_seen = set() if option_string not in self.MULTI_OPTS: if option_string in self._single_opts_seen: raise ArgumentError( self, f'Option {option_string} can only be specified once' ) self._single_opts_seen.add(option_string) ssh_opts += [option_string, values] setattr(namespace, self.dest, ssh_opts) class OTPushArgParser(ArgumentParser): """ArgumentParser for ostree-push""" def __init__(self): super().__init__( description='Push ostree refs to remote repository' ) self.add_argument( 'dest', metavar='DEST', action=DestArgAction, help=( 'remote host and repo destination. DEST can take one of two ' 'forms: [user@]host:path/to/repo ' 'or ssh://[user@]host[:port]/path/to/repo.' ) ) self.add_argument('refs', metavar='REF', nargs='*', default=[], help='ostree refs to push, all if none specified') self.add_argument('-n', '--dry-run', action='store_true', help='only show what would be done') self.add_argument('-p', dest='port', type=int, help='ssh port to connect to') self.set_defaults(log_level=logging.INFO) self.add_argument('-q', '--quiet', dest='log_level', action='store_const', const=logging.WARNING, help='disable most messages') self.add_argument('-v', '--verbose', dest='log_level', action='store_const', const=logging.DEBUG, help='enable verbose messages') self.add_argument( '--repo', help='local repository path (default: current directory)' ) self.add_argument( '--command', metavar='COMMAND', dest='commands', action='append', default=None, help=( 'remote pull command. 
Can be specified multiple times to ' 'attempt commands that may be missing ' f'(default: {" ".join(DEFAULT_COMMANDS)})' ), ) self.add_argument('-i', '-o', metavar='OPTION', action=SSHOptAction, help='options passed through to ssh') self.add_argument('--version', action='version', version=f'%(prog)s {VERSION}') def parse_args(self, *args, **kwargs): args = super().parse_args(*args, **kwargs) # If a port option has been supplied, replace the port in the # dest argument with it. if args.port: args.dest = args.dest._replace(port=args.port) return args def main(argv=None): aparser = OTPushArgParser() args = aparser.parse_args(argv) logging.basicConfig(level=args.log_level) # Open the local repo and get the full path if args.repo: repo_file = Gio.File.new_for_path(args.repo) repo = OSTree.Repo.new(repo_file) else: repo = OSTree.Repo.new_default() repo.open() push_refs( repo, args.dest, refs=args.refs, ssh_options=args.ssh_options, commands=args.commands, dry_run=args.dry_run, ) if __name__ == '__main__': main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1705762176.0 ostree-push-1.2.0/otpush/receive.py0000775000175100017510000010204214552756600015454 0ustar00dandan#!/usr/bin/python3 # ostree-receive - Initiate pull from remote # Copyright (C) 2017 Endless Mobile, Inc. # Copyright (C) 2021 Endless OS Foundation LLC # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. """Initiate pull from ostree remote repo ostree-receive pulls updates from a remote ostree repository. The intended use case of ostree-receive is to use it to initiate pulls from the remote server. This would typically be used to publish commits from a build host to a master server. ostree-receive offers a few advantages over a standard "ostree pull". First, the remote does not need to be pre-configured in the repository configuration. ostree-receive will use a fake remote and override the URL and credentials based on the specified URL and its own configuration. Second, ostree-receive checks that the refs to be updated are newer than the refs it has. This prevents accidental downgrades, but it also allows ostree-receive to be run to pull all refs from the remote and unintended refs will be ignored. """ from . 
import VERSION from argparse import ArgumentParser, Namespace, SUPPRESS from collections import OrderedDict from configparser import ConfigParser import dataclasses import fnmatch import gi import logging import os from pathlib import Path import shlex import subprocess import sys from tempfile import TemporaryDirectory import yaml gi.require_version('OSTree', '1.0') from gi.repository import GLib, Gio, OSTree # noqa: E402 logger = logging.getLogger(__name__) class OTReceiveError(Exception): """Errors from ostree-receive""" pass class OTReceiveConfigError(OTReceiveError): """Errors from ostree-receive configuration""" pass @dataclasses.dataclass class OTReceiveRepoConfig: """OTReceiveRepo configuration The path and url fields are required. See the OTReceiveConfig class for details on the remaining optional fields. """ path: Path url: str gpg_sign: list = dataclasses.field(default_factory=list) gpg_homedir: str = None gpg_verify: bool = False gpg_trustedkeys: str = None sign_type: str = 'ed25519' sign_keyfiles: list = dataclasses.field(default_factory=list) sign_verify: bool = False sign_trustedkeyfile: str = None update: bool = True update_hook: str = None @dataclasses.dataclass class OTReceiveConfig: """OTReceive configuration Configuration can be provided from a file or command line arguments using the load method. Config files are YAML mappings with the option names below using hypens instead of underscores. By default, the paths ~/.config/ostree/ostree-receive.conf and /etc/ostree/ostree-receive.conf are read unless the OSTREE_RECEIVE_CONF environment variable is set. That can be used to point to a file to be read. Supported configuration options: root: Specify a repo root directory. When None or '', any repo path is allowed and paths are resolved relative to the current working directory. This is typically the user's home directory. gpg_sign: GPG key IDs for signing received commits and repo metadata. gpg_homedir: GnuPG home directory for loading GPG signing keys. gpg_verify: Whether to verify received commits with GPG. gpg_trustedkeys: GPG keyring for verifying received commits. If None or '', keyrings at ~/.config/ostree/ostree-receive-trustedkeys.gpg or /etc/ostree/ostree-receive-trustedkeys.gpg will be used. OSTree will also use the global trusted keyrings in /usr/share/ostree/trusted.gpg.d. sign_type: OSTree non-GPG signature type. sign_keyfiles: sign_type key files for signing received commits and repo metadata. sign_verify: Whether to verify received commits with sign_type. sign_trustedkeyfile: Keyfile for verifying received commits using sign_type. If null or '', the keyfile at ~/.config/ostree/ostree-receive-trustedkeyfile.SIGNTYPE or /etc/ostree/ostree-receive-trustedkeyfile.SIGNTYPE will be used. update: Update the repo metadata after receiving commits. update_hook: Program to run after new commits have been made. The program will be executed with the environment variable OSTREE_RECEIVE_REPO set to the absolute path of the OSTree repository and the environment variable OSTREE_RECEIVE_REFS set to the set of refs received separated by whitespace. repos: Optional per-repository configuration settings. All of the above settings except for root can be set and will override the global value. The value is a map of repository path to map of settings. The repository path can be relative or absolute. If root is specified, relative paths are resolved below it. log_level: Set the log level. See the logging module for available levels. 
force: Force receiving commits even if nothing changed or the remote commits are not newer than the current commits. dry_run: Only show what would be done without making any commits. """ root: str = None # It would be nice to make this list[str], but that would break gpg_sign: list = dataclasses.field(default_factory=list) gpg_homedir: str = None gpg_verify: bool = False gpg_trustedkeys: str = None sign_type: str = 'ed25519' sign_keyfiles: list = dataclasses.field(default_factory=list) sign_verify: bool = False sign_trustedkeyfile: str = None update: bool = True update_hook: str = None repos: dict = dataclasses.field(default_factory=dict) log_level: str = 'INFO' force: bool = False dry_run: bool = False def __post_init__(self): # Validate the instance. for field in dataclasses.fields(self): value = getattr(self, field.name) # Validate the type. None is allowed if the default is None. if value is None and field.default is None: continue elif not isinstance(value, field.type): inst_type = type(value) raise OTReceiveConfigError( f'{field.name} must be an instance of ' f'{field.type}, but found {inst_type}' ) @classmethod def load(cls, paths=None, args=None): """Create instance from config files and arguments If paths is None, default_paths() will be used. """ conf = {} fields = {field.name for field in dataclasses.fields(cls)} if paths is None: paths = cls.default_paths() # Load config file options for p in paths: try: path = Path(p).expanduser().resolve() except TypeError as err: raise OTReceiveConfigError(err) from None if not path.exists(): logger.debug('Skipping missing config file %s', path) continue logger.debug('Loading config file %s', path) with path.open() as f: data = yaml.safe_load(f) if data is None: logger.debug('Ignoring empty config file %s', path) continue elif not isinstance(data, dict): raise OTReceiveConfigError( f'Config file {path} is not a YAML mapping' ) for option, value in data.items(): if option not in fields: logger.warning( 'Unrecognized option %s in config file %s', option, path ) continue logger.debug( 'Setting option %s to %s from %s', option, value, path ) conf[option] = value # Load argument options if args is not None: if not isinstance(args, Namespace): raise OTReceiveConfigError( 'args is not an argparse.Namespace instance' ) logger.debug('Loading arguments %s', args) for arg, value in vars(args).items(): if arg not in fields: logger.debug('Ignoring argument %s', arg) continue logger.debug('Setting option %s to %s from args', arg, value) conf[arg] = value return cls(**conf) @staticmethod def default_paths(): """Return list of default configuration files""" env_config = os.getenv('OSTREE_RECEIVE_CONF') if env_config: return [Path(env_config)] config_home = Path(os.getenv('XDG_CONFIG_HOME', '~/.config')) return [ Path('/etc/ostree/ostree-receive.conf'), config_home / 'ostree/ostree-receive.conf', ] def get_repo_config(self, path, url): """Get OTReceiveRepoConfig instance for repo path and URL""" repo_path = Path(path) repo_root = ( Path(self.root).resolve() if self.root else None ) if repo_root: if not repo_path.is_absolute(): # Join the relative path to the root. repo_path = repo_root.joinpath(repo_path) # Make sure the path is below the root. repo_path = repo_path.resolve() try: repo_path.relative_to(repo_root) except ValueError: raise OTReceiveError(f'repo {path} not found') from None # Ensure the repository exists. if not repo_path.exists(): raise OTReceiveError(f'repo {path} not found') # See if there's a matching path in repos. 
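# Sketch, not from the original source: a config mapping of the shape the
# OTReceiveConfig docstring above describes, as produced by yaml.safe_load().
# The root path, key ID, repo name and hook path are hypothetical.
#
#   yaml.safe_load("""\
#   root: /srv/ostree
#   gpg_sign: ["0123456789ABCDEF"]
#   repos:
#     flatpak/repo:
#       update_hook: /usr/local/bin/publish-flatpak
#   """)
#   # -> {'root': '/srv/ostree',
#   #     'gpg_sign': ['0123456789ABCDEF'],
#   #     'repos': {'flatpak/repo':
#   #               {'update_hook': '/usr/local/bin/publish-flatpak'}}}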
for key, values in self.repos.items(): config_path = Path(key) if repo_root and not config_path.is_absolute(): config_path = repo_root.joinpath(config_path) try: matches = repo_path.samefile(config_path) except FileNotFoundError: matches = False if matches: logger.debug(f'Applying repos {key} configuration') per_repo_config = values break else: per_repo_config = {} # Copy all but path and url from the per-repo or the global # receive config. repo_config_fields = { field.name for field in dataclasses.fields(OTReceiveRepoConfig) } receive_config_fields = { field.name for field in dataclasses.fields(self) } common_fields = repo_config_fields & receive_config_fields repo_config_args = { field: per_repo_config.get(field, getattr(self, field)) for field in common_fields } repo_config_args['path'] = repo_path repo_config_args['url'] = url return OTReceiveRepoConfig(**repo_config_args) class OTReceiveRepo(OSTree.Repo): """OSTree repository receiving pushed commits An OTReceiveRepoConfig instance is required. """ # The fake remote name REMOTE_NAME = '_receive' # Generated ref patterns to be excluded when pulling everything EXCLUDED_REF_PATTERNS = ( 'appstream/*', 'appstream2/*', OSTree.REPO_METADATA_REF, ) def __init__(self, config): self.config = config self.remotes_dir = None if not isinstance(self.config, OTReceiveRepoConfig): raise OTReceiveError( 'config is not an OTReceiveRepoConfig instance' ) # Ensure the repository exists. if not self.path.exists(): raise OTReceiveError(f'repo {self.path} not found') logger.debug('Using repo path %s', self.path) # Create a temporary remote config file. Just an empty URL is # needed and the rest of the parameters will be supplied in the # pull options. self.remotes_dir = TemporaryDirectory(prefix='ostree-receive-') remote_config_path = os.path.join(self.remotes_dir.name, f'{self.REMOTE_NAME}.conf') remote_config = ConfigParser() remote_section = f'remote "{self.REMOTE_NAME}"' remote_config.add_section(remote_section) remote_config[remote_section]['url'] = self.url if self.config.gpg_verify: trustedkeys = self._get_gpg_trustedkeys() if trustedkeys: remote_config[remote_section]['gpgkeypath'] = trustedkeys remote_config[remote_section]['gpg-verify'] = 'true' else: remote_config[remote_section]['gpg-verify'] = 'false' remote_config[remote_section]['gpg-verify-summary'] = 'false' if self.config.sign_verify: verification_config = f'verification-{self.config.sign_type}-file' trustedkeyfile = self._get_sign_trustedkeyfile() if trustedkeyfile: remote_config[remote_section][verification_config] = \ trustedkeyfile remote_config[remote_section]['sign-verify'] = 'true' remote_config[remote_section]['sign-verify-summary'] = 'false' with open(remote_config_path, 'w') as f: remote_config.write(f, space_around_delimiters=False) repo_file = Gio.File.new_for_path(os.fspath(self.path)) super().__init__(path=repo_file, remotes_config_dir=self.remotes_dir.name) self.open() @property def path(self): return self.config.path @property def url(self): return self.config.url def __enter__(self): return self def __exit__(self, exc_type, exc_value, traceback): self.cleanup() def cleanup(self): """Cleanup instance temporary directory This will be called automatically when the instance is deleted or the context exits. 
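# Illustrative usage, a sketch rather than original code (the path and URL are
# hypothetical): OTReceiveRepo is meant to be used as a context manager so the
# temporary remotes directory is cleaned up even if receive() raises.
#
#   config = OTReceiveRepoConfig(path=Path('/srv/ostree/repo'),
#                                url='http://127.0.0.1:39000')
#   with OTReceiveRepo(config) as repo:
#       updated = repo.receive(['main'], force=False, dry_run=False)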
""" if self.remotes_dir: self.remotes_dir.cleanup() self.remotes_dir = None def _get_gpg_trustedkeys(self): """Get the GPG trusted keyring for verification""" if self.config.gpg_trustedkeys: if not os.path.exists(self.config.gpg_trustedkeys): raise OTReceiveConfigError( f'gpg_trustedkeys keyring "{self.config.gpg_trustedkeys}" ' 'does not exist', ) path = os.path.realpath(self.config.gpg_trustedkeys) logger.debug('Using GPG trusted keyring %s', path) return path else: config_home = Path(os.getenv('XDG_CONFIG_HOME', '~/.config')) default_paths = [ Path('/etc/ostree/ostree-receive-trustedkeys.gpg'), config_home / 'ostree/ostree-receive-trustedkeys.gpg' ] for path in default_paths: path = path.expanduser().resolve() if path.exists(): logger.debug('Using default GPG trusted keyring %s', path) return os.fspath(path) return None def _get_sign_trustedkeyfile(self): """Get the GPG trusted keyring for verification""" if self.config.sign_trustedkeyfile: if not os.path.exists(self.config.sign_trustedkeyfile): self._report_missing_keyfile(self.config.sign_trustedkeyfile, from_config='sign_trustedkeyfile') path = os.path.realpath(self.config.sign_trustedkeyfile) logger.debug('Using trusted keyfile %s', path) return path else: config_home = Path(os.getenv('XDG_CONFIG_HOME', '~/.config')) basename = f'ostree-receive-trustedkeyfile.{self.config.sign_type}' default_paths = [ Path(f'/etc/ostree/{basename}'), config_home / f'ostree/{basename}' ] for path in default_paths: path = path.expanduser().resolve() if path.exists(): logger.debug('Using default trusted keyfile %s', path) return os.fspath(path) return None def _read_keyfile_keys(self, keyfile, *, from_config): try: with open(keyfile) as f: for line in f: line = line.strip() if not line: continue yield line except FileNotFoundError: self._report_missing_keyfile(keyfile, from_config=from_config) def _report_missing_keyfile(self, keyfile, *, from_config): raise OTReceiveConfigError( f'{from_config} keyfile "{keyfile}" does not exist' ) def _get_commit_timestamp(self, rev): """Get the timestamp of a commit""" _, commit, _ = self.load_commit(rev) return OSTree.commit_get_timestamp(commit) def _pull_commits(self, commits): """Pull commits from repo The caller is responsible for managing the repository transaction. """ opts = GLib.Variant('a{sv}', { 'refs': GLib.Variant('as', commits), 'depth': GLib.Variant('i', 0), 'inherit-transaction': GLib.Variant('b', True), # Pull objects directly instead of processing deltas since # it's an error to pull deltas into an archive repo. 'disable-static-deltas': GLib.Variant('b', True), }) progress = OSTree.AsyncProgress.new() progress.connect('changed', OSTree.Repo.pull_default_console_progress_changed, None) try: self.pull_with_options(self.REMOTE_NAME, opts, progress) finally: progress.finish() def copy_commit(self, rev, ref): _, src_variant, src_state = self.load_commit(rev) if src_state != OSTree.RepoCommitState.NORMAL: raise OTReceiveError(f'Cannot copy irregular commit {rev}') _, src_root, _ = self.read_commit(rev) # Make a copy of the commit metadata to update. Like flatpak # build-commit-from, the detached metadata is not copied since # the only known usage is for GPG signatures, which would become # invalid. 
commit_metadata = GLib.VariantDict.new(src_variant.get_child_value(0)) commit_metadata.insert_value(OSTree.COMMIT_META_KEY_REF_BINDING, GLib.Variant('as', [ref])) collection_id = self.get_collection_id() if collection_id is not None: commit_metadata.insert_value( OSTree.COMMIT_META_KEY_COLLECTION_BINDING, GLib.Variant('s', collection_id)) else: commit_metadata.remove(OSTree.COMMIT_META_KEY_COLLECTION_BINDING) # Add flatpak specific metadata if self._is_flatpak_ref(ref): # Deprecated in favor of ostree.ref-binding, but add it for # older flatpak clients commit_metadata.insert_value('xa.ref', GLib.Variant('s', ref)) # Nothing really uses this, but flatpak build-commit-from # adds it commit_metadata.insert_value('xa.from_commit', GLib.Variant('s', rev)) # Convert from GVariantDict to GVariant vardict commit_metadata = commit_metadata.end() # Copy other commit data from source commit commit_subject = src_variant.get_child_value(3).get_string() commit_body = src_variant.get_child_value(4).get_string() # If the dest ref exists, use the current commit as the parent. # Prior to ostree 2019.2, the GIR for # OSTree.RepoResolveRevExtFlags was represented as an # enumeration and the longer name is required. try: resolve_flags = OSTree.RepoResolveRevExtFlags.NONE except AttributeError: resolve_flags = \ OSTree.RepoResolveRevExtFlags.REPO_RESOLVE_REV_EXT_NONE _, parent = self.resolve_rev_ext(ref, allow_noent=True, flags=resolve_flags) # Keep the source commit's timestamp commit_time = OSTree.commit_get_timestamp(src_variant) # Make the new commit mtree = OSTree.MutableTree.new() self.write_directory_to_mtree(src_root, mtree, None) _, commit_root = self.write_mtree(mtree) _, commit_checksum = self.write_commit_with_time(parent, commit_subject, commit_body, commit_metadata, commit_root, commit_time) for key in self.config.gpg_sign: logger.debug('Signing commit %s with GPG key %s', commit_checksum, key) self.sign_commit(commit_checksum, key, self.config.gpg_homedir) if self.config.sign_keyfiles: sign = OSTree.Sign.get_by_name(self.config.sign_type) for keyfile in self.config.sign_keyfiles: logging.debug('Signing commit %s with %s keys from %s', commit_checksum, sign.get_name(), keyfile) for key in self._read_keyfile_keys( keyfile, from_config='sign_keyfiles'): sign.set_sk(GLib.Variant('s', key)) sign.commit(self, commit_checksum, None) # Update the ref self.transaction_set_refspec(ref, commit_checksum) return commit_checksum def _get_local_refs(self): flags = OSTree.RepoListRefsExtFlags.EXCLUDE_REMOTES try: # EXCLUDE_MIRRORS only available since ostree 2019.2 flags |= OSTree.RepoListRefsExtFlags.EXCLUDE_MIRRORS except AttributeError: pass _, refs = self.list_refs_ext(None, flags) return refs @staticmethod def _is_flatpak_ref(ref): return ref.startswith('app/') or ref.startswith('runtime/') def _is_flatpak_repo(self): refs = self._get_local_refs() return any(filter(self._is_flatpak_ref, refs)) def update_repo_metadata(self): sign_opts = [] if self.config.gpg_sign: sign_opts += [f'--gpg-sign={key}' for key in self.config.gpg_sign] if self.config.gpg_homedir: sign_opts.append(f'--gpg-homedir={self.config.gpg_homedir}') # Since --sign= keys are passed directly on the CLI, make a separate # copy of the options list with the key "censored", so that the command # line can be safely printed. 
safe_sign_opts = sign_opts[:] if self.config.sign_keyfiles: for opts in sign_opts, safe_sign_opts: opts.append(f'--sign-type={self.config.sign_type}') for keyfile in self.config.sign_keyfiles: for i, key in enumerate(self._read_keyfile_keys( keyfile, from_config='sign_keyfiles'), start=1): sign_opts.append(f'--sign={key}') safe_sign_opts.append(f'--sign=') if self._is_flatpak_repo(): cmd_prefix = [ 'flatpak', 'build-update-repo', str(self.path), ] else: cmd_prefix = [ 'ostree', f'--repo={self.path}', 'summary', '--update', ] logger.info('Updating repo metadata with %s', ' '.join(cmd_prefix + safe_sign_opts)) subprocess.check_call(cmd_prefix + sign_opts) def update_repo_hook(self, refs): """Run the configured update_hook The program will be executed with the environment variable OSTREE_RECEIVE_REPO set to the absolute path of the OSTree repository and the environment variable OSTREE_RECEIVE_REFS set to the set of refs received separated by whitespace. """ if not self.config.update_hook: raise OTReceiveConfigError('update_hook not set in configuration') cmd = shlex.split(self.config.update_hook) env = os.environ.copy() env['OSTREE_RECEIVE_REPO'] = os.fspath(self.path.absolute()) env['OSTREE_RECEIVE_REFS'] = ' '.join(refs) logger.info('Updating repo with %s', self.config.update_hook) logger.debug('OSTREE_RECEIVE_REPO=%s', env['OSTREE_RECEIVE_REPO']) logger.debug('OSTREE_RECEIVE_REFS=%s', env['OSTREE_RECEIVE_REFS']) subprocess.check_call(cmd, env=env) def receive(self, refs, force=False, dry_run=False): # See what revisions we're pulling. _, remote_refs = self.remote_list_refs(self.REMOTE_NAME) if len(refs) == 0: # Pull all the remote refs refs = remote_refs.keys() # Strip duplicate and generated refs refs = set(refs) for pattern in self.EXCLUDED_REF_PATTERNS: refs -= set(fnmatch.filter(refs, pattern)) wanted_refs = sorted(refs) logger.info('Remote commits:') for ref in wanted_refs: logger.info(' %s %s', ref, remote_refs.get(ref)) # See what commits we have on these refs. current_refs = self._get_local_refs() logger.info('Current commits:') for ref in wanted_refs: logger.info(' %s %s', ref, current_refs.get(ref)) # See what refs need to be pulled, erroring if the ref doesn't # exist on the remote refs_to_pull = OrderedDict() for ref in wanted_refs: current_rev = current_refs.get(ref) remote_rev = remote_refs.get(ref) if remote_rev is None: raise OTReceiveError( f'Could not find ref {ref} in summary file') if force or remote_rev != current_rev: logger.debug('Pulling %s', ref) refs_to_pull[ref] = remote_rev if len(refs_to_pull) == 0: logger.info('No refs need updating') return set() # Start a transaction for the entire operation self.prepare_transaction() try: # Pull the refs by commit commits_to_pull = list(set(refs_to_pull.values())) self._pull_commits(commits_to_pull) # See what refs should be merged, skipping older commits and # commits on the same root # # FIXME: Newer ostree puts the commit timestamps in the # summary file in the ostree.commit.timestamp key. When # that's deployed and used everywhere we care about, switch # to doing this in the pre-pull checks. 
refs_to_merge = OrderedDict() for ref, remote_rev in refs_to_pull.items(): if ref not in current_refs: refs_to_merge[ref] = remote_rev else: current_rev = current_refs[ref] current_timestamp = self._get_commit_timestamp(current_rev) remote_timestamp = self._get_commit_timestamp(remote_rev) _, current_root, _ = self.read_commit(current_rev) _, remote_root, _ = self.read_commit(remote_rev) if remote_timestamp > current_timestamp and \ not current_root.equal(remote_root): refs_to_merge[ref] = remote_rev else: if remote_timestamp <= current_timestamp: logger.warning( 'received %s commit %s is not newer than ' 'current %s commit %s', ref, remote_rev, ref, current_rev ) if current_root.equal(remote_root): logger.warning( 'received %s commit %s has the same content ' 'as current %s commit %s', ref, remote_rev, ref, current_rev ) if force: logger.info('Forcing merge of ref %s', ref) refs_to_merge[ref] = remote_rev if len(refs_to_merge) == 0: logger.info('No refs need updating') self.abort_transaction() return set() # For a dry run, exit now before creating the refs if dry_run: self.abort_transaction() return refs_to_merge.keys() # Copy the pulled commits to the local ref so they get the # correct collection and ref bindings for ref, rev in refs_to_merge.items(): new_rev = self.copy_commit(rev, ref) logger.debug('Set %s ref to %s', ref, new_rev) # All done, commit the changes self.commit_transaction() except: # noqa: E722 self.abort_transaction() raise # Finally, regenerate the summary and metadata if self.config.update: if self.config.update_hook: self.update_repo_hook(refs_to_merge.keys()) else: self.update_repo_metadata() return refs_to_merge.keys() class OTReceiver: """Pushed commit receiver An OTReceiveConfig instance can be provided to configure the receiver. """ def __init__(self, config=None): self.config = config or OTReceiveConfig() if not isinstance(self.config, OTReceiveConfig): raise OTReceiveError( 'config is not an OTReceiveConfig instance' ) def receive(self, path, url, refs): """Receive pushed commits Creates an OTReceiveRepo at path and receives commits on refs from url. """ repo_config = self.config.get_repo_config(path, url) with OTReceiveRepo(repo_config) as repo: return repo.receive(refs, self.config.force, self.config.dry_run) class OTReceiveArgParser(ArgumentParser): """ArgumentParse for ostree-receive""" def __init__(self): config_paths = ' or '.join(map(str, OTReceiveConfig.default_paths())) super().__init__( description='Pull from a remote repo to a dev repo', epilog=( 'Many options can also be set in a config file ' f'({config_paths}). The config file uses YAML syntax and ' 'must represent a YAML mapping.' ), # The global default is set to SUPPRESS so that options # don't override config defaults. 
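# Minimal demonstration of the SUPPRESS behaviour relied on here (a sketch,
# not original code): options the user did not pass never appear in the parsed
# namespace, so they cannot clobber values loaded from the config file.
#
#   p = ArgumentParser(argument_default=SUPPRESS)
#   p.add_argument('-f', '--force', action='store_true')
#   vars(p.parse_args([]))            # -> {}
#   vars(p.parse_args(['--force']))   # -> {'force': True}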
argument_default=SUPPRESS, ) self.add_argument('repo', metavar='REPO', help='repository name to use') self.add_argument('url', metavar='URL', help='remote repository URL') self.add_argument('refs', metavar='REF', nargs='*', default=None, help='ostree refs to pull, all if none specified') self.add_argument('--no-update', dest='update', action='store_false', help="""don't update repo metadata""") self.add_argument('-n', '--dry-run', action='store_true', help='only show what would be done') self.add_argument('-f', '--force', action='store_true', help=('force pull even if nothing changed or ' 'remote commits are not newer')) self.add_argument('-q', '--quiet', dest='log_level', action='store_const', const='WARNING', help='disable most messages') self.add_argument('-v', '--verbose', dest='log_level', action='store_const', const='DEBUG', help='enable verbose messages') self.add_argument('--version', action='version', version=f'%(prog)s {VERSION}') def main(): aparser = OTReceiveArgParser() args = aparser.parse_args() config = OTReceiveConfig.load(args=args) logging.basicConfig(level=config.log_level) receiver = OTReceiver(config) receiver.receive(args.repo, args.url, args.refs) def compat_main(): """Dispatch to legacy main if needed""" # The repo path is an option in legacy receive. if any(arg.startswith('--repo') for arg in sys.argv[1:]): from . import receive_legacy return receive_legacy.main() return main() if __name__ == '__main__': main() ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1705762176.0 ostree-push-1.2.0/otpush/receive_legacy.py0000664000175100017510000003232014552756600016776 0ustar00dandan#!/usr/bin/python3 # ostree-receive-0 - Receive OSTree commits from remote client # Copyright (C) 2015 Dan Nicholson # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
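# Worked example (a sketch, not part of the original file): the legacy
# protocol implemented below frames every message with a HEADER_SIZE (5) byte
# header of byteorder char + protocol version + command type + 2-byte payload
# size. For an "info" command carrying 10 bytes on a little-endian machine:
#
#   encode_header(PushCommandType.info, 10)
#   # -> b'l' + b'\x00' + b'\x00' + (10).to_bytes(2, 'little')
#   # == b'l\x00\x00\n\x00'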
from argparse import ArgumentParser from enum import Enum import gi import logging import os import sys import tempfile import shutil gi.require_version('OSTree', '1.0') from gi.repository import GLib, Gio, OSTree # noqa: E402 PROTO_VERSION = 0 HEADER_SIZE = 5 class PushException(Exception): pass class PushCommandType(Enum): info = 0 update = 1 putobject = 2 status = 3 done = 4 def msg_byteorder(sys_byteorder=sys.byteorder): if sys_byteorder == 'little': return 'l' elif sys_byteorder == 'big': return 'B' else: raise PushException('Unrecognized system byteorder %s' % sys_byteorder) def sys_byteorder(msg_byteorder): if msg_byteorder == 'l': return 'little' elif msg_byteorder == 'B': return 'big' else: raise PushException('Unrecognized message byteorder %s' % msg_byteorder) def ostree_object_path(repo, obj): repodir = repo.get_path().get_path() return os.path.join(repodir, 'objects', obj[0:2], obj[2:]) class PushCommand(object): def __init__(self, cmdtype, args): self.cmdtype = cmdtype self.args = args self.validate(self.cmdtype, self.args) self.variant = GLib.Variant('a{sv}', self.args) @staticmethod def validate(command, args): if not isinstance(command, PushCommandType): raise PushException('Message command must be PushCommandType') if not isinstance(args, dict): raise PushException('Message args must be dict') # Ensure all values are variants for a{sv} vardict for val in args.values(): if not isinstance(val, GLib.Variant): raise PushException('Message args values must be ' 'GLib.Variant') class PushMessageWriter(object): def __init__(self, file, byteorder=sys.byteorder): self.file = file self.byteorder = byteorder self.msg_byteorder = msg_byteorder(self.byteorder) def encode_header(self, cmdtype, size): header = self.msg_byteorder.encode() + \ PROTO_VERSION.to_bytes(1, self.byteorder) + \ cmdtype.value.to_bytes(1, self.byteorder) + \ size.to_bytes(2, self.byteorder) return header def encode_message(self, command): if not isinstance(command, PushCommand): raise PushException('Command must by GLib.Variant') data = command.variant.get_data_as_bytes() size = data.get_size() # Build the header header = self.encode_header(command.cmdtype, size) return header + data.get_data() def write(self, command): msg = self.encode_message(command) self.file.write(msg) self.file.flush() def send_info(self, repo): cmdtype = PushCommandType.info mode = repo.get_mode() _, refs = repo.list_refs(None, None) args = { 'mode': GLib.Variant('i', mode), 'refs': GLib.Variant('a{ss}', refs) } command = PushCommand(cmdtype, args) self.write(command) def send_update(self, refs): cmdtype = PushCommandType.update args = {} for branch, revs in refs.items(): args[branch] = GLib.Variant('(ss)', revs) command = PushCommand(cmdtype, args) self.write(command) def send_putobject(self, repo, obj): cmdtype = PushCommandType.putobject objpath = ostree_object_path(repo, obj) size = os.stat(objpath).st_size args = { 'object': GLib.Variant('s', obj), 'size': GLib.Variant('t', size) } command = PushCommand(cmdtype, args) self.write(command) # Now write the file after the command logging.info('Sending object {}'.format(obj)) logging.debug('Size {} from {}'.format(size, objpath)) with open(objpath, 'rb') as objf: remaining = size while remaining > 0: chunk = min(2 ** 20, remaining) buf = objf.read(chunk) logging.debug('Sending {} bytes for {}' .format(len(buf), obj)) self.file.write(buf) self.file.flush() remaining -= chunk logging.debug('{} bytes remaining for {}' .format(remaining, obj)) def send_status(self, result, message=''): 
cmdtype = PushCommandType.status args = { 'result': GLib.Variant('b', result), 'message': GLib.Variant('s', message) } command = PushCommand(cmdtype, args) self.write(command) def send_done(self): command = PushCommand(PushCommandType.done, {}) self.write(command) class PushMessageReader(object): def __init__(self, file, byteorder=sys.byteorder, tmpdir=None): self.file = file self.byteorder = byteorder self.tmpdir = tmpdir def decode_header(self, header): if len(header) != HEADER_SIZE: raise Exception('Header is %d bytes, not %d' % (len(header), HEADER_SIZE)) order = sys_byteorder(chr(header[0])) version = int(header[1]) if version != PROTO_VERSION: raise Exception('Unsupported protocol version %d' % version) cmdtype = PushCommandType(int(header[2])) vlen = int.from_bytes(header[3:], order) return order, version, cmdtype, vlen def decode_message(self, message, size, order): if len(message) != size: raise Exception('Expected %d bytes, but got %d' % (size, len(message))) data = GLib.Bytes.new(message) variant = GLib.Variant.new_from_bytes(GLib.VariantType.new('a{sv}'), data, False) if order != self.byteorder: variant = GLib.Variant.byteswap(variant) return variant def read(self): header = self.file.read(HEADER_SIZE) if len(header) == 0: # Remote end quit return None, None order, version, cmdtype, size = self.decode_header(header) msg = self.file.read(size) if len(msg) != size: raise PushException('Did not receive full message') args = self.decode_message(msg, size, order) return cmdtype, args def receive(self, allowed): cmdtype, args = self.read() if cmdtype is None: raise PushException('Expected reply, got none') if cmdtype not in allowed: raise PushException('Unexpected reply type', cmdtype.name) return cmdtype, args.unpack() def receive_info(self): cmdtype, args = self.receive([PushCommandType.info]) return args def receive_update(self): cmdtype, args = self.receive([PushCommandType.update]) return args def receive_putobject_data(self, repo, args): # Read in the object and store it in the tmp directory obj = args['object'] size = args['size'] tmppath = os.path.join(self.tmpdir, obj) logging.info('Receiving object {}'.format(obj)) logging.debug('Size {} to {}'.format(size, tmppath)) with open(tmppath, 'wb') as tmpf: remaining = size while remaining > 0: chunk = min(2 ** 20, remaining) buf = self.file.read(chunk) logging.debug('Receiving {} bytes for {}' .format(len(buf), obj)) tmpf.write(buf) remaining -= chunk logging.debug('{} bytes remaining for {}' .format(remaining, obj)) def receive_putobject(self, repo): cmdtype, args = self.receive([PushCommandType.putobject]) self.receive_putobject_data(repo, args) return args def receive_status(self): cmdtype, args = self.receive([PushCommandType.status]) return args def receive_done(self): cmdtype, args = self.receive([PushCommandType.done]) return args class OSTreeReceiver(object): def __init__(self, repopath): self.repopath = repopath if self.repopath is None: self.repo = OSTree.Repo.new_default() else: self.repo = OSTree.Repo.new(Gio.File.new_for_path(self.repopath)) self.repo.open(None) repo_tmp = os.path.join(self.repopath, 'tmp') self.tmpdir = tempfile.mkdtemp(dir=repo_tmp, prefix='ostree-push-') self.writer = PushMessageWriter(sys.stdout.buffer) self.reader = PushMessageReader(sys.stdin.buffer, tmpdir=self.tmpdir) # Set a sane umask before writing any objects os.umask(0o0022) def close(self): shutil.rmtree(self.tmpdir) sys.stdout.close() return 0 def run(self): try: return self.do_run() except PushException: # Ensure we cleanup files if 
there was an error self.close() raise def do_run(self): # Send info immediately self.writer.send_info(self.repo) # Wait for update or done command cmdtype, args = self.reader.receive([PushCommandType.update, PushCommandType.done]) if cmdtype == PushCommandType.done: return 0 update_refs = args for branch, revs in update_refs.items(): # Check that each branch can be updated appropriately _, current = self.repo.resolve_rev(branch, True) if current is None: # From commit should be all 0s if revs[0] != '0' * 64: self.writer.send_status(False, 'Invalid from commit %s ' 'for new branch %s' % (revs[0], branch)) self.reader.receive_done() return 1 elif revs[0] != current: self.writer.send_status(False, 'Branch %s is at %s, not %s' % (branch, current, revs[0])) self.reader.receive_done() return 1 # All updates valid self.writer.send_status(True) # Wait for putobject or done command received_objects = [] while True: cmdtype, args = self.reader.receive([PushCommandType.putobject, PushCommandType.done]) if cmdtype == PushCommandType.done: logging.debug('Received done, exiting putobject loop') break self.reader.receive_putobject_data(self.repo, args) received_objects.append(args['object']) self.writer.send_status(True) # If we didn't get any objects, we're done if len(received_objects) == 0: return 0 # Got all objects, move them to the object store for obj in received_objects: tmp_path = os.path.join(self.tmpdir, obj) obj_path = ostree_object_path(self.repo, obj) os.makedirs(os.path.dirname(obj_path), exist_ok=True) logging.debug('Renaming {} to {}'.format(tmp_path, obj_path)) os.rename(tmp_path, obj_path) # Finally, update the refs for branch, revs in update_refs.items(): logging.debug('Setting ref {} to {}'.format(branch, revs[1])) self.repo.set_ref_immediate(None, branch, revs[1], None) return 0 def main(): aparser = ArgumentParser(description='Receive pushed ostree objects') aparser.add_argument('--repo', help='repository path') aparser.add_argument('-v', '--verbose', action='store_true', help='enable verbose output') aparser.add_argument('--debug', action='store_true', help='enable debugging output') args = aparser.parse_args() loglevel = logging.WARNING if args.verbose: loglevel = logging.INFO if args.debug: loglevel = logging.DEBUG logging.basicConfig(format='%(module)s: %(levelname)s: %(message)s', level=loglevel, stream=sys.stderr) receiver = OSTreeReceiver(args.repo) return receiver.run() if __name__ == '__main__': exit(main()) ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1670088793.0 ostree-push-1.2.0/pyproject.toml0000664000175100017510000000014214342704131015032 0ustar00dandan[build-system] requires = ["setuptools>=40.8.0", "wheel"] build-backend = "setuptools.build_meta" ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1670088793.0 ostree-push-1.2.0/pytest.ini0000664000175100017510000000066314342704131014157 0ustar00dandan[pytest] # Run in verbose mode by default. Use -q to bump the verbosity down. addopts = -v # Enable debug logging by default to help track down issues. This is set # on the root logger, but it seems the only current log messages are # from our modules. The alternative is to use the caplog fixture on all # tests to just set the level on our loggers. 
log_level = DEBUG # Raise an error if tests marked xfail pass xfail_strict = True ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1705763123.7210603 ostree-push-1.2.0/scripts/0000775000175100017510000000000014552760464013626 5ustar00dandan././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1705762176.0 ostree-push-1.2.0/scripts/ostree-receive-shell0000775000175100017510000000544114552756600017603 0ustar00dandan#!/usr/bin/env python3 # ostree-receive-shell - Login shell for ostree-receive # Copyright (C) 2017 Endless Mobile, Inc. # Copyright (C) 2021 Endless OS Foundation LLC # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. """Login shell for ostree repository owner When a remote user runs ostree-push, it opens an SSH tunnel to the repository server and runs ostree-receive. In order to not allow running arbitrary commands over SSH, ostree-receive-shell restricts the command run by SSH to only run ostree-receive. This is inspired by `git-shell`, which only allows running `git-receive-pack` to allow git pushes but no other usage. """ import errno import os import shlex import sys # Allow all possible ostree-receive installed names. Remember to add to # this tuple when bumping the major version. ALLOWED_COMMANDS = ( 'ostree-receive-1', 'ostree-receive-0', 'ostree-receive', ) PROG = os.path.basename(__file__) BINDIR = os.path.dirname(os.path.realpath(__file__)) # Can only run as "ostree-receive-shell -c 'command'" argc = len(sys.argv) if argc == 1: print(f'{PROG}: Cannot run interactively', file=sys.stderr) sys.exit(1) elif argc != 3 or sys.argv[1] != '-c': print(f'{PROG}: Must be run with no arguments or with -c cmd', file=sys.stderr) sys.exit(1) # Make sure required command has been specified. args = shlex.split(sys.argv[2]) if args[0] not in ALLOWED_COMMANDS: print(f'{PROG}: Executing {args[0]} not allowed', file=sys.stderr) sys.exit(1) # Add this program's directory to PATH for convenience in case # ostree-receive is installed in a non-standard location. 
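# Illustrative invocations (a sketch, not original code; the repo name and URL
# are hypothetical). sshd runs the login shell with the client's command after
# -c, and only the ostree-receive names above are allowed through:
#
#   ostree-receive-shell -c 'ostree-receive-1 repo http://127.0.0.1:39000'
#       -> command exec'd via os.execvpe()
#   ostree-receive-shell -c 'rm -rf /'
#       -> rejected ("Executing rm not allowed"), exit status 1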
env = os.environ.copy() path_value = env.get('PATH', os.defpath) path_components = path_value.split(os.pathsep) if BINDIR not in path_components: path_components.append(BINDIR) env['PATH'] = os.pathsep.join(path_components) # Execute the command try: os.execvpe(args[0], args, env) except OSError as err: print(f'{PROG}: {args[0]}: {err.strerror}', file=sys.stderr) # Emulate bash's exit codes if err.errno == errno.ENOENT: sys.exit(127) elif err.errno == errno.EACCES: sys.exit(126) else: sys.exit(1) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1705763123.7210603 ostree-push-1.2.0/setup.cfg0000664000175100017510000000224014552760464013756 0ustar00dandan[metadata] name = ostree-push version = attr: otpush.VERSION author = Dan Nicholson author_email = dbn@endlessos.org description = Push and receive OSTree commits long_description = file: README.md long_description_content_type = text/markdown url = https://github.com/dbnicholson/ostree-push license_file = COPYING classifiers = Programming Language :: Python :: 3 Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Development Status :: 5 - Production/Stable License :: OSI Approved :: GNU General Public License v2 or later (GPLv2+) Operating System :: POSIX Topic :: Software Development :: Build Tools Topic :: System :: Archiving :: Mirroring Topic :: System :: Archiving :: Packaging Topic :: System :: Software Distribution [options] packages = otpush scripts = scripts/ostree-receive-shell install_requires = PyGObject PyYAML python_requires = >=3.7 [options.entry_points] console_scripts = ostree-push = otpush.push:main ostree-receive = otpush.receive:compat_main ostree-receive-1 = otpush.receive:main ostree-receive-0 = otpush.receive_legacy:main [egg_info] tag_build = tag_date = 0 ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1670088793.0 ostree-push-1.2.0/setup.py0000775000175100017510000000007414342704131013637 0ustar00dandan#!/usr/bin/env python3 import setuptools setuptools.setup() ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1705763123.7210603 ostree-push-1.2.0/tests/0000775000175100017510000000000014552760464013301 5ustar00dandan././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1670088793.0 ostree-push-1.2.0/tests/__init__.py0000664000175100017510000000000014342704131015362 0ustar00dandan././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1670088793.0 ostree-push-1.2.0/tests/conftest.py0000664000175100017510000001016614342704131015466 0ustar00dandanfrom otpush import push, receive import os import pytest import shutil import subprocess import yaml from .util import ( DATADIR, ED25519_PRIVATE_KEY, ED25519_PUBLIC_KEY, PGP_KEY, TESTSDIR, SRCDIR, kill_gpg_agent, ssh_server, TmpRepo, ) @pytest.fixture(scope='session') def cli_env_vars(): """Provide environment variables for running via CLI Provide adjusted PATH and PYTHONPATH environment variables for the processes run subprocess or sshd so that they more closely matches the current environment. 
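# Sketch of how the returned mapping is presumably consumed by the tests (an
# assumption, not original code; the command shown is hypothetical):
#
#   env = dict(os.environ, **cli_env_vars)
#   subprocess.run(['ostree-push', '--version'], env=env, check=True)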
""" env_vars = {} path = os.getenv('PATH', os.defpath) cli_path_parts = path.split(os.pathsep) cli_path_parts.insert(0, TESTSDIR) toxbindir = os.getenv('TOXBINDIR') if toxbindir: cli_path_parts.insert(0, toxbindir) cli_path = os.pathsep.join(cli_path_parts) env_vars['PATH'] = cli_path pypath = os.getenv('PYTHONPATH') cli_pypath_parts = pypath.split(os.pathsep) if pypath else [] if not toxbindir: cli_pypath_parts.insert(0, SRCDIR) cli_pypath = os.pathsep.join(cli_pypath_parts) env_vars['PYTHONPATH'] = cli_pypath return env_vars @pytest.fixture(scope='session') def ssh_datadir(tmp_path_factory): datadir = tmp_path_factory.mktemp('ssh-data') for src in ('host_rsa_key', 'host_rsa_key.pub', 'id_rsa', 'id_rsa.pub', 'sshd_config'): shutil.copy(os.path.join(DATADIR, src), datadir) # ssh and sshd refuse to start when the private keys are group or # world accessible. os.chmod(datadir / 'host_rsa_key', 0o600) os.chmod(datadir / 'id_rsa', 0o600) return datadir @pytest.fixture(scope='session') def sshd(ssh_datadir, cli_env_vars): sshd_config = ssh_datadir / 'sshd_config' host_key = ssh_datadir / 'host_rsa_key' authorized_keys = ssh_datadir / 'id_rsa.pub' with ssh_server(sshd_config, host_key, authorized_keys, cli_env_vars) \ as server_info: yield server_info @pytest.fixture def ssh_options(sshd, ssh_datadir): id_file = ssh_datadir / 'id_rsa' return [ '-i', str(id_file), '-o', 'IdentitiesOnly=yes', '-o', 'StrictHostKeyChecking=no', '-o', 'UserKnownHostsFile=/dev/null', ] @pytest.fixture def ssh_socket(tmp_path): return str(tmp_path / 'ssh_socket') @pytest.fixture def tmp_files_path(tmp_path): return tmp_path / 'files' @pytest.fixture def source_repo(tmp_path, tmp_files_path): repo_path = tmp_path / 'source-repo' repo = TmpRepo(repo_path, collection_id=None) # Turn on auto summary generation config = repo.copy_config() config.set_boolean('core', 'auto-update-summary', True) repo.write_config(config) return repo @pytest.fixture def source_server(source_repo): with push.RepoServer(source_repo.path) as server: yield server @pytest.fixture def dest_repo(tmp_path): repo_path = tmp_path / 'dest-repo' return TmpRepo(repo_path) @pytest.fixture def receiver(): config = receive.OTReceiveConfig(update=False) return receive.OTReceiver(config) @pytest.fixture def receive_repo(dest_repo, source_server): config = receive.OTReceiveRepoConfig( dest_repo.path, source_server.url, update=False, ) with receive.OTReceiveRepo(config) as repo: yield repo @pytest.fixture def receive_config_path(tmp_path): path = tmp_path / 'ostree-receive.conf' config = { 'update': False, } with path.open('w') as f: yaml.dump(config, f) return path @pytest.fixture def gpg_homedir(tmp_path): """Temporary GPG homedir with private key imported""" homedir = tmp_path / 'gnupg' homedir.mkdir(mode=0o700) cmd = ('gpg', '--homedir', str(homedir), '--import', str(PGP_KEY)) subprocess.run(cmd, check=True) yield homedir kill_gpg_agent(homedir) @pytest.fixture def ed25519_public_keyfile(tmp_path): dest = tmp_path / 'public.ed25519' dest.write_text(ED25519_PUBLIC_KEY) return str(dest) @pytest.fixture def ed25519_private_keyfile(tmp_path): dest = tmp_path / 'private.ed25519' dest.write_text(ED25519_PRIVATE_KEY) return str(dest) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1705763123.7210603 ostree-push-1.2.0/tests/data/0000775000175100017510000000000014552760464014212 5ustar00dandan././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1670088793.0 
ostree-push-1.2.0/tests/data/host_rsa_key0000664000175100017510000000343714342704131016620 0ustar00dandan-----BEGIN OPENSSH PRIVATE KEY----- b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABFwAAAAdzc2gtcn NhAAAAAwEAAQAAAQEArUsa8jfSeLjPaT0gmX5r+DkaHCiKmdYEkD4C/JPZ7YWzUUM+Tj6q Ir5Lgz/Hsv0uetsc4aJZ5k/vOkRgR8gJolCao0LqEXvM7xC87xapU6l3chBAqUsl/qZ3lU TDMfSrgN3B4Ak5Vhzy5W4wiUtIq1DaQGPL5cHirU+haQ5J8K6JZUr2xB/iZRXAbj56LM43 Y9L1YRx4om1CPCKl3C+kE+Ny4/jdJmBdeLOjq78WdgjBFzlD0IdrMNt6O4E5mZJQZypeOx LrjdM8bOnzV4YSSyrTIf5DkQbZaXEHGzrIHUy+vvCjiWPQUHW8sJFVu85uMtJ6mvcM7Fss JfAvxeELNwAAA8iUg48AlIOPAAAAAAdzc2gtcnNhAAABAQCtSxryN9J4uM9pPSCZfmv4OR ocKIqZ1gSQPgL8k9nthbNRQz5OPqoivkuDP8ey/S562xzholnmT+86RGBHyAmiUJqjQuoR e8zvELzvFqlTqXdyEECpSyX+pneVRMMx9KuA3cHgCTlWHPLlbjCJS0irUNpAY8vlweKtT6 FpDknwrollSvbEH+JlFcBuPnoszjdj0vVhHHiibUI8IqXcL6QT43Lj+N0mYF14s6OrvxZ2 CMEXOUPQh2sw23o7gTmZklBnKl47EuuN0zxs6fNXhhJLKtMh/kORBtlpcQcbOsgdTL6+8K OJY9BQdbywkVW7zm4y0nqa9wzsWywl8C/F4Qs3AAAAAwEAAQAAAQB364lyJP8ZW4rsoMh2 b+33kExpQATqABiYWNFoSEh+/g7hJU9whhzmfhlzcieXzWzkx8xsuBTONHKwN7EI+3zvPi hE4oFoJ+vhZAvayCvVIWjvHiAhB0f7uOb3/OW3JEGqo8sOZlOvK8lKy736xIY9R25BMM5g 6JMcrAZe1OyGCJ3ZeKnosgVYyH8D+Xrd8wDOYuflA2FTA9NBluZm0S3qiCmoLglTEJpmaa PllxL16UheXQynJuuQXPXUEgImj7nCYYsTqxaciv45/zFKZKo5/Q5zO8m2HL+FuC1VWRCY kTzv7xhGOxdhgmSH7ki4rQAJpyvCluDjnIMI3vnzbQkBAAAAgE4nFXJdi7ambiv5/SuT59 VetSRMBIqZlvxPPXQZMBmgYjOoNCS4Q5nF7iZm7QtZSMm7+eRgf7Vr7SsQWobmzKpDY7jD G8QPA912LejrQ9ZCPlOmMr3iStgJor0EMo0eHpAZbTePhZ/bPOgu3ZGSFhdGJS5lchfISz 1k6FgK8wrIAAAAgQDjjmvYsWIy//vviB56egdOj0XJrmLr584jSAoqppH8B0GM7fUVRr5o /8951z7eGR2/VTpdOwuzb0HcmRI7tNH13yGNg7vkDJ6kgAhTis2kPII4LNd1EPbV0mZAgA QY16WkBaqJxqNtmQKKZ46hO6Q/a/e0odxU4h2dv+hjP4CrcQAAAIEAwvRSqEs2Fks80IMu aSUshZC/nyrThl7sCNc6QmnpOViwalypDNLMAuIWGFAr6EUYiSSilLvwB3bFP5woVGmvgg havvwgMd1opRVP7SVsCrewR5g0PFvPOc+JSns0qgZJMKnxSv94SC7WX8qNeLetOhLnqG4M YlyIY/cFY769PScAAAAQdGVzdC5leGFtcGxlLmNvbQECAw== -----END OPENSSH PRIVATE KEY----- ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1670088793.0 ostree-push-1.2.0/tests/data/host_rsa_key.pub0000664000175100017510000000061614342704131017401 0ustar00dandanssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQCtSxryN9J4uM9pPSCZfmv4ORocKIqZ1gSQPgL8k9nthbNRQz5OPqoivkuDP8ey/S562xzholnmT+86RGBHyAmiUJqjQuoRe8zvELzvFqlTqXdyEECpSyX+pneVRMMx9KuA3cHgCTlWHPLlbjCJS0irUNpAY8vlweKtT6FpDknwrollSvbEH+JlFcBuPnoszjdj0vVhHHiibUI8IqXcL6QT43Lj+N0mYF14s6OrvxZ2CMEXOUPQh2sw23o7gTmZklBnKl47EuuN0zxs6fNXhhJLKtMh/kORBtlpcQcbOsgdTL6+8KOJY9BQdbywkVW7zm4y0nqa9wzsWywl8C/F4Qs3 test.example.com ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1670088793.0 ostree-push-1.2.0/tests/data/id_rsa0000664000175100017510000000343714342704131015367 0ustar00dandan-----BEGIN OPENSSH PRIVATE KEY----- b3BlbnNzaC1rZXktdjEAAAAABG5vbmUAAAAEbm9uZQAAAAAAAAABAAABFwAAAAdzc2gtcn NhAAAAAwEAAQAAAQEAu27BAzSJhNvLly27y0akfL0i0ayP5L8ocCkQiAvb9K0N6bwQ7eZb YwOS25ml0slG69Uxc1mLKxm9Z3sVmbyWR+3GooBp4YncLqwabOnsitZT1XAsC640EHt63Q o9Y4d4/Flk7/sLeAnON2PXi7lhr4mSyunIQ7lvbEfyyNrTs7nyctTYUVcMpVoCwmKF1hFt uUDmH9n4PXRB6bkk7cI4koMWy8mrPvLYiT3C3SgM1YbW687Jaa6hlGPlQnhb9rEW/nY099 /RZMuBnCoYPmeRaw08MmsvNLBcbknXKdg9A0GmDhMBUSqfuzIoCIUjnTsyodaBdQjIYRek OoeXa6DybQAAA8gkLyTBJC8kwQAAAAdzc2gtcnNhAAABAQC7bsEDNImE28uXLbvLRqR8vS LRrI/kvyhwKRCIC9v0rQ3pvBDt5ltjA5LbmaXSyUbr1TFzWYsrGb1nexWZvJZH7caigGnh idwurBps6eyK1lPVcCwLrjQQe3rdCj1jh3j8WWTv+wt4Cc43Y9eLuWGviZLK6chDuW9sR/ LI2tOzufJy1NhRVwylWgLCYoXWEW25QOYf2fg9dEHpuSTtwjiSgxbLyas+8tiJPcLdKAzV htbrzslprqGUY+VCeFv2sRb+djT339Fky4GcKhg+Z5FrDTwyay80sFxuSdcp2D0DQaYOEw 
FRKp+7MigIhSOdOzKh1oF1CMhhF6Q6h5droPJtAAAAAwEAAQAAAQARfRUPWoCWX/fQJFlI b0/7v54XGFXYzylTxdEgEJM2cktBCuY9fGf8hiHW0bBSSauPZ7YUgD9nByDQHgD29kzDa2 FEWl0GyVk9FZvlm7IRhkwON0g00s1S2GKvazwtJD/as/CiH7eDPM78BhnflK2Ttm929XdV MmE+Jjx2F0z3kPIo+TY8zaSRz81+QBgYBEomrWX9dcju9dVKHKstEqCQeDJONk/WWf/1SG WC15i3jFM6JoitA2527272Wm1gaN1hbkEtEWdqfxDf8mLYz1HWrQ3/p64XmXV2J2LwZsya wvQRJrRAwXW7XzdZbxwJMknEv9OZAwGkNHAR9dtauCuBAAAAgQCnVYLjx1bY58dBmdTgoF /T9bu8GsYqUCdXBut1YNoHMOu0lAyS3P6lb3fKMrkbQneqYmF8VmR+0Eb43BM/lwhnrjgF s/8QbTs3K+wyxumHC5qCLR8BM+lKZi0w11DfChN5wXNBoPlUPlox2V+Ip9rqRGvRaBBRq/ v83X1wiZxcNwAAAIEA51oOuuf1mr/JrHjVvo8WItjA9XEplSlqKmXBRL12IdrZKNfYI/4l OaMEjoW6lex76CDWIoflrPcms1lFZ5JcJ3VIYPCV0a8agaaRt9EAEcUi35yZQXtEPzXzju nfaAt9kzQfCJ01rRXsXuhJZEfH31Ije7mJslWPeOPzIR2aH+EAAACBAM9m1c4evd2YLt1B Z1ZdXOm2HDEIn3UXMGMf6hcgGp7QamOTCzBmHHAqvYvJNc2wW1JTgCx9Y9VThTSu0u3o5+ NXSsXdZoewZXYO1BVwtGBKvVRO6liXkqsn+Xca2W72h0s8+LojdBG3RsKcFy4ivUJaz28H Hq/t44Fvs5iRR9QNAAAAEHRlc3RAZXhhbXBsZS5jb20BAg== -----END OPENSSH PRIVATE KEY----- ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1670088793.0 ostree-push-1.2.0/tests/data/id_rsa.pub0000664000175100017510000000061614342704131016150 0ustar00dandanssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQC7bsEDNImE28uXLbvLRqR8vSLRrI/kvyhwKRCIC9v0rQ3pvBDt5ltjA5LbmaXSyUbr1TFzWYsrGb1nexWZvJZH7caigGnhidwurBps6eyK1lPVcCwLrjQQe3rdCj1jh3j8WWTv+wt4Cc43Y9eLuWGviZLK6chDuW9sR/LI2tOzufJy1NhRVwylWgLCYoXWEW25QOYf2fg9dEHpuSTtwjiSgxbLyas+8tiJPcLdKAzVhtbrzslprqGUY+VCeFv2sRb+djT339Fky4GcKhg+Z5FrDTwyay80sFxuSdcp2D0DQaYOEwFRKp+7MigIhSOdOzKh1oF1CMhhF6Q6h5droPJt test@example.com ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1670088793.0 ostree-push-1.2.0/tests/data/pgp-key.asc0000664000175100017510000000507514342704131016247 0ustar00dandan-----BEGIN PGP PRIVATE KEY BLOCK----- lQVYBGIjoZEBDADZ0XXogNa5clnor83OR3Rl7cc3W7HWSBlQx7y8FAql9jWELNDz CQ+zW4hxQC8GIjZ/IecJh9qU9yVsUEJ/obo3T9cq14ScJ59McTJOqWUYvRjBoGjs iLKV296v3XKrWpTYWxUjYovvNMn88CoRS7bZtv0RjKVwCk7l9tpN4fLc5Wx/BKe8 gLB8UlxNBHr2KHpJpeffFh0T5a6YjMhXQR585Xs8M+8GTjP/xpZmqQWtKbIfyc1y kXzyrpHxLw6IUYXeGf08LepCdEkM2ZDFoYHE9RHQDTnpSJtCKzYPYYt4prPEuWef Nb8lXj7juMFhEwvOfnPJ5irCTcD98fUpJZryGWyrqEW8/yfjxyZ0LSF+AefiqxBA 3xLR+l6kaHo1G1MLZ3ANuLA6X0Nzm/cd1qEfPvZqtr367YPlsJ2QnUz5xud2NJgx aLMcG9cE3HLE9UeU5w89oaOb5hwi6onP7ohCIrVttNso4eYKjLwEiwGemN5mPb4u xOEz0AJR2AjCt4cAEQEAAQAL/1y042fFal68tlqsdL5uuDJn3SoJG2xV/HxevswY BZ40Z/utLXO7cBTJCQ4WoM0OA77heZmbWmObuzBaCquS9JR1JWHFH9psnMbmyOAl BfPdC+vhb7b4yXWyApo8eQFIiOB7B8irOV7iy3Od7nG1FYqdcSGmiIVixcjn42tR l1IATcjT/OZkqXkIfuOC3Cjn+oBdHg7btR2SXNjt1pkYjLbg3y409m9rGoOrDLl+ B/33KKqR9CDmnlUWCuSpwBgyBaxB2wpWuhX03bsA57uHjEo39uJTxQ3lriGELA0K Q3t3AyqU3fhfuivtc6qGDr2vVztwfYuHCqB7hb8Pqj9q/1L+V2jrQZCaYiOsvfaY sZNQqb6bhYXtyF/9rJlBopYxWSzhLxnpzRICoqIvPMLlhEfq4AW++eA/PsKfnih9 yjXNffBvgHyM0C++ZJlRpoKaEJm7yD+qUDkklVq9IFXKyx9dCR+DY+qAR4NzTsXI 1P6XvI5oEi2j4RGNiEiHGQNMwQYA3+KYdgs+G1ADIGoobA89BUlwhowq0nI63qmr CsCJSSER8IKASglLUwuMgybUtjbomXxoJ7ej1Q38C2XK1LG1g9/oR5dcJN2NuYKA a1bjn4GUFZNo78KIpPaPLZeBoMCfcSuh3qfUKHT0R7/M7PvuBcThovtuFGWsSSVR Vbax1tZtcDv4IOOiz6lpo6t0df1epnLzJHbmfmKWQq/XwQbFnvEPC585IpK4a805 W2uYA8wSKDR0HEaIfq0+NtphsrTBBgD5EBP1FiCNhSW3JKvAgt2GIMrdQ8aHPD/c Y2AHCQPukQCPnW2QgrtwWDwV0dTYJGrzoM11ZeZR+nFmx7ePqSvBWcvWX6Ti3vEe i25KThaDyme245GNXrC3IXCj3PtHg5lR4cpBP3bo6Dl+vt/dDLBzzGWofGpvm87y 6s25BtdPRUzIPfkhMpcQfOe2da+aK5oPQYbTt93C0PcHuwXhJyk9JkYzIFgRDf40 pnGhZOGMsjxqj6DLrDvBxc0mSi0qFkcF/2Px3/Dbywkym2qcvz+7EIREYHlFOyHm bjQimYBZM3FZ7NVthjGhehy45us6uAguNm6FPUTme/bbtFwWcguIfiNTY0C3UUl8 
c8t/sARi+Y4RgDAzzmSMfJeQO0uvoA0/7nXh0vcbUJ4ddMCdwiBkIGUH0dCI8sHl 2Q02n2JZRNK7M49YMDAija6Ec3jb6WtslI9WsFj6BwzaNyxZyuKQhCCPLSnGI5FI 2QSDyvbLNLEXEA0WjZunrWm54K1/3hEe/+TGtCVvc3RyZWUtcHVzaCA8b3N0cmVl LXB1c2hAZXhhbXBsZS5jb20+iQHOBBMBCgA4FiEEKB0N3G7dd89qipNsJH0+Uc2g i2sFAmIjoZECGwMFCwkIBwIGFQoJCAsCBBYCAwECHgECF4AACgkQJH0+Uc2gi2sT 1AwAjkaX3Yr7uZiLu3+xCuDDnxmSNjTU+IIsJxJbl+8GFI2PYlTBQapCdAbNzmxL TnhJ8home1qxbsgsMgqNDjJiwB7PDS8CiI9AjhngzbOVJSWsb+xN3QktwZlskBPt TE2LtUKU4yPD/3wxIDkr8UaJmZH4BCHVcyllmzM4TcGJ1/Ap4gDOkA/Oh5ptPgqI UOKhHCC1ZtUW917XmvW6PNZKY37Oyw9LMb9+ykgWZpaBaXJq8cuw00IIK4rJaCbJ OqNLgf/oIQx5cOHwdNsZ+WHr4e82Mxm2ophiiCzweCaSkZj17ig6L2uQsY0Uh/j+ fauEvp4CDt4x4ywkQ5j6qquhlE2VX12xnGllbSSlCNoQxG3C1m0Mh1E4L+1VyWq4 aYmJNDlk7nAhcF45Go+czFeGTFLBWouiQukUF7WNzfcRvG0uXQ0wMz11I8yYleQV pzPod9JiO0xg+3BMP/6yahNTkoqlIAuixl4NH6ujRRsc5ezgldCU9y9hg5ZGigXJ 0kE/ =Lty7 -----END PGP PRIVATE KEY BLOCK----- ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1670088793.0 ostree-push-1.2.0/tests/data/pgp-pub.asc0000664000175100017510000000243214342704131016237 0ustar00dandan-----BEGIN PGP PUBLIC KEY BLOCK----- mQGNBGIjoZEBDADZ0XXogNa5clnor83OR3Rl7cc3W7HWSBlQx7y8FAql9jWELNDz CQ+zW4hxQC8GIjZ/IecJh9qU9yVsUEJ/obo3T9cq14ScJ59McTJOqWUYvRjBoGjs iLKV296v3XKrWpTYWxUjYovvNMn88CoRS7bZtv0RjKVwCk7l9tpN4fLc5Wx/BKe8 gLB8UlxNBHr2KHpJpeffFh0T5a6YjMhXQR585Xs8M+8GTjP/xpZmqQWtKbIfyc1y kXzyrpHxLw6IUYXeGf08LepCdEkM2ZDFoYHE9RHQDTnpSJtCKzYPYYt4prPEuWef Nb8lXj7juMFhEwvOfnPJ5irCTcD98fUpJZryGWyrqEW8/yfjxyZ0LSF+AefiqxBA 3xLR+l6kaHo1G1MLZ3ANuLA6X0Nzm/cd1qEfPvZqtr367YPlsJ2QnUz5xud2NJgx aLMcG9cE3HLE9UeU5w89oaOb5hwi6onP7ohCIrVttNso4eYKjLwEiwGemN5mPb4u xOEz0AJR2AjCt4cAEQEAAbQlb3N0cmVlLXB1c2ggPG9zdHJlZS1wdXNoQGV4YW1w bGUuY29tPokBzgQTAQoAOBYhBCgdDdxu3XfPaoqTbCR9PlHNoItrBQJiI6GRAhsD BQsJCAcCBhUKCQgLAgQWAgMBAh4BAheAAAoJECR9PlHNoItrE9QMAI5Gl92K+7mY i7t/sQrgw58ZkjY01PiCLCcSW5fvBhSNj2JUwUGqQnQGzc5sS054SfIaJntasW7I LDIKjQ4yYsAezw0vAoiPQI4Z4M2zlSUlrG/sTd0JLcGZbJAT7UxNi7VClOMjw/98 MSA5K/FGiZmR+AQh1XMpZZszOE3BidfwKeIAzpAPzoeabT4KiFDioRwgtWbVFvde 15r1ujzWSmN+zssPSzG/fspIFmaWgWlyavHLsNNCCCuKyWgmyTqjS4H/6CEMeXDh 8HTbGflh6+HvNjMZtqKYYogs8HgmkpGY9e4oOi9rkLGNFIf4/n2rhL6eAg7eMeMs JEOY+qqroZRNlV9dsZxpZW0kpQjaEMRtwtZtDIdROC/tVclquGmJiTQ5ZO5wIXBe ORqPnMxXhkxSwVqLokLpFBe1jc33EbxtLl0NMDM9dSPMmJXkFacz6HfSYjtMYPtw TD/+smoTU5KKpSALosZeDR+ro0UbHOXs4JXQlPcvYYOWRooFydJBPw== =jf4o -----END PGP PUBLIC KEY BLOCK----- ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1670088793.0 ostree-push-1.2.0/tests/data/pgp-pub.gpg0000664000175100017510000000161014342704131016243 0ustar00dandan™b#¡‘ ÙÑuè€Ö¹rYè¯ÍÎGteíÇ7[±ÖHPǼ¼ ¥ö5„,Ðó ³[ˆq@/"6!ç ‡Ú”÷%lPB¡º7O×*ׄœ'ŸLq2N©e½Á h숲•ÛÞ¯Ýr«Z”Ø[#b‹ï4Éüð*K¶Ù¶ýŒ¥p NåöÚMáòÜål§¼€°|R\Mzö(zI¥çß官ŒÈWA|å{<3ïN3ÿÆ–f©­)²ÉÍr‘|ò®‘ñ/ˆQ…Þý<-êBtI ÙÅ¡ÄõÐ 9éH›B+6a‹x¦³Ä¹gŸ5¿%^>ã¸Áa Î~sÉæ*ÂMÀýñõ)%šòl«¨E¼ÿ'ãÇ&t-!~çâ«@ßÑú^¤hz5S gp ¸°:_Cs›÷Ö¡>öj¶½úíƒå°LùÆçv4˜1h³×ÜrÄõG”ç=¡£›æ"ê‰ÏîˆB"µm´Û(áæ Œ¼‹ž˜Þf=¾.Äá3ÐQØ·‡´%ostree-push ‰Î 8!( ÜnÝwÏjŠ“l$}>QÍ ‹kb#¡‘   € $}>QÍ ‹kÔ ŽF—ÝŠû¹˜‹»± àß’64Ôø‚,'[—ïbTÁAªBtÍÎlKNxIò&{Z±nÈ,2 2bÀÏ /ˆ@Žàͳ•%%¬oìMÝ -Á™líLM‹µB”ã#Ãÿ|1 9+ñF‰™‘ø!Õs)e›38MÁ‰×ð)âηšm> ˆPâ¡ µfÕ÷^ךõº<ÖJc~ÎËK1¿~ÊHf–irjñ˰ÓB+ŠÉh&É:£Kÿè! ypáðtÛùaëáï63¶¢˜bˆ,ðx&’‘˜õî(:/k±‡øþ}«„¾žÞ1ã,$C˜úª«¡”M•_]±œiem$¥ÚÄmÂÖm ‡Q8/íUÉj¸i‰‰49dîp!p^9œÌW†LRÁZ‹¢BéµÍ÷¼m.] 03=u#̘•ä§3èwÒb;L`ûpL?þ²jS’Š¥ ¢Æ^ «£Eåìà•Д÷/aƒ–FŠÉÒA?././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1670088793.0 ostree-push-1.2.0/tests/data/sshd_config0000664000175100017510000000061014342704131016402 0ustar00dandan# Configuration for testing sshd. 
Some other options are set on the
# command line based on the test environment
AcceptEnv LANG LC_*
AllowTcpForwarding yes
ChallengeResponseAuthentication no
HostbasedAuthentication no
ListenAddress 127.0.0.1
PasswordAuthentication no
PermitRootLogin no
PermitUserEnvironment no
PidFile none
PubkeyAuthentication yes
StrictModes no
UsePAM no
X11Forwarding no
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1670088793.0
ostree-push-1.2.0/tests/dumpenv0000775000175100017510000000046614342704131014675 0ustar00dandan
#!/usr/bin/python3
# Dump argv and environ in JSON

import json
import os
import sys

data = {
    'args': sys.argv,
    'env': dict(os.environ),
}

dest_path = os.getenv('DUMPENV_DEST')
if dest_path:
    dest = open(dest_path, 'w')
else:
    dest = sys.stdout

json.dump(data, dest, indent=2, sort_keys=True)
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1670088793.0
ostree-push-1.2.0/tests/ostree-push0000775000175100017510000000046714342704131015476 0ustar00dandan
#!/usr/bin/env python3
# ostree-push script for testing

import logging
import sys

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('tests/ostree-push')
logger.debug('sys.path=%s', ':'.join(sys.path))
logger.debug('sys.argv=%s', sys.argv)

from otpush import push  # noqa: E402

push.main()
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1705762176.0
ostree-push-1.2.0/tests/ostree-receive0000775000175100017510000000052414552756600016146 0ustar00dandan
#!/usr/bin/env python3
# ostree-receive script for testing

import logging
import sys

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger('tests/ostree-receive')
logger.debug('sys.path=%s', ':'.join(sys.path))
logger.debug('sys.argv=%s', sys.argv)

from otpush import receive  # noqa: E402

sys.exit(receive.compat_main())
././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1705762176.0
ostree-push-1.2.0/tests/test_full.py0000664000175100017510000001276114552756600015660 0ustar00dandan
import logging
import os
import subprocess

from otpush import VERSION

from .util import (
    TESTSDIR,
    get_content_checksum,
    needs_sshd,
    random_commit,
    wipe_repo,
)

MAJOR = VERSION.split('.')[0]
ostree_receive_versioned = f'ostree-receive-{MAJOR}'
ostree_receive_abspath = os.path.join(TESTSDIR, ostree_receive_versioned)

logger = logging.getLogger(__name__)

# Skip all tests here if the required sshd is not available.
pytestmark = needs_sshd def run_push(source_repo, dest_repo, sshd, ssh_options, env_vars, receive_config_path, command=ostree_receive_versioned, dest=None, options=None, refs=None, **popen_kwargs): dest = dest or f'ssh://{sshd.address}:{sshd.port}/{dest_repo.path}' options = options or [] refs = refs or [] env = os.environ.copy() env['OSTREE_RECEIVE_CONF'] = receive_config_path if env_vars: env.update(env_vars) popen_kwargs['env'] = env if 'check' not in popen_kwargs: popen_kwargs['check'] = True cmd = [ 'ostree-push', f'--repo={source_repo.path}', f'--command={command}', ] + ssh_options + options + [dest] + refs logger.debug('push command: %s', ' '.join(cmd)) return subprocess.run(cmd, **popen_kwargs) def test_no_commits(source_repo, dest_repo, sshd, ssh_options, cli_env_vars, receive_config_path, capfd): """Test push with no commits in source repo""" args = ( source_repo, dest_repo, sshd, ssh_options, cli_env_vars, receive_config_path ) run_push(*args) capfd.readouterr() _, receive_refs = dest_repo.list_refs() assert receive_refs == {} ret = run_push(*args, refs=['foo', 'bar'], check=False) _, err = capfd.readouterr() assert ret != 0 assert 'otpush.push.OTPushError: Refs bar foo not found' in err _, receive_refs = dest_repo.list_refs() assert receive_refs == {} def test_basic(source_repo, dest_repo, sshd, ssh_options, cli_env_vars, receive_config_path, tmp_files_path, capfd): """Test push with one commit in source repo""" args = ( source_repo, dest_repo, sshd, ssh_options, cli_env_vars, receive_config_path ) rev = random_commit(source_repo, tmp_files_path, 'test') source_content = get_content_checksum(source_repo, rev) wipe_repo(dest_repo) run_push(*args) capfd.readouterr() _, receive_refs = dest_repo.list_refs() assert receive_refs.keys() == {'test', 'ostree-metadata'} receive_content = get_content_checksum(dest_repo, receive_refs['test']) assert receive_content == source_content wipe_repo(dest_repo) run_push(*args, refs=['test']) capfd.readouterr() _, receive_refs = dest_repo.list_refs() assert receive_refs.keys() == {'test', 'ostree-metadata'} receive_content = get_content_checksum(dest_repo, receive_refs['test']) assert receive_content == source_content wipe_repo(dest_repo) ret = run_push(*args, refs=['test', 'foo'], check=False) _, err = capfd.readouterr() assert ret != 0 assert 'otpush.push.OTPushError: Refs foo not found' in err _, receive_refs = dest_repo.list_refs() assert receive_refs == {} def test_dry_run(source_repo, dest_repo, sshd, ssh_options, cli_env_vars, receive_config_path, tmp_files_path): """Test push dry run""" args = ( source_repo, dest_repo, sshd, ssh_options, cli_env_vars, receive_config_path ) random_commit(source_repo, tmp_files_path, 'test') wipe_repo(dest_repo) run_push(*args, options=['-n']) _, receive_refs = dest_repo.list_refs() assert receive_refs == {} wipe_repo(dest_repo) run_push(*args, options=['--dry-run']) _, receive_refs = dest_repo.list_refs() assert receive_refs == {} wipe_repo(dest_repo) run_push(*args, options=['-n'], refs=['test']) _, receive_refs = dest_repo.list_refs() assert receive_refs == {} def test_scp_dest(source_repo, dest_repo, sshd, ssh_options, cli_env_vars, receive_config_path, tmp_files_path): """Test push with scp style destination""" args = ( source_repo, dest_repo, sshd, ssh_options, cli_env_vars, receive_config_path ) dest = f'{sshd.address}:{dest_repo.path}' options = ['-p', str(sshd.port)] random_commit(source_repo, tmp_files_path, 'test') run_push(*args, dest=dest, options=options) _, receive_refs = dest_repo.list_refs() 
assert receive_refs.keys() == {'test', 'ostree-metadata'} def test_command_abspath(source_repo, dest_repo, sshd, ssh_options, cli_env_vars, receive_config_path, tmp_files_path): """Test push with absolute path to ostree-receive""" args = ( source_repo, dest_repo, sshd, ssh_options, cli_env_vars, receive_config_path ) random_commit(source_repo, tmp_files_path, 'test') run_push(*args, command=ostree_receive_abspath) _, receive_refs = dest_repo.list_refs() assert receive_refs.keys() == {'test', 'ostree-metadata'} def test_unversioned(source_repo, dest_repo, sshd, ssh_options, cli_env_vars, receive_config_path, tmp_files_path): """Test push with unversioned ostree-receive""" args = ( source_repo, dest_repo, sshd, ssh_options, cli_env_vars, receive_config_path ) random_commit(source_repo, tmp_files_path, 'test') run_push(*args, command='ostree-receive') _, receive_refs = dest_repo.list_refs() assert receive_refs.keys() == {'test', 'ostree-metadata'} ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1705762176.0 ostree-push-1.2.0/tests/test_push.py0000664000175100017510000005076414552756600015702 0ustar00dandanfrom otpush import push import argparse from gi.repository import GLib, Gio import logging import json import os from pathlib import Path import pytest import re import socket import subprocess from urllib.error import HTTPError from urllib.request import urlopen from .util import ( TESTSDIR, needs_sshd, random_commit, TmpRepo, ) logger = logging.getLogger(__name__) class TestRepoServer: def populate_repo(self): self.repo.mkdir() sub = self.repo / 'sub' sub.mkdir() with open(self.repo / 'a', 'w') as f: f.write('foo') with open(sub / 'b', 'w') as f: f.write('bar') def check_server(self): assert self.server.path == self.repo assert self.server.proc.pid > 0 assert self.server.address[0] == '127.0.0.1' assert self.server.address[1] > 0 assert self.server.url.startswith('http://127.0.0.1:') with urlopen(f'{self.server.url}/a') as resp: assert resp.read().decode('utf-8') == 'foo' with urlopen(f'{self.server.url}/sub/b') as resp: assert resp.read().decode('utf-8') == 'bar' with pytest.raises(HTTPError) as excinfo: urlopen(f'{self.server.url}/missing') assert excinfo.value.getcode() == 404 def test_missing(self, tmp_path): # Nonexistent directory should fail repo = tmp_path / 'repo' server = None with pytest.raises(ValueError) as excinfo: server = push.RepoServer(repo) assert str(excinfo.value) == f'{repo} is not a directory' assert server is None def test_non_context(self, tmp_path): # Without context manager self.repo = tmp_path / 'repo' self.populate_repo() self.server = push.RepoServer(self.repo) assert self.server.path == self.repo assert self.server.proc is None assert self.server.address is None assert self.server.url is None # This should do nothing self.server.stop() # Make sure to clean up so the tests don't hang if there are failures try: self.server.start() self.check_server() self.server.stop() finally: self.server.stop() def test_context(self, tmp_path): self.repo = tmp_path / 'repo' self.populate_repo() with push.RepoServer(self.repo) as self.server: self.check_server() def test_pull(self, tmp_path, tmp_files_path): local_repo = TmpRepo(tmp_path / 'local') remote_repo = TmpRepo(tmp_path / 'remote') random_commit(remote_repo, tmp_files_path, 'test') _, remote_refs = remote_repo.list_refs() with push.RepoServer(str(remote_repo.path)) as remote_server: repo_options = GLib.Variant('a{sv}', { 'gpg-verify': GLib.Variant('b', False), 'gpg-verify-summary': 
GLib.Variant('b', False), }) local_repo.remote_add('origin', remote_server.url, repo_options) # Pulling a missing ref should fail pull_options = GLib.Variant('a{sv}', { 'refs': GLib.Variant('as', ['missing']), }) with pytest.raises(GLib.Error) as excinfo: local_repo.pull_with_options('origin', pull_options) assert excinfo.value.matches(Gio.io_error_quark(), Gio.IOErrorEnum.NOT_FOUND) _, local_refs = local_repo.list_refs() assert local_refs == {} # Pulling the existing ref should succeed pull_options = GLib.Variant('a{sv}', { 'refs': GLib.Variant('as', ['test']), }) local_repo.pull_with_options('origin', pull_options) _, local_refs = local_repo.list_refs() assert local_refs == {'origin:test': remote_refs['test']} # Pulling with all refs should fail because no branches were # setup in the configuration local_repo.set_ref_immediate('origin', 'test', None) _, local_refs = local_repo.list_refs() assert local_refs == {} pull_options = GLib.Variant('a{sv}', {}) with pytest.raises(GLib.Error) as excinfo: local_repo.pull_with_options('origin', pull_options) assert excinfo.value.matches(Gio.io_error_quark(), Gio.IOErrorEnum.FAILED) assert ('No configured branches for remote origin' in str(excinfo.value)) assert local_refs == {} @needs_sshd class TestSSHMultiplexer: def test_socket_exists(self, sshd, ssh_options, ssh_socket): ssh = push.SSHMultiplexer(sshd.address, ssh_socket, ssh_options, port=sshd.port) with open(ssh_socket, 'w'): pass with pytest.raises(push.OTPushError) as excinfo: ssh.start() assert str(excinfo.value) == f'Socket {ssh_socket} already exists' def test_master_non_context(self, sshd, ssh_options, ssh_socket): ssh = push.SSHMultiplexer(sshd.address, ssh_socket, ssh_options, port=sshd.port) # Stopping without starting should do nothing assert ssh.master_proc is None assert not os.path.exists(ssh_socket) ssh.stop() try: ssh.start() assert ssh.master_proc.pid > 0 assert os.path.exists(ssh_socket) with pytest.raises(push.OTPushError) as excinfo: ssh.start() assert str(excinfo.value).startswith( 'SSH master process already running') finally: ssh.stop() def test_master_context(self, sshd, ssh_options, ssh_socket): with push.SSHMultiplexer(sshd.address, ssh_socket, ssh_options, port=sshd.port) as ssh: assert ssh.master_proc.pid > 0 assert os.path.exists(ssh_socket) def test_forward_port(self, sshd, ssh_options, ssh_socket): sock1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) sock1.bind(('127.0.0.1', 0)) sock1_port = sock1.getsockname()[1] assert sock1_port > 0 ssh = push.SSHMultiplexer(sshd.address, ssh_socket, ssh_options, port=sshd.port) with pytest.raises(push.OTPushError) as excinfo: ssh.forward_port(sock1_port) assert str(excinfo.value) == 'SSH master process not running' assert ssh.master_proc is None with push.SSHMultiplexer(sshd.address, ssh_socket, ssh_options, port=sshd.port) as ssh: sock2_port = ssh.forward_port(sock1_port) assert sock2_port > 0 sock3 = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0) sock3.connect(('127.0.0.1', sock2_port)) def test_run(self, tmp_path, sshd, ssh_options, ssh_socket): ssh = push.SSHMultiplexer(sshd.address, ssh_socket, ssh_options, port=sshd.port) with pytest.raises(push.OTPushError) as excinfo: ssh.run(['true']) assert str(excinfo.value) == 'SSH master process not running' assert ssh.master_proc is None with push.SSHMultiplexer(sshd.address, ssh_socket, ssh_options, port=sshd.port) as ssh: test_file = tmp_path / 'test_file' assert not os.path.exists(test_file) ssh.run(['touch', str(test_file)]) assert os.path.exists(test_file) with 
pytest.raises(subprocess.CalledProcessError, match='returned non-zero exit status 1'): ssh.run(['rmdir', str(tmp_path)]) @needs_sshd class TestPushRefs: DUMPENV_PATH = os.path.join(TESTSDIR, 'dumpenv') def push_refs(self, source_repo, dest_repo, sshd, ssh_options, capfd, refs=None, dry_run=False): """Run push.push_refs and check the remote command is correct""" dest = push.PushDest(host=sshd.address, port=sshd.port, repo=str(dest_repo.path), user=None) push.push_refs(source_repo, dest, refs=refs, dry_run=dry_run, ssh_options=ssh_options, commands=['dumpenv']) out, _ = capfd.readouterr() data = json.loads(out) args = data['args'] num_args = len(args) num_refs = len(refs) if refs else 0 expected_num_args = num_refs + 3 if dry_run: expected_num_args += 1 assert num_args == expected_num_args args_iter = iter(args) assert next(args_iter) == self.DUMPENV_PATH if dry_run: assert next(args_iter) == '-n' assert next(args_iter) == str(dest_repo.path) assert next(args_iter).startswith('http://127.0.0.1:') remaining = list(args_iter) if refs: assert remaining == refs else: assert remaining == [] def test_no_refs(self, source_repo, dest_repo, sshd, ssh_options, tmp_files_path, capfd): self.push_refs(source_repo, dest_repo, sshd, ssh_options, capfd, refs=None) self.push_refs(source_repo, dest_repo, sshd, ssh_options, capfd, refs=[]) def test_refs(self, source_repo, dest_repo, sshd, ssh_options, tmp_files_path, capfd): random_commit(source_repo, tmp_files_path, 'test1') random_commit(source_repo, tmp_files_path, 'test2') self.push_refs(source_repo, dest_repo, sshd, ssh_options, capfd, refs=['test1']) self.push_refs(source_repo, dest_repo, sshd, ssh_options, capfd, refs=['test2']) self.push_refs(source_repo, dest_repo, sshd, ssh_options, capfd, refs=['test1', 'test2']) def test_missing_ref(self, source_repo, dest_repo, sshd, ssh_options, tmp_files_path, capfd): random_commit(source_repo, tmp_files_path, 'test') with pytest.raises(push.OTPushError) as excinfo: self.push_refs(source_repo, dest_repo, sshd, ssh_options, capfd, refs=['missing']) assert str(excinfo.value) == \ f'Refs missing not found in {source_repo.path}' with pytest.raises(push.OTPushError) as excinfo: self.push_refs(source_repo, dest_repo, sshd, ssh_options, capfd, refs=['test', 'missing']) assert str(excinfo.value) == \ f'Refs missing not found in {source_repo.path}' def test_summary(self, source_repo, dest_repo, sshd, ssh_options, tmp_files_path, capfd): summary = Path(source_repo.path) / 'summary' random_commit(source_repo, tmp_files_path, 'test') # Delete the summary file and check that it gets generated. summary.unlink() self.push_refs(source_repo, dest_repo, sshd, ssh_options, capfd) assert summary.exists() # Set the summary mtime behind the repo and check that it gets # regenerated. 
repo_mtime = os.path.getmtime(source_repo.path) os.utime(summary, (repo_mtime - 1, repo_mtime - 1)) orig_summary_mtime = summary.stat().st_mtime assert orig_summary_mtime < repo_mtime self.push_refs(source_repo, dest_repo, sshd, ssh_options, capfd) assert summary.exists() new_summary_mtime = summary.stat().st_mtime assert new_summary_mtime > orig_summary_mtime def test_dry_run(self, source_repo, dest_repo, sshd, ssh_options, tmp_files_path, capfd): self.push_refs(source_repo, dest_repo, sshd, ssh_options, capfd, dry_run=True) def test_commands(self, source_repo, dest_repo, sshd, ssh_options, tmp_path): dest = push.PushDest( host=sshd.address, port=sshd.port, repo=str(dest_repo.path), user=None, ) nonexistent = str(tmp_path / 'nonexistent') # Only specifying missing commands should fail. with pytest.raises(push.OTPushError) as excinfo: push.push_refs( source_repo, dest, ssh_options=ssh_options, commands=[nonexistent], ) assert str(excinfo.value) == ( f'Could not find commands {nonexistent} on server' ) # Specifying a successful command as a fallback should succeed. push.push_refs( source_repo, dest, ssh_options=ssh_options, commands=[nonexistent, 'dumpenv'], ) class TestParseDest: def test_bad_scheme(self): for scheme in ('http', 'ftp', 'scp', 'blah'): with pytest.raises( ValueError, match=f'Destination scheme "{scheme}" not allowed'): push.parse_dest(f'{scheme}://host/repo') def test_missing_repo(self): for dest in ('ssh://', 'http://', 'host:', 'user@host:'): with pytest.raises(ValueError, match='Destination repo missing'): push.parse_dest('host:') def test_empty_dest(self): with pytest.raises(ValueError, match='Destination not in form "host:repo"'): push.parse_dest('') def test_missing_host(self): for dest in (':', ':repo', ':/path/:/repo'): with pytest.raises(ValueError, match='Destination host missing'): push.parse_dest(dest) def test_invalid_host(self): for dest in ('@:repo', '@host:repo', 'user@:repo'): with pytest.raises(ValueError, match='Invalid destination host'): push.parse_dest(dest) def test_invalid_port(self): match = re.compile( r'(Port could not be cast to integer|invalid literal for int)' ) for dest in ('ssh://host:port/repo', 'ssh://host:$/repo'): with pytest.raises(ValueError, match=match): push.parse_dest(dest) def test_good_dest(self): cases = ( ('ssh://host/repo', push.PushDest(host='host', repo='/repo', user=None, port=None)), ('ssh://host.example.com/repo', push.PushDest(host='host.example.com', repo='/repo', user=None, port=None)), ('ssh://host/path/to/repo/', push.PushDest(host='host', repo='/path/to/repo/', user=None, port=None)), ('ssh://host/path/:/repo', push.PushDest(host='host', repo='/path/:/repo', user=None, port=None)), ('ssh://user@host/repo', push.PushDest(host='host', user='user', repo='/repo', port=None)), ('ssh://host:22/repo', push.PushDest(host='host', port=22, repo='/repo', user=None)), ('host:repo', push.PushDest(host='host', repo='repo', user=None, port=None)), ('host:path/to/repo', push.PushDest(host='host', repo='path/to/repo', user=None, port=None)), ('host:/repo', push.PushDest(host='host', repo='/repo', user=None, port=None)), ('host:/path/:/repo', push.PushDest(host='host', repo='/path/:/repo', user=None, port=None)), ('user@host:repo', push.PushDest(host='host', user='user', repo='repo', port=None)), ('user@host.example.com:path/to/repo', push.PushDest(host='host.example.com', user='user', repo='path/to/repo', port=None)), ) for arg, expected in cases: dest = push.parse_dest(arg) assert dest == expected class TestArgParser: def 
test_no_dest(self, capsys): ap = push.OTPushArgParser() with pytest.raises(SystemExit) as excinfo: ap.parse_args([]) assert excinfo.value.code == 2 out, err = capsys.readouterr() assert out == '' assert err.endswith('error: the following arguments are required: ' 'DEST\n') def test_defaults(self): ap = push.OTPushArgParser() args = ap.parse_args(['host:repo']) assert args == argparse.Namespace( commands=None, dest=push.PushDest(host='host', repo='repo', user=None, port=None), dry_run=False, log_level=logging.INFO, port=None, refs=[], repo=None, ssh_options=[], ) def test_dest(self): ap = push.OTPushArgParser() args = ap.parse_args(['host:repo']) assert args.dest == push.PushDest(host='host', repo='repo', user=None, port=None) args = ap.parse_args(['user@host:repo']) assert args.dest == push.PushDest(host='host', user='user', repo='repo', port=None) args = ap.parse_args(['ssh://user@host/repo']) assert args.dest == push.PushDest(host='host', user='user', repo='/repo', port=None) args = ap.parse_args(['ssh://user@host:1234/repo']) assert args.dest == push.PushDest(host='host', user='user', port=1234, repo='/repo') def test_refs(self): ap = push.OTPushArgParser() args = ap.parse_args(['host:repo', 'foo']) assert args.refs == ['foo'] args = ap.parse_args(['host:repo', 'foo', 'bar', 'baz']) assert args.refs == ['foo', 'bar', 'baz'] def test_port(self, capsys): ap = push.OTPushArgParser() args = ap.parse_args(['-p', '22', 'host:repo']) assert args.port == 22 with pytest.raises(SystemExit) as excinfo: ap.parse_args(['-p', 'foo', 'host:repo']) assert excinfo.value.code == 2 out, err = capsys.readouterr() assert out == '' assert err.endswith("invalid int value: 'foo'\n") def test_port_and_dest_port(self): ap = push.OTPushArgParser() args = ap.parse_args(['-p', '22', 'ssh://host:2200/repo']) assert args.port == 22 assert args.dest.port == 22 def test_dry_run(self): ap = push.OTPushArgParser() args = ap.parse_args(['-n', 'host:repo']) assert args.dry_run is True args = ap.parse_args(['--dry-run', 'host:repo']) assert args.dry_run is True def test_log_level(self): ap = push.OTPushArgParser() args = ap.parse_args(['-v', 'host:repo']) assert args.log_level == logging.DEBUG args = ap.parse_args(['--verbose', 'host:repo']) assert args.log_level == logging.DEBUG args = ap.parse_args(['-q', 'host:repo']) assert args.log_level == logging.WARNING args = ap.parse_args(['--quiet', 'host:repo']) assert args.log_level == logging.WARNING def test_repo(self): ap = push.OTPushArgParser() args = ap.parse_args(['--repo', '/repo', 'host:repo']) assert args.repo == '/repo' def test_commands(self): ap = push.OTPushArgParser() args = ap.parse_args(['--command=ls', 'host:repo']) assert args.commands == ['ls'] args = ap.parse_args(['--command=ostree-receive', 'host:repo']) assert args.commands == ['ostree-receive'] args = ap.parse_args(['--command', '/path/to/ostree-receive', 'host:repo']) assert args.commands == ['/path/to/ostree-receive'] args = ap.parse_args(['--command=foo', '--command=bar', 'host:repo']) assert args.commands == ['foo', 'bar'] def test_ssh_options(self, capsys): ap = push.OTPushArgParser() args = ap.parse_args(['-ifoo', 'host:repo']) assert args.ssh_options == ['-i', 'foo'] args = ap.parse_args(['-i', 'foo', 'host:repo']) assert args.ssh_options == ['-i', 'foo'] args = ap.parse_args(['-o', 'Foo=yes', 'host:repo']) assert args.ssh_options == ['-o', 'Foo=yes'] args = ap.parse_args(['-o', 'Foo=yes', '-o', 'Bar=no', 'host:repo']) assert args.ssh_options == ['-o', 'Foo=yes', '-o', 'Bar=no'] with 
pytest.raises(SystemExit) as excinfo: ap.parse_args(['-ifoo', '-ibar', 'host:repo']) assert excinfo.value.code == 2 out, err = capsys.readouterr() assert out == '' assert err.endswith('Option -i can only be specified once\n') ././@PaxHeader0000000000000000000000000000002600000000000010213 xustar0022 mtime=1705762176.0 ostree-push-1.2.0/tests/test_receive.py0000664000175100017510000014522614552756600016343 0ustar00dandanfrom otpush import receive import argparse import dataclasses import gi from gi.repository import GLib, Gio import json import logging import os from pathlib import Path import pytest import time import sys import yaml from .util import ( ED25519_PRIVATE_KEY, ED25519_PUBLIC_KEY, PGP_PUB, PGP_PUB_KEYRING, PGP_KEY_ID, TESTSDIR, get_ostree_ed25519_sign, get_summary_variant, local_refs, needs_ed25519, needs_flatpak, needs_gpg, needs_ostree, oneshot_transaction, random_commit, wipe_repo, ) gi.require_version('OSTree', '1.0') from gi.repository import OSTree # noqa: E402 logger = logging.getLogger(__name__) class TestReceiveRepo: def test_cleanup(self, dest_repo): url = 'http://example.com' config = receive.OTReceiveRepoConfig(dest_repo.path, url) repo = receive.OTReceiveRepo(config) remotes_dir = Path(repo.remotes_dir.name) assert remotes_dir.exists() del repo assert not remotes_dir.exists() with receive.OTReceiveRepo(config) as repo: remotes_dir = Path(repo.remotes_dir.name) assert remotes_dir.exists() assert not remotes_dir.exists() def test_missing_repo(self, tmp_path): repo_path = tmp_path / 'repo' url = 'http://example.com' config = receive.OTReceiveRepoConfig(repo_path, url) with pytest.raises(receive.OTReceiveError) as excinfo: receive.OTReceiveRepo(config) assert str(excinfo.value) == f'repo {repo_path} not found' def test_get_commit_timestamp(self, tmp_files_path, receive_repo): with pytest.raises(GLib.Error) as excinfo: receive_repo._get_commit_timestamp('missing') assert excinfo.value.matches(Gio.io_error_quark(), Gio.IOErrorEnum.NOT_FOUND) commit = random_commit(receive_repo, tmp_files_path, 'someref', timestamp=0) timestamp = receive_repo._get_commit_timestamp(commit) assert timestamp == 0 now = int(time.time()) commit = random_commit(receive_repo, tmp_files_path, 'someref', timestamp=now) timestamp = receive_repo._get_commit_timestamp(commit) assert timestamp == now now = int(time.time()) commit = random_commit(receive_repo, tmp_files_path, 'someref') timestamp = receive_repo._get_commit_timestamp(commit) assert timestamp >= now def test_is_flatpak_repo(self, tmp_files_path, receive_repo): assert not receive_repo._is_flatpak_repo() random_commit(receive_repo, tmp_files_path, 'someref') assert not receive_repo._is_flatpak_repo() random_commit(receive_repo, tmp_files_path, 'app/com.example.App/x86_64/stable') assert receive_repo._is_flatpak_repo() def test_pull_commits(self, tmp_files_path, receive_repo, source_repo, source_server): rev1 = random_commit(source_repo, tmp_files_path, 'ref1') rev2 = random_commit(source_repo, tmp_files_path, 'ref2') _, remote_refs = receive_repo.remote_list_refs( receive_repo.REMOTE_NAME) assert remote_refs == {'ref1': rev1, 'ref2': rev2} # Pull by single ref with oneshot_transaction(receive_repo): receive_repo._pull_commits(['ref1']) _, refs = receive_repo.list_refs(None) assert refs == {'_receive:ref1': rev1} _, _, state = receive_repo.load_commit(rev1) assert state == OSTree.RepoCommitState.NORMAL wipe_repo(receive_repo) # Pull by multiple refs with oneshot_transaction(receive_repo): receive_repo._pull_commits(['ref1', 'ref2']) _, 
refs = receive_repo.list_refs(None) assert refs == {'_receive:ref1': rev1, '_receive:ref2': rev2} _, _, state = receive_repo.load_commit(rev1) assert state == OSTree.RepoCommitState.NORMAL _, _, state = receive_repo.load_commit(rev2) assert state == OSTree.RepoCommitState.NORMAL wipe_repo(receive_repo) # Pull by single rev with oneshot_transaction(receive_repo): receive_repo._pull_commits([rev1]) _, refs = receive_repo.list_refs(None) assert refs == {} _, _, state = receive_repo.load_commit(rev1) assert state == OSTree.RepoCommitState.NORMAL wipe_repo(receive_repo) # Pull by multiple revs with oneshot_transaction(receive_repo): receive_repo._pull_commits([rev1, rev2]) _, refs = receive_repo.list_refs(None) assert refs == {} _, _, state = receive_repo.load_commit(rev1) assert state == OSTree.RepoCommitState.NORMAL _, _, state = receive_repo.load_commit(rev2) assert state == OSTree.RepoCommitState.NORMAL wipe_repo(receive_repo) # Pull by missing ref with pytest.raises(GLib.Error) as excinfo: with oneshot_transaction(receive_repo): receive_repo._pull_commits(['missing']) assert excinfo.value.matches(Gio.io_error_quark(), Gio.IOErrorEnum.FAILED) wipe_repo(receive_repo) def test_copy_commit(self, tmp_files_path, receive_repo): # Non-flatpak ref src = random_commit(receive_repo, tmp_files_path, 'src') with oneshot_transaction(receive_repo): dst = receive_repo.copy_commit(src, 'dst') assert src != dst _, src_commit, _ = receive_repo.load_commit(src) _, dst_commit, dst_state = receive_repo.load_commit(dst) assert dst_state == OSTree.RepoCommitState.NORMAL assert OSTree.commit_get_parent(dst_commit) is None assert OSTree.commit_get_content_checksum(src_commit) == \ OSTree.commit_get_content_checksum(dst_commit) assert OSTree.commit_get_timestamp(src_commit) == \ OSTree.commit_get_timestamp(dst_commit) expected_metadata = { OSTree.COMMIT_META_KEY_REF_BINDING: ['dst'], OSTree.COMMIT_META_KEY_COLLECTION_BINDING: 'com.example.Test', } dst_metadata = dst_commit.get_child_value(0).unpack() assert dst_metadata == expected_metadata # Make another commit and check that the previous commit is used # as the parent expected_parent = dst src = random_commit(receive_repo, tmp_files_path, 'src') with oneshot_transaction(receive_repo): dst = receive_repo.copy_commit(src, 'dst') assert src != dst _, src_commit, _ = receive_repo.load_commit(src) _, dst_commit, dst_state = receive_repo.load_commit(dst) assert dst_state == OSTree.RepoCommitState.NORMAL assert OSTree.commit_get_parent(dst_commit) == expected_parent assert OSTree.commit_get_content_checksum(src_commit) == \ OSTree.commit_get_content_checksum(dst_commit) assert OSTree.commit_get_timestamp(src_commit) == \ OSTree.commit_get_timestamp(dst_commit) dst_metadata = dst_commit.get_child_value(0).unpack() assert dst_metadata == expected_metadata # Check that additional metadata is copied extra_metadata = {'foo': GLib.Variant('s', 'bar')} expected_metadata['foo'] = 'bar' src = random_commit(receive_repo, tmp_files_path, 'src', extra_metadata=extra_metadata) with oneshot_transaction(receive_repo): dst = receive_repo.copy_commit(src, 'dst') assert src != dst _, dst_commit, _ = receive_repo.load_commit(dst) dst_metadata = dst_commit.get_child_value(0).unpack() assert dst_metadata == expected_metadata # Flatpak metadata ref = 'app/com.example.Foo/x86_64/stable' src = random_commit(receive_repo, tmp_files_path, 'src') with oneshot_transaction(receive_repo): dst = receive_repo.copy_commit(src, ref) assert src != dst _, dst_commit, _ = receive_repo.load_commit(dst) 
expected_metadata = { OSTree.COMMIT_META_KEY_REF_BINDING: [ref], OSTree.COMMIT_META_KEY_COLLECTION_BINDING: 'com.example.Test', 'xa.ref': ref, 'xa.from_commit': src, } dst_metadata = dst_commit.get_child_value(0).unpack() assert dst_metadata == expected_metadata # Copying partial commit should fail receive_repo.mark_commit_partial(src, True) with pytest.raises(receive.OTReceiveError) as excinfo: with oneshot_transaction(receive_repo): receive_repo.copy_commit(src, ref) assert str(excinfo.value) == f'Cannot copy irregular commit {src}' def test_receive(self, tmp_files_path, receive_repo, source_repo, source_server): random_commit(source_repo, tmp_files_path, 'ref1') random_commit(source_repo, tmp_files_path, 'ref2') with pytest.raises(receive.OTReceiveError) as excinfo: receive_repo.receive(['missing']) assert str(excinfo.value) == \ 'Could not find ref missing in summary file' merged = receive_repo.receive(['ref1']) assert merged == {'ref1'} refs = local_refs(receive_repo) assert refs.keys() == {'ref1'} @needs_ostree def test_receive_update(self, tmp_files_path, receive_repo, source_repo, source_server): receive_repo.config.update = True random_commit(source_repo, tmp_files_path, 'ref1') merged = receive_repo.receive(['ref1']) assert merged == {'ref1'} refs = local_refs(receive_repo) assert refs.keys() == {'ostree-metadata', 'ref1'} summary = Path(receive_repo.path) / 'summary' assert summary.exists() def test_receive_update_hook(self, tmp_files_path, receive_repo, source_repo, source_server): dumpenv = os.path.join(TESTSDIR, 'dumpenv') receive_repo.config.update = True receive_repo.config.update_hook = dumpenv random_commit(source_repo, tmp_files_path, 'ref1') merged = receive_repo.receive(['ref1']) assert merged == {'ref1'} refs = local_refs(receive_repo) assert refs.keys() == {'ref1'} summary = Path(receive_repo.path) / 'summary' assert not summary.exists() @needs_gpg def test_receive_gpg_sign(self, tmp_files_path, receive_repo, source_repo, source_server, gpg_homedir, monkeypatch): random_commit(source_repo, tmp_files_path, 'ref1') # Specifying a bogus GPG key should fail receive_repo.config.gpg_sign = ['DEADBEEF'] receive_repo.config.gpg_homedir = str(gpg_homedir) with pytest.raises(GLib.Error) as excinfo: receive_repo.receive(['ref1']) assert excinfo.value.matches(Gio.io_error_quark(), Gio.IOErrorEnum.FAILED) # Specifying both key and homedir receive_repo.config.gpg_sign = [PGP_KEY_ID] receive_repo.config.gpg_homedir = str(gpg_homedir) merged = receive_repo.receive(['ref1']) assert merged == {'ref1'} refs = local_refs(receive_repo) assert refs.keys() == {'ref1'} # Validate the signature and make sure it was signed by the correct # key commit = refs['ref1'] keyring_file = Gio.File.new_for_path(str(PGP_PUB_KEYRING)) result = receive_repo.verify_commit_ext(commit, None, keyring_file) OSTree.GpgVerifyResult.require_valid_signature(result) assert OSTree.GpgVerifyResult.count_all(result) == 1 sig = OSTree.GpgVerifyResult.get_all(result, 0).unpack() key_id = sig[OSTree.GpgSignatureAttr.FINGERPRINT] assert key_id == PGP_KEY_ID # Using the default homedir via GNUPGHOME monkeypatch.setenv('GNUPGHOME', str(gpg_homedir)) receive_repo.config.gpg_sign = [PGP_KEY_ID] receive_repo.config.gpg_homedir = None wipe_repo(receive_repo) merged = receive_repo.receive(['ref1']) assert merged == {'ref1'} refs = local_refs(receive_repo) assert refs.keys() == {'ref1'} @needs_gpg def test_receive_gpg_verify(self, tmp_path, tmp_files_path, dest_repo, source_repo, source_server, gpg_homedir, monkeypatch): # 
Specifying a missing GPG keyring should fail keyring_path = str(tmp_path / 'missing.gpg') config = receive.OTReceiveRepoConfig( dest_repo.path, source_server.url, gpg_verify=True, gpg_trustedkeys=keyring_path, update=False, ) with pytest.raises(receive.OTReceiveConfigError) as excinfo: receive.OTReceiveRepo(config) assert str(excinfo.value) == ( f'gpg_trustedkeys keyring "{keyring_path}" does not exist' ) # Receiving an unsigned commit should fail. random_commit(source_repo, tmp_files_path, 'ref1') config = receive.OTReceiveRepoConfig( dest_repo.path, source_server.url, gpg_verify=True, gpg_trustedkeys=str(PGP_PUB_KEYRING), update=False, ) repo = receive.OTReceiveRepo(config) with pytest.raises(GLib.Error) as excinfo: repo.receive(['ref1']) assert excinfo.value.matches(OSTree.gpg_error_quark(), OSTree.GpgError.NO_SIGNATURE) # Receiving a signed commit should succeed. random_commit(source_repo, tmp_files_path, 'ref1', gpg_key_id=PGP_KEY_ID, gpg_homedir=str(gpg_homedir)) config = receive.OTReceiveRepoConfig( dest_repo.path, source_server.url, gpg_verify=True, gpg_trustedkeys=str(PGP_PUB_KEYRING), update=False, ) repo = receive.OTReceiveRepo(config) wipe_repo(repo) merged = repo.receive(['ref1']) assert merged == {'ref1'} refs = local_refs(repo) assert refs.keys() == {'ref1'} # Using an ASCII armored key instead of a PGP keyring should # also work. random_commit(source_repo, tmp_files_path, 'ref1', gpg_key_id=PGP_KEY_ID, gpg_homedir=str(gpg_homedir)) config = receive.OTReceiveRepoConfig( dest_repo.path, source_server.url, gpg_verify=True, gpg_trustedkeys=str(PGP_PUB), update=False, ) repo = receive.OTReceiveRepo(config) wipe_repo(repo) merged = repo.receive(['ref1']) assert merged == {'ref1'} refs = local_refs(repo) assert refs.keys() == {'ref1'} # Using the user's default keyring. random_commit(source_repo, tmp_files_path, 'ref1', gpg_key_id=PGP_KEY_ID, gpg_homedir=str(gpg_homedir)) monkeypatch.setenv('XDG_CONFIG_HOME', str(tmp_path)) keyring = tmp_path / 'ostree/ostree-receive-trustedkeys.gpg' keyring.parent.mkdir(exist_ok=True) keyring.symlink_to(PGP_PUB_KEYRING) config = receive.OTReceiveRepoConfig( dest_repo.path, source_server.url, gpg_verify=True, update=False, ) repo = receive.OTReceiveRepo(config) wipe_repo(repo) merged = repo.receive(['ref1']) assert merged == {'ref1'} refs = local_refs(repo) assert refs.keys() == {'ref1'} @needs_ed25519 def test_receive_ed25519_sign(self, tmp_files_path, tmp_path, receive_repo, source_repo, ed25519_private_keyfile): random_commit(source_repo, tmp_files_path, 'ref1') # Specifying a missing keyfile should fail. keyfile_path = str(tmp_path / 'missing') receive_repo.config.sign_keyfiles = [keyfile_path] with pytest.raises(receive.OTReceiveConfigError, match=f'sign_keyfiles keyfile "{keyfile_path}"' + ' does not exist'): receive_repo.receive(['ref1']) # Specifying the key. receive_repo.config.sign_keyfiles = [ed25519_private_keyfile] merged = receive_repo.receive(['ref1']) assert merged == {'ref1'} refs = local_refs(receive_repo) assert refs.keys() == {'ref1'} # Validate the signature and make sure it was signed by the correct # key. sign = get_ostree_ed25519_sign() sign.set_pk(GLib.Variant('s', ED25519_PUBLIC_KEY)) commit = refs['ref1'] assert sign.commit_verify(receive_repo, commit) @needs_ed25519 def test_receive_ed25519_verify(self, tmp_path, tmp_files_path, dest_repo, source_repo, source_server, ed25519_public_keyfile, monkeypatch): # Specifying a missing keyfile should fail. 
keyfile_path = str(tmp_path / 'missing') config = receive.OTReceiveRepoConfig( dest_repo.path, source_server.url, sign_verify=True, sign_trustedkeyfile=keyfile_path, update=False, ) with pytest.raises(receive.OTReceiveConfigError, match='sign_trustedkeyfile keyfile' + f' "{keyfile_path}" does not' + ' exist') as excinfo: receive.OTReceiveRepo(config) # Receiving an unsigned commit should fail. random_commit(source_repo, tmp_files_path, 'ref1') config = receive.OTReceiveRepoConfig( dest_repo.path, source_server.url, sign_verify=True, sign_trustedkeyfile=ed25519_public_keyfile, update=False, ) repo = receive.OTReceiveRepo(config) with pytest.raises(GLib.Error, match="Can't verify commit") as excinfo: repo.receive(['ref1']) assert excinfo.value.matches(Gio.io_error_quark(), Gio.IOErrorEnum.FAILED) # Receiving a signed commit should succeed. random_commit(source_repo, tmp_files_path, 'ref1', ed25519_key=ED25519_PRIVATE_KEY) config = receive.OTReceiveRepoConfig( dest_repo.path, source_server.url, sign_verify=True, sign_trustedkeyfile=ed25519_public_keyfile, update=False, ) repo = receive.OTReceiveRepo(config) wipe_repo(repo) merged = repo.receive(['ref1']) assert merged == {'ref1'} refs = local_refs(repo) assert refs.keys() == {'ref1'} # Using the user's default keyfile. random_commit(source_repo, tmp_files_path, 'ref1', ed25519_key=ED25519_PRIVATE_KEY) monkeypatch.setenv('XDG_CONFIG_HOME', str(tmp_path)) keyring = tmp_path / 'ostree/ostree-receive-trustedkeyfile.ed25519' keyring.parent.mkdir(exist_ok=True) keyring.symlink_to(ed25519_public_keyfile) config = receive.OTReceiveRepoConfig( dest_repo.path, source_server.url, sign_verify=True, update=False, ) repo = receive.OTReceiveRepo(config) wipe_repo(repo) merged = repo.receive(['ref1']) assert merged == {'ref1'} refs = local_refs(repo) assert refs.keys() == {'ref1'} @needs_ostree def test_update_repo_metadata(self, tmp_files_path, receive_repo): summary = Path(receive_repo.path) / 'summary' receive_repo.update_repo_metadata() assert summary.exists() summary_refs, summary_metadata = get_summary_variant(summary) ref_names = {ref[0] for ref in summary_refs} assert ref_names == {'ostree-metadata'} assert 'xa.cache' not in summary_metadata random_commit(receive_repo, tmp_files_path, 'someref') receive_repo.update_repo_metadata() assert summary.exists() summary_refs, summary_metadata = get_summary_variant(summary) ref_names = {ref[0] for ref in summary_refs} assert ref_names == {'ostree-metadata', 'someref'} assert 'xa.cache' not in summary_metadata @needs_flatpak def test_update_repo_metadata_flatpak(self, tmp_files_path, receive_repo): summary = Path(receive_repo.path) / 'summary' random_commit(receive_repo, tmp_files_path, 'app/com.example.App/x86_64/stable') receive_repo.update_repo_metadata() assert summary.exists() summary_refs, summary_metadata = get_summary_variant(summary) ref_names = {ref[0] for ref in summary_refs} # Flatpak < 1.10 creates the ostree-metadata commit when the # repo has a collecton ID, but newer versions don't. Add it to # the generated set if it's not there already so the expected # set is consistent. 
ref_names.add('ostree-metadata') assert ref_names == { 'ostree-metadata', 'app/com.example.App/x86_64/stable', 'appstream/x86_64', 'appstream2/x86_64', } assert 'xa.cache' in summary_metadata @needs_gpg @needs_ostree def test_update_repo_metadata_gpg_sign(self, receive_repo, gpg_homedir): receive_repo.config.gpg_sign = [PGP_KEY_ID] receive_repo.config.gpg_homedir = str(gpg_homedir) receive_repo.update_repo_metadata() summary = Path(receive_repo.path) / 'summary' summary_sig = summary.with_suffix('.sig') assert summary.exists() assert summary_sig.exists() @needs_ed25519 @needs_ostree def test_update_repo_metadata_ed25519_sign(self, receive_repo, ed25519_private_keyfile): receive_repo.config.sign_keyfiles = [ed25519_private_keyfile] receive_repo.update_repo_metadata() summary = Path(receive_repo.path) / 'summary' summary_sig = summary.with_suffix('.sig') assert summary.exists() assert summary_sig.exists() def test_update_repo_hook(self, receive_repo, tmp_path, monkeypatch): dumpenv = os.path.join(TESTSDIR, 'dumpenv') dumpenv_dest = tmp_path / 'dumpenv.json' monkeypatch.setenv('DUMPENV_DEST', str(dumpenv_dest)) # Exported environment variables receive_repo.config.update_hook = dumpenv receive_repo.update_repo_hook(['foo', 'bar']) with dumpenv_dest.open() as f: data = json.load(f) assert data['env']['OSTREE_RECEIVE_REPO'] == str( receive_repo.path.absolute() ) assert data['env']['OSTREE_RECEIVE_REFS'] == 'foo bar' # Wrong refs passed with pytest.raises(TypeError): receive_repo.update_repo_hook(None) # No hook configured receive_repo.config.update_hook = None with pytest.raises(receive.OTReceiveConfigError) as excinfo: receive_repo.update_repo_hook([]) assert str(excinfo.value) == 'update_hook not set in configuration' # Missing or non-executable hook hook = tmp_path / 'hook' receive_repo.config.update_hook = str(hook) with pytest.raises(FileNotFoundError): receive_repo.update_repo_hook([]) hook.touch() with pytest.raises(PermissionError): receive_repo.update_repo_hook([]) # Hook argument parsing receive_repo.config.update_hook = f'{dumpenv} foo bar' receive_repo.update_repo_hook([]) with dumpenv_dest.open() as f: data = json.load(f) assert data['args'] == [dumpenv, 'foo', 'bar'] receive_repo.config.update_hook = f'{dumpenv} "foo bar"' receive_repo.update_repo_hook([]) with dumpenv_dest.open() as f: data = json.load(f) assert data['args'] == [dumpenv, 'foo bar'] receive_repo.config.update_hook = fr'{dumpenv} foo\ bar' receive_repo.update_repo_hook([]) with dumpenv_dest.open() as f: data = json.load(f) assert data['args'] == [dumpenv, 'foo bar'] def test_receive_missing(self, tmp_files_path, receive_repo, source_repo, source_server): random_commit(source_repo, tmp_files_path, 'ref1') with pytest.raises(receive.OTReceiveError) as excinfo: receive_repo.receive(['missing']) assert str(excinfo.value) == \ 'Could not find ref missing in summary file' with pytest.raises(receive.OTReceiveError) as excinfo: receive_repo.receive(['missing', 'ref1']) assert str(excinfo.value) == \ 'Could not find ref missing in summary file' def test_receive_specific(self, tmp_files_path, receive_repo, source_repo, source_server): random_commit(source_repo, tmp_files_path, 'ref1') random_commit(source_repo, tmp_files_path, 'ref2') merged = receive_repo.receive(['ref1']) assert merged == {'ref1'} refs = local_refs(receive_repo) assert refs.keys() == {'ref1'} merged = receive_repo.receive(['ref1', 'ref2']) assert merged == {'ref2'} refs = local_refs(receive_repo) assert refs.keys() == {'ref1', 'ref2'} merged = 
receive_repo.receive(['ref1', 'ref2']) assert merged == set() refs = local_refs(receive_repo) assert refs.keys() == {'ref1', 'ref2'} def test_receive_all(self, tmp_files_path, receive_repo, source_repo, source_server): random_commit(source_repo, tmp_files_path, 'ref1') random_commit(source_repo, tmp_files_path, 'ref2') random_commit(source_repo, tmp_files_path, 'appstream/x86_64') random_commit(source_repo, tmp_files_path, 'appstream2/x86_64') random_commit(source_repo, tmp_files_path, 'ostree-metadata') source_refs = local_refs(source_repo) assert source_refs.keys() == { 'ref1', 'ref2', 'appstream/x86_64', 'appstream2/x86_64', 'ostree-metadata', } merged = receive_repo.receive([]) assert merged == {'ref1', 'ref2'} refs = local_refs(receive_repo) assert refs.keys() == {'ref1', 'ref2'} merged = receive_repo.receive([]) assert merged == set() refs = local_refs(receive_repo) assert refs.keys() == {'ref1', 'ref2'} def test_receive_dry_run(self, tmp_files_path, receive_repo, source_repo, source_server): random_commit(source_repo, tmp_files_path, 'ref1') merged = receive_repo.receive(['ref1'], dry_run=True) assert merged == {'ref1'} refs = local_refs(receive_repo) assert refs.keys() == set() def test_receive_force(self, tmp_files_path, receive_repo, source_repo, source_server, caplog): caplog.set_level(logging.WARNING, receive.logger.name) # First make a commit and pull it directly so the destination # has the exact same commit. checksum = random_commit( source_repo, tmp_files_path, 'ref1', timestamp=0, ) opts = GLib.Variant('a{sv}', { 'refs': GLib.Variant('as', ['ref1']), }) receive_repo.pull_with_options(source_repo.path.as_uri(), opts) refs = local_refs(receive_repo) assert refs == {'ref1': checksum} # Non-forced receive will get nothing. There should be no # warnings since the commits are exactly the same. caplog.clear() merged = receive_repo.receive(['ref1']) assert merged == set() refs = local_refs(receive_repo) assert refs == {'ref1': checksum} assert caplog.record_tuples == [] # Forced merge will make a new commit. This will have warnings # about both timestamp and content. caplog.clear() merged = receive_repo.receive(['ref1'], force=True) assert merged == {'ref1'} refs = local_refs(receive_repo) assert refs.keys() == {'ref1'} assert refs['ref1'] != checksum assert caplog.record_tuples == [ ( receive.logger.name, logging.WARNING, f'received ref1 commit {checksum} is not newer than ' f'current ref1 commit {checksum}' ), ( receive.logger.name, logging.WARNING, f'received ref1 commit {checksum} has the same content as ' f'current ref1 commit {checksum}' ), ] # Make a new commit with the same content and set the # destination repo back to the original commit. with oneshot_transaction(source_repo): mtree = OSTree.MutableTree.new() _, root, _ = source_repo.read_commit(checksum) _, commit, _ = source_repo.load_commit(checksum) source_repo.write_directory_to_mtree(root, mtree, None) _, new_root = source_repo.write_mtree(mtree) metadata = commit.get_child_value(0) _, new_checksum = source_repo.write_commit_with_time( checksum, 'Test commit', None, metadata, new_root, 1, ) source_repo.transaction_set_ref(None, 'ref1', new_checksum) receive_repo.set_ref_immediate(None, 'ref1', checksum) # Non-forced receive will get nothing but there will be a # warning about the content. 
caplog.clear() merged = receive_repo.receive(['ref1']) assert merged == set() refs = local_refs(receive_repo) assert refs == {'ref1': checksum} assert caplog.record_tuples == [ ( receive.logger.name, logging.WARNING, f'received ref1 commit {new_checksum} has the same content ' f'as current ref1 commit {checksum}' ), ] # Forced merge will make a new commit. caplog.clear() merged = receive_repo.receive(['ref1'], force=True) assert merged == {'ref1'} refs = local_refs(receive_repo) assert refs.keys() == {'ref1'} assert refs['ref1'] != checksum # Make a random commit in the destination so it's newer and has # different content. dest_checksum = random_commit( receive_repo, tmp_files_path, 'ref1', timestamp=2, ) # Non-forced receive will get nothing but there will be a # warning about the timestamp. caplog.clear() merged = receive_repo.receive(['ref1']) assert merged == set() refs = local_refs(receive_repo) assert refs == {'ref1': dest_checksum} assert caplog.record_tuples == [ ( receive.logger.name, logging.WARNING, f'received ref1 commit {new_checksum} is not newer than ' f'current ref1 commit {dest_checksum}' ), ] # Forced merge will make a new commit. caplog.clear() merged = receive_repo.receive(['ref1'], force=True) assert merged == {'ref1'} refs = local_refs(receive_repo) assert refs.keys() == {'ref1'} assert refs['ref1'] != dest_checksum class TestReceiver: """Tests for OTReceiver class""" def test_default_config(self): receiver = receive.OTReceiver() assert receiver.config == receive.OTReceiveConfig() def test_receive(self, receiver, tmp_files_path, source_repo, dest_repo, source_server): random_commit(source_repo, tmp_files_path, 'ref1') source_refs = local_refs(source_repo) assert source_refs.keys() == {'ref1'} merged = receiver.receive(dest_repo.path, source_server.url, ['ref1']) assert merged == {'ref1'} dest_refs = local_refs(dest_repo) assert dest_refs.keys() == {'ref1'} merged = receiver.receive(dest_repo.path, source_server.url, ['ref1']) assert merged == set() dest_refs = local_refs(dest_repo) assert dest_refs.keys() == {'ref1'} # Test that repos override is applied. 
summary_path = dest_repo.path / 'summary' assert not summary_path.exists() assert not receiver.config.update receiver.config.repos = {str(dest_repo.path): {'update': True}} random_commit(source_repo, tmp_files_path, 'ref2') merged = receiver.receive(dest_repo.path, source_server.url, ['ref2']) assert merged == {'ref2'} assert summary_path.exists() class TestRepoConfig: """Tests for OTReceiveRepoConfig""" def test_defaults(self): config = receive.OTReceiveRepoConfig(Path('foo'), 'http://bar') assert dataclasses.asdict(config) == { 'path': Path('foo'), 'url': 'http://bar', 'gpg_sign': [], 'gpg_homedir': None, 'gpg_verify': False, 'gpg_trustedkeys': None, 'sign_type': 'ed25519', 'sign_keyfiles': [], 'sign_verify': False, 'sign_trustedkeyfile': None, 'update': True, 'update_hook': None, } def test_required(self): with pytest.raises(TypeError): receive.OTReceiveRepoConfig() with pytest.raises(TypeError): receive.OTReceiveRepoConfig(path=Path('foo')) with pytest.raises(TypeError): receive.OTReceiveRepoConfig(url='http://bar') class TestConfig: """Tests for OTReceiveConfig""" def test_defaults(self): config = receive.OTReceiveConfig() assert dataclasses.asdict(config) == { 'root': None, 'gpg_sign': [], 'gpg_homedir': None, 'gpg_verify': False, 'gpg_trustedkeys': None, 'sign_type': 'ed25519', 'sign_keyfiles': [], 'sign_verify': False, 'sign_trustedkeyfile': None, 'update': True, 'update_hook': None, 'repos': {}, 'log_level': 'INFO', 'force': False, 'dry_run': False, } def test_invalid(self): with pytest.raises(receive.OTReceiveConfigError) as excinfo: receive.OTReceiveConfig(update=None) assert str(excinfo.value) == ( "update must be an instance of , " "but found " ) with pytest.raises(receive.OTReceiveConfigError) as excinfo: receive.OTReceiveConfig(update='true') assert str(excinfo.value) == ( "update must be an instance of , " "but found " ) def test_default_paths(self, tmp_path, monkeypatch): assert receive.OTReceiveConfig.default_paths() == [ Path('/etc/ostree/ostree-receive.conf'), Path('~/.config/ostree/ostree-receive.conf'), ] monkeypatch.setenv('XDG_CONFIG_HOME', str(tmp_path)) assert receive.OTReceiveConfig.default_paths() == [ Path('/etc/ostree/ostree-receive.conf'), tmp_path / 'ostree/ostree-receive.conf', ] monkeypatch.setenv('OSTREE_RECEIVE_CONF', str(tmp_path)) assert receive.OTReceiveConfig.default_paths() == [tmp_path] def test_load_valid(self, tmp_path): path = tmp_path / 'ostree-receive.conf' data = { 'root': str(tmp_path / 'pub/repos'), 'gpg_sign': ['01234567', '89ABCDEF'], 'gpg_homedir': str(tmp_path / 'gnupg'), 'gpg_verify': True, 'gpg_trustedkeys': str(tmp_path / 'trustedkeys.gpg'), 'sign_type': 'ed25519', 'sign_keyfiles': [ str(tmp_path / 'signkey1'), str(tmp_path / 'signkey2'), ], 'sign_verify': True, 'sign_trustedkeyfile': str(tmp_path / 'trustedkey'), 'update': False, 'update_hook': '/foo/bar baz', 'repos': { 'foo': { 'gpg_sign': ['76543210'], }, 'bar': { 'gpg_verify': False, }, }, 'log_level': 'DEBUG', 'force': True, 'dry_run': True, } with path.open('w') as f: yaml.dump(data, f) config = receive.OTReceiveConfig.load([path]) assert dataclasses.asdict(config) == data def test_load_none(self): config = receive.OTReceiveConfig.load([]) assert config == receive.OTReceiveConfig() def test_load_empty(self, tmp_path, caplog): caplog.set_level(logging.DEBUG, receive.logger.name) path = tmp_path / 'ostree-receive.conf' path.touch() config = receive.OTReceiveConfig.load([path]) assert config == receive.OTReceiveConfig() expected_log_record = ( receive.logger.name, logging.DEBUG, 
f'Ignoring empty config file {path}' ) assert expected_log_record in caplog.record_tuples def test_load_missing(self, tmp_path, caplog): caplog.set_level(logging.DEBUG, receive.logger.name) path = tmp_path / 'ostree-receive.conf' config = receive.OTReceiveConfig.load([path]) assert config == receive.OTReceiveConfig() expected_log_record = ( receive.logger.name, logging.DEBUG, f'Skipping missing config file {path}' ) assert expected_log_record in caplog.record_tuples def test_load_multiple(self, tmp_path): path1 = tmp_path / 'receive1.conf' data = { 'log_level': 'DEBUG', } with path1.open('w') as f: yaml.dump(data, f) path2 = tmp_path / 'receive2.conf' data = { 'log_level': 'WARNING', } with path2.open('w') as f: yaml.dump(data, f) config = receive.OTReceiveConfig.load([path1, path2]) assert config.log_level == 'WARNING' def test_load_unknown(self, tmp_path, caplog): caplog.set_level(logging.WARNING, receive.logger.name) path = tmp_path / 'ostree-receive.conf' data = { 'fake_option': False, } with path.open('w') as f: yaml.dump(data, f) config = receive.OTReceiveConfig.load([path]) assert config == receive.OTReceiveConfig() expected_log_record = ( receive.logger.name, logging.WARNING, f'Unrecognized option fake_option in config file {path}', ) assert expected_log_record in caplog.record_tuples def test_load_invalid(self, tmp_path): # Passing a non-path as the config file should fail. with pytest.raises(receive.OTReceiveConfigError, match='PathLike'): receive.OTReceiveConfig.load([True]) path = tmp_path / 'ostree-receive.conf' data = { 'update': None, } with path.open('w') as f: yaml.dump(data, f) with pytest.raises(receive.OTReceiveConfigError) as excinfo: receive.OTReceiveConfig.load([path]) assert str(excinfo.value) == ( "update must be an instance of , " "but found " ) data = ['not', 'a', 'mapping'] with path.open('w') as f: yaml.dump(data, f) with pytest.raises(receive.OTReceiveConfigError) as excinfo: receive.OTReceiveConfig.load([path]) assert str(excinfo.value) == ( f'Config file {path} is not a YAML mapping' ) def test_load_env(self, tmp_path, monkeypatch): path = tmp_path / 'ostree-receive.conf' data = { 'log_level': 'DEBUG', } with path.open('w') as f: yaml.dump(data, f) monkeypatch.setenv('OSTREE_RECEIVE_CONF', str(path)) config = receive.OTReceiveConfig.load() assert config.log_level == 'DEBUG' def test_load_args(self, caplog): caplog.set_level(logging.DEBUG, receive.logger.name) ap = argparse.ArgumentParser() ap.add_argument('--log-level', default='WARNING') ap.add_argument('--someopt', default='someval') ap.add_argument('pos') args = ap.parse_args(['foo']) config = receive.OTReceiveConfig.load(paths=[], args=args) assert dataclasses.asdict(config) == { 'root': None, 'gpg_sign': [], 'gpg_homedir': None, 'gpg_verify': False, 'gpg_trustedkeys': None, 'sign_type': 'ed25519', 'sign_keyfiles': [], 'sign_verify': False, 'sign_trustedkeyfile': None, 'update': True, 'update_hook': None, 'repos': {}, 'log_level': 'WARNING', 'force': False, 'dry_run': False, } expected_log_record = ( receive.logger.name, logging.DEBUG, 'Ignoring argument someopt' ) assert expected_log_record in caplog.record_tuples expected_log_record = ( receive.logger.name, logging.DEBUG, 'Ignoring argument pos' ) assert expected_log_record in caplog.record_tuples def test_load_args_invalid(self): with pytest.raises(receive.OTReceiveConfigError) as excinfo: receive.OTReceiveConfig.load(paths=[], args='foo') assert str(excinfo.value) == ( 'args is not an argparse.Namespace instance' ) def test_load_conf_and_args(self, 
        path = tmp_path / 'ostree-receive.conf'
        data = {
            'log-level': 'DEBUG',
        }
        with path.open('w') as f:
            yaml.dump(data, f)

        ap = argparse.ArgumentParser()
        ap.add_argument('--log-level', default='WARNING')
        args = ap.parse_args([])

        config = receive.OTReceiveConfig.load(paths=[path], args=args)
        assert config.log_level == 'WARNING'

    def test_repo_config(self, tmp_path, monkeypatch):
        monkeypatch.chdir(tmp_path)
        config = receive.OTReceiveConfig()
        url = 'http://example.com'

        rel_root = Path('root')
        root = rel_root.resolve()
        root.mkdir()
        root_repo = root / 'repo'
        rel_root_repo = root_repo.relative_to(root)
        root_repo.mkdir()
        rel_nonroot_repo = Path('repo')
        nonroot_repo = rel_nonroot_repo.resolve()
        nonroot_repo.mkdir()

        # Non-existent repo should raise an exception.
        repo_path = tmp_path / 'nonexistent'
        with pytest.raises(receive.OTReceiveError) as excinfo:
            config.get_repo_config(repo_path, url)
        assert str(excinfo.value) == f'repo {repo_path} not found'

        # Without root setup, the path should be passed back as is.
        repo_config = config.get_repo_config(str(rel_nonroot_repo), url)
        assert repo_config.path == rel_nonroot_repo
        repo_config = config.get_repo_config(rel_nonroot_repo, url)
        assert repo_config.path == rel_nonroot_repo
        repo_config = config.get_repo_config(str(nonroot_repo), url)
        assert repo_config.path == nonroot_repo
        repo_config = config.get_repo_config(nonroot_repo, url)
        assert repo_config.path == nonroot_repo

        # Requesting a repo outside the root should fail.
        config.root = str(root)
        with pytest.raises(receive.OTReceiveError) as excinfo:
            config.get_repo_config(nonroot_repo, url)
        assert str(excinfo.value) == f'repo {nonroot_repo} not found'

        # All combinations of root, repo path, and config override path.
        base_expected_config = {
            'path': nonroot_repo,
            'url': url,
            'gpg_sign': config.gpg_sign,
            'gpg_homedir': config.gpg_homedir,
            'gpg_verify': config.gpg_verify,
            'gpg_trustedkeys': config.gpg_trustedkeys,
            'sign_type': config.sign_type,
            'sign_keyfiles': config.sign_keyfiles,
            'sign_verify': config.sign_verify,
            'sign_trustedkeyfile': config.sign_trustedkeyfile,
            'update': config.update,
            'update_hook': config.update_hook,
        }
        for root_path, repo_path, override_path, expected_repo_path in (
            # Absolute repo path with no root and no override.
            (None, nonroot_repo, None, nonroot_repo),
            # Relative repo path with no root and no override.
            (None, rel_nonroot_repo, None, rel_nonroot_repo),
            # Absolute repo path with absolute root and no override.
            (root, root_repo, None, root_repo),
            # Relative repo path with absolute root and no override.
            (root, rel_root_repo, None, root_repo),
            # Absolute repo path with relative root and no override.
            (rel_root, root_repo, None, root_repo),
            # Relative repo path with relative root and no override.
            (rel_root, rel_root_repo, None, root_repo),
            # Absolute repo path with no root and absolute override.
            (None, nonroot_repo, nonroot_repo, nonroot_repo),
            # Relative repo path with no root and absolute override.
            (None, rel_nonroot_repo, nonroot_repo, rel_nonroot_repo),
            # Absolute repo path with absolute root and absolute override.
            (root, root_repo, root_repo, root_repo),
            # Relative repo path with absolute root and absolute override.
            (root, rel_root_repo, root_repo, root_repo),
            # Absolute repo path with relative root and absolute override.
            (rel_root, root_repo, root_repo, root_repo),
            # Relative repo path with relative root and absolute override.
            (rel_root, rel_root_repo, root_repo, root_repo),
            # Absolute repo path with no root and relative override.
            (None, nonroot_repo, rel_nonroot_repo, nonroot_repo),
            # Relative repo path with no root and relative override.
            (None, rel_nonroot_repo, rel_nonroot_repo, rel_nonroot_repo),
            # Absolute repo path with absolute root and relative override.
            (root, root_repo, rel_root_repo, root_repo),
            # Relative repo path with absolute root and relative override.
            (root, rel_root_repo, rel_root_repo, root_repo),
            # Absolute repo path with relative root and relative override.
            (rel_root, root_repo, rel_root_repo, root_repo),
            # Relative repo path with relative root and relative override.
            (rel_root, rel_root_repo, rel_root_repo, root_repo),
        ):
            logger.debug(
                f'Testing {root_path=}, {repo_path=}, {override_path=}, '
                f'{expected_repo_path=}',
            )
            expected_config = base_expected_config.copy()
            expected_config['path'] = expected_repo_path

            config.root = str(root_path) if root_path else None
            if override_path:
                config.repos = {str(override_path): {'update': False}}
                expected_config['update'] = False
            else:
                config.repos = {}
                expected_config['update'] = True

            repo_config = config.get_repo_config(repo_path, url)
            assert dataclasses.asdict(repo_config) == expected_config
            if override_path:
                assert repo_config.update != config.update


class TestArgParser:
    def test_no_repo(self, capsys):
        ap = receive.OTReceiveArgParser()
        with pytest.raises(SystemExit) as excinfo:
            ap.parse_args([])
        assert excinfo.value.code == 2
        out, err = capsys.readouterr()
        assert out == ''
        assert err.endswith(
            'error: the following arguments are required: REPO, URL\n'
        )

    def test_no_url(self, capsys):
        ap = receive.OTReceiveArgParser()
        with pytest.raises(SystemExit) as excinfo:
            ap.parse_args(['repo'])
        assert excinfo.value.code == 2
        out, err = capsys.readouterr()
        assert out == ''
        assert err.endswith(
            'error: the following arguments are required: URL\n'
        )

    def test_defaults(self):
        ap = receive.OTReceiveArgParser()
        args = ap.parse_args(['repo', 'url'])
        assert args == argparse.Namespace(
            repo='repo',
            url='url',
            refs=[],
        )

    def test_refs(self):
        ap = receive.OTReceiveArgParser()
        args = ap.parse_args(['repo', 'url', 'foo'])
        assert args.refs == ['foo']
        args = ap.parse_args(['repo', 'url', 'foo', 'bar', 'baz'])
        assert args.refs == ['foo', 'bar', 'baz']

    def test_update(self):
        ap = receive.OTReceiveArgParser()
        args = ap.parse_args(['--no-update', 'repo', 'url'])
        assert args.update is False

    def test_dry_run(self):
        ap = receive.OTReceiveArgParser()
        args = ap.parse_args(['-n', 'repo', 'url'])
        assert args.dry_run is True
        args = ap.parse_args(['--dry-run', 'repo', 'url'])
        assert args.dry_run is True

    def test_force(self):
        ap = receive.OTReceiveArgParser()
        args = ap.parse_args(['-f', 'repo', 'url'])
        assert args.force is True
        args = ap.parse_args(['--force', 'repo', 'url'])
        assert args.force is True

    def test_log_level(self):
        ap = receive.OTReceiveArgParser()
        args = ap.parse_args(['-v', 'repo', 'url'])
        assert args.log_level == 'DEBUG'
        args = ap.parse_args(['--verbose', 'repo', 'url'])
        assert args.log_level == 'DEBUG'
        args = ap.parse_args(['-q', 'repo', 'url'])
        assert args.log_level == 'WARNING'
        args = ap.parse_args(['--quiet', 'repo', 'url'])
        assert args.log_level == 'WARNING'


def test_compat_main(monkeypatch):
    """Check compat_main dispatching"""
    from otpush import receive_legacy

    monkeypatch.setattr(receive, 'main', lambda: 'current')
    monkeypatch.setattr(receive_legacy, 'main', lambda: 'legacy')

    with monkeypatch.context() as ctx:
        ctx.setattr(sys, 'argv', ['ostree-receive'])
        assert receive.compat_main() == 'current'

    with monkeypatch.context() as ctx:
        ctx.setattr(sys, 'argv', ['ostree-receive', '--opt'])
        assert receive.compat_main() == 'current'
    with monkeypatch.context() as ctx:
        ctx.setattr(sys, 'argv', ['ostree-receive', '--opt', 'repo'])
        assert receive.compat_main() == 'current'

    with monkeypatch.context() as ctx:
        ctx.setattr(sys, 'argv', ['ostree-receive', '--repo=repo'])
        assert receive.compat_main() == 'legacy'

    with monkeypatch.context() as ctx:
        ctx.setattr(sys, 'argv', ['ostree-receive', '--opt', '--repo=repo'])
        assert receive.compat_main() == 'legacy'

ostree-push-1.2.0/tests/test_receive_legacy.py

# Tests for receive_legacy/ostree-receive-0.

from contextlib import contextmanager
import gi
import logging
import os
import subprocess

from otpush.receive_legacy import (
    PushMessageReader,
    PushMessageWriter,
    ostree_object_path,
)

from .util import random_commit

gi.require_version('OSTree', '1.0')
from gi.repository import OSTree  # noqa: E402

logger = logging.getLogger(__name__)


@contextmanager
def run_receive(
    dest_repo,
    env_vars,
    check=True,
    command='ostree-receive-0',
    options=None,
):
    """Run ostree-receive-0 and return reader/writer buffers

    The receiver needs to run in a subprocess since it uses
    sys.stdin/stdout directly and that interacts poorly with pytest.
    """
    # Start the receiver with pipes for stdin and stdout to use for the
    # protocol.
    env = os.environ.copy()
    if env_vars:
        env.update(env_vars)
    options = options or []
    cmd = [command, '--debug', f'--repo={dest_repo.path}'] + options
    proc = subprocess.Popen(
        cmd,
        env=env,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
    )

    # Create a reader and writer and yield them to the caller.
    reader = PushMessageReader(proc.stdout)
    writer = PushMessageWriter(proc.stdin)
    yield reader, writer

    # Make sure the process exits. This is basically how subprocess.run()
    # cleans up.
    try:
        out, _ = proc.communicate(timeout=5)
    except subprocess.TimeoutExpired:
        logger.warning(f'{cmd} did not exit, killing it')
        proc.kill()
        proc.wait()
        raise
    except:  # noqa: E722
        logger.warning(f'Exception stopping {cmd}, killing it')
        proc.kill()
        raise

    if check:
        ret = proc.poll()
        assert ret == 0
        assert out == b''


def test_noop(dest_repo, cli_env_vars):
    """Basic protocol smoketest"""
    with run_receive(dest_repo, cli_env_vars) as (reader, writer):
        data = reader.receive_info()
        assert data['mode'] == OSTree.RepoMode.ARCHIVE_Z2
        assert data['refs'] == {}
        writer.send_done()


def commit_objects_iter(repo, rev):
    """Get the path for all objects referenced by a commit"""
    _, reachable = repo.traverse_commit(rev, 0, None)
    for obj in reachable:
        objname = OSTree.object_to_string(obj[0], obj[1])
        if obj[1] == OSTree.ObjectType.FILE:
            # Make this a filez since we're archive-z2
            objname += 'z'
        elif obj[1] == OSTree.ObjectType.COMMIT:
            # Add in detached metadata
            metaobj = objname + 'meta'
            metapath = ostree_object_path(repo, metaobj)
            if os.path.exists(metapath):
                yield metaobj
        yield objname


def test_update(dest_repo, cli_env_vars, source_repo, tmp_files_path):
    """Test the update and putobject commands"""
    rev = random_commit(source_repo, tmp_files_path, 'test')

    # Try to update with an invalid from_rev.
    with run_receive(dest_repo, cli_env_vars,
                     check=False) as (reader, writer):
        data = reader.receive_info()
        assert data['refs'] == {}

        # There's no remote commit, so from_rev should be all 0s.
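        # Sending all 1s instead should make the receiver report an
        # "Invalid from commit" error.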
        from_rev = '1' * 64
        update_refs = {'test': (from_rev, rev)}
        writer.send_update(update_refs)
        data = reader.receive_status()
        assert not data['result']
        assert data['message'].startswith('Invalid from commit')
        writer.send_done()

    # Send the update correctly.
    with run_receive(dest_repo, cli_env_vars) as (reader, writer):
        data = reader.receive_info()
        assert data['refs'] == {}

        from_rev = '0' * 64
        update_refs = {'test': (from_rev, rev)}
        writer.send_update(update_refs)
        data = reader.receive_status()
        assert data['result']

        for obj in set(commit_objects_iter(source_repo, rev)):
            writer.send_putobject(source_repo, obj)
            data = reader.receive_status()
            assert data['result']

        writer.send_done()

    # The destination repo should now have the commit and ref.
    _, dest_refs = dest_repo.list_refs()
    assert dest_refs == {'test': rev}
    with run_receive(dest_repo, cli_env_vars) as (reader, writer):
        data = reader.receive_info()
        assert data['refs'] == {'test': rev}
        writer.send_done()

ostree-push-1.2.0/tests/test_receive_shell.py

import json
import os
import pytest
import shutil
import subprocess

from otpush import VERSION

from .util import SCRIPTSDIR, TESTSDIR

shell_abspath = os.path.join(SCRIPTSDIR, 'ostree-receive-shell')
dumpenv_abspath = os.path.join(TESTSDIR, 'dumpenv')
MAJOR = VERSION.split('.')[0]
ostree_receive_versioned = f'ostree-receive-{MAJOR}'

# Some tests can't be run if ostree-receive is in PATH
skip_ostree_receive_in_path = pytest.mark.skipif(
    shutil.which(ostree_receive_versioned) is not None,
    reason=f'cannot test correctly with {ostree_receive_versioned} in PATH',
)


@pytest.fixture
def tmp_bindir(tmp_path):
    """A temporary directory to be included in PATH"""
    bindir = tmp_path / 'bin'
    bindir.mkdir()
    return bindir


@pytest.fixture
def tmp_receive(tmp_bindir):
    """Copy dumpenv to a temporary ostree-receive"""
    receive = tmp_bindir / ostree_receive_versioned
    shutil.copy(dumpenv_abspath, receive)
    os.chmod(receive, 0o755)
    return receive


@pytest.fixture
def tmp_shell(tmp_bindir):
    """Copy ostree-receive-shell to the temporary bindir"""
    shell = tmp_bindir / 'ostree-receive-shell'
    shutil.copy(shell_abspath, shell)
    os.chmod(shell, 0o755)
    return shell


@pytest.fixture
def shell_env_vars(tmp_bindir):
    """Environment variables for shell tests"""
    env = os.environ.copy()
    path = env.get('PATH', os.defpath).split(os.pathsep)
    path.insert(0, str(tmp_bindir))
    env['PATH'] = os.pathsep.join(path)
    return env


def test_command_args(shell_env_vars, tmp_shell, tmp_receive):
    """Test how arguments are passed to ostree-receive"""
    cmd = ('ostree-receive-shell', '-c', tmp_receive.name)
    proc = subprocess.run(cmd, check=True, env=shell_env_vars,
                          stdout=subprocess.PIPE)
    data = json.loads(proc.stdout.decode('utf-8'))
    assert data['args'] == [str(tmp_receive)]

    cmd = ('ostree-receive-shell', '-c', f'{tmp_receive.name} -n foo bar')
    proc = subprocess.run(cmd, check=True, env=shell_env_vars,
                          stdout=subprocess.PIPE)
    data = json.loads(proc.stdout.decode('utf-8'))
    assert data['args'] == [str(tmp_receive), '-n', 'foo', 'bar']


def test_auto_path(shell_env_vars, tmp_receive):
    """Test that the shell's directory is appended to PATH"""
    # Here we use the shell in the source directory to ensure that its
    # directory isn't in PATH. Otherwise it won't get appended.
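    # tmp_receive is a copy of the dumpenv helper, which prints its argv and
    # environment as JSON, so the PATH seen by the child can be inspected.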
    cmd = (shell_abspath, '-c', tmp_receive.name)
    proc = subprocess.run(cmd, check=True, env=shell_env_vars,
                          stdout=subprocess.PIPE)
    data = json.loads(proc.stdout.decode('utf-8'))
    path = data['env']['PATH'].split(os.pathsep)
    assert path[-1] == SCRIPTSDIR


def test_no_interactive():
    """Test trying to run the shell interactively with no arguments"""
    cmd = (shell_abspath,)
    proc = subprocess.run(cmd, stderr=subprocess.PIPE)
    assert proc.returncode == 1
    assert proc.stderr.decode('utf-8') == (
        'ostree-receive-shell: Cannot run interactively\n'
    )


def test_wrong_args():
    """Test passing incorrect arguments"""
    commands = (
        (shell_abspath, 'foo'),
        (shell_abspath, 'foo', 'bar'),
        (shell_abspath, 'foo', 'bar', 'baz'),
        (shell_abspath, '-c'),
        (shell_abspath, '-c', 'foo', 'bar'),
    )
    for cmd in commands:
        proc = subprocess.run(cmd, stderr=subprocess.PIPE)
        assert proc.returncode == 1
        assert proc.stderr.decode('utf-8') == (
            'ostree-receive-shell: Must be run with no arguments or '
            'with -c cmd\n'
        )


def test_allowed_commands(shell_env_vars, tmp_shell, tmp_bindir):
    """Test when allowed and disallowed commands are requested"""
    # Allowed commands
    allowed = [
        f'ostree-receive-{major}' for major in range(int(MAJOR) + 1)
    ] + ['ostree-receive']
    for name in allowed:
        cmd = (shell_abspath, '-c', name)
        receive = tmp_bindir / name
        shutil.copy(dumpenv_abspath, receive)
        os.chmod(receive, 0o755)
        proc = subprocess.run(cmd, check=True, env=shell_env_vars,
                              stdout=subprocess.PIPE)
        data = json.loads(proc.stdout.decode('utf-8'))
        assert data['args'] == [str(receive)]

    # Disallowed commands
    arguments = (
        ('foo',),
        ('foo', 'bar'),
        (f'/usr/bin/{ostree_receive_versioned}'),
        ('/usr/bin/ostree-receive'),
        (f'ostree-receive-{int(MAJOR) + 1}'),
    )
    for args in arguments:
        cmd = (shell_abspath, '-c', ' '.join(args))
        proc = subprocess.run(cmd, stderr=subprocess.PIPE)
        assert proc.returncode == 1
        assert proc.stderr.decode('utf-8') == (
            f'ostree-receive-shell: Executing {args[0]} not allowed\n'
        )


# This test depends on the temporary ostree-receive being the only one
# in PATH, so it has to be skipped otherwise. It might be possible to
# mangle PATH so it's avoided, but then the required python might also
# be removed from PATH.
@skip_ostree_receive_in_path
def test_exec_errors(shell_env_vars, tmp_shell, tmp_receive, tmp_path):
    """Test how errors from execve are handled"""
    cmd = ('ostree-receive-shell', '-c', tmp_receive.name)

    # Make the temporary ostree-receive non-executable to get a
    # permission denied error.
    tmp_receive.chmod(0o644)
    proc = subprocess.run(cmd, env=shell_env_vars, stderr=subprocess.PIPE)
    assert proc.returncode == 126
    assert proc.stderr.decode('utf-8') == (
        f'ostree-receive-shell: {tmp_receive.name}: Permission denied\n'
    )

    # Make the temporary ostree-receive into a dangling symlink to get a
    # file not found error.
    tmp_receive.unlink()
    tmp_receive.symlink_to(tmp_path / 'nonexistent')
    proc = subprocess.run(cmd, env=shell_env_vars, stderr=subprocess.PIPE)
    assert proc.returncode == 127
    assert proc.stderr.decode('utf-8') == (
        f'ostree-receive-shell: {tmp_receive.name}: '
        'No such file or directory\n'
    )

ostree-push-1.2.0/tests/test_sshd.py

import json
import logging
import os
import subprocess

from .util import (
    SRCDIR,
    TESTSDIR,
    needs_sshd,
)

logger = logging.getLogger(__name__)

# Skip all tests here if the required sshd is not available.
pytestmark = needs_sshd


def test_basic(sshd, ssh_options):
    cmd = (
        ['ssh', '-p', str(sshd.port)] + ssh_options +
        [sshd.address, 'dumpenv']
    )
    logger.debug('SSH command: %s', ' '.join(cmd))
    logger.debug('Source PATH=%s', os.getenv('PATH'))
    out = subprocess.check_output(cmd)
    data = json.loads(out.decode('utf-8'))

    args = data['args']
    assert args == [os.path.join(TESTSDIR, 'dumpenv')]

    env = data['env']
    assert 'PATH' in env
    assert 'PYTHONPATH' in env

    path = env['PATH'].split(os.pathsep)
    logger.debug('Destination PATH=%s', path)
    assert TESTSDIR in path
    toxbindir = os.getenv('TOXBINDIR')
    if toxbindir:
        assert toxbindir in path
        assert path.index(toxbindir) < path.index(TESTSDIR)

    pypath = env['PYTHONPATH'].split(os.pathsep)
    logger.debug('Destination PYTHONPATH=%s', pypath)
    if not toxbindir:
        assert SRCDIR in pypath

ostree-push-1.2.0/tests/util.py

from collections import namedtuple
from contextlib import contextmanager
import getpass
import gi
import logging
import os
import pytest
import random
import shutil
import socket
import subprocess
from tempfile import TemporaryDirectory
import time

gi.require_version('OSTree', '1.0')
from gi.repository import GLib, Gio, OSTree  # noqa: E402

logger = logging.getLogger(__name__)

TESTSDIR = os.path.abspath(os.path.dirname(__file__))
DATADIR = os.path.join(TESTSDIR, 'data')
SRCDIR = os.path.dirname(TESTSDIR)
SCRIPTSDIR = os.path.join(SRCDIR, 'scripts')

PGP_KEY = os.path.join(DATADIR, 'pgp-key.asc')
PGP_PUB = os.path.join(DATADIR, 'pgp-pub.asc')
PGP_PUB_KEYRING = os.path.join(DATADIR, 'pgp-pub.gpg')
PGP_KEY_ID = '281D0DDC6EDD77CF6A8A936C247D3E51CDA08B6B'

ED25519_PUBLIC_KEY = \
    'KDurJTccLAQ4AqtSAU8g7aPV25i6oEsy1TS6gh4LdlE='
ED25519_PRIVATE_KEY = (
    '2b19zVhdsWKxoUOGP8OTBwQGAGSshWJCpage7Ov+tUcoO6slNxwsBDgCq1IBTyDto9XbmLqgS'
    'zLVNLqCHgt2UQ=='
)


class OTPushTestError(Exception):
    pass


@contextmanager
def oneshot_transaction(repo):
    """Start an OSTree repo transaction and abort on any failures"""
    repo.prepare_transaction()
    try:
        yield

        # Commit the transaction
        repo.commit_transaction()
    except:  # noqa: E722
        # Abort on any failures
        repo.abort_transaction()
        raise


needs_ostree = pytest.mark.skipif(
    not shutil.which('ostree'), reason='ostree required'
)
needs_flatpak = pytest.mark.skipif(
    not shutil.which('flatpak'), reason='flatpak required'
)


def random_commit(repo, tmpdir, refspec, parent=None, timestamp=None,
                  extra_metadata=None, gpg_key_id=None, gpg_homedir=None,
                  ed25519_key=None):
    """Create a random commit and set refspec to it

    Returns the new commit checksum.
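
    If gpg_key_id or ed25519_key is given, the commit is also signed with
    the corresponding GPG or ed25519 key.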
""" for path in ('a', 'sub/b'): full_path = tmpdir / path full_path.parent.mkdir(exist_ok=True) rand_size = random.randrange(1000, 10000) with open(full_path, 'wb') as f: f.write(os.urandom(rand_size)) # Use current UTC time if no timestamp specified if timestamp is None: timestamp = int(time.time()) logger.info('Using timestamp %u for random commit on %s', timestamp, refspec) # Include the collection and ref bindings in the commit metadata = {} if extra_metadata: metadata.update(extra_metadata) collection_id = repo.get_collection_id() if collection_id is not None: metadata[OSTree.COMMIT_META_KEY_COLLECTION_BINDING] = \ GLib.Variant('s', collection_id) _, remote, ref = OSTree.parse_refspec(refspec) metadata[OSTree.COMMIT_META_KEY_REF_BINDING] = GLib.Variant('as', [ref]) metadata_var = GLib.Variant('a{sv}', metadata) with oneshot_transaction(repo): # Populate a mutable tree from the random files mtree = OSTree.MutableTree.new() repo.write_directory_to_mtree(Gio.File.new_for_path(str(tmpdir)), mtree, None) _, root = repo.write_mtree(mtree) # Commit the mtree root _, checksum = repo.write_commit_with_time(parent, 'Test commit', None, metadata_var, root, timestamp) # Sign the commit if gpg_key_id: repo.sign_commit(checksum, gpg_key_id, gpg_homedir) if ed25519_key: sign = get_ostree_ed25519_sign() sign.set_sk(GLib.Variant('s', ed25519_key)) sign.commit(repo, checksum, None) # Set the ref if remote is None: # Local ref, set a collection ref collection_ref = OSTree.CollectionRef() collection_ref.collection_id = collection_id collection_ref.ref_name = ref repo.transaction_set_collection_ref(collection_ref, checksum) else: # Remote ref repo.transaction_set_ref(remote, ref, checksum) logger.info('Created random commit %s on %s', checksum, refspec) return checksum def wipe_repo(repo): """Delete all refs and objects in repo""" _, refs = repo.list_refs(None) for refspec in refs.keys(): _, remote, ref = OSTree.parse_refspec(refspec) repo.set_ref_immediate(remote, ref, None) repo.prune(OSTree.RepoPruneFlags.REFS_ONLY, -1) def local_refs(repo, prefix=None): """Get local refs in repo excluding remotes and mirrors""" flags = OSTree.RepoListRefsExtFlags.EXCLUDE_REMOTES try: # EXCLUDE_MIRRORS only available since ostree 2019.2 flags |= OSTree.RepoListRefsExtFlags.EXCLUDE_MIRRORS except AttributeError: pass _, refs = repo.list_refs_ext(prefix, flags) return refs def get_summary_variant(path): summary_file = Gio.File.new_for_path(str(path)) summary_bytes, _ = summary_file.load_bytes() summary_variant = GLib.Variant.new_from_bytes( type=GLib.VariantType(OSTree.SUMMARY_GVARIANT_STRING), bytes=summary_bytes, trusted=False) if not summary_variant.is_normal_form(): raise OTPushTestError( f'Summary file {path} is not in normal GVariant format') return summary_variant def get_content_checksum(repo, rev): """Get a commit's content checksum""" _, commit, _ = repo.load_commit(rev) return OSTree.commit_get_content_checksum(commit) class TmpRepo(OSTree.Repo): """Temporary OSTree repo""" COLLECTION_ID = 'com.example.Test' def __init__(self, path, collection_id=COLLECTION_ID, **kwargs): self.path = path self.path.mkdir() repo_file = Gio.File.new_for_path(str(self.path)) super().__init__(path=repo_file, **kwargs) if collection_id: self.set_collection_id(collection_id) self.create(OSTree.RepoMode.ARCHIVE) SSHServerInfo = namedtuple('SSHServerInfo', ('proc', 'address', 'port')) def get_sshd(): """Returns the path to sshd or None Looks in PATH and typical sbin directories not in PATH. 
""" path = os.getenv('PATH', os.defpath) sshd_path = os.pathsep.join([path, '/usr/local/sbin', '/usr/sbin', '/sbin']) sshd = shutil.which('sshd', path=sshd_path) if sshd: logger.debug('Found sshd %s', sshd) else: logger.debug('sshd not found in %s', sshd_path) return sshd def have_required_sshd(sshd=None): """Check whether sshd meets requirements sshd needs to be OpenSSH version 7.8 or newer to support the SetEnv config option. """ if not sshd: sshd = get_sshd() if not sshd: return False # Run sshd -V to get the version. This is actually only an option on # the ssh client, but it will print the version after complaining # about the unknown option. Maybe someday it will exist... proc = subprocess.run([sshd, '-V'], stdout=subprocess.PIPE, stderr=subprocess.STDOUT) lines = iter(proc.stdout.decode('utf-8').splitlines()) try: version_line = next(lines) if version_line.startswith('unknown option'): version_line = next(lines) except StopIteration: logger.debug('No version information from %s -V', sshd) return False logger.debug('sshd version line: %s', version_line) if not version_line.startswith('OpenSSH_'): logger.debug('OpenSSH not in version') return False # The version line should look something like: # # OpenSSH_7.6p1 Ubuntu-4ubuntu0.3, OpenSSL 1.0.2n 7 Dec 2017 # # Get the first word, strip the OpenSSH_ prefix, strip the portable # pX suffix, and then try to get the major and minor version # numbers. openssh_version = version_line.split()[0] _, _, full_version = openssh_version.partition('OpenSSH_') version, _, _ = full_version.partition('p') version_parts = version.split('.') try: major = int(version_parts[0]) except ValueError: logger.debug('Could not get major version from %s', version) return False try: minor = int(version_parts[1]) except IndexError: minor = 0 except ValueError: logger.debug('Could not get minor version from %s', version) return False logger.debug('Detected OpenSSH sshd version %d.%d', major, minor) # OpenSSH 7.8 is needed for the SetEnv option if major < 7 or (major == 7 and minor < 8): logger.debug('OpenSSH sshd version < 7.8') return False return True needs_sshd = pytest.mark.skipif( not have_required_sshd(), reason='OpenSSH sshd version 7.8+ required' ) def get_ssh_server_id(address): """Open a connection to an SSH server and get the identification string If a connection could not be established or no data is received, an empty string is returned. """ for _ in range(10): try: sock = socket.create_connection(address, timeout=1) break except ConnectionRefusedError: logger.debug('Could not connect to port %d', address[1]) time.sleep(0.01) else: logger.debug('Could not connect in 5 attempts') return '' try: return sock.recv(256).decode('utf-8') except socket.timeout: logger.debug('No data received from port %d', address[1]) return '' finally: sock.close() @contextmanager def ssh_server(sshd_config, host_key, authorized_keys, env_vars=None): # Running sshd requires an absolute path sshd = get_sshd() if not sshd: raise OTPushTestError('Could not find sshd') if not have_required_sshd(sshd): raise OTPushTestError(f'{sshd} is not the required version') # Build a SetEnv option value from the provided environment variables. 
    env_vars = env_vars or {}
    setenv = ' '.join([
        f'{var}="{value}"' for var, value in env_vars.items()
    ])

    cmd = [
        sshd,
        # Don't fork
        '-D',
        # Write logs to stderr
        '-e',
        # Config file
        '-f', str(sshd_config),
        # Host key file
        '-h', str(host_key),
        # Authorized keys file
        '-o', f'AuthorizedKeysFile={authorized_keys}',
        # Only allow running user
        '-o', f'AllowUsers={getpass.getuser()}',
        # Allow root login
        '-o', 'PermitRootLogin=yes',
        # Set environment variables for the process
        '-o', f'SetEnv={setenv}',
    ]
    logger.debug('SSH server args: %s', ' '.join(cmd))

    # Loop a few times trying to find an open ephemeral port
    with open('/proc/sys/net/ipv4/ip_local_port_range') as f:
        start_port, stop_port = map(int, f.readline().split())
    proc = None
    try:
        for _ in range(5):
            port = random.randrange(start_port, stop_port)
            logger.debug('Starting %s with port %d', sshd, port)
            proc = subprocess.Popen(cmd + ['-p', str(port)])
            server_id = get_ssh_server_id(('127.0.0.1', port))
            if server_id.startswith('SSH-2.0-'):
                logger.info('%s started on port %d', sshd, port)
                break
        else:
            raise OTPushTestError(f'Could not start {sshd}')

        yield SSHServerInfo(proc, '127.0.0.1', port)
    finally:
        if proc is not None and proc.poll() is None:
            logger.debug('Stopping sshd process %d', proc.pid)
            proc.terminate()


def have_gpg_support():
    if shutil.which('gpg') is None:
        return False

    with TemporaryDirectory() as tempdir:
        repo = OSTree.Repo.new(Gio.File.new_for_path(tempdir))
        try:
            repo.gpg_sign_data(GLib.Bytes.new(b''), GLib.Bytes.new(b''),
                               [], None)
        except GLib.Error as ex:
            assert 'GPG feature is disabled' in ex.message
            return False

    return True


needs_gpg = pytest.mark.skipif(
    not have_gpg_support(), reason='gpg support required'
)


def get_gnupg_version():
    """Get the GnuPG version from gpg"""
    proc = subprocess.run(['gpg', '--version'], stdout=subprocess.PIPE,
                          check=True)
    lines = proc.stdout.decode('utf-8').splitlines()
    if len(lines) == 0:
        raise OTPushTestError(
            'No version information from gpg --version'
        )
    version = lines[0].split()[-1]
    logger.debug('Found GnuPG version %s', version)
    return version


def kill_gpg_agent(gpg_homedir):
    """Kill gpg-agent in GPG homedir"""
    version = get_gnupg_version()
    version_parts = version.split('.')
    if len(version_parts) < 3:
        raise OTPushTestError(
            f'GnuPG version {version} has less than 3 components'
        )
    major, minor, patch = map(int, version_parts[0:3])

    # If gpg is at least version 2.1.17, gpg-agent (properly) watches
    # the homedir with an inotify watch and exits when it's deleted.
    if (
        major > 2 or
        (major == 2 and minor > 1) or
        (major == 2 and minor == 1 and patch >= 17)
    ):
        logger.debug('GnuPG >= 2.1.17, skipping gpg-agent cleanup')
        return

    # Otherwise, it needs to be told to exit. Note that this is
    # asynchronous. If the caller is deleting the homedir, there can be
    # failures as both the caller and gpg-agent try to delete the
    # sockets concurrently.
    cmd = (
        'gpg-connect-agent', '--homedir', str(gpg_homedir),
        'killagent', '/bye'
    )
    logger.debug('Killing GnuPG agent: %s', ' '.join(cmd))
    subprocess.run(cmd, check=True)


def get_ostree_ed25519_sign():
    """Retrieve an OSTree.Sign instance for ed25519 keys"""
    return OSTree.Sign.get_by_name('ed25519')


def have_ed25519_support():
    """Check whether ostree was compiled with ed25519 signature support"""
    try:
        get_ostree_ed25519_sign()
    except (AttributeError, GLib.GError):
        return False
    return True


needs_ed25519 = pytest.mark.skipif(
    not have_ed25519_support(), reason='ed25519 support required'
)

ostree-push-1.2.0/tox.ini

[tox]
envlist = py3
requires =
    setuptools >= 40.8.0

[testenv]
deps =
    pytest
commands =
    pytest {posargs}
# This is used so that processes launched by the test sshd use the correct
# python from the virtualenv.
setenv =
    TOXBINDIR = {envbindir}
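# Typical invocation (a hint, not from upstream docs): run "tox" from the
# source checkout to execute pytest in the py3 env; extra pytest arguments
# can be passed through posargs, e.g. "tox -- -k receive".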