pax_global_header00006660000000000000000000000064146325373570014531gustar00rootroot0000000000000052 comment=aafc04909d9900449760fc734873fbddbb1a428f rndusr-torf-cli-688c8c0/000077500000000000000000000000001463253735700151305ustar00rootroot00000000000000rndusr-torf-cli-688c8c0/.coveragerc000066400000000000000000000000401463253735700172430ustar00rootroot00000000000000[html] directory = /tmp/htmlcov rndusr-torf-cli-688c8c0/.flake8000066400000000000000000000015321463253735700163040ustar00rootroot00000000000000[flake8] ignore = # visually indented line with same indent as next logical line E129, # missing whitespace before ':' E203, # multiple spaces before operator E221, # missing whitespace after ',' E231, # too many leading '#' for block comment E266, # multiple spaces after keyword E271, # multiple spaces before keyword E272, # line too long E501, # expected 2 blank lines E302, # too many blank lines E303, # expected 2 blank lines after class or function definition E305, # multiple spaces after ',' E241, # multiple statements on one line (colon) E701, # multiple statements on one line (def) E704, # line break before binary operator W503, # line break after binary operator W504, # invalid escape sequence '\ ' W605, rndusr-torf-cli-688c8c0/.gitignore000066400000000000000000000002511463253735700171160ustar00rootroot00000000000000# Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] *.egg-info/ # Pytest cache .cache .pytest_cache .tox .python-version # Virtual environment venv pypirc rndusr-torf-cli-688c8c0/CHANGELOG000066400000000000000000000124231463253735700163440ustar00rootroot000000000000002024-06-13 5.2.1 - Exclude tests from package 2024-03-25 5.2.0 - New option: --merge JSON: Add custom information to a torrent's metainfo or remove arbitrary information from it 2023-04-29 5.1.0 - New option: --creator 2023-04-13 5.0.0 - Include profile name (if given) in torrent file name by default 2022-12-17 4.0.4 - Bugfix: Fix --max-piece-size 2022-07-05 4.0.3 - 
Bugfix: Fix --help output: Short form of --nocreator is now -A. 2022-06-19 4.0.2 - Bugfix: --max-piece-size was ignored with reused torrents. 2022-06-02 4.0.1 - Bugfix: --nomagnet was ignored in info mode. - Include magnet link in output even if input is also a magnet link. Only exclude it with --nomagnet. 2022-05-05 4.0.0 - New option: --reuse copies piece hashes from an existing torrent file if it contains the same files. If given a directory, it is recursively searched for a matching torrent file. - The short flag -R for --nocreator was renamed to -A so -R can be used for --noreuse. - When verifying files against a torrent in previous versions, each file was only verified up to the first corrupt piece unless --verbose was given. Now all files are always completely verified. - The "Files" field in --json output is now a list of objects like this: {"Path": , "Size": } - Bugfix: The --max-piece-size parameter was always used as the actual piece size, even if it was much too large for the given content. 2021-02-20 3.4.0 - New option: --threads 2020-08-11 3.3.0 - Always display which tier a tracker belongs to unless the whole torrent has only one tracker. - New options: --include and --include-regex include files that match exclude patterns. - Bugfix: --exclude-regex was ignored when editing an existing torrent. 2020-06-21 3.2.0 - Support for reading magnet URIs was added, e.g. "torf -i 'magnet:...' -o foo.torrent". The missing information is downloaded from the parameters "xs", "as", "ws" and "tr", if possible. Support for DHT and UDP trackers is not implemented. - The --in option now supports "-" as a parameter to read a torrent or magnet URI from stdin. - When verifying torrent content, a trailing slash in PATH automatically appends the torrent's name to PATH. For example, "torf -i foo.torrent path/to/foo" is identical to "torf -i foo.torrent path/to/" while "torf -i foo.torrent path/to" looks for foo.torrent's files in "path/to". 
- --in, --out and --name are now illegal in config files. 2020-04-07 3.1.1 - Bugfix: Allow all torf 3.*.* version, not just 3.0.0. 2020-04-02 3.1.0 - Huge performance increase due to multithreading. - Verify a torrent's content: torf -i content.torrent path/to/content - Progress is now reported in two lines with more information. - New option: --metainfo prints a JSON object of the torrent's metainfo. - New option: --json prints a JSON object of the regular output. - New option: --verbose shows file sizes in plain bytes, verifies file content more thoroughly, etc. - New option: --exclude-regex excludes files that match a regular expression. - --exclude and --exclude-regex patterns are now matched against the complete relative path within the torrent instead of individual path segments. - Support for multiple tiers of announce URLs when creating torrents. - Exit codes have changed and are now properly documented in the man page. - Bugfix: --max-piece-size can now set piece sizes larger than 16 MiB. 2019-06-03 3.0.1 - Fixed minor bug that caused trailing zeros to be removed from numbers, e.g. "10 GiB" was displayed as "1 GiB" 2019-04-04 3.0.0 - Use proper version number scheme. - Fixed "--exclude requires PATH" error when editing a torrent with global "exclude" options in the config file. - New options: --source to add a "source" field to the torrent and --nosource to remove it from an existing torrent. - New option: --max-piece-size optionally limits the piece size. 2018-06-19 2.0 - Support for default arguments and special profiles in ~/.config/torf/config or any file specified by --config or -f. - Use \e[0E instead of \e[1` to clear the line when showing progress. (marcelpaulo) - If output is not a TTY, "Progress ..." lines are not cleared but followed by a newline character and the rest of the line is parsable like the other output. - Long or multiline torrent file comments are now properly indented. 
- --exclude patterns are now matched against each directory/file name in a path instead of just the file name. - Torrent file and magnet link are now both created by default, and the --magnet option was replaced by --nomagnet and --notorrent. - In the output 'Magnet URI' was shortened to 'Magnet', 'Torrent File' was shortened to 'Torrent' and 'Creation Date' was shortened to 'Created'. - The default for --date was changed from 'today' to 'now'. 2018-04-08 1.1 - Major rewrite with lots of tests that should fix the most obvious bugs - The options --source and --nosource have been removed - The option --nocreator has been added - Output is now easier to parse when stdout is not a TTY (e.g. when piping to grep, cut, awk, etc) 2018-02-01 1.0 Final release 2018-01-15 1.0a1 Initial release rndusr-torf-cli-688c8c0/LICENSE000066400000000000000000001045131463253735700161410ustar00rootroot00000000000000 GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. 
Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. 
If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. 
Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. 
However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. 
Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. 
b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. 
b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. 
A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. 
But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. 
If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. 
Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. 
The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. 
"Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. 
If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. 
If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. 
If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively state the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. <one line to give the program's name and a brief idea of what it does.> Copyright (C) <year> <name of author> This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <https://www.gnu.org/licenses/>. Also add information on how to contact you by electronic and paper mail. If the program does terminal interaction, make it output a short notice like this when it starts in an interactive mode: <program> Copyright (C) <year> <name of author> This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. 
The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, your program's commands might be different; for a GUI interface, you would use an "about box". You should also get your employer (if you work as a programmer) or school, if any, to sign a "copyright disclaimer" for the program, if necessary. For more information on this, and how to apply and follow the GNU GPL, see . The GNU General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. But first, please read . rndusr-torf-cli-688c8c0/Makefile000066400000000000000000000012001463253735700165610ustar00rootroot00000000000000PYTHON?=python3 VENV_PATH?=venv MANPAGE ?= docs/torf.1 MANPAGE_HTML ?= docs/torf.1.html MANPAGE_SRC ?= docs/torf.1.asciidoc .PHONY: clean man release clean: find . -name "*.pyc" -delete find . -name "__pycache__" -delete rm -rf dist build rm -rf .pytest_cache .cache rm -rf $(MANDIR) rm -rf "$(VENV_PATH)" rm -rf .tox venv: "$(PYTHON)" -m venv "$(VENV_PATH)" "$(VENV_PATH)"/bin/pip install --editable '.[dev]' man: asciidoctor $(MANPAGE_SRC) -o $(MANPAGE) --doctype=manpage --backend=manpage asciidoctor $(MANPAGE_SRC) -o $(MANPAGE_HTML) --doctype=manpage --backend=html release: man pyrelease CHANGELOG ./torfcli/_vars.py rndusr-torf-cli-688c8c0/README.rst000066400000000000000000000175131463253735700166260ustar00rootroot00000000000000torf-cli ======== torf-cli is a command line tool that can create, read and edit torrent files and magnet URIs. It can also verify a file system path against a torrent and provide detailed errors. 
When creating a torrent, it can find an existing torrent with the same files and copy its piece hashes to the freshly created torrent to avoid hashing the files again. The output is pleasant to read for humans or easy to parse with common CLI tools. An optional configuration file specifies custom default options and profiles that combine commonly used options. Documentation is available as a man page, or you can `read it here `_. The only dependencies are `torf `_ and `pyxdg `_. Examples -------- Create private torrent with two trackers and a specific creation date: .. code:: sh $ torf ./docs -t http://bar:123/announce -t http://baz:321/announce \ --private --date '2020-03-31 21:23:42' Name docs Size 74.43 KiB Created 2020-03-31 21:23:42 Created By torf 3.1.0 Private yes Trackers http://bar:123/announce http://baz:321/announce Piece Size 16 KiB Piece Count 5 File Count 3 Files docs ├─torf.1 [14.53 KiB] ├─torf.1.asciidoc [10.56 KiB] └─torf.1.html [49.34 KiB] Progress 100.00 % | 0:00:00 total | 72.69 MiB/s Info Hash 0a9dfcf07feb2a82da11b509e8929266d8510a02 Magnet magnet:?xt=urn:btih:0a9dfcf07feb2a82da11b509e8929266d8510a02&dn=docs&xl=76217&tr=http%3A%2F%2Fbar%3A123%2Fannounce&tr=http%3A%2F%2Fbaz%3A321%2Fannounce Torrent docs.torrent Display information about an existing torrent: .. code:: sh $ torf -i docs.torrent Name docs Info Hash 0a9dfcf07feb2a82da11b509e8929266d8510a02 Size 74.43 KiB Created 2020-03-31 21:23:42 Created By torf 3.1.0 Private yes Trackers http://bar:123/announce http://baz:321/announce Piece Size 16 KiB Piece Count 5 File Count 3 Files docs ├─torf.1 [14.53 KiB] ├─torf.1.asciidoc [10.56 KiB] └─torf.1.html [49.34 KiB] Magnet magnet:?xt=urn:btih:0a9dfcf07feb2a82da11b509e8929266d8510a02&dn=docs&xl=76217&tr=http%3A%2F%2Fbar%3A123%2Fannounce&tr=http%3A%2F%2Fbaz%3A321%2Fannounce Quickly add a comment to an existing torrent: .. code:: sh $ torf -i docs.torrent --comment 'Forgot to add this comment.' 
-o docs.revised.torrent Name docs Info Hash 0a9dfcf07feb2a82da11b509e8929266d8510a02 Size 74.43 KiB Comment Forgot to add this comment. Created 2020-03-31 21:23:42 Created By torf 3.1.0 Private yes Trackers http://bar:123/announce http://baz:321/announce Piece Size 16 KiB Piece Count 5 File Count 3 Files docs ├─torf.1 [14.53 KiB] ├─torf.1.asciidoc [10.56 KiB] └─torf.1.html [49.34 KiB] Magnet magnet:?xt=urn:btih:0a9dfcf07feb2a82da11b509e8929266d8510a02&dn=docs&xl=76217&tr=http%3A%2F%2Fbar%3A123%2Fannounce&tr=http%3A%2F%2Fbaz%3A321%2Fannounce Torrent docs.revised.torrent Verify the files in ``docs``: .. code:: sh $ torf -i docs.revised.torrent docs Name docs Info Hash 0a9dfcf07feb2a82da11b509e8929266d8510a02 Size 74.43 KiB Comment Forgot to add this comment. Created 2020-03-31 21:23:42 Created By torf 3.1.0 Private yes Trackers http://bar:123/announce http://baz:321/announce Piece Size 16 KiB Piece Count 5 File Count 3 Files docs ├─torf.1 [14.53 KiB] ├─torf.1.asciidoc [10.56 KiB] └─torf.1.html [49.34 KiB] Path docs Info Hash 0a9dfcf07feb2a82da11b509e8929266d8510a02 Error docs/torf.1.html: Too big: 50523 instead of 50522 bytes Error Corruption in piece 2, at least one of these files is corrupt: docs/torf.1.asciidoc docs/torf.1.html Progress 100.00 % | 0:00:00 total | 72.69 MiB/s torf: docs does not satisfy docs.revised.torrent Get a list of files via ``grep`` and ``cut``: .. code:: sh $ torf -i docs.revised.torrent | grep '^Files' | cut -f2- docs/torf.1 docs/torf.1.asciidoc docs/torf.1.html # Files are delimited by a horizontal tab (``\t``) Get a list of files via `jq `_: .. code:: sh $ torf -i docs.revised.torrent --json | jq .Files [ "docs/torf.1", "docs/torf.1.asciidoc", "docs/torf.1.html" ] Get metainfo as JSON: .. 
code:: sh $ torf -i docs.revised.torrent -m { "announce": "http://bar:123/announce", "announce-list": [ [ "http://bar:123/announce" ], [ "http://baz:321/announce" ] ], "comment": "Forgot to add this comment.", "created by": "torf 3.1.0", "creation date": 1585682622, "info": { "name": "docs", "piece length": 16384, "private": 1, "files": [ { "length": 14877, "path": [ "torf.1" ] }, { "length": 10818, "path": [ "torf.1.asciidoc" ] }, { "length": 50522, "path": [ "torf.1.html" ] } ] } } Installation ------------ The latest release is available on `PyPI `_ and on `AUR `_. pipx ```` The easiest and cleanest installation method is `pipx `__, which installs each application with all dependencies in a separate virtual environment in ``~/.local/venvs/`` and links the executable to ``~/.local/bin/``. .. code:: sh $ pipx install torf-cli $ pipx upgrade torf-cli $ pipx uninstall torf-cli # Also removes dependencies The only drawback is that, at the time of writing, pipx doesn't make the man page available, but `it's also available here `_. pip ``` The alternative is regular `pip `__, but if you decide to uninstall, you have to manually uninstall the dependencies. .. code:: sh $ pip3 install torf-cli # Installs system-wide (/usr/local/) $ pip3 install --user torf-cli # Installs in your home (~/.local/) The `latest development version `_ is available on GitHub in the master branch. .. code:: sh $ pip3 install [--user] git+https://github.com/rndusr/torf-cli.git Contributing ------------ Bug reports and feature requests are welcome in the `issue tracker `_. License ------- torf-cli is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the `GNU General Public License `_ for more details. rndusr-torf-cli-688c8c0/docs/000077500000000000000000000000001463253735700160605ustar00rootroot00000000000000rndusr-torf-cli-688c8c0/docs/torf.1000066400000000000000000000422031463253735700171150ustar00rootroot00000000000000'\" t .\" Title: torf .\" Author: [see the "AUTHOR(S)" section] .\" Generator: Asciidoctor 2.0.20 .\" Date: 2024-03-25 .\" Manual: \ \& .\" Source: \ \& .\" Language: English .\" .TH "TORF" "1" "2024-03-25" "\ \&" "\ \&" .ie \n(.g .ds Aq \(aq .el .ds Aq ' .ss \n[.ss] 0 .nh .ad l .de URL \fI\\$2\fP <\\$1>\\$3 .. .als MTO URL .if \n[.g] \{\ . mso www.tmac . am URL . ad l . . . am MTO . ad l . . . LINKSTYLE blue R < > .\} .SH "NAME" torf \- command line tool to create, display and edit torrents .SH "SYNOPSIS" .sp \fBtorf\fP \fIPATH\fP [\fIOPTIONS\fP] [\fB\-o\fP \fITORRENT\fP] .br \fBtorf\fP \fB\-i\fP \fIINPUT\fP .br \fBtorf\fP \fB\-i\fP \fIINPUT\fP [\fIOPTIONS\fP] \fB\-o\fP \fITORRENT\fP .br \fBtorf\fP \fB\-i\fP \fITORRENT\fP \fIPATH\fP .br .SH "DESCRIPTION" .sp torf can create, display and edit torrent files and verify the integrity of the files in a torrent. .sp .RS 4 .ie n \{\ \h'-04'\(bu\h'+03'\c .\} .el \{\ . sp -1 . IP \(bu 2.3 .\} \fBtorf\fP \fIPATH\fP [\fIOPTIONS\fP] [\fB\-o\fP \fITORRENT\fP] .br Create the torrent file \fITORRENT\fP from the file or directory \fIPATH\fP. .RE .sp .RS 4 .ie n \{\ \h'-04'\(bu\h'+03'\c .\} .el \{\ . sp -1 . IP \(bu 2.3 .\} \fBtorf\fP \fB\-i\fP \fIINPUT\fP .br Display information stored in the torrent file or magnet URI \fIINPUT\fP. .RE .sp .RS 4 .ie n \{\ \h'-04'\(bu\h'+03'\c .\} .el \{\ . sp -1 . IP \(bu 2.3 .\} \fBtorf\fP \fB\-i\fP \fIINPUT\fP [\fIOPTIONS\fP] \fB\-o\fP \fITORRENT\fP .br Edit the existing torrent file or magnet URI \fIINPUT\fP (e.g. to fix a typo) and create the new torrent file \fITORRENT\fP. 
.if n .sp .RS 4 .it 1 an-trap .nr an-no-space-flag 1 .nr an-break-flag 1 .br .ps +1 .B Warning .ps -1 .br .sp Editing a torrent can change its hash, depending on what is changed, which essentially makes it a new torrent. See OPTIONS to find out whether a certain option will change the hash. .sp .5v .RE .RE .sp .RS 4 .ie n \{\ \h'-04'\(bu\h'+03'\c .\} .el \{\ . sp -1 . IP \(bu 2.3 .\} \fBtorf\fP \fB\-i\fP \fITORRENT\fP \fIPATH\fP .br Verify that the content in \fIPATH\fP matches the metadata in the torrent file \fITORRENT\fP. .sp If \fIPATH\fP ends with a path separator (usually \(lq/\(rq), the name of the torrent (as specified by the metadata in \fITORRENT\fP) is appended. .RE .SH "OPTIONS" .sp Options that start with \fB\-\-no\fP take precedence. .sp \fIPATH\fP .RS 4 The path to the torrent\(cqs content. .RE .sp \fB\-\-in\fP, \fB\-i\fP \fIINPUT\fP .RS 4 Read metainfo from the torrent file or magnet URI \fIINPUT\fP. If \fIINPUT\fP is \(lq\-\(rq and does not exist, the torrent data or magnet URI is read from stdin. .RE .sp \fB\-\-out\fP, \fB\-o\fP \fITORRENT\fP .RS 4 Write to torrent file \fITORRENT\fP. .br Default: \fINAME\fP\fB.torrent\fP .RE .sp \fB\-\-reuse\fP, \fB\-r\fP \fIPATH\fP .RS 4 Copy piece size and piece hashes from existing torrent \fIPATH\fP. The existing torrent must have identical files. If \fIPATH\fP is a directory, it is searched recursively for a matching torrent. This option may be given multiple times. .RE .sp \fB\-\-noreuse\fP, \fB\-R\fP .RS 4 Ignore all \fB\-\-reuse\fP arguments. This is particularly useful if you have reuse paths in your configuration file. .RE .sp \fB\-\-exclude\fP, \fB\-e\fP \fIPATTERN\fP .RS 4 Exclude files from \fIPATH\fP that match the glob pattern \fIPATTERN\fP. This option may be given multiple times. See \fBEXCLUDING FILES\fP. 
.RE .sp \fB\-\-include\fP \fIPATTERN\fP .RS 4 Include files from \fIPATH\fP that match the glob pattern \fIPATTERN\fP even if they match any \fB\-\-exclude\fP or \fB\-\-exclude\-regex\fP patterns. This option may be given multiple times. See \fBEXCLUDING FILES\fP. .RE .sp \fB\-\-exclude\-regex\fP, \fB\-er\fP \fIPATTERN\fP .RS 4 Exclude files from \fIPATH\fP that match the regular expression \fIPATTERN\fP. This option may be given multiple times. See \fBEXCLUDING FILES\fP. .RE .sp \fB\-\-include\-regex\fP, \fB\-ir\fP \fIPATTERN\fP .RS 4 Include files from \fIPATH\fP that match the regular expression \fIPATTERN\fP even if they match any \fB\-\-exclude\fP or \fB\-\-exclude\-regex\fP patterns. This option may be given multiple times. See \fBEXCLUDING FILES\fP. .RE .sp \fB\-\-notorrent\fP, \fB\-N\fP .RS 4 Do not create a torrent file. .RE .sp \fB\-\-nomagnet\fP, \fB\-M\fP .RS 4 Do not create a magnet URI. .RE .sp \fB\-\-name\fP, \fB\-n\fP \fINAME\fP .RS 4 Destination file or directory when the torrent is downloaded. .br Default: Basename of \fIPATH\fP .if n .sp .RS 4 .it 1 an-trap .nr an-no-space-flag 1 .nr an-break-flag 1 .br .ps +1 .B Warning .ps -1 .br .sp When editing, this option changes the info hash and creates a new torrent. .sp .5v .RE .RE .sp \fB\-\-tracker\fP, \fB\-t\fP \fIURL\fP .RS 4 List of comma\-separated announce URLs. This option may be given multiple times for multiple tiers. Clients try all URLs from one tier in random order before moving on to the next tier. .RE .sp \fB\-\-notracker\fP, \fB\-T\fP .RS 4 Remove trackers from an existing torrent. .RE .sp \fB\-\-webseed\fP, \fB\-w\fP \fIURL\fP .RS 4 A webseed URL (BEP19). This option may be given multiple times. .RE .sp \fB\-\-nowebseed\fP, \fB\-W\fP .RS 4 Remove webseeds from an existing torrent. .RE .sp \fB\-\-private\fP, \fB\-p\fP .RS 4 Tell clients to only use tracker(s) for peer discovery, not DHT or PEX. 
.if n .sp .RS 4 .it 1 an-trap .nr an-no-space-flag 1 .nr an-break-flag 1 .br .ps +1 .B Warning .ps -1 .br .sp When editing, this option changes the info hash and creates a new torrent. .sp .5v .RE .RE .sp \fB\-\-noprivate\fP, \fB\-P\fP .RS 4 Allow clients to use trackerless methods like DHT and PEX for peer discovery. .if n .sp .RS 4 .it 1 an-trap .nr an-no-space-flag 1 .nr an-break-flag 1 .br .ps +1 .B Warning .ps -1 .br .sp When editing, this option changes the info hash and creates a new torrent. .sp .5v .RE .RE .sp \fB\-\-comment\fP, \fB\-c\fP \fICOMMENT\fP .RS 4 A comment that is stored in the torrent file. .RE .sp \fB\-\-nocomment\fP, \fB\-C\fP .RS 4 Remove the comment from an existing torrent. .RE .sp \fB\-\-date\fP, \fB\-d\fP \fIDATE\fP .RS 4 The creation date in the format \fIYYYY\fP\fB\-\fP\fIMM\fP\fB\-\fP\fIDD\fP[ \fIHH\fP\fB:\fP\fIMM\fP[\fB:\fP\fISS\fP]], \fBnow\fP for the current time or \fBtoday\fP for today at midnight. .br Default: \fBnow\fP .RE .sp \fB\-\-nodate\fP, \fB\-D\fP .RS 4 Remove the creation date from an existing torrent. .RE .sp \fB\-\-source\fP, \fB\-s\fP \fISOURCE\fP .RS 4 Add a \(lqsource\(rq field to the torrent file. This is usually used to make the torrent\(cqs info hash unique per tracker. .if n .sp .RS 4 .it 1 an-trap .nr an-no-space-flag 1 .nr an-break-flag 1 .br .ps +1 .B Warning .ps -1 .br .sp When editing, this option changes the info hash and creates a new torrent. .sp .5v .RE .RE .sp \fB\-\-merge\fP \fIJSON\fP .RS 4 Update existing metainfo in \fITORRENT\fP with a JSON object. This option may be given multiple times. Fields in \fIJSON\fP that have a value of \f(CRnull\fP (unquoted) are removed in the output \fITORRENT\fP. Adding or removing items from an existing list is not supported. .sp This example adds a \(lqcustom\(rq section to the \(lqinfo\(rq section, removes the \(lqcomment\(rq field and changes \(lqcreation date\(rq. 
.sp .if n .RS 4 .nf .fam C $ torf \-i old.torrent \(rs \-\-merge \*(Aq{"info": {"custom": {"this": "that", "numbers": [1, 2, 3]}}}\*(Aq \(rs \-\-merge \*(Aq{"comment": null, "creation date": 123456789}\*(Aq \(rs \-o new.torrent .fam .fi .if n .RE .sp This also works when creating a torrent. .sp .if n .RS 4 .nf .fam C $ torf path/to/my/files \(rs \-\-merge \*(Aq{"my stuff": {"my": ["s", "e", "c", "r", "e", "t"]}}\*(Aq .fam .fi .if n .RE .if n .sp .RS 4 .it 1 an-trap .nr an-no-space-flag 1 .nr an-break-flag 1 .br .ps +1 .B Warning .ps -1 .br .sp If the \(lqinfo\(rq section is modified, the info hash changes and a new torrent is created. .sp .5v .RE .RE .sp \fB\-\-nosource\fP, \fB\-S\fP .RS 4 Remove the \(lqsource\(rq field from an existing torrent. .if n .sp .RS 4 .it 1 an-trap .nr an-no-space-flag 1 .nr an-break-flag 1 .br .ps +1 .B Warning .ps -1 .br .sp When editing, this option changes the info hash and creates a new torrent. .sp .5v .RE .RE .sp \fB\-\-xseed\fP, \fB\-x\fP .RS 4 Randomize the info hash to help with cross\-seeding. This simply adds an \fBentropy\fP field to the \fBinfo\fP section of the metainfo and sets it to a random integer. .if n .sp .RS 4 .it 1 an-trap .nr an-no-space-flag 1 .nr an-break-flag 1 .br .ps +1 .B Warning .ps -1 .br .sp When editing, this option changes the info hash and creates a new torrent. .sp .5v .RE .RE .sp \fB\-\-noxseed\fP, \fB\-X\fP .RS 4 De\-randomize a previously randomized info hash of an existing torrent. This removes the \fBentropy\fP field from the \fBinfo\fP section of the metainfo. .if n .sp .RS 4 .it 1 an-trap .nr an-no-space-flag 1 .nr an-break-flag 1 .br .ps +1 .B Warning .ps -1 .br .sp When editing, this option changes the info hash and creates a new torrent. .sp .5v .RE .RE .sp \fB\-\-max\-piece\-size\fP \fISIZE\fP .RS 4 The maximum piece size when creating a torrent. SIZE is multiplied by 1 MiB (1048576 bytes). The resulting number must be a multiple of 16 KiB (16384 bytes). 
Use fractions for piece sizes smaller than 1 MiB (e.g. 0.5 for 512 KiB). .RE .sp \fB\-\-creator\fP, \fB\-a\fP \fICREATOR\fP .RS 4 Name and version of the application that created the torrent. .RE .sp \fB\-\-nocreator\fP, \fB\-A\fP .RS 4 Remove the name of the application that created the torrent from an existing torrent. .RE .sp \fB\-\-yes\fP, \fB\-y\fP .RS 4 Answer all yes/no prompts with \(lqyes\(rq. At the moment, all this does is overwrite \fITORRENT\fP without asking. .RE .sp \fB\-\-config\fP, \fB\-f\fP \fIFILE\fP .RS 4 Read command line arguments from configuration FILE. See \fBCONFIGURATION FILE\fP. .br Default: \fI$XDG_CONFIG_HOME\fP\fB/torf/config\fP where \fI$XDG_CONFIG_HOME\fP defaults to \fB~/.config\fP .RE .sp \fB\-\-noconfig\fP, \fB\-F\fP .RS 4 Do not use any configuration file. .RE .sp \fB\-\-profile\fP, \fB\-z\fP \fIPROFILE\fP .RS 4 Use predefined arguments specified in \fIPROFILE\fP. This option may be given multiple times. See \fBCONFIGURATION FILE\fP. .RE .sp \fB\-\-verbose\fP, \fB\-v\fP .RS 4 Produce more output or be more thorough. This option may be given multiple times. .sp .RS 4 .ie n \{\ \h'-04'\(bu\h'+03'\c .\} .el \{\ . sp -1 . IP \(bu 2.3 .\} Display bytes with and without unit prefix, e.g. \(lq1.38 MiB / 1,448,576 B\(rq. .RE .sp .RS 4 .ie n \{\ \h'-04'\(bu\h'+03'\c .\} .el \{\ . sp -1 . IP \(bu 2.3 .\} Any other effects are explained in the relevant arguments\*(Aq documentation. .RE .RE .sp \fB\-\-json\fP, \fB\-j\fP .RS 4 Print information and errors as a JSON object. Progress is not reported. .RE .sp \fB\-\-metainfo\fP, \fB\-m\fP .RS 4 Print the torrent\(cqs metainfo as a JSON object. Byte strings (e.g. \(lqpieces\(rq in the \(lqinfo\(rq section) are encoded in Base64. Progress is not reported. Errors are reported normally on stderr. .sp Unless \fB\-\-verbose\fP is given, any non\-standard fields are excluded and metainfo that doesn\(cqt represent a valid torrent results in an error. 
.sp Unless \fB\-\-verbose\fP is given twice, the \(lqpieces\(rq field in the \(lqinfo\(rq section is excluded. .RE .sp \fB\-\-human\fP, \fB\-u\fP .RS 4 Display information in human\-readable output even if stdout is not a TTY. See \fBPIPING OUTPUT\fP. .RE .sp \fB\-\-nohuman\fP, \fB\-U\fP .RS 4 Display information in machine\-readable output even if stdout is a TTY. See \fBPIPING OUTPUT\fP. .RE .sp \fB\-\-help\fP, \fB\-h\fP .RS 4 Display a short help text and exit. .RE .sp \fB\-\-version\fP, \fB\-V\fP .RS 4 Display the version number and exit. .RE .SH "EXAMPLES" .sp Create \(lqfoo.torrent\(rq with two trackers and don\(cqt store the creation date: .sp .if n .RS 4 .nf .fam C $ torf path/to/foo \(rs \-t http://example.org:6881/announce \(rs \-t http://example.com:6881/announce \(rs \-\-nodate .fam .fi .if n .RE .sp Read \(lqfoo.torrent\(rq and print its metainfo: .sp .if n .RS 4 .nf .fam C $ torf \-i foo.torrent .fam .fi .if n .RE .sp Print only the name: .sp .if n .RS 4 .nf .fam C $ torf \-i foo.torrent | grep \*(Aq^Name\*(Aq | cut \-f2 .fam .fi .if n .RE .sp Change the comment and remove the date from \(lqfoo.torrent\(rq, write the result to \(lqbar.torrent\(rq: .sp .if n .RS 4 .nf .fam C $ torf \-i foo.torrent \-c \*(AqNew comment\*(Aq \-D \-o bar.torrent .fam .fi .if n .RE .sp Check if \(lqpath/to/foo\(rq contains valid data as specified in \(lqbar.torrent\(rq: .sp .if n .RS 4 .nf .fam C $ torf \-i bar.torrent path/to/foo .fam .fi .if n .RE .SH "EXCLUDING FILES" .sp The \fB\-\-exclude\fP option takes a glob pattern that is matched against each file path beneath \fIPATH\fP. Files that match are not included in the torrent. Matching is case\-insensitive. .sp The \fB\-\-exclude\-regex\fP option works like \fB\-\-exclude\fP but it takes a regular expression pattern and it does case\-sensitive matching. 
.sp The \fB\-\-include\fP and \fB\-\-include\-regex\fP options are applied like their excluding counterparts, but any matching files are included even if they match any exclude patterns. .sp File paths start with the torrent\(cqs name (usually the last segment of \fIPATH\fP), e.g. if \fIPATH\fP is \(lq/home/foo/bar\(rq, each file path starts with \(lqbar/\(rq .sp Empty directories and empty files are automatically excluded. .sp Regular expressions should be Perl\-compatible for simple patterns. See .URL "https://docs.python.org/3/library/re.html#regular\-expression\-syntax" "" "" for the complete documentation. .sp Glob patterns support these wildcard characters: .TS allbox tab(:); lt lt. T{ .sp * T}:T{ .sp matches everything T} T{ .sp ? T}:T{ .sp matches any single character T} T{ .sp [\fISEQ\fP] T}:T{ .sp matches any character in \fISEQ\fP T} T{ .sp [!\fISEQ\fP] T}:T{ .sp matches any character not in \fISEQ\fP T} .TE .sp .SH "CONFIGURATION FILE" .sp A configuration file lists long\-form command line options with all leading \(lq\-\(rq characters removed. If an option takes a parameter, \(lq=\(rq is used as a separator. Spaces before and after the \(lq=\(rq are ignored. The parameter may be quoted with single or double quotes to preserve leading and/or trailing spaces. Lines that start with \(lq#\(rq are ignored. .sp All of the options listed in the \fBOPTIONS\fP section are allowed except for \fIPATH\fP, \fBin\fP, \fBout\fP, \fBname\fP, \fBconfig\fP, \fBnoconfig\fP, \fBprofile\fP, \fBhelp\fP, \fBversion\fP. .sp There is rudimental support for environment variables in parameters. As usual, \(lq$FOO\(rq or \(lq${FOO}\(rq is replaced with the value of the variable \fBFOO\fP, \(lq$\(rq is escaped with \(lq\(rs\(rq (backslash) and a literal \(lq\(rs\(rq is represented by two \(lq\(rs\(rq. More complex string manipulation syntax (e.g. \(lq${FOO:3}\(rq) is not supported. 
.SS "Profiles" .sp A profile is a set of options bound to a name that is given to the \fB\-\-profile\fP option. In the configuration file it is specified as \(lq[\fIPROFILE NAME\fP]\(rq followed by a list of options. Profiles inherit any options specified globally at the top of the file, but they can overload them. .SS "Example" .sp This is an example configuration file with some global custom defaults and the two profiles \(lqfoo\(rq and \(lqbar\(rq: .sp .if n .RS 4 .nf .fam C yes nodate exclude = *.txt [foo] tracker = https://foo1/announce tracker = https://foo2/announce private [bar] tracker = https://bar/announce comment = I love bar. .fam .fi .if n .RE .sp With this configuration file, these arguments are always used: .sp .if n .RS 4 .nf .fam C \-\-yes \-\-nodate \-\-exclude \*(Aq*.txt\*(Aq .fam .fi .if n .RE .sp If \(lq\-\-profile foo\(rq is given, it also adds these arguments: .sp .if n .RS 4 .nf .fam C \-\-tracker https://foo1/announce \-\-tracker https://foo2/announce \-\-private .fam .fi .if n .RE .sp If \(lq\-\-profile bar\(rq is given, it also adds these arguments: .sp .if n .RS 4 .nf .fam C \-\-tracker https://bar/announce \-\-comment \*(AqI love bar.\*(Aq .fam .fi .if n .RE .SH "PIPING OUTPUT" .sp If stdout is not a TTY (i.e. when output is piped) or if the \fB\-\-nohuman\fP option is provided, the output format is different: .sp .RS 4 .ie n \{\ \h'-04'\(bu\h'+03'\c .\} .el \{\ . sp -1 . IP \(bu 2.3 .\} Leading spaces are removed from each line. .RE .sp .RS 4 .ie n \{\ \h'-04'\(bu\h'+03'\c .\} .el \{\ . sp -1 . IP \(bu 2.3 .\} The delimiter between label and value as well as between multiple values (files, trackers, etc) is a tab character (\(lq\(rst\(rq or ASCII code 0x9). Trackers are flattened into a one\-dimensional list. .RE .sp .RS 4 .ie n \{\ \h'-04'\(bu\h'+03'\c .\} .el \{\ . sp -1 . IP \(bu 2.3 .\} Numbers are not formatted (UNIX timestamps for times, seconds for time deltas, raw bytes for sizes, etc). 
.RE .SH "EXIT CODES" .sp 1 .RS 4 Anything not specified below .RE .sp 2 .RS 4 Unknown or invalid command line arguments .RE .sp 3 .RS 4 Error while reading or parsing the config file .RE .sp 4 .RS 4 Error while reading a torrent file or content .RE .sp 5 .RS 4 Error while writing a torrent file .RE .sp 6 .RS 4 Error while verifying a torrent\(cqs content .RE .sp 128 .RS 4 Aborted by SIGINT (typically Ctrl\-c was pressed) .RE .SH "REPORTING BUGS" .sp Bug reports, feature requests and poems about hedgehogs are welcome on the .URL "https://github.com/rndusr/torf\-cli/issues" "issue tracker" "."rndusr-torf-cli-688c8c0/docs/torf.1.asciidoc000066400000000000000000000311511463253735700206720ustar00rootroot00000000000000= TORF(1) == NAME torf - command line tool to create, display and edit torrents == SYNOPSIS *torf* _PATH_ [_OPTIONS_] [*-o* _TORRENT_] + *torf* *-i* _INPUT_ + *torf* *-i* _INPUT_ [_OPTIONS_] *-o* _TORRENT_ + *torf* *-i* _TORRENT_ _PATH_ + == DESCRIPTION torf can create, display and edit torrent files and verify the integrity of the files in a torrent. * *torf* _PATH_ [_OPTIONS_] [*-o* _TORRENT_] + Create the torrent file _TORRENT_ from the file or directory _PATH_. * *torf* *-i* _INPUT_ + Display information stored in the torrent file or magnet URI _INPUT_. * *torf* *-i* _INPUT_ [_OPTIONS_] *-o* _TORRENT_ + Edit the existing torrent file or magnet URI _INPUT_ (e.g. to fix a typo) and create the new torrent file _TORRENT_. + WARNING: Editing a torrent can change its hash, depending on what is changed, which essentially makes it a new torrent. See OPTIONS to find out whether a certain option will change the hash. * *torf* *-i* _TORRENT_ _PATH_ + Verify that the content in _PATH_ matches the metadata in the torrent file _TORRENT_. + If _PATH_ ends with a path separator (usually "`/`"), the name of the torrent (as specified by the metadata in _TORRENT_) is appended. == OPTIONS Options that start with *--no* take precedence. 
_PATH_:: The path to the torrent's content. *--in*, *-i* _INPUT_:: Read metainfo from the torrent file or magnet URI _INPUT_. If _INPUT_ is "`-`" and does not exist, the torrent data or magnet URI is read from stdin. *--out*, *-o* _TORRENT_:: Write to torrent file _TORRENT_. + Default: __NAME__**.torrent** *--reuse*, *-r* _PATH_:: Copy piece size and piece hashes from existing torrent _PATH_. The existing torrent must have identical files. If _PATH_ is a directory, it is searched recursively for a matching torrent. This option may be given multiple times. *--noreuse*, *-R*:: Ignore all *--reuse* arguments. This is particularly useful if you have reuse paths in your configuration file. *--exclude*, *-e* _PATTERN_:: Exclude files from _PATH_ that match the glob pattern _PATTERN_. This option may be given multiple times. See *EXCLUDING FILES*. *--include* _PATTERN_:: Include files from _PATH_ that match the glob pattern _PATTERN_ even if they match any *--exclude* or *--exclude-regex* patterns. This option may be given multiple times. See *EXCLUDING FILES*. *--exclude-regex*, *-er* _PATTERN_:: Exclude files from _PATH_ that match the regular expression _PATTERN_. This option may be given multiple times. See *EXCLUDING FILES*. *--include-regex*, *-ir* _PATTERN_:: Include files from _PATH_ that match the regular expression _PATTERN_ even if they match any *--exclude* or *--exclude-regex* patterns. This option may be given multiple times. See *EXCLUDING FILES*. *--notorrent*, *-N*:: Do not create a torrent file. *--nomagnet*, *-M*:: Do not create a magnet URI. *--name*, *-n* _NAME_:: Destination file or directory when the torrent is downloaded. + Default: Basename of _PATH_ + WARNING: When editing, this option changes the info hash and creates a new torrent. *--tracker*, *-t* _URL_:: List of comma-separated announce URLs. This option may be given multiple times for multiple tiers. Clients try all URLs from one tier in random order before moving on to the next tier. 
*--notracker*, *-T*:: Remove trackers from an existing torrent. *--webseed*, *-w* _URL_:: A webseed URL (BEP19). This option may be given multiple times. *--nowebseed*, *-W*:: Remove webseeds from an existing torrent. *--private*, *-p*:: Tell clients to only use tracker(s) for peer discovery, not DHT or PEX. + WARNING: When editing, this option changes the info hash and creates a new torrent. *--noprivate*, *-P*:: Allow clients to use trackerless methods like DHT and PEX for peer discovery. + WARNING: When editing, this option changes the info hash and creates a new torrent. *--comment*, *-c* _COMMENT_:: A comment that is stored in the torrent file. *--nocomment*, *-C*:: Remove the comment from an existing torrent. *--date*, *-d* _DATE_:: The creation date in the format __YYYY__**-**__MM__**-**__DD__[ __HH__**:**__MM__[**:**__SS__]], *now* for the current time or *today* for today at midnight. + Default: *now* *--nodate*, *-D*:: Remove the creation date from an existing torrent. *--source*, *-s* _SOURCE_:: Add a "`source`" field to the torrent file. This is usually used to make the torrent's info hash unique per tracker. + WARNING: When editing, this option changes the info hash and creates a new torrent. *--merge* _JSON_:: Update existing metainfo in _TORRENT_ with a JSON object. This option may be given multiple times. Fields in _JSON_ that have a value of `null` (unquoted) are removed in the output _TORRENT_. Adding or removing items from an existing list is not supported. + This example adds add a "`custom`" section to the "`info`" section, removes the "`comment`" field and changes "`creation date`". + $ torf -i old.torrent \ --merge '{"info": {"custom": {"this": "that", "numbers": [1, 2, 3]}}}' \ --merge '{"comment": null, "creation date": 123456789}' \ -o new.torrent + This also works when creating a torrent. 
+ $ torf path/to/my/files \ --merge '{"my stuff": {"my": ["s", "e", "c", "r", "e", "t"]}}' + WARNING: If the "`info`" section is modified, the info hash changes and a new torrent is created. *--nosource*, *-S*:: Remove the "`source`" field from an existing torrent. + WARNING: When editing, this option changes the info hash and creates a new torrent. *--xseed*, *-x*:: Randomize the info hash to help with cross-seeding. This simply adds an *entropy* field to the *info* section of the metainfo and sets it to a random integer. + WARNING: When editing, this option changes the info hash and creates a new torrent. *--noxseed*, *-X*:: De-randomize a previously randomized info hash of an existing torrent. This removes the *entropy* field from the *info* section of the metainfo. + WARNING: When editing, this option changes the info hash and creates a new torrent. *--max-piece-size* _SIZE_:: The maximum piece size when creating a torrent. SIZE is multiplied by 1 MiB (1048576 bytes). The resulting number must be a multiple of 16 KiB (16384 bytes). Use fractions for piece sizes smaller than 1 MiB (e.g. 0.5 for 512 KiB). *--creator*, *-a* _CREATOR_:: Name and version of the application that created the torrent. *--nocreator*, *-A*:: Remove the name of the application that created the torrent from an existing torrent. *--yes*, *-y*:: Answer all yes/no prompts with "`yes`". At the moment, all this does is overwrite _TORRENT_ without asking. *--config*, *-f* _FILE_:: Read command line arguments from configuration FILE. See *CONFIGURATION FILE*. + Default: __$XDG_CONFIG_HOME__**/torf/config** where _$XDG_CONFIG_HOME_ defaults to *~/.config* *--noconfig*, *-F*:: Do not use any configuration file. *--profile*, *-z* _PROFILE_:: Use predefined arguments specified in _PROFILE_. This option may be given multiple times. See *CONFIGURATION FILE*. *--verbose*, *-v*:: Produce more output or be more thorough. This option may be given multiple times. 
+ * Display bytes with and without unit prefix, e.g. "`1.38 MiB / 1,448,576 B`". * Any other effects are explained in the relevant arguments' documentation. *--json*, *-j*:: Print information and errors as a JSON object. Progress is not reported. *--metainfo*, *-m*:: Print the torrent's metainfo as a JSON object. Byte strings (e.g. "`pieces`" in the "`info`" section) are encoded in Base64. Progress is not reported. Errors are reported normally on stderr. + Unless *--verbose* is given, any non-standard fields are excluded and metainfo that doesn't represent a valid torrent results in an error. + Unless *--verbose* is given twice, the "`pieces`" field in the "`info`" section is excluded. *--human*, *-u*:: Display information in human-readable output even if stdout is not a TTY. See *PIPING OUTPUT*. *--nohuman*, *-U*:: Display information in machine-readable output even if stdout is a TTY. See *PIPING OUTPUT*. *--help*, *-h*:: Display a short help text and exit. *--version*, *-V*:: Display the version number and exit. == EXAMPLES Create "`foo.torrent`" with two trackers and don't store the creation date: $ torf path/to/foo \ -t http://example.org:6881/announce \ -t http://example.com:6881/announce \ --nodate Read "`foo.torrent`" and print its metainfo: $ torf -i foo.torrent Print only the name: $ torf -i foo.torrent | grep '^Name' | cut -f2 Change the comment and remove the date from "`foo.torrent`", write the result to "`bar.torrent`": $ torf -i foo.torrent -c 'New comment' -D -o bar.torrent Check if "`path/to/foo`" contains valid data as specified in "`bar.torrent`": $ torf -i bar.torrent path/to/foo == EXCLUDING FILES The *--exclude* option takes a glob pattern that is matched against each file path beneath _PATH_. Files that match are not included in the torrent. Matching is case-insensitive. The *--exclude-regex* option works like *--exclude* but it takes a regular expression pattern and it does case-sensitive matching. 
The *--include* and *--include-regex* options are applied like their excluding counterparts, but any matching files are included even if they match any exclude patterns. File paths start with the torrent's name (usually the last segment of _PATH_), e.g. if _PATH_ is "`/home/foo/bar`", each file path starts with "`bar/`" Empty directories and empty files are automatically excluded. Regular expressions should be Perl-compatible for simple patterns. See https://docs.python.org/3/library/re.html#regular-expression-syntax for the complete documentation. Glob patterns support these wildcard characters: [%autowidth, frame=none, grid=none, cols=">,<"] |=== | * |matches everything | ? |matches any single character | [_SEQ_] |matches any character in _SEQ_ | [!_SEQ_] |matches any character not in _SEQ_ |=== == CONFIGURATION FILE A configuration file lists long-form command line options with all leading "`-`" characters removed. If an option takes a parameter, "`=`" is used as a separator. Spaces before and after the "`=`" are ignored. The parameter may be quoted with single or double quotes to preserve leading and/or trailing spaces. Lines that start with "`#`" are ignored. All of the options listed in the *OPTIONS* section are allowed except for _PATH_, *in*, *out*, *name*, *config*, *noconfig*, *profile*, *help*, *version*. There is rudimental support for environment variables in parameters. As usual, "`$FOO`" or "`${FOO}`" is replaced with the value of the variable *FOO*, "`$`" is escaped with "`\`" (backslash) and a literal "`\`" is represented by two "`\`". More complex string manipulation syntax (e.g. "`${FOO:3}`") is not supported. === Profiles A profile is a set of options bound to a name that is given to the *--profile* option. In the configuration file it is specified as "`[_PROFILE NAME_]`" followed by a list of options. Profiles inherit any options specified globally at the top of the file, but they can overload them. 
=== Example This is an example configuration file with some global custom defaults and the two profiles "`foo`" and "`bar`": ---- yes nodate exclude = *.txt [foo] tracker = https://foo1/announce tracker = https://foo2/announce private [bar] tracker = https://bar/announce comment = I love bar. ---- With this configuration file, these arguments are always used: --yes --nodate --exclude '*.txt' If "`--profile foo`" is given, it also adds these arguments: --tracker https://foo1/announce --tracker https://foo2/announce --private If "`--profile bar`" is given, it also adds these arguments: --tracker https://bar/announce --comment 'I love bar.' == PIPING OUTPUT If stdout is not a TTY (i.e. when output is piped) or if the *--nohuman* option is provided, the output format is different: - Leading spaces are removed from each line. - The delimiter between label and value as well as between multiple values (files, trackers, etc) is a tab character ("`\t`" or ASCII code 0x9). Trackers are flattened into a one-dimensional list. - Numbers are not formatted (UNIX timestamps for times, seconds for time deltas, raw bytes for sizes, etc). == EXIT CODES 1:: Anything not specified below 2:: Unknown or invalid command line arguments 3:: Error while reading or parsing the config file 4:: Error while reading a torrent file or content 5:: Error while writing a torrent file 6:: Error while verifying a torrent's content 128:: Aborted by SIGINT (typically Ctrl-c was pressed) == REPORTING BUGS Bug reports, feature requests and poems about hedgehogs are welcome on the https://github.com/rndusr/torf-cli/issues[issue tracker]. rndusr-torf-cli-688c8c0/docs/torf.1.html000066400000000000000000001471271463253735700200730ustar00rootroot00000000000000 TORF(1)

SYNOPSIS

torf PATH [OPTIONS] [-o TORRENT]
torf -i INPUT
torf -i INPUT [OPTIONS] -o TORRENT
torf -i TORRENT PATH

DESCRIPTION

torf can create, display and edit torrent files and verify the integrity of the files in a torrent.

  • torf PATH [OPTIONS] [-o TORRENT]
    Create the torrent file TORRENT from the file or directory PATH.

  • torf -i INPUT
    Display information stored in the torrent file or magnet URI INPUT.

  • torf -i INPUT [OPTIONS] -o TORRENT
    Edit the existing torrent file or magnet URI INPUT (e.g. to fix a typo) and create the new torrent file TORRENT.

    Warning
    Editing a torrent can change its hash, depending on what is changed, which essentially makes it a new torrent. See OPTIONS to find out whether a certain option will change the hash.
  • torf -i TORRENT PATH
    Verify that the content in PATH matches the metadata in the torrent file TORRENT.

    If PATH ends with a path separator (usually “/”), the name of the torrent (as specified by the metadata in TORRENT) is appended.

OPTIONS

Options that start with --no take precedence.

PATH

The path to the torrent’s content.

--in, -i INPUT

Read metainfo from the torrent file or magnet URI INPUT. If INPUT is “-” and does not exist, the torrent data or magnet URI is read from stdin.

--out, -o TORRENT

Write to torrent file TORRENT.
Default: NAME.torrent

--reuse, -r PATH

Copy piece size and piece hashes from existing torrent PATH. The existing torrent must have identical files. If PATH is a directory, it is searched recursively for a matching torrent. This option may be given multiple times.

--noreuse, -R

Ignore all --reuse arguments. This is particularly useful if you have reuse paths in your configuration file.

--exclude, -e PATTERN

Exclude files from PATH that match the glob pattern PATTERN. This option may be given multiple times. See EXCLUDING FILES.

--include PATTERN

Include files from PATH that match the glob pattern PATTERN even if they match any --exclude or --exclude-regex patterns. This option may be given multiple times. See EXCLUDING FILES.

--exclude-regex, -er PATTERN

Exclude files from PATH that match the regular expression PATTERN. This option may be given multiple times. See EXCLUDING FILES.

--include-regex, -ir PATTERN

Include files from PATH that match the regular expression PATTERN even if they match any --exclude or --exclude-regex patterns. This option may be given multiple times. See EXCLUDING FILES.

--notorrent, -N

Do not create a torrent file.

--nomagnet, -M

Do not create a magnet URI.

--name, -n NAME

Destination file or directory when the torrent is downloaded.
Default: Basename of PATH

Warning
When editing, this option changes the info hash and creates a new torrent.
--tracker, -t URL

List of comma-separated announce URLs. This option may be given multiple times for multiple tiers. Clients try all URLs from one tier in random order before moving on to the next tier.

--notracker, -T

Remove trackers from an existing torrent.

--webseed, -w URL

A webseed URL (BEP19). This option may be given multiple times.

--nowebseed, -W

Remove webseeds from an existing torrent.

--private, -p

Tell clients to only use tracker(s) for peer discovery, not DHT or PEX.

Warning
When editing, this option changes the info hash and creates a new torrent.
--noprivate, -P

Allow clients to use trackerless methods like DHT and PEX for peer discovery.

Warning
When editing, this option changes the info hash and creates a new torrent.
--comment, -c COMMENT

A comment that is stored in the torrent file.

--nocomment, -C

Remove the comment from an existing torrent.

--date, -d DATE

The creation date in the format YYYY-MM-DD[ HH:MM[:SS]], now for the current time or today for today at midnight.
Default: now

--nodate, -D

Remove the creation date from an existing torrent.

--source, -s SOURCE

Add a “source” field to the torrent file. This is usually used to make the torrent’s info hash unique per tracker.

Warning
When editing, this option changes the info hash and creates a new torrent.
--merge JSON

Update existing metainfo in TORRENT with a JSON object. This option may be given multiple times. Fields in JSON that have a value of null (unquoted) are removed in the output TORRENT. Adding or removing items from an existing list is not supported.

This example adds add a “custom” section to the “info” section, removes the “comment” field and changes “creation date”.

$ torf -i old.torrent \
       --merge '{"info": {"custom": {"this": "that", "numbers": [1, 2, 3]}}}' \
       --merge '{"comment": null, "creation date": 123456789}' \
       -o new.torrent

This also works when creating a torrent.

$ torf path/to/my/files \
       --merge '{"my stuff": {"my": ["s", "e", "c", "r", "e", "t"]}}'
Warning
If the “info” section is modified, the info hash changes and a new torrent is created.
--nosource, -S

Remove the “source” field from an existing torrent.

Warning
When editing, this option changes the info hash and creates a new torrent.
--xseed, -x

Randomize the info hash to help with cross-seeding. This simply adds an entropy field to the info section of the metainfo and sets it to a random integer.

Warning
When editing, this option changes the info hash and creates a new torrent.
--noxseed, -X

De-randomize a previously randomized info hash of an existing torrent. This removes the entropy field from the info section of the metainfo.

Warning
When editing, this option changes the info hash and creates a new torrent.
--max-piece-size SIZE

The maximum piece size when creating a torrent. SIZE is multiplied by 1 MiB (1048576 bytes). The resulting number must be a multiple of 16 KiB (16384 bytes). Use fractions for piece sizes smaller than 1 MiB (e.g. 0.5 for 512 KiB).

--creator, -a CREATOR

Name and version of the application that created the torrent.

--nocreator, -A

Remove the name of the application that created the torrent from an existing torrent.

--yes, -y

Answer all yes/no prompts with “yes”. At the moment, all this does is overwrite TORRENT without asking.

--config, -f FILE

Read command line arguments from configuration FILE. See CONFIGURATION FILE.
Default: $XDG_CONFIG_HOME/torf/config where $XDG_CONFIG_HOME defaults to ~/.config

--noconfig, -F

Do not use any configuration file.

--profile, -z PROFILE

Use predefined arguments specified in PROFILE. This option may be given multiple times. See CONFIGURATION FILE.

--verbose, -v

Produce more output or be more thorough. This option may be given multiple times.

  • Display bytes with and without unit prefix, e.g. “1.38 MiB / 1,448,576 B”.

  • Any other effects are explained in the relevant arguments' documentation.

--json, -j

Print information and errors as a JSON object. Progress is not reported.

--metainfo, -m

Print the torrent’s metainfo as a JSON object. Byte strings (e.g. “pieces” in the “info” section) are encoded in Base64. Progress is not reported. Errors are reported normally on stderr.

Unless --verbose is given, any non-standard fields are excluded and metainfo that doesn’t represent a valid torrent results in an error.

Unless --verbose is given twice, the “pieces” field in the “info” section is excluded.

--human, -u

Display information in human-readable output even if stdout is not a TTY. See PIPING OUTPUT.

--nohuman, -U

Display information in machine-readable output even if stdout is a TTY. See PIPING OUTPUT.

--help, -h

Display a short help text and exit.

--version, -V

Display the version number and exit.

EXAMPLES

Create “foo.torrent” with two trackers and don’t store the creation date:

$ torf path/to/foo \
       -t http://example.org:6881/announce \
       -t http://example.com:6881/announce \
       --nodate

Read “foo.torrent” and print its metainfo:

$ torf -i foo.torrent

Print only the name:

$ torf -i foo.torrent | grep '^Name' | cut -f2

Change the comment and remove the date from “foo.torrent”, write the result to “bar.torrent”:

$ torf -i foo.torrent -c 'New comment' -D -o bar.torrent

Check if “path/to/foo” contains valid data as specified in “bar.torrent”:

$ torf -i bar.torrent path/to/foo

EXCLUDING FILES

The --exclude option takes a glob pattern that is matched against each file path beneath PATH. Files that match are not included in the torrent. Matching is case-insensitive.

The --exclude-regex option works like --exclude but it takes a regular expression pattern and it does case-sensitive matching.

The --include and --include-regex options are applied like their excluding counterparts, but any matching files are included even if they match any exclude patterns.

File paths start with the torrent’s name (usually the last segment of PATH), e.g. if PATH is “/home/foo/bar”, each file path starts with “bar/”

Empty directories and empty files are automatically excluded.

Regular expressions should be Perl-compatible for simple patterns. See https://docs.python.org/3/library/re.html#regular-expression-syntax for the complete documentation.

Glob patterns support these wildcard characters:

*

matches everything

?

matches any single character

[SEQ]

matches any character in SEQ

[!SEQ]

matches any character not in SEQ

CONFIGURATION FILE

A configuration file lists long-form command line options with all leading “-” characters removed. If an option takes a parameter, “=” is used as a separator. Spaces before and after the “=” are ignored. The parameter may be quoted with single or double quotes to preserve leading and/or trailing spaces. Lines that start with “#” are ignored.

All of the options listed in the OPTIONS section are allowed except for PATH, in, out, name, config, noconfig, profile, help, version.

There is rudimental support for environment variables in parameters. As usual, “$FOO” or “${FOO}” is replaced with the value of the variable FOO, “$” is escaped with “\” (backslash) and a literal “\” is represented by two “\”. More complex string manipulation syntax (e.g. “${FOO:3}”) is not supported.

Profiles

A profile is a set of options bound to a name that is given to the --profile option. In the configuration file it is specified as “[PROFILE NAME]” followed by a list of options. Profiles inherit any options specified globally at the top of the file, but they can overload them.

Example

This is an example configuration file with some global custom defaults and the two profiles “foo” and “bar”:

yes
nodate
exclude = *.txt

[foo]
tracker = https://foo1/announce
tracker = https://foo2/announce
private

[bar]
tracker = https://bar/announce
comment = I love bar.

With this configuration file, these arguments are always used:

--yes
--nodate
--exclude '*.txt'

If “--profile foo” is given, it also adds these arguments:

--tracker https://foo1/announce
--tracker https://foo2/announce
--private

If “--profile bar” is given, it also adds these arguments:

--tracker https://bar/announce
--comment 'I love bar.'

PIPING OUTPUT

If stdout is not a TTY (i.e. when output is piped) or if the --nohuman option is provided, the output format is different:

  • Leading spaces are removed from each line.

  • The delimiter between label and value as well as between multiple values (files, trackers, etc) is a tab character (“\t” or ASCII code 0x9). Trackers are flattened into a one-dimensional list.

  • Numbers are not formatted (UNIX timestamps for times, seconds for time deltas, raw bytes for sizes, etc).

EXIT CODES

1

Anything not specified below

2

Unknown or invalid command line arguments

3

Error while reading or parsing the config file

4

Error while reading a torrent file or content

5

Error while writing a torrent file

6

Error while verifying a torrent’s content

128

Aborted by SIGINT (typically Ctrl-c was pressed)

REPORTING BUGS

Bug reports, feature requests and poems about hedgehogs are welcome on the issue tracker.

rndusr-torf-cli-688c8c0/pyproject.toml000066400000000000000000000024551463253735700200520ustar00rootroot00000000000000[project] name = "torf-cli" description = "CLI tool to create, read and edit torrent files" readme = "README.rst" license = {text = "GPL-3.0-or-later"} authors = [ { name="Random User", email="rndusr@posteo.de" }, ] keywords = ["bittorrent", "torrent", "magnet", "cli"] dynamic = ["version"] # Get version from PROJECT/__version__ classifiers = [ "Development Status :: 5 - Production/Stable", "Intended Audience :: End Users/Desktop", "License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)", "Programming Language :: Python :: 3", ] requires-python = ">=3.7" dependencies = [ "torf==4.*,>=4.1.2", "pyxdg", ] [project.optional-dependencies] dev = [ "pytest", "tox", "coverage", "pytest-cov", "ruff", "flake8", "isort", ] [project.urls] Repository = "https://github.com/rndusr/torf-cli" Documentation = "https://rndusr.github.io/torf-cli/torf.1.html" "Bug Tracker" = "https://github.com/rndusr/torf-cli/issues" Changelog = "https://raw.githubusercontent.com/rndusr/torf-cli/master/CHANGELOG" [build-system] requires = ["setuptools"] build-backend = "setuptools.build_meta" [tool.setuptools.packages.find] include = ["torfcli*"] [tool.setuptools.dynamic] version = {attr = "torfcli._vars.__version__"} [project.scripts] torf = "torfcli:run" rndusr-torf-cli-688c8c0/ruff.toml000066400000000000000000000003551463253735700167720ustar00rootroot00000000000000line-length = 120 lint.select = [ "E", # pycodestyle "F", # pyflakes "I", # isort ] [lint.per-file-ignores] "__init__.py" = [ # imported but unused "F401", ] "tests/*" = [ # Line too long "E501", ] rndusr-torf-cli-688c8c0/tests/000077500000000000000000000000001463253735700162725ustar00rootroot00000000000000rndusr-torf-cli-688c8c0/tests/conftest.py000066400000000000000000000110521463253735700204700ustar00rootroot00000000000000import contextlib import functools import os import re from types import 
GeneratorType from unittest import mock import pytest import torf @pytest.fixture def regex(): # https://kalnytskyi.com/howto/assert-str-matches-regex-in-pytest/ class _regex: def __init__(self, pattern, flags=0, show_groups=False): self._regex = re.compile(pattern, flags) self._show_groups = show_groups def __eq__(self, string): match = self._regex.search(string) if match is not None and self._show_groups: print(match.groups()) return False else: return bool(match) def __repr__(self): return self._regex.pattern return _regex @pytest.fixture(autouse=True) def change_cwd(tmp_path): orig_dir = os.getcwd() os.chdir(str(tmp_path)) try: yield finally: os.chdir(orig_dir) @pytest.fixture(autouse=True) def cfgfile(tmp_path, monkeypatch): cfgdir = tmp_path / 'configdir' cfgdir.mkdir() cfgfile = cfgdir / 'config' from torfcli import _config monkeypatch.setattr(_config, 'DEFAULT_CONFIG_FILE', str(cfgfile)) return cfgfile def _assert_torrents_equal(orig, new, path_map=None, ignore=(), **new_attrs, ): attrs = ['comment', 'created_by', 'creation_date', 'files', 'filetree', 'httpseeds', 'name', 'piece_size', 'pieces', 'private', 'randomize_infohash', 'size', 'source', 'trackers', 'webseeds'] for attr in attrs: if attr not in new_attrs and attr not in ignore: orig_val, new_val = getattr(orig, attr), getattr(new, attr) if isinstance(orig_val, GeneratorType): orig_val, new_val = tuple(orig_val), tuple(new_val) assert orig_val == new_val for attr,val in new_attrs.items(): assert getattr(new, attr) == val if path_map: for path, exp_value in path_map.items(): path = list(path) value = new.metainfo while path: key = path.pop(0) value = value[key] assert value == exp_value @pytest.fixture def assert_torrents_equal(): return _assert_torrents_equal @pytest.fixture def human_readable(monkeypatch): @contextlib.contextmanager def _human_readable(monkeypatch, human_readable): from torfcli import _ui monkeypatch.setattr(_ui.UI, '_human', lambda self: bool(human_readable)) yield return 
functools.partial(_human_readable, monkeypatch) @pytest.fixture def mock_content(tmp_path): base = tmp_path / 'My Torrent' base.mkdir() file1 = base / 'Something.jpg' file2 = base / 'Anotherthing.iso' file3 = base / 'Thirdthing.txt' for f in (file1, file2, file3): f.write_text('some data') return base @pytest.fixture def mock_create_mode(monkeypatch): from torfcli import _main mock_create_mode = mock.MagicMock() monkeypatch.setattr(_main, '_create_mode', mock_create_mode) return mock_create_mode @contextlib.contextmanager def _create_torrent(tmp_path, mock_content, **kwargs): torrent_file = str(tmp_path / 'test.torrent') kw = {'path': str(mock_content), 'exclude_globs': ['Original', 'exclusions'], 'trackers': ['http://some.tracker'], 'webseeds': ['http://some.webseed'], 'private': False, 'randomize_infohash': False, 'comment': 'Original Comment', 'created_by': 'Original Creator'} kw.update(kwargs) t = torf.Torrent(**kw) t.generate() t.write(torrent_file) try: yield torrent_file finally: if os.path.exists(torrent_file): os.remove(torrent_file) @pytest.fixture def create_torrent(tmp_path, mock_content): return functools.partial(_create_torrent, tmp_path, mock_content) ansi_regex = re.compile(r'(?:\x1B[@-_]|[\x80-\x9F])[0-?]*[ -/]*[@-~]') erase_line_regex = re.compile(r'^(.*?)\x1b\[2K.*?$', flags=re.MULTILINE) @pytest.fixture def clear_ansi(): def _clear_ansi(string): string = erase_line_regex.sub(r'\1', string) string = ansi_regex.sub('', string) string = re.sub(r'\x1b[78]', '', string) string = re.sub(r'(?:\r|^).*\r', '', string, flags=re.MULTILINE) return string return _clear_ansi @pytest.fixture def assert_no_ctrl(): """Assert string doesn't contain control sequences except for \n and \t""" def _assert_no_ctrl(string): for c in string: assert ord(c) >= 32 or c in ('\n', '\t') return _assert_no_ctrl rndusr-torf-cli-688c8c0/tests/test_basics.py000066400000000000000000000017471463253735700211600ustar00rootroot00000000000000from unittest.mock import patch from torfcli 
import _errors, _vars, run def test_no_arguments(capsys): with patch('sys.exit') as mock_exit: run([]) mock_exit.assert_called_once_with(_errors.Code.CLI) cap = capsys.readouterr() assert cap.out == '' assert cap.err == (f'{_vars.__appname__}: Not sure what to do ' f'(see USAGE in `{_vars.__appname__} -h`)\n') def test_help(capsys): for arg in ('--help', '-h'): with patch('sys.exit') as mock_exit: run([arg]) mock_exit.assert_not_called() cap = capsys.readouterr() from torfcli._config import HELP_TEXT assert cap.out == HELP_TEXT + '\n' assert cap.err == '' def test_version(capsys): with patch('sys.exit') as mock_exit: run(['--version']) mock_exit.assert_not_called() cap = capsys.readouterr() from torfcli._config import VERSION_TEXT assert cap.out == VERSION_TEXT + '\n' assert cap.err == '' rndusr-torf-cli-688c8c0/tests/test_configfile.py000066400000000000000000000200141463253735700220050ustar00rootroot00000000000000import datetime import os import textwrap from unittest.mock import patch from torfcli import _errors, _vars, run def test_default_configfile_doesnt_exist(cfgfile, mock_content, mock_create_mode): run([str(mock_content)]) cfg = mock_create_mode.call_args[0][1] assert cfg['PATH'] == str(mock_content) def test_custom_configfile_doesnt_exist(capsys, tmp_path, mock_content, mock_create_mode): cfgfile = tmp_path / 'wrong_special_config' with patch('sys.exit') as mock_exit: run(['--config', str(cfgfile), str(mock_content)]) mock_exit.assert_called_once_with(_errors.Code.CONFIG) cap = capsys.readouterr() assert cap.out == '' assert cap.err == f'{_vars.__appname__}: {cfgfile}: No such file or directory\n' assert mock_create_mode.call_args is None def test_config_unreadable(capsys, cfgfile, mock_content, mock_create_mode): cfgfile.write_text('something') import os os.chmod(cfgfile, 0o000) with patch('sys.exit') as mock_exit: run([str(mock_content)]) mock_exit.assert_called_once_with(_errors.Code.CONFIG) cap = capsys.readouterr() assert cap.out == '' assert cap.err 
== f'{_vars.__appname__}: {cfgfile}: Permission denied\n' assert mock_create_mode.call_args is None def test_custom_configfile(tmp_path, mock_content, mock_create_mode): cfgfile = tmp_path / 'special_config' cfgfile.write_text(textwrap.dedent(''' comment = asdf ''')) run(['--config', str(cfgfile), str(mock_content)]) cfg = mock_create_mode.call_args[0][1] assert cfg['comment'] == 'asdf' def test_noconfig_option(cfgfile, mock_content, mock_create_mode): cfgfile.write_text(textwrap.dedent(''' private comment = Nobody shall see this! ''')) run([str(mock_content), '--noconfig']) cfg = mock_create_mode.call_args[0][1] assert cfg['private'] is None assert cfg['comment'] is None def test_cli_args_take_precedence(cfgfile, mock_content, mock_create_mode): cfgfile.write_text(textwrap.dedent(''' xseed comment = Generic description date = 1970-01-01 ''')) run([str(mock_content), '--noxseed', '--date', '2001-02-03 04:05']) cfg = mock_create_mode.call_args[0][1] assert cfg['noxseed'] is True assert cfg['xseed'] is True assert cfg['comment'] == 'Generic description' assert cfg['date'] == datetime.datetime(2001, 2, 3, 4, 5) def test_adding_to_list_via_cli(cfgfile, mock_content, mock_create_mode): cfgfile.write_text(textwrap.dedent(''' tracker = https://foo tracker = https://bar ''')) run([str(mock_content), '--tracker', 'https://baz']) cfg = mock_create_mode.call_args[0][1] assert cfg['tracker'] == ['https://foo', 'https://bar', 'https://baz'] def test_invalid_option_name(capsys, cfgfile, mock_content, mock_create_mode): cfgfile.write_text(textwrap.dedent(''' foo = 123 ''')) with patch('sys.exit') as mock_exit: run([]) mock_exit.assert_called_once_with(_errors.Code.CONFIG) cap = capsys.readouterr() assert cap.out == '' assert cap.err == f'{_vars.__appname__}: {cfgfile}: Unrecognized arguments: --foo\n' assert mock_create_mode.call_args is None def test_invalid_boolean_name(capsys, cfgfile, mock_content, mock_create_mode): cfgfile.write_text(textwrap.dedent(''' foo ''')) with 
patch('sys.exit') as mock_exit: run([]) mock_exit.assert_called_once_with(_errors.Code.CONFIG) cap = capsys.readouterr() assert cap.out == '' assert cap.err == f'{_vars.__appname__}: {cfgfile}: Unrecognized arguments: --foo\n' assert mock_create_mode.call_args is None def test_illegal_configfile_arguments(capsys, cfgfile, mock_content, mock_create_mode): for arg in ('config', 'noconfig', 'profile', 'help', 'version'): cfgfile.write_text(textwrap.dedent(f''' {arg} = foo ''')) with patch('sys.exit') as mock_exit: run(['--config', str(cfgfile), str(mock_content)]) mock_exit.assert_called_once_with(_errors.Code.CONFIG) cap = capsys.readouterr() assert cap.out == '' assert cap.err == f'{_vars.__appname__}: {cfgfile}: Not allowed in config file: {arg}\n' assert mock_create_mode.call_args is None for arg in ('config', 'noconfig', 'profile', 'help', 'version'): cfgfile.write_text(textwrap.dedent(f''' {arg} ''')) with patch('sys.exit') as mock_exit: run(['--config', str(cfgfile), str(mock_content)]) mock_exit.assert_called_once_with(_errors.Code.CONFIG) cap = capsys.readouterr() assert cap.out == '' assert cap.err == f'{_vars.__appname__}: {cfgfile}: Not allowed in config file: {arg}\n' assert mock_create_mode.call_args is None def test_environment_variable_resolution(cfgfile, mock_content, mock_create_mode): cfgfile.write_text(textwrap.dedent(''' tracker = https://$DOMAIN:$PORT${PATH} date = $DATE comment = $UNDEFINED ''')) with patch.dict(os.environ, {'DOMAIN': 'tracker.example.org', 'PORT': '123', 'PATH': '/announce', 'DATE': '1999-12-31'}): run([str(mock_content)]) cfg = mock_create_mode.call_args[0][1] assert cfg['tracker'] == ['https://tracker.example.org:123/announce'] assert cfg['date'] == datetime.datetime(1999, 12, 31, 0, 0) assert cfg['comment'] == '$UNDEFINED' def test_environment_variable_resolution_in_profile(cfgfile, mock_content, mock_create_mode): cfgfile.write_text(textwrap.dedent(''' [foo] tracker = https://$DOMAIN:${PORT}/$PATH date = $DATE comment = 
$UNDEFINED ''')) with patch.dict(os.environ, {'DOMAIN': 'tracker.example.org', 'PORT': '123', 'PATH': 'announce', 'DATE': '1999-12-31'}): run([str(mock_content), '--profile', 'foo']) cfg = mock_create_mode.call_args[0][1] assert cfg['tracker'] == ['https://tracker.example.org:123/announce'] assert cfg['date'] == datetime.datetime(1999, 12, 31, 0, 0) assert cfg['comment'] == '$UNDEFINED' def test_escaping_dollar(cfgfile, mock_content, mock_create_mode): cfgfile.write_text(textwrap.dedent(''' [one] comment = \\$COMMENT [two] comment = \\\\$COMMENT [three] comment = \\\\\\$COMMENT [four] comment = \\\\\\\\$COMMENT [five] comment = \\\\\\\\\\$COMMENT [six] comment = \\\\\\\\\\\\$COMMENT [seven] comment = \\\\\\\\\\\\\\$COMMENT ''')) with patch.dict(os.environ, {'COMMENT': 'The comment.'}): run([str(mock_content), '--profile', 'one']) cfg = mock_create_mode.call_args[0][1] assert cfg['comment'] == '$COMMENT' with patch.dict(os.environ, {'COMMENT': 'The comment.'}): run([str(mock_content), '--profile', 'two']) cfg = mock_create_mode.call_args[0][1] assert cfg['comment'] == '\\The comment.' with patch.dict(os.environ, {'COMMENT': 'The comment.'}): run([str(mock_content), '--profile', 'three']) cfg = mock_create_mode.call_args[0][1] assert cfg['comment'] == '\\$COMMENT' with patch.dict(os.environ, {'COMMENT': 'The comment.'}): run([str(mock_content), '--profile', 'four']) cfg = mock_create_mode.call_args[0][1] assert cfg['comment'] == '\\\\The comment.' with patch.dict(os.environ, {'COMMENT': 'The comment.'}): run([str(mock_content), '--profile', 'five']) cfg = mock_create_mode.call_args[0][1] assert cfg['comment'] == '\\\\$COMMENT' with patch.dict(os.environ, {'COMMENT': 'The comment.'}): run([str(mock_content), '--profile', 'six']) cfg = mock_create_mode.call_args[0][1] assert cfg['comment'] == '\\\\\\The comment.' 
with patch.dict(os.environ, {'COMMENT': 'The comment.'}): run([str(mock_content), '--profile', 'seven']) cfg = mock_create_mode.call_args[0][1] assert cfg['comment'] == '\\\\\\$COMMENT' rndusr-torf-cli-688c8c0/tests/test_configformat.py000066400000000000000000000041651463253735700223670ustar00rootroot00000000000000import textwrap from torfcli._config import _readfile def test_boolean_options(cfgfile): cfgfile.write_text(textwrap.dedent(''' foo bar ''')) cfg = _readfile(cfgfile) assert cfg == {'foo': True, 'bar': True} def test_options_with_single_values(cfgfile): cfgfile.write_text(textwrap.dedent(''' foo = 1 bar = two ''')) cfg = _readfile(cfgfile) assert cfg == {'foo': '1', 'bar': 'two'} def test_options_with_empty_value(cfgfile): cfgfile.write_text(textwrap.dedent(''' foo = ''')) cfg = _readfile(cfgfile) assert cfg == {'foo': ''} def test_options_with_list_values(cfgfile): cfgfile.write_text(textwrap.dedent(''' foo = 1 foo = 2 foo = three ''')) cfg = _readfile(cfgfile) assert cfg == {'foo': ['1', '2', 'three']} def test_optional_quotes(cfgfile): for comment_cfg,comment_exp in ((' A comment ', 'A comment'), ("' A comment '", ' A comment '), ('" A comment "', ' A comment '), ('\' A comment "', '\' A comment "')): cfgfile.write_text(textwrap.dedent(f''' comment = {comment_cfg} ''')) cfg = _readfile(cfgfile) assert cfg == {'comment': comment_exp} def test_comments(cfgfile): cfgfile.write_text(textwrap.dedent(''' # This is a config file date = 1970-01-01 # The next line is empty # This is a boolean value private # And here's comment comment=A comment # Goodbye! 
''')) cfg = _readfile(cfgfile) assert cfg == {'date': '1970-01-01', 'private': True, 'comment': 'A comment'} def test_sections(cfgfile): cfgfile.write_text(textwrap.dedent(''' date = 1970-01-01 x = 0 [foo] x = 10 date = never yup [bar] yup x = -100 y = 25 ''')) cfg = _readfile(cfgfile) assert cfg == {'date': '1970-01-01', 'x': '0', 'foo': {'x': '10', 'date': 'never', 'yup': True}, 'bar': {'x': '-100', 'y': '25', 'yup': True}} rndusr-torf-cli-688c8c0/tests/test_create.py000066400000000000000000001023671463253735700211570ustar00rootroot00000000000000import os import re from datetime import date, datetime, time, timedelta from unittest.mock import DEFAULT, patch import pytest import torf from torfcli import _errors as err from torfcli import _vars, run def assert_approximate_date(date1, date2): date_min = date2.replace(microsecond=0) - timedelta(seconds=1) date_max = date2.replace(microsecond=0) + timedelta(seconds=1) assert date_min <= date1 <= date_max ### Basic creation modes @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_default_torrent_filepath(capsys, mock_content, human_readable, hr_enabled, clear_ansi, assert_no_ctrl, regex): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) now = datetime.today() with human_readable(hr_enabled): run([content_path]) t = torf.Torrent.read(exp_torrent_filepath) assert t.name == 'My Torrent' assert len(tuple(t.files)) == 3 assert_approximate_date(t.creation_date, now) assert t.created_by.startswith('torf') cap = capsys.readouterr() if hr_enabled: out_cleared = clear_ansi(cap.out) assert out_cleared == regex(rf'^\s*Magnet magnet:\?xt=urn:btih:{t.infohash}&dn=My\+Torrent&xl=\d+$', flags=re.MULTILINE) assert out_cleared == regex(rf'^\s*Torrent {exp_torrent_filename}$', flags=re.MULTILINE) assert out_cleared == regex(r'^\s*Name My Torrent$', 
flags=re.MULTILINE) assert out_cleared == regex(r'^\s*File Count 3$', flags=re.MULTILINE) assert out_cleared == regex(rf'^\s*Info Hash {t.infohash}$', flags=re.MULTILINE) assert out_cleared == regex(rf'^\s*Created By torf {re.escape(_vars.__version__)}$', flags=re.MULTILINE) else: assert_no_ctrl(cap.out) assert cap.out == regex(rf'^Magnet\tmagnet:\?xt=urn:btih:{t.infohash}&dn=My\+Torrent&xl=\d+$', flags=re.MULTILINE) assert cap.out == regex(rf'^Torrent\t{exp_torrent_filename}$', flags=re.MULTILINE) assert cap.out == regex(r'^Name\tMy Torrent$', flags=re.MULTILINE) assert cap.out == regex(r'^File Count\t3$', flags=re.MULTILINE) assert cap.out == regex(rf'^Info Hash\t{t.infohash}$', flags=re.MULTILINE) assert cap.out == regex(rf'^Created By\ttorf {re.escape(_vars.__version__)}$', flags=re.MULTILINE) def test_user_given_torrent_filepath(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = 'foo.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) now = datetime.today() run([content_path, '--out', exp_torrent_filename]) t = torf.Torrent.read(exp_torrent_filepath) assert t.name == 'My Torrent' assert len(tuple(t.files)) == 3 assert_approximate_date(t.creation_date, now) assert t.created_by.startswith('torf') cap = capsys.readouterr() assert 'Magnet\tmagnet:?xt=urn:btih:' in cap.out assert f'Torrent\t{exp_torrent_filename}' in cap.out assert 'Name\tMy Torrent' in cap.out assert 'File Count\t3' in cap.out assert 'Info Hash' in cap.out assert 'Created By\ttorf' in cap.out ### Error cases @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_content_path_is_empty_directory(capsys, tmp_path, human_readable, hr_enabled): (tmp_path / 'empty').mkdir() with human_readable(hr_enabled): with patch('sys.exit') as mock_exit: run([str(tmp_path / 'empty')]) mock_exit.assert_called_once_with(err.Code.READ) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: 
{tmp_path / "empty"}: Empty or all files excluded\n' @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_content_path_is_empty_file(capsys, tmp_path, human_readable, hr_enabled): (tmp_path / 'empty').write_bytes(b'') with human_readable(hr_enabled): with patch('sys.exit') as mock_exit: run([str(tmp_path / 'empty')]) mock_exit.assert_called_once_with(err.Code.READ) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {tmp_path / "empty"}: Empty or all files excluded\n' @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_content_path_doesnt_exist(capsys, human_readable, hr_enabled): with human_readable(hr_enabled): with patch('sys.exit') as mock_exit: run(['/path/doesnt/exist']) mock_exit.assert_called_once_with(err.Code.READ) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: /path/doesnt/exist: No such file or directory\n' def test_torrent_filepath_exists(capsys, mock_content, human_readable): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) with open(exp_torrent_filepath, 'wb') as f: f.write(b'') with human_readable(False): with patch('sys.exit') as mock_exit: run([content_path, '--out', exp_torrent_filepath]) mock_exit.assert_called_once_with(err.Code.WRITE) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {exp_torrent_filepath}: File exists\n' with human_readable(True): with patch('torfcli._ui._HumanFormatter.dialog_yes_no') as mock_dialog: with patch('sys.exit') as mock_exit: mock_dialog.return_value = False run([content_path, '--out', exp_torrent_filepath]) mock_exit.assert_called_once_with(err.Code.WRITE) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {exp_torrent_filepath}: File exists\n' with patch('sys.exit') as mock_exit: 
mock_dialog.return_value = True run([content_path, '--out', exp_torrent_filepath]) mock_exit.assert_not_called() cap = capsys.readouterr() assert cap.err == '' assert torf.Torrent.read(exp_torrent_filepath).name == mock_content.name ### Options def test_nomagnet_option(capsys, mock_content): content_path = str(mock_content) run([content_path, '--nomagnet']) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) assert os.path.exists(exp_torrent_filepath) cap = capsys.readouterr() assert 'Magnet\t' not in cap.out assert 'Torrent\t' in cap.out assert 'Name\tMy Torrent' in cap.out assert 'File Count\t3' in cap.out assert 'Info Hash' in cap.out assert 'Created By\ttorf' in cap.out def test_notorrent_option(capsys, mock_content): content_path = str(mock_content) run([content_path, '--notorrent']) unexp_torrent_filename = os.path.basename(content_path) + '.torrent' unexp_torrent_filepath = os.path.join(os.getcwd(), unexp_torrent_filename) assert not os.path.exists(unexp_torrent_filepath) cap = capsys.readouterr() assert 'Magnet\t' in cap.out assert 'Torrent\t' not in cap.out assert 'Name\tMy Torrent' in cap.out assert 'File Count\t3' in cap.out assert 'Info Hash' in cap.out assert 'Created By\ttorf' in cap.out def test_yes_option(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) with open(exp_torrent_filepath, 'wb') as f: f.write(b'') assert os.path.exists(exp_torrent_filepath) run([content_path, '--out', exp_torrent_filename, '--yes']) t = torf.Torrent.read(exp_torrent_filepath) assert t.name == 'My Torrent' cap = capsys.readouterr() assert 'Magnet\tmagnet:?xt=urn:btih:' in cap.out assert f'Torrent\t{exp_torrent_filename}' in cap.out def test_exclude_glob(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = 
os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--exclude', '*.jpg']) t = torf.Torrent.read(exp_torrent_filepath) assert len(tuple(t.files)) == 2 cap = capsys.readouterr() assert 'Exclude\t*.jpg' in cap.out assert 'File Count\t2' in cap.out def test_excludes_regex(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) with patch('sys.exit') as mock_exit: run([content_path, '--exclude-regex', '*']) mock_exit.assert_called_once_with(err.Code.CLI) cap = capsys.readouterr() assert cap.out == '' assert cap.err == f'{_vars.__appname__}: Invalid regular expression: *: Nothing to repeat at position 0\n' run([content_path, '--exclude-regex', r'.*\.jpg$']) t = torf.Torrent.read(exp_torrent_filepath) assert len(tuple(t.files)) == 2 cap = capsys.readouterr() assert cap.err == '' assert 'Exclude\t.*\\.jpg$' in cap.out assert 'File Count\t2' in cap.out def test_multiple_excludes(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--exclude', '*.jpg', '--exclude-regex', 'txt$']) t = torf.Torrent.read(exp_torrent_filepath) assert len(tuple(t.files)) == 1 cap = capsys.readouterr() assert 'Exclude\t*.jpg\ttxt$' in cap.out assert 'File Count\t1' in cap.out def test_exclude_everything(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) with patch('sys.exit') as mock_exit: run([content_path, '--exclude', '*']) mock_exit.assert_called_once_with(err.Code.READ) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {content_path}: Empty or all files 
excluded\n' assert not os.path.exists(exp_torrent_filepath) def test_include_glob(capsys, tmp_path): content = tmp_path / 'My Content' content.mkdir() new_file1 = content / 'file1.jpg' new_file1.write_text('image data') new_file2 = content / 'file2.jpg' new_file2.write_text('image data') exp_torrent_filepath = content.name + '.torrent' run([str(content), '--exclude', '*.jpg', '--include', '*file2*']) t = torf.Torrent.read(exp_torrent_filepath) assert tuple(t.files) == (torf.File('My Content/file2.jpg', size=10),) cap = capsys.readouterr() assert 'Exclude\t*.jpg' in cap.out assert 'Include\t*file2*' in cap.out assert 'File Count\t1' in cap.out def test_include_regex(capsys, tmp_path): content = tmp_path / 'My Content' content.mkdir() new_file1 = content / 'file1.jpg' new_file1.write_text('image data') new_file2 = content / 'file2.jpg' new_file2.write_text('image data') exp_torrent_filepath = content.name + '.torrent' run([str(content), '--exclude', '*file*', '--include-regex', r'file2\.jpg$']) t = torf.Torrent.read(exp_torrent_filepath) assert tuple(t.files) == (torf.File('My Content/file2.jpg', size=10),) cap = capsys.readouterr() print(cap.out) assert 'Exclude\t*file*' in cap.out assert 'Include\tfile2\\.jpg$' in cap.out assert 'File Count\t1' in cap.out def test_name_option(capsys, mock_content): content_path = str(mock_content) name = 'Your Torrent' exp_torrent_filename = f'{name}.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) wrong_torrent_filename = os.path.basename(content_path) + '.torrent' run([content_path, '--name', name]) assert not os.path.exists(wrong_torrent_filename) t = torf.Torrent.read(exp_torrent_filepath) assert t.name == 'Your Torrent' cap = capsys.readouterr() assert f'Name\t{name}' in cap.out assert 'Torrent\tYour Torrent.torrent' in cap.out def test_private_option(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = 
os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--private', '--tracker', 'https://foo.bar:123/']) t = torf.Torrent.read(exp_torrent_filepath) assert t.private is True cap = capsys.readouterr() assert 'Private\tyes' in cap.out def test_private_enabled_and_no_trackers_given(capsys, mock_content): run([str(mock_content), '--private']) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: WARNING: Torrent is private and has no trackers\n' assert os.path.exists(str(mock_content) + '.torrent') def test_noprivate_option(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--private', '--noprivate']) t = torf.Torrent.read(exp_torrent_filepath) assert t.private is False cap = capsys.readouterr() assert 'Private\tno' in cap.out def test_missing_private_option_does_not_set_private_field(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path]) t = torf.Torrent.read(exp_torrent_filepath) assert 'private' not in t.metainfo['info'] assert t.private is None def test_source_option(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--source', 'SOURCE']) t = torf.Torrent.read(exp_torrent_filepath) assert t.source == 'SOURCE' cap = capsys.readouterr() assert 'Source\tSOURCE' in cap.out def test_nosource_option(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--source', 'SOURCE', '--nosource']) t = 
torf.Torrent.read(exp_torrent_filepath) assert t.source is None cap = capsys.readouterr() assert 'Source\t' not in cap.out def test_xseed_option(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path]) cap = capsys.readouterr() hash_line_1 = [line for line in cap.out.split('\n') if 'Info Hash' in line][0] hash_1 = torf.Torrent.read(exp_torrent_filepath).infohash run([content_path, '--xseed', '--yes']) cap = capsys.readouterr() hash_line_2 = [line for line in cap.out.split('\n') if 'Info Hash' in line][0] hash_2 = torf.Torrent.read(exp_torrent_filepath).infohash assert hash_line_1 != hash_line_2 assert hash_1 != hash_2 def test_noxseed_option(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--xseed', '--noxseed']) cap = capsys.readouterr() hash_line_1 = [line for line in cap.out.split('\n') if 'Info Hash' in line][0] hash_1 = torf.Torrent.read(exp_torrent_filepath).infohash run([content_path, '--yes']) cap = capsys.readouterr() hash_line_2 = [line for line in cap.out.split('\n') if 'Info Hash' in line][0] hash_2 = torf.Torrent.read(exp_torrent_filepath).infohash assert hash_line_1 == hash_line_2 assert hash_1 == hash_2 def test_max_piece_size_option_not_taking_effect(capsys, mock_content): # Create large sparse file, i.e. 
a file that isn't actually written to disk large_file = mock_content / 'large file' with open(large_file, 'ab') as f: f.truncate(2**20) content_path = str(mock_content) with patch.multiple('torfcli._main', _hash_pieces=DEFAULT, _write_torrent=DEFAULT): run([content_path, '--max-piece-size', '8']) cap = capsys.readouterr() piece_size = [line for line in cap.out.split('\n') if 'Piece Size' in line][0].split('\t')[1] assert int(piece_size) < 8 * 1048576 def test_max_piece_size_option_smaller_than_default(capsys, mock_content): # Create large sparse file, i.e. a file that isn't actually written to disk large_file = mock_content / 'large file' with open(large_file, 'ab') as f: f.truncate(5**20) content_path = str(mock_content) with patch.multiple('torfcli._main', _hash_pieces=DEFAULT, _write_torrent=DEFAULT): run([content_path, '--max-piece-size', '2']) cap = capsys.readouterr() piece_size = [line for line in cap.out.split('\n') if 'Piece Size' in line][0].split('\t')[1] assert int(piece_size) == 2 * 1048576 def test_max_piece_size_option_larger_than_default(capsys, mock_content): # Create large sparse file, i.e. a file that isn't actually written to disk large_file = mock_content / 'large file' with open(large_file, 'ab') as f: f.truncate(5**20) content_path = str(mock_content) with patch.multiple('torfcli._main', _hash_pieces=DEFAULT, _write_torrent=DEFAULT): run([content_path, '--max-piece-size', '128']) cap = capsys.readouterr() piece_size = [line for line in cap.out.split('\n') if 'Piece Size' in line][0].split('\t')[1] assert int(piece_size) == 128 * 1048576 def test_max_piece_size_option_not_given(capsys, mock_content): # Create large sparse file, i.e. 
a file that isn't actually written to disk large_file = mock_content / 'large file' with open(large_file, 'ab') as f: f.truncate(2**40) content_path = str(mock_content) with patch.multiple('torfcli._main', _hash_pieces=DEFAULT, _write_torrent=DEFAULT): run([content_path]) cap = capsys.readouterr() piece_size = [line for line in cap.out.split('\n') if 'Piece Size' in line][0].split('\t')[1] assert int(piece_size) == torf.Torrent().piece_size_max def test_max_piece_size_is_no_power_of_two(capsys, mock_content): # Create large sparse file, i.e. a file that isn't actually written to disk large_file = mock_content / 'large file' with open(large_file, 'ab') as f: f.truncate(2**40) content_path = str(mock_content) with patch.multiple('torfcli._main', _hash_pieces=DEFAULT, _write_torrent=DEFAULT): factor = 1.234 exp_invalid_piece_size = int(factor * 2**20) with patch('sys.exit') as mock_exit: run([content_path, '--max-piece-size', str(factor)]) mock_exit.assert_called_once_with(err.Code.CLI) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: Piece size must be divisible by 16 KiB: {exp_invalid_piece_size}\n' def test_default_date(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) now = datetime.today().replace(microsecond=0) run([content_path]) t = torf.Torrent.read(exp_torrent_filepath) assert_approximate_date(t.creation_date, datetime.today()) cap = capsys.readouterr() exp_dates = [int(now.timestamp()), int((now + timedelta(seconds=1)).timestamp())] assert any(f'Created\t{exp_date}' in cap.out for exp_date in exp_dates) def test_date_today(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--date', 'today']) t = torf.Torrent.read(exp_torrent_filepath) 
assert t.creation_date == datetime.combine(date.today(), time(0, 0, 0)) cap = capsys.readouterr() exp_date = int(datetime.today() .replace(hour=0, minute=0, second=0, microsecond=0) .timestamp()) assert f'Created\t{exp_date}' in cap.out def test_date_now(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) now = datetime.today() run([content_path, '--date', 'now']) t = torf.Torrent.read(exp_torrent_filepath) assert_approximate_date(t.creation_date, now) cap = capsys.readouterr() exp_dates = [int(now.timestamp()), int((now + timedelta(seconds=1)).timestamp())] assert any(f'Created\t{exp_date}' in cap.out for exp_date in exp_dates) def test_user_given_date(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--date', '2000-01-02']) t = torf.Torrent.read(exp_torrent_filepath) assert t.creation_date == datetime.combine(date(2000, 1, 2), time(0, 0, 0)) cap = capsys.readouterr() exp_date = int(datetime.strptime('2000-01-02', '%Y-%m-%d').timestamp()) assert f'Created\t{exp_date}' in cap.out def test_user_given_date_and_time(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--date', '2000-01-02 03:04']) t = torf.Torrent.read(exp_torrent_filepath) assert t.creation_date == datetime(2000, 1, 2, 3, 4) cap = capsys.readouterr() exp_date = int(datetime.strptime('2000-01-02 03:04', '%Y-%m-%d %H:%M').timestamp()) assert f'Created\t{exp_date}' in cap.out def test_user_given_date_and_time_with_seconds(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' 
exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--date', '2000-01-02 03:04:05']) t = torf.Torrent.read(exp_torrent_filepath) assert t.creation_date == datetime(2000, 1, 2, 3, 4, 5) cap = capsys.readouterr() exp_date = int(datetime.strptime('2000-01-02 03:04:05', '%Y-%m-%d %H:%M:%S').timestamp()) assert f'Created\t{exp_date}' in cap.out def test_invalid_date(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) with patch('sys.exit') as mock_exit: run([content_path, '--date', 'foo']) mock_exit.assert_called_once_with(err.Code.CLI) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: foo: Invalid date\n' assert not os.path.exists(exp_torrent_filepath) def test_nodate_option(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--date', '2000-01-02 03:04:05', '--nodate']) t = torf.Torrent.read(exp_torrent_filepath) assert t.creation_date is None cap = capsys.readouterr() assert 'Created\t' not in cap.out def test_comment_option(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--comment', 'This is a comment.']) t = torf.Torrent.read(exp_torrent_filepath) assert t.comment == 'This is a comment.' cap = capsys.readouterr() assert 'Comment\tThis is a comment.' 
in cap.out def test_nocomment_option(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--comment', 'This is a comment.', '--nocomment']) t = torf.Torrent.read(exp_torrent_filepath) assert t.comment is None cap = capsys.readouterr() assert 'Comment\t' not in cap.out def test_creator_option(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--creator', 'Mbombo']) t = torf.Torrent.read(exp_torrent_filepath) assert t.created_by == 'Mbombo' cap = capsys.readouterr() assert 'Created By\tMbombo' in cap.out def test_nocreator_option(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--nocreator']) t = torf.Torrent.read(exp_torrent_filepath) assert t.created_by is None cap = capsys.readouterr() assert 'Created By\t' not in cap.out def test_single_tracker(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--tracker', 'https://mytracker.example.org']) t = torf.Torrent.read(exp_torrent_filepath) assert t.trackers == [['https://mytracker.example.org']] cap = capsys.readouterr() assert 'Tracker\thttps://mytracker.example.org' in cap.out def test_multiple_trackers(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--tracker', 'https://tracker1.example.org/foo', '--tracker', 
'https://tracker2.example.org/bar/', '--tracker', 'https://tracker3.example.org/baz']) t = torf.Torrent.read(exp_torrent_filepath) assert t.trackers == [['https://tracker1.example.org/foo'], ['https://tracker2.example.org/bar/'], ['https://tracker3.example.org/baz']] cap = capsys.readouterr() assert 'Trackers\thttps://tracker1.example.org/foo' in cap.out assert '\thttps://tracker2.example.org/bar/' in cap.out assert '\thttps://tracker3.example.org/baz' in cap.out def test_multiple_tracker_tiers(capsys, mock_content, human_readable, clear_ansi, regex): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) with human_readable(True): run([content_path, '--tracker', 'http://foo,http://bar', '--tracker', 'http://a,http://b,http://c', '--tracker', 'http://asdf']) t = torf.Torrent.read(exp_torrent_filepath) assert t.trackers == [['http://foo', 'http://bar'], ['http://a', 'http://b', 'http://c'], ['http://asdf']] cap = capsys.readouterr() assert clear_ansi(cap.out) == regex(r'^(\s*)Trackers Tier 1: http://foo\n' r'\1 http://bar\n' r'\1 Tier 2: http://a\n' r'\1 http://b\n' r'\1 http://c\n' r'\1 Tier 3: http://asdf\n', flags=re.MULTILINE) with human_readable(False): run([content_path, '-y', '--tracker', 'http://foo,http://bar', '--tracker', 'http://a,http://b,http://c', '--tracker', 'http://asdf']) t = torf.Torrent.read(exp_torrent_filepath) assert t.trackers == [['http://foo', 'http://bar'], ['http://a', 'http://b', 'http://c'], ['http://asdf']] cap = capsys.readouterr() assert cap.out == regex(r'^Trackers\thttp://foo\thttp://bar\t' r'http://a\thttp://b\thttp://c\thttp://asdf\n', flags=re.MULTILINE) def test_notracker_option(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--tracker', 
'https://mytracker.example.org', '--notracker']) t = torf.Torrent.read(exp_torrent_filepath) assert t.trackers == [] cap = capsys.readouterr() assert 'Tracker\t' not in cap.out def test_single_webseed(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--webseed', 'https://mywebseed.example.org/foo']) t = torf.Torrent.read(exp_torrent_filepath) assert t.webseeds == ['https://mywebseed.example.org/foo'] cap = capsys.readouterr() assert 'Webseed\thttps://mywebseed.example.org/foo' in cap.out def test_multiple_webseeds(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--webseed', 'https://webseed1.example.org/foo', '--webseed', 'https://webseed2.example.org/bar/', '--webseed', 'https://webseed3.example.org/baz']) t = torf.Torrent.read(exp_torrent_filepath) assert t.webseeds == ['https://webseed1.example.org/foo', 'https://webseed2.example.org/bar/', 'https://webseed3.example.org/baz'] cap = capsys.readouterr() assert 'Webseeds\thttps://webseed1.example.org/foo' in cap.out assert '\thttps://webseed2.example.org/bar/' in cap.out assert '\thttps://webseed3.example.org/baz' in cap.out def test_nowebseed_option(capsys, mock_content): content_path = str(mock_content) exp_torrent_filename = os.path.basename(content_path) + '.torrent' exp_torrent_filepath = os.path.join(os.getcwd(), exp_torrent_filename) run([content_path, '--webseed', 'https://mywebseed.example.org/foo', '--nowebseed']) t = torf.Torrent.read(exp_torrent_filepath) assert t.webseeds == [] cap = capsys.readouterr() assert 'Webseed\t' not in cap.out @pytest.mark.parametrize( argnames='merges, exp_result', argvalues=( ( [ '{"creation date": 1352534887}', '{"created by": null, "info": {"name": 
"New Name"}, "nosuchkey": null}', ( '{' '"my stuff": {"my numbers": [57, [1, 2, 3]], "my strings": ["foo", "bar"]},' '"info": {"foo": [{"bar": 123}, "baz"], "your strings": []}' '}' ), ], { 'creation_date': datetime(2012, 11, 10, 9, 8, 7), 'created_by': None, 'name': 'New Name', 'path_map': { ('my stuff', 'my numbers'): [57, [1, 2, 3]], ('my stuff', 'my strings'): ['foo', 'bar'], ('info', 'foo'): [{'bar': 123}, 'baz'], ('info', 'your strings'): [], }, }, ), ( ['"Hello, World!"'], err.CliError("Not a JSON object: Hello, World!"), ), ), ids=lambda v: repr(v), ) def test_merge_option(merges, exp_result, capsys, mock_content, assert_torrents_equal, tmp_path): content_path = str(mock_content) torrent_filepath = str(tmp_path / 'my.torrent') cmd = [content_path, '-o', torrent_filepath] for merge in merges: cmd.extend(('--merge', merge)) if isinstance(exp_result, err.Error): with patch('sys.exit') as mock_exit: run(cmd) mock_exit.assert_called_once_with(exp_result.exit_code) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {str(exp_result)}\n' assert not os.path.exists(torrent_filepath) else: run(cmd) new = torf.Torrent.read(torrent_filepath) for attr, exp_value in exp_result.items(): if attr == 'path_map': for path, exp_value in exp_value.items(): path = list(path) value = new.metainfo while path: key = path.pop(0) value = value[key] assert value == exp_value else: value = getattr(new, attr) assert value == exp_value rndusr-torf-cli-688c8c0/tests/test_edit.py000066400000000000000000000534561463253735700206450ustar00rootroot00000000000000import os import re from datetime import datetime from unittest.mock import patch import pytest import torf from torfcli import _config as config from torfcli import _errors as err from torfcli import _vars, run def test_nonexisting_input(capsys): nonexisting_path = '/no/such/file' with patch('sys.exit') as mock_exit: run(['-i', nonexisting_path, '-o', 'out.torrent']) mock_exit.assert_called_once_with(err.Code.READ) cap = 
capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {nonexisting_path}: No such file or directory\n' def test_existing_output(capsys, tmp_path, create_torrent): outfile = tmp_path / 'out.torrent' outfile.write_text('some existing file content') with create_torrent() as infile: with patch('sys.exit') as mock_exit: run(['-i', infile, '-o', str(outfile)]) mock_exit.assert_called_once_with(err.Code.WRITE) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {outfile}: File exists\n' def test_unwritable_output(capsys, create_torrent): unwritable_path = '/out.torrent' with create_torrent() as infile: with patch('sys.exit') as mock_exit: run(['-i', infile, '-o', unwritable_path]) mock_exit.assert_called_once_with(err.Code.WRITE) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {unwritable_path}: Permission denied\n' def test_no_changes(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent() as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new) def test_edit_comment(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(comment='A comment') as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--comment', 'A different comment', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, comment='A different comment') def test_remove_comment(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(comment='A comment') as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--nocomment', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, comment=None) def test_remove_creator(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(created_by='The creator') as 
infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--nocreator', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, created_by=None) def test_remove_creator_even_when_creator_provided(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(created_by='The creator') as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--nocreator', '--creator', 'A conflicting creator', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, created_by=None) def test_edit_creator(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(created_by='The creator') as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--creator', 'A different creator', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, created_by='A different creator') def test_edit_default_creator(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(created_by='The creator') as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--creator', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, created_by=config.DEFAULT_CREATOR) def test_remove_private(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(private=True) as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--noprivate', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, private=None) def test_add_private(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(private=False) as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--private', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, private=True) def 
test_add_private_and_remove_all_trackers(create_torrent, tmp_path, assert_torrents_equal, capsys): outfile = str(tmp_path / 'out.torrent') with create_torrent(private=False) as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--private', '--notracker', '-o', outfile]) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: WARNING: Torrent is private and has no trackers\n' new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, private=True, trackers=()) def test_edit_source(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(source='the source') as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--source', 'another source', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, source='another source') def test_remove_source(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(source='the source') as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--nosource', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, source=None) def test_remove_xseed(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(randomize_infohash=True) as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--noxseed', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, randomize_infohash=False) assert orig.infohash != new.infohash def test_add_xseed(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(randomize_infohash=False) as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--xseed', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, randomize_infohash=True) assert orig.infohash != new.infohash def test_remove_trackers(create_torrent, tmp_path, assert_torrents_equal): 
outfile = str(tmp_path / 'out.torrent') with create_torrent(trackers=['http://tracker1', 'http://tracker2']) as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--notracker', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, trackers=[]) def test_add_trackers(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(trackers=['http://tracker1', 'http://tracker2']) as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--tracker', 'http://a', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, trackers=[['http://tracker1', 'http://a'], ['http://tracker2']]) outfile = str(tmp_path / 'out.torrent') with create_torrent(trackers=['http://foo', 'http://bar']) as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--tracker', 'http://a,http://b', '--tracker', 'http://x', '--tracker', 'http://y', '-o', outfile, '-y']) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, trackers=[['http://foo', 'http://a', 'http://b'], ['http://bar', 'http://x'], ['http://y']]) def test_replace_trackers(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(trackers=['http://tracker1', 'http://tracker2']) as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--notracker', '--tracker', 'http://tracker10', '--tracker', 'http://tracker20', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, trackers=[['http://tracker10'], ['http://tracker20']]) def test_invalid_tracker_url(capsys, create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(trackers=['http://tracker1', 'http://tracker2']) as infile: with patch('sys.exit') as mock_exit: run(['-i', infile, '--tracker', 'not a url', '-o', outfile]) mock_exit.assert_called_once_with(err.Code.CLI) cap = capsys.readouterr() assert cap.err == 
f'{_vars.__appname__}: not a url: Invalid URL\n' assert not os.path.exists(outfile) def test_remove_webseeds(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(webseeds=['http://webseed1', 'http://webseed2']) as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--nowebseed', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, webseeds=[]) def test_add_webseed(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(webseeds=['http://webseed1', 'http://webseed2']) as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--webseed', 'http://webseed3', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, webseeds=['http://webseed1', 'http://webseed2', 'http://webseed3']) def test_replace_webseeds(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(webseeds=['http://webseed1', 'http://webseed2']) as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--nowebseed', '--webseed', 'http://webseed10', '--webseed', 'http://webseed20', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, webseeds=['http://webseed10', 'http://webseed20']) def test_invalid_webseed_url(capsys, create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent(webseeds=['http://webseed1', 'http://webseed2']) as infile: with patch('sys.exit') as mock_exit: run(['-i', infile, '--webseed', 'not a url', '-o', outfile]) mock_exit.assert_called_once_with(err.Code.CLI) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: not a url: Invalid URL\n' assert not os.path.exists(outfile) def test_edit_creation_date(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent() as infile: orig = torf.Torrent.read(infile) 
run(['-i', infile, '--date', '3000-05-30 15:03:01', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, creation_date=datetime(3000, 5, 30, 15, 3, 1)) def test_remove_creation_date(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent() as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--nodate', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, creation_date=None) def test_invalid_creation_date(capsys, create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent() as infile: with patch('sys.exit') as mock_exit: run(['-i', infile, '--date', 'foo', '-o', outfile]) mock_exit.assert_called_once_with(err.Code.CLI) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: foo: Invalid date\n' assert not os.path.exists(outfile) def test_edit_path(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') new_content = tmp_path / 'new content' new_content.mkdir() new_file = new_content / 'some file' new_file.write_text('different data') with create_torrent() as infile: orig = torf.Torrent.read(infile) run(['-i', infile, str(new_content), '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, ignore=('files', 'filetree', 'name', 'piece_size', 'pieces', 'size')) assert tuple(new.files) == (torf.File('new content/some file', size=14),) assert new.filetree == {'new content': {'some file': torf.File('new content/some file', size=14)}} assert new.name == 'new content' assert new.size == len('different data') def test_edit_path_with_exclude_option(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') new_content = tmp_path / 'new content' new_content.mkdir() new_file1 = new_content / 'some image.jpg' new_file1.write_text('image data') new_file2 = new_content / 'some text.txt' 
new_file2.write_text('text data') with create_torrent() as infile: orig = torf.Torrent.read(infile) run(['-i', infile, str(new_content), '--exclude', '*.txt', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, ignore=('files', 'filetree', 'name', 'piece_size', 'pieces', 'size')) assert tuple(new.files) == (torf.File('new content/some image.jpg', size=10),) assert new.filetree == {'new content': {'some image.jpg': torf.File('new content/some image.jpg', size=10)}} assert new.name == 'new content' assert new.size == len('image data') def test_edit_path_with_exclude_regex_option(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') new_content = tmp_path / 'new content' new_content.mkdir() new_file1 = new_content / 'some image.jpg' new_file1.write_text('image data') new_file2 = new_content / 'some text.txt' new_file2.write_text('text data') with create_torrent() as infile: orig = torf.Torrent.read(infile) run(['-i', infile, str(new_content), '--exclude-regex', r'.*\.txt$', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, ignore=('files', 'filetree', 'name', 'piece_size', 'pieces', 'size')) assert tuple(new.files) == (torf.File('new content/some image.jpg', size=10),) assert new.filetree == {'new content': {'some image.jpg': torf.File('new content/some image.jpg', size=10)}} assert new.name == 'new content' assert new.size == len('image data') def test_edit_name(create_torrent, tmp_path, assert_torrents_equal): outfile = str(tmp_path / 'out.torrent') with create_torrent() as infile: orig = torf.Torrent.read(infile) run(['-i', infile, '--name', 'new name', '-o', outfile]) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, ignore=('name', 'files', 'filetree')) assert new.name == 'new name' for of,nf in zip(orig.files, new.files): assert nf.parts[0] == 'new name' assert nf.parts[1:] == of.parts[1:] assert new.filetree == {'new name': {'Anotherthing.iso': 
torf.File('new name/Anotherthing.iso', size=9), 'Something.jpg': torf.File('new name/Something.jpg', size=9), 'Thirdthing.txt': torf.File('new name/Thirdthing.txt', size=9)}} def test_edit_invalid_torrent_with_validation_enabled(tmp_path, capsys): infile = tmp_path / 'in.torrent' outfile = tmp_path / 'out.torrent' with open(infile, 'wb') as f: f.write(b'd1:2i3e4:thisl2:is3:note5:validd2:is2:ok8:metainfol3:but4:thateee') with patch('sys.exit') as mock_exit: run(['-i', str(infile), '--name', 'New Name', '-o', str(outfile)]) mock_exit.assert_called_once_with(err.Code.READ) cap = capsys.readouterr() assert cap.err == f"{_vars.__appname__}: Invalid metainfo: Missing 'info'\n" assert cap.out == '' assert not os.path.exists(outfile) def test_edit_invalid_torrent_with_validation_disabled(tmp_path, capsys, regex): infile = tmp_path / 'in.torrent' outfile = tmp_path / 'out.torrent' with open(infile, 'wb') as f: f.write(b'd1:2i3e4:thisl2:is3:note5:validd2:is2:ok8:metainfol3:but4:thateee') run(['-i', str(infile), '--name', 'New Name', '-o', str(outfile), '--novalidate']) cap = capsys.readouterr() assert cap.err == f"{_vars.__appname__}: WARNING: Invalid metainfo: Missing 'piece length' in ['info']\n" assert cap.out == regex(r'^Name\tNew Name$', flags=re.MULTILINE) assert cap.out == regex(fr'^Torrent\t{outfile}$', flags=re.MULTILINE) assert os.path.exists(outfile) def test_edit_magnet_uri_and_dont_create_torrent(capsys, regex): magnet = ('magnet:?xt=urn:btih:e167b1fbb42ea72f051f4f50432703308efb8fd1&dn=My+Torrent&xl=142631' '&tr=https%3A%2F%2Flocalhost%3A123%2Fannounce') run(['-i', magnet, '--name', 'New Name', '--notracker', '--webseed', 'http://foo', '--notorrent']) cap = capsys.readouterr() assert cap.err == '' new_magnet = ('magnet:?xt=urn:btih:e167b1fbb42ea72f051f4f50432703308efb8fd1&dn=New+Name&xl=142631' '&ws=http%3A%2F%2Ffoo') assert cap.out == regex(fr'^Magnet\t{re.escape(new_magnet)}\n$', flags=re.MULTILINE) def 
test_edit_magnet_uri_and_create_torrent_with_validation_enabled(capsys, tmp_path, regex): magnet = ('magnet:?xt=urn:btih:e167b1fbb42ea72f051f4f50432703308efb8fd1&dn=My+Torrent&xl=142631' '&tr=https%3A%2F%2Flocalhost%3A123%2Fannounce') outfile = tmp_path / 'out.torrent' with patch('sys.exit') as mock_exit: run(['-i', magnet, '--name', 'New Name', '--notracker', '--tracker', 'http://bar', '-o', str(outfile)]) mock_exit.assert_called_once_with(err.Code.READ) cap = capsys.readouterr() assert cap.err == (f"{_vars.__appname__}: https://localhost:123/file?info_hash=%E1g%B1%FB%B4.%A7/%05%1FOPC%27%030%8E%FB%8F%D1" f': Connection refused\n' f"{_vars.__appname__}: Invalid metainfo: Missing 'piece length' in ['info']\n") def test_edit_magnet_uri_and_create_torrent_with_validation_disabled(capsys, tmp_path, regex): magnet = ('magnet:?xt=urn:btih:e167b1fbb42ea72f051f4f50432703308efb8fd1&dn=My+Torrent&xl=142631' '&tr=https%3A%2F%2Flocalhost%3A123%2Fannounce') outfile = tmp_path / 'out.torrent' run(['-i', magnet, '--name', 'New Name', '--notracker', '--tracker', 'http://bar', '-o', str(outfile), '--novalidate']) cap = capsys.readouterr() assert cap.err == (f"{_vars.__appname__}: https://localhost:123/file?info_hash=%E1g%B1%FB%B4.%A7/%05%1FOPC%27%030%8E%FB%8F%D1" f': Connection refused\n' f"{_vars.__appname__}: WARNING: Invalid metainfo: Missing 'piece length' in ['info']\n") new_magnet = ('magnet:?xt=urn:btih:e167b1fbb42ea72f051f4f50432703308efb8fd1&dn=New+Name&xl=142631' '&tr=http%3A%2F%2Fbar') assert cap.out == regex(fr'^Magnet\t{re.escape(new_magnet)}$', flags=re.MULTILINE) assert cap.out == regex(fr'^Torrent\t{re.escape(str(outfile))}$', flags=re.MULTILINE) torrent = torf.Torrent.read(outfile, validate=False) assert torrent.size == 142631 assert torrent.name == 'New Name' assert torrent.trackers == [['http://bar']] @pytest.mark.parametrize( argnames='merges, exp_result', argvalues=( ( [ '{"creation date": 1352534887}', '{"info": {"foo": ["Hello", "World!"], "bar": "baz", 
"private": null, "nosuchkey": null}}', '{"created by": null}', ], { 'creation_date': datetime(2012, 11, 10, 9, 8, 7), 'created_by': None, 'private': None, 'path_map': { ('info', 'foo'): ['Hello', 'World!'], ('info', 'bar'): 'baz', }, }, ), ( ['["Hello", "World!"]'], err.CliError("Not a JSON object: ['Hello', 'World!']"), ), ), ids=lambda v: repr(v), ) def test_merge_option(merges, exp_result, create_torrent, tmp_path, assert_torrents_equal, capsys): outfile = str(tmp_path / 'out.torrent') with create_torrent() as infile: orig = torf.Torrent.read(infile) cmd = ['-i', infile, '-o', outfile] for merge in merges: cmd.extend(('--merge', merge)) if isinstance(exp_result, err.Error): with patch('sys.exit') as mock_exit: run(cmd) mock_exit.assert_called_once_with(exp_result.exit_code) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {str(exp_result)}\n' else: run(cmd) new = torf.Torrent.read(outfile) assert_torrents_equal(orig, new, **exp_result) rndusr-torf-cli-688c8c0/tests/test_errors.py000066400000000000000000000074301463253735700212230ustar00rootroot00000000000000import errno import pytest import torf from torfcli import _errors as err def test_CliError(): for cls,args,kwargs in ((err.CliError, ('invalid argument: --foo',), {}), (err.Error, ('invalid argument: --foo', err.Code.CLI), {}), (err.Error, ('invalid argument: --foo',), {'code': err.Code.CLI})): with pytest.raises(err.CliError) as exc_info: raise cls(*args, **kwargs) assert exc_info.value.exit_code is err.Code.CLI assert str(exc_info.value) == 'invalid argument: --foo' def test_ConfigError(): for cls,args,kwargs in ((err.ConfigError, ('config error',), {}), (err.Error, ('config error', err.Code.CONFIG), {}), (err.Error, ('config error',), {'code': err.Code.CONFIG})): with pytest.raises(err.ConfigError) as exc_info: raise cls(*args, **kwargs) assert exc_info.value.exit_code is err.Code.CONFIG assert str(exc_info.value) == 'config error' def test_ReadError(): for cls,args,kwargs in 
((err.ReadError, ('path/to/file: No such file or directory',), {}), (err.Error, (torf.ReadError(errno.ENOENT, 'path/to/file'),), {}), (err.Error, (torf.PathError('path/to/file', msg='No such file or directory'),), {}), (err.Error, ('path/to/file: No such file or directory', err.Code.READ), {}), (err.Error, ('path/to/file: No such file or directory',), {'code': err.Code.READ})): with pytest.raises(err.ReadError) as exc_info: raise cls(*args, **kwargs) assert exc_info.value.exit_code is err.Code.READ assert str(exc_info.value) == 'path/to/file: No such file or directory' def test_WriteError(): for cls,args,kwargs in ((err.WriteError, ('path/to/file: No space left on device',), {}), (err.Error, (torf.WriteError(errno.ENOSPC, 'path/to/file'),), {}), (err.Error, ('path/to/file: No space left on device', err.Code.WRITE), {}), (err.Error, ('path/to/file: No space left on device',), {'code': err.Code.WRITE})): with pytest.raises(err.WriteError) as exc_info: raise cls(*args, **kwargs) assert exc_info.value.exit_code is err.Code.WRITE assert str(exc_info.value) == 'path/to/file: No space left on device' def test_VerifyError(): for cls,args,kwargs in ((err.VerifyError, (), {'content': 'path/to/content', 'torrent': 'path/to/torrent'}), (err.Error, ('path/to/content does not satisfy path/to/torrent', err.Code.VERIFY), {}), (err.Error, ('path/to/content does not satisfy path/to/torrent',), {'code': err.Code.VERIFY})): with pytest.raises(err.VerifyError) as exc_info: raise cls(*args, **kwargs) assert exc_info.value.exit_code is err.Code.VERIFY assert str(exc_info.value) == 'path/to/content does not satisfy path/to/torrent' with pytest.raises(err.VerifyError) as exc_info: raise err.Error(torf.VerifyNotDirectoryError('path/to/file')) assert exc_info.value.exit_code is err.Code.VERIFY assert str(exc_info.value) == 'path/to/file: Not a directory' with pytest.raises(err.VerifyError) as exc_info: raise err.Error(torf.VerifyIsDirectoryError('path/to/file')) assert 
exc_info.value.exit_code is err.Code.VERIFY assert str(exc_info.value) == 'path/to/file: Is a directory' with pytest.raises(err.VerifyError) as exc_info: raise err.Error(torf.VerifyFileSizeError('path/to/file', 123, 456)) assert exc_info.value.exit_code is err.Code.VERIFY assert str(exc_info.value) == 'path/to/file: Too small: 123 instead of 456 bytes' rndusr-torf-cli-688c8c0/tests/test_info.py000066400000000000000000000427401463253735700206450ustar00rootroot00000000000000import os import re from datetime import datetime from unittest.mock import patch from torfcli import _errors as err from torfcli import _vars, run def test_nonexisting_torrent_file(capsys): nonexising_path = '/no/such/file' with patch('sys.exit') as mock_exit: run(['-i', nonexising_path]) mock_exit.assert_called_once_with(err.Code.READ) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {nonexising_path}: No such file or directory\n' assert cap.out == '' def test_insufficient_permissions(capsys, create_torrent): with create_torrent() as torrent_file: os.chmod(torrent_file, 0o000) with patch('sys.exit') as mock_exit: run(['-i', torrent_file]) mock_exit.assert_called_once_with(err.Code.READ) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {torrent_file}: Permission denied\n' assert cap.out == '' def test_magnet(capsys, create_torrent, human_readable, clear_ansi, regex): with create_torrent(name='foo') as torrent_file: with human_readable(True): run(['-i', torrent_file]) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex(r'^\s*Magnet magnet:\?xt=urn:btih:[0-9a-z]{40}', flags=re.MULTILINE) assert cap.err == '' with human_readable(False): run(['-i', torrent_file]) cap = capsys.readouterr() assert cap.out == regex(r'^Magnet\tmagnet:\?xt=urn:btih:[0-9a-z]{40}', flags=re.MULTILINE) assert cap.err == '' def test_nomagnet(capsys, create_torrent, human_readable, clear_ansi, regex): with create_torrent(name='foo') as torrent_file: with human_readable(True): 
run(['-i', torrent_file, '--nomagnet']) cap = capsys.readouterr() assert clear_ansi(cap.out) != regex(r'^\s*Magnet', flags=re.MULTILINE) assert cap.err == '' with human_readable(False): run(['-i', torrent_file, '--nomagnet']) cap = capsys.readouterr() assert cap.out != regex(r'^Magnet', flags=re.MULTILINE) assert cap.err == '' def test_name(capsys, create_torrent, human_readable, clear_ansi, regex): with create_torrent(name='foo') as torrent_file: with human_readable(True): run(['-i', torrent_file]) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex(r'^\s*Name foo$', flags=re.MULTILINE) assert cap.err == '' with human_readable(False): run(['-i', torrent_file]) cap = capsys.readouterr() assert cap.out == regex(r'^Name\tfoo$', flags=re.MULTILINE) assert cap.err == '' def test_info_hash(capsys, create_torrent, human_readable, clear_ansi, regex): with create_torrent() as torrent_file: with human_readable(True): run(['-i', torrent_file]) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex(r'^\s*Info Hash [0-9a-z]{40}$', flags=re.MULTILINE) assert cap.err == '' with human_readable(False): run(['-i', torrent_file]) cap = capsys.readouterr() assert cap.out == regex(r'^Info Hash\t[0-9a-z]{40}$', flags=re.MULTILINE) assert cap.err == '' def test_size(capsys, create_torrent, human_readable, clear_ansi, regex): with create_torrent() as torrent_file: with human_readable(True): run(['-i', torrent_file]) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex(r'^\s*Size [0-9]+ [KMGT]?i?B$', flags=re.MULTILINE) assert cap.err == '' with human_readable(False): run(['-i', torrent_file]) cap = capsys.readouterr() assert cap.out == regex(r'^Size\t[0-9]+$', flags=re.MULTILINE) assert cap.err == '' def test_piece_size(capsys, create_torrent, human_readable, clear_ansi, regex): with create_torrent() as torrent_file: with human_readable(True): run(['-i', torrent_file]) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex(r'^\s*Piece Size [0-9]+ 
[KMGT]?i?B$', flags=re.MULTILINE) assert cap.err == '' with human_readable(False): run(['-i', torrent_file]) cap = capsys.readouterr() assert cap.out == regex(r'^Piece Size\t[0-9]+$', flags=re.MULTILINE) assert cap.err == '' def test_piece_count(capsys, create_torrent, human_readable, clear_ansi, regex): with create_torrent() as torrent_file: with human_readable(True): run(['-i', torrent_file]) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex(r'^\s*Piece Count [0-9]+$', flags=re.MULTILINE) assert cap.err == '' with human_readable(False): run(['-i', torrent_file]) cap = capsys.readouterr() assert cap.out == regex(r'^Piece Count\t[0-9]+$', flags=re.MULTILINE) assert cap.err == '' def test_single_line_comment(capsys, create_torrent, human_readable, clear_ansi, regex): with create_torrent(comment='This is my torrent.') as torrent_file: with human_readable(True): run(['-i', torrent_file]) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex(r'^\s*Comment This is my torrent\.$', flags=re.MULTILINE) assert cap.err == '' with human_readable(False): run(['-i', torrent_file]) cap = capsys.readouterr() assert cap.out == regex(r'^Comment\tThis is my torrent\.$', flags=re.MULTILINE) assert cap.err == '' def test_multiline_comment(capsys, create_torrent, human_readable, clear_ansi, regex): comment = 'This is my torrent.\nShare it!' 
with create_torrent(comment=comment) as torrent_file: with human_readable(True): run(['-i', torrent_file]) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex(r'^(\s*)Comment This is my torrent\.\n' r'\1 Share it!$', flags=re.MULTILINE) assert cap.err == '' with human_readable(False): run(['-i', torrent_file]) cap = capsys.readouterr() assert cap.out == regex(r'^Comment\tThis is my torrent\.\tShare it!$', flags=re.MULTILINE) assert cap.err == '' def test_creation_date(capsys, create_torrent, human_readable, clear_ansi, regex): date = datetime(2000, 5, 10, 0, 30, 45) with create_torrent(creation_date=date) as torrent_file: with human_readable(True): run(['-i', torrent_file]) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex(r'^\s*Created 2000-05-10 00:30:45$', flags=re.MULTILINE) assert cap.err == '' with human_readable(False): run(['-i', torrent_file]) cap = capsys.readouterr() exp_timestamp = int(date.timestamp()) assert cap.out == regex(rf'^Created\t{exp_timestamp}$', flags=re.MULTILINE) assert cap.err == '' def test_created_by(capsys, create_torrent, human_readable, clear_ansi, regex): with create_torrent(created_by='foo') as torrent_file: with human_readable(True): run(['-i', torrent_file]) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex(r'^\s*Created By foo$', flags=re.MULTILINE) assert cap.err == '' with human_readable(False): run(['-i', torrent_file]) cap = capsys.readouterr() assert cap.out == regex(r'^Created By\tfoo$', flags=re.MULTILINE) assert cap.err == '' def test_private(capsys, create_torrent, human_readable, clear_ansi, regex): with create_torrent(private=True) as torrent_file: with human_readable(True): run(['-i', torrent_file]) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex(r'^\s*Private yes$', flags=re.MULTILINE) assert cap.err == '' with human_readable(False): run(['-i', torrent_file]) cap = capsys.readouterr() assert cap.out == regex(r'^Private\tyes$', flags=re.MULTILINE) assert cap.err == '' 
with create_torrent(private=False) as torrent_file: with human_readable(True): run(['-i', torrent_file]) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex(r'^\s*Private no', flags=re.MULTILINE) assert cap.err == '' with human_readable(False): run(['-i', torrent_file]) cap = capsys.readouterr() assert cap.out == regex(r'^Private\tno', flags=re.MULTILINE) assert cap.err == '' def test_trackers___single_tracker_per_tier(capsys, create_torrent, human_readable, clear_ansi, regex): trackers = ['http://tracker1.1', 'http://tracker2.1'] with create_torrent(trackers=trackers) as torrent_file: with human_readable(True): run(['-i', torrent_file]) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex(rf'^(\s*)Trackers Tier 1: {trackers[0]}\n' rf'\1 Tier 2: {trackers[1]}$', flags=re.MULTILINE) assert cap.err == '' with human_readable(False): run(['-i', torrent_file]) cap = capsys.readouterr() exp_trackers = '\t'.join(trackers) assert cap.out == regex(rf'^Trackers\t{exp_trackers}$', flags=re.MULTILINE) assert cap.err == '' def test_trackers___multiple_trackers_per_tier(capsys, create_torrent, human_readable, clear_ansi, regex): trackers = ['http://tracker1.1', ['http://tracker2.1', 'http://tracker2.2'], ['http://tracker3.1']] with create_torrent(trackers=trackers) as torrent_file: with human_readable(True): run(['-i', torrent_file]) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex(r'^(\s*)Trackers Tier 1: http://tracker1.1\n' r'\1 Tier 2: http://tracker2.1\n' r'\1 http://tracker2.2\n' r'\1 Tier 3: http://tracker3.1$', flags=re.MULTILINE) assert cap.err == '' with human_readable(False): run(['-i', torrent_file]) cap = capsys.readouterr() exp_trackers = '\t'.join(('http://tracker1.1', 'http://tracker2.1', 'http://tracker2.2', 'http://tracker3.1')) assert cap.out == regex(rf'^Trackers\t{exp_trackers}$', flags=re.MULTILINE) assert cap.err == '' def test_webseeds(capsys, create_torrent, human_readable, clear_ansi, regex): webseeds = 
['http://webseed1', 'http://webseed2'] with create_torrent(webseeds=webseeds) as torrent_file: with human_readable(True): run(['-i', torrent_file]) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex((rf'^(\s*)Webseeds {webseeds[0]}\n' rf'\1 {webseeds[1]}$'), flags=re.MULTILINE) assert cap.err == '' with human_readable(False): run(['-i', torrent_file]) cap = capsys.readouterr() exp_webseeds = '\t'.join(webseeds) assert cap.out == regex(rf'^Webseeds\t{exp_webseeds}$', flags=re.MULTILINE) assert cap.err == '' def test_httpseeds(capsys, create_torrent, human_readable, clear_ansi, regex): httpseeds = ['http://httpseed1', 'http://httpseed2'] with create_torrent(httpseeds=httpseeds) as torrent_file: with human_readable(True): run(['-i', torrent_file]) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex((rf'^(\s*)HTTP Seeds {httpseeds[0]}\n' rf'\1 {httpseeds[1]}$'), flags=re.MULTILINE) assert cap.err == '' with human_readable(False): run(['-i', torrent_file]) cap = capsys.readouterr() exp_httpseeds = '\t'.join(httpseeds) assert cap.out == regex(rf'^HTTP Seeds\t{exp_httpseeds}$', flags=re.MULTILINE) assert cap.err == '' def test_file_tree_and_file_count(capsys, create_torrent, human_readable, tmp_path, clear_ansi, regex): root = tmp_path / 'root' (root / 'subdir1' / 'subdir1.0' / 'subdir1.0.0').mkdir(parents=True) (root / 'subdir2').mkdir(parents=True) (root / 'subdir1' / 'file1').write_text('data') (root / 'subdir1' / 'subdir1.0' / 'file2').write_text('data') (root / 'subdir1' / 'subdir1.0' / 'subdir1.0.0' / 'file3').write_text('data') (root / 'subdir2' / 'file4').write_text('data') with create_torrent(path=str(root)) as torrent_file: with human_readable(True): run(['-i', torrent_file]) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex(r'^\s*File Count 4$', flags=re.MULTILINE) assert clear_ansi(cap.out) == regex(r'^(\s*) Files root\n' r'\1 ├─subdir1\n' r'\1 │ ├─file1 \[4 B\]\n' r'\1 │ └─subdir1.0\n' r'\1 │ ├─file2 \[4 B\]\n' r'\1 │ 
└─subdir1.0.0\n' r'\1 │ └─file3 \[4 B\]\n' r'\1 └─subdir2\n' r'\1 └─file4 \[4 B\]$', flags=re.MULTILINE) assert cap.err == '' with human_readable(False): run(['-i', torrent_file]) cap = capsys.readouterr() assert cap.out == regex(r'^File Count\t4$', flags=re.MULTILINE) exp_files = '\t'.join(('root/subdir1/file1', 'root/subdir1/subdir1.0/file2', 'root/subdir1/subdir1.0/subdir1.0.0/file3', 'root/subdir2/file4')) assert cap.out == regex(rf'^Files\t{exp_files}$', flags=re.MULTILINE) assert cap.err == '' def test_reading_magnet(capsys, human_readable, clear_ansi, regex): magnet = ('magnet:?xt=urn:btih:e167b1fbb42ea72f051f4f50432703308efb8fd1&dn=My+Torrent&xl=142631' '&tr=https%3A%2F%2Flocalhost%3A123%2Fannounce' '&xs=https%3A%2F%2Flocalhost%3A123%2FMy+Torrent.torrent' '&as=https%3A%2F%2Flocalhost%3A456%2FMy+Torrent.torrent' '&ws=https%3A%2F%2Flocalhost%2FMy+Torrent') with human_readable(True): run(['-i', magnet]) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex((r'^\s*Name My Torrent\n' r'\s*Size \d+\.\d+ [TMK]iB\n' r'\s*Tracker https://localhost:123/announce\n' r'\s*Webseed https://localhost/My\+Torrent\n' r'\s*File Count \d+\n' r'\s*Files My Torrent \[\d+\.\d+ [TMK]iB\]\n' r'\s*Magnet magnet:\?xt=urn:btih:[0-9a-z]{40}.*?\n$')) assert cap.err == regex((rf'^{_vars.__appname__}: https://localhost:123/My\+Torrent.torrent: [\w\s]+\n' rf'{_vars.__appname__}: https://localhost:456/My\+Torrent.torrent: [\w\s]+\n' rf'{_vars.__appname__}: https://localhost/My\+Torrent.torrent: [\w\s]+\n' rf'{_vars.__appname__}: https://localhost:123/file\?info_hash=' r'%E1g%B1%FB%B4\.%A7/%05%1FOPC%27%030%8E%FB%8F%D1: [\w\s]+\n$')) with human_readable(False): run(['-i', magnet]) cap = capsys.readouterr() assert cap.out == regex((r'^Name\tMy Torrent\n' r'Size\t\d+\n' r'Tracker\thttps://localhost:123/announce\n' r'Webseed\thttps://localhost/My\+Torrent\n' r'File Count\t\d+\n' r'Files\tMy Torrent\n' r'Magnet\tmagnet:\?xt=urn:btih:[0-9a-z]{40}.*?\n$')) assert cap.err == 
regex((rf'^{_vars.__appname__}: https://localhost:123/My\+Torrent.torrent: [\w\s]+\n' rf'{_vars.__appname__}: https://localhost:456/My\+Torrent.torrent: [\w\s]+\n' rf'{_vars.__appname__}: https://localhost/My\+Torrent.torrent: [\w\s]+\n' rf'{_vars.__appname__}: https://localhost:123/file\?info_hash=' r'%E1g%B1%FB%B4\.%A7/%05%1FOPC%27%030%8E%FB%8F%D1: [\w\s]+\n$')) def test_reading_invalid_magnet(capsys): magnet = 'magnet:?xt=urn:btih:e167b1fbb42ea72f051f4f50432703308efb8fd1&xl=not_an_int' with patch('sys.exit') as mock_exit: run(['-i', magnet]) mock_exit.assert_called_once_with(err.Code.READ) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: not_an_int: Invalid exact length ("xl")\n' assert cap.out == '' rndusr-torf-cli-688c8c0/tests/test_json.py000066400000000000000000000105451463253735700206610ustar00rootroot00000000000000import json import time from unittest.mock import patch import pytest from torfcli import _errors as err from torfcli import _vars, run def test_json_contains_standard_fields(capsys, mock_content): now = time.time() run([str(mock_content), '--json']) cap = capsys.readouterr() j = json.loads(cap.out) assert isinstance(j['Name'], str) assert isinstance(j['Size'], int) assert j['Created'] == pytest.approx(now - 1, abs=2) assert j['Created By'] == f'{_vars.__appname__} {_vars.__version__}' assert isinstance(j['Piece Size'], int) assert isinstance(j['Piece Count'], int) assert isinstance(j['File Count'], int) assert isinstance(j['Files'], list) for f in j['Files']: assert isinstance(f, dict) assert tuple(f.keys()) == ('Path', 'Size') assert isinstance(f['Path'], str) assert isinstance(f['Size'], int) assert isinstance(j['Info Hash'], str) assert len(j['Info Hash']) == 40 assert j['Magnet'].startswith('magnet:?xt=urn:btih:') assert isinstance(j['Torrent'], str) def test_json_does_not_contain_progress(capsys, mock_content): run([str(mock_content), '--json']) cap = capsys.readouterr() assert cap.err == '' j = json.loads(cap.out) assert 
'Progress' not in j def test_json_contains_cli_errors(capsys): with patch('sys.exit') as mock_exit: run(['--foo', '--json']) mock_exit.assert_called_once_with(err.Code.CLI) cap = capsys.readouterr() assert cap.err == '' j = json.loads(cap.out) assert j['Error'] == ['Unrecognized arguments: --foo'] def test_json_contains_config_errors(capsys, cfgfile): cfgfile.write_text(''' foo ''') with patch('sys.exit') as mock_exit: run(['--json']) mock_exit.assert_called_once_with(err.Code.CONFIG) cap = capsys.readouterr() assert cap.err == '' j = json.loads(cap.out) assert j['Error'] == [f'{cfgfile}: Unrecognized arguments: --foo'] def test_json_contains_regular_errors(capsys): with patch('sys.exit') as mock_exit: run(['-i', 'path/to/nonexisting.torrent', '--json']) mock_exit.assert_called_once_with(err.Code.READ) cap = capsys.readouterr() assert cap.err == '' j = json.loads(cap.out) assert j['Error'] == ['path/to/nonexisting.torrent: No such file or directory'] def test_json_contains_sigint(capsys, mock_create_mode, mock_content): mock_create_mode.side_effect = KeyboardInterrupt() with patch('sys.exit') as mock_exit: run([str(mock_content), '--json']) mock_exit.assert_called_once_with(err.Code.ABORTED) cap = capsys.readouterr() assert cap.err == '' j = json.loads(cap.out) assert j['Error'] == ['Aborted'] def test_json_contains_verification_errors(capsys, tmp_path, create_torrent): content_path = tmp_path / 'file.jpg' content_path.write_text('some data') with create_torrent(path=content_path) as torrent_file: content_path.write_text('some data!!!') with patch('sys.exit') as mock_exit: run([str(content_path), '-i', torrent_file, '--json']) mock_exit.assert_called_once_with(err.Code.VERIFY) cap = capsys.readouterr() assert cap.err == '' j = json.loads(cap.out) assert j['Error'] == [f'{content_path}: Too big: 12 instead of 9 bytes', f'{content_path} does not satisfy {torrent_file}'] def test_json_with_magnet_uri(capsys, regex): # Notice the double "&" in the URI, which is 
syntactically correct but # should be fixed in the output. magnet = ('magnet:?xt=urn:btih:e167b1fbb42ea72f051f4f50432703308efb8fd1&dn=My+Torrent&xl=142631' '&tr=https%3A%2F%2Flocalhost%3A123%2Fannounce&&tr=https%3A%2F%2Flocalhost%3A456%2Fannounce') run(['-i', magnet, '--json']) cap = capsys.readouterr() assert cap.err == '' assert json.loads(cap.out) == { "Error": ['https://localhost:123/file?info_hash=%E1g%B1%FB%B4.%A7/%05%1FOPC%27%030%8E%FB%8F%D1: Connection refused', 'https://localhost:456/file?info_hash=%E1g%B1%FB%B4.%A7/%05%1FOPC%27%030%8E%FB%8F%D1: Connection refused'], 'Name': 'My Torrent', 'Size': 142631, 'Trackers': ['https://localhost:123/announce', 'https://localhost:456/announce'], 'File Count': 1, 'Files': [ {'Path': 'My Torrent', 'Size': 142631}, ], 'Magnet': magnet.replace('&&', '&'), } rndusr-torf-cli-688c8c0/tests/test_metainfo.py000066400000000000000000000206421463253735700215110ustar00rootroot00000000000000import datetime import json from unittest.mock import patch import pytest import torf from torfcli import _errors as err from torfcli import _vars, run @pytest.fixture def nonstandard_torrent(tmp_path): (tmp_path / 'content').mkdir() (tmp_path / 'content' / 'file1').write_text('foo') (tmp_path / 'content' / 'file2').write_text('bar') (tmp_path / 'content' / 'dir').mkdir() (tmp_path / 'content' / 'dir' / 'file3').write_text('baz') t = torf.Torrent(path=tmp_path / 'content', private=True, trackers=('https://foo.example.org',)) t.metainfo['foo'] = 'bar' t.metainfo['info']['baz'] = (1, 2, 3) t.metainfo['info']['files'][0]['sneaky'] = 'pete' t.generate() t.write(tmp_path / 'nonstandard.torrent') return str(tmp_path / 'nonstandard.torrent') def test_metainfo_with_verbosity_level_zero(capsys, nonstandard_torrent): run(['-i', nonstandard_torrent, '--metainfo']) cap = capsys.readouterr() assert cap.err == '' assert json.loads(cap.out) == {'created by': f'torf {torf.__version__}', 'announce': 'https://foo.example.org', 'info': {'name': 'content', 'piece 
length': 16384, 'private': 1, 'files': [{'length': 3, 'path': ['dir', 'file3']}, {'length': 3, 'path': ['file1']}, {'length': 3, 'path': ['file2']}]}} def test_metainfo_with_verbosity_level_one(capsys, nonstandard_torrent): run(['-i', nonstandard_torrent, '--metainfo', '--verbose']) cap = capsys.readouterr() assert cap.err == '' assert json.loads(cap.out) == {'created by': f'torf {torf.__version__}', 'announce': 'https://foo.example.org', 'foo': 'bar', 'info': {'baz': [1, 2, 3], 'private': 1, 'files': [{'length': 3, 'path': ['dir', 'file3'], 'sneaky': 'pete'}, {'length': 3, 'path': ['file1']}, {'length': 3, 'path': ['file2']}], 'name': 'content', 'piece length': 16384}} def test_metainfo_with_verbosity_level_two(capsys, nonstandard_torrent): run(['-i', nonstandard_torrent, '--metainfo', '--verbose', '--verbose']) cap = capsys.readouterr() assert cap.err == '' assert json.loads(cap.out) == {'created by': f'torf {torf.__version__}', 'announce': 'https://foo.example.org', 'foo': 'bar', 'info': {'baz': [1, 2, 3], 'private': 1, 'files': [{'length': 3, 'path': ['dir', 'file3'], 'sneaky': 'pete'}, {'length': 3, 'path': ['file1']}, {'length': 3, 'path': ['file2']}], 'name': 'content', 'piece length': 16384, 'pieces': 'YscFPSkTuTXkBSgIyyaqj/HVRXU='}} def test_metainfo_uses_one_and_zero_for_boolean_values(capsys, create_torrent): with create_torrent(private=True) as torrent_file: run(['-i', torrent_file, '--metainfo']) cap = capsys.readouterr() assert cap.err == '' assert json.loads(cap.out)['info']['private'] == 1 def test_metainfo_with_disabled_validation(capsys, tmp_path): with open(tmp_path / 'nonstandard.torrent', 'wb') as f: f.write(b'd1:2i3e4:thisl2:is3:note5:validd2:is2:ok8:metainfol3:but4:thateee') torf.Torrent.read(tmp_path / 'nonstandard.torrent', validate=False) with patch('sys.exit') as mock_exit: run(['-i', str(tmp_path / 'nonstandard.torrent'), '--metainfo']) mock_exit.assert_called_once_with(err.Code.READ) cap = capsys.readouterr() assert cap.err == 
f"{_vars.__appname__}: Invalid metainfo: Missing 'info'\n" assert json.loads(cap.out) == {} run(['-i', str(tmp_path / 'nonstandard.torrent'), '--metainfo', '--novalidate']) cap = capsys.readouterr() assert cap.err == f"{_vars.__appname__}: WARNING: Invalid metainfo: Missing 'name' in ['info']\n" assert json.loads(cap.out) == {} run(['-i', str(tmp_path / 'nonstandard.torrent'), '--metainfo', '--novalidate', '--verbose']) cap = capsys.readouterr() assert cap.err == f"{_vars.__appname__}: WARNING: Invalid metainfo: Missing 'name' in ['info']\n" assert json.loads(cap.out) == {"2": 3, "this": ["is", "not"], "valid": {"is": "ok", "metainfo": ["but", "that"]}} def test_metainfo_with_unreadable_torrent(capsys): with patch('sys.exit') as mock_exit: run(['-i', 'no/such/path.torrent', '--metainfo']) mock_exit.assert_called_once_with(err.Code.READ) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: no/such/path.torrent: No such file or directory\n' assert json.loads(cap.out) == {} def test_metainfo_when_creating_torrent(capsys, mock_content): run([str(mock_content), '--metainfo', '-vv']) cap = capsys.readouterr() assert cap.err == '' j = json.loads(cap.out) assert 'info' in j assert 'name' in j['info'] assert 'pieces' in j['info'] def test_metainfo_when_editing_torrent(capsys, create_torrent): date = '1999-07-23 14:00' with create_torrent(trackers=['http://foo', 'http://bar']) as orig_torrent: run(['-i', str(orig_torrent), '--comment', 'This comment was not here before.', '--date', date, '--nowebseed', '--webseed', 'https://new.webseeds', '-o', 'new.torrent', '--metainfo', '-v']) cap = capsys.readouterr() assert cap.err == '' j = json.loads(cap.out) assert 'info' in j assert 'name' in j['info'] assert 'pieces' not in j['info'] assert j['comment'] == 'This comment was not here before.' 
assert j['creation date'] == datetime.datetime.strptime(date, '%Y-%m-%d %H:%M').timestamp() assert j['url-list'] == ['https://new.webseeds'] assert j['announce-list'] == [['http://foo'], ['http://bar']] assert j['announce'] == 'http://foo' def test_metainfo_when_verifying_torrent(capsys, create_torrent, mock_content, tmp_path): with create_torrent(path=mock_content) as torrent_file: run(['-i', str(torrent_file), str(mock_content), '--metainfo']) cap = capsys.readouterr() assert cap.err == '' j = json.loads(cap.out) assert 'info' in j assert 'name' in j['info'] assert 'pieces' not in j['info'] with create_torrent(path=mock_content) as torrent_file: wrong_content = (tmp_path / 'wrong_content') wrong_content.write_text('foo') with patch('sys.exit') as mock_exit: run(['-i', str(torrent_file), str(wrong_content), '--metainfo']) mock_exit.assert_called_once_with(err.Code.VERIFY) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {wrong_content} does not satisfy {torrent_file}\n' assert json.loads(cap.out) == {} def test_metainfo_with_magnet_uri(capsys, regex): magnet = ('magnet:?xt=urn:btih:e167b1fbb42ea72f051f4f50432703308efb8fd1&dn=My+Torrent&xl=142631' '&tr=https%3A%2F%2Flocalhost%3A123%2Fannounce&&tr=https%3A%2F%2Flocalhost%3A456%2Fannounce') run(['-i', magnet, '--metainfo']) cap = capsys.readouterr() assert cap.err == regex(rf'^{_vars.__appname__}: https://localhost:123/file\?info_hash=' r'%E1g%B1%FB%B4\.%A7/%05%1FOPC%27%030%8E%FB%8F%D1: [\w\s]+\n' rf'{_vars.__appname__}: https://localhost:456/file\?info_hash=' r'%E1g%B1%FB%B4\.%A7/%05%1FOPC%27%030%8E%FB%8F%D1: [\w\s]+\n$') j = json.loads(cap.out) assert j == {'announce': 'https://localhost:123/announce', 'announce-list': [['https://localhost:123/announce'], ['https://localhost:456/announce']], 'info': {'name': 'My Torrent', 'length': 142631}} rndusr-torf-cli-688c8c0/tests/test_profiles.py000066400000000000000000000103571463253735700215340ustar00rootroot00000000000000import datetime import textwrap 
from unittest.mock import patch from torfcli import _errors as err from torfcli import _vars, run def test_unknown_profile(capsys, cfgfile, mock_content, mock_create_mode): cfgfile.write_text(textwrap.dedent(''' [foo] comment = Foo! ''')) with patch('sys.exit') as mock_exit: run([str(mock_content), '--profile', 'bar']) mock_exit.assert_called_once_with(err.Code.CONFIG) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {cfgfile}: No such profile: bar\n' assert mock_create_mode.call_args is None def test_profile_option(cfgfile, mock_content, mock_create_mode): cfgfile.write_text(textwrap.dedent(''' xseed date = 2000-01-02 [foo] comment = Foo! ''')) run([str(mock_content)]) cfg = mock_create_mode.call_args[0][1] assert cfg['xseed'] is True assert cfg['date'] == datetime.datetime(2000, 1, 2) assert cfg['comment'] is None run([str(mock_content), '--profile', 'foo']) cfg = mock_create_mode.call_args[0][1] assert cfg['xseed'] is True assert cfg['date'] == datetime.datetime(2000, 1, 2) assert cfg['comment'] == 'Foo!' 
def test_overloading_values(cfgfile, mock_content, mock_create_mode): cfgfile.write_text(textwrap.dedent(''' [foo] comment = Foo private [bar] comment = Bar yes [baz] comment = Baz xseed ''')) run([str(mock_content), '--profile', 'foo', '--profile', 'bar']) cfg = mock_create_mode.call_args[0][1] assert cfg['comment'] == 'Bar' assert cfg['private'] is True assert cfg['yes'] is True assert cfg['xseed'] is False run([str(mock_content), '--profile', 'bar', '--profile', 'foo']) cfg = mock_create_mode.call_args[0][1] assert cfg['comment'] == 'Foo' assert cfg['private'] is True assert cfg['yes'] is True assert cfg['xseed'] is False run([str(mock_content), '--profile', 'bar', '--profile', 'baz']) cfg = mock_create_mode.call_args[0][1] assert cfg['comment'] == 'Baz' assert cfg['private'] is None assert cfg['yes'] is True assert cfg['xseed'] is True def test_list_value(cfgfile, mock_content, mock_create_mode): cfgfile.write_text(textwrap.dedent(''' [foo] webseed = https://foo [bar] webseed = https://bar [baz] nowebseed webseed = https://baz ''')) run([str(mock_content), '--profile', 'foo', '--profile', 'bar']) cfg = mock_create_mode.call_args[0][1] assert cfg['webseed'] == ['https://foo', 'https://bar'] assert cfg['nowebseed'] is False run([str(mock_content), '--profile', 'bar', '--profile', 'foo']) cfg = mock_create_mode.call_args[0][1] assert cfg['webseed'] == ['https://bar', 'https://foo'] assert cfg['nowebseed'] is False run([str(mock_content), '--profile', 'bar', '--profile', 'baz']) cfg = mock_create_mode.call_args[0][1] assert cfg['webseed'] == ['https://bar', 'https://baz'] assert cfg['nowebseed'] is True run([str(mock_content), '--profile', 'bar', '--profile', 'baz', '--profile', 'foo']) cfg = mock_create_mode.call_args[0][1] assert cfg['webseed'] == ['https://bar', 'https://baz', 'https://foo'] assert cfg['nowebseed'] is True def test_illegal_configfile_arguments(capsys, cfgfile, mock_content, mock_create_mode): for arg in ('config', 'profile'): 
cfgfile.write_text(textwrap.dedent(f''' [foo] {arg} = foo ''')) with patch('sys.exit') as mock_exit: run(['--config', str(cfgfile), str(mock_content)]) mock_exit.assert_called_once_with(err.Code.CONFIG) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {cfgfile}: Not allowed in config file: {arg}\n' assert mock_create_mode.call_args is None for arg in ('noconfig', 'help', 'version'): cfgfile.write_text(textwrap.dedent(f''' [foo] {arg} ''')) with patch('sys.exit') as mock_exit: run(['--config', str(cfgfile), str(mock_content)]) mock_exit.assert_called_once_with(err.Code.CONFIG) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {cfgfile}: Not allowed in config file: {arg}\n' assert mock_create_mode.call_args is None rndusr-torf-cli-688c8c0/tests/test_progress.py000066400000000000000000000130421463253735700215470ustar00rootroot00000000000000import os from unittest.mock import patch import pytest from torfcli import _errors as err from torfcli import _vars, run @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_creating_prints_performance_summary_on_success(tmp_path, human_readable, hr_enabled, capsys, clear_ansi, regex): content_path = tmp_path / 'foo' content_path.write_text('bar') with human_readable(hr_enabled): run([str(content_path)]) cap = capsys.readouterr() if hr_enabled: pattern = (r'\s*Progress 100.00 % \| \d+:\d{2}:\d{2} total \| \s*\d+\.\d{2} [KMGT]iB/s\n' r'\s*Info Hash [0-9a-f]{40}\n' r'\s*Magnet magnet:\?xt=urn:btih:[0-9a-f]{40}&dn=foo&xl=3\n' r'\s*Torrent foo.torrent\n$') assert clear_ansi(cap.out) == regex(pattern), clear_ansi(cap.out) else: pattern = (r'\nProgress\t100\.000\t\d+\t\d+\t\d+\t\d+\t\d+\t' + str(content_path) + '\n' r'Info Hash\t[0-9a-f]{40}\n' r'Magnet\tmagnet:\?xt=urn:btih:[0-9a-f]{40}&dn=foo&xl=3\n' r'Torrent\tfoo.torrent\n$') assert cap.out == regex(pattern), cap.out @pytest.mark.parametrize('hr_enabled', (True, False), 
ids=('human_readable=True', 'human_readable=False')) def test_creating_keeps_progress_when_aborted(tmp_path, human_readable, hr_enabled, capsys, clear_ansi, monkeypatch, regex): content_path = tmp_path / 'foo' content_path.write_bytes(os.urandom(int(1e6))) import torfcli if hr_enabled: status_reporter_cls = torfcli._ui._HumanStatusReporter else: status_reporter_cls = torfcli._ui._MachineStatusReporter class MockStatusReporter(status_reporter_cls): def generate_callback(self, torrent, filepath, pieces_done, pieces_total): if pieces_done / pieces_total >= 0.5: raise KeyboardInterrupt() else: super().generate_callback(torrent, filepath, pieces_done, pieces_total) monkeypatch.setattr(torfcli._ui, status_reporter_cls.__name__, MockStatusReporter) monkeypatch.setattr(torfcli._main, 'PROGRESS_INTERVAL', 0) with human_readable(hr_enabled): with patch('sys.exit') as mock_exit: run([str(content_path)]) mock_exit.assert_called_once_with(err.Code.ABORTED) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: Aborted\n' if hr_enabled: pattern = (r'\s*Progress \d+:\d{2}:\d{2} elapsed \| \d+:\d{2}:\d{2} left \| ' r'\d+:\d{2}:\d{2} total \| ETA: \d{2}:\d{2}:\d{2}' r'\d{1,2}\.\d{2} % ▕foo\s+▏ \s*\d+\.\d{2} [KMGT]iB/s\n\n$') assert clear_ansi(cap.out) == regex(pattern), clear_ansi(cap.out) else: pattern = (r'\nProgress\t\d+\.\d+\t\d+\t\d+\t\d+\t\d+\t\d+\t' + str(content_path) + '\n$') assert cap.out == regex(pattern), cap.out @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_verifying_prints_performance_summary_on_success(tmp_path, human_readable, hr_enabled, capsys, clear_ansi, regex): content_path = tmp_path / 'foo' content_path.write_text('bar') run([str(content_path)]) with human_readable(hr_enabled): run([str(content_path), '-i', 'foo.torrent']) cap = capsys.readouterr() if hr_enabled: pattern = r'\s*Progress 100.00 % \| \d+:\d{2}:\d{2} total \| \s*\d+\.\d{2} [KMGT]iB/s\n$' assert clear_ansi(cap.out) 
== regex(pattern) else: pattern = rf'Progress\t100.000\t\d+\t\d+\t\d+\t\d+\t\d+\t{content_path}\n$' assert clear_ansi(cap.out) == regex(pattern) @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_verifying_keeps_progress_when_aborted(tmp_path, human_readable, hr_enabled, capsys, clear_ansi, monkeypatch, regex): content_path = tmp_path / 'foo' content_path.write_bytes(os.urandom(int(1e6))) run([str(content_path)]) import torfcli if hr_enabled: status_reporter_cls = torfcli._ui._HumanStatusReporter else: status_reporter_cls = torfcli._ui._MachineStatusReporter class MockStatusReporter(status_reporter_cls): def verify_callback(self, torrent, filepath, pieces_done, pieces_total, piece_index, piece_hash, exception): if pieces_done / pieces_total >= 0.5: raise KeyboardInterrupt() else: super().verify_callback(torrent, filepath, pieces_done, pieces_total, piece_index, piece_hash, exception) monkeypatch.setattr(torfcli._ui, status_reporter_cls.__name__, MockStatusReporter) monkeypatch.setattr(torfcli._main, 'PROGRESS_INTERVAL', 0) with human_readable(hr_enabled): with patch('sys.exit') as mock_exit: run([str(content_path), '-i', 'foo.torrent']) mock_exit.assert_called_once_with(err.Code.ABORTED) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: Aborted\n' if hr_enabled: pattern = (r'\s*Progress \d+:\d{2}:\d{2} elapsed \| \d+:\d{2}:\d{2} left \| ' r'\d+:\d{2}:\d{2} total \| ETA: \d{2}:\d{2}:\d{2}' r'\d{1,2}\.\d{2} % ▕foo\s+▏ \s*\d+\.\d{2} [KMGT]iB/s\n\n$') assert clear_ansi(cap.out) == regex(pattern), clear_ansi(cap.out) else: pattern = (r'\nProgress\t\d+\.\d+\t\d+\t\d+\t\d+\t\d+\t\d+\t' + str(content_path) + '\n$') assert cap.out == regex(pattern), cap.out rndusr-torf-cli-688c8c0/tests/test_reuse.py000066400000000000000000000207571463253735700210410ustar00rootroot00000000000000import datetime import os import pathlib import random import re import pytest import torf from torfcli import run 
@pytest.fixture def create_existing_torrent(tmp_path): torrents_path = tmp_path / 'torrents' torrents_path.mkdir(exist_ok=True) contents_path = tmp_path / 'contents' contents_path.mkdir(exist_ok=True) kwargs_base = { 'trackers': ['http://some.tracker'], 'webseeds': ['http://some.webseed'], 'private': bool(random.randint(0, 1)), 'source': 'ASDF', 'randomize_infohash': False, 'comment': 'Original Comment', 'created_by': 'Original Creator', 'creation_date': datetime.datetime.fromisoformat('1975-05-23'), } def create_torrent(*files, **kwargs): # Generate content if len(files) == 1: name = files[0][0] content = files[0][1] content_path = contents_path / name content_path.write_bytes(content) else: name = files[0][0].split('/')[0] content_path = contents_path / name content_path.mkdir() for file, data in files: assert file.startswith(name) (content_path / file).parent.mkdir(parents=True, exist_ok=True) (content_path / file).write_bytes(data) # Generate Torrent arguments kw = {**kwargs_base, **kwargs} for _ in range(random.randint(0, 5)): del kw[random.choice(tuple(kw))] t = torf.Torrent(path=content_path, **kw) t.generate() torrent_file = torrents_path / f'{name}.torrent' t.write(torrent_file) return torrent_file return create_torrent @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_finds_matching_torrent(hr_enabled, create_existing_torrent, regex, capsys, human_readable, clear_ansi, assert_no_ctrl): existing_torrents = [ create_existing_torrent(('foo1.jpg', b'just an image 1')), create_existing_torrent(('foo2.jpg', b'just an image 2')), create_existing_torrent(('foo3.jpg', b'just an image 3')), create_existing_torrent( ('bar/this.mp4', b'just a video'), ('bar/that.txt', b'just a text'), ('bar/baz/oh.pdf', b'a subdirectory!'), ), create_existing_torrent( ('baz/hello.mp4', b'just a video'), ('baz/yo.txt', b'just a text'), ), ] existing_torrents_path = pathlib.Path(os.path.commonpath(existing_torrents)) 
existing_contents_path = existing_torrents_path.parent / 'contents' # Copy matching torrent with different piece sizes for piece_size in (4, 2, 8): t = torf.Torrent.read(existing_torrents_path / 'foo2.jpg.torrent') piece_size_max = t.piece_size_max t.piece_size_max = 16 * 1048576 t.piece_size = piece_size * 1048576 t.piece_size_max = piece_size_max t.write(existing_torrents_path / f'foo2.{piece_size}.jpg.torrent') os.unlink(existing_torrents_path / 'foo2.jpg.torrent') content_path = existing_contents_path / 'foo2.jpg' exp_reused_torrent = existing_torrents_path / 'foo2.2.jpg.torrent' exp_torrent = content_path.name + '.torrent' with human_readable(hr_enabled): run([str(content_path), '--reuse', str(existing_torrents_path), '--max-piece-size', '2']) cap = capsys.readouterr() assert cap.err == '' if hr_enabled: assert cap.out == regex(rf'Verifying {exp_reused_torrent}', flags=re.MULTILINE) assert clear_ansi(cap.out) == regex(rf'^\s*Reused {exp_reused_torrent}$', flags=re.MULTILINE) assert clear_ansi(cap.out) != regex(r'^\s+Progress\s+', flags=re.MULTILINE) assert clear_ansi(cap.out) == regex(rf'^\s*Torrent {exp_torrent}$', flags=re.MULTILINE) else: assert_no_ctrl(cap.out) assert cap.out == regex(r'^Reuse\t' rf'{existing_torrents_path}{os.sep}(?:foo[\d\.]+\.jpg|bar|baz)\.torrent\t' r'\d+\.\d+\t\d+\t\d+$', flags=re.MULTILINE) assert cap.out == regex(rf'^Verifying\t{exp_reused_torrent}$', flags=re.MULTILINE) assert cap.out == regex(rf'^Reused\t{exp_reused_torrent}$', flags=re.MULTILINE) assert cap.out != regex(r'^Progress\t', flags=re.MULTILINE) assert cap.out == regex(rf'^Torrent\t{exp_torrent}$', flags=re.MULTILINE) @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_does_not_find_matching_torrent(hr_enabled, create_existing_torrent, regex, capsys, human_readable, clear_ansi, assert_no_ctrl, mock_content): existing_torrents = [ create_existing_torrent(('foo1.jpg', b'just an image 1')), 
create_existing_torrent(('foo2.jpg', b'just an image 2')), create_existing_torrent(('foo3.jpg', b'just an image 3')), create_existing_torrent( ('bar/this.mp4', b'just a video'), ('bar/that.txt', b'just a text'), ('bar/baz/oh.pdf', b'a subdirectory!'), ), create_existing_torrent( ('baz/hello.mp4', b'just a video'), ('baz/yo.txt', b'just a text'), ), ] existing_torrents_path = pathlib.Path(os.path.commonpath(existing_torrents)) exp_torrent = mock_content.name + '.torrent' with human_readable(hr_enabled): run([str(mock_content), '--reuse', str(existing_torrents_path)]) cap = capsys.readouterr() assert cap.err == '' if hr_enabled: assert cap.out == regex(r'\s+Reuse\s+', flags=re.MULTILINE) assert cap.out != regex(r'\s+Verifying\s+', flags=re.MULTILINE) assert clear_ansi(cap.out) != regex(r'^\s+Reused\s+', flags=re.MULTILINE) assert clear_ansi(cap.out) == regex(r'^\s+Progress 100\.00 % \| \d+:\d+:\d+ total \| \d+.\d+ \w+/s$', flags=re.MULTILINE) assert clear_ansi(cap.out) == regex(rf'^\s*Torrent {exp_torrent}$', flags=re.MULTILINE) else: assert_no_ctrl(cap.out) assert cap.out == regex(r'^Reuse\t', flags=re.MULTILINE) assert cap.out != regex(r'^Verifying\t$', flags=re.MULTILINE) assert cap.out != regex(r'^Reused\t$', flags=re.MULTILINE) assert cap.out == regex(rf'^Progress\t.*?/{mock_content.name}/', flags=re.MULTILINE) assert cap.out == regex(rf'^Torrent\t{exp_torrent}$', flags=re.MULTILINE) @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_noreuse_argument(hr_enabled, create_existing_torrent, regex, capsys, human_readable, clear_ansi, assert_no_ctrl): existing_torrents = [ create_existing_torrent(('foo1.jpg', b'just an image 1')), create_existing_torrent(('foo2.jpg', b'just an image 2')), create_existing_torrent(('foo3.jpg', b'just an image 3')), create_existing_torrent( ('bar/this.mp4', b'just a video'), ('bar/that.txt', b'just a text'), ('bar/baz/oh.pdf', b'a subdirectory!'), ), 
create_existing_torrent( ('baz/hello.mp4', b'just a video'), ('baz/yo.txt', b'just a text'), ), ] existing_torrents_path = pathlib.Path(os.path.commonpath(existing_torrents)) existing_contents_path = existing_torrents_path.parent / 'contents' content_path = existing_contents_path / 'foo2.jpg' exp_torrent = content_path.name + '.torrent' with human_readable(hr_enabled): run([str(content_path), '--reuse', str(existing_torrents_path), '--noreuse']) cap = capsys.readouterr() assert cap.err == '' if hr_enabled: assert cap.out != regex(r'Reuse', flags=re.MULTILINE) assert cap.out != regex(r'Verifying', flags=re.MULTILINE) assert clear_ansi(cap.out) != regex(r'^\s*Reused', flags=re.MULTILINE) assert clear_ansi(cap.out) == regex(r'^\s+Progress\s+', flags=re.MULTILINE) assert clear_ansi(cap.out) == regex(rf'^\s*Torrent {exp_torrent}$', flags=re.MULTILINE) else: assert_no_ctrl(cap.out) assert cap.out != regex(r'^Reuse\t', flags=re.MULTILINE) assert cap.out != regex(r'^Verifying\t', flags=re.MULTILINE) assert cap.out != regex(r'^Reused\t', flags=re.MULTILINE) assert cap.out == regex(r'^Progress\t', flags=re.MULTILINE) assert cap.out == regex(rf'^Torrent\t{exp_torrent}$', flags=re.MULTILINE) rndusr-torf-cli-688c8c0/tests/test_stdin.py000066400000000000000000000044111463253735700210240ustar00rootroot00000000000000import os import re import sys from unittest.mock import patch import torf from torfcli import _errors, _vars, run def test_reading_valid_torrent_data_from_stdin(capsys, monkeypatch, clear_ansi, create_torrent, regex): with create_torrent(name='Foo', comment='Bar.') as torrent_file: monkeypatch.setattr(sys, 'stdin', open(torrent_file, 'rb')) run(['-i', '-']) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex(r'^Name\tFoo$', flags=re.MULTILINE) assert clear_ansi(cap.out) == regex(r'^Comment\tBar.$', flags=re.MULTILINE) assert cap.err == '' def test_reading_invalid_torrent_data_from_stdin(capsys, tmp_path, monkeypatch, clear_ansi, regex): torrent = 
torf.Torrent(name='Foo', comment='Bar.') r, w = os.pipe() monkeypatch.setattr(sys, 'stdin', os.fdopen(r)) os.fdopen(w, 'wb').write(torrent.dump(validate=False)) with patch('sys.exit') as mock_exit: run(['-i', '-']) mock_exit.assert_called_once_with(_errors.Code.READ) cap = capsys.readouterr() assert cap.out == '' assert cap.err == f"{_vars.__appname__}: Invalid metainfo: Missing 'piece length' in ['info']\n" def test_reading_valid_magnet_URI_from_stdin(capsys, monkeypatch, clear_ansi, regex): magnet = torf.Magnet('7edbb76b446f87617393537fffa48af733cb4127', dn='Foo', xl=12345) r, w = os.pipe() monkeypatch.setattr(sys, 'stdin', os.fdopen(r)) os.fdopen(w, 'wb').write(str(magnet).encode('utf-8')) run(['-i', '-']) cap = capsys.readouterr() assert clear_ansi(cap.out) == regex(r'^Name\tFoo$', flags=re.MULTILINE) assert clear_ansi(cap.out) == regex(r'^Size\t12345$', flags=re.MULTILINE) assert cap.err == '' def test_reading_invalid_magnet_URI_from_stdin(capsys, monkeypatch, clear_ansi, create_torrent, regex): magnet = 'magnet:?xt=urn:btih:7edbb76b446f87617393537fffa48af733cb4127&dn=Foo&xl=one+million+things' r, w = os.pipe() monkeypatch.setattr(sys, 'stdin', os.fdopen(r)) os.fdopen(w, 'wb').write(magnet.encode('utf-8')) with patch('sys.exit') as mock_exit: run(['-i', '-']) mock_exit.assert_called_once_with(_errors.Code.READ) cap = capsys.readouterr() assert cap.out == '' assert cap.err == f'{_vars.__appname__}: one million things: Invalid exact length ("xl")\n' rndusr-torf-cli-688c8c0/tests/test_utils.py000066400000000000000000000025371463253735700210520ustar00rootroot00000000000000from types import SimpleNamespace import pytest from torfcli import _utils def test_bytes2string__rounding(): assert _utils.bytes2string(1.455 * 2**30) == '1.46 GiB' assert _utils.bytes2string(1.454 * 2**30) == '1.45 GiB' def test_bytes2string__trailing_zeroes(): assert _utils.bytes2string(1.5 * 2**30, trailing_zeros=True) == '1.50 GiB' assert _utils.bytes2string(1.5 * 2**30, 
trailing_zeros=False) == '1.5 GiB' assert _utils.bytes2string(1 * 2**30, trailing_zeros=True) == '1.00 GiB' assert _utils.bytes2string(1 * 2**30, trailing_zeros=False) == '1 GiB' assert _utils.bytes2string(10 * 2**30, trailing_zeros=True) == '10.00 GiB' assert _utils.bytes2string(10 * 2**30, trailing_zeros=False) == '10 GiB' @pytest.mark.parametrize( argnames='torrent, cfg, exp_return_value', argvalues=( (None, {'out': 'user-given.torrent'}, 'user-given.torrent'), (SimpleNamespace(name='foo'), {'out': ''}, 'foo.torrent'), (SimpleNamespace(name='foo'), {'out': '', 'profile': ['this']}, 'foo.this.torrent'), (SimpleNamespace(name='foo'), {'out': '', 'profile': ['this', 'that']}, 'foo.this.that.torrent'), ), ids=lambda v: repr(v), ) def test_get_torrent_filepath(torrent, cfg, exp_return_value): return_value = _utils.get_torrent_filepath(torrent, cfg) assert return_value == exp_return_value rndusr-torf-cli-688c8c0/tests/test_verify.py000066400000000000000000000307111463253735700212110ustar00rootroot00000000000000import os import re from unittest.mock import patch import pytest import torf from torfcli import _errors as err from torfcli import _vars, run def test_torrent_unreadable(capsys, mock_content): with patch('sys.exit') as mock_exit: run([str(mock_content), '-i', 'nonexisting.torrent']) mock_exit.assert_called_once_with(err.Code.READ) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: nonexisting.torrent: No such file or directory\n' @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_PATH_unreadable(create_torrent, human_readable, hr_enabled, capsys, clear_ansi, regex, assert_no_ctrl): with create_torrent() as torrent_file: with human_readable(hr_enabled): with patch('sys.exit') as mock_exit: run(['path/to/nothing', '-i', torrent_file]) mock_exit.assert_called_once_with(err.Code.VERIFY) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: path/to/nothing does not satisfy 
{torrent_file}\n' if hr_enabled: assert clear_ansi(cap.out) == regex(r'^\s*Error path/to/nothing: Not a directory$', flags=re.MULTILINE) else: assert_no_ctrl(cap.out) assert cap.out == regex(r'^Error\tpath/to/nothing: Not a directory$', flags=re.MULTILINE) @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_singlefile_torrent__path_is_dir(tmp_path, create_torrent, human_readable, hr_enabled, capsys, clear_ansi, assert_no_ctrl, regex): content_path = tmp_path / 'content' content_path.write_bytes(b'some data') assert os.path.isfile(content_path) is True with create_torrent(path=content_path) as torrent_file: os.remove(content_path) content_path.mkdir() (content_path / 'some.file').write_bytes(b'some data') assert os.path.isfile(content_path) is False with human_readable(hr_enabled): with patch('sys.exit') as mock_exit: run([str(content_path), '-i', torrent_file]) mock_exit.assert_called_once_with(err.Code.VERIFY) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {content_path} does not satisfy {torrent_file}\n' if hr_enabled: assert clear_ansi(cap.out) == regex(rf'^\s*Error {content_path}: Is a directory$', flags=re.MULTILINE) else: assert_no_ctrl(cap.out) assert cap.out == regex(rf'^Error\t{content_path}: Is a directory$', flags=re.MULTILINE) @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_singlefile_torrent__wrong_size(tmp_path, create_torrent, human_readable, hr_enabled, capsys, clear_ansi, assert_no_ctrl, regex): content_path = tmp_path / 'file.jpg' content_path.write_text('some data') with create_torrent(path=content_path) as torrent_file: content_path.write_text('some data!!!') with human_readable(hr_enabled): with patch('sys.exit') as mock_exit: run([str(content_path), '-i', torrent_file]) mock_exit.assert_called_once_with(err.Code.VERIFY) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: 
{content_path} does not satisfy {torrent_file}\n' if hr_enabled: assert clear_ansi(cap.out) == regex(rf'^\s*Error {content_path}: Too big: 12 instead of 9 bytes$', flags=re.MULTILINE) else: assert_no_ctrl(cap.out) assert cap.out == regex(rf'^Error\t{content_path}: Too big: 12 instead of 9 bytes$', flags=re.MULTILINE) @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_singlefile_torrent__correct_size_but_corrupt(tmp_path, create_torrent, human_readable, hr_enabled, capsys, clear_ansi, assert_no_ctrl, regex): content_path = tmp_path / 'content' content_path.write_text('some data') with create_torrent(path=content_path) as torrent_file: content_path.write_text('somm date') with human_readable(hr_enabled): with patch('sys.exit') as mock_exit: run([str(content_path), '-i', torrent_file]) mock_exit.assert_called_once_with(err.Code.VERIFY) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {content_path} does not satisfy {torrent_file}\n' if hr_enabled: assert clear_ansi(cap.out) == regex(r'^\s*Error Corruption in piece 1$', flags=re.MULTILINE) else: assert_no_ctrl(cap.out) assert cap.out == regex(r'^Error\tCorruption in piece 1$', flags=re.MULTILINE) @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_multifile_torrent__path_is_file(tmp_path, create_torrent, human_readable, hr_enabled, capsys, clear_ansi, assert_no_ctrl, regex): content_path = tmp_path / 'content' content_path.mkdir() file1 = content_path / 'file1.jpg' file1.write_text('some data') assert os.path.isdir(content_path) is True with create_torrent(path=content_path) as torrent_file: os.remove(file1) os.rmdir(content_path) content_path.write_text('some data') assert os.path.isdir(content_path) is False with human_readable(hr_enabled): with patch('sys.exit') as mock_exit: run([str(content_path), '-i', torrent_file]) mock_exit.assert_called_once_with(err.Code.VERIFY) 
cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {content_path} does not satisfy {torrent_file}\n' if hr_enabled: assert clear_ansi(cap.out) == regex(rf'^\s*Error {content_path}: Not a directory$', flags=re.MULTILINE) else: assert_no_ctrl(cap.out) assert cap.out == regex(rf'^Error\t{content_path}: Not a directory$', flags=re.MULTILINE) @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_multifile_torrent__missing_file(tmp_path, create_torrent, human_readable, hr_enabled, capsys, clear_ansi, assert_no_ctrl, regex): content_path = tmp_path / 'content' content_path.mkdir() file1 = content_path / 'file1.jpg' file1.write_text('some data') file2 = content_path / 'file2.jpg' file2.write_text('some other data') with create_torrent(path=content_path) as torrent_file: os.remove(file1) with human_readable(hr_enabled): with patch('sys.exit') as mock_exit: run([str(content_path), '-i', torrent_file]) mock_exit.assert_called_once_with(err.Code.VERIFY) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {content_path} does not satisfy {torrent_file}\n' if hr_enabled: assert clear_ansi(cap.out) == regex(rf'^\s*Error {file1}: No such file or directory$', flags=re.MULTILINE) else: assert_no_ctrl(cap.out) assert cap.out == regex(rf'^Error\t{file1}: No such file or directory$', flags=re.MULTILINE) @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_multifile_torrent__wrong_size(tmp_path, create_torrent, human_readable, hr_enabled, capsys, clear_ansi, assert_no_ctrl, regex): content_path = tmp_path / 'content' content_path.mkdir() file1 = content_path / 'file1.jpg' file1.write_text('some data') file2 = content_path / 'file2.jpg' file2.write_text('some other data') file2_size = os.path.getsize(file2) with create_torrent(path=content_path) as torrent_file: file2.write_text('some more other data') assert os.path.getsize(file2) != 
file2_size with human_readable(hr_enabled): with patch('sys.exit') as mock_exit: run([str(content_path), '-i', torrent_file]) mock_exit.assert_called_once_with(err.Code.VERIFY) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {content_path} does not satisfy {torrent_file}\n' if hr_enabled: assert clear_ansi(cap.out) == regex(rf'^\s*Error {file2}: Too big: 20 instead of 15 bytes$', flags=re.MULTILINE) else: assert_no_ctrl(cap.out) assert cap.out == regex(rf'^Error\t{file2}: Too big: 20 instead of 15 bytes$', flags=re.MULTILINE) @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_multifile_torrent__correct_size_but_corrupt(tmp_path, create_torrent, human_readable, hr_enabled, capsys, clear_ansi, assert_no_ctrl, regex): content_path = tmp_path / 'content' content_path.mkdir() file1 = content_path / 'file1.jpg' file1_data = bytearray(b'\x00' * int(1e6)) file1.write_bytes(file1_data) file1_size = os.path.getsize(file1) file2 = content_path / 'file2.jpg' file2.write_text('some other data') with create_torrent(path=content_path) as torrent_file: file1_data[int(1e6 / 2)] = (file1_data[int(1e6 / 2)] + 1) % 256 file1.write_bytes(file1_data) assert os.path.getsize(file1) == file1_size with human_readable(hr_enabled): with patch('sys.exit') as mock_exit: run([str(content_path), '-i', torrent_file]) mock_exit.assert_called_once_with(err.Code.VERIFY) cap = capsys.readouterr() assert cap.err == f'{_vars.__appname__}: {content_path} does not satisfy {torrent_file}\n' if hr_enabled: assert clear_ansi(cap.out) == regex(rf'^\s*Error Corruption in piece 31 in {file1}$', flags=re.MULTILINE) else: assert_no_ctrl(cap.out) assert cap.out == regex((rf'^Error\tCorruption in piece 31 in {file1}$'), flags=re.MULTILINE) @pytest.mark.parametrize('hr_enabled', (True, False), ids=('human_readable=True', 'human_readable=False')) def test_success(tmp_path, create_torrent, human_readable, hr_enabled, capsys, 
clear_ansi, assert_no_ctrl, regex): content_path = tmp_path / 'content' content_path.write_text('some data') with create_torrent(path=content_path) as torrent_file: with human_readable(hr_enabled): run([str(content_path), '-i', torrent_file]) cap = capsys.readouterr() if hr_enabled: assert clear_ansi(cap.out) == regex(r'^\s*Progress 100.00 % \| \d+:\d+:\d+ total \| \s*\d+\.\d+ [KMGT]iB/s$', flags=re.MULTILINE) else: assert_no_ctrl(cap.out) assert cap.out == regex(rf'^Progress\t100\.000\t\d+\t\d+\t\d+\t\d+\t\d+\t{content_path}$', flags=re.MULTILINE) def test_metainfo_with_magnet_uri(capsys, tmp_path, regex): magnet = ('magnet:?xt=urn:btih:e167b1fbb42ea72f051f4f50432703308efb8fd1&dn=My+Torrent&xl=142631' '&tr=https%3A%2F%2Flocalhost%3A123%2Fannounce&&tr=https%3A%2F%2Flocalhost%3A456%2Fannounce') filepath = tmp_path / 'My Torrent' filepath.write_text('something') with patch('sys.exit') as mock_exit: run(['-i', magnet, str(filepath)]) mock_exit.assert_called_once_with(err.Code.READ) cap = capsys.readouterr() assert cap.err == regex(rf'^{_vars.__appname__}: https://localhost:123/file\?info_hash=' r'%E1g%B1%FB%B4\.%A7/%05%1FOPC%27%030%8E%FB%8F%D1: [\w\s]+\n' rf'{_vars.__appname__}: https://localhost:456/file\?info_hash=' r'%E1g%B1%FB%B4\.%A7/%05%1FOPC%27%030%8E%FB%8F%D1: [\w\s]+\n' rf"{_vars.__appname__}: Invalid metainfo: Missing 'piece length' in \['info'\]\n$") def test_PATH_argument_with_trailing_slash(capsys, create_torrent): with create_torrent() as torrent_file: torrent_name = torf.Torrent.read(torrent_file).name with patch('torf.Torrent.verify') as mock_verify: run(['-i', torrent_file, 'some/path']) assert mock_verify.call_args_list[0][0][0] == 'some/path' with patch('torf.Torrent.verify') as mock_verify: run(['-i', torrent_file, 'some/path/']) assert mock_verify.call_args_list[0][0][0] == f'some/path/{torrent_name}' 
rndusr-torf-cli-688c8c0/torfcli/000077500000000000000000000000001463253735700165725ustar00rootroot00000000000000rndusr-torf-cli-688c8c0/torfcli/__init__.py000066400000000000000000000023241463253735700207040ustar00rootroot00000000000000# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details # http://www.gnu.org/licenses/gpl-3.0.txt import sys def run(args=sys.argv[1:]): from . import _config, _errors, _main, _ui # Only parse --json, --human and --nohuman so UI can report errors. ui = _ui.UI(_config.parse_early_args(args)) # Parse the rest of the args; report any errors as specified by early args. torrent = None try: ui.cfg = _config.get_cfg(args) except (_errors.CliError, _errors.ConfigError) as e: ui.error(e) else: try: torrent = _main.run(ui) except _errors.Error as e: ui.error(e) except KeyboardInterrupt: ui.error(_errors.Error('Aborted', code=_errors.Code.ABORTED)) finally: ui.terminate(torrent) rndusr-torf-cli-688c8c0/torfcli/__main__.py000066400000000000000000000012061463253735700206630ustar00rootroot00000000000000# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
See the # GNU General Public License for more details # http://www.gnu.org/licenses/gpl-3.0.txt # This file is evaluated when the torfcli module is loaded with # `python -m torfcli`. from . import run run() rndusr-torf-cli-688c8c0/torfcli/_config.py000066400000000000000000000343341463253735700205570ustar00rootroot00000000000000# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details # http://www.gnu.org/licenses/gpl-3.0.txt import argparse import itertools import json import os import re import torf from xdg import BaseDirectory from . import _errors, _utils, _vars DEFAULT_CONFIG_FILE = os.path.join(BaseDirectory.xdg_config_home, _vars.__appname__, 'config') DEFAULT_CREATOR = f'{_vars.__appname__} {_vars.__version__}' VERSION_TEXT = f'{_vars.__appname__} {_vars.__version__} <{_vars.__url__}>' HELP_TEXT = f""" {_vars.__appname__} - {_vars.__description__} USAGE {_vars.__appname__} PATH [OPTIONS] [-o TORRENT] # Create torrent {_vars.__appname__} -i INPUT # Display torrent {_vars.__appname__} -i INPUT [OPTIONS] -o TORRENT # Edit torrent {_vars.__appname__} -i TORRENT PATH # Verify file content ARGUMENTS PATH Path to torrent's content file or directory --in, -i INPUT Read metainfo from torrent file or magnet URI --out, -o TORRENT Write metainfo to TORRENT (default: NAME.torrent) --reuse, -r REUSE Copy pieces from existing torrent file if possible --noreuse, -R Ignore any --reuse paths FILES SELECTION --exclude, -e PATTERN Exclude files that match this glob pattern (e.g. 
"*.txt") --include PATTERN Include excluded files that match this glob pattern --exclude-regex, -er PATTERN Exclude files that match this regular expression (e.g. ".*\\.txt$") --include-regex, -ir PATTERN Include excluded files that match this regular expression TORRENT METADATA --name, -n NAME Torrent name (default: basename of PATH) --tracker, -t TRACKER List of comma-separated announce URLs; may be given multiple times for multiple tiers --webseed, -w WEBSEED Webseed URL; may be given multiple times --private, -p Forbid clients to use DHT and PEX --comment, -c COMMENT Comment that is stored in the torrent file --date, -d DATE Creation date as YYYY-MM-DD[ HH:MM[:SS]], 'now' or 'today' (default: 'now') --creator, -a CREATOR Name of application used to create torrent file. (default: '{DEFAULT_CREATOR}') --source, -s SOURCE Add "source" field --merge JSON Insert or remove arbitrary metainfo (see man page) --xseed, -x Randomize info hash --max-piece-size SIZE Maximum piece size in multiples of 1 MiB --notracker, -T Remove trackers from INPUT --nowebseed, -W Remove webseeds from INPUT --noprivate, -P Remove private flag from INPUT --nocomment, -C Remove comment from INPUT --nosource, -S Remove "source" field from INPUT --noxseed, -X De-randomize info hash of INPUT --nodate, -D Don't include date or remove date from INPUT --nocreator, -A Don't include creator or remove creator from INPUT --notorrent, -N Don't create torrent file --nomagnet, -M Don't create magnet URI --novalidate, -V Don't check INPUT and/or TORRENT for errors CONFIGURATION --yes, -y Answer all yes/no prompts with "yes" --config, -f FILE Read configuration from FILE (default: ~/.config/{_vars.__appname__}/config --noconfig, -F Ignore configuration file --profile, -z PROFILE Use options from PROFILE --threads THREADS Number of threads to use for hashing TEXT OUTPUT --json, -j Print output as JSON object --metainfo, -m Print torrent metainfo as JSON object --human, -u Force human-readable output 
--nohuman, -U Force machine-readable output --verbose, -v Increase verbosity --help, -h Show this help screen and exit --version Show version number and exit """.strip() class DictFromJSON(dict): def __new__(cls, string): try: return json.loads(string) except ValueError as e: raise argparse.ArgumentTypeError(f'Invalid JSON: {e}') class CLIParser(argparse.ArgumentParser): def error(self, msg): msg = msg[0].upper() + msg[1:] raise _errors.CliError(msg) _cliparser = CLIParser(add_help=False) _cliparser.add_argument('PATH', nargs='?') _cliparser.add_argument('--in', '-i', default='') _cliparser.add_argument('--out', '-o', default='') _cliparser.add_argument('--reuse', '-r', default=[], action='append') _cliparser.add_argument('--noreuse', '-R', action='store_true') _cliparser.add_argument('--exclude', '-e', default=[], action='append') _cliparser.add_argument('--include', default=[], action='append') _cliparser.add_argument('--exclude-regex', '-er', default=[], action='append') _cliparser.add_argument('--include-regex', '-ir', default=[], action='append') _cliparser.add_argument('--name', '-n', default='') _cliparser.add_argument('--tracker', '-t', default=[], action='append') _cliparser.add_argument('--webseed', '-w', default=[], action='append') _cliparser.add_argument('--private', '-p', action='store_true', default=None) _cliparser.add_argument('--comment', '-c') _cliparser.add_argument('--date', '-d', default='') _cliparser.add_argument('--creator', '-a', nargs='?', const=DEFAULT_CREATOR) _cliparser.add_argument('--source', '-s', default='') _cliparser.add_argument('--merge', type=DictFromJSON, action='append') _cliparser.add_argument('--xseed', '-x', action='store_true') _cliparser.add_argument('--max-piece-size', default=0, type=float) _cliparser.add_argument('--notracker', '-T', action='store_true') _cliparser.add_argument('--nowebseed', '-W', action='store_true') _cliparser.add_argument('--noprivate', '-P', action='store_true') 
_cliparser.add_argument('--nocomment', '-C', action='store_true') _cliparser.add_argument('--nosource', '-S', action='store_true') _cliparser.add_argument('--noxseed', '-X', action='store_true') _cliparser.add_argument('--nodate', '-D', action='store_true') _cliparser.add_argument('--nocreator', '-A', action='store_true') _cliparser.add_argument('--notorrent', '-N', action='store_true') _cliparser.add_argument('--nomagnet', '-M', action='store_true') _cliparser.add_argument('--novalidate', '-V', action='store_true') _cliparser.add_argument('--yes', '-y', action='store_true') _cliparser.add_argument('--config', '-f') _cliparser.add_argument('--noconfig', '-F', action='store_true') _cliparser.add_argument('--profile', '-z', default=[], action='append') _cliparser.add_argument('--threads', type=int, default=0) _cliparser.add_argument('--json', '-j', action='store_true') _cliparser.add_argument('--metainfo', '-m', action='store_true') _cliparser.add_argument('--human', '-u', action='store_true') _cliparser.add_argument('--nohuman', '-U', action='store_true') _cliparser.add_argument('--verbose', '-v', action='count', default=0) _cliparser.add_argument('--help', '-h', action='store_true') _cliparser.add_argument('--version', action='store_true') _cliparser.add_argument('--debug-file') def parse_early_args(args): # Parse only some arguments we need to figure out how to report errors. # Ignore all other arguments and any errors we might encounter. 
parser = argparse.ArgumentParser(add_help=False) parser.add_argument('--json', '-j', action='store_true') parser.add_argument('--human', '-u', action='store_true') parser.add_argument('--nohuman', '-U', action='store_true') return vars(parser.parse_known_args(args)[0]) def parse_args(args): cfg = vars(_cliparser.parse_args(args)) # Validate creation date if cfg['date']: try: cfg['date'] = _utils.parse_date(cfg['date'] or 'now') except ValueError: raise _errors.CliError(f'{cfg["date"]}: Invalid date') # Validate max piece size if cfg['max_piece_size']: cfg['max_piece_size'] = cfg['max_piece_size'] * 1048576 try: torf.Torrent( piece_size_min=131072, # 128 kiB piece_size_max=134217728, # 128 MiB ).piece_size = cfg['max_piece_size'] except torf.PieceSizeError as e: raise _errors.CliError(e) # Validate tracker URLs for tier in cfg['tracker']: for url in tier.split(','): try: torf.Torrent().trackers = url except torf.URLError as e: raise _errors.CliError(e) # Validate webseed URLs for webseed in cfg['webseed']: try: torf.Torrent().webseeds = (webseed,) except torf.URLError as e: raise _errors.CliError(e) # Validate regular expressions for regex in itertools.chain(cfg['exclude_regex'], cfg['include_regex']): try: re.compile(regex) except re.error as e: raise _errors.CliError(f'Invalid regular expression: {regex}: ' f'{str(e)[0].upper()}{str(e)[1:]}') cfg['validate'] = not cfg['novalidate'] return cfg def get_cfg(cliargs): """Combine values from CLI, config file, profiles and defaults""" clicfg = parse_args(cliargs) if clicfg['debug_file']: import logging logging.basicConfig(level=logging.DEBUG, format='%(asctime)s %(message)s', filename=clicfg['debug_file']) # If we don't need to read a config file, return parsed CLI arguments cfgfile = clicfg['config'] or DEFAULT_CONFIG_FILE if clicfg['noconfig'] or (not clicfg['config'] and not os.path.exists(cfgfile)): return clicfg # Read config file filecfg = _readfile(cfgfile) # Check for illegal arguments 
    _check_illegal_configfile_arguments(filecfg, cfgfile)
    for cfg in filecfg.values():
        if isinstance(cfg, dict):
            # Profile sections are nested dicts; check them as well
            _check_illegal_configfile_arguments(cfg, cfgfile)

    # Parse combined arguments from config file and CLI to allow --profile in
    # CLI and config file
    try:
        cfg = parse_args(_cfg2args(filecfg) + cliargs)
    except _errors.CliError as e:
        raise _errors.ConfigError(f'{cfgfile}: {e}')

    # Apply profiles specified in config file or on CLI
    def apply_profile(profname):
        # Translate the named profile section into CLI-style arguments and
        # queue them in `profargs`
        prof = filecfg.get(profname)
        if prof is None:
            raise _errors.ConfigError(f'{cfgfile}: No such profile: {profname}')
        else:
            profargs.extend(_cfg2args(prof))

    profargs = []
    for profname in cfg['profile']:
        apply_profile(profname)

    # Combine arguments from profiles with arguments from global config and CLI
    args = _cfg2args(filecfg) + profargs + cliargs
    try:
        return parse_args(args)
    except _errors.CliError as e:
        raise _errors.ConfigError(f'{cfgfile}: {e}')


def _check_illegal_configfile_arguments(cfg, cfgfile):
    # These options only make sense on the command line; raise ConfigError if
    # any of them appears in a config file section
    for arg in ('in', 'name', 'out', 'config', 'noconfig', 'profile', 'help', 'version'):
        if arg in cfg:
            raise _errors.ConfigError(f'{cfgfile}: Not allowed in config file: {arg}')


# A line that is a single word enables a boolean option
_re_bool = re.compile(r'^(\S+)$')
# A "name = value" line sets a string option
_re_assign = re.compile(r'^(\S+)\s*=\s*(.*)\s*$')

def _readfile(filepath):
    """Read INI-style file into dictionary"""
    # Catch any errors from the OS
    try:
        with open(filepath, 'r') as f:
            lines = tuple(line.strip() for line in f.readlines())
    except OSError as e:
        raise _errors.ConfigError(f'{filepath}: {os.strerror(e.errno)}')

    # Parse lines; `subcfg` points at the section currently being filled
    # (initially the top-level dict itself)
    cfg = subcfg = {}
    for line in lines:
        # Skip empty lines and comments
        if not line or line[0] == '#':
            continue
        # Start new profile
        if line[0] == '[' and line[-1] == ']':
            profile_name = line[1:-1]
            cfg[profile_name] = subcfg = {}
            continue
        # Boolean option
        bool_match = _re_bool.match(line)
        if bool_match:
            name = bool_match.group(1)
            subcfg[name] = True
            continue
        # String option
        assign_match = _re_assign.match(line)
        if assign_match:
            name = assign_match.group(1)
            value = assign_match.group(2).strip()
            # Strip off optional quotes
            if value:
                if value[0] == value[-1] == '"' or value[0] == value[-1] == "'":
                    value = value[1:-1]
                value = _resolve_envvars(value)
            # Multiple occurences of the same name turn its value into a list
            if name in subcfg:
                if not isinstance(subcfg[name], list):
                    subcfg[name] = [subcfg[name]]
                subcfg[name].append(value)
            else:
                subcfg[name] = value
            continue
    return cfg


def _resolve_envvars(string):
    # Replace "$NAME" / "${NAME}" with the environment variable's value while
    # honoring backslash escapes ("\$NAME" stays literal)
    def resolve(m):
        # The string of \ chars is halfed because every \ escapes the next \.
        esc_count = len(m.group(1))
        esc_str = int(esc_count / 2) * '\\'
        varname = m.group(2) or m.group(3)
        # Unknown variables resolve to the literal "$name"
        value = os.environ.get(varname, '$' + varname)
        # Uneven number of \ means $varname is escaped, even number of \ means
        # it is not.
        if esc_count and esc_count % 2 != 0:
            return f'{esc_str}${varname}'
        else:
            return f'{esc_str}{value}'
    regex = re.compile(r'(\\*)\$(?:(\w+)|\{(\w+)\})')
    return regex.sub(resolve, string)


def _cfg2args(cfg):
    # Translate a config dictionary back into an equivalent flat list of CLI
    # arguments so config file and CLI can share a single parser
    args = []
    for name,value in cfg.items():
        option = '--' + name
        # Option with parameter
        if isinstance(value, str):
            args.extend((option, value))
        # Switch without parameter
        elif isinstance(value, (bool, type(None))):
            args.append(option)
        # Option that can occur multiple times
        elif isinstance(value, list):
            for item in value:
                args.extend((option, item))
    return args
rndusr-torf-cli-688c8c0/torfcli/_errors.py000066400000000000000000000067151463253735700206270ustar00rootroot00000000000000# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
# See the
# GNU General Public License for more details
# http://www.gnu.org/licenses/gpl-3.0.txt

from collections import defaultdict
from enum import IntEnum

import torf


class Code(IntEnum):
    # Process exit codes used by the CLI
    GENERIC = 1
    CLI = 2
    CONFIG = 3
    READ = 4
    WRITE = 5
    VERIFY = 6
    ABORTED = 128


class Error(Exception):
    """
    Automatically return the appropriate subclass instance based on error code
    or passed message.

    >>> Error('foo', code=Code.READ)
    ReadError('foo')
    >>> Error(torf.ReadError(errno.ENOENT, 'foo'))
    ReadError('foo: No such file or directory')
    """
    # Map torf exception types to exit codes; unknown types fall back to
    # Code.GENERIC via the defaultdict factory.
    _subclsmap = defaultdict(
        lambda: Code.GENERIC,
        # torf.URLError and torf.PieceSizeError are handled in _config.py
        {torf.ReadError : Code.READ,
         torf.PathError : Code.READ,
         torf.BdecodeError : Code.READ,
         torf.MetainfoError : Code.READ,
         torf.MagnetError : Code.READ,
         torf.WriteError : Code.WRITE,
         torf.VerifyNotDirectoryError : Code.VERIFY,
         torf.VerifyIsDirectoryError : Code.VERIFY,
         torf.VerifyFileSizeError : Code.VERIFY,
         torf.VerifyContentError : Code.VERIFY})

    @classmethod
    def _get_exception_cls(cls, msg, code):
        # Pick the Error subclass that corresponds to `code`, deriving the
        # code from `msg` if it is a torf exception.
        if code is None:
            # If `msg` is a torf.*Error, translate it into an error code
            code = cls._subclsmap[type(msg)]
        assert code in Code, f'Not an error code: {code}'
        # Translate error code name to exception class
        cls_name = code.name.capitalize() + 'Error'
        try:
            return globals()[cls_name]
        except KeyError:
            # No dedicated subclass for this code (e.g. GENERIC, ABORTED)
            return None

    def __new__(cls, msg='Unspecified error', code=None, **kwargs):
        # Instantiate the matching subclass if one exists, otherwise the
        # class that was actually called.
        subcls = cls._get_exception_cls(msg, code)
        if subcls is not None:
            self = super(Error, cls).__new__(subcls)
        else:
            self = super().__new__(cls)
        return self

    def __init__(self, msg=None, code=None):
        msg = msg or 'Unspecified error'
        # Explicit `code` wins; otherwise derive the exit code from this
        # instance's (sub)class.
        self._exit_code = code or self._subclsmap[type(self)]
        super().__init__(str(msg))

    @property
    def exit_code(self):
        # Process exit status to report for this error
        return self._exit_code


class CliError(Error):
    def __init__(self, msg, code=None):
        super().__init__(msg, code=Code.CLI)

class ConfigError(Error):
    def __init__(self, msg, code=None):
        super().__init__(msg, code=Code.CONFIG)

class ReadError(Error):
    def __init__(self, msg, code=None):
        super().__init__(msg, code=Code.READ)

class WriteError(Error):
    def __init__(self, msg, code=None):
        super().__init__(msg, code=Code.WRITE)

class VerifyError(Error):
    def __init__(self, content=None, code=None, torrent=None):
        if torrent is None:
            # Content is a complete message
            super().__init__(content, code=Code.VERIFY)
        else:
            # Content is a path
            super().__init__(f'{content} does not satisfy {torrent}', code=Code.VERIFY)
rndusr-torf-cli-688c8c0/torfcli/_main.py000066400000000000000000000232561463253735700202360ustar00rootroot00000000000000# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details
# http://www.gnu.org/licenses/gpl-3.0.txt

import datetime
import os.path

import torf

from .
import _config, _errors, _utils, _vars # Seconds between progress updates PROGRESS_INTERVAL = 0.5 def run(ui): cfg = ui.cfg if cfg['help']: print(_config.HELP_TEXT) elif cfg['version']: print(_config.VERSION_TEXT) else: # Figure out our modus operandi if cfg['PATH'] and not cfg['in']: return _create_mode(ui, cfg) elif cfg['in'] and ( # Create new torrent file cfg['out'] # Create new magnet URI or cfg['name'] or cfg['tracker'] or cfg['webseed'] or cfg['notracker'] or cfg['nowebseed'] ): return _edit_mode(ui, cfg) elif not cfg['PATH'] and not cfg['out'] and cfg['in']: return _info_mode(ui, cfg) elif cfg['PATH'] and not cfg['out'] and cfg['in']: return _verify_mode(ui, cfg) else: raise _errors.CliError(f'Not sure what to do (see USAGE in `{_vars.__appname__} -h`)') def _info_mode(ui, cfg): torrent = _utils.get_torrent(cfg, ui) ui.show_torrent(torrent) if not cfg['nomagnet']: try: ui.info('Magnet', torrent.magnet()) except torf.TorfError as e: if cfg['validate']: raise _errors.Error(e) else: ui.warn(_errors.Error(e)) return torrent def _create_mode(ui, cfg): trackers = [tier.split(',') for tier in cfg['tracker']] try: torrent = torf.Torrent( path=cfg['PATH'], name=cfg['name'] or None, exclude_globs=cfg['exclude'], exclude_regexs=cfg['exclude_regex'], include_globs=cfg['include'], include_regexs=cfg['include_regex'], piece_size_max=cfg['max_piece_size'] if cfg['max_piece_size'] else None, trackers=() if cfg['notracker'] else trackers, webseeds=() if cfg['nowebseed'] else cfg['webseed'], private=False if cfg['noprivate'] else cfg['private'], source=None if cfg['nosource'] or not cfg['source'] else cfg['source'], randomize_infohash=False if cfg['noxseed'] else cfg['xseed'], comment=None if cfg['nocomment'] else cfg['comment'], created_by=None if cfg['nocreator'] else (cfg['creator'] or _config.DEFAULT_CREATOR), ) except torf.TorfError as e: raise _errors.Error(e) if cfg['nodate']: torrent.creation_date = None elif cfg['date']: torrent.creation_date = cfg['date'] else: 
torrent.creation_date = datetime.datetime.now() # Apply custom JSON objects from --merge _customize_torrent(torrent, cfg) ui.check_output_file_exists(_utils.get_torrent_filepath(torrent, cfg)) ui.show_torrent(torrent) _hash_pieces( ui=ui, torrent=torrent, reuse_paths=cfg['reuse'] if not cfg['noreuse'] else (), threads=cfg['threads'], ) _write_torrent(ui, torrent, cfg) return torrent def _edit_mode(ui, cfg): torrent = _utils.get_torrent(cfg, ui) # Make sure we can write before we start editing ui.check_output_file_exists(_utils.get_torrent_filepath(torrent, cfg)) # Make changes according to CLI args def set_or_remove(arg_name, attr_name): if cfg.get('no' + arg_name): setattr(torrent, attr_name, None) elif cfg[arg_name]: try: setattr(torrent, attr_name, cfg[arg_name]) except torf.TorfError as e: raise _errors.Error(e) set_or_remove('comment', 'comment') set_or_remove('private', 'private') set_or_remove('source', 'source') set_or_remove('xseed', 'randomize_infohash') def list_set_or_remove(arg_name, attr_name, split_values_at=None): if cfg.get('no' + arg_name): setattr(torrent, attr_name, None) if cfg[arg_name]: old_list = getattr(torrent, attr_name) or [] if split_values_at is not None: add_list = [tier.split(split_values_at) for tier in cfg[arg_name]] else: add_list = cfg[arg_name] new_list = old_list + add_list try: setattr(torrent, attr_name, new_list) except torf.TorfError as e: raise _errors.Error(e) list_set_or_remove('tracker', 'trackers', split_values_at=',') list_set_or_remove('webseed', 'webseeds') if cfg['nocreator']: torrent.created_by = None elif cfg['creator']: torrent.created_by = cfg['creator'] if cfg['nodate']: torrent.creation_date = None elif cfg['date']: torrent.creation_date = cfg['date'] # Apply custom JSON objects from --merge _customize_torrent(torrent, cfg) if cfg['PATH']: list_set_or_remove('exclude', 'exclude_globs') list_set_or_remove('exclude_regex', 'exclude_regexs') list_set_or_remove('include', 'include_globs') 
list_set_or_remove('include_regex', 'include_regexs') try: torrent.path = cfg['PATH'] except torf.TorfError as e: raise _errors.Error(e) else: # Setting torrent.path overwrites torrent.name, so we must set any # custom name after setting path if cfg['name']: torrent.name = cfg['name'] ui.show_torrent(torrent) _hash_pieces(ui, torrent) else: if cfg['name']: torrent.name = cfg['name'] ui.show_torrent(torrent) _write_torrent(ui, torrent, cfg) return torrent def _verify_mode(ui, cfg): torrent = _utils.get_torrent(cfg, ui) # Append torrent's name to path if it ends with "/" path = cfg['PATH'] if path[-1] == os.path.sep: path = os.path.join(path, torrent.metainfo['info'].get('name', '')) ui.show_torrent(torrent) ui.info('Path', path) try: ui.info('Info Hash', torrent.infohash) except torf.TorfError as e: raise _errors.Error(e) with ui.StatusReporter() as sr: try: success = torrent.verify(path, callback=sr.verify_callback, interval=PROGRESS_INTERVAL) except torf.TorfError as e: raise _errors.Error(e) except KeyboardInterrupt: sr.keep_progress() raise else: sr.keep_progress_summary() if not success: raise _errors.VerifyError(content=cfg['PATH'], torrent=cfg['in']) return torrent def _hash_pieces(ui, torrent, reuse_paths=None, threads=0): with ui.StatusReporter() as sr: try: # Try reusing existing torrent and generate() if that fails success = False if reuse_paths and torrent.files: success = torrent.reuse(reuse_paths, callback=sr.reuse_callback, interval=PROGRESS_INTERVAL) if not success: sr.reset() success = torrent.generate(callback=sr.generate_callback, interval=PROGRESS_INTERVAL, threads=threads or None) except torf.TorfError as e: raise _errors.Error(e) except KeyboardInterrupt: sr.keep_progress() raise else: sr.keep_progress_summary() if success: try: ui.info('Info Hash', torrent.infohash) except torf.TorfError as e: raise _errors.Error(e) def _write_torrent(ui, torrent, cfg): _validate_torrent(ui, torrent, cfg) if not cfg['nomagnet']: try: ui.info('Magnet', 
torrent.magnet()) except torf.TorfError: # Error was already reported pass if not cfg['notorrent']: filepath = _utils.get_torrent_filepath(torrent, cfg) try: torrent.write(filepath, overwrite=True, validate=cfg['validate']) except torf.WriteError as e: # Errors other than WriteError should already be reported by # torrent.validate() above raise _errors.Error(e) else: ui.info('Torrent', filepath) if torrent.private and not torrent.trackers: ui.warn('Torrent is private and has no trackers') def _validate_torrent(ui, torrent, cfg): try: torrent.validate() except torf.TorfError as e: if cfg['notorrent']: # Not writing torrent file; do not fail because, # e.g., magnet URI lacks ['info'] pass elif cfg['validate']: # Croak with validation error raise _errors.Error(e) else: # Report validation error but write torrent/magnet anyway ui.warn(_errors.Error(e)) def _customize_torrent(torrent, cfg): # Apply JSON objects from --merge argument(s) if cfg['merge']: for merge in cfg['merge']: try: _utils.merge_metainfo(torrent.metainfo, merge) except ValueError as e: raise _errors.CliError(e) rndusr-torf-cli-688c8c0/torfcli/_term.py000066400000000000000000000057231463253735700202610ustar00rootroot00000000000000# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
# See the
# GNU General Public License for more details
# http://www.gnu.org/licenses/gpl-3.0.txt

import io
import sys

# VT100/ANSI escape sequences.
# References:
# https://www.vt100.net/docs/vt100-ug/chapter3.html#DECSCNM
erase_line = '\x1b[2K'
erase_to_eol = '\x1b[K'
reverse_on = '\x1b[7m'
reverse_off = '\x1b[0m'
hide_cursor = '\x1b[?25l'
show_cursor = '\x1b[?25h'
ensure_line_below = '\n\x1b[1A'
save_cursor_pos = '\x1b7'
restore_cursor_pos = '\x1b8'
move_pos1 = '\r'
move_up = '\x1b[1A'
move_down = '\x1b[1B'
move_right = '\x1b[1C'
move_left = '\x1b[1D'


def echo(*names):
    """Write the escape sequences named in `names` to stdout (no newline)."""
    sequence = []
    for name in names:
        # Look the sequence up by its module-level variable name
        sequence.append(globals()[name])
    print(''.join(sequence), end='')


def getch():
    """Read and return a single character from stdin in raw mode."""
    with raw_mode:
        return sys.stdin.read(1)


class _raw_mode():
    """Context manager that switches the terminal into raw mode."""

    # Saved termios attributes; None until enable() succeeds
    _orig_attrs = None

    def enable(self):
        # Silently do nothing on platforms without termios (e.g. Windows)
        # or when stdin has no file descriptor (e.g. piped input).
        try:
            import termios
            import tty
            fd = sys.stdin.fileno()
            self._orig_attrs = termios.tcgetattr(fd)
            tty.setraw(fd)
        except (ImportError, io.UnsupportedOperation):
            pass

    def disable(self):
        # Restore the attributes saved by enable(), if any
        try:
            import termios
            if self._orig_attrs is not None:
                fd = sys.stdin.fileno()
                termios.tcsetattr(fd, termios.TCSADRAIN, self._orig_attrs)
        except (ImportError, io.UnsupportedOperation):
            pass

    def __enter__(self):
        self.enable()

    def __exit__(self, _, __, ___):
        self.disable()

raw_mode = _raw_mode()


class _no_user_input():
    """Disable printing of characters as they are typed and hide cursor"""

    def enable(self):
        try:
            import termios
            fd = sys.stdin.fileno()
            # Remember the current settings so disable() can restore them
            self._orig_attrs = termios.tcgetattr(fd)
            modified = termios.tcgetattr(fd)
            modified[3] = modified[3] & ~termios.ECHO  # lflags: no echo
            termios.tcsetattr(fd, termios.TCSADRAIN, modified)
            echo('hide_cursor')
        except (ImportError, io.UnsupportedOperation):
            pass

    def disable(self):
        saved = getattr(self, '_orig_attrs', None)
        if saved is not None:
            try:
                import termios
                fd = sys.stdin.fileno()
                termios.tcsetattr(fd, termios.TCSADRAIN, saved)
                echo('show_cursor')
            except (ImportError, io.UnsupportedOperation):
                pass

    def __enter__(self):
        self.enable()

    def __exit__(self, _, __, ___):
        self.disable()

no_user_input = _no_user_input()
rndusr-torf-cli-688c8c0/torfcli/_ui.py000066400000000000000000000563711463253735700177340ustar00rootroot00000000000000# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details # http://www.gnu.org/licenses/gpl-3.0.txt import datetime import os import shutil import sys import textwrap import time import types from collections import abc import torf from . import _errors as err from . import _term, _utils, _vars LABEL_WIDTH = 11 LABEL_SEPARATOR = ' ' STATUS_SEPARATOR = ' | ' class UI: """Universal abstraction layer to allow different UIs""" def __init__(self, cfg=None): if cfg is not None: self.cfg = cfg def _human(self): if self._cfg.get('nohuman'): return False elif self._cfg.get('human'): return True elif sys.stdout.isatty(): return True else: return False @property def cfg(self): return self._cfg @cfg.setter def cfg(self, cfg): self._cfg = cfg if cfg.get('json'): self._fmt = _JSONFormatter(cfg) elif cfg.get('metainfo'): self._fmt = _MetainfoFormatter(cfg) elif self._human(): self._fmt = _HumanFormatter(cfg) else: self._fmt = _MachineFormatter(cfg) def error(self, exc, exit=True): if self._cfg['json']: self.info('Error', exc) else: sys.stderr.write(f'{_vars.__appname__}: {exc}\n') if exit: sys.exit(getattr(exc, 'exit_code', err.Code.GENERIC)) def warn(self, msg): sys.stderr.write(f'{_vars.__appname__}: WARNING: {msg}\n') def info(self, key, value, newline=True): return self._fmt.info(key, value, newline=newline) def infos(self, pairs): return self._fmt.infos(pairs) def show_torrent(self, torrent): info = self.info if torrent.name is not 
None: info('Name', torrent.name) if torrent.is_ready: info('Info Hash', torrent.infohash) info('Size', self._fmt.size(torrent)) if torrent.comment: info('Comment', self._fmt.comment(torrent)) if torrent.creation_date: info('Created', self._fmt.creation_date(torrent)) if torrent.created_by: info('Created By', torrent.created_by) if torrent.source: info('Source', torrent.source) if torrent.private is not None: info('Private', self._fmt.private(torrent)) if torrent.trackers: info('Tracker' + ('s' if len(torrent.trackers) > 1 else ''), self._fmt.trackers(torrent)) if torrent.webseeds: info('Webseed' + ('s' if len(torrent.webseeds) > 1 else ''), self._fmt.webseeds(torrent)) if torrent.httpseeds: info('HTTP Seed' + ('s' if len(torrent.httpseeds) > 1 else ''), self._fmt.httpseeds(torrent)) if torrent.piece_size: info('Piece Size', self._fmt.piece_size(torrent)) if torrent.piece_size: info('Piece Count', torrent.pieces) info('File Count', len(torrent.files)) exclude_patterns = [p for p in torrent.exclude_globs] exclude_patterns.extend(r.pattern for r in torrent.exclude_regexs) if exclude_patterns: info('Exclude', exclude_patterns) include_patterns = [p for p in torrent.include_globs] include_patterns.extend(r.pattern for r in torrent.include_regexs) if include_patterns: info('Include', include_patterns) try: info('Files', self._fmt.files(torrent)) except torf.PathError as e: self.error(e, exit=False) def StatusReporter(self): if self._cfg['json'] or self._cfg['metainfo']: return _QuietStatusReporter(self) elif self._human(): return _HumanStatusReporter(self) else: return _MachineStatusReporter(self) def check_output_file_exists(self, filepath): if not self._cfg['notorrent']: if os.path.exists(filepath): if os.path.isdir(filepath): raise err.WriteError(f'{filepath}: Is a directory') elif (not self._cfg['yes'] and not self._fmt.dialog_yes_no(f'{filepath}: Overwrite file?')): raise err.WriteError(f'{filepath}: File exists') def terminate(self, torrent): fmt = getattr(self, 
'_fmt', None) if fmt: fmt.terminate(torrent) class _FormatterBase: def __init__(self, cfg): self._cfg = cfg def webseeds(self, torrent): return torrent.webseeds def httpseeds(self, torrent): return torrent.httpseeds def terminate(self, torrent): pass class _HumanFormatter(_FormatterBase): def private(self, torrent): return 'yes' if torrent.private else 'no' def size(self, torrent): return _utils.bytes2string(torrent.size, plain_bytes=self._cfg['verbose'] > 0) def creation_date(self, torrent): if isinstance(torrent.creation_date, datetime.datetime): return torrent.creation_date.isoformat(sep=' ', timespec='seconds') else: return torrent.creation_date def piece_size(self, torrent): return _utils.bytes2string(torrent.piece_size, plain_bytes=self._cfg['verbose'] > 0) def files(self, torrent): return _utils.make_filetree(torrent.filetree, plain_bytes=self._cfg['verbose'] > 0) def comment(self, torrent): # Split lines into paragraphs, then wrap each paragraph at max width. list_of_lines = tuple(textwrap.wrap(line, width=75) or [''] # Preserve empty lines for line in torrent.comment.splitlines()) return tuple(line for lines in list_of_lines for line in lines) def trackers(self, torrent): lines = [] if len(torrent.trackers) == 1 and len(torrent.trackers[0]) == 1: # Single tracker in single tier - don't bother displaying tiers for tier in torrent.trackers: if tier: lines.append(tier[0]) else: # Show which tier each tracker belongs to tier_label_width = len('Tier :') + len(str(len(torrent.trackers))) for i,tier in enumerate(torrent.trackers, 1): if tier: lines.append(f'Tier {i}: {tier[0]}') for tracker in tier[1:]: lines.append(' ' * tier_label_width + ' ' + tracker) return lines def info(self, key, value, newline=True): label = key.rjust(LABEL_WIDTH) # Show multiple values as indented list if not isinstance(value, str) and isinstance(value, abc.Sequence): if value: # Print one indented value per line value_parts = [f'{value[0]}'] indent = len(label) * ' ' for item in 
value[1:]: value_parts.append(f'{indent}{LABEL_SEPARATOR}{item}') value = f'{_term.erase_to_eol}\n'.join(value_parts) else: value = '' else: value = str(value) value += _term.erase_to_eol _term.echo('move_pos1') if newline: sys.stdout.write(f'{label}{LABEL_SEPARATOR}{value}\n') _term.echo('ensure_line_below') else: sys.stdout.write(f'{label}{LABEL_SEPARATOR}{value}') _utils.flush(sys.stdout) def infos(self, pairs): for key, value in pairs: self.info(key, value) DIALOG_YES_NO_ANSWERS = {'y': True, 'n': False, 'Y': True, 'N': False, '\x03': False, # ctrl-c '\x07': False, # ctrl-g '\x1b': False} # escape def dialog_yes_no(self, question): while True: sys.stdout.write(f'{question} [y|n] ') _utils.flush(sys.stdout) key = _term.getch() _term.echo('erase_line', 'move_pos1') answer = self.DIALOG_YES_NO_ANSWERS.get(key, None) if answer is not None: return answer class _MachineFormatter(_FormatterBase): def private(self, torrent): return 'yes' if torrent.private else 'no' def size(self, torrent): return int(torrent.size) def creation_date(self, torrent): if isinstance(torrent.creation_date, datetime.datetime): return int(torrent.creation_date.timestamp()) else: return torrent.creation_date def piece_size(self, torrent): return int(torrent.piece_size) def files(self, torrent): return '\t'.join(str(f) for f in torrent.files) def comment(self, torrent): return torrent.comment.splitlines() def trackers(self, torrent): return [url for tier in torrent.trackers for url in tier] def info(self, key, value, newline=None): # Join multiple values with a tab character if not isinstance(value, str) and isinstance(value, abc.Sequence): value = '\t'.join(str(v) for v in value) sys.stdout.write(f'{key}\t{value}\n') _utils.flush(sys.stdout) def infos(self, pairs): for key, value in pairs: self.info(key, value) def dialog_yes_no(self, *_, **__): return False class _JSONFormatter(_MachineFormatter): def __init__(self, *args, **kwargs): super().__init__(*args, **kwargs) self._info = {} def 
private(self, torrent): return torrent.private def files(self, torrent): return [ {'Path': str(f), 'Size': f.size} for f in torrent.files ] def info(self, key, value, newline=None): # Make sure we can JSON-encode all kinds of iterable if not isinstance(value, str) and isinstance(value, abc.Sequence): value = tuple(value) if key == 'Error': errors = self._info.get(key, []) errors.append(value) self._info[key] = errors else: self._info[key] = value def terminate(self, torrent): sys.stdout.write(_utils.json_dumps(self._info)) _utils.flush(sys.stdout) class _MetainfoFormatter(_JSONFormatter): def info(self, key, value, newline=None): pass def terminate(self, torrent): if torrent is None: mi = {} elif self._cfg['verbose'] <= 0: # Show only standard fields mi = _utils.metainfo(torrent.metainfo, all_fields=False, remove_pieces=True) elif self._cfg['verbose'] == 1: # Show all fields except for ['info']['pieces'] mi = _utils.metainfo(torrent.metainfo, all_fields=True, remove_pieces=True) elif self._cfg['verbose'] >= 2: # Show all fields mi = _utils.metainfo(torrent.metainfo, all_fields=True, remove_pieces=False) sys.stdout.write(_utils.json_dumps(mi)) _utils.flush(sys.stdout) class _StatusReporterBase(): def __init__(self, ui): self._ui = ui self.reset() def reset(self): self._start_time = time.time() self._progress = _utils.Average(samples=5) self._time_left = _utils.Average(samples=3) self._info = types.SimpleNamespace( torrent=None, filepath=None, items_done=0, items_total=0, fraction_done=0, throughput=0, time_left=datetime.timedelta(0), time_elapsed=datetime.timedelta(0), time_total=datetime.timedelta(0), eta=datetime.datetime.now() + datetime.timedelta(300), ) def __enter__(self): return self def __exit__(self, _, __, ___): pass def keep_progress_summary(self): pass def keep_progress(self): pass def generate_callback(self, torrent, filepath, pieces_done, pieces_total): self._update_progress_info_hashing(torrent, filepath, pieces_done, pieces_total) progress_lines = 
self._get_hashing_progress_lines(self._info) self._ui.info('Progress', progress_lines, newline=False) def reuse_callback(self, torrent, torrent_filepath, torrent_files_done, torrent_files_total, is_match, exception): if exception: if isinstance(exception, torf.MetainfoError): self._ui.info('Error', f'{torrent_filepath}: {self._format_error(exception, torrent)}') else: self._ui.info('Error', self._format_error(exception, torrent)) if is_match is True: self._ui.info('Reused', torrent_filepath, newline=False) elif is_match is None: self._ui.info('Verifying', torrent_filepath, newline=False) else: self._update_progress_info_reuse(torrent, torrent_filepath, torrent_files_done, torrent_files_total) progress_lines = self._get_reuse_progress_lines(self._info) self._ui.info('Reuse', progress_lines, newline=False) def verify_callback(self, torrent, filepath, pieces_done, pieces_total, piece_index, piece_hash, exception): if exception: self._ui.info('Error', self._format_error(exception, torrent)) self._update_progress_info_hashing(torrent, filepath, pieces_done, pieces_total) progress_lines = self._get_hashing_progress_lines(self._info) self._ui.info('Progress', progress_lines, newline=False) def _update_progress_info_common(self, torrent, filepath, items_done, items_total): info = self._info info.torrent = torrent info.filepath = filepath info.items_done = items_done info.items_total = items_total if items_total > 0: info.fraction_done = items_done / items_total else: info.fraction_done = 100.0 def _update_progress_info_hashing(self, torrent, filepath, pieces_done, pieces_total): self._update_progress_info_common(torrent, filepath, pieces_done, pieces_total) info = self._info if pieces_done < pieces_total: self._progress.add(pieces_done) # Make sure we have enough samples to make estimates if len(self._progress.values) >= 2: info.time_elapsed = datetime.timedelta(seconds=round(time.time() - self._start_time)) time_diff = self._progress.times[-1] - self._progress.times[0] 
pieces_diff = self._progress.values[-1] - self._progress.values[0] bytes_diff = pieces_diff * torrent.piece_size info.throughput = bytes_diff / (time_diff + 0.001) # Prevent ZeroDivisionError bytes_left = (pieces_total - pieces_done) * torrent.piece_size self._time_left.add(bytes_left / info.throughput) info.time_left = datetime.timedelta(seconds=round(self._time_left.avg)) info.time_total = info.time_elapsed + info.time_left info.eta = datetime.datetime.now() + info.time_left else: # The last piece was hashed info.time_elapsed = datetime.timedelta(seconds=round(time.time() - self._start_time)) info.time_total = info.time_elapsed info.throughput = torrent.size / (info.time_total.total_seconds() + 0.001) # Prevent ZeroDivisionError info.time_left = datetime.timedelta(seconds=0) info.eta = datetime.datetime.now() def _update_progress_info_reuse(self, torrent, filepath, files_done, files_total): self._update_progress_info_common(torrent, filepath, files_done, files_total) info = self._info self._progress.add(files_done) # Make sure we have enough samples to make estimates if len(self._progress.values) >= 2: info.time_elapsed = datetime.timedelta(seconds=round(time.time() - self._start_time)) time_diff = self._progress.times[-1] - self._progress.times[0] files_diff = self._progress.values[-1] - self._progress.values[0] info.throughput = files_diff / (time_diff + 0.001) # Prevent ZeroDivisionError def _get_hashing_progress_lines(self, info): return str(info) def _get_reuse_progress_lines(self, info): return str(info) class _HumanStatusReporter(_StatusReporterBase): def __enter__(self): _term.no_user_input.enable() _term.echo('ensure_line_below') return self def keep_progress_summary(self): # The first of the final "Progress" lines is a performance summary. # Keep the summary but erase the progress bar blow. 
_term.echo('erase_to_eol', 'move_down', 'erase_line', 'move_up') sys.stdout.write('\n') def keep_progress(self): # Keep progress info fully intact so we can see how far it got sys.stdout.write('\n\n') def __exit__(self, _, __, ___): _term.no_user_input.disable() def _get_status_width(self): term_width, _ = shutil.get_terminal_size() term_width = min(term_width, 76) return term_width - LABEL_WIDTH - len(LABEL_SEPARATOR) def _get_hashing_progress_lines(self, info): percent_str = f'{info.fraction_done * 100:5.2f} %' throughput_str = f'{_utils.bytes2string(info.throughput, trailing_zeros=True)}/s' if info.items_done < info.items_total: status_width = self._get_status_width() line1 = self._progress_line1(info.fraction_done, os.path.basename(info.filepath), percent_str, throughput_str, status_width) line2 = self._progress_line2(info, status_width) return ''.join((_term.save_cursor_pos, _term.erase_to_eol, _term.move_down, line2, _term.restore_cursor_pos, line1)) else: return STATUS_SEPARATOR.join((percent_str, f'{info.time_total} total', throughput_str)) def _get_reuse_progress_lines(self, info): filename = os.path.basename(info.filepath) term_width,_ = shutil.get_terminal_size() term_width = min(term_width, 76) # Available width minus label status_width = term_width - LABEL_WIDTH - len(LABEL_SEPARATOR) percent_str = f'{info.fraction_done * 100:5.2f} %' throughput_str = f'{info.throughput:4.0f} files/s' return self._progress_line1(info.fraction_done, filename, percent_str, throughput_str, status_width) def _progress_line1(self, fraction_done, filename, percent, suffix, status_width): progress_bar_width = (status_width - len(percent) - len(suffix) - 1) if progress_bar_width >= 10: progress_bar = self._progress_bar(filename, fraction_done, progress_bar_width) return ' '.join((percent, progress_bar, suffix)) elif status_width >= 23: return STATUS_SEPARATOR.join((percent, suffix)) else: return percent def _progress_bar(self, text, fraction_done, width): text_width = width - 
2 if len(text) > text_width: half = int(text_width / 2) text = text[:half] + '…' + text[-(text_width - half - 1) :] elif len(text) < text_width: text += ' ' * (text_width - len(text)) pos = int(fraction_done * text_width) return ''.join(('▕', _term.reverse_on, text[:pos], _term.reverse_off, text[pos:], '▏')) def _progress_line2(self, info, status_width): items = {'elapsed' : f'{info.time_elapsed} elapsed', 'left' : f'{info.time_left} left', 'total' : f'{info.time_total} total', 'eta' : f'ETA: {"{0:%H}:{0:%M}:{0:%S}".format(info.eta)}'} priority = ('left', 'total', 'elapsed', 'eta') order = ('elapsed', 'left', 'total', 'eta') parts = [] priority = iter(priority) while True: try: name = next(priority) except StopIteration: break position = order.index(name) part = items[name] parts.insert(position, part) if len(STATUS_SEPARATOR.join(parts)) > status_width: # Always keep at least the time left to completion, even if it # looks ugly if len(parts) > 1: del parts[parts.index(part)] break line = STATUS_SEPARATOR.join(parts) if len(line) < status_width: # Remove any garbage that might still be there from previous draw # when terminal was wider return line + _term.erase_to_eol else: return line def _format_error(self, exception, torrent): if isinstance(exception, torf.VerifyContentError) and len(exception.files) > 1: lines = [f'Corruption in piece {exception.piece_index + 1}, ' f'at least one of these files is corrupt:'] for filepath in exception.files: lines.append(f' {filepath}') return lines else: return str(exception) class _MachineStatusReporter(_StatusReporterBase): def _get_hashing_progress_lines(self, info): return '\t'.join((f'{info.fraction_done * 100:.3f}', f'{round(info.time_elapsed.total_seconds())}', f'{round(info.time_left.total_seconds())}', f'{round(info.time_total.total_seconds())}', f'{round(info.eta.timestamp())}', f'{round(info.throughput)}', f'{info.filepath}')) def _get_reuse_progress_lines(self, info): return '\t'.join((f'{info.filepath}', 
f'{info.fraction_done * 100:.3f}', f'{info.items_done}', f'{info.items_total}')) def _format_error(self, exception, torrent): if isinstance(exception, torf.VerifyContentError) and len(exception.files) > 1: lines = [f'Corruption in piece {exception.piece_index + 1}, ' f'at least one of these files is corrupt:'] lines.extend(exception.files) return lines else: return str(exception) class _QuietStatusReporter(_MachineStatusReporter): def generate_callback(self, torrent, filepath, pieces_done, pieces_total): pass def reuse_callback(self, torrent, torrent_filepath, torrent_files_done, torrent_files_total, is_match, exception): pass def verify_callback(self, torrent, filepath, pieces_done, pieces_total, piece_index, piece_hash, exception): if exception: self._ui.info('Error', self._format_error(exception, torrent)) rndusr-torf-cli-688c8c0/torfcli/_utils.py000066400000000000000000000253221463253735700204470ustar00rootroot00000000000000# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details # http://www.gnu.org/licenses/gpl-3.0.txt import base64 import contextlib import datetime import io import json import os import sys import time from collections import abc import torf from . import _errors def get_torrent(cfg, ui): """ Read --in parameter and return torf.Torrent instance The --in parameter may be the path to a torrent file, a magnet URI or "-". If "-", stdin is read and interpreted as the content of a torrent file or a magnet URI. 
""" # Create torf.Torrent instance from INPUT if not cfg['in']: raise RuntimeError('--in option not given; mode detection is probably kaput') def get_torrent_from_magnet(string, fallback_exc): # Parse magnet URI from stdin try: magnet = torf.Magnet.from_string(string) except torf.TorfError as exc: # Raise magnet parsing error if INPUT looks like magnet URI, # torrent parsing error otherwise. if string.startswith('magnet:'): raise _errors.Error(exc) else: raise _errors.Error(fallback_exc) else: # Get "info" section (files, sizes, etc) unless the user is not # interested in a complete torrent, e.g. when editing a magnet URI if not cfg['notorrent']: def callback(exc): ui.error(_errors.Error(exc), exit=False) magnet.get_info(callback=callback) torrent = magnet.torrent() torrent.created_by = None return torrent if cfg['in'] == '-' and not os.path.exists('-'): data = os.read(sys.stdin.fileno(), torf.Torrent.MAX_TORRENT_FILE_SIZE) try: # Read torrent data from stdin return torf.Torrent.read_stream(io.BytesIO(data), validate=cfg['validate']) except torf.TorfError as exc: # Parse magnet URI from stdin return get_torrent_from_magnet(data.decode('utf-8'), exc) else: try: # Read torrent data from file path return torf.Torrent.read(cfg['in'], validate=cfg['validate']) except torf.TorfError as exc: # Parse magnet URI from string return get_torrent_from_magnet(cfg['in'], exc) def get_torrent_filepath(torrent, cfg): """Return the file path of the output torrent file""" if cfg['out']: # User-given torrent file path return cfg['out'] else: filename = torrent.name profiles = cfg.get('profile', ()) if profiles: filename += '.' 
+ '.'.join(profiles) return filename + '.torrent' def is_magnet(string): return not os.path.exists(string) and string.startswith('magnet:') class Average(): def __init__(self, samples): self.times = [] self.values = [] self.samples = samples def add(self, value): self.times.append(time.time()) self.values.append(value) while len(self.values) > self.samples: self.times.pop(0) self.values.pop(0) @property def avg(self): return sum(self.values) / len(self.values) _C_DOWN = '\u2502' # │ _C_DOWN_RIGHT = '\u251C' # ├ _C_RIGHT = '\u2500' # ─ _C_CORNER = '\u2514' # └ def make_filetree(tree, parents_is_last=(), plain_bytes=False): lines = [] items = tuple(tree.items()) max_i = len(items) - 1 for i,(name,node) in enumerate(items): is_last = i >= max_i # Assemble indentation string (`parents_is_last` being empty means # this is the top node) indent = '' if parents_is_last: # `parents_is_last` holds the `is_last` values of our ancestors. # This lets us construct the correct indentation string: For # each parent, if it has any siblings below it in the directory, # print a vertical bar ('|') that leads to the siblings. # Otherwise the indentation string for that parent is empty. # We ignore the first/top/root node because it isn't indented. for parent_is_last in parents_is_last[1:]: if parent_is_last: indent += ' ' else: indent += f'{_C_DOWN} ' # If this is the last node, use '└' to stop the line, otherwise # branch off with '├'. 
if is_last: indent += f'{_C_CORNER}{_C_RIGHT}' else: indent += f'{_C_DOWN_RIGHT}{_C_RIGHT}' if isinstance(node, torf.File): lines.append(f'{indent}{name} [{bytes2string(node.size, plain_bytes=plain_bytes)}]') else: lines.append(f'{indent}{name}') # Descend into child node sub_parents_is_last = parents_is_last + (is_last,) lines.extend(make_filetree(node, parents_is_last=sub_parents_is_last, plain_bytes=plain_bytes)) return lines def merge_metainfo(a, b): # Merge dictionary `b` into dictionary `a`, overwriting or adding values # from `b` and preserving existing values in `a`. if isinstance(b, abc.Mapping): for k, v in b.items(): if k in a and isinstance(a[k], abc.Mapping) and isinstance(v, abc.Mapping): merge_metainfo(a[k], v) elif k in a and v is None: del a[k] elif v is not None: a[k] = v else: raise ValueError(f'Not a JSON object: {b}') _DATE_FORMATS = ('%Y-%m-%d %H:%M:%S', '%Y-%m-%dT%H:%M:%S', '%Y-%m-%d %H:%M', '%Y-%m-%dT%H:%M', '%Y-%m-%d') def parse_date(date_str): if date_str == 'now': return datetime.datetime.now() elif date_str == 'today': return datetime.datetime.now().replace(hour=0, minute=0, second=0, microsecond=0) elif isinstance(date_str, str): for f in _DATE_FORMATS: try: return datetime.datetime.strptime(date_str, f) except ValueError: pass raise ValueError('Invalid date') _PREFIXES = ((1024**4, 'Ti'), (1024**3, 'Gi'), (1024**2, 'Mi'), (1024, 'Ki')) def bytes2string(b, plain_bytes=False, trailing_zeros=False): string = str(b) prefix = '' for minval,_prefix in _PREFIXES: if b >= minval: prefix = _prefix string = f'{b / minval:.02f}' # Remove trailing zeros after the point while not trailing_zeros and string[-1] == '0': string = string[:-1] if not trailing_zeros: if string[-1] == '.': string = string[:-1] break if plain_bytes and prefix: return f'{string} {prefix}B / {b:,} B' else: return f'{string} {prefix}B' @contextlib.contextmanager def caught_BrokenPipeError(): try: yield except BrokenPipeError: # Prevent Python interpreter from printing 
redundant error message # "BrokenPipeError: [Errno 32] Broken pipe" and exit with correct exit # code. # https://bugs.python.org/issue11380#msg248579 try: sys.stdout.flush() finally: try: sys.stdout.close() finally: try: sys.stderr.flush() finally: sys.stderr.close() sys.exit(0) def flush(f): with caught_BrokenPipeError(): f.flush() # torf.Torrent.metainfo stores boolean values (i.e. "private") as True/False # and JSON converts them to true/false, but bencode doesn't know booleans # and uses integers (1/0) instead. def bool2int(obj): if isinstance(obj, bool): return int(obj) elif isinstance(obj, abc.Mapping): return {k:bool2int(v) for k,v in obj.items()} elif isinstance(obj, abc.Iterable) and not isinstance(obj, (str, bytes, bytearray)): return [bool2int(item) for item in obj] else: return obj _main_fields = ('announce', 'announce-list', 'comment', 'created by', 'creation date', 'encoding', 'info', 'url-list', 'httpseed') _info_fields = ('files', 'length', 'md5sum', 'name', 'piece length', 'private') _files_fields = ('length', 'path', 'md5sum') def metainfo(dct, all_fields=False, remove_pieces=True): """ Return user-friendly copy of metainfo `dct` all_fields: Whether to remove any non-standard entries in `dct` remove_pieces: Whether to remove ['info']['pieces'] """ def copy(obj, only=(), exclude=()): if isinstance(obj, abc.Mapping): cp = type(obj)() for k,v in obj.items(): if k not in exclude and (not only or k in only): cp[k] = copy(v) return cp elif isinstance(obj, abc.Iterable) and not isinstance(obj, (str, bytes, bytearray)): return [copy(v) for v in obj] else: return obj new = copy(dct) if remove_pieces: if 'pieces' in new.get('info', {}): del new['info']['pieces'] if not all_fields: # Remove non-standard top-level fields for k in tuple(new): if k not in _main_fields: del new[k] # Remove non-standard fields in "info" or "info" itself if non-dict if 'info' in new: if not isinstance(new['info'], dict): del new['info'] else: for k in tuple(new['info']): if k not 
in _info_fields: del new['info'][k] if 'files' in new['info'] and isinstance(new['info']['files'], list): for file in new['info']['files']: for k in tuple(file): if k not in _files_fields: del file[k] if 'info' in new and not new['info']: del new['info'] return bool2int(new) def json_dumps(obj): def default(obj): if isinstance(obj, datetime.datetime): return int(obj.timestamp()) elif isinstance(obj, (bytes, bytearray)): return base64.standard_b64encode(obj).decode() else: return str(obj) return json.dumps(obj, allow_nan=False, indent=4, default=default) + '\n' rndusr-torf-cli-688c8c0/torfcli/_vars.py000066400000000000000000000013031463253735700202530ustar00rootroot00000000000000# This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details # http://www.gnu.org/licenses/gpl-3.0.txt __version__ = '5.2.1' __appname__ = 'torf' __url__ = 'https://github.com/rndusr/torf-cli' __description__ = 'CLI tool to create, read and edit torrent files' rndusr-torf-cli-688c8c0/tox.ini000066400000000000000000000004031463253735700164400ustar00rootroot00000000000000[tox] envlist = py313, py312, py311, py310, py39, py38, lint [testenv] deps = pytest ../torf commands = pytest {posargs} [testenv:lint] deps = ruff flake8 isort commands = ruff check . flake8 torfcli tests isort --check-only torfcli tests