././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5005574 flit-3.12.0/.bumpversion.cfg0000644000000000000000000000045014770461472012604 0ustar00[bumpversion] current_version = 3.12.0 commit = True tag = False [bumpversion:file:pyproject.toml] search = flit_core >={current_version} replace = flit_core >={new_version} [bumpversion:file:flit/__init__.py] [bumpversion:file:flit_core/flit_core/__init__.py] [bumpversion:file:doc/conf.py] ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5005574 flit-3.12.0/.coveragerc0000644000000000000000000000006014770461472011612 0ustar00[run] omit = */tests/* */flit_core/vendor/* ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5005574 flit-3.12.0/.github/dependabot.yml0000644000000000000000000000016514770461472013667 0ustar00version: 2 updates: - package-ecosystem: "github-actions" directory: "/" schedule: interval: "daily" ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5005574 flit-3.12.0/.github/workflows/test.yml0000644000000000000000000000615614770461472014604 0ustar00name: Test on: push: branches: - main tags: - "*" pull_request: concurrency: group: >- ${{ github.workflow }}- ${{ github.ref_type }}- ${{ github.event.pull_request.number || github.sha }} cancel-in-progress: true jobs: test: runs-on: ${{ matrix.platform }} strategy: matrix: platform: - "ubuntu-latest" - "windows-latest" python-version: - "3.8" - "3.9" - "3.10" - "3.11" - "3.12" - "3.13" steps: - uses: actions/checkout@v4 - name: Setup Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install --upgrade pip pip install tox tox-gh-actions codecov - name: Run tests run: tox - name: Codecov upload env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} run: codecov test-py37: runs-on: 
"ubuntu-22.04" strategy: matrix: python-version: [ "3.7", ] steps: - uses: actions/checkout@v4 - name: Setup Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install --upgrade pip pip install tox tox-gh-actions codecov - name: Run tests run: tox - name: Codecov upload env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} run: codecov test-py36: runs-on: "ubuntu-20.04" strategy: matrix: python-version: [ "3.6", ] steps: - uses: actions/checkout@v4 - name: Setup Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Install dependencies run: | python -m pip install --upgrade pip pip install tox tox-gh-actions codecov - name: Run tests run: tox - name: Codecov upload env: CODECOV_TOKEN: ${{ secrets.CODECOV_TOKEN }} run: codecov packages: runs-on: ubuntu-latest needs: [test, test-py36] permissions: id-token: write # OIDC for uploading to PyPI steps: - name: Checkout uses: actions/checkout@v4 - name: Set up Python 3.12 uses: actions/setup-python@v5 with: python-version: "3.12" - name: Build flit_core & flit packages run: | python flit_core/build_dists.py pip install requests docutils PYTHONPATH=flit_core/ python -m flit build # Copy flit_core packages to same location cp flit_core/dist/* dist/ - uses: actions/upload-artifact@v4 with: name: packages path: ./dist/* - name: Publish package distributions to PyPI uses: pypa/gh-action-pypi-publish@release/v1 if: ${{ startsWith(github.ref, 'refs/tags/') }} ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5005574 flit-3.12.0/.gitignore0000644000000000000000000000031714770461472011466 0ustar00/build/ /dist/ /flit_core/dist/ __pycache__/ /doc/_build/ /tests/samples/build/ /tests/samples/dist/ /tests/samples/ns1-pkg/dist/ /htmlcov/ /.coverage /.pytest_cache /.tox .idea/ venv/ *.pyc .python-version 
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5005574 flit-3.12.0/.pre-commit-config.yaml0000644000000000000000000000023114770461472013752 0ustar00repos: - repo: https://github.com/pre-commit/pre-commit-hooks rev: v5.0.0 hooks: - id: trailing-whitespace - id: end-of-file-fixer ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5005574 flit-3.12.0/.readthedocs.yml0000644000000000000000000000024614770461472012565 0ustar00version: 2 build: os: ubuntu-22.04 tools: python: "3.11" sphinx: configuration: doc/conf.py python: install: - requirements: doc/requirements.txt ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5005574 flit-3.12.0/LICENSE0000644000000000000000000000276514770461472010514 0ustar00Copyright (c) 2015, Thomas Kluyver and contributors All rights reserved. BSD 3-clause license: Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5005574 flit-3.12.0/README.rst0000644000000000000000000000500514770461472011164 0ustar00**Flit** is a simple way to put Python packages and modules on PyPI. It tries to require less thought about packaging and help you avoid common mistakes. See `Why use Flit? `_ for more about how it compares to other Python packaging tools. Install ------- :: $ python3 -m pip install flit Flit requires Python 3 and therefore needs to be installed using the Python 3 version of pip. Python 2 modules can be distributed using Flit, but need to be importable on Python 3 without errors. Usage ----- Say you're writing a module ``foobar`` — either as a single file ``foobar.py``, or as a directory — and you want to distribute it. 1. Make sure that foobar's docstring starts with a one-line summary of what the module is, and that it has a ``__version__``: .. code-block:: python """An amazing sample package!""" __version__ = "0.1" 2. Install flit if you don't already have it:: python3 -m pip install flit 3. Run ``flit init`` in the directory containing the module to create a ``pyproject.toml`` file. It will look something like this: .. 
code-block:: ini [build-system] requires = ["flit_core >=3.2,<4"] build-backend = "flit_core.buildapi" [project] name = "foobar" authors = [{name = "Sir Robin", email = "robin@camelot.uk"}] dynamic = ["version", "description"] [project.urls] Home = "https://github.com/sirrobin/foobar" You can edit this file to add other metadata, for example to set up command line scripts. See the `pyproject.toml page `_ of the documentation. If you have already got a ``flit.ini`` file to use with older versions of Flit, convert it to ``pyproject.toml`` by running ``python3 -m flit.tomlify``. 4. Run this command to upload your code to PyPI:: flit publish Once your package is published, people can install it using *pip* just like any other package. In most cases, pip will download a 'wheel' package, a standard format it knows how to install. If you specifically ask pip to install an 'sdist' package, it will install and use Flit in a temporary environment. To install a package locally for development, run:: flit install [--symlink] [--python path/to/python] Flit packages a single importable module or package at a time, using the import name as the name on PyPI. All subpackages and data files within a package are included automatically. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5005574 flit-3.12.0/SECURITY.md0000644000000000000000000000107414770461472011270 0ustar00# Security Policy ## Supported Versions Only the latest non-prerelease version is supported. ## Security contact information To report a security vulnerability ### Directly on GitHub You can also directly propose a GitHub security advisory on the Flit Security page of github: [https://github.com/pypa/flit/security](https://github.com/pypa/flit/security) ### via Tidelift: You can use the [Tidelift security contact](https://tidelift.com/security). Tidelift will coordinate the fix and disclosure. 
If you are a tidelift subscriber, this is the preferred path ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5005574 flit-3.12.0/bootstrap_dev.py0000644000000000000000000000202414770461472012720 0ustar00#!/usr/bin/env python3 # Symlink install flit & flit_core for development. # Most projects can do the same with 'flit install --symlink'. # But that doesn't work until Flit is installed, so we need some bootstrapping. import argparse import logging import os from pathlib import Path import sys my_dir = Path(__file__).parent os.chdir(str(my_dir)) sys.path.insert(0, 'flit_core') from flit.install import Installer ap = argparse.ArgumentParser() ap.add_argument('--user') args = ap.parse_args() logging.basicConfig(level=logging.INFO) install_kwargs = {'symlink': True} if os.name == 'nt': # Use .pth files instead of symlinking on Windows install_kwargs = {'symlink': False, 'pth': True} # Install flit_core Installer.from_ini_path( my_dir / 'flit_core' / 'pyproject.toml', user=args.user, **install_kwargs ).install() print("Linked flit_core into site-packages.") # Install flit Installer.from_ini_path( my_dir / 'pyproject.toml', user=args.user, **install_kwargs ).install() print("Linked flit into site-packages.") ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5005574 flit-3.12.0/codecov.yml0000644000000000000000000000001514770461472011636 0ustar00comment: off ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5015574 flit-3.12.0/doc/Makefile0000644000000000000000000001514214770461472011705 0ustar00# Makefile for Sphinx documentation # # You can set these variables from the command line. SPHINXOPTS = SPHINXBUILD = sphinx-build PAPER = BUILDDIR = _build # User-friendly check for sphinx-build ifeq ($(shell which $(SPHINXBUILD) >/dev/null 2>&1; echo $$?), 1) $(error The '$(SPHINXBUILD)' command was not found. 
Make sure you have Sphinx installed, then set the SPHINXBUILD environment variable to point to the full path of the '$(SPHINXBUILD)' executable. Alternatively you can add the directory with the executable to your PATH. If you don't have Sphinx installed, grab it from http://sphinx-doc.org/) endif # Internal variables. PAPEROPT_a4 = -D latex_paper_size=a4 PAPEROPT_letter = -D latex_paper_size=letter ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . # the i18n builder cannot share the environment and doctrees with the others I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) . .PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext help: @echo "Please use \`make ' where is one of" @echo " html to make standalone HTML files" @echo " dirhtml to make HTML files named index.html in directories" @echo " singlehtml to make a single large HTML file" @echo " pickle to make pickle files" @echo " json to make JSON files" @echo " htmlhelp to make HTML files and a HTML help project" @echo " qthelp to make HTML files and a qthelp project" @echo " devhelp to make HTML files and a Devhelp project" @echo " epub to make an epub" @echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter" @echo " latexpdf to make LaTeX files and run them through pdflatex" @echo " latexpdfja to make LaTeX files and run them through platex/dvipdfmx" @echo " text to make text files" @echo " man to make manual pages" @echo " texinfo to make Texinfo files" @echo " info to make Texinfo files and run them through makeinfo" @echo " gettext to make PO message catalogs" @echo " changes to make an overview of all changed/added/deprecated items" @echo " xml to make Docutils-native XML files" @echo " pseudoxml to make pseudoxml-XML files for display purposes" @echo " linkcheck to check all external links for integrity" @echo " doctest to run all doctests embedded in the documentation (if 
enabled)" clean: rm -rf $(BUILDDIR)/* html: $(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/html." dirhtml: $(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml @echo @echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml." singlehtml: $(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml @echo @echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml." pickle: $(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle @echo @echo "Build finished; now you can process the pickle files." json: $(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json @echo @echo "Build finished; now you can process the JSON files." htmlhelp: $(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp @echo @echo "Build finished; now you can run HTML Help Workshop with the" \ ".hhp project file in $(BUILDDIR)/htmlhelp." qthelp: $(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp @echo @echo "Build finished; now you can run "qcollectiongenerator" with the" \ ".qhcp project file in $(BUILDDIR)/qthelp, like this:" @echo "# qcollectiongenerator $(BUILDDIR)/qthelp/Flit.qhcp" @echo "To view the help file:" @echo "# assistant -collectionFile $(BUILDDIR)/qthelp/Flit.qhc" devhelp: $(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp @echo @echo "Build finished." @echo "To view the help file:" @echo "# mkdir -p $$HOME/.local/share/devhelp/Flit" @echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/Flit" @echo "# devhelp" epub: $(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub @echo @echo "Build finished. The epub file is in $(BUILDDIR)/epub." latex: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo @echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex." @echo "Run \`make' in that directory to run these through (pdf)latex" \ "(use \`make latexpdf' here to do that automatically)." 
latexpdf: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through pdflatex..." $(MAKE) -C $(BUILDDIR)/latex all-pdf @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." latexpdfja: $(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex @echo "Running LaTeX files through platex and dvipdfmx..." $(MAKE) -C $(BUILDDIR)/latex all-pdf-ja @echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex." text: $(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text @echo @echo "Build finished. The text files are in $(BUILDDIR)/text." man: $(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man @echo @echo "Build finished. The manual pages are in $(BUILDDIR)/man." texinfo: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo @echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo." @echo "Run \`make' in that directory to run these through makeinfo" \ "(use \`make info' here to do that automatically)." info: $(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo @echo "Running Texinfo files through makeinfo..." make -C $(BUILDDIR)/texinfo info @echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo." gettext: $(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale @echo @echo "Build finished. The message catalogs are in $(BUILDDIR)/locale." changes: $(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes @echo @echo "The overview file is in $(BUILDDIR)/changes." linkcheck: $(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck @echo @echo "Link check complete; look for any errors in the above output " \ "or in $(BUILDDIR)/linkcheck/output.txt." doctest: $(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest @echo "Testing of doctests in the sources finished, look at the " \ "results in $(BUILDDIR)/doctest/output.txt." xml: $(SPHINXBUILD) -b xml $(ALLSPHINXOPTS) $(BUILDDIR)/xml @echo @echo "Build finished. 
The XML files are in $(BUILDDIR)/xml." pseudoxml: $(SPHINXBUILD) -b pseudoxml $(ALLSPHINXOPTS) $(BUILDDIR)/pseudoxml @echo @echo "Build finished. The pseudo-XML files are in $(BUILDDIR)/pseudoxml." ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5015574 flit-3.12.0/doc/_static/flit_logo_nobg.svg0000644000000000000000000002127514770461472015403 0ustar00 image/svg+xml ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5015574 flit-3.12.0/doc/_static/flit_logo_nobg_cropped.png0000644000000000000000000001340214770461472017075 0ustar00PNG  IHDR,a?>bKGD pHYs))"ߌtIME"KL{IDATxy|ǿ;YeU+nEQ$,5VԊT[i]ץ~\rui"Jb*- "E " ,{8 $g,gN;gf7(((((((((JHB?GX;@crXY74h(*tQ6v?// +yg)ME1b݋ ?!6[EXW.;c:F򦲽c |,.rV4R:/Mk@{`42vk**xY:c]#ہ (zfbx߅ @ajL!,Re. {ǚ+8i 'ȼ)[opoZc C qv;K2?Fli>ޅdbb[AF8ޏebWjlSo3G`%A!#ی8WPtkJc˰ `>3:5k`c2&IEqSXka?D6fM߯v7QRe B[x ǎఈs(+3eeڟxKp YenbƎ!PUu6?gbqqF0_#lS-[TBAU?&?*i&YHH,!FX2$Sҫ|s (p\BlQt)}@xY^fq4A27<ـ#mƟM%P:7Y\"HL%groBv$'MIr}τ$d~>*"}^2208 ιT*l)5gd<ƾHYoj`71LYȑ;{JeuVz#DnC gj*F92@&& dMM r/0!A|r|^䢔FUU1&%++3Wje=~{L q\eҁ45mRJCo$"ڧa:Vb)z6|$ڭ?̠uX8h'AbN, ]IKݣ'b?coR!SǸ+3]"%h؏3}DFCڈ/2 a1PޟӓH/?]sh*N% o q}1~@'" @NF<\4;dHX&W)V8SOPVŘҙy(!K?i;%=T\!2`0 ;O[+g֩MS_.WU1JAEoHƕޏ&P T} 2r%TJϧLټ!]Is!['O݂نn\ r(ȡ4Y`տ̳ 1"$6^kt+s? 
LFbMbz`cɮu&@&&cǓ\'BԨ4LUPzJ iH!/ؗLUJpi W=ab/Ռ; WSӭ.} D{鯰l8b$5Z&.Ę653_w&5UF U8M(}mm&{zC8y|VǯkH{#2'tK{ eҾ,귾>{Nzo/g<f%l馦ṫ3#SsPrw0Aƀd?&ix8]8K|W#@M U#pϔ)וya\ _3}op3gkePVnf 3:]fXW6y{G;~@`/rtLj.EL)R )[HҶ7O5;L[,WPv@V Sm㮉s99Lh߇zsz"IdKycaj\Yw&xH?,P@WhEw|엏 <6ʑ=C-9w% BLa;15^[=Y ‰ϟtdDٛ :W,1 XG9a͍EcRјI򕣚G{E~P~X ddY" 3EF4ol$ WbG[ѫDRؽo^UIUH6Y'g5G S{.X{jbpu/WuaMzLD6EFE"n`?~7WM*ʋ!V{βwW2rPɪcMSAGYMg&މ\QzkŴ`k(.N(ZflBcmvB5qNL=&`ZmMki?-9/0v'*,ƚlA{mߵNgzV\L{Ri#R+ϐ?6BWq-_vr:͵{8&ZrkT$UŘo#2#Di'R߄c^_ڙBVSt1rۍFȐ c,{d{!Blo1ߞksJ;3Y`}W{Y&lEV/\ϗ UV~\~;;lxd'O2qc=j6'!lbL@~>3G;d{:NwqL]F=]o!; Ə!m۱X1q};XW']|[7iutvSuD:O㮰Y5y7.Ǧ|ct 嫆a6Q*nV(zHzwa 7Ϫ9(*F,!}b|QA0 Vo{bc9jz/]1ڃWSPTmyjKD0  ShC6&y0iA image/svg+xml ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5015574 flit-3.12.0/doc/bootstrap.rst0000644000000000000000000000330714770461472013014 0ustar00Bootstrapping ============= Flit is itself packaged using Flit, as are some foundational packaging tools such as ``pep517``. So where can you start if you need to install everything from source? .. note:: For most users, ``pip`` handles all this automatically. You should only need to deal with this if you're building things entirely from scratch, such as putting Python packages into another package format. The key piece is ``flit_core``. This is a package which can build itself using nothing except Python and the standard library. From an unpacked source archive, you can make a wheel by running:: python -m flit_core.wheel And then you can install this wheel with the ``bootstrap_install.py`` script included in the sdist (or by unzipping it to the correct directory):: # Install to site-packages for this Python: python bootstrap_install.py dist/flit_core-*.whl # Install somewhere else: python bootstrap_install.py --installdir /path/to/site-packages dist/flit_core-*.whl As of version 3.6, flit_core bundles the ``tomli`` TOML parser, to avoid a dependency cycle. 
If you need to unbundle it, you will need to special-case installing flit_core and/or tomli to get around that cycle. After ``flit_core``, I recommend that you get `installer `_ set up. You can use ``python -m flit_core.wheel`` again to make a wheel, and then use installer itself (from the source directory) to install it. After that, you probably want to get `build `_ and its dependencies installed as the goal of the bootstrapping phase. You can then use ``build`` to create wheels of any other Python packages, and ``installer`` to install them. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5015574 flit-3.12.0/doc/cmdline.rst0000644000000000000000000002023314770461472012407 0ustar00Flit command line interface =========================== All operations use the ``flit`` command, followed by one of a number of subcommands. Common options -------------- .. program:: flit .. option:: -f , --ini-file Path to a config file specifying the module to build. The default is ``pyproject.toml``. .. option:: --version Show the version of Flit in use. .. option:: --help Show help on the command-line interface. .. option:: --debug Show more detailed logs about what flit is doing. .. _build_cmd: ``flit build`` -------------- .. program:: flit build Build a wheel and an sdist (tarball) from the package. .. option:: --format Limit to building either ``wheel`` or ``sdist``. .. option:: --setup-py Generate a ``setup.py`` file in the sdist, so it can be installed by older versions of pip. .. option:: --no-setup-py Don't generate a setup.py file in the sdist. This is the default. An sdist built without this will only work with tools that support PEP 517, but the wheel will still be usable by any compatible tool. .. versionchanged:: 3.5 Generating ``setup.py`` disabled by default. .. 
option:: --use-vcs Use the files checked in to git or mercurial as the starting list to include in an sdist, and then apply inclusions and exclusions :ref:`from pyproject.toml `. This is the default for now, but we're planning to switch to ``--no-use-vcs`` as the default in a future version. .. option:: --no-use-vcs Create the sdist starting with only the files inside the installed module or package, along with any inclusions and exclusions defined in pyproject.toml. With this option, sdists from ``flit build`` are equivalent to those built by tools calling Flit as a backend, such as `build `_. .. _publish_cmd: ``flit publish`` ---------------- .. program:: flit publish Build a wheel and an sdist (tarball) from the package, and upload them to PyPI or another repository. .. option:: --format Limit to publishing either ``wheel`` or ``sdist``. You should normally publish the two formats together. .. option:: --setup-py .. option:: --no-setup-py .. option:: --use-vcs .. option:: --no-use-vcs These options affecting what goes in the sdist are described for :ref:`build_cmd` above. .. option:: --repository Name of a repository to upload packages to. Should match a section in ``~/.pypirc``. The default is ``pypi``. .. option:: --pypirc The .pypirc config file to be used. The default is ``~/.pypirc``. .. seealso:: :doc:`upload` .. _install_cmd: ``flit install`` ---------------- .. program:: flit install Install the package on your system. By default, the package is installed to the same Python environment that Flit itself is installed in; use :option:`--python` or :envvar:`FLIT_INSTALL_PYTHON` to override this. If you don't have permission to modify the environment (e.g. the system Python on Linux), Flit may do a user install instead. Use the :option:`--user` or :option:`--env` flags to force this one way or the other, rather than letting Flit guess. .. 
option:: -s, --symlink Symlink the module into site-packages rather than copying it, so that you can test changes without reinstalling the module. .. option:: --pth-file Create a ``.pth`` file in site-packages rather than copying the module, so you can test changes without reinstalling. This is a less elegant alternative to ``--symlink``, but it works on Windows, which typically doesn't allow symlinks. .. option:: --deps Which dependencies to install. One of ``all``, ``production``, ``develop``, or ``none``. ``all`` and ``develop`` install the extras ``test``, ``doc``, and ``dev``. Default ``all``. .. option:: --extras Which named extra features to install dependencies for. Specify ``all`` to install all optional dependencies, or a comma-separated list of extras. Default depends on ``--deps``. .. option:: --only-deps Install the dependencies of this package, but not the package itself. This can be useful for e.g. building a container image, where your own code is copied or mounted into the container at a later stage. .. versionadded:: 3.8 .. option:: --user Do a user-local installation. This is the default if flit is not in a virtualenv or conda env (if the environment's library directory is read-only and ``site.ENABLE_USER_SITE`` is true). .. option:: --env Install into the environment - the opposite of :option:`--user`. This is the default in a virtualenv or conda env (if the environment's library directory is writable or ``site.ENABLE_USER_SITE`` is false). .. option:: --python Install for another Python, identified by the path of the python executable. Using this option, you can install a module for Python 2, for instance. See :envvar:`FLIT_INSTALL_PYTHON` if this option is not given. .. versionchanged:: 2.1 Added :envvar:`FLIT_INSTALL_PYTHON` and use its value over the Python running Flit when an explicit :option:`--python` option is not given. .. note:: Flit calls pip to do the installation. 
You can set any of pip's options `using its environment variables `__. When you use the :option:`--symlink` or :option:`--pth-file` options, pip is used to install dependencies. Otherwise, Flit builds a wheel and then calls pip to install that. .. _init_cmd: ``flit init`` ------------- .. program:: flit init Create a new ``pyproject.toml`` config file by prompting for information about the module in the current directory. Environment variables --------------------- .. envvar:: FLIT_NO_NETWORK .. versionadded:: 0.10 Setting this to any non-empty value will stop flit from making network connections (unless you explicitly ask to upload a package). This is intended for downstream packagers, so if you use this, it's up to you to ensure any necessary dependencies are installed. .. envvar:: FLIT_ROOT_INSTALL By default, ``flit install`` will fail when run as root on POSIX systems, because installing Python modules systemwide is not recommended. Setting this to any non-empty value allows installation as root. It has no effect on Windows. .. envvar:: FLIT_USERNAME FLIT_PASSWORD FLIT_INDEX_URL .. versionadded:: 0.11 Set a username, password, and index URL for uploading packages. See :ref:`uploading packages with environment variables ` for more information. Token-based upload to PyPI is supported. To upload using a PyPI token, set ``FLIT_USERNAME`` to ``__token__``, and ``FLIT_PASSWORD`` to the token value. .. envvar:: FLIT_ALLOW_INVALID .. versionadded:: 0.13 Setting this to any non-empty value tells Flit to continue if it detects invalid metadata, instead of failing with an error. Problems will still be reported in the logs, but won't cause Flit to stop. If the metadata is invalid, uploading the package to PyPI may fail. This environment variable provides an escape hatch in case Flit incorrectly rejects your valid metadata. If you need to use it and you believe your metadata is valid, please `open an issue `__. .. envvar:: FLIT_INSTALL_PYTHON .. versionadded:: 2.1 .. 
program:: flit install Set a default Python interpreter for :ref:`install_cmd` to use when :option:`--python` is not specified. The value can be either an absolute path, or a command name (which will be found in ``PATH``). If this is unset or empty, the module is installed for the copy of Python that is running Flit. .. envvar:: SOURCE_DATE_EPOCH To make reproducible builds, set this to a timestamp as a number of seconds since the start of the year 1970 in UTC, and document the value you used. On Unix systems, you can get a value for the current time by running:: date +%s .. seealso:: `The SOURCE_DATE_EPOCH specification `__ ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5015574 flit-3.12.0/doc/conf.py0000644000000000000000000002016314770461472011543 0ustar00# -*- coding: utf-8 -*- # # Flit documentation build configuration file, created by # sphinx-quickstart on Sun Mar 15 19:16:41 2015. # # This file is execfile()d with the current directory set to its # containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys import os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. #sys.path.insert(0, os.path.abspath('.')) # -- General configuration ------------------------------------------------ # If your documentation needs a minimal Sphinx version, state it here. #needs_sphinx = '1.0' # Add any Sphinx extension module names here, as strings. They can be # extensions coming with Sphinx (named 'sphinx.ext.*') or your custom # ones. 
extensions = [ 'sphinx.ext.autodoc', 'sphinxcontrib_github_alt', 'sphinx_rtd_theme', ] github_project_url = "https://github.com/pypa/flit" # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8-sig' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Flit' copyright = u'2015, Thomas Kluyver' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '3.12.0' # The full version, including alpha/beta/rc tags. release = version #+ '.1' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. #language = None # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. #today_fmt = '%B %d, %Y' # List of patterns, relative to source directory, that match files and # directories to ignore when looking for source files. exclude_patterns = ['_build'] # The reST default role (used for this markup: `text`) to use for all # documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. #show_authors = False # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. 
#modindex_common_prefix = [] # If true, keep warnings as "system message" paragraphs in the built documents. #keep_warnings = False # -- Options for HTML output ---------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for # a list of builtin themes. html_theme = 'sphinx_rtd_theme' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. #html_theme_path = [] # The name for this set of Sphinx documents. If None, it defaults to # " v documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = '_static/flit_logo_nobg_cropped.svg' # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. #html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # Add any extra paths that contain custom files (such as robots.txt or # .htaccess) here, relative to this directory. These files are copied # directly to the root of the documentation. #html_extra_path = [] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. 
#html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_domain_indices = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. #html_show_sphinx = True # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True. #html_show_copyright = True # If true, an OpenSearch description file will be output, and all pages will # contain a tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # This is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = None # Output file base name for HTML help builder. htmlhelp_basename = 'Flitdoc' # -- Options for LaTeX output --------------------------------------------- latex_elements = { # The paper size ('letterpaper' or 'a4paper'). #'papersize': 'letterpaper', # The font size ('10pt', '11pt' or '12pt'). #'pointsize': '10pt', # Additional stuff for the LaTeX preamble. #'preamble': '', } # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, # author, documentclass [howto, manual, or own class]). latex_documents = [ ('index', 'Flit.tex', u'Flit Documentation', u'Thomas Kluyver', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # If true, show page references after internal links. #latex_show_pagerefs = False # If true, show URL addresses after external links. 
#latex_show_urls = False # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_domain_indices = True # -- Options for manual page output --------------------------------------- # One entry per manual page. List of tuples # (source start file, name, description, authors, manual section). man_pages = [ ('index', 'flit', u'Flit Documentation', [u'Thomas Kluyver'], 1) ] # If true, show URL addresses after external links. #man_show_urls = False # -- Options for Texinfo output ------------------------------------------- # Grouping the document tree into Texinfo files. List of tuples # (source start file, target name, title, author, # dir menu entry, description, category) texinfo_documents = [ ('index', 'Flit', u'Flit Documentation', u'Thomas Kluyver', 'Flit', 'One line description of project.', 'Miscellaneous'), ] # Documents to append as an appendix to all manuals. #texinfo_appendices = [] # If false, no module index is generated. #texinfo_domain_indices = True # How to display URL addresses: 'footnote', 'no', or 'inline'. #texinfo_show_urls = 'footnote' # If true, do not generate a @detailmenu in the "Top" node's menu. #texinfo_no_detailmenu = False ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5015574 flit-3.12.0/doc/development.rst0000644000000000000000000000111014770461472013307 0ustar00Developing Flit =============== To get a development installation of Flit itself:: git clone https://github.com/pypa/flit.git cd flit python3 -m pip install docutils requests python3 bootstrap_dev.py This links Flit into the current Python environment, so you can make changes and try them without having to reinstall each time. Testing ------- To run the tests in separate environments for each available Python version:: tox `tox `_ has many options. 
To run the tests in your current environment, run:: pytest ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5015574 flit-3.12.0/doc/flit_ini.rst0000644000000000000000000000653614770461472012603 0ustar00:orphan: The flit.ini config file ======================== This file lives next to the module or package. .. note:: Flit 0.12 and above uses a :doc:`pyproject.toml file ` file to store this information. Run ``python3 -m flit.tomlify`` to convert a ``flit.ini`` file to ``pyproject.toml``. Metadata section ---------------- There are four required fields: module The name of the module/package, as you'd use in an import statement. author Your name author-email Your email address home-page A URL for the project, such as its Github repository. e.g. for flit itself .. code-block:: ini [metadata] module=flit author=Thomas Kluyver author-email=thomas@kluyver.me.uk home-page=https://github.com/pypa/flit The remaining fields are optional: requires A list of other packages from PyPI that this package needs. Each package should be on its own line, and may be followed by a version specifier in parentheses, like ``(>=4.1)``, and/or an `environment marker `_ after a semicolon. For example: .. code-block:: ini requires = requests (>=2.6) configparser; python_version == '2.7' dev-requires Packages that are required for development. This field is in the same format as ``requires``. These are not (yet) encoded in the wheel, but are used when doing ``flit install``. description-file A path (relative to the .ini file) to a file containing a longer description of your package to show on PyPI. This should be written in `reStructuredText `_, if your long description is not valid reStructuredText, a warning will be printed, and it will be interpreted as plain text on PyPI. classifiers A list of `Trove classifiers `_, one per line, indented. requires-python A version specifier for the versions of Python this requires, e.g. 
``~=3.3`` or ``>=3.3,<4`` which are equivalents. dist-name If you want your package's name on PyPI to be different from the importable module name, set this to the PyPI name. keywords Comma separated list of words to help with searching for your package. license The name of a license, if you're using one for which there isn't a Trove classifier. It's recommended to use Trove classifiers instead of this in most cases. maintainer, maintainer-email Like author, for if you've taken over a project from someone else. Here's the full example from flit itself: .. code-block:: ini [metadata] author=Thomas Kluyver author-email=thomas@kluyver.me.uk home-page=https://github.com/pypa/flit requires=requests requires-python= >=3 description-file=README.rst classifiers=Intended Audience :: Developers License :: OSI Approved :: BSD License Programming Language :: Python :: 3 Topic :: Software Development :: Libraries :: Python Modules .. _flit_ini_scripts: Scripts section --------------- Each key and value in this describes a shell command to be installed along with your package. These work like setuptools 'entry points'. Here's the section for flit: .. code-block:: ini [scripts] flit = flit:main This will create a ``flit`` command, which will call the function ``main()`` imported from :mod:`flit`. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5015574 flit-3.12.0/doc/history.rst0000644000000000000000000006042014770461472012477 0ustar00Release history =============== Version 3.12 ------------ - Support for license expressions using the AND and OR operators (:ghpull:`731`). - Recognise ``__version__: str = "0.1"`` annotated assignments when finding the version number (:ghpull:`728`). - Clear error message when referring to a license file in a parent directory, which is not supported (:ghpull:`725`). 
Version 3.11 ------------ - Support for SPDX license expressions and multiple license files, as detailed in :pep:`639`:: license = "BSD-3-Clause" license-files = ["LICENSE"] For now, only a single license identifier is allowed. More complex expressions describing multiple licenses & expressions may be supported in a future version. - The `metadata format `_ in produced packages is now version 2.4, to support the expanded license information. Version 3.10.1 -------------- - The sdist of ``flit_core`` now includes the corresponding tests (:ghpull:`704`). These were missing in 3.10. Version 3.10 ------------ - ``flit publish`` can now use PyPI tokens stored in keyring (:ghpull:`649`), either project tokens with a 'username' like :samp:`pypi_token:project:{project_name}` (use the `normalised form of the name `_) or user tokens (:samp:`pypi_token:user:{username}`). - The ``--python`` option can now take the path of a virtualenv folder, as an alternative to a Python executable (:ghpull:`667`). - Flit will work with current development versions of Pythona again (:ghpull:`684`). - The ``flit`` command line package now requires Python 3.8 or above (:ghpulL:`660`). ``flit_core`` still works with Python 3.6 or above. - The metadata in packages now has the names of optional dependency groups ("extras") normalised, complying with version 2.3 of the metadata standard (:ghpull:`676`, :ghpull:`697`). - The ``flit`` command line package now depends on pip (:ghpull:`647`). - Fix potential substitution of environment variables into passwords read from ``.pypirc`` files (:ghpull:`652`). - A warning is now shown when building packages which specify the old ``flit.buildapi`` backend, which should be replaced by ``flit_core.buildapi`` (:ghpull:`674`). It's a good idea to always set a maximum version for the build requirement, to protect against changes in future major versions of Flit. - Avoid using the deprecated ``datetime.utcfromtimestamp()`` (:ghpull:`682`). 
- Flit now has a ``SECURITY.md`` file in the Github repository (:ghpull:`665`). - The tests for ``flit_core`` are no longer part of the installed package, reducing the size of the wheels (:ghpull:`691`). Version 3.9 ----------- - New options :option:`flit build --use-vcs` and :option:`flit build --no-use-vcs` to enable & disable including all committed files in the sdist. For now ``--use-vcs`` is the default, but this is likely to change in a future version, to bring ``flit build`` in line with standard build frontends like ``python -m build`` (:ghpull:`625`). - Sdist file names, and the name of the top-level folder in an sdist, are now normalised, in accordance with :pep:`625` (:ghpull:`628`). - A statically defined version number can now be parsed from files called ``version.py``, ``_version.py`` or ``__version__.py`` inside a package, as well as from ``__init__.py``, so executing code is required in fewer cases (:ghpull:`630`). - Fix setting the flag for regular files in zip metadata (:ghpull:`639`). - The timestamp embedded in the gzip wrapper for sdists now defaults to a fixed date, so building an sdist twice on the same machine should produce identical results, even without any special steps (:ghpull:`635`). Setting :envvar:`SOURCE_DATE_EPOCH` is still recommended for properly :doc:`reproducible builds `. Version 3.8 ----------- - A project name containing hyphens is now automatically translated to use underscores for the import name (:ghpull:`566`). - New option :option:`flit install --only-deps` to install the dependencies of the package, but not the package itself. - Add support for recursive globbing (``**``) in sdist includes and excludes (:ghpull:`550`). - Python's bytecode cache files (``__pycache__`` folders and ``.pyc`` files) are now always excluded from sdists (:ghpull:`581`). - Use tomllib in Python 3.11, rather than tomli (:ghpull:`573`, :ghpull:`604`). - Fix crash when unable to get a password from ``keyring`` (:ghpull:`567`). 
- Fix including modified files in sdist when using Mercurial (:ghpull:`541`). - Fix for some cases of determining whether a package supports Python 2 or not (:ghpull:`593`). - Fix parsing version number from code using multiple assignments (:ghpull:`474`). - Document how to use a PyPI token with :envvar:`FLIT_PASSWORD` (:ghpull:`602`). - Fix link to information about environment variables for pip (:ghpull:`576`). - Link to the docs for the latest stable version in package metadata (:ghpull:`589`). - Remove a mention of the ``toml`` package, which is no longer needed, from the :doc:`development` page (:ghpull:`601`). - The :doc:`bootstrap ` install script for ``flit_core`` accepts a new ``--install-root`` option. - Ensure the license file is included in packages on PyPI (:ghpull:`603`). Version 3.7.1 ------------- - Fix building packages which need execution to get the version number, and have a relative import in ``__init__.py`` (:ghpull:`531`). Version 3.7 ----------- - Support for :ref:`external data files ` such as man pages or Jupyter extension support files (:ghpull:`510`). - Project names are now lowercase in wheel filenames and ``.dist-info`` folder names, in line with the specifications (:ghpull:`498`). - Improved support for :doc:`bootstrapping ` a Python environment, e.g. for downstream packagers (:ghpull:`511`). ``flit_core.wheel`` is usable with ``python -m`` to create wheels before the `build `_ tool is available, and ``flit_core`` sdists also include a script to install itself from a wheel before `installer `_ is available. - Use newer importlib APIs, fixing some deprecation warnings (:ghpull:`499`). Version 3.6 ----------- - ``flit_core`` now bundles the `tomli `_ TOML parser library (version 1.2.3) to avoid a circular dependency between ``flit_core`` and ``tomli`` (:ghpull:`492`). This means ``flit_core`` now has no dependencies except Python itself, both at build time and at runtime, simplifying :doc:`bootstrapping `. 
Version 3.5.1 ------------- - Fix development installs with ``flit install --symlink`` and ``--pth-file``, which were broken in 3.5.0, especially for packages using a ``src`` folder (:ghpull:`472`). Version 3.5 ----------- - You can now use Flit to distribute a module or package inside a namespace package (as defined by :pep:`420`). To do this, specify the import name of the concrete, inner module you are packaging - e.g. ``name = "sphinxcontrib.foo"`` - either in the ``[project]`` table, or under ``[tool.flit.module]`` if you want to use a different name on PyPI (:ghpull:`468`). - Flit no longer generates a ``setup.py`` file in sdists (``.tar.gz`` packages) by default (:ghpull:`462`). Modern packaging tools don't need this. You can use the ``--setup-py`` flag to keep adding it for now, but this will probably be removed at some point in the future. - Fixed how ``flit init`` handles authors' names with non-ASCII characters (:ghpull:`460`). - When ``flit init`` generates a LICENSE file, the new ``pyproject.toml`` now references it (:ghpull:`467`). Version 3.4 ----------- - Python 3.6 or above is now required, both for ``flit`` and ``flit_core``. - Add a ``--setup-py`` option to ``flit build`` and ``flit publish``, and a warning when neither this nor ``--no-setup-py`` are specified (:ghpull:`431`). A future version will stop generating ``setup.py`` files in sdists by default. - Add support for standardised editable installs - ``pip install -e`` - according to :pep:`660` (:ghpull:`400`). - Add a ``--pypirc`` option for ``flit publish`` to specify an alternative path to a ``.pypirc`` config file describing package indexes (:ghpull:`434`). - Fix installing dependencies specified in a ``[project]`` table (:ghpull:`433`). - Fix building wheels when ``SOURCE_DATE_EPOCH`` (see :doc:`reproducible`) is set to a date before 1980 (:ghpull:`448`). - Switch to using the `tomli `_ TOML parser, in common with other packaging projects (:ghpull:`438`). This supports TOML version 1.0. 
- Add a document on :doc:`bootstrap` (:ghpull:`441`). Version 3.3 ----------- - ``PKG-INFO`` files in sdists are now generated the same way as ``METADATA`` in wheels, fixing some issues with sdists (:ghpull:`410`). - ``flit publish`` now sends SHA-256 hashes, fixing uploads to GitLab package repositories (:ghpull:`416`). - The ``[project]`` metadata table from :pep:`621` is now fully supported and :ref:`documented `. Projects using this can now specify ``requires = ["flit_core >=3.2,<4"]`` in the ``[build-system]`` table. Version 3.2 ----------- - Experimental support for specifying metadata in a ``[project]`` table in ``pyproject.toml`` as specified by :pep:`621` (:ghpull:`393`). If you try using this, please specify ``requires = ["flit_core >=3.2.0,<3.3"]`` in the ``[build-system]`` table for now, in case it needs to change for the next release. - Fix writing METADATA file with multi-line information in certain fields such as ``Author`` (:ghpull:`402`). - Fix building wheel when a directory such as LICENSES appears in the project root directory (:ghpull:`401`). Version 3.1 ----------- - Update handling of names & version numbers in wheel filenames and ``.dist-info`` folders in line with changes in the specs (:ghpull:`395`). - Switch from the deprecated ``pytoml`` package to ``toml`` (:ghpull:`378`). - Fix specifying backend-path in ``pyproject.toml`` for flit-core (as a list instead of a string). Version 3.0 ----------- Breaking changes: - Projects must now provide Flit with information in ``pyproject.toml`` files, not the older ``flit.ini`` format (:ghpull:`338`). - ``flit_core`` once again requires Python 3 (>=3.4). Packages that support Python 2 can still be built by ``flit_core`` 2.x, but can't rely on new features (:ghpull:`342`). - The deprecated ``flit installfrom`` command was removed (:ghpull:`334`). You can use ``pip install git+https://github.com/...`` instead. 
Features and fixes: - Fix building sdists from a git repository with non-ASCII characters in filenames (:ghpull:`346`). - Fix identifying the version number when the code contains a subscript assignment before ``__version__ =`` (:ghpull:`348`). - Script entry points can now use a class method (:ghpull:`359`). - Set suitable permission bits on metadata files in wheels (:ghpull:`256`). - Fixed line endings in the ``RECORD`` file when installing on Windows (:ghpull:`368`). - Support for recording the source of local installations, as in :pep:`610` (:ghpull:`335`). - ``flit init`` will check for a README in the root of the project and automatically set it as ``description-file`` (:ghpull:`337`). - Pygments is not required for checking reStructuredText READMEs (:ghpull:`357`). - Packages where the version number can be recognised without executing their code don't need their dependencies installed to build, which should make them build faster (:ghpull:`361`). - Ensure the installed ``RECORD`` file is predictably ordered (:ghpull:`366`). Version 2.3 ----------- - New projects created with :ref:`init_cmd` now declare that they require ``flit_core >=2,<4`` (:ghpull:`328`). Any projects using ``pyproject.toml`` (not ``flit.ini``) should be compatible with flit 3.x. - Fix selecting files from a git submodule to include in an sdist (:ghpull:`324`). - Fix checking classifiers when no writeable cache directory is available (:ghpull:`319`). - Better errors when trying to install to a mis-spelled or missing Python interpreter (:ghpull:`331`). - Fix specifying ``--repository`` before ``upload`` (:ghpull:`322`). Passing the option like this is deprecated, and you should now pass it after ``upload``. - Documentation improvements (:ghpull:`327`, :ghpull:`318`, :ghpull:`314`) Version 2.2 ----------- - Allow underscores in package names with Python 2 (:ghpull:`305`). - Add a ``--no-setup-py`` option to build sdists without a backwards-compatible ``setup.py`` file (:ghpull:`311`). 
- Fix the generated ``setup.py`` file for packages using a ``src/`` layout (:ghpull:`303`). - Fix detecting when more than one file matches the module name specified (:ghpull:`307`). - Fix installing to a venv on Windows with the ``--python`` option (:ghpull:`300`). - Don't echo the command in scripts installed with ``--symlink`` or ``--pth-file`` on Windows (:ghpull:`310`). - New ``bootstrap_dev.py`` script to set up a development installation of Flit from the repository (:ghpull:`301`, :ghpull:`306`). Version 2.1 ----------- - Use compression when adding files to wheels. - Added the :envvar:`FLIT_INSTALL_PYTHON` environment variable (:ghpull:`295`), to configure flit to always install into a Python other than the one it's running on. - ``flit_core`` uses the ``intreehooks`` shim package to load its bootstrapping backend, until a released version of pip supports the standard ``backend-path`` mechanism. Version 2.0 ----------- Flit 2 is a major architecture change. The ``flit_core`` package now provides a :pep:`517` backend for building packages, while ``flit`` is a :doc:`command line interface ` extending that. The build backend works on Python 2, so tools like pip should be able to install packages built with flit from source on Python 2. The ``flit`` command requires Python 3.5 or above. You will need to change the build-system table in your ``pyproject.toml`` file to look like this: .. code-block:: toml [build-system] requires = ["flit_core >=2,<4"] build-backend = "flit_core.buildapi" Other changes include: - Support for storing your code under a ``src/`` folder (:ghpull:`260`). You don't need to change any configuration if you do this. - Options to control what files are included in an sdist - see :ref:`pyproject_toml_sdist` for the details. - Requirements can specify a URL 'direct reference', as an alternative to a version number, with the syntax defined in :pep:`440`: ``requests @ https://example.com/requests-2.22.0.tar.gz``. 
- Fix the shebang of scripts installed with the ``--python`` option and the ``--symlink`` flag (:ghpull:`286`). - Installing with ``--deps develop`` now installs normal dependencies as well as development dependencies. - Author email is no longer required in the metadata table (:ghpull:`289`). - More error messages are now shown without a traceback (:ghpull:`254`) Version 1.3 ----------- - Fix for building sdists from a subdirectory in a Mercurial repository (:ghpull:`233`). - Fix for getting the docstring and version from modules defining their encoding (:ghpull:`239`). - Fix for installing packages with ``flit installfrom`` (:ghpull:`221`). - Packages with requirements no longer get a spurious ``Provides-Extra: .none`` metadata entry (:ghissue:`228`). - Better check of whether ``python-requires`` includes any Python 2 version (:ghpull:`232`). - Better check of home page URLs in ``flit init`` (:ghpull:`230`). - Better error message when the description file is not found (:ghpull:`234`). - Updated a help message to refer to ``pyproject.toml`` (:ghpull:`240`). - Improve tests of ``flit init`` (:ghpull:`229`). Version 1.2.1 ------------- - Fix for installing packages with ``flit install``. - Make ``requests_download`` an extra dependency, to avoid a circular build dependency. To use ``flit installfrom``, you can install with ``pip install flit[installfrom]``. Note that the ``installfrom`` subcommand is deprecated, as it will soon be possible to use pip to install Flit projects directly from a VCS URL. Version 1.2 ----------- - Fixes for packages specifying ``requires-extra``: sdists should now work, and environment markers can be used together with ``requires-extra``. - Fix running ``flit installfrom`` without a config file present in the working directory. - The error message for a missing or empty docstring tells you what file the docstring should be in. - Improvements to documentation on version selectors for requirements. 
Version 1.1 ----------- - Packages can now have 'extras', specified as ``requires-extra`` in the :doc:`pyproject.toml file `. These are additional dependencies for optional features. - The ``home-page`` metadata field is no longer required. - Additional project URLs are now validated. - ``flit -V`` is now equivalent to ``flit --version``. - Various improvements to documentation. Version 1.0 ----------- - The description file may now be written in reStructuredText, Markdown or plain text. The file extension should indicate which of these formats it is (``.rst``, ``.md`` or ``.txt``). Previously, only reStructuredText was officially supported. - Multiple links (e.g. documentation, bug tracker) can now be specified in a new :ref:`[tool.flit.metadata.urls] section ` of ``pyproject.toml``. - Dependencies are now correctly installed to the target Python when you use the ``--symlink`` or ``--pth-file`` options. - Dependencies are only installed to the Python where Flit is running if it fails to get the docstring and version number without them. - The commands deprecated in 0.13—``flit wheel``, ``flit sdist`` and ``flit register``—have been removed. Although version 1.0 sounds like a milestone, there's nothing that makes this release especially significant. It doesn't represent a step change in stability or completeness. Flit has been gradually maturing for some time, and I chose this point to end the series of 0.x version numbers. Version 0.13 ------------ - Better validation of several metadata fields (``dist-name``, ``requires``, ``requires-python``, ``home-page``), and of the version number. - New :envvar:`FLIT_ALLOW_INVALID` environment variable to ignore validation failures in case they go wrong. - The list of valid classifiers is now fetched from Warehouse (https://pypi.org), rather than the older https://pypi.python.org site. - Deprecated ``flit wheel`` and ``flit sdist`` subcommands: use :ref:`build_cmd`. 
- Deprecated ``flit register``: you can no longer register a package separately from uploading it. Version 0.12.3 -------------- - Fix building and installing packages with a ``-`` in the distribution name. - Fix numbering in README. Version 0.12.2 -------------- - New tool to convert ``flit.ini`` to ``pyproject.toml``:: python3 -m flit.tomlify - Use the PAX tar format for sdists, as specified by PEP 517. Version 0.12.1 -------------- - Restore dependency on ``zipfile36`` backport package. - Add some missing options to documentation of ``flit install`` subcommand. - Rearrange environment variables in the docs. Version 0.12 ------------ - Switch the config to ``pyproject.toml`` by default instead of ``flit.ini``, and implement the PEP 517 API. - A new option ``--pth-file`` allows for development installation on Windows (where ``--symlink`` usually won't work). - Normalise file permissions in the zip file, making builds more reproducible across different systems. - Sdists (.tar.gz packages) can now also be reproducibly built by setting :envvar:`SOURCE_DATE_EPOCH`. - For most modules, Flit can now extract the version number and docstring without importing it. It will still fall back to importing where getting these from the AST fails. - ``flit build`` will build the wheel from the sdist, helping to ensure that files aren't left out of the sdist. - All list fields in the INI file now ignore blank lines (``requires``, ``dev-requires``, ``classifiers``). - Fix the path separator in the ``RECORD`` file of a wheel built on Windows. - Some minor fixes to building reproducible wheels. - If building a wheel fails, the temporary file created will be cleaned up. - Various improvements to docs and README. Version 0.11.4 -------------- - Explicitly open various files as UTF-8, rather than relying on locale encoding. - Link to docs from README. - Better test coverage, and a few minor fixes for problems revealed by tests. 
Version 0.11.3 -------------- - Fixed a bug causing failed uploads when the password is entered in the terminal. Version 0.11.2 -------------- - A couple of behaviour changes when uploading to warehouse. Version 0.11.1 -------------- - Fixed a bug when you use flit to build an sdist from a subdirectory inside a VCS checkout. The VCS is now correctly detected. - Fix the rst checker for newer versions of docutils, by upgrading the bundled copy of readme_renderer. Version 0.11 ------------ - Flit can now build sdists (tarballs) and upload them to PyPI, if your code is in a git or mercurial repository. There are new commands: - ``flit build`` builds both a wheel and an sdist. - ``flit publish`` builds and uploads a wheel and an sdist. - Smarter ways of getting the information needed for upload: - If you have the `keyring `_ package installed, flit can use it to store your password, rather than keeping it in plain text in ``~/.pypirc``. - If ``~/.pypirc`` does not already exist, and you are prompted for your username, flit will write it into that file. - You can provide the information as environment variables: :envvar:`FLIT_USERNAME`, :envvar:`FLIT_PASSWORD` and :envvar:`FLIT_INDEX_URL`. Use this to upload packages from a CI service, for instance. - Include 'LICENSE' or 'COPYING' files in wheels. - Fix for ``flit install --symlink`` inside a virtualenv. Version 0.10 ------------ - Downstream packagers can use the :envvar:`FLIT_NO_NETWORK` environment variable to stop flit downloading data from the network. Version 0.9 ----------- - ``flit install`` and ``flit installfrom`` now take an optional ``--python`` argument, with the path to the Python executable you want to install it for. Using this, you can install modules to Python 2. - Installing a module normally (without ``--symlink``) builds a wheel and uses pip to install it, which should work better in some corner cases. 
Version 0.8 ----------- - A new ``flit installfrom`` subcommand to install a project from a source archive, such as from Github. - :doc:`Reproducible builds ` - you can produce byte-for-byte identical wheels. - A warning for non-canonical version numbers according to `PEP 440 `__. - Fix for installing projects on Windows. - Better error message when module docstring is only whitespace. Version 0.7 ----------- - A new ``dev-requires`` field in the config file for development requirements, used when doing ``flit install``. - Added a ``--deps`` option for ``flit install`` to control which dependencies are installed. - Flit can now be invoked with ``python -m flit``. Version 0.6 ----------- - ``flit install`` now ensures requirements specified in ``flit.ini`` are installed, using pip. - If you specify a description file, flit now warns you if it's not valid reStructuredText (since invalid reStructuredText is treated as plain text on PyPI). - Improved the error message for mis-spelled keys in ``flit.ini``. Version 0.5 ----------- - A new ``flit init`` command to quickly define the essential basic metadata for a package. - Support for entry points. - A new ``flit register`` command to register a package without uploading it, for when you want to claim a name before you're ready to release. - Added a ``--repository`` option for specifying an alternative PyPI instance. - Added a ``--debug`` flag to show debug-level log messages. - Better error messages when the module docstring or ``__version__`` is missing. Version 0.4 ----------- - Users can now specify ``dist-name`` in the config file if they need to use different names on PyPI and for imports. - Classifiers are now checked against a locally cached list of valid classifiers. - Packages can be locally installed into environments for development. - Local installation now creates a PEP 376 ``.dist-info`` folder instead of ``.egg-info``. 
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5015574 flit-3.12.0/doc/index.rst0000644000000000000000000000070214770461472012102 0ustar00Flit |version| ============== .. raw:: html .. include:: ../README.rst Documentation contents ---------------------- .. toctree:: :maxdepth: 2 pyproject_toml cmdline upload reproducible rationale bootstrap .. toctree:: :maxdepth: 1 development history Indices and tables ================== * :ref:`genindex` * :ref:`search` ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5015574 flit-3.12.0/doc/make.bat0000644000000000000000000001505114770461472011651 0ustar00@ECHO OFF REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set BUILDDIR=_build set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% . set I18NSPHINXOPTS=%SPHINXOPTS% . if NOT "%PAPER%" == "" ( set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS% set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS% ) if "%1" == "" goto help if "%1" == "help" ( :help echo.Please use `make ^` where ^ is one of echo. html to make standalone HTML files echo. dirhtml to make HTML files named index.html in directories echo. singlehtml to make a single large HTML file echo. pickle to make pickle files echo. json to make JSON files echo. htmlhelp to make HTML files and a HTML help project echo. qthelp to make HTML files and a qthelp project echo. devhelp to make HTML files and a Devhelp project echo. epub to make an epub echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter echo. text to make text files echo. man to make manual pages echo. texinfo to make Texinfo files echo. gettext to make PO message catalogs echo. changes to make an overview over all changed/added/deprecated items echo. xml to make Docutils-native XML files echo. pseudoxml to make pseudoxml-XML files for display purposes echo. 
linkcheck to check all external links for integrity echo. doctest to run all doctests embedded in the documentation if enabled goto end ) if "%1" == "clean" ( for /d %%i in (%BUILDDIR%\*) do rmdir /q /s %%i del /q /s %BUILDDIR%\* goto end ) %SPHINXBUILD% 2> nul if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) if "%1" == "html" ( %SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/html. goto end ) if "%1" == "dirhtml" ( %SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml. goto end ) if "%1" == "singlehtml" ( %SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml if errorlevel 1 exit /b 1 echo. echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml. goto end ) if "%1" == "pickle" ( %SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the pickle files. goto end ) if "%1" == "json" ( %SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can process the JSON files. goto end ) if "%1" == "htmlhelp" ( %SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp if errorlevel 1 exit /b 1 echo. echo.Build finished; now you can run HTML Help Workshop with the ^ .hhp project file in %BUILDDIR%/htmlhelp. goto end ) if "%1" == "qthelp" ( %SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp if errorlevel 1 exit /b 1 echo. 
echo.Build finished; now you can run "qcollectiongenerator" with the ^ .qhcp project file in %BUILDDIR%/qthelp, like this: echo.^> qcollectiongenerator %BUILDDIR%\qthelp\Flit.qhcp echo.To view the help file: echo.^> assistant -collectionFile %BUILDDIR%\qthelp\Flit.ghc goto end ) if "%1" == "devhelp" ( %SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp if errorlevel 1 exit /b 1 echo. echo.Build finished. goto end ) if "%1" == "epub" ( %SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub if errorlevel 1 exit /b 1 echo. echo.Build finished. The epub file is in %BUILDDIR%/epub. goto end ) if "%1" == "latex" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex if errorlevel 1 exit /b 1 echo. echo.Build finished; the LaTeX files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdf" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "latexpdfja" ( %SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex cd %BUILDDIR%/latex make all-pdf-ja cd %BUILDDIR%/.. echo. echo.Build finished; the PDF files are in %BUILDDIR%/latex. goto end ) if "%1" == "text" ( %SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text if errorlevel 1 exit /b 1 echo. echo.Build finished. The text files are in %BUILDDIR%/text. goto end ) if "%1" == "man" ( %SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man if errorlevel 1 exit /b 1 echo. echo.Build finished. The manual pages are in %BUILDDIR%/man. goto end ) if "%1" == "texinfo" ( %SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo if errorlevel 1 exit /b 1 echo. echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo. goto end ) if "%1" == "gettext" ( %SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale if errorlevel 1 exit /b 1 echo. echo.Build finished. The message catalogs are in %BUILDDIR%/locale. 
goto end ) if "%1" == "changes" ( %SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes if errorlevel 1 exit /b 1 echo. echo.The overview file is in %BUILDDIR%/changes. goto end ) if "%1" == "linkcheck" ( %SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck if errorlevel 1 exit /b 1 echo. echo.Link check complete; look for any errors in the above output ^ or in %BUILDDIR%/linkcheck/output.txt. goto end ) if "%1" == "doctest" ( %SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest if errorlevel 1 exit /b 1 echo. echo.Testing of doctests in the sources finished, look at the ^ results in %BUILDDIR%/doctest/output.txt. goto end ) if "%1" == "xml" ( %SPHINXBUILD% -b xml %ALLSPHINXOPTS% %BUILDDIR%/xml if errorlevel 1 exit /b 1 echo. echo.Build finished. The XML files are in %BUILDDIR%/xml. goto end ) if "%1" == "pseudoxml" ( %SPHINXBUILD% -b pseudoxml %ALLSPHINXOPTS% %BUILDDIR%/pseudoxml if errorlevel 1 exit /b 1 echo. echo.Build finished. The pseudo-XML files are in %BUILDDIR%/pseudoxml. goto end ) :end ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5015574 flit-3.12.0/doc/pyproject_toml.rst0000644000000000000000000004054714770461472014060 0ustar00The pyproject.toml config file ============================== This file lives next to the module or package. .. note:: Older version of Flit (up to 0.11) used a :doc:`flit.ini file ` for similar information. These files no longer work with Flit 3 and above. Run ``python3 -m flit.tomlify`` to convert a ``flit.ini`` file to ``pyproject.toml``. Build system section -------------------- This tells tools like pip to build your project with flit. It's a standard defined by PEP 517. For any new project using Flit, it will look like this: .. 
code-block:: toml [build-system] requires = ["flit_core >=3.11,<4"] build-backend = "flit_core.buildapi" Version constraints: - For now, all packages should specify ``<4``, so they won't be impacted by changes in the next major version. - ``license-files`` and license expressions in the ``license`` field require ``flit_core >=3.11``. - :ref:`pyproject_toml_project` requires ``flit_core >=3.2`` - :ref:`pyproject_old_metadata` requires ``flit_core >=2,<4`` - The older :doc:`flit.ini file ` requires ``flit_core <3``. - TOML features new in version 1.0 require ``flit_core >=3.4``. - ``flit_core`` 3.3 is the last version supporting Python 3.4 & 3.5. Packages supporting these Python versions can only use `TOML v0.5 `_. - Only ``flit_core`` 2.x can build packages on Python 2, so packages still supporting Python 2 cannot use new-style metadata (the ``[project]`` table). .. _pyproject_toml_project: New style metadata ------------------ .. versionadded:: 3.2 The new standard way to specify project metadata is in a ``[project]`` table, as defined by :pep:`621`. Flit works for now with either this or the older ``[tool.flit.metadata]`` table (:ref:`described below `), but it won't allow you to mix them. A simple ``[project]`` table might look like this: .. code-block:: toml [project] name = "astcheck" authors = [ {name = "Thomas Kluyver", email = "thomas@kluyver.me.uk"}, ] readme = "README.rst" license = "MIT" requires-python = ">=3.5" dynamic = ["version", "description"] The allowed fields are: name The name your package will have on PyPI. This field is required. For Flit, this name, with any hyphens replaced by underscores, is also the default value of the import name (see :ref:`pyproject_module` if that needs to be different). .. versionchanged:: 3.8 Hyphens in the project name are now translated to underscores for the import name. version Version number as a string. 
If you want Flit to get this from a ``__version__`` attribute, leave it out of the TOML config and include "version" in the ``dynamic`` field. description A one-line description of your project. If you want Flit to get this from the module docstring, leave it out of the TOML config and include "description" in the ``dynamic`` field. readme A path (relative to the .toml file) to a file containing a longer description of your package to show on PyPI. This should be written in `reStructuredText `_, Markdown or plain text, and the filename should have the appropriate extension (``.rst``, ``.md`` or ``.txt``). Alternatively, ``readme`` can be a table with either a ``file`` key (a relative path) or a ``text`` key (literal text), and an optional ``content-type`` key (e.g. ``text/x-rst``). requires-python A version specifier for the versions of Python this requires, e.g. ``~=3.3`` or ``>=3.3,<4``, which are equivalents. license A valid SPDX `license expression `_ or a table with either a ``file`` key (a relative path to a license file) or a ``text`` key (the license text). license-files A list of glob patterns for license files to include. Defaults to ``['COPYING*', 'LICEN[CS]E*']``. authors A list of tables with ``name`` and ``email`` keys (both optional) describing the authors of the project. maintainers Same format as authors. keywords A list of words to help with searching for your package. classifiers A list of `Trove classifiers `_. Add ``Private :: Do Not Upload`` into the list to prevent a private package from being uploaded to PyPI by accident. dependencies & optional-dependencies See :ref:`pyproject_project_dependencies`. urls See :ref:`pyproject_project_urls`. scripts & gui-scripts See :ref:`pyproject_project_scripts`. entry-points See :ref:`pyproject_project_entrypoints`. dynamic A list of field names which aren't specified here, for which Flit should find a value at build time. Only "version" and "description" are accepted. .. 
_pyproject_project_dependencies: Dependencies ~~~~~~~~~~~~ The ``dependencies`` field is a list of other packages from PyPI that this package needs. Each package may be followed by a version specifier like ``>=4.1``, and/or an `environment marker`_ after a semicolon. For example: .. code-block:: toml dependencies = [ "requests >=2.6", "configparser; python_version == '2.7'", ] The ``[project.optional-dependencies]`` table contains lists of packages needed for every optional feature. The requirements are specified in the same format as for ``dependencies``. For example: .. code-block:: toml [project.optional-dependencies] test = [ "pytest >=2.7.3", "pytest-cov", ] doc = ["sphinx"] You can call these optional features anything you want, although ``test`` and ``doc`` are common ones. You specify them for installation in square brackets after the package name or directory, e.g. ``pip install '.[test]'``. .. _pyproject_project_urls: URLs table ~~~~~~~~~~ Your project's page on `pypi.org `_ can show a number of links. You can point people to documentation or a bug tracker, for example. This section is called ``[project.urls]`` in the file. You can use any names inside it. Here it is for flit: .. code-block:: toml [project.urls] Documentation = "https://flit.pypa.io" Source = "https://github.com/pypa/flit" .. _pyproject_project_scripts: Scripts section ~~~~~~~~~~~~~~~ This section is called ``[project.scripts]`` in the file. Each key and value describes a shell command to be installed along with your package. These work like setuptools 'entry points'. Here's the section for flit: .. code-block:: toml [project.scripts] flit = "flit:main" This will create a ``flit`` command, which will call the function ``main()`` imported from :mod:`flit`. A similar table called ``[project.gui-scripts]`` defines commands which launch a GUI. This only makes a difference on Windows, where GUI scripts are run without a console. .. 
_pyproject_project_entrypoints: Entry points sections ~~~~~~~~~~~~~~~~~~~~~ You can declare `entry points `_ using sections named :samp:`[project.entry-points.{groupname}]`. E.g. to provide a pygments lexer from your package: .. code-block:: toml [project.entry-points."pygments.lexers"] dogelang = "dogelang.lexer:DogeLexer" In each ``package:name`` value, the part before the colon should be an importable module name, and the latter part should be the name of an object accessible within that module. The details of what object to expose depend on the application you're extending. If the group name contains a dot, it must be quoted (``"pygments.lexers"`` above). Script entry points are defined in :ref:`scripts tables `, so you can't use the group names ``console_scripts`` or ``gui_scripts`` here. .. _pyproject_module: Module section ~~~~~~~~~~~~~~ If your package will have different names for installation and import, you should specify the install (PyPI) name in the ``[project]`` table (:ref:`see above `), and the import name in a ``[tool.flit.module]`` table: .. code-block:: toml [project] name = "pynsist" # ... [tool.flit.module] name = "nsist" Flit looks for the source of the package by its import name. The source may be located either in the directory that holds the ``pyproject.toml`` file, or in a ``src/`` subdirectory. .. _pyproject_old_metadata: Old style metadata ------------------ Flit's older way to specify metadata is in a ``[tool.flit.metadata]`` table, along with ``[tool.flit.scripts]`` and ``[tool.flit.entrypoints]``, described below. This is still recognised for now, but you can't mix it with :ref:`pyproject_toml_project`. There are three required fields: module The name of the module/package, as you'd use in an import statement. author Your name author-email Your email address e.g. for flit itself .. code-block:: toml [tool.flit.metadata] module = "flit" author = "Thomas Kluyver" author-email = "thomas@kluyver.me.uk" .. 
versionchanged:: 1.1 ``home-page`` was previously required. The remaining fields are optional: home-page A URL for the project, such as its Github repository. requires A list of other packages from PyPI that this package needs. Each package may be followed by a version specifier like ``(>=4.1)`` or ``>=4.1``, and/or an `environment marker`_ after a semicolon. For example: .. code-block:: toml requires = [ "requests >=2.6", "configparser; python_version == '2.7'", ] requires-extra Lists of packages needed for every optional feature. The requirements are specified in the same format as for ``requires``. The requirements of the two reserved extras ``test`` and ``doc`` as well as the extra ``dev`` are installed by ``flit install``. For example: .. code-block:: toml [tool.flit.metadata.requires-extra] test = [ "pytest >=2.7.3", "pytest-cov", ] doc = ["sphinx"] .. versionadded:: 1.1 description-file A path (relative to the .toml file) to a file containing a longer description of your package to show on PyPI. This should be written in `reStructuredText `_, Markdown or plain text, and the filename should have the appropriate extension (``.rst``, ``.md`` or ``.txt``). classifiers A list of `Trove classifiers `_. Add ``Private :: Do Not Upload`` into the list to prevent a private package from uploading on PyPI by accident. requires-python A version specifier for the versions of Python this requires, e.g. ``~=3.3`` or ``>=3.3,<4`` which are equivalents. dist-name If you want your package's name on PyPI to be different from the importable module name, set this to the PyPI name. keywords Comma separated list of words to help with searching for your package. license The name of a license, if you're using one for which there isn't a Trove classifier. It's recommended to use Trove classifiers instead of this in most cases. maintainer, maintainer-email Like author, for if you've taken over a project from someone else. 
Here was the metadata section from flit using the older style: .. code-block:: toml [tool.flit.metadata] module="flit" author="Thomas Kluyver" author-email="thomas@kluyver.me.uk" home-page="https://github.com/pypa/flit" requires=[ "flit_core >=2.2.0", "requests", "docutils", "tomli", "tomli-w", ] requires-python=">=3.6" description-file="README.rst" classifiers=[ "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Programming Language :: Python :: 3", "Topic :: Software Development :: Libraries :: Python Modules", ] .. _pyproject_toml_urls: URLs subsection ~~~~~~~~~~~~~~~ Your project's page on `pypi.org `_ can show a number of links, in addition to the ``home-page`` URL described above. You can point people to documentation or a bug tracker, for example. This section is called ``[tool.flit.metadata.urls]`` in the file. You can use any names inside it. Here it is for flit: .. code-block:: toml [tool.flit.metadata.urls] Documentation = "https://flit.pypa.io" .. versionadded:: 1.0 .. _pyproject_toml_scripts: Scripts section ~~~~~~~~~~~~~~~ A ``[tool.flit.scripts]`` table can be used along with ``[tool.flit.metadata]``. It is in the same format as the newer ``[project.scripts]`` table :ref:`described above `. Entry points sections ~~~~~~~~~~~~~~~~~~~~~ ``[tool.flit.entrypoints]`` tables can be used along with ``[tool.flit.metadata]``. They are in the same format as the newer ``[project.entry-points]`` tables :ref:`described above `. .. _pyproject_toml_sdist: Sdist section ------------- .. versionadded:: 2.0 With no configuration, Flit can make an sdist with everything it needs to build and install your module: the package contents (including non-Python data files, but not ``.pyc`` bytecode files), your ``pyproject.toml`` file, the readme & license files given in the metadata, and the :ref:`external data folder ` if you specified that. 
If you want more control, you can give lists of paths or glob patterns as ``include`` and ``exclude`` in this section. For example: .. code-block:: toml [tool.flit.sdist] include = ["doc/"] exclude = ["doc/*.html"] These paths: - Always use ``/`` as a separator (POSIX style) - Must be relative paths from the directory containing ``pyproject.toml`` - Cannot go outside that directory (no ``../`` paths) - Cannot contain control characters or ``<>:"\\`` - Can refer to directories, in which case they include everything under the directory, including subdirectories - Should match the case of the files they refer to, as case-insensitive matching is platform dependent .. versionchanged:: 3.8 Include and exclude patterns can now use recursive glob patterns (``**``). Exclusions have priority over inclusions. Bytecode is excluded by default and cannot be included. Including files committed in git/hg ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ If you use :ref:`build_cmd` or :ref:`publish_cmd`, you can also make sdists with the files which are committed in version control (git or hg). This is a shortcut to e.g. include documentation source files, but not built HTML or PDF documentation. The include and exclude patterns are then applied on top of this list. For now, including files from version control is the default for :ref:`build_cmd` and :ref:`publish_cmd`, and can be disabled with ``--no-use-vcs``. The default will switch in a future version. Using ``flit_core`` as a backend to other tools such as `build `_ never gets the list of files for the sdist from version control. .. _pyproject_toml_external_data: External data section --------------------- .. versionadded:: 3.7 Data files which your code will use should go inside the Python package folder. Flit will package these with no special configuration. However, sometimes it's useful to package external files for system integration, such as man pages or files defining a Jupyter extension. 
To do this, arrange the files within a directory such as ``data``, next to your ``pyproject.toml`` file, and add a section like this: .. code-block:: toml [tool.flit.external-data] directory = "data" Paths within this directory are typically installed to corresponding paths under a prefix (such as a virtualenv directory). E.g. you might save a man page for a script as ``(data)/share/man/man1/foo.1``. Whether these files are detected by the systems they're meant to integrate with depends on how your package is installed and how those systems are configured. For instance, installing in a virtualenv usually doesn't affect anything outside that environment. Don't rely on these files being picked up unless you have close control of how the package will be installed. If you install a package with ``flit install --symlink``, a symlink is made for each file in the external data directory. Otherwise (including development installs with ``pip install -e``), these files are copied to their destination, so changes here won't take effect until you reinstall the package. .. note:: For users coming from setuptools: external data corresponds to setuptools' ``data_files`` parameter, although setuptools offers more flexibility. .. _environment marker: https://www.python.org/dev/peps/pep-0508/#environment-markers ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5015574 flit-3.12.0/doc/rationale.rst0000644000000000000000000000541514770461472012757 0ustar00Why use Flit? ============= *Make the easy things easy and the hard things possible* is an old motto from the Perl community. Flit is entirely focused on the *easy things* part of that, and leaves the hard things up to other tools. Specifically, the easy things are pure Python packages with no build steps (neither compiling C code, nor bundling Javascript, etc.). The vast majority of packages on PyPI are like this: plain Python code, with maybe some static data files like icons included. 
It's easy to underestimate the challenges involved in distributing and installing code, because it seems like you just need to copy some files into the right place. There's a whole lot of metadata and tooling that has to work together around that fundamental step. But with the right tooling, a developer who wants to release their code doesn't need to know about most of that. What, specifically, does Flit make easy? - ``flit init`` helps you set up the information Flit needs about your package. - Subpackages are automatically included: you only need to specify the top-level package. - Data files within a package directory are automatically included. Missing data files has been a common packaging mistake with other tools. - The version number is taken from your package's ``__version__`` attribute, so that always matches the version that tools like pip see. - ``flit publish`` uploads a package to PyPI, so you don't need a separate tool to do this. Setuptools, the most common tool for Python packaging, now has shortcuts for many of the same things. But it has to stay compatible with projects published many years ago, which limits what it can do by default. Flit also has some support for :doc:`reproducible builds `, a feature which some people care about. There have been many other efforts to improve the user experience of Python packaging, such as `pbr `_, but before Flit, these tended to build on setuptools and distutils. That was a pragmatic decision, but it's hard to build something radically different on top of those libraries. The existence of Flit spurred the development of new standards, like :pep:`518` and :pep:`517`, which are now used by other packaging tools such as `Poetry `_ and `Enscons `_. Other options ------------- If your package needs a build step, you won't be able to use Flit. `Setuptools `_ is the de-facto standard, but newer tools such as Enscons_ also cover this case. 
Flit also doesn't help you manage dependencies: you have to add them to ``pyproject.toml`` by hand. Tools like Poetry_ and `Pipenv `_ have features which help add and update dependencies on other packages. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5015574 flit-3.12.0/doc/reproducible.rst0000644000000000000000000000256414770461472013462 0ustar00Reproducible builds =================== .. versionadded:: 0.8 Wheels built by flit are reproducible: if you build from the same source code, you should be able to make wheels that are exactly identical, byte for byte. This is useful for verifying software. For more details, see `reproducible-builds.org `__. There is a caveat, however: wheels (which are zip files) include the modification timestamp from each file. This will probably be different on each computer, because it indicates when your local copy of the file was written, not when it was changed in version control. These timestamps can be overridden by the environment variable :envvar:`SOURCE_DATE_EPOCH`. .. code-block:: shell SOURCE_DATE_EPOCH=$(date +%s) flit publish # Record the value of SOURCE_DATE_EPOCH in release notes for reproduction .. versionchanged:: 0.12 Normalising permission bits Flit normalises the permission bits of files copied into a wheel to either 755 (executable) or 644. This means that a file is readable by all users and writable only by the user who owns it. The most popular version control systems only track the executable bit, so checking out the same repository on systems with different umasks (e.g. Debian and Fedora) produces files with different permissions. With Flit 0.11 and earlier, this difference would produce non-identical wheels. 
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5015574 flit-3.12.0/doc/requirements.txt0000644000000000000000000000010614770461472013523 0ustar00sphinx ~= 7.0 sphinxcontrib_github_alt ~= 1.2 sphinx-rtd-theme ~= 2.0 ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5025573 flit-3.12.0/doc/upload.rst0000644000000000000000000000544514770461472012270 0ustar00Controlling package uploads =========================== .. program:: flit publish The command ``flit publish`` will upload your package to a package index server. The default settings let you upload to `PyPI `_, the default Python Package Index, with a single user account. If you want to upload to other servers, or with more than one user account, or upload packages from a continuous integration job, you can configure Flit in two main ways: Using .pypirc ------------- You can create or edit a config file in your home directory, ``~/.pypirc`` that will be used by default or you can specify a custom location. This is also used by other Python tools such as `twine `_. For instance, to upload a package to the `Test PyPI server `_ instead of the normal PyPI, use a config file looking like this: .. code-block:: ini [distutils] index-servers = pypi testpypi [pypi] repository = https://upload.pypi.org/legacy/ username = sirrobin # Replace with your PyPI username [testpypi] repository = https://test.pypi.org/legacy/ username = sirrobin # Replace with your TestPyPI username You can select an index server from this config file with the :option:`--repository` option:: flit publish --repository testpypi If you don't use this option, Flit will use the server called ``pypi`` in the config file. If that doesn't exist, it uploads to PyPI at ``https://upload.pypi.org/legacy/`` by default. If you publish a package and you don't have a ``.pypirc`` file, Flit will create it to store your username. 
Flit tries to store your password securely using the `keyring `_ library. If keyring is not installed, Flit will ask for your password for each upload. Alternatively, you can also manually add your password to the ``.pypirc`` file (``password = ...``) .. _upload_envvars: Using environment variables --------------------------- You can specify a server to upload to with :envvar:`FLIT_INDEX_URL`, and pass credentials with :envvar:`FLIT_USERNAME` and :envvar:`FLIT_PASSWORD`. Environment variables take precedence over the config file, except if you use the :option:`--repository` option to explicitly pick a server from the config file. This can make it easier to automate uploads, for example to release packages from a continuous integration job. .. warning:: Storing a password in an environment variable is convenient, but it's `easy to accidentally leak it `_. Look out for scripts that helpfully print all environment variables for debugging, and remember that other scripts and libraries you run in that environment have access to your password. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5025573 flit-3.12.0/flit/__init__.py0000644000000000000000000002232414770461472012547 0ustar00"""A simple packaging tool for simple packages.""" import argparse import logging import os import pathlib import shutil import subprocess import sys from typing import Optional from flit_core import common from .config import ConfigError from .log import enable_colourful_output __version__ = '3.12.0' log = logging.getLogger(__name__) class PythonNotFoundError(FileNotFoundError): pass def find_python_executable(python: Optional[str] = None) -> str: """Returns an absolute filepath to the executable of Python to use.""" if not python: python = os.environ.get("FLIT_INSTALL_PYTHON") if not python: return sys.executable if os.path.isdir(python): # Assume it's a virtual environment and look for the environment's # Python executable. 
This is the same behavior used by pip. # # Try both Unix and Windows paths in case of odd cases like cygwin. for exe in ("bin/python", "Scripts/python.exe"): py = os.path.join(python, exe) if os.path.exists(py): return os.path.abspath(py) if os.path.isabs(python): # sys.executable is absolute too return python # get absolute filepath of {python} # shutil.which may give a different result to the raw subprocess call # see https://github.com/pypa/flit/pull/300 and https://bugs.python.org/issue38905 resolved_python = shutil.which(python) if resolved_python is None: raise PythonNotFoundError("Unable to resolve Python executable {!r}".format(python)) try: return subprocess.check_output( [resolved_python, "-c", "import sys; print(sys.executable)"], universal_newlines=True, ).strip() except Exception as e: raise PythonNotFoundError( "{} occurred trying to find the absolute filepath of Python executable {!r} ({!r})".format( e.__class__.__name__, python, resolved_python ) ) from e def add_shared_install_options(parser: argparse.ArgumentParser): parser.add_argument('--user', action='store_true', default=None, help="Do a user-local install (default if site.ENABLE_USER_SITE is True)" ) parser.add_argument('--env', action='store_false', dest='user', help="Install into sys.prefix (default if site.ENABLE_USER_SITE is False, i.e. in virtualenvs)" ) parser.add_argument('--python', help="Target Python executable, if different from the one running flit" ) parser.add_argument('--deps', choices=['all', 'production', 'develop', 'none'], default='all', help="Which set of dependencies to install. If --deps=develop, the extras dev, doc, and test are installed" ) parser.add_argument('--only-deps', action='store_true', help="Install only dependencies of this package, and not the package itself" ) parser.add_argument('--extras', default=(), type=lambda l: l.split(',') if l else (), help="Install the dependencies of these (comma separated) extras additionally to the ones implied by --deps. 
" "--extras=all can be useful in combination with --deps=production, --deps=none precludes using --extras" ) def add_shared_build_options(parser: argparse.ArgumentParser): parser.add_argument('--format', action='append', help="Select a format to publish. Options: 'wheel', 'sdist'" ) setup_py_grp = parser.add_mutually_exclusive_group() setup_py_grp.add_argument('--setup-py', action='store_true', help=("Generate a setup.py file in the sdist. " "The sdist will work with older tools that predate PEP 517. " ) ) setup_py_grp.add_argument('--no-setup-py', action='store_true', help=("Don't generate a setup.py file in the sdist. This is the default. " "The sdist will only work with tools that support PEP 517, " "but the wheel will still be usable by any compatible tool." ) ) vcs_grp = parser.add_mutually_exclusive_group() vcs_grp.add_argument('--use-vcs', action='store_true', help=("Choose which files to include in the sdist using git or hg. " "This is a convenient way to include all checked-in files, like " "tests and doc source files, in your sdist, but requires that git " "or hg is available on the command line. This is currently the " "default, but it will change in a future version. " ) ) vcs_grp.add_argument('--no-use-vcs', action='store_true', help=("Select the files to include in the sdist without using git or hg. " "This should include all essential files to install and use your " "package; see the documentation for precisely what is included. " "This will become the default in a future version." ) ) def main(argv=None): ap = argparse.ArgumentParser() ap.add_argument('-f', '--ini-file', type=pathlib.Path, default='pyproject.toml') ap.add_argument('-V', '--version', action='version', version='Flit '+__version__) # --repository now belongs on 'flit publish' - it's still here for # compatibility with scripts passing it before the subcommand. 
ap.add_argument('--repository', dest='deprecated_repository', help=argparse.SUPPRESS) ap.add_argument('--debug', action='store_true', help=argparse.SUPPRESS) ap.add_argument('--logo', action='store_true', help=argparse.SUPPRESS) subparsers = ap.add_subparsers(title='subcommands', dest='subcmd') # flit build -------------------------------------------- parser_build = subparsers.add_parser('build', help="Build wheel and sdist", ) add_shared_build_options(parser_build) # flit publish -------------------------------------------- parser_publish = subparsers.add_parser('publish', help="Upload wheel and sdist", ) add_shared_build_options(parser_publish) parser_publish.add_argument('--pypirc', help="The .pypirc config file to be used. DEFAULT = \"~/.pypirc\"" ) parser_publish.add_argument('--repository', help="Name of the repository to upload to (must be in the specified .pypirc file)" ) # flit install -------------------------------------------- parser_install = subparsers.add_parser('install', help="Install the package", ) parser_install.add_argument('-s', '--symlink', action='store_true', help="Symlink the module/package into site packages instead of copying it" ) parser_install.add_argument('--pth-file', action='store_true', help="Add .pth file for the module/package to site packages instead of copying it" ) add_shared_install_options(parser_install) # flit init -------------------------------------------- parser_init = subparsers.add_parser('init', help="Prepare pyproject.toml for a new package" ) args = ap.parse_args(argv) if args.ini_file.suffix == '.ini': sys.exit("flit.ini format is no longer supported. 
You can use " "'python3 -m flit.tomlify' to convert it to pyproject.toml") if args.subcmd not in {'init'} and not args.ini_file.is_file(): sys.exit('Config file {} does not exist'.format(args.ini_file)) enable_colourful_output(logging.DEBUG if args.debug else logging.INFO) log.debug("Parsed arguments %r", args) if args.logo: from .logo import clogo print(clogo.format(version=__version__)) sys.exit(0) def gen_setup_py(): if not (args.setup_py or args.no_setup_py): return False return args.setup_py def sdist_use_vcs(): return not args.no_use_vcs if args.subcmd == 'build': from .build import main try: main(args.ini_file, formats=set(args.format or []), gen_setup_py=gen_setup_py(), use_vcs=sdist_use_vcs()) except(common.NoDocstringError, common.VCSError, common.NoVersionError) as e: sys.exit(e.args[0]) elif args.subcmd == 'publish': if args.deprecated_repository: log.warning("Passing --repository before the 'upload' subcommand is deprecated: pass it after") repository = args.repository or args.deprecated_repository from .upload import main main(args.ini_file, repository, args.pypirc, formats=set(args.format or []), gen_setup_py=gen_setup_py(), use_vcs=sdist_use_vcs()) elif args.subcmd == 'install': from .install import Installer try: python = find_python_executable(args.python) installer = Installer.from_ini_path( args.ini_file, user=args.user, python=python, symlink=args.symlink, deps=args.deps, extras=args.extras, pth=args.pth_file ) if args.only_deps: installer.install_requirements() else: installer.install() except (ConfigError, PythonNotFoundError, common.NoDocstringError, common.NoVersionError) as e: sys.exit(e.args[0]) elif args.subcmd == 'init': from .init import TerminalIniter TerminalIniter().initialise() else: ap.print_help() sys.exit(1) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5025573 flit-3.12.0/flit/__main__.py0000644000000000000000000000010314770461472012517 0ustar00from __future__ import absolute_import 
from . import main main() ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5025573 flit-3.12.0/flit/_get_dirs.py0000644000000000000000000000160214770461472012743 0ustar00"""get_dirs() is pulled out as a separate file so we can run it in a target Python. """ import os import sys import sysconfig def get_dirs(user=True): """Get the 'scripts' and 'purelib' directories we'll install into. This is now a thin wrapper around sysconfig.get_paths(). It's not inlined, because some tests mock it out to install to a different location. """ if user: if (sys.platform == "darwin") and sysconfig.get_config_var('PYTHONFRAMEWORK'): return sysconfig.get_paths('osx_framework_user') return sysconfig.get_paths(os.name + '_user') else: # The default scheme is 'posix_prefix' or 'nt', and should work for e.g. # installing into a virtualenv return sysconfig.get_paths() if __name__ == '__main__': import json user = '--user'in sys.argv dirs = get_dirs(user) json.dump(dirs, sys.stdout) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5025573 flit-3.12.0/flit/build.py0000644000000000000000000000414714770461472012112 0ustar00"""flit build - build both wheel and sdist""" from contextlib import contextmanager import logging import os from pathlib import Path import tarfile from tempfile import TemporaryDirectory from types import SimpleNamespace import sys from .config import read_flit_config, ConfigError from .sdist import SdistBuilder from .wheel import make_wheel_in log = logging.getLogger(__name__) ALL_FORMATS = {'wheel', 'sdist'} @contextmanager def unpacked_tarball(path): tf = tarfile.open(str(path)) with TemporaryDirectory() as tmpdir: tf.extractall(tmpdir) files = os.listdir(tmpdir) assert len(files) == 1, files yield os.path.join(tmpdir, files[0]) def main(ini_file: Path, formats=None, gen_setup_py=True, use_vcs=True): """Build wheel and sdist""" if not formats: formats = ALL_FORMATS elif not 
formats.issubset(ALL_FORMATS): raise ValueError("Unknown package formats: {}".format(formats - ALL_FORMATS)) sdist_info = wheel_info = None dist_dir = ini_file.parent / 'dist' dist_dir.mkdir(parents=True, exist_ok=True) try: # Load the config file to make sure it gets validated read_flit_config(ini_file) if 'sdist' in formats: sb = SdistBuilder.from_ini_path(ini_file, use_vcs=use_vcs) sdist_file = sb.build(dist_dir, gen_setup_py=gen_setup_py) sdist_info = SimpleNamespace(builder=sb, file=sdist_file) # When we're building both, build the wheel from the unpacked sdist. # This helps ensure that the sdist contains all the necessary files. if 'wheel' in formats: with unpacked_tarball(sdist_file) as tmpdir: log.debug('Building wheel from unpacked sdist %s', tmpdir) tmp_ini_file = Path(tmpdir, ini_file.name) wheel_info = make_wheel_in(tmp_ini_file, dist_dir) elif 'wheel' in formats: wheel_info = make_wheel_in(ini_file, dist_dir) except ConfigError as e: sys.exit('Config error: {}'.format(e)) return SimpleNamespace(wheel=wheel_info, sdist=sdist_info) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5025573 flit-3.12.0/flit/buildapi.py0000644000000000000000000000050314770461472012574 0ustar00from warnings import warn warn('A package has specified `build-backend = "flit.buildapi"` and is being ' 'built with Flit >= 3.10. This is likely to break in a future version. 
' 'Please change the backend to flit_core.buildapi, and/or specify a ' 'maximum version of Flit.') from flit_core.buildapi import * ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5025573 flit-3.12.0/flit/config.py0000644000000000000000000000112014770461472012244 0ustar00import os from flit_core.config import * from flit_core.config import read_flit_config as _read_flit_config_core from .validate import validate_config def read_flit_config(path): """Read and check the `pyproject.toml` or `flit.ini` file with data about the package. """ res = _read_flit_config_core(path) if validate_config(res): if os.environ.get('FLIT_ALLOW_INVALID'): log.warning("Allowing invalid data (FLIT_ALLOW_INVALID set). Uploads may still fail.") else: raise ConfigError("Invalid config values (see log)") return res ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5025573 flit-3.12.0/flit/init.py0000644000000000000000000002116414770461472011754 0ustar00from datetime import date import json import os from pathlib import Path import re import sys import tomli_w def get_data_dir(): """Get the directory path for flit user data files. """ home = os.path.realpath(os.path.expanduser('~')) if sys.platform == 'darwin': d = Path(home, 'Library') elif os.name == 'nt': appdata = os.environ.get('APPDATA', None) if appdata: d = Path(appdata) else: d = Path(home, 'AppData', 'Roaming') else: # Linux, non-OS X Unix, AIX, etc. 
xdg = os.environ.get("XDG_DATA_HOME", None) d = Path(xdg) if xdg else Path(home, '.local/share') return d / 'flit' def get_defaults(): try: with (get_data_dir() / 'init_defaults.json').open(encoding='utf-8') as f: return json.load(f) except FileNotFoundError: return {} def store_defaults(d): data_dir = get_data_dir() try: data_dir.mkdir(parents=True) except FileExistsError: pass with (data_dir / 'init_defaults.json').open('w', encoding='utf-8') as f: json.dump(d, f, indent=2) license_choices = [ ('mit', "MIT - simple and permissive"), ('apache', "Apache - explicitly grants patent rights"), ('gpl3', "GPL - ensures that code based on this is shared with the same terms"), ('skip', "Skip - choose a license later"), ] license_names_to_spdx = { 'mit': 'MIT', 'apache': 'Apache-2.0', 'gpl3': 'GPL-3.0-or-later', } license_templates_dir = Path(__file__).parent / 'license_templates' class IniterBase: def __init__(self, directory='.'): self.directory = Path(directory) self.defaults = get_defaults() def validate_email(self, s): # Properly validating an email address is much more complex return bool(re.match(r'.+@.+', s)) or s == "" def validate_homepage(self, s): return not s or s.startswith(('http://', 'https://')) def guess_module_name(self): packages, modules = [], [] for p in self.directory.iterdir(): if not p.stem.isidentifier(): continue if p.is_dir() and (p / '__init__.py').is_file(): if p.name not in {'test', 'tests'}: packages.append(p.name) elif p.is_file() and p.suffix == '.py': if p.stem not in {'setup'} and not p.name.startswith('test_'): modules.append(p.stem) src_dir = self.directory / 'src' if src_dir.is_dir(): for p in src_dir.iterdir(): if not p.stem.isidentifier(): continue if p.is_dir() and (p / '__init__.py').is_file(): if p.name not in {'test', 'tests'}: packages.append(p.name) elif p.is_file() and p.suffix == '.py': if p.stem not in {'setup'} and not p.name.startswith('test_'): modules.append(p.stem) if len(packages) == 1: return packages[0] elif 
len(packages) == 0 and len(modules) == 1: return modules[0] else: return None def update_defaults(self, author, author_email, module, home_page, license): new_defaults = {'author': author, 'author_email': author_email, 'license': license} name_chunk_pat = r'\b{}\b'.format(re.escape(module)) if re.search(name_chunk_pat, home_page): new_defaults['home_page_template'] = \ re.sub(name_chunk_pat, '{modulename}', home_page, flags=re.I) if any(new_defaults[k] != self.defaults.get(k) for k in new_defaults): self.defaults.update(new_defaults) store_defaults(self.defaults) def write_license(self, name, author): if (self.directory / 'LICENSE').exists(): return year = date.today().year license_text = (license_templates_dir / name).read_text('utf-8') (self.directory / 'LICENSE').write_text( license_text.format(year=year, author=author), encoding='utf-8' ) def find_readme(self): allowed = ("readme.md","readme.rst","readme.txt") for fl in self.directory.glob("*.*"): if fl.name.lower() in allowed: return fl.name return None class TerminalIniter(IniterBase): def prompt_text(self, prompt, default, validator, retry_msg="Try again."): if default is not None: p = "{} [{}]: ".format(prompt, default) else: p = prompt + ': ' while True: response = input(p) if response == '' and default is not None: response = default if validator(response): return response print(retry_msg) def prompt_options(self, prompt, options, default=None): default_ix = None print(prompt) for i, (key, text) in enumerate(options, start=1): print("{}. 
{}".format(i, text)) if key == default: default_ix = i while True: p = "Enter 1-" + str(len(options)) if default_ix is not None: p += ' [{}]'.format(default_ix) response = input(p+': ') if (default_ix is not None) and response == '': return default if response.isnumeric(): ir = int(response) if 1 <= ir <= len(options): return options[ir-1][0] print("Try again.") def initialise(self): if (self.directory / 'pyproject.toml').exists(): resp = input("pyproject.toml exists - overwrite it? [y/N]: ") if (not resp) or resp[0].lower() != 'y': return module = self.prompt_text('Module name', self.guess_module_name(), str.isidentifier) author = self.prompt_text('Author', self.defaults.get('author'), lambda s: True) author_email = self.prompt_text('Author email', self.defaults.get('author_email'), self.validate_email) if 'home_page_template' in self.defaults: home_page_default = self.defaults['home_page_template'].replace( '{modulename}', module) else: home_page_default = None home_page = self.prompt_text('Home page', home_page_default, self.validate_homepage, retry_msg="Should start with http:// or https:// - try again.") license = self.prompt_options('Choose a license (see http://choosealicense.com/ for more info)', license_choices, self.defaults.get('license')) readme = self.find_readme() self.update_defaults(author=author, author_email=author_email, home_page=home_page, module=module, license=license) # Format information as TOML # This is ugly code, but I want the generated pyproject.toml, which # will mostly be edited by hand, to look a particular way - e.g. authors # in inline tables. It's easier to 'cheat' with some string formatting # than to do this through a TOML library. 
author_info = [] if author: author_info.append(f'name = {json.dumps(author, ensure_ascii=False)}') if author_email: author_info.append(f'email = {json.dumps(author_email)}') if author_info: authors_list = "[{%s}]" % ", ".join(author_info) else: authors_list = "[]" if license != 'skip': self.write_license(license, author) with (self.directory / 'pyproject.toml').open('w', encoding='utf-8') as f: f.write(TEMPLATE.format( name=json.dumps(module), authors=authors_list )) if readme: f.write(tomli_w.dumps({'readme': readme})) if license != 'skip': f.write(tomli_w.dumps({'license': license_names_to_spdx[license]})) f.write(f"license-files = {json.dumps(['LICENSE'])}\n") f.write('dynamic = ["version", "description"]\n') if home_page: f.write("\n" + tomli_w.dumps({ 'project': {'urls': {'Home': home_page}} })) print() print("Written pyproject.toml; edit that file to add optional extra info.") TEMPLATE = """\ [build-system] requires = ["flit_core >=3.11,<4"] build-backend = "flit_core.buildapi" [project] name = {name} authors = {authors} """ if __name__ == '__main__': TerminalIniter().initialise() ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5025573 flit-3.12.0/flit/install.py0000644000000000000000000004067214770461472012464 0ustar00"""Install packages locally for development """ import logging import os import os.path as osp import csv import json import pathlib import random import shutil import site import sys import tempfile from subprocess import check_call, check_output import sysconfig from flit_core import common from .config import read_flit_config from ._get_dirs import get_dirs log = logging.getLogger(__name__) def _requires_dist_to_pip_requirement(requires_dist): """Parse "Foo (v); python_version == '2.x'" from Requires-Dist Returns pip-style appropriate for requirements.txt. 
""" env_mark = '' if ';' in requires_dist: name_version, env_mark = requires_dist.split(';', 1) else: name_version = requires_dist if '(' in name_version: # turn 'name (X)' and 'name ('): version = '==' + version name_version = name + version # re-add environment marker return ' ;'.join([name_version, env_mark]) def test_writable_dir(path): """Check if a directory is writable. Uses os.access() on POSIX, tries creating files on Windows. """ if os.name == 'posix': return os.access(path, os.W_OK) return _test_writable_dir_win(path) def _test_writable_dir_win(path): # os.access doesn't work on Windows: http://bugs.python.org/issue2528 # and we can't use tempfile: http://bugs.python.org/issue22107 basename = 'accesstest_deleteme_fishfingers_custard_' alphabet = 'abcdefghijklmnopqrstuvwxyz0123456789' for i in range(10): name = basename + ''.join(random.choice(alphabet) for _ in range(6)) file = osp.join(path, name) try: with open(file, mode='xb'): pass except FileExistsError: continue except PermissionError: # This could be because there's a directory with the same name. # But it's highly unlikely there's a directory called that, # so we'll assume it's because the parent directory is not writable. return False else: os.unlink(file) return True # This should never be reached msg = ('Unexpected condition testing for writable directory {!r}. ' 'Please open an issue on flit to debug why this occurred.') # pragma: no cover raise EnvironmentError(msg.format(path)) # pragma: no cover class RootInstallError(Exception): def __str__(self): return ("Installing packages as root is not recommended. " "To allow this, set FLIT_ROOT_INSTALL=1 and try again.") class DependencyError(Exception): def __str__(self): return 'To install dependencies for extras, you cannot set deps=none.' 
class Installer(object): def __init__(self, directory, ini_info, user=None, python=sys.executable, symlink=False, deps='all', extras=(), pth=False): self.directory = directory self.ini_info = ini_info self.python = python self.symlink = symlink self.pth = pth self.deps = deps self.extras = extras if deps != 'none' and os.environ.get('FLIT_NO_NETWORK', ''): self.deps = 'none' log.warning('Not installing dependencies, because FLIT_NO_NETWORK is set') if deps == 'none' and extras: raise DependencyError() self.module = common.Module(self.ini_info.module, directory) if (hasattr(os, 'getuid') and (os.getuid() == 0) and (not os.environ.get('FLIT_ROOT_INSTALL'))): raise RootInstallError if user is None: self.user = self._auto_user(python) else: self.user = user log.debug('User install? %s', self.user) self.installed_files = [] @classmethod def from_ini_path(cls, ini_path, user=None, python=sys.executable, symlink=False, deps='all', extras=(), pth=False): ini_info = read_flit_config(ini_path) return cls(ini_path.parent, ini_info, user=user, python=python, symlink=symlink, deps=deps, extras=extras, pth=pth) def _run_python(self, code=None, file=None, extra_args=()): if code and file: raise ValueError('Specify code or file, not both') if not (code or file): raise ValueError('Specify code or file') if code: args = [self.python, '-c', code] else: args = [self.python, file] args.extend(extra_args) env = os.environ.copy() env['PYTHONIOENCODING'] = 'utf-8' # On Windows, shell needs to be True to pick up our local PATH # when finding the Python command. shell = (os.name == 'nt') return check_output(args, shell=shell, env=env).decode('utf-8') def _auto_user(self, python): """Default guess for whether to do user-level install. This should be True for system Python, and False in an env. 
""" if python == sys.executable: user_site = site.ENABLE_USER_SITE lib_dir = sysconfig.get_path('purelib') else: out = self._run_python(code= ("import sysconfig, site; " "print(site.ENABLE_USER_SITE); " "print(sysconfig.get_path('purelib'))")) user_site, lib_dir = out.split('\n', 1) user_site = (user_site.strip() == 'True') lib_dir = lib_dir.strip() if not user_site: # No user site packages - probably a virtualenv log.debug('User site packages not available - env install') return False log.debug('Checking access to %s', lib_dir) return not test_writable_dir(lib_dir) def install_scripts(self, script_defs, scripts_dir): for name, ep in script_defs.items(): module, func = common.parse_entry_point(ep) import_name = func.split('.')[0] script_file = pathlib.Path(scripts_dir) / name log.info('Writing script to %s', script_file) with script_file.open('w', encoding='utf-8') as f: f.write(common.script_template.format( interpreter=self.python, module=module, import_name=import_name, func=func )) script_file.chmod(0o755) self.installed_files.append(script_file) if sys.platform == 'win32': cmd_file = script_file.with_suffix('.cmd') cmd = '@echo off\r\n"{python}" "%~dp0\\{script}" %*\r\n'.format( python=self.python, script=name) log.debug("Writing script wrapper to %s", cmd_file) with cmd_file.open('w') as f: f.write(cmd) self.installed_files.append(cmd_file) def install_data_dir(self, target_data_dir): for src_path in common.walk_data_dir(self.ini_info.data_directory): rel_path = os.path.relpath(src_path, self.ini_info.data_directory) dst_path = os.path.join(target_data_dir, rel_path) os.makedirs(os.path.dirname(dst_path), exist_ok=True) pathlib.Path(dst_path).unlink(missing_ok=True) if self.symlink: os.symlink(os.path.realpath(src_path), dst_path) else: shutil.copy2(src_path, dst_path) self.installed_files.append(dst_path) def _record_installed_directory(self, path): for dirpath, dirnames, files in os.walk(path): for f in files: self.installed_files.append(osp.join(dirpath, 
f)) def _extras_to_install(self): extras_to_install = set(self.extras) if self.deps == 'all' or 'all' in extras_to_install: extras_to_install |= set(self.ini_info.reqs_by_extra.keys()) # We don’t remove 'all' from the set because there might be an extra called “all”. elif self.deps == 'develop': extras_to_install |= {'dev', 'doc', 'test'} if self.deps != 'none': # '.none' is an internal token for normal requirements extras_to_install.add('.none') log.info("Extras to install for deps %r: %s", self.deps, extras_to_install) return extras_to_install def install_requirements(self): """Install requirements of a package with pip. Creates a temporary requirements.txt from requires_dist metadata. """ # construct the full list of requirements, including dev requirements requirements = [] if self.deps == 'none': return for extra in self._extras_to_install(): requirements.extend(self.ini_info.reqs_by_extra.get(extra, [])) # there aren't any requirements, so return if len(requirements) == 0: return requirements = [ _requires_dist_to_pip_requirement(req_d) for req_d in requirements ] # install the requirements with pip cmd = [self.python, '-m', 'pip', 'install'] if self.user: cmd.append('--user') with tempfile.NamedTemporaryFile(mode='w', suffix='requirements.txt', delete=False) as tf: tf.file.write('\n'.join(requirements)) cmd.extend(['-r', tf.name]) log.info("Installing requirements") try: check_call(cmd) finally: os.remove(tf.name) def install_reqs_my_python_if_needed(self): """Install requirements to this environment if needed. We can normally get the summary and version number without import the module, but if we do need to import it, we may need to install its requirements for the Python where flit is running. """ try: common.get_info_from_module(self.module, self.ini_info.dynamic_metadata) except ImportError: if self.deps == 'none': raise # We were asked not to install deps, so bail out. 
log.warning("Installing requirements to Flit's env to import module.") user = self.user if (self.python == sys.executable) else None i2 = Installer(self.directory, self.ini_info, user=user, deps='production') i2.install_requirements() def _get_dirs(self, user): if self.python == sys.executable: return get_dirs(user=user) else: import json path = osp.join(osp.dirname(__file__), '_get_dirs.py') args = ['--user'] if user else [] return json.loads(self._run_python(file=path, extra_args=args)) def install_directly(self): """Install a module/package into site-packages, and create its scripts. """ dirs = self._get_dirs(user=self.user) os.makedirs(dirs['purelib'], exist_ok=True) os.makedirs(dirs['scripts'], exist_ok=True) module_rel_path = self.module.path.relative_to(self.module.source_dir) dst = osp.join(dirs['purelib'], module_rel_path) if osp.lexists(dst): if osp.isdir(dst) and not osp.islink(dst): shutil.rmtree(dst) else: os.unlink(dst) # Install requirements to target environment self.install_requirements() # Install requirements to this environment if we need them to # get docstring & version number. 
if self.python != sys.executable: self.install_reqs_my_python_if_needed() src = self.module.path if self.symlink: if self.module.in_namespace_package: ns_dir = os.path.dirname(dst) os.makedirs(ns_dir, exist_ok=True) log.info("Symlinking %s -> %s", src, dst) os.symlink(src.resolve(), dst) self.installed_files.append(dst) elif self.pth: # .pth points to the the folder containing the module (which is # added to sys.path) pth_file = pathlib.Path(dirs['purelib'], self.module.name + '.pth') log.info("Adding .pth file %s for %s", pth_file, self.module.source_dir) pth_file.write_text(str(self.module.source_dir.resolve()), 'utf-8') self.installed_files.append(pth_file) elif self.module.is_package: log.info("Copying directory %s -> %s", src, dst) shutil.copytree(src, dst) self._record_installed_directory(dst) else: log.info("Copying file %s -> %s", src, dst) os.makedirs(osp.dirname(dst), exist_ok=True) shutil.copy2(src, dst) self.installed_files.append(dst) scripts = self.ini_info.entrypoints.get('console_scripts', {}) self.install_scripts(scripts, dirs['scripts']) self.install_data_dir(dirs['data']) self.write_dist_info(dirs['purelib']) def install_with_pip(self): """Let pip install the project directory pip will create an isolated build environment and install build dependencies, which means downloading flit_core from PyPI. We ask pip to install the project directory (instead of building a temporary wheel and asking pip to install it), so pip will record the project directory in direct_url.json. 
""" self.install_reqs_my_python_if_needed() extras = self._extras_to_install() extras.discard('.none') req_with_extras = '{}[{}]'.format(self.directory, ','.join(extras)) \ if extras else str(self.directory) cmd = [self.python, '-m', 'pip', 'install', req_with_extras] if self.user: cmd.append('--user') if self.deps == 'none': cmd.append('--no-deps') shell = (os.name == 'nt') check_call(cmd, shell=shell) def write_dist_info(self, site_pkgs): """Write dist-info folder, according to PEP 376""" metadata = common.make_metadata(self.module, self.ini_info) dist_info = pathlib.Path(site_pkgs) / common.dist_info_name( metadata.name, metadata.version) try: dist_info.mkdir() except FileExistsError: shutil.rmtree(str(dist_info)) dist_info.mkdir() with (dist_info / 'METADATA').open('w', encoding='utf-8') as f: metadata.write_metadata_file(f) self.installed_files.append(dist_info / 'METADATA') with (dist_info / 'INSTALLER').open('w', encoding='utf-8') as f: f.write('flit') self.installed_files.append(dist_info / 'INSTALLER') # We only handle explicitly requested installations with (dist_info / 'REQUESTED').open('wb'): pass self.installed_files.append(dist_info / 'REQUESTED') if self.ini_info.entrypoints: with (dist_info / 'entry_points.txt').open('w') as f: common.write_entry_points(self.ini_info.entrypoints, f) self.installed_files.append(dist_info / 'entry_points.txt') with (dist_info / 'direct_url.json').open('w', encoding='utf-8') as f: json.dump( { "url": self.directory.resolve().as_uri(), "dir_info": {"editable": bool(self.symlink or self.pth)} }, f ) self.installed_files.append(dist_info / 'direct_url.json') # newline='' because the csv module does its own newline translation with (dist_info / 'RECORD').open('w', encoding='utf-8', newline='') as f: cf = csv.writer(f) for path in sorted(self.installed_files, key=str): path = pathlib.Path(path) if path.is_symlink() or path.suffix in {'.pyc', '.pyo'}: hash, size = '', '' else: hash = 'sha256=' + common.hash_file(str(path)) 
size = path.stat().st_size try: path = path.relative_to(site_pkgs) except ValueError: pass cf.writerow((str(path), hash, size)) cf.writerow(((dist_info / 'RECORD').relative_to(site_pkgs), '', '')) def install(self): if self.symlink or self.pth: self.install_directly() else: self.install_with_pip() ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5025573 flit-3.12.0/flit/license_templates/apache0000644000000000000000000002171114770461472015301 0ustar00Apache License Version 2.0, January 2004 http://www.apache.org/licenses/ TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION 1. Definitions. "License" shall mean the terms and conditions for use, reproduction, and distribution as defined by Sections 1 through 9 of this document. "Licensor" shall mean the copyright owner or entity authorized by the copyright owner that is granting the License. "Legal Entity" shall mean the union of the acting entity and all other entities that control, are controlled by, or are under common control with that entity. For the purposes of this definition, "control" means (i) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (ii) ownership of fifty percent (50%) or more of the outstanding shares, or (iii) beneficial ownership of such entity. "You" (or "Your") shall mean an individual or Legal Entity exercising permissions granted by this License. "Source" form shall mean the preferred form for making modifications, including but not limited to software source code, documentation source, and configuration files. "Object" form shall mean any form resulting from mechanical transformation or translation of a Source form, including but not limited to compiled object code, generated documentation, and conversions to other media types. 
"Work" shall mean the work of authorship, whether in Source or Object form, made available under the License, as indicated by a copyright notice that is included in or attached to the work (an example is provided in the Appendix below). "Derivative Works" shall mean any work, whether in Source or Object form, that is based on (or derived from) the Work and for which the editorial revisions, annotations, elaborations, or other modifications represent, as a whole, an original work of authorship. For the purposes of this License, Derivative Works shall not include works that remain separable from, or merely link (or bind by name) to the interfaces of, the Work and Derivative Works thereof. "Contribution" shall mean any work of authorship, including the original version of the Work and any modifications or additions to that Work or Derivative Works thereof, that is intentionally submitted to Licensor for inclusion in the Work by the copyright owner or by an individual or Legal Entity authorized to submit on behalf of the copyright owner. For the purposes of this definition, "submitted" means any form of electronic, verbal, or written communication sent to the Licensor or its representatives, including but not limited to communication on electronic mailing lists, source code control systems, and issue tracking systems that are managed by, or on behalf of, the Licensor for the purpose of discussing and improving the Work, but excluding communication that is conspicuously marked or otherwise designated in writing by the copyright owner as "Not a Contribution." "Contributor" shall mean Licensor and any individual or Legal Entity on behalf of whom a Contribution has been received by Licensor and subsequently incorporated within the Work. 2. Grant of Copyright License. 
Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable copyright license to reproduce, prepare Derivative Works of, publicly display, publicly perform, sublicense, and distribute the Work and such Derivative Works in Source or Object form. 3. Grant of Patent License. Subject to the terms and conditions of this License, each Contributor hereby grants to You a perpetual, worldwide, non-exclusive, no-charge, royalty-free, irrevocable (except as stated in this section) patent license to make, have made, use, offer to sell, sell, import, and otherwise transfer the Work, where such license applies only to those patent claims licensable by such Contributor that are necessarily infringed by their Contribution(s) alone or by combination of their Contribution(s) with the Work to which such Contribution(s) was submitted. If You institute patent litigation against any entity (including a cross-claim or counterclaim in a lawsuit) alleging that the Work or a Contribution incorporated within the Work constitutes direct or contributory patent infringement, then any patent licenses granted to You under this License for that Work shall terminate as of the date such litigation is filed. 4. Redistribution. 
You may reproduce and distribute copies of the Work or Derivative Works thereof in any medium, with or without modifications, and in Source or Object form, provided that You meet the following conditions: You must give any other recipients of the Work or Derivative Works a copy of this License; and You must cause any modified files to carry prominent notices stating that You changed the files; and You must retain, in the Source form of any Derivative Works that You distribute, all copyright, patent, trademark, and attribution notices from the Source form of the Work, excluding those notices that do not pertain to any part of the Derivative Works; and If the Work includes a "NOTICE" text file as part of its distribution, then any Derivative Works that You distribute must include a readable copy of the attribution notices contained within such NOTICE file, excluding those notices that do not pertain to any part of the Derivative Works, in at least one of the following places: within a NOTICE text file distributed as part of the Derivative Works; within the Source form or documentation, if provided along with the Derivative Works; or, within a display generated by the Derivative Works, if and wherever such third-party notices normally appear. The contents of the NOTICE file are for informational purposes only and do not modify the License. You may add Your own attribution notices within Derivative Works that You distribute, alongside or as an addendum to the NOTICE text from the Work, provided that such additional attribution notices cannot be construed as modifying the License. You may add Your own copyright statement to Your modifications and may provide additional or different license terms and conditions for use, reproduction, or distribution of Your modifications, or for any such Derivative Works as a whole, provided Your use, reproduction, and distribution of the Work otherwise complies with the conditions stated in this License. 5. Submission of Contributions. 
Unless You explicitly state otherwise, any Contribution intentionally submitted for inclusion in the Work by You to the Licensor shall be under the terms and conditions of this License, without any additional terms or conditions. Notwithstanding the above, nothing herein shall supersede or modify the terms of any separate license agreement you may have executed with Licensor regarding such Contributions. 6. Trademarks. This License does not grant permission to use the trade names, trademarks, service marks, or product names of the Licensor, except as required for reasonable and customary use in describing the origin of the Work and reproducing the content of the NOTICE file. 7. Disclaimer of Warranty. Unless required by applicable law or agreed to in writing, Licensor provides the Work (and each Contributor provides its Contributions) on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied, including, without limitation, any warranties or conditions of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A PARTICULAR PURPOSE. You are solely responsible for determining the appropriateness of using or redistributing the Work and assume any risks associated with Your exercise of permissions under this License. 8. Limitation of Liability. In no event and under no legal theory, whether in tort (including negligence), contract, or otherwise, unless required by applicable law (such as deliberate and grossly negligent acts) or agreed to in writing, shall any Contributor be liable to You for damages, including any direct, indirect, special, incidental, or consequential damages of any character arising as a result of this License or out of the use or inability to use the Work (including but not limited to damages for loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses), even if such Contributor has been advised of the possibility of such damages. 9. 
Accepting Warranty or Additional Liability. While redistributing the Work or Derivative Works thereof, You may choose to offer, and charge a fee for, acceptance of support, warranty, indemnity, or other liability obligations and/or rights consistent with this License. However, in accepting such obligations, You may act only on Your own behalf and on Your sole responsibility, not on behalf of any other Contributor, and only if You agree to indemnify, defend, and hold each Contributor harmless for any liability incurred by, or claims asserted against, such Contributor by reason of your accepting any such warranty or additional liability. END OF TERMS AND CONDITIONS ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5025573 flit-3.12.0/flit/license_templates/gpl30000644000000000000000000007724614770461472014743 0ustar00 GNU GENERAL PUBLIC LICENSE Version 3, 29 June 2007 Copyright (C) 2007 Free Software Foundation, Inc. Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The GNU General Public License is a free, copyleft license for software and other kinds of works. The licenses for most software and other practical works are designed to take away your freedom to share and change the works. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change all versions of a program--to make sure it remains free software for all its users. We, the Free Software Foundation, use the GNU General Public License for most of our software; it applies also to any other work released this way by its authors. You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. 
Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for them if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs, and that you know you can do these things. To protect your rights, we need to prevent others from denying you these rights or asking you to surrender the rights. Therefore, you have certain responsibilities if you distribute copies of the software, or if you modify it: responsibilities to respect the freedom of others. For example, if you distribute copies of such a program, whether gratis or for a fee, you must pass on to the recipients the same freedoms that you received. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. Developers that use the GNU GPL protect your rights with two steps: (1) assert copyright on the software, and (2) offer you this License giving you legal permission to copy, distribute and/or modify it. For the developers' and authors' protection, the GPL clearly explains that there is no warranty for this free software. For both users' and authors' sake, the GPL requires that modified versions be marked as changed, so that their problems will not be attributed erroneously to authors of previous versions. Some devices are designed to deny users access to install or run modified versions of the software inside them, although the manufacturer can do so. This is fundamentally incompatible with the aim of protecting users' freedom to change the software. The systematic pattern of such abuse occurs in the area of products for individuals to use, which is precisely where it is most unacceptable. Therefore, we have designed this version of the GPL to prohibit the practice for those products. 
If such problems arise substantially in other domains, we stand ready to extend this provision to those domains in future versions of the GPL, as needed to protect the freedom of users. Finally, every program is threatened constantly by software patents. States should not allow patents to restrict development and use of software on general-purpose computers, but in those that do, we wish to avoid the special danger that patents applied to a free program could make it effectively proprietary. To prevent this, the GPL assures that patents cannot be used to render the program non-free. The precise terms and conditions for copying, distribution and modification follow. TERMS AND CONDITIONS 0. Definitions. "This License" refers to version 3 of the GNU General Public License. "Copyright" also means copyright-like laws that apply to other kinds of works, such as semiconductor masks. "The Program" refers to any copyrightable work licensed under this License. Each licensee is addressed as "you". "Licensees" and "recipients" may be individuals or organizations. To "modify" a work means to copy from or adapt all or part of the work in a fashion requiring copyright permission, other than the making of an exact copy. The resulting work is called a "modified version" of the earlier work or a work "based on" the earlier work. A "covered work" means either the unmodified Program or a work based on the Program. To "propagate" a work means to do anything with it that, without permission, would make you directly or secondarily liable for infringement under applicable copyright law, except executing it on a computer or modifying a private copy. Propagation includes copying, distribution (with or without modification), making available to the public, and in some countries other activities as well. To "convey" a work means any kind of propagation that enables other parties to make or receive copies. 
Mere interaction with a user through a computer network, with no transfer of a copy, is not conveying. An interactive user interface displays "Appropriate Legal Notices" to the extent that it includes a convenient and prominently visible feature that (1) displays an appropriate copyright notice, and (2) tells the user that there is no warranty for the work (except to the extent that warranties are provided), that licensees may convey the work under this License, and how to view a copy of this License. If the interface presents a list of user commands or options, such as a menu, a prominent item in the list meets this criterion. 1. Source Code. The "source code" for a work means the preferred form of the work for making modifications to it. "Object code" means any non-source form of a work. A "Standard Interface" means an interface that either is an official standard defined by a recognized standards body, or, in the case of interfaces specified for a particular programming language, one that is widely used among developers working in that language. The "System Libraries" of an executable work include anything, other than the work as a whole, that (a) is included in the normal form of packaging a Major Component, but which is not part of that Major Component, and (b) serves only to enable use of the work with that Major Component, or to implement a Standard Interface for which an implementation is available to the public in source code form. A "Major Component", in this context, means a major essential component (kernel, window system, and so on) of the specific operating system (if any) on which the executable work runs, or a compiler used to produce the work, or an object code interpreter used to run it. The "Corresponding Source" for a work in object code form means all the source code needed to generate, install, and (for an executable work) run the object code and to modify the work, including scripts to control those activities. 
However, it does not include the work's System Libraries, or general-purpose tools or generally available free programs which are used unmodified in performing those activities but which are not part of the work. For example, Corresponding Source includes interface definition files associated with source files for the work, and the source code for shared libraries and dynamically linked subprograms that the work is specifically designed to require, such as by intimate data communication or control flow between those subprograms and other parts of the work. The Corresponding Source need not include anything that users can regenerate automatically from other parts of the Corresponding Source. The Corresponding Source for a work in source code form is that same work. 2. Basic Permissions. All rights granted under this License are granted for the term of copyright on the Program, and are irrevocable provided the stated conditions are met. This License explicitly affirms your unlimited permission to run the unmodified Program. The output from running a covered work is covered by this License only if the output, given its content, constitutes a covered work. This License acknowledges your rights of fair use or other equivalent, as provided by copyright law. You may make, run and propagate covered works that you do not convey, without conditions so long as your license otherwise remains in force. You may convey covered works to others for the sole purpose of having them make modifications exclusively for you, or provide you with facilities for running those works, provided that you comply with the terms of this License in conveying all material for which you do not control copyright. Those thus making or running the covered works for you must do so exclusively on your behalf, under your direction and control, on terms that prohibit them from making any copies of your copyrighted material outside their relationship with you. 
Conveying under any other circumstances is permitted solely under the conditions stated below. Sublicensing is not allowed; section 10 makes it unnecessary. 3. Protecting Users' Legal Rights From Anti-Circumvention Law. No covered work shall be deemed part of an effective technological measure under any applicable law fulfilling obligations under article 11 of the WIPO copyright treaty adopted on 20 December 1996, or similar laws prohibiting or restricting circumvention of such measures. When you convey a covered work, you waive any legal power to forbid circumvention of technological measures to the extent such circumvention is effected by exercising rights under this License with respect to the covered work, and you disclaim any intention to limit operation or modification of the work as a means of enforcing, against the work's users, your or third parties' legal rights to forbid circumvention of technological measures. 4. Conveying Verbatim Copies. You may convey verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice; keep intact all notices stating that this License and any non-permissive terms added in accord with section 7 apply to the code; keep intact all notices of the absence of any warranty; and give all recipients a copy of this License along with the Program. You may charge any price or no price for each copy that you convey, and you may offer support or warranty protection for a fee. 5. Conveying Modified Source Versions. You may convey a work based on the Program, or the modifications to produce it from the Program, in the form of source code under the terms of section 4, provided that you also meet all of these conditions: a) The work must carry prominent notices stating that you modified it, and giving a relevant date. 
b) The work must carry prominent notices stating that it is released under this License and any conditions added under section 7. This requirement modifies the requirement in section 4 to "keep intact all notices". c) You must license the entire work, as a whole, under this License to anyone who comes into possession of a copy. This License will therefore apply, along with any applicable section 7 additional terms, to the whole of the work, and all its parts, regardless of how they are packaged. This License gives no permission to license the work in any other way, but it does not invalidate such permission if you have separately received it. d) If the work has interactive user interfaces, each must display Appropriate Legal Notices; however, if the Program has interactive interfaces that do not display Appropriate Legal Notices, your work need not make them do so. A compilation of a covered work with other separate and independent works, which are not by their nature extensions of the covered work, and which are not combined with it such as to form a larger program, in or on a volume of a storage or distribution medium, is called an "aggregate" if the compilation and its resulting copyright are not used to limit the access or legal rights of the compilation's users beyond what the individual works permit. Inclusion of a covered work in an aggregate does not cause this License to apply to the other parts of the aggregate. 6. Conveying Non-Source Forms. You may convey a covered work in object code form under the terms of sections 4 and 5, provided that you also convey the machine-readable Corresponding Source under the terms of this License, in one of these ways: a) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by the Corresponding Source fixed on a durable physical medium customarily used for software interchange. 
b) Convey the object code in, or embodied in, a physical product (including a physical distribution medium), accompanied by a written offer, valid for at least three years and valid for as long as you offer spare parts or customer support for that product model, to give anyone who possesses the object code either (1) a copy of the Corresponding Source for all the software in the product that is covered by this License, on a durable physical medium customarily used for software interchange, for a price no more than your reasonable cost of physically performing this conveying of source, or (2) access to copy the Corresponding Source from a network server at no charge. c) Convey individual copies of the object code with a copy of the written offer to provide the Corresponding Source. This alternative is allowed only occasionally and noncommercially, and only if you received the object code with such an offer, in accord with subsection 6b. d) Convey the object code by offering access from a designated place (gratis or for a charge), and offer equivalent access to the Corresponding Source in the same way through the same place at no further charge. You need not require recipients to copy the Corresponding Source along with the object code. If the place to copy the object code is a network server, the Corresponding Source may be on a different server (operated by you or a third party) that supports equivalent copying facilities, provided you maintain clear directions next to the object code saying where to find the Corresponding Source. Regardless of what server hosts the Corresponding Source, you remain obligated to ensure that it is available for as long as needed to satisfy these requirements. e) Convey the object code using peer-to-peer transmission, provided you inform other peers where the object code and Corresponding Source of the work are being offered to the general public at no charge under subsection 6d. 
A separable portion of the object code, whose source code is excluded from the Corresponding Source as a System Library, need not be included in conveying the object code work. A "User Product" is either (1) a "consumer product", which means any tangible personal property which is normally used for personal, family, or household purposes, or (2) anything designed or sold for incorporation into a dwelling. In determining whether a product is a consumer product, doubtful cases shall be resolved in favor of coverage. For a particular product received by a particular user, "normally used" refers to a typical or common use of that class of product, regardless of the status of the particular user or of the way in which the particular user actually uses, or expects or is expected to use, the product. A product is a consumer product regardless of whether the product has substantial commercial, industrial or non-consumer uses, unless such uses represent the only significant mode of use of the product. "Installation Information" for a User Product means any methods, procedures, authorization keys, or other information required to install and execute modified versions of a covered work in that User Product from a modified version of its Corresponding Source. The information must suffice to ensure that the continued functioning of the modified object code is in no case prevented or interfered with solely because modification has been made. If you convey an object code work under this section in, or with, or specifically for use in, a User Product, and the conveying occurs as part of a transaction in which the right of possession and use of the User Product is transferred to the recipient in perpetuity or for a fixed term (regardless of how the transaction is characterized), the Corresponding Source conveyed under this section must be accompanied by the Installation Information. 
But this requirement does not apply if neither you nor any third party retains the ability to install modified object code on the User Product (for example, the work has been installed in ROM). The requirement to provide Installation Information does not include a requirement to continue to provide support service, warranty, or updates for a work that has been modified or installed by the recipient, or for the User Product in which it has been modified or installed. Access to a network may be denied when the modification itself materially and adversely affects the operation of the network or violates the rules and protocols for communication across the network. Corresponding Source conveyed, and Installation Information provided, in accord with this section must be in a format that is publicly documented (and with an implementation available to the public in source code form), and must require no special password or key for unpacking, reading or copying. 7. Additional Terms. "Additional permissions" are terms that supplement the terms of this License by making exceptions from one or more of its conditions. Additional permissions that are applicable to the entire Program shall be treated as though they were included in this License, to the extent that they are valid under applicable law. If additional permissions apply only to part of the Program, that part may be used separately under those permissions, but the entire Program remains governed by this License without regard to the additional permissions. When you convey a copy of a covered work, you may at your option remove any additional permissions from that copy, or from any part of it. (Additional permissions may be written to require their own removal in certain cases when you modify the work.) You may place additional permissions on material, added by you to a covered work, for which you have or can give appropriate copyright permission. 
Notwithstanding any other provision of this License, for material you add to a covered work, you may (if authorized by the copyright holders of that material) supplement the terms of this License with terms: a) Disclaiming warranty or limiting liability differently from the terms of sections 15 and 16 of this License; or b) Requiring preservation of specified reasonable legal notices or author attributions in that material or in the Appropriate Legal Notices displayed by works containing it; or c) Prohibiting misrepresentation of the origin of that material, or requiring that modified versions of such material be marked in reasonable ways as different from the original version; or d) Limiting the use for publicity purposes of names of licensors or authors of the material; or e) Declining to grant rights under trademark law for use of some trade names, trademarks, or service marks; or f) Requiring indemnification of licensors and authors of that material by anyone who conveys the material (or modified versions of it) with contractual assumptions of liability to the recipient, for any liability that these contractual assumptions directly impose on those licensors and authors. All other non-permissive additional terms are considered "further restrictions" within the meaning of section 10. If the Program as you received it, or any part of it, contains a notice stating that it is governed by this License along with a term that is a further restriction, you may remove that term. If a license document contains a further restriction but permits relicensing or conveying under this License, you may add to a covered work material governed by the terms of that license document, provided that the further restriction does not survive such relicensing or conveying. 
If you add terms to a covered work in accord with this section, you must place, in the relevant source files, a statement of the additional terms that apply to those files, or a notice indicating where to find the applicable terms. Additional terms, permissive or non-permissive, may be stated in the form of a separately written license, or stated as exceptions; the above requirements apply either way. 8. Termination. You may not propagate or modify a covered work except as expressly provided under this License. Any attempt otherwise to propagate or modify it is void, and will automatically terminate your rights under this License (including any patent licenses granted under the third paragraph of section 11). However, if you cease all violation of this License, then your license from a particular copyright holder is reinstated (a) provisionally, unless and until the copyright holder explicitly and finally terminates your license, and (b) permanently, if the copyright holder fails to notify you of the violation by some reasonable means prior to 60 days after the cessation. Moreover, your license from a particular copyright holder is reinstated permanently if the copyright holder notifies you of the violation by some reasonable means, this is the first time you have received notice of violation of this License (for any work) from that copyright holder, and you cure the violation prior to 30 days after your receipt of the notice. Termination of your rights under this section does not terminate the licenses of parties who have received copies or rights from you under this License. If your rights have been terminated and not permanently reinstated, you do not qualify to receive new licenses for the same material under section 10. 9. Acceptance Not Required for Having Copies. You are not required to accept this License in order to receive or run a copy of the Program. 
Ancillary propagation of a covered work occurring solely as a consequence of using peer-to-peer transmission to receive a copy likewise does not require acceptance. However, nothing other than this License grants you permission to propagate or modify any covered work. These actions infringe copyright if you do not accept this License. Therefore, by modifying or propagating a covered work, you indicate your acceptance of this License to do so. 10. Automatic Licensing of Downstream Recipients. Each time you convey a covered work, the recipient automatically receives a license from the original licensors, to run, modify and propagate that work, subject to this License. You are not responsible for enforcing compliance by third parties with this License. An "entity transaction" is a transaction transferring control of an organization, or substantially all assets of one, or subdividing an organization, or merging organizations. If propagation of a covered work results from an entity transaction, each party to that transaction who receives a copy of the work also receives whatever licenses to the work the party's predecessor in interest had or could give under the previous paragraph, plus a right to possession of the Corresponding Source of the work from the predecessor in interest, if the predecessor has it or can get it with reasonable efforts. You may not impose any further restrictions on the exercise of the rights granted or affirmed under this License. For example, you may not impose a license fee, royalty, or other charge for exercise of rights granted under this License, and you may not initiate litigation (including a cross-claim or counterclaim in a lawsuit) alleging that any patent claim is infringed by making, using, selling, offering for sale, or importing the Program or any portion of it. 11. Patents. A "contributor" is a copyright holder who authorizes use under this License of the Program or a work on which the Program is based. 
The work thus licensed is called the contributor's "contributor version". A contributor's "essential patent claims" are all patent claims owned or controlled by the contributor, whether already acquired or hereafter acquired, that would be infringed by some manner, permitted by this License, of making, using, or selling its contributor version, but do not include claims that would be infringed only as a consequence of further modification of the contributor version. For purposes of this definition, "control" includes the right to grant patent sublicenses in a manner consistent with the requirements of this License. Each contributor grants you a non-exclusive, worldwide, royalty-free patent license under the contributor's essential patent claims, to make, use, sell, offer for sale, import and otherwise run, modify and propagate the contents of its contributor version. In the following three paragraphs, a "patent license" is any express agreement or commitment, however denominated, not to enforce a patent (such as an express permission to practice a patent or covenant not to sue for patent infringement). To "grant" such a patent license to a party means to make such an agreement or commitment not to enforce a patent against the party. If you convey a covered work, knowingly relying on a patent license, and the Corresponding Source of the work is not available for anyone to copy, free of charge and under the terms of this License, through a publicly available network server or other readily accessible means, then you must either (1) cause the Corresponding Source to be so available, or (2) arrange to deprive yourself of the benefit of the patent license for this particular work, or (3) arrange, in a manner consistent with the requirements of this License, to extend the patent license to downstream recipients. 
"Knowingly relying" means you have actual knowledge that, but for the patent license, your conveying the covered work in a country, or your recipient's use of the covered work in a country, would infringe one or more identifiable patents in that country that you have reason to believe are valid. If, pursuant to or in connection with a single transaction or arrangement, you convey, or propagate by procuring conveyance of, a covered work, and grant a patent license to some of the parties receiving the covered work authorizing them to use, propagate, modify or convey a specific copy of the covered work, then the patent license you grant is automatically extended to all recipients of the covered work and works based on it. A patent license is "discriminatory" if it does not include within the scope of its coverage, prohibits the exercise of, or is conditioned on the non-exercise of one or more of the rights that are specifically granted under this License. You may not convey a covered work if you are a party to an arrangement with a third party that is in the business of distributing software, under which you make payment to the third party based on the extent of your activity of conveying the work, and under which the third party grants, to any of the parties who would receive the covered work from you, a discriminatory patent license (a) in connection with copies of the covered work conveyed by you (or copies made from those copies), or (b) primarily for and in connection with specific products or compilations that contain the covered work, unless you entered into that arrangement, or that patent license was granted, prior to 28 March 2007. Nothing in this License shall be construed as excluding or limiting any implied license or other defenses to infringement that may otherwise be available to you under applicable patent law. 12. No Surrender of Others' Freedom. 
If conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. If you cannot convey a covered work so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not convey it at all. For example, if you agree to terms that obligate you to collect a royalty for further conveying from those to whom you convey the Program, the only way you could satisfy both those terms and this License would be to refrain entirely from conveying the Program. 13. Use with the GNU Affero General Public License. Notwithstanding any other provision of this License, you have permission to link or combine any covered work with a work licensed under version 3 of the GNU Affero General Public License into a single combined work, and to convey the resulting work. The terms of this License will continue to apply to the part which is the covered work, but the special requirements of the GNU Affero General Public License, section 13, concerning interaction through a network will apply to the combination as such. 14. Revised Versions of this License. The Free Software Foundation may publish revised and/or new versions of the GNU General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies that a certain numbered version of the GNU General Public License "or any later version" applies to it, you have the option of following the terms and conditions either of that numbered version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of the GNU General Public License, you may choose any version ever published by the Free Software Foundation. 
If the Program specifies that a proxy can decide which future versions of the GNU General Public License can be used, that proxy's public statement of acceptance of a version permanently authorizes you to choose that version for the Program. Later license versions may give you additional or different permissions. However, no additional obligations are imposed on any author or copyright holder as a result of your choosing to follow a later version. 15. Disclaimer of Warranty. THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 16. Limitation of Liability. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. 17. Interpretation of Sections 15 and 16. 
If the disclaimer of warranty and limitation of liability provided above cannot be given local legal effect according to their terms, reviewing courts shall apply local law that most closely approximates an absolute waiver of all civil liability in connection with the Program, unless a warranty or assumption of liability accompanies a copy of the Program in return for a fee. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5025573 flit-3.12.0/flit/license_templates/mit0000644000000000000000000000206514770461472014652 0ustar00The MIT License (MIT) Copyright (c) {year} {author} Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5025573 flit-3.12.0/flit/log.py0000644000000000000000000000772014770461472011574 0ustar00"""Nicer log formatting with colours. Code copied from Tornado, Apache licensed. 
""" # Copyright 2012 Facebook # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import logging import sys try: import curses except ImportError: curses = None def _stderr_supports_color(): color = False if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty(): try: curses.setupterm() if curses.tigetnum("colors") > 0: color = True except Exception: pass return color class LogFormatter(logging.Formatter): """Log formatter with colour support """ DEFAULT_COLORS = { logging.INFO: 2, # Green logging.WARNING: 3, # Yellow logging.ERROR: 1, # Red logging.CRITICAL: 1, } def __init__(self, color=True, datefmt=None): r""" :arg bool color: Enables color support. :arg string fmt: Log message format. It will be applied to the attributes dict of log records. The text between ``%(color)s`` and ``%(end_color)s`` will be colored depending on the level if color support is on. :arg dict colors: color mappings from logging level to terminal color code :arg string datefmt: Datetime format. Used for formatting ``(asctime)`` placeholder in ``prefix_fmt``. .. versionchanged:: 3.2 Added ``fmt`` and ``datefmt`` arguments. """ logging.Formatter.__init__(self, datefmt=datefmt) self._colors = {} if color and _stderr_supports_color(): # The curses module has some str/bytes confusion in # python3. Until version 3.2.3, most methods return # bytes, but only accept strings. In addition, we want to # output these strings with the logging module, which # works with unicode strings. 
The explicit calls to # unicode() below are harmless in python2 but will do the # right conversion in python 3. fg_color = (curses.tigetstr("setaf") or curses.tigetstr("setf") or "") if (3, 0) < sys.version_info < (3, 2, 3): fg_color = str(fg_color, "ascii") for levelno, code in self.DEFAULT_COLORS.items(): self._colors[levelno] = str(curses.tparm(fg_color, code), "ascii") self._normal = str(curses.tigetstr("sgr0"), "ascii") scr = curses.initscr() self.termwidth = scr.getmaxyx()[1] curses.endwin() else: self._normal = '' # Default width is usually 80, but too wide is worse than too narrow self.termwidth = 70 def formatMessage(self, record): l = len(record.message) right_text = '{initial}-{name}'.format(initial=record.levelname[0], name=record.name) if l + len(right_text) < self.termwidth: space = ' ' * (self.termwidth - (l + len(right_text))) else: space = ' ' if record.levelno in self._colors: start_color = self._colors[record.levelno] end_color = self._normal else: start_color = end_color = '' return record.message + space + start_color + right_text + end_color def enable_colourful_output(level=logging.INFO): handler = logging.StreamHandler() handler.setFormatter(LogFormatter()) logging.root.addHandler(handler) logging.root.setLevel(level) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5025573 flit-3.12.0/flit/logo.py0000644000000000000000000000070014770461472011742 0ustar00"""White and colored version for flit""" logo = """ ._ ._ ```. ```. .--.______ `. `-. `. / °,-—´ `. `~-.>.' / `. .` | -..;. / / /___ _____ /r_,.´| | | | ,' `/ |—— | | | .´ ,'/ | |__ | | .´ / . 
/ '__/|/ V {version} """ clogo = '\x1b[36m'+logo+'\x1b[39m' ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5025573 flit-3.12.0/flit/sdist.py0000644000000000000000000002017014770461472012133 0ustar00from collections import defaultdict import io import logging import os from pathlib import Path from posixpath import join as pjoin from pprint import pformat import tarfile from flit_core.sdist import SdistBuilder as SdistBuilderCore from flit_core.common import Module, VCSError from flit.vcs import identify_vcs log = logging.getLogger(__name__) # Our generated setup.py deliberately loads distutils, not setuptools, to # discourage running it directly and getting a setuptools mess. Tools like pip # handle this correctly - loading setuptools anyway but avoiding its issues. SETUP = """\ #!/usr/bin/env python # setup.py generated by flit for tools that don't yet use PEP 517 from distutils.core import setup {before} setup(name={name!r}, version={version!r}, description={description!r}, author={author!r}, author_email={author_email!r}, url={url!r}, {extra} ) """ def namespace_packages(module: Module): """Get parent package names""" name_parts = [] for part in module.namespace_package_name.split('.'): name_parts.append(part) yield '.'.join(name_parts) def auto_packages(module: Module): """Discover subpackages and package_data""" pkgdir = os.path.normpath(str(module.path)) pkg_name = module.name packages = [] if module.in_namespace_package: packages.extend(namespace_packages(module)) packages.append(pkg_name) pkg_data = defaultdict(list) # Undocumented distutils feature: the empty string matches all package names pkg_data[''].append('*') subpkg_paths = set() def find_nearest_pkg(rel_path): parts = rel_path.split(os.sep) for i in reversed(range(1, len(parts))): ancestor = '/'.join(parts[:i]) if ancestor in subpkg_paths: pkg = '.'.join([pkg_name] + parts[:i]) return pkg, '/'.join(parts[i:]) # Relative to the top-level package return 
pkg_name, rel_path for path, dirnames, filenames in os.walk(pkgdir, topdown=True): if os.path.basename(path) == '__pycache__': continue from_top_level = os.path.relpath(path, pkgdir) if from_top_level == '.': continue is_subpkg = '__init__.py' in filenames if is_subpkg: subpkg_paths.add(from_top_level) parts = from_top_level.split(os.sep) packages.append('.'.join([pkg_name] + parts)) else: pkg, from_nearest_pkg = find_nearest_pkg(from_top_level) pkg_data[pkg].append(pjoin(from_nearest_pkg, '*')) # Sort values in pkg_data pkg_data = {k: sorted(v) for (k, v) in pkg_data.items()} return sorted(packages), pkg_data def include_path(p): return not (p.startswith('dist' + os.sep) or (os.sep+'__pycache__' in p) or p.endswith('.pyc')) def _parse_req(requires_dist): """Parse "Foo (v); python_version == '2.x'" from Requires-Dist Returns pip-style appropriate for requirements.txt. """ if ';' in requires_dist: name_version, env_mark = requires_dist.split(';', 1) env_mark = env_mark.strip() else: name_version, env_mark = requires_dist, None if '(' in name_version: # turn 'name (X)' and 'name ('): version = '==' + version name_version = name + version return name_version, env_mark def convert_requires(reqs_by_extra): """Regroup requirements by (extra, env_mark)""" grouping = defaultdict(list) for extra, reqs in reqs_by_extra.items(): for req in reqs: name_version, env_mark = _parse_req(req) grouping[(extra, env_mark)].append(name_version) install_reqs = grouping.pop(('.none', None), []) extra_reqs = {} for (extra, env_mark), reqs in grouping.items(): if extra == '.none': extra = '' if env_mark is None: extra_reqs[extra] = reqs else: extra_reqs[extra + ':' + env_mark] = reqs return install_reqs, extra_reqs class SdistBuilder(SdistBuilderCore): """Build a complete sdist This extends the minimal sdist-building in flit_core: - Include any files tracked in version control, such as docs sources and tests. 
- Add a generated setup.py for compatibility with tools which don't yet know about PEP 517. """ use_vcs = True @classmethod def from_ini_path(cls, ini_path: Path, use_vcs=True): inst = super().from_ini_path(ini_path) inst.use_vcs = use_vcs return inst def select_files(self): if not self.use_vcs: return super().select_files() vcs_mod = identify_vcs(self.cfgdir) if vcs_mod is not None: untracked_deleted = vcs_mod.list_untracked_deleted_files(self.cfgdir) if any(include_path(p) and not self.excludes.match_file(p) for p in untracked_deleted): raise VCSError( "Untracked or deleted files in the source directory. " "Commit, undo or ignore these files in your VCS.", self.cfgdir) files = [os.path.normpath(p) for p in vcs_mod.list_tracked_files(self.cfgdir)] files = sorted(filter(include_path, files)) log.info("Found %d files tracked in %s", len(files), vcs_mod.name) else: files = super().select_files() return files def add_setup_py(self, files_to_add, target_tarfile): if 'setup.py' in files_to_add: log.warning( "Using setup.py from repository, not generating setup.py") else: setup_py = self.make_setup_py() log.info("Writing generated setup.py") ti = tarfile.TarInfo(pjoin(self.dir_name, 'setup.py')) ti.size = len(setup_py) target_tarfile.addfile(ti, io.BytesIO(setup_py)) def make_setup_py(self): before, extra = [], [] if self.module.is_package: packages, package_data = auto_packages(self.module) before.append("packages = \\\n%s\n" % pformat(sorted(packages))) before.append("package_data = \\\n%s\n" % pformat(package_data)) extra.append("packages=packages,") extra.append("package_data=package_data,") else: extra.append("py_modules={!r},".format([self.module.name])) if self.module.in_namespace_package: packages = list(namespace_packages(self.module)) before.append("packages = \\\n%s\n" % pformat(packages)) extra.append("packages=packages,") if self.module.prefix: package_dir = pformat({'': self.module.prefix}) before.append("package_dir = \\\n%s\n" % package_dir) 
        # (tail of a setup.py-generating method; its `def` line precedes this
        # chunk — `before` collects module-level assignments for the template,
        # `extra` collects keyword arguments for the setup() call)
        extra.append("package_dir=package_dir,")

        install_reqs, extra_reqs = convert_requires(self.reqs_by_extra)
        if install_reqs:
            before.append("install_requires = \\\n%s\n" % pformat(install_reqs))
            extra.append("install_requires=install_requires,")
        if extra_reqs:
            before.append("extras_require = \\\n%s\n" % pformat(extra_reqs))
            extra.append("extras_require=extras_require,")

        entrypoints = self.prep_entry_points()
        if entrypoints:
            before.append("entry_points = \\\n%s\n" % pformat(entrypoints))
            extra.append("entry_points=entry_points,")

        if self.metadata.requires_python:
            extra.append('python_requires=%r,' % self.metadata.requires_python)

        # Fill the SETUP template and return it as UTF-8 bytes
        return SETUP.format(
            before='\n'.join(before),
            name=self.metadata.name,
            version=self.metadata.version,
            description=self.metadata.summary,
            author=self.metadata.author,
            author_email=self.metadata.author_email,
            url=self.metadata.home_page,
            extra='\n '.join(extra),
        ).encode('utf-8')
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5025573
flit-3.12.0/flit/tomlify.py0000644000000000000000000000437114770461472012475 0ustar00"""Convert a flit.ini file to pyproject.toml
"""
import argparse
from collections import OrderedDict
import configparser
import os
from pathlib import Path

import tomli_w

from .config import metadata_list_fields

TEMPLATE = """\
[build-system]
requires = ["flit_core >=2,<4"]
build-backend = "flit_core.buildapi"

[tool.flit.metadata]
{metadata}
"""

class CaseSensitiveConfigParser(configparser.ConfigParser):
    # Preserve the case of option names — ConfigParser lowercases by default,
    # but entry point names are case sensitive.
    optionxform = staticmethod(str)

def convert(path):
    """Read a flit.ini file at `path` and write an equivalent pyproject.toml.

    Also folds in scripts and an external entry points file if present.
    """
    cp = configparser.ConfigParser()
    with path.open(encoding='utf-8') as f:
        cp.read_file(f)

    ep_file = Path('entry_points.txt')
    metadata = OrderedDict()
    for name, value in cp['metadata'].items():
        if name in metadata_list_fields:
            # Multi-line list field: one entry per non-blank line
            metadata[name] = [l for l in value.splitlines() if l.strip()]
        elif name == 'entry-points-file':
            ep_file = Path(value)
        else:
            metadata[name] = value

    if 'scripts' in cp:
        scripts = OrderedDict(cp['scripts'])
    else:
        scripts = {}

    entrypoints = CaseSensitiveConfigParser()
    if ep_file.is_file():
        with ep_file.open(encoding='utf-8') as f:
            entrypoints.read_file(f)

    written_entrypoints = False
    with Path('pyproject.toml').open('w', encoding='utf-8') as f:
        f.write(TEMPLATE.format(metadata=tomli_w.dumps(metadata)))

        if scripts:
            f.write('\n[tool.flit.scripts]\n')
            f.write(tomli_w.dumps(scripts))

        for groupname, group in entrypoints.items():
            if not dict(group):
                continue
            if '.' in groupname:
                # Dotted group names must be quoted in TOML table headers
                groupname = '"{}"'.format(groupname)
            f.write('\n[tool.flit.entrypoints.{}]\n'.format(groupname))
            f.write(tomli_w.dumps(OrderedDict(group)))
            written_entrypoints = True

    print("Written 'pyproject.toml'")
    files = str(path)
    if written_entrypoints:
        files += ' and ' + str(ep_file)
    print("Please check the new file, then remove", files)

def main(argv=None):
    # CLI entry point: convert the given (or default) flit.ini in place
    ap = argparse.ArgumentParser()
    ap.add_argument('-f', '--ini-file', type=Path, default='flit.ini')
    args = ap.parse_args(argv)
    os.chdir(str(args.ini_file.parent))
    convert(Path(args.ini_file.name))

if __name__ == '__main__':
    main()
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5035572
flit-3.12.0/flit/upload.py0000644000000000000000000002445214770461472012300 0ustar00"""Code to communicate with PyPI to register distributions and upload files.

This is cribbed heavily from distutils.command.(upgrade|register), which as
part of Python is under the PSF license.
"""
import configparser
import getpass
import hashlib
import logging
import os
from pathlib import Path
import re
import requests
import sys
from dataclasses import dataclass
from typing import Optional
from urllib.parse import urlparse

from flit_core.common import make_metadata, Metadata, Module
from .config import read_flit_config

log = logging.getLogger(__name__)

PYPI = "https://upload.pypi.org/legacy/"
PYPIRC_DEFAULT = "~/.pypirc"

SWITCH_TO_HTTPS = (
    "http://pypi.python.org/",
    "http://testpypi.python.org/",
    "http://upload.pypi.org/",
)

@dataclass
class RepoDetails:
    # url: upload endpoint; username/password may stay None until resolved
    url: str
    username: Optional[str] = None
    password: Optional[str] = None

    @property
    def is_pypi(self):
        # Main PyPI (pypi.org) or TestPyPI (test.pypi.org)
        # This is used to guess the URL for the project's HTML page
        return self.url.rstrip('/').endswith('/legacy')

def get_repositories(file="~/.pypirc"):
    """Get the known repositories from a pypirc file.

    This returns a dict keyed by name, of RepoDetails objects with url,
    username and password. Username and password may be None.
    """
    cp = configparser.ConfigParser(interpolation=None)
    if isinstance(file, str):
        file = os.path.expanduser(file)
        if not os.path.isfile(file):
            # No config file: fall back to the default PyPI entry
            return {'pypi': RepoDetails(url=PYPI)}
        cp.read(file)
    else:
        # Already an open file-like object
        cp.read_file(file)

    names = cp.get('distutils', 'index-servers', fallback='pypi').split()

    repos = {}
    for name in names:
        repos[name] = RepoDetails(
            url=cp.get(name, 'repository', fallback=PYPI),
            username=cp.get(name, 'username', fallback=None),
            password=cp.get(name, 'password', fallback=None),
        )

    return repos

def get_repository(pypirc_path="~/.pypirc", name=None, project_name=None):
    """Get the url, username and password for one repository.

    Returns a RepoDetails object with url, username and password set.

    There is a hierarchy of possible sources of information:

    Index URL:
    1. Command line arg --repository (looked up in .pypirc)
    2. $FLIT_INDEX_URL
    3. Repository called 'pypi' from .pypirc
    4. Default PyPI (hardcoded)

    Username:
    1. $FLIT_USERNAME
    2. Repository with specified name (default 'pypi') in .pypirc
    3. Terminal prompt (write to .pypirc if it doesn't exist yet)

    Password:
    1. $FLIT_PASSWORD
    2. Repository with specified name (default 'pypi') in .pypirc
    3. keyring - pypi_token:project:<project_name>
    4. keyring - pypi_token:user:<username>
    5. keyring - <username>
    6. Terminal prompt (store to keyring if available)
    """
    log.debug("Loading repositories config from %r", pypirc_path)
    repos_cfg = get_repositories(pypirc_path)

    if name is not None:
        repo = repos_cfg[name]
        if 'FLIT_INDEX_URL' in os.environ:
            raise EnvironmentError(
                "Use either FLIT_INDEX_URL or --repository, not both"
            )
    elif 'FLIT_INDEX_URL' in os.environ:
        repo = RepoDetails(url=os.environ['FLIT_INDEX_URL'])
    elif 'pypi' in repos_cfg:
        repo = repos_cfg['pypi']
    else:
        repo = RepoDetails(url=PYPI)

    if repo.url.startswith(SWITCH_TO_HTTPS):
        # Use https for PyPI, even if an http URL was given
        repo.url = 'https' + repo.url[4:]
    elif repo.url.startswith('http://'):
        log.warning("Unencrypted connection - credentials may be visible on "
                    "the network.")
    log.info("Using repository at %s", repo.url)

    if 'FLIT_USERNAME' in os.environ:
        repo.username = os.environ['FLIT_USERNAME']
    if not repo.username and sys.stdin.isatty():
        while not repo.username:
            repo.username = input("Username: ")
        if repo.url == PYPI:
            # Remember the username for next time
            write_pypirc(repo, pypirc_path)
    elif not repo.username:
        raise Exception("Could not find username for upload.")

    if 'FLIT_PASSWORD' in os.environ:
        repo.password = os.environ['FLIT_PASSWORD']

    if not repo.password:
        # Prefer an API token (keyring) over an interactive password
        token = find_token(repo, project_name)
        if token is not None:
            repo.username = '__token__'
            repo.password = token
        else:
            repo.password = get_password(repo)

    return repo

def write_pypirc(repo, file="~/.pypirc"):
    """Write .pypirc if it doesn't already exist
    """
    file = os.path.expanduser(file)
    if os.path.isfile(file):
        return

    with open(file, 'w', encoding='utf-8') as f:
        f.write("[pypi]\n"
                "username = %s\n" % repo.username)

def get_password(repo: RepoDetails):
    # Try keyring first, then prompt interactively; store back to keyring
    # on success so the prompt is only needed once.
    try:
        import keyring, keyring.errors
    except ImportError:  # pragma: no cover
        log.warning("Install keyring to store tokens/passwords securely")
        keyring = None
    else:
        try:
            stored_pw = keyring.get_password(repo.url, repo.username)
            if stored_pw is not None:
                return stored_pw
        except keyring.errors.KeyringError as e:
            log.warning("Could not get password from keyring (%s)", e)

    if sys.stdin.isatty():
        pw = None
        while not pw:
            print('Server :', repo.url)
            print('Username:', repo.username)
            pw = getpass.getpass('Password: ')
    else:
        raise Exception("Could not find password for upload.")

    if keyring is not None:
        try:
            keyring.set_password(repo.url, repo.username, pw)
            log.info("Stored password with keyring")
        except keyring.errors.KeyringError as e:
            log.warning("Could not store password in keyring (%s)", e)

    return pw

def find_token(repo: RepoDetails, project_name: str):
    """Look up a PyPI API token in keyring; returns the token or None."""
    # https://packaging.python.org/en/latest/specifications/name-normalization/
    project_name = re.sub(r"[-_.]+", "-", project_name).lower()
    # Project-scoped token first, then a user-scoped one
    candidate_keys = [f"pypi_token:project:{project_name}"]
    if repo.username is not None:
        candidate_keys.append(f"pypi_token:user:{repo.username}")

    try:
        import keyring, keyring.errors
    except ImportError:  # pragma: no cover
        pass
    else:
        try:
            for key in candidate_keys:
                token = keyring.get_password(repo.url, key)
                if token is not None:
                    return token
        except keyring.errors.KeyringError as e:
            log.warning("Could not get token from keyring (%s)", e)

def build_post_data(action, metadata:Metadata):
    """Prepare the metadata needed for requests to PyPI.
    """
    d = {
        ":action": action,
        "name": metadata.name,
        "version": metadata.version,

        # additional meta-data
        "metadata_version": '2.3',
        "summary": metadata.summary,
        "home_page": metadata.home_page,
        "author": metadata.author,
        "author_email": metadata.author_email,
        "maintainer": metadata.maintainer,
        "maintainer_email": metadata.maintainer_email,
        "license": metadata.license,
        "description": metadata.description,
        "keywords": metadata.keywords,
        "platform": metadata.platform,
        "classifiers": metadata.classifiers,
        "download_url": metadata.download_url,
        "supported_platform": metadata.supported_platform,

        # Metadata 1.1 (PEP 314)
        "provides": metadata.provides,
        "requires": metadata.requires,
        "obsoletes": metadata.obsoletes,

        # Metadata 1.2 (PEP 345)
        "project_urls": metadata.project_urls,
        "provides_dist": metadata.provides_dist,
        "obsoletes_dist": metadata.obsoletes_dist,
        "requires_dist": metadata.requires_dist,
        "requires_external": metadata.requires_external,
        "requires_python": metadata.requires_python,

        # Metadata 2.1 (PEP 566)
        "description_content_type": metadata.description_content_type,
        "provides_extra": metadata.provides_extra,
    }

    # Drop empty/None fields — the index rejects blank values
    return {k:v for k,v in d.items() if v}

def upload_file(file:Path, metadata:Metadata, repo: RepoDetails):
    """Upload a file to an index server, given the index server details.
    """
    data = build_post_data('file_upload', metadata)
    data['protocol_version'] = '1'
    if file.suffix == '.whl':
        data['filetype'] = 'bdist_wheel'
        # Tag py2.py3 unless requires_python pins Python 3
        py2_support = not (metadata.requires_python or '')\
                                    .startswith(('3', '>3', '>=3'))
        data['pyversion'] = ('py2.' if py2_support else '') + 'py3'
    else:
        data['filetype'] = 'sdist'

    with file.open('rb') as f:
        content = f.read()
        files = {'content': (file.name, content)}
        data['md5_digest'] = hashlib.md5(content).hexdigest()
        data['sha256_digest'] = hashlib.sha256(content).hexdigest()

    log.info('Uploading %s...', file)
    resp = requests.post(
        repo.url,
        data=data,
        files=files,
        auth=(repo.username, repo.password),
    )
    resp.raise_for_status()

def do_upload(file:Path, metadata:Metadata, repo: RepoDetails):
    """Upload a file to an index server.
    """
    upload_file(file, metadata, repo)

    if repo.is_pypi:
        # e.g. upload.pypi.org -> pypi.org/project/<name>/
        domain = urlparse(repo.url).netloc
        if domain.startswith('upload.'):
            domain = domain[7:]
        log.info("Package is at https://%s/project/%s/", domain, metadata.name)
    else:
        log.info("Package is at %s/%s", repo.url, metadata.name)

def main(ini_path, repo_name, pypirc_path=None, formats=None,
         gen_setup_py=True, use_vcs=True):
    """Build and upload wheel and sdist."""
    if pypirc_path is None:
        pypirc_path = PYPIRC_DEFAULT
    elif not os.path.isfile(pypirc_path):
        raise FileNotFoundError("The specified pypirc config file does not exist.")

    ini_info = read_flit_config(ini_path)
    srcdir = ini_path.parent
    module = Module(ini_info.module, srcdir)
    metadata = make_metadata(module, ini_info)
    # Resolve credentials before building, so a bad config fails fast
    repo = get_repository(pypirc_path, repo_name, project_name=metadata.name)

    from . import build
    built = build.main(
        ini_path, formats=formats, gen_setup_py=gen_setup_py, use_vcs=use_vcs
    )

    if built.wheel is not None:
        do_upload(built.wheel.file, built.wheel.builder.metadata, repo)
    if built.sdist is not None:
        do_upload(built.sdist.file, built.sdist.builder.metadata, repo)
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5035572
flit-3.12.0/flit/validate.py0000644000000000000000000002345714770461472012605 0ustar00"""Validate various pieces of packaging data"""

import errno
import io
import logging
import os
from pathlib import Path
import re
import requests
import sys

from .vendorized.readme.rst import render

log = logging.getLogger(__name__)

CUSTOM_CLASSIFIERS = frozenset({
    # https://github.com/pypa/warehouse/pull/5440
    'Private :: Do Not Upload',
})

def get_cache_dir() -> Path:
    """Locate a platform-appropriate cache directory for flit to use

    Does not ensure that the cache directory exists.
    """
    # Linux, Unix, AIX, etc.
    if os.name == 'posix' and sys.platform != 'darwin':
        # use ~/.cache if empty OR not set
        xdg = os.environ.get("XDG_CACHE_HOME", None) \
              or os.path.expanduser('~/.cache')
        return Path(xdg, 'flit')

    # Mac OS
    elif sys.platform == 'darwin':
        return Path(os.path.expanduser('~'), 'Library/Caches/flit')

    # Windows (hopefully)
    else:
        local = os.environ.get('LOCALAPPDATA', None) \
                or os.path.expanduser('~\\AppData\\Local')
        return Path(local, 'flit')

def _read_classifiers_cached():
    """Reads classifiers from cached file"""
    with (get_cache_dir() / 'classifiers.lst').open(encoding='utf-8') as f:
        valid_classifiers = set(l.strip() for l in f)
    return valid_classifiers

def _download_and_cache_classifiers():
    """Get the list of valid trove classifiers from PyPI"""
    log.info('Fetching list of valid trove classifiers')
    resp = requests.get(
        'https://pypi.org/pypi?%3Aaction=list_classifiers')
    resp.raise_for_status()

    cache_dir = get_cache_dir()
    try:
        cache_dir.mkdir(parents=True)
    except (FileExistsError, PermissionError):
        pass
    except OSError as e:
        # readonly mounted file raises OSError, only these should be captured
        if e.errno != errno.EROFS:
            raise

    try:
        with (cache_dir / 'classifiers.lst').open('wb') as f:
            f.write(resp.content)
    except (PermissionError, FileNotFoundError):
        # cache file could not be created
        pass
    except OSError as e:
        # readonly mounted file raises OSError, only these should be captured
        if e.errno != errno.EROFS:
            raise

    valid_classifiers = set(l.strip() for l in resp.text.splitlines())
    return valid_classifiers

def _verify_classifiers(classifiers, valid_classifiers):
    """Check classifiers against a set of known classifiers"""
    invalid = classifiers - valid_classifiers
    return ["Unrecognised classifier: {!r}".format(c)
            for c in sorted(invalid)]

def validate_classifiers(classifiers):
    """Verify trove classifiers from config file.

    Fetches and caches a list of known classifiers from PyPI. Setting the
    environment variable FLIT_NO_NETWORK=1 will skip this if the classifiers
    are not already cached.
    """
    if not classifiers:
        return []

    problems = []
    classifiers = set(classifiers)
    try:
        valid_classifiers = _read_classifiers_cached()
        valid_classifiers.update(CUSTOM_CLASSIFIERS)
        problems = _verify_classifiers(classifiers, valid_classifiers)
    except (FileNotFoundError, PermissionError) as e1:
        # We haven't yet got the classifiers cached or couldn't read it
        pass
    else:
        if not problems:
            return []

    # Either we don't have the list, or there were unexpected classifiers
    # which might have been added since we last fetched it. Fetch and cache.
if os.environ.get('FLIT_NO_NETWORK', ''): log.warning( "Not checking classifiers, because FLIT_NO_NETWORK is set") return [] # Try to download up-to-date list of classifiers try: valid_classifiers = _download_and_cache_classifiers() except requests.ConnectionError: # The error you get on a train, going through Oregon, without wifi log.warning( "Couldn't get list of valid classifiers to check against") return problems valid_classifiers.update(CUSTOM_CLASSIFIERS) return _verify_classifiers(classifiers, valid_classifiers) def validate_entrypoints(entrypoints): """Check that the loaded entrypoints are valid. Expects a dict of dicts, e.g.:: {'console_scripts': {'flit': 'flit:main'}} """ def _is_identifier_attr(s): return all(n.isidentifier() for n in s.split('.')) problems = [] for groupname, group in entrypoints.items(): for k, v in group.items(): if ':' in v: mod, obj = v.split(':', 1) valid = _is_identifier_attr(mod) and _is_identifier_attr(obj) else: valid = _is_identifier_attr(v) if not valid: problems.append('Invalid entry point in group {}: ' '{} = {}'.format(groupname, k, v)) return problems # Distribution name, not quite the same as a Python identifier NAME = re.compile(r'^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$', re.IGNORECASE) r'' VERSION_SPEC = re.compile(r'(~=|===?|!=|<=?|>=?)\s*[A-Z0-9\-_.*+!]+$', re.IGNORECASE) REQUIREMENT = re.compile(NAME.pattern[:-1] + # Trim '$' r"""\s*(?P\[.*\])? \s*(?P[(=~<>!@][^;]*)? \s*(?P;.*)? 
     $""", re.IGNORECASE | re.VERBOSE)

# Comparison / membership operators allowed in environment markers
MARKER_OP = re.compile(r'(~=|===?|!=|<=?|>=?|\s+in\s+|\s+not in\s+)')

def validate_name(metadata):
    # Distribution name must match the NAME pattern above
    name = metadata.get('name', None)
    if name is None or NAME.match(name):
        return []
    return ['Invalid name: {!r}'.format(name)]

def _valid_version_specifier(s):
    # Every comma-separated clause must match VERSION_SPEC
    for clause in s.split(','):
        if not VERSION_SPEC.match(clause.strip()):
            return False
    return True

def validate_requires_python(metadata):
    # requires_python is optional; if present it must be a valid specifier
    spec = metadata.get('requires_python', None)
    if spec is None or _valid_version_specifier(spec):
        return []
    return ['Invalid requires-python: {!r}'.format(spec)]

# Variable names permitted in PEP 508 environment markers
MARKER_VARS = {
    'python_version', 'python_full_version', 'os_name', 'sys_platform',
    'platform_release', 'platform_system', 'platform_version',
    'platform_machine', 'platform_python_implementation',
    'implementation_name', 'implementation_version', 'extra',
}

def validate_environment_marker(em):
    """Check one environment marker string; returns a list of problems."""
    clauses = re.split(r'\s+(?:and|or)\s+', em)
    problems = []
    for c in clauses:
        # TODO: validate parentheses properly. They're allowed by PEP 508.
parts = MARKER_OP.split(c.strip('()')) if len(parts) != 3: problems.append("Invalid expression in environment marker: {!r}".format(c)) continue l, op, r = parts for var in (l.strip(), r.strip()): if var[:1] in {'"', "'"}: if len(var) < 2 or var[-1:] != var[:1]: problems.append("Invalid string in environment marker: {}".format(var)) elif var not in MARKER_VARS: problems.append("Invalid variable name in environment marker: {!r}".format(var)) return problems def validate_requires_dist(metadata): probs = [] for req in metadata.get('requires_dist', []): m = REQUIREMENT.match(req) if not m: probs.append("Could not parse requirement: {!r}".format(req)) continue extras, version, envmark = m.group('extras', 'version', 'envmark') if not (extras is None or all(NAME.match(e.strip()) for e in extras[1:-1].split(','))): probs.append("Invalid extras in requirement: {!r}".format(req)) if version is not None: if version.startswith('(') and version.endswith(')'): version = version[1:-1] if version.startswith('@'): pass # url specifier TODO: validate URL elif not _valid_version_specifier(version): print((extras, version, envmark)) probs.append("Invalid version specifier {!r} in requirement {!r}" .format(version, req)) if envmark is not None: probs.extend(validate_environment_marker(envmark[1:])) return probs def validate_url(url): if url is None: return [] probs = [] if not url.startswith(('http://', 'https://')): probs.append("URL {!r} doesn't start with https:// or http://" .format(url)) elif not url.split('//', 1)[1]: probs.append("URL missing address") return probs def validate_project_urls(metadata): probs = [] for prurl in metadata.get('project_urls', []): name, url = prurl.split(',', 1) url = url.lstrip() if not name: probs.append("No name for project URL {!r}".format(url)) elif len(name) > 32: probs.append("Project URL name {!r} longer than 32 characters" .format(name)) probs.extend(validate_url(url)) return probs def validate_readme_rst(metadata): mimetype = 
        metadata.get('description_content_type', '')

    if mimetype != 'text/x-rst':
        return []  # rst check

    # Render the description with the vendorized readme renderer; a falsy
    # result means PyPI would fall back to plain text.
    raw_desc = metadata.get('description', '')
    stream = io.StringIO()
    res = render(raw_desc, stream)
    if not res:
        return [
            ("The file description seems not to be valid rst for PyPI;"
             " it will be interpreted as plain text"),
            stream.getvalue(),
        ]

    return []  # rst rendered OK

def validate_config(config_info):
    """Run all metadata validators, log each problem, return the full list."""
    i = config_info
    problems = sum([
        validate_classifiers(i.metadata.get('classifiers')),
        validate_entrypoints(i.entrypoints),
        validate_name(i.metadata),
        validate_requires_python(i.metadata),
        validate_requires_dist(i.metadata),
        validate_url(i.metadata.get('home_page', None)),
        validate_project_urls(i.metadata),
        validate_readme_rst(i.metadata)
    ], [])

    for p in problems:
        log.error(p)

    return problems
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5035572
flit-3.12.0/flit/vcs/__init__.py0000644000000000000000000000047214770461472013342 0ustar00from pathlib import Path

from . import hg
from . import git

def identify_vcs(directory: Path):
    # Walk up from `directory`; return the vcs module (git or hg) whose
    # control directory is found first, or None if neither is present.
    directory = directory.resolve()
    for p in [directory] + list(directory.parents):
        if (p / '.git').is_dir():
            return git
        if (p / '.hg').is_dir():
            return hg

    return None
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5035572
flit-3.12.0/flit/vcs/git.py0000644000000000000000000000107614770461472012367 0ustar00import os
from subprocess import check_output

name = 'git'

def list_tracked_files(directory):
    # All tracked files, NUL-separated to survive odd filenames,
    # decoded with os.fsdecode.
    outb = check_output(['git', 'ls-files', '--recurse-submodules', '-z'],
                        cwd=str(directory))
    return [os.fsdecode(l) for l in outb.strip(b'\0').split(b'\0') if l]

def list_untracked_deleted_files(directory):
    # Deleted-but-tracked plus untracked (not ignored) files
    outb = check_output(['git', 'ls-files', '--deleted', '--others',
                         '--exclude-standard', '-z'],
                        cwd=str(directory))
    return [os.fsdecode(l) for l in outb.strip(b'\0').split(b'\0') if l]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5035572
flit-3.12.0/flit/vcs/hg.py0000644000000000000000000000235214770461472012200 0ustar00import os
from subprocess import check_output

name = 'hg'

def find_repo_root(directory):
    # Walk upwards to the directory containing '.hg'; None if not found
    for p in [directory] + list(directory.parents):
        if (p / '.hg').is_dir():
            return p

def _repo_paths_to_directory_paths(paths, directory):
    # 'hg status' gives paths from repo root, which may not be our directory.
    directory = directory.resolve()
    repo = find_repo_root(directory)
    if directory != repo:
        # Keep only paths under our subdirectory, with the prefix stripped
        directory_in_repo = str(directory.relative_to(repo)) + os.sep
        ix = len(directory_in_repo)
        paths = [p[ix:] for p in paths
                 if os.path.normpath(p).startswith(directory_in_repo)]
    return paths

def list_tracked_files(directory):
    # Clean, added and modified files known to Mercurial
    outb = check_output(['hg', 'status', '--clean', '--added', '--modified',
                         '--no-status'], cwd=str(directory))
    paths = [os.fsdecode(l) for l in outb.strip().splitlines()]
    return _repo_paths_to_directory_paths(paths, directory)

def list_untracked_deleted_files(directory):
    # Unknown (untracked) and deleted files per 'hg status'
    outb = check_output(['hg', 'status', '--unknown', '--deleted',
                         '--no-status'], cwd=str(directory))
    paths = [os.fsdecode(l) for l in outb.strip().splitlines()]
    return _repo_paths_to_directory_paths(paths, directory)
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5035572
flit-3.12.0/flit/vendorized/__init__.py0000644000000000000000000000000014770461472014703 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5035572
flit-3.12.0/flit/vendorized/readme/__init__.py0000644000000000000000000000000014770461472016140 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5035572
flit-3.12.0/flit/vendorized/readme/clean.py0000644000000000000000000000011614770461472015473 0ustar00## shim readme clean to simplify vendorizing of readme.rst
clean = lambda x:x
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5035572
flit-3.12.0/flit/vendorized/readme/rst.py0000644000000000000000000001012414770461472015221 0ustar00# Copyright 2014 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # Copied from https://github.com/pypa/readme_renderer # Commit 5b455a9c5bafc1732dafad9619bcbfa8e15432c9 from __future__ import absolute_import, division, print_function import io import os.path from docutils.core import publish_parts from docutils.writers.html4css1 import HTMLTranslator, Writer from docutils.utils import SystemMessage from .clean import clean class ReadMeHTMLTranslator(HTMLTranslator): def depart_image(self, node): uri = node["uri"] ext = os.path.splitext(uri)[1].lower() # we need to swap RST's use of `object` with `img` tags # see http://git.io/5me3dA if ext == ".svg": # preserve essential attributes atts = {} for attribute, value in node.attributes.items(): # we have no time for empty values if value: if attribute == "uri": atts["src"] = value else: atts[attribute] = value # toss off `object` tag self.body.pop() # add on `img` with attributes self.body.append(self.starttag(node, "img", **atts)) SETTINGS = { # Cloaking email addresses provides a small amount of additional # privacy protection for email addresses inside of a chunk of ReST. "cloak_email_addresses": True, # Prevent a lone top level heading from being promoted to document # title, and thus second level headings from being promoted to top # level. "doctitle_xform": True, # Prevent a lone subsection heading from being promoted to section # title, and thus second level headings from being promoted to top # level. "sectsubtitle_xform": True, # Set our initial header level "initial_header_level": 2, # Prevent local files from being included into the rendered output. 
# This is a security concern because people can insert files # that are part of the system, such as /etc/passwd. "file_insertion_enabled": False, # Halt rendering and throw an exception if there was any errors or # warnings from docutils. "halt_level": 2, # Output math blocks as LaTeX that can be interpreted by MathJax for # a prettier display of Math formulas. "math_output": "MathJax", # Disable raw html as enabling it is a security risk, we do not want # people to be able to include any old HTML in the final output. "raw_enabled": False, # Disable all system messages from being reported. "report_level": 5, # Use typographic quotes, and transform --, ---, and ... into their # typographic counterparts. "smart_quotes": True, # Strip all comments from the rendered output. "strip_comments": True, # PATCH FOR FLIT ---------------------------------- # Disable syntax highlighting so we don't need Pygments installed. "syntax_highlight": "none", # ------------------------------------------------- } def render(raw, stream=None): if stream is None: # Use a io.StringIO as the warning stream to prevent warnings from # being printed to sys.stderr. 
stream = io.StringIO() settings = SETTINGS.copy() settings["warning_stream"] = stream writer = Writer() writer.translator_class = ReadMeHTMLTranslator try: parts = publish_parts(raw, writer=writer, settings_overrides=settings) except SystemMessage: rendered = None else: rendered = parts.get("fragment") if rendered: return clean(rendered) else: return None ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5035572 flit-3.12.0/flit/wheel.py0000644000000000000000000000042714770461472012114 0ustar00import logging import flit_core.wheel as core_wheel log = logging.getLogger(__name__) def make_wheel_in(ini_path, wheel_directory, editable=False): return core_wheel.make_wheel_in(ini_path, wheel_directory, editable) class WheelBuilder(core_wheel.WheelBuilder): pass ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5035572 flit-3.12.0/flit_core/LICENSE0000644000000000000000000000276514770461472012462 0ustar00Copyright (c) 2015, Thomas Kluyver and contributors All rights reserved. BSD 3-clause license: Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5035572 flit-3.12.0/flit_core/README.rst0000644000000000000000000000051514770461472013133 0ustar00flit_core --------- This provides a `PEP 517 `_ build backend for packages using `Flit `_. The only public interface is the API specified by PEP 517, at ``flit_core.buildapi``. See the `Flit documentation `_ for more information. ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5035572 flit-3.12.0/flit_core/bootstrap_install.py0000644000000000000000000000331614770461472015563 0ustar00"""Install flit_core without using any other tools. Normally, you would install flit_core with pip like any other Python package. This script is meant to help with 'bootstrapping' other packaging systems, where you may need flit_core to build other packaging tools. Use 'python -m flit_core.wheel' to make a wheel, then: python bootstrap_install.py flit_core-3.6.0-py3-none-any.whl To install for something other than the Python running the script, pass a site-packages or equivalent directory with the --installdir option. 
""" import argparse import sys import sysconfig from pathlib import Path from zipfile import ZipFile def extract_wheel(whl_path, dest): print("Installing to", dest) with ZipFile(whl_path) as zf: zf.extractall(dest) if __name__ == "__main__": parser = argparse.ArgumentParser() parser.add_argument( 'wheel', type=Path, help=f'flit_core wheel to install (.whl file)', ) purelib = Path(sysconfig.get_path('purelib')).resolve() parser.add_argument( '--installdir', '-i', type=Path, default=purelib, help=f'installdir directory (defaults to {purelib})', ) parser.add_argument( '--install-root', type=Path, default=None, help='if given, installdir is considered to be under this' ) args = parser.parse_args() if not args.wheel.name.startswith('flit_core-'): sys.exit("Use this script only for flit_core wheels") if args.install_root: installdir = args.install_root / args.installdir.relative_to("/") else: installdir = args.installdir installdir.mkdir(parents=True, exist_ok=True) extract_wheel(args.wheel, installdir) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5035572 flit-3.12.0/flit_core/build_dists.py0000644000000000000000000000064514770461472014327 0ustar00"""Build flit_core to upload to PyPI. Normally, this should only be used by me when making a release. """ import os from flit_core import buildapi os.chdir(os.path.dirname(os.path.abspath(__file__))) print("Building sdist") sdist_fname = buildapi.build_sdist('dist/') print(os.path.join('dist', sdist_fname)) print("\nBuilding wheel") whl_fname = buildapi.build_wheel('dist/') print(os.path.join('dist', whl_fname)) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5035572 flit-3.12.0/flit_core/flit_core/__init__.py0000644000000000000000000000034114770461472015520 0ustar00"""Flit's core machinery for building packages. This package provides a standard PEP 517 API to build packages using Flit. 
All the convenient development features live in the main 'flit' package. """ __version__ = '3.12.0' ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5035572 flit-3.12.0/flit_core/flit_core/_spdx_data.py0000644000000000000000000006162514770461472016103 0ustar00# This file is generated from SPDX license data; don't edit it manually. licenses = \ {'0bsd': {'id': '0BSD'}, '3d-slicer-1.0': {'id': '3D-Slicer-1.0'}, 'aal': {'id': 'AAL'}, 'abstyles': {'id': 'Abstyles'}, 'adacore-doc': {'id': 'AdaCore-doc'}, 'adobe-2006': {'id': 'Adobe-2006'}, 'adobe-display-postscript': {'id': 'Adobe-Display-PostScript'}, 'adobe-glyph': {'id': 'Adobe-Glyph'}, 'adobe-utopia': {'id': 'Adobe-Utopia'}, 'adsl': {'id': 'ADSL'}, 'afl-1.1': {'id': 'AFL-1.1'}, 'afl-1.2': {'id': 'AFL-1.2'}, 'afl-2.0': {'id': 'AFL-2.0'}, 'afl-2.1': {'id': 'AFL-2.1'}, 'afl-3.0': {'id': 'AFL-3.0'}, 'afmparse': {'id': 'Afmparse'}, 'agpl-1.0-only': {'id': 'AGPL-1.0-only'}, 'agpl-1.0-or-later': {'id': 'AGPL-1.0-or-later'}, 'agpl-3.0-only': {'id': 'AGPL-3.0-only'}, 'agpl-3.0-or-later': {'id': 'AGPL-3.0-or-later'}, 'aladdin': {'id': 'Aladdin'}, 'amd-newlib': {'id': 'AMD-newlib'}, 'amdplpa': {'id': 'AMDPLPA'}, 'aml': {'id': 'AML'}, 'aml-glslang': {'id': 'AML-glslang'}, 'ampas': {'id': 'AMPAS'}, 'antlr-pd': {'id': 'ANTLR-PD'}, 'antlr-pd-fallback': {'id': 'ANTLR-PD-fallback'}, 'any-osi': {'id': 'any-OSI'}, 'any-osi-perl-modules': {'id': 'any-OSI-perl-modules'}, 'apache-1.0': {'id': 'Apache-1.0'}, 'apache-1.1': {'id': 'Apache-1.1'}, 'apache-2.0': {'id': 'Apache-2.0'}, 'apafml': {'id': 'APAFML'}, 'apl-1.0': {'id': 'APL-1.0'}, 'app-s2p': {'id': 'App-s2p'}, 'apsl-1.0': {'id': 'APSL-1.0'}, 'apsl-1.1': {'id': 'APSL-1.1'}, 'apsl-1.2': {'id': 'APSL-1.2'}, 'apsl-2.0': {'id': 'APSL-2.0'}, 'arphic-1999': {'id': 'Arphic-1999'}, 'artistic-1.0': {'id': 'Artistic-1.0'}, 'artistic-1.0-cl8': {'id': 'Artistic-1.0-cl8'}, 'artistic-1.0-perl': {'id': 'Artistic-1.0-Perl'}, 'artistic-2.0': {'id': 
'Artistic-2.0'}, 'aswf-digital-assets-1.0': {'id': 'ASWF-Digital-Assets-1.0'}, 'aswf-digital-assets-1.1': {'id': 'ASWF-Digital-Assets-1.1'}, 'baekmuk': {'id': 'Baekmuk'}, 'bahyph': {'id': 'Bahyph'}, 'barr': {'id': 'Barr'}, 'bcrypt-solar-designer': {'id': 'bcrypt-Solar-Designer'}, 'beerware': {'id': 'Beerware'}, 'bitstream-charter': {'id': 'Bitstream-Charter'}, 'bitstream-vera': {'id': 'Bitstream-Vera'}, 'bittorrent-1.0': {'id': 'BitTorrent-1.0'}, 'bittorrent-1.1': {'id': 'BitTorrent-1.1'}, 'blessing': {'id': 'blessing'}, 'blueoak-1.0.0': {'id': 'BlueOak-1.0.0'}, 'boehm-gc': {'id': 'Boehm-GC'}, 'boehm-gc-without-fee': {'id': 'Boehm-GC-without-fee'}, 'borceux': {'id': 'Borceux'}, 'brian-gladman-2-clause': {'id': 'Brian-Gladman-2-Clause'}, 'brian-gladman-3-clause': {'id': 'Brian-Gladman-3-Clause'}, 'bsd-1-clause': {'id': 'BSD-1-Clause'}, 'bsd-2-clause': {'id': 'BSD-2-Clause'}, 'bsd-2-clause-darwin': {'id': 'BSD-2-Clause-Darwin'}, 'bsd-2-clause-first-lines': {'id': 'BSD-2-Clause-first-lines'}, 'bsd-2-clause-patent': {'id': 'BSD-2-Clause-Patent'}, 'bsd-2-clause-views': {'id': 'BSD-2-Clause-Views'}, 'bsd-3-clause': {'id': 'BSD-3-Clause'}, 'bsd-3-clause-acpica': {'id': 'BSD-3-Clause-acpica'}, 'bsd-3-clause-attribution': {'id': 'BSD-3-Clause-Attribution'}, 'bsd-3-clause-clear': {'id': 'BSD-3-Clause-Clear'}, 'bsd-3-clause-flex': {'id': 'BSD-3-Clause-flex'}, 'bsd-3-clause-hp': {'id': 'BSD-3-Clause-HP'}, 'bsd-3-clause-lbnl': {'id': 'BSD-3-Clause-LBNL'}, 'bsd-3-clause-modification': {'id': 'BSD-3-Clause-Modification'}, 'bsd-3-clause-no-military-license': {'id': 'BSD-3-Clause-No-Military-License'}, 'bsd-3-clause-no-nuclear-license': {'id': 'BSD-3-Clause-No-Nuclear-License'}, 'bsd-3-clause-no-nuclear-license-2014': {'id': 'BSD-3-Clause-No-Nuclear-License-2014'}, 'bsd-3-clause-no-nuclear-warranty': {'id': 'BSD-3-Clause-No-Nuclear-Warranty'}, 'bsd-3-clause-open-mpi': {'id': 'BSD-3-Clause-Open-MPI'}, 'bsd-3-clause-sun': {'id': 'BSD-3-Clause-Sun'}, 'bsd-4-clause': {'id': 
'BSD-4-Clause'}, 'bsd-4-clause-shortened': {'id': 'BSD-4-Clause-Shortened'}, 'bsd-4-clause-uc': {'id': 'BSD-4-Clause-UC'}, 'bsd-4.3reno': {'id': 'BSD-4.3RENO'}, 'bsd-4.3tahoe': {'id': 'BSD-4.3TAHOE'}, 'bsd-advertising-acknowledgement': {'id': 'BSD-Advertising-Acknowledgement'}, 'bsd-attribution-hpnd-disclaimer': {'id': 'BSD-Attribution-HPND-disclaimer'}, 'bsd-inferno-nettverk': {'id': 'BSD-Inferno-Nettverk'}, 'bsd-protection': {'id': 'BSD-Protection'}, 'bsd-source-beginning-file': {'id': 'BSD-Source-beginning-file'}, 'bsd-source-code': {'id': 'BSD-Source-Code'}, 'bsd-systemics': {'id': 'BSD-Systemics'}, 'bsd-systemics-w3works': {'id': 'BSD-Systemics-W3Works'}, 'bsl-1.0': {'id': 'BSL-1.0'}, 'busl-1.1': {'id': 'BUSL-1.1'}, 'bzip2-1.0.6': {'id': 'bzip2-1.0.6'}, 'c-uda-1.0': {'id': 'C-UDA-1.0'}, 'cal-1.0': {'id': 'CAL-1.0'}, 'cal-1.0-combined-work-exception': {'id': 'CAL-1.0-Combined-Work-Exception'}, 'caldera': {'id': 'Caldera'}, 'caldera-no-preamble': {'id': 'Caldera-no-preamble'}, 'catharon': {'id': 'Catharon'}, 'catosl-1.1': {'id': 'CATOSL-1.1'}, 'cc-by-1.0': {'id': 'CC-BY-1.0'}, 'cc-by-2.0': {'id': 'CC-BY-2.0'}, 'cc-by-2.5': {'id': 'CC-BY-2.5'}, 'cc-by-2.5-au': {'id': 'CC-BY-2.5-AU'}, 'cc-by-3.0': {'id': 'CC-BY-3.0'}, 'cc-by-3.0-at': {'id': 'CC-BY-3.0-AT'}, 'cc-by-3.0-au': {'id': 'CC-BY-3.0-AU'}, 'cc-by-3.0-de': {'id': 'CC-BY-3.0-DE'}, 'cc-by-3.0-igo': {'id': 'CC-BY-3.0-IGO'}, 'cc-by-3.0-nl': {'id': 'CC-BY-3.0-NL'}, 'cc-by-3.0-us': {'id': 'CC-BY-3.0-US'}, 'cc-by-4.0': {'id': 'CC-BY-4.0'}, 'cc-by-nc-1.0': {'id': 'CC-BY-NC-1.0'}, 'cc-by-nc-2.0': {'id': 'CC-BY-NC-2.0'}, 'cc-by-nc-2.5': {'id': 'CC-BY-NC-2.5'}, 'cc-by-nc-3.0': {'id': 'CC-BY-NC-3.0'}, 'cc-by-nc-3.0-de': {'id': 'CC-BY-NC-3.0-DE'}, 'cc-by-nc-4.0': {'id': 'CC-BY-NC-4.0'}, 'cc-by-nc-nd-1.0': {'id': 'CC-BY-NC-ND-1.0'}, 'cc-by-nc-nd-2.0': {'id': 'CC-BY-NC-ND-2.0'}, 'cc-by-nc-nd-2.5': {'id': 'CC-BY-NC-ND-2.5'}, 'cc-by-nc-nd-3.0': {'id': 'CC-BY-NC-ND-3.0'}, 'cc-by-nc-nd-3.0-de': {'id': 'CC-BY-NC-ND-3.0-DE'}, 
'cc-by-nc-nd-3.0-igo': {'id': 'CC-BY-NC-ND-3.0-IGO'}, 'cc-by-nc-nd-4.0': {'id': 'CC-BY-NC-ND-4.0'}, 'cc-by-nc-sa-1.0': {'id': 'CC-BY-NC-SA-1.0'}, 'cc-by-nc-sa-2.0': {'id': 'CC-BY-NC-SA-2.0'}, 'cc-by-nc-sa-2.0-de': {'id': 'CC-BY-NC-SA-2.0-DE'}, 'cc-by-nc-sa-2.0-fr': {'id': 'CC-BY-NC-SA-2.0-FR'}, 'cc-by-nc-sa-2.0-uk': {'id': 'CC-BY-NC-SA-2.0-UK'}, 'cc-by-nc-sa-2.5': {'id': 'CC-BY-NC-SA-2.5'}, 'cc-by-nc-sa-3.0': {'id': 'CC-BY-NC-SA-3.0'}, 'cc-by-nc-sa-3.0-de': {'id': 'CC-BY-NC-SA-3.0-DE'}, 'cc-by-nc-sa-3.0-igo': {'id': 'CC-BY-NC-SA-3.0-IGO'}, 'cc-by-nc-sa-4.0': {'id': 'CC-BY-NC-SA-4.0'}, 'cc-by-nd-1.0': {'id': 'CC-BY-ND-1.0'}, 'cc-by-nd-2.0': {'id': 'CC-BY-ND-2.0'}, 'cc-by-nd-2.5': {'id': 'CC-BY-ND-2.5'}, 'cc-by-nd-3.0': {'id': 'CC-BY-ND-3.0'}, 'cc-by-nd-3.0-de': {'id': 'CC-BY-ND-3.0-DE'}, 'cc-by-nd-4.0': {'id': 'CC-BY-ND-4.0'}, 'cc-by-sa-1.0': {'id': 'CC-BY-SA-1.0'}, 'cc-by-sa-2.0': {'id': 'CC-BY-SA-2.0'}, 'cc-by-sa-2.0-uk': {'id': 'CC-BY-SA-2.0-UK'}, 'cc-by-sa-2.1-jp': {'id': 'CC-BY-SA-2.1-JP'}, 'cc-by-sa-2.5': {'id': 'CC-BY-SA-2.5'}, 'cc-by-sa-3.0': {'id': 'CC-BY-SA-3.0'}, 'cc-by-sa-3.0-at': {'id': 'CC-BY-SA-3.0-AT'}, 'cc-by-sa-3.0-de': {'id': 'CC-BY-SA-3.0-DE'}, 'cc-by-sa-3.0-igo': {'id': 'CC-BY-SA-3.0-IGO'}, 'cc-by-sa-4.0': {'id': 'CC-BY-SA-4.0'}, 'cc-pddc': {'id': 'CC-PDDC'}, 'cc-pdm-1.0': {'id': 'CC-PDM-1.0'}, 'cc-sa-1.0': {'id': 'CC-SA-1.0'}, 'cc0-1.0': {'id': 'CC0-1.0'}, 'cddl-1.0': {'id': 'CDDL-1.0'}, 'cddl-1.1': {'id': 'CDDL-1.1'}, 'cdl-1.0': {'id': 'CDL-1.0'}, 'cdla-permissive-1.0': {'id': 'CDLA-Permissive-1.0'}, 'cdla-permissive-2.0': {'id': 'CDLA-Permissive-2.0'}, 'cdla-sharing-1.0': {'id': 'CDLA-Sharing-1.0'}, 'cecill-1.0': {'id': 'CECILL-1.0'}, 'cecill-1.1': {'id': 'CECILL-1.1'}, 'cecill-2.0': {'id': 'CECILL-2.0'}, 'cecill-2.1': {'id': 'CECILL-2.1'}, 'cecill-b': {'id': 'CECILL-B'}, 'cecill-c': {'id': 'CECILL-C'}, 'cern-ohl-1.1': {'id': 'CERN-OHL-1.1'}, 'cern-ohl-1.2': {'id': 'CERN-OHL-1.2'}, 'cern-ohl-p-2.0': {'id': 'CERN-OHL-P-2.0'}, 'cern-ohl-s-2.0': 
{'id': 'CERN-OHL-S-2.0'}, 'cern-ohl-w-2.0': {'id': 'CERN-OHL-W-2.0'}, 'cfitsio': {'id': 'CFITSIO'}, 'check-cvs': {'id': 'check-cvs'}, 'checkmk': {'id': 'checkmk'}, 'clartistic': {'id': 'ClArtistic'}, 'clips': {'id': 'Clips'}, 'cmu-mach': {'id': 'CMU-Mach'}, 'cmu-mach-nodoc': {'id': 'CMU-Mach-nodoc'}, 'cnri-jython': {'id': 'CNRI-Jython'}, 'cnri-python': {'id': 'CNRI-Python'}, 'cnri-python-gpl-compatible': {'id': 'CNRI-Python-GPL-Compatible'}, 'coil-1.0': {'id': 'COIL-1.0'}, 'community-spec-1.0': {'id': 'Community-Spec-1.0'}, 'condor-1.1': {'id': 'Condor-1.1'}, 'copyleft-next-0.3.0': {'id': 'copyleft-next-0.3.0'}, 'copyleft-next-0.3.1': {'id': 'copyleft-next-0.3.1'}, 'cornell-lossless-jpeg': {'id': 'Cornell-Lossless-JPEG'}, 'cpal-1.0': {'id': 'CPAL-1.0'}, 'cpl-1.0': {'id': 'CPL-1.0'}, 'cpol-1.02': {'id': 'CPOL-1.02'}, 'cronyx': {'id': 'Cronyx'}, 'crossword': {'id': 'Crossword'}, 'crystalstacker': {'id': 'CrystalStacker'}, 'cua-opl-1.0': {'id': 'CUA-OPL-1.0'}, 'cube': {'id': 'Cube'}, 'curl': {'id': 'curl'}, 'cve-tou': {'id': 'cve-tou'}, 'd-fsl-1.0': {'id': 'D-FSL-1.0'}, 'dec-3-clause': {'id': 'DEC-3-Clause'}, 'diffmark': {'id': 'diffmark'}, 'dl-de-by-2.0': {'id': 'DL-DE-BY-2.0'}, 'dl-de-zero-2.0': {'id': 'DL-DE-ZERO-2.0'}, 'doc': {'id': 'DOC'}, 'docbook-schema': {'id': 'DocBook-Schema'}, 'docbook-stylesheet': {'id': 'DocBook-Stylesheet'}, 'docbook-xml': {'id': 'DocBook-XML'}, 'dotseqn': {'id': 'Dotseqn'}, 'drl-1.0': {'id': 'DRL-1.0'}, 'drl-1.1': {'id': 'DRL-1.1'}, 'dsdp': {'id': 'DSDP'}, 'dtoa': {'id': 'dtoa'}, 'dvipdfm': {'id': 'dvipdfm'}, 'ecl-1.0': {'id': 'ECL-1.0'}, 'ecl-2.0': {'id': 'ECL-2.0'}, 'efl-1.0': {'id': 'EFL-1.0'}, 'efl-2.0': {'id': 'EFL-2.0'}, 'egenix': {'id': 'eGenix'}, 'elastic-2.0': {'id': 'Elastic-2.0'}, 'entessa': {'id': 'Entessa'}, 'epics': {'id': 'EPICS'}, 'epl-1.0': {'id': 'EPL-1.0'}, 'epl-2.0': {'id': 'EPL-2.0'}, 'erlpl-1.1': {'id': 'ErlPL-1.1'}, 'etalab-2.0': {'id': 'etalab-2.0'}, 'eudatagrid': {'id': 'EUDatagrid'}, 'eupl-1.0': {'id': 
'EUPL-1.0'}, 'eupl-1.1': {'id': 'EUPL-1.1'}, 'eupl-1.2': {'id': 'EUPL-1.2'}, 'eurosym': {'id': 'Eurosym'}, 'fair': {'id': 'Fair'}, 'fbm': {'id': 'FBM'}, 'fdk-aac': {'id': 'FDK-AAC'}, 'ferguson-twofish': {'id': 'Ferguson-Twofish'}, 'frameworx-1.0': {'id': 'Frameworx-1.0'}, 'freebsd-doc': {'id': 'FreeBSD-DOC'}, 'freeimage': {'id': 'FreeImage'}, 'fsfap': {'id': 'FSFAP'}, 'fsfap-no-warranty-disclaimer': {'id': 'FSFAP-no-warranty-disclaimer'}, 'fsful': {'id': 'FSFUL'}, 'fsfullr': {'id': 'FSFULLR'}, 'fsfullrwd': {'id': 'FSFULLRWD'}, 'ftl': {'id': 'FTL'}, 'furuseth': {'id': 'Furuseth'}, 'fwlw': {'id': 'fwlw'}, 'gcr-docs': {'id': 'GCR-docs'}, 'gd': {'id': 'GD'}, 'generic-xts': {'id': 'generic-xts'}, 'gfdl-1.1-invariants-only': {'id': 'GFDL-1.1-invariants-only'}, 'gfdl-1.1-invariants-or-later': {'id': 'GFDL-1.1-invariants-or-later'}, 'gfdl-1.1-no-invariants-only': {'id': 'GFDL-1.1-no-invariants-only'}, 'gfdl-1.1-no-invariants-or-later': {'id': 'GFDL-1.1-no-invariants-or-later'}, 'gfdl-1.1-only': {'id': 'GFDL-1.1-only'}, 'gfdl-1.1-or-later': {'id': 'GFDL-1.1-or-later'}, 'gfdl-1.2-invariants-only': {'id': 'GFDL-1.2-invariants-only'}, 'gfdl-1.2-invariants-or-later': {'id': 'GFDL-1.2-invariants-or-later'}, 'gfdl-1.2-no-invariants-only': {'id': 'GFDL-1.2-no-invariants-only'}, 'gfdl-1.2-no-invariants-or-later': {'id': 'GFDL-1.2-no-invariants-or-later'}, 'gfdl-1.2-only': {'id': 'GFDL-1.2-only'}, 'gfdl-1.2-or-later': {'id': 'GFDL-1.2-or-later'}, 'gfdl-1.3-invariants-only': {'id': 'GFDL-1.3-invariants-only'}, 'gfdl-1.3-invariants-or-later': {'id': 'GFDL-1.3-invariants-or-later'}, 'gfdl-1.3-no-invariants-only': {'id': 'GFDL-1.3-no-invariants-only'}, 'gfdl-1.3-no-invariants-or-later': {'id': 'GFDL-1.3-no-invariants-or-later'}, 'gfdl-1.3-only': {'id': 'GFDL-1.3-only'}, 'gfdl-1.3-or-later': {'id': 'GFDL-1.3-or-later'}, 'giftware': {'id': 'Giftware'}, 'gl2ps': {'id': 'GL2PS'}, 'glide': {'id': 'Glide'}, 'glulxe': {'id': 'Glulxe'}, 'glwtpl': {'id': 'GLWTPL'}, 'gnuplot': {'id': 'gnuplot'}, 
'gpl-1.0-only': {'id': 'GPL-1.0-only'}, 'gpl-1.0-or-later': {'id': 'GPL-1.0-or-later'}, 'gpl-2.0-only': {'id': 'GPL-2.0-only'}, 'gpl-2.0-or-later': {'id': 'GPL-2.0-or-later'}, 'gpl-3.0-only': {'id': 'GPL-3.0-only'}, 'gpl-3.0-or-later': {'id': 'GPL-3.0-or-later'}, 'graphics-gems': {'id': 'Graphics-Gems'}, 'gsoap-1.3b': {'id': 'gSOAP-1.3b'}, 'gtkbook': {'id': 'gtkbook'}, 'gutmann': {'id': 'Gutmann'}, 'haskellreport': {'id': 'HaskellReport'}, 'hdparm': {'id': 'hdparm'}, 'hidapi': {'id': 'HIDAPI'}, 'hippocratic-2.1': {'id': 'Hippocratic-2.1'}, 'hp-1986': {'id': 'HP-1986'}, 'hp-1989': {'id': 'HP-1989'}, 'hpnd': {'id': 'HPND'}, 'hpnd-dec': {'id': 'HPND-DEC'}, 'hpnd-doc': {'id': 'HPND-doc'}, 'hpnd-doc-sell': {'id': 'HPND-doc-sell'}, 'hpnd-export-us': {'id': 'HPND-export-US'}, 'hpnd-export-us-acknowledgement': {'id': 'HPND-export-US-acknowledgement'}, 'hpnd-export-us-modify': {'id': 'HPND-export-US-modify'}, 'hpnd-export2-us': {'id': 'HPND-export2-US'}, 'hpnd-fenneberg-livingston': {'id': 'HPND-Fenneberg-Livingston'}, 'hpnd-inria-imag': {'id': 'HPND-INRIA-IMAG'}, 'hpnd-intel': {'id': 'HPND-Intel'}, 'hpnd-kevlin-henney': {'id': 'HPND-Kevlin-Henney'}, 'hpnd-markus-kuhn': {'id': 'HPND-Markus-Kuhn'}, 'hpnd-merchantability-variant': {'id': 'HPND-merchantability-variant'}, 'hpnd-mit-disclaimer': {'id': 'HPND-MIT-disclaimer'}, 'hpnd-netrek': {'id': 'HPND-Netrek'}, 'hpnd-pbmplus': {'id': 'HPND-Pbmplus'}, 'hpnd-sell-mit-disclaimer-xserver': {'id': 'HPND-sell-MIT-disclaimer-xserver'}, 'hpnd-sell-regexpr': {'id': 'HPND-sell-regexpr'}, 'hpnd-sell-variant': {'id': 'HPND-sell-variant'}, 'hpnd-sell-variant-mit-disclaimer': {'id': 'HPND-sell-variant-MIT-disclaimer'}, 'hpnd-sell-variant-mit-disclaimer-rev': {'id': 'HPND-sell-variant-MIT-disclaimer-rev'}, 'hpnd-uc': {'id': 'HPND-UC'}, 'hpnd-uc-export-us': {'id': 'HPND-UC-export-US'}, 'htmltidy': {'id': 'HTMLTIDY'}, 'ibm-pibs': {'id': 'IBM-pibs'}, 'icu': {'id': 'ICU'}, 'iec-code-components-eula': {'id': 'IEC-Code-Components-EULA'}, 'ijg': 
{'id': 'IJG'}, 'ijg-short': {'id': 'IJG-short'}, 'imagemagick': {'id': 'ImageMagick'}, 'imatix': {'id': 'iMatix'}, 'imlib2': {'id': 'Imlib2'}, 'info-zip': {'id': 'Info-ZIP'}, 'inner-net-2.0': {'id': 'Inner-Net-2.0'}, 'innosetup': {'id': 'InnoSetup'}, 'intel': {'id': 'Intel'}, 'intel-acpi': {'id': 'Intel-ACPI'}, 'interbase-1.0': {'id': 'Interbase-1.0'}, 'ipa': {'id': 'IPA'}, 'ipl-1.0': {'id': 'IPL-1.0'}, 'isc': {'id': 'ISC'}, 'isc-veillard': {'id': 'ISC-Veillard'}, 'jam': {'id': 'Jam'}, 'jasper-2.0': {'id': 'JasPer-2.0'}, 'jpl-image': {'id': 'JPL-image'}, 'jpnic': {'id': 'JPNIC'}, 'json': {'id': 'JSON'}, 'kastrup': {'id': 'Kastrup'}, 'kazlib': {'id': 'Kazlib'}, 'knuth-ctan': {'id': 'Knuth-CTAN'}, 'lal-1.2': {'id': 'LAL-1.2'}, 'lal-1.3': {'id': 'LAL-1.3'}, 'latex2e': {'id': 'Latex2e'}, 'latex2e-translated-notice': {'id': 'Latex2e-translated-notice'}, 'leptonica': {'id': 'Leptonica'}, 'lgpl-2.0-only': {'id': 'LGPL-2.0-only'}, 'lgpl-2.0-or-later': {'id': 'LGPL-2.0-or-later'}, 'lgpl-2.1-only': {'id': 'LGPL-2.1-only'}, 'lgpl-2.1-or-later': {'id': 'LGPL-2.1-or-later'}, 'lgpl-3.0-only': {'id': 'LGPL-3.0-only'}, 'lgpl-3.0-or-later': {'id': 'LGPL-3.0-or-later'}, 'lgpllr': {'id': 'LGPLLR'}, 'libpng': {'id': 'Libpng'}, 'libpng-2.0': {'id': 'libpng-2.0'}, 'libselinux-1.0': {'id': 'libselinux-1.0'}, 'libtiff': {'id': 'libtiff'}, 'libutil-david-nugent': {'id': 'libutil-David-Nugent'}, 'liliq-p-1.1': {'id': 'LiLiQ-P-1.1'}, 'liliq-r-1.1': {'id': 'LiLiQ-R-1.1'}, 'liliq-rplus-1.1': {'id': 'LiLiQ-Rplus-1.1'}, 'linux-man-pages-1-para': {'id': 'Linux-man-pages-1-para'}, 'linux-man-pages-copyleft': {'id': 'Linux-man-pages-copyleft'}, 'linux-man-pages-copyleft-2-para': {'id': 'Linux-man-pages-copyleft-2-para'}, 'linux-man-pages-copyleft-var': {'id': 'Linux-man-pages-copyleft-var'}, 'linux-openib': {'id': 'Linux-OpenIB'}, 'loop': {'id': 'LOOP'}, 'lpd-document': {'id': 'LPD-document'}, 'lpl-1.0': {'id': 'LPL-1.0'}, 'lpl-1.02': {'id': 'LPL-1.02'}, 'lppl-1.0': {'id': 'LPPL-1.0'}, 'lppl-1.1': 
{'id': 'LPPL-1.1'}, 'lppl-1.2': {'id': 'LPPL-1.2'}, 'lppl-1.3a': {'id': 'LPPL-1.3a'}, 'lppl-1.3c': {'id': 'LPPL-1.3c'}, 'lsof': {'id': 'lsof'}, 'lucida-bitmap-fonts': {'id': 'Lucida-Bitmap-Fonts'}, 'lzma-sdk-9.11-to-9.20': {'id': 'LZMA-SDK-9.11-to-9.20'}, 'lzma-sdk-9.22': {'id': 'LZMA-SDK-9.22'}, 'mackerras-3-clause': {'id': 'Mackerras-3-Clause'}, 'mackerras-3-clause-acknowledgment': {'id': 'Mackerras-3-Clause-acknowledgment'}, 'magaz': {'id': 'magaz'}, 'mailprio': {'id': 'mailprio'}, 'makeindex': {'id': 'MakeIndex'}, 'martin-birgmeier': {'id': 'Martin-Birgmeier'}, 'mcphee-slideshow': {'id': 'McPhee-slideshow'}, 'metamail': {'id': 'metamail'}, 'minpack': {'id': 'Minpack'}, 'mips': {'id': 'MIPS'}, 'miros': {'id': 'MirOS'}, 'mit': {'id': 'MIT'}, 'mit-0': {'id': 'MIT-0'}, 'mit-advertising': {'id': 'MIT-advertising'}, 'mit-click': {'id': 'MIT-Click'}, 'mit-cmu': {'id': 'MIT-CMU'}, 'mit-enna': {'id': 'MIT-enna'}, 'mit-feh': {'id': 'MIT-feh'}, 'mit-festival': {'id': 'MIT-Festival'}, 'mit-khronos-old': {'id': 'MIT-Khronos-old'}, 'mit-modern-variant': {'id': 'MIT-Modern-Variant'}, 'mit-open-group': {'id': 'MIT-open-group'}, 'mit-testregex': {'id': 'MIT-testregex'}, 'mit-wu': {'id': 'MIT-Wu'}, 'mitnfa': {'id': 'MITNFA'}, 'mmixware': {'id': 'MMIXware'}, 'motosoto': {'id': 'Motosoto'}, 'mpeg-ssg': {'id': 'MPEG-SSG'}, 'mpi-permissive': {'id': 'mpi-permissive'}, 'mpich2': {'id': 'mpich2'}, 'mpl-1.0': {'id': 'MPL-1.0'}, 'mpl-1.1': {'id': 'MPL-1.1'}, 'mpl-2.0': {'id': 'MPL-2.0'}, 'mpl-2.0-no-copyleft-exception': {'id': 'MPL-2.0-no-copyleft-exception'}, 'mplus': {'id': 'mplus'}, 'ms-lpl': {'id': 'MS-LPL'}, 'ms-pl': {'id': 'MS-PL'}, 'ms-rl': {'id': 'MS-RL'}, 'mtll': {'id': 'MTLL'}, 'mulanpsl-1.0': {'id': 'MulanPSL-1.0'}, 'mulanpsl-2.0': {'id': 'MulanPSL-2.0'}, 'multics': {'id': 'Multics'}, 'mup': {'id': 'Mup'}, 'naist-2003': {'id': 'NAIST-2003'}, 'nasa-1.3': {'id': 'NASA-1.3'}, 'naumen': {'id': 'Naumen'}, 'nbpl-1.0': {'id': 'NBPL-1.0'}, 'ncbi-pd': {'id': 'NCBI-PD'}, 'ncgl-uk-2.0': 
{'id': 'NCGL-UK-2.0'}, 'ncl': {'id': 'NCL'}, 'ncsa': {'id': 'NCSA'}, 'netcdf': {'id': 'NetCDF'}, 'newsletr': {'id': 'Newsletr'}, 'ngpl': {'id': 'NGPL'}, 'nicta-1.0': {'id': 'NICTA-1.0'}, 'nist-pd': {'id': 'NIST-PD'}, 'nist-pd-fallback': {'id': 'NIST-PD-fallback'}, 'nist-software': {'id': 'NIST-Software'}, 'nlod-1.0': {'id': 'NLOD-1.0'}, 'nlod-2.0': {'id': 'NLOD-2.0'}, 'nlpl': {'id': 'NLPL'}, 'nokia': {'id': 'Nokia'}, 'nosl': {'id': 'NOSL'}, 'noweb': {'id': 'Noweb'}, 'npl-1.0': {'id': 'NPL-1.0'}, 'npl-1.1': {'id': 'NPL-1.1'}, 'nposl-3.0': {'id': 'NPOSL-3.0'}, 'nrl': {'id': 'NRL'}, 'ntp': {'id': 'NTP'}, 'ntp-0': {'id': 'NTP-0'}, 'o-uda-1.0': {'id': 'O-UDA-1.0'}, 'oar': {'id': 'OAR'}, 'occt-pl': {'id': 'OCCT-PL'}, 'oclc-2.0': {'id': 'OCLC-2.0'}, 'odbl-1.0': {'id': 'ODbL-1.0'}, 'odc-by-1.0': {'id': 'ODC-By-1.0'}, 'offis': {'id': 'OFFIS'}, 'ofl-1.0': {'id': 'OFL-1.0'}, 'ofl-1.0-no-rfn': {'id': 'OFL-1.0-no-RFN'}, 'ofl-1.0-rfn': {'id': 'OFL-1.0-RFN'}, 'ofl-1.1': {'id': 'OFL-1.1'}, 'ofl-1.1-no-rfn': {'id': 'OFL-1.1-no-RFN'}, 'ofl-1.1-rfn': {'id': 'OFL-1.1-RFN'}, 'ogc-1.0': {'id': 'OGC-1.0'}, 'ogdl-taiwan-1.0': {'id': 'OGDL-Taiwan-1.0'}, 'ogl-canada-2.0': {'id': 'OGL-Canada-2.0'}, 'ogl-uk-1.0': {'id': 'OGL-UK-1.0'}, 'ogl-uk-2.0': {'id': 'OGL-UK-2.0'}, 'ogl-uk-3.0': {'id': 'OGL-UK-3.0'}, 'ogtsl': {'id': 'OGTSL'}, 'oldap-1.1': {'id': 'OLDAP-1.1'}, 'oldap-1.2': {'id': 'OLDAP-1.2'}, 'oldap-1.3': {'id': 'OLDAP-1.3'}, 'oldap-1.4': {'id': 'OLDAP-1.4'}, 'oldap-2.0': {'id': 'OLDAP-2.0'}, 'oldap-2.0.1': {'id': 'OLDAP-2.0.1'}, 'oldap-2.1': {'id': 'OLDAP-2.1'}, 'oldap-2.2': {'id': 'OLDAP-2.2'}, 'oldap-2.2.1': {'id': 'OLDAP-2.2.1'}, 'oldap-2.2.2': {'id': 'OLDAP-2.2.2'}, 'oldap-2.3': {'id': 'OLDAP-2.3'}, 'oldap-2.4': {'id': 'OLDAP-2.4'}, 'oldap-2.5': {'id': 'OLDAP-2.5'}, 'oldap-2.6': {'id': 'OLDAP-2.6'}, 'oldap-2.7': {'id': 'OLDAP-2.7'}, 'oldap-2.8': {'id': 'OLDAP-2.8'}, 'olfl-1.3': {'id': 'OLFL-1.3'}, 'oml': {'id': 'OML'}, 'openpbs-2.3': {'id': 'OpenPBS-2.3'}, 'openssl': {'id': 
'OpenSSL'}, 'openssl-standalone': {'id': 'OpenSSL-standalone'}, 'openvision': {'id': 'OpenVision'}, 'opl-1.0': {'id': 'OPL-1.0'}, 'opl-uk-3.0': {'id': 'OPL-UK-3.0'}, 'opubl-1.0': {'id': 'OPUBL-1.0'}, 'oset-pl-2.1': {'id': 'OSET-PL-2.1'}, 'osl-1.0': {'id': 'OSL-1.0'}, 'osl-1.1': {'id': 'OSL-1.1'}, 'osl-2.0': {'id': 'OSL-2.0'}, 'osl-2.1': {'id': 'OSL-2.1'}, 'osl-3.0': {'id': 'OSL-3.0'}, 'padl': {'id': 'PADL'}, 'parity-6.0.0': {'id': 'Parity-6.0.0'}, 'parity-7.0.0': {'id': 'Parity-7.0.0'}, 'pddl-1.0': {'id': 'PDDL-1.0'}, 'php-3.0': {'id': 'PHP-3.0'}, 'php-3.01': {'id': 'PHP-3.01'}, 'pixar': {'id': 'Pixar'}, 'pkgconf': {'id': 'pkgconf'}, 'plexus': {'id': 'Plexus'}, 'pnmstitch': {'id': 'pnmstitch'}, 'polyform-noncommercial-1.0.0': {'id': 'PolyForm-Noncommercial-1.0.0'}, 'polyform-small-business-1.0.0': {'id': 'PolyForm-Small-Business-1.0.0'}, 'postgresql': {'id': 'PostgreSQL'}, 'ppl': {'id': 'PPL'}, 'psf-2.0': {'id': 'PSF-2.0'}, 'psfrag': {'id': 'psfrag'}, 'psutils': {'id': 'psutils'}, 'python-2.0': {'id': 'Python-2.0'}, 'python-2.0.1': {'id': 'Python-2.0.1'}, 'python-ldap': {'id': 'python-ldap'}, 'qhull': {'id': 'Qhull'}, 'qpl-1.0': {'id': 'QPL-1.0'}, 'qpl-1.0-inria-2004': {'id': 'QPL-1.0-INRIA-2004'}, 'radvd': {'id': 'radvd'}, 'rdisc': {'id': 'Rdisc'}, 'rhecos-1.1': {'id': 'RHeCos-1.1'}, 'rpl-1.1': {'id': 'RPL-1.1'}, 'rpl-1.5': {'id': 'RPL-1.5'}, 'rpsl-1.0': {'id': 'RPSL-1.0'}, 'rsa-md': {'id': 'RSA-MD'}, 'rscpl': {'id': 'RSCPL'}, 'ruby': {'id': 'Ruby'}, 'ruby-pty': {'id': 'Ruby-pty'}, 'sax-pd': {'id': 'SAX-PD'}, 'sax-pd-2.0': {'id': 'SAX-PD-2.0'}, 'saxpath': {'id': 'Saxpath'}, 'scea': {'id': 'SCEA'}, 'schemereport': {'id': 'SchemeReport'}, 'sendmail': {'id': 'Sendmail'}, 'sendmail-8.23': {'id': 'Sendmail-8.23'}, 'sendmail-open-source-1.1': {'id': 'Sendmail-Open-Source-1.1'}, 'sgi-b-1.0': {'id': 'SGI-B-1.0'}, 'sgi-b-1.1': {'id': 'SGI-B-1.1'}, 'sgi-b-2.0': {'id': 'SGI-B-2.0'}, 'sgi-opengl': {'id': 'SGI-OpenGL'}, 'sgp4': {'id': 'SGP4'}, 'shl-0.5': {'id': 'SHL-0.5'}, 
'shl-0.51': {'id': 'SHL-0.51'}, 'simpl-2.0': {'id': 'SimPL-2.0'}, 'sissl': {'id': 'SISSL'}, 'sissl-1.2': {'id': 'SISSL-1.2'}, 'sl': {'id': 'SL'}, 'sleepycat': {'id': 'Sleepycat'}, 'smail-gpl': {'id': 'SMAIL-GPL'}, 'smlnj': {'id': 'SMLNJ'}, 'smppl': {'id': 'SMPPL'}, 'snia': {'id': 'SNIA'}, 'snprintf': {'id': 'snprintf'}, 'softsurfer': {'id': 'softSurfer'}, 'soundex': {'id': 'Soundex'}, 'spencer-86': {'id': 'Spencer-86'}, 'spencer-94': {'id': 'Spencer-94'}, 'spencer-99': {'id': 'Spencer-99'}, 'spl-1.0': {'id': 'SPL-1.0'}, 'ssh-keyscan': {'id': 'ssh-keyscan'}, 'ssh-openssh': {'id': 'SSH-OpenSSH'}, 'ssh-short': {'id': 'SSH-short'}, 'ssleay-standalone': {'id': 'SSLeay-standalone'}, 'sspl-1.0': {'id': 'SSPL-1.0'}, 'sugarcrm-1.1.3': {'id': 'SugarCRM-1.1.3'}, 'sun-ppp': {'id': 'Sun-PPP'}, 'sun-ppp-2000': {'id': 'Sun-PPP-2000'}, 'sunpro': {'id': 'SunPro'}, 'swl': {'id': 'SWL'}, 'swrule': {'id': 'swrule'}, 'symlinks': {'id': 'Symlinks'}, 'tapr-ohl-1.0': {'id': 'TAPR-OHL-1.0'}, 'tcl': {'id': 'TCL'}, 'tcp-wrappers': {'id': 'TCP-wrappers'}, 'termreadkey': {'id': 'TermReadKey'}, 'tgppl-1.0': {'id': 'TGPPL-1.0'}, 'thirdeye': {'id': 'ThirdEye'}, 'threeparttable': {'id': 'threeparttable'}, 'tmate': {'id': 'TMate'}, 'torque-1.1': {'id': 'TORQUE-1.1'}, 'tosl': {'id': 'TOSL'}, 'tpdl': {'id': 'TPDL'}, 'tpl-1.0': {'id': 'TPL-1.0'}, 'trustedqsl': {'id': 'TrustedQSL'}, 'ttwl': {'id': 'TTWL'}, 'ttyp0': {'id': 'TTYP0'}, 'tu-berlin-1.0': {'id': 'TU-Berlin-1.0'}, 'tu-berlin-2.0': {'id': 'TU-Berlin-2.0'}, 'ubuntu-font-1.0': {'id': 'Ubuntu-font-1.0'}, 'ucar': {'id': 'UCAR'}, 'ucl-1.0': {'id': 'UCL-1.0'}, 'ulem': {'id': 'ulem'}, 'umich-merit': {'id': 'UMich-Merit'}, 'unicode-3.0': {'id': 'Unicode-3.0'}, 'unicode-dfs-2015': {'id': 'Unicode-DFS-2015'}, 'unicode-dfs-2016': {'id': 'Unicode-DFS-2016'}, 'unicode-tou': {'id': 'Unicode-TOU'}, 'unixcrypt': {'id': 'UnixCrypt'}, 'unlicense': {'id': 'Unlicense'}, 'upl-1.0': {'id': 'UPL-1.0'}, 'urt-rle': {'id': 'URT-RLE'}, 'vim': {'id': 'Vim'}, 'vostrom': 
{'id': 'VOSTROM'}, 'vsl-1.0': {'id': 'VSL-1.0'}, 'w3c': {'id': 'W3C'}, 'w3c-19980720': {'id': 'W3C-19980720'}, 'w3c-20150513': {'id': 'W3C-20150513'}, 'w3m': {'id': 'w3m'}, 'watcom-1.0': {'id': 'Watcom-1.0'}, 'widget-workshop': {'id': 'Widget-Workshop'}, 'wsuipa': {'id': 'Wsuipa'}, 'wtfpl': {'id': 'WTFPL'}, 'wwl': {'id': 'wwl'}, 'x11': {'id': 'X11'}, 'x11-distribute-modifications-variant': {'id': 'X11-distribute-modifications-variant'}, 'x11-swapped': {'id': 'X11-swapped'}, 'xdebug-1.03': {'id': 'Xdebug-1.03'}, 'xerox': {'id': 'Xerox'}, 'xfig': {'id': 'Xfig'}, 'xfree86-1.1': {'id': 'XFree86-1.1'}, 'xinetd': {'id': 'xinetd'}, 'xkeyboard-config-zinoviev': {'id': 'xkeyboard-config-Zinoviev'}, 'xlock': {'id': 'xlock'}, 'xnet': {'id': 'Xnet'}, 'xpp': {'id': 'xpp'}, 'xskat': {'id': 'XSkat'}, 'xzoom': {'id': 'xzoom'}, 'ypl-1.0': {'id': 'YPL-1.0'}, 'ypl-1.1': {'id': 'YPL-1.1'}, 'zed': {'id': 'Zed'}, 'zeeff': {'id': 'Zeeff'}, 'zend-2.0': {'id': 'Zend-2.0'}, 'zimbra-1.3': {'id': 'Zimbra-1.3'}, 'zimbra-1.4': {'id': 'Zimbra-1.4'}, 'zlib': {'id': 'Zlib'}, 'zlib-acknowledgement': {'id': 'zlib-acknowledgement'}, 'zpl-1.1': {'id': 'ZPL-1.1'}, 'zpl-2.0': {'id': 'ZPL-2.0'}, 'zpl-2.1': {'id': 'ZPL-2.1'}} ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5035572 flit-3.12.0/flit_core/flit_core/buildapi.py0000644000000000000000000000632214770461472015557 0ustar00"""PEP-517 compliant buildsystem API""" import logging import io import os import os.path as osp from pathlib import Path from .common import ( Module, make_metadata, write_entry_points, dist_info_name, get_docstring_and_version_via_ast, ) from .config import read_flit_config from .wheel import make_wheel_in, _write_wheel_file from .sdist import SdistBuilder log = logging.getLogger(__name__) # PEP 517 specifies that the CWD will always be the source tree pyproj_toml = Path('pyproject.toml') def get_requires_for_build_wheel(config_settings=None): """Returns a list of requirements for 
building, as strings""" info = read_flit_config(pyproj_toml) # If we can get version & description from pyproject.toml (PEP 621), or # by parsing the module (_via_ast), we don't need any extra # dependencies. If not, we'll need to try importing it, so report any # runtime dependencies as build dependencies. want_summary = 'description' in info.dynamic_metadata want_version = 'version' in info.dynamic_metadata module = Module(info.module, Path.cwd()) docstring, version = get_docstring_and_version_via_ast(module) if (want_summary and not docstring) or (want_version and not version): return info.metadata.get('requires_dist', []) else: return [] # Requirements to build an sdist are the same as for a wheel get_requires_for_build_sdist = get_requires_for_build_wheel # Requirements to build an editable are the same as for a wheel get_requires_for_build_editable = get_requires_for_build_wheel def prepare_metadata_for_build_wheel(metadata_directory, config_settings=None): """Creates {metadata_directory}/foo-1.2.dist-info""" ini_info = read_flit_config(pyproj_toml) module = Module(ini_info.module, Path.cwd()) metadata = make_metadata(module, ini_info) dist_info = osp.join(metadata_directory, dist_info_name(metadata.name, metadata.version)) os.mkdir(dist_info) with io.open(osp.join(dist_info, 'WHEEL'), 'w', encoding='utf-8') as f: _write_wheel_file(f, supports_py2=metadata.supports_py2) with io.open(osp.join(dist_info, 'METADATA'), 'w', encoding='utf-8') as f: metadata.write_metadata_file(f) if ini_info.entrypoints: with io.open(osp.join(dist_info, 'entry_points.txt'), 'w', encoding='utf-8') as f: write_entry_points(ini_info.entrypoints, f) return osp.basename(dist_info) # Metadata for editable are the same as for a wheel prepare_metadata_for_build_editable = prepare_metadata_for_build_wheel def build_wheel(wheel_directory, config_settings=None, metadata_directory=None): """Builds a wheel, places it in wheel_directory""" info = make_wheel_in(pyproj_toml, 
Path(wheel_directory)) return info.file.name def build_editable(wheel_directory, config_settings=None, metadata_directory=None): """Builds an "editable" wheel, places it in wheel_directory""" info = make_wheel_in(pyproj_toml, Path(wheel_directory), editable=True) return info.file.name def build_sdist(sdist_directory, config_settings=None): """Builds an sdist, places it in sdist_directory""" path = SdistBuilder.from_ini_path(pyproj_toml).build(Path(sdist_directory)) return path.name ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5035572 flit-3.12.0/flit_core/flit_core/common.py0000644000000000000000000004076314770461472015265 0ustar00import ast from contextlib import contextmanager import hashlib import logging import os import sys from pathlib import Path import re log = logging.getLogger(__name__) from .versionno import normalise_version class Module(object): """This represents the module/package that we are going to distribute """ in_namespace_package = False namespace_package_name = None def __init__(self, name, directory=Path()): self.name = name # It must exist either as a .py file or a directory, but not both name_as_path = name.replace('.', os.sep) pkg_dir = directory / name_as_path py_file = directory / (name_as_path+'.py') src_pkg_dir = directory / 'src' / name_as_path src_py_file = directory / 'src' / (name_as_path+'.py') existing = set() if pkg_dir.is_dir(): self.path = pkg_dir self.is_package = True self.prefix = '' existing.add(pkg_dir) if py_file.is_file(): self.path = py_file self.is_package = False self.prefix = '' existing.add(py_file) if src_pkg_dir.is_dir(): self.path = src_pkg_dir self.is_package = True self.prefix = 'src' existing.add(src_pkg_dir) if src_py_file.is_file(): self.path = src_py_file self.is_package = False self.prefix = 'src' existing.add(src_py_file) if len(existing) > 1: raise ValueError( "Multiple files or folders could be module {}: {}" .format(name, ", ".join([str(p) for p in 
sorted(existing)])) ) elif not existing: raise ValueError("No file/folder found for module {}".format(name)) self.source_dir = directory / self.prefix if '.' in name: self.namespace_package_name = name.rpartition('.')[0] self.in_namespace_package = True @property def file(self): if self.is_package: return self.path / '__init__.py' else: return self.path @property def version_files(self): """Files which will be parsed to find a version number Files later in this list take precedence over earlier ones. """ if self.is_package: paths = [self.path / '__init__.py'] for filename in ('version.py', '_version.py', '__version__.py'): if (self.path / filename).is_file(): paths.insert(0, self.path / filename) return paths else: return [self.path] def iter_files(self): """Iterate over the files contained in this module. Yields absolute paths - caller may want to make them relative. Excludes any __pycache__ and *.pyc files. """ def _include(path): name = os.path.basename(path) if (name == '__pycache__') or name.endswith('.pyc'): return False return True if self.is_package: # Ensure we sort all files and directories so the order is stable for dirpath, dirs, files in os.walk(str(self.path)): for file in sorted(files): full_path = os.path.join(dirpath, file) if _include(full_path): yield full_path dirs[:] = [d for d in sorted(dirs) if _include(d)] else: yield str(self.path) class ProblemInModule(ValueError): pass class NoDocstringError(ProblemInModule): pass class NoVersionError(ProblemInModule): pass class InvalidVersion(ProblemInModule): pass class VCSError(Exception): def __init__(self, msg, directory): self.msg = msg self.directory = directory def __str__(self): return self.msg + ' ({})'.format(self.directory) @contextmanager def _module_load_ctx(): """Preserve some global state that modules might change at import time. - Handlers on the root logger. 
""" logging_handlers = logging.root.handlers[:] try: yield finally: logging.root.handlers = logging_handlers def get_docstring_and_version_via_ast(target): """ Return a tuple like (docstring, version) for the given module, extracted by parsing its AST. """ version = None for target_path in target.version_files: # read as bytes to enable custom encodings with target_path.open('rb') as f: node = ast.parse(f.read()) for child in node.body: if is_version_str_assignment(child): if sys.version_info >= (3, 8): version = child.value.value else: version = child.value.s break return ast.get_docstring(node), version def is_version_str_assignment(node): """Check if *node* is a simple string assignment to __version__""" if not isinstance(node, (ast.Assign, ast.AnnAssign)): return False constant_type = ast.Constant if sys.version_info >= (3, 8) else ast.Str if not isinstance(node.value, constant_type): return False targets = (node.target,) if isinstance(node, ast.AnnAssign) else node.targets for target in targets: if isinstance(target, ast.Name) and target.id == "__version__": return True return False # To ensure we're actually loading the specified file, give it a unique name to # avoid any cached import. In normal use we'll only load one module per process, # so it should only matter for the tests, but we'll do it anyway. _import_i = 0 def get_docstring_and_version_via_import(target): """ Return a tuple like (docstring, version) for the given module, extracted by importing the module and pulling __doc__ & __version__ from it. """ global _import_i _import_i += 1 log.debug("Loading module %s", target.file) from importlib.util import spec_from_file_location, module_from_spec mod_name = 'flit_core.dummy.import%d' % _import_i spec = spec_from_file_location(mod_name, target.file) with _module_load_ctx(): m = module_from_spec(spec) # Add the module to sys.modules to allow relative imports to work. 
# importlib has more code around this to handle the case where two # threads are trying to load the same module at the same time, but Flit # should always be running a single thread, so we won't duplicate that. sys.modules[mod_name] = m try: spec.loader.exec_module(m) finally: sys.modules.pop(mod_name, None) docstring = m.__dict__.get('__doc__', None) version = m.__dict__.get('__version__', None) return docstring, version def get_info_from_module(target, for_fields=('version', 'description')): """Load the module/package, get its docstring and __version__ """ if not for_fields: return {} # What core metadata calls Summary, PEP 621 calls description want_summary = 'description' in for_fields want_version = 'version' in for_fields log.debug("Loading module %s", target.file) # Attempt to extract our docstring & version by parsing our target's # AST, falling back to an import if that fails. This allows us to # build without necessarily requiring that our built package's # requirements are installed. docstring, version = get_docstring_and_version_via_ast(target) if (want_summary and not docstring) or (want_version and not version): docstring, version = get_docstring_and_version_via_import(target) res = {} if want_summary: if (not docstring) or not docstring.strip(): raise NoDocstringError( 'Flit cannot package module without docstring, or empty docstring. ' 'Please add a docstring to your module ({}).'.format(target.file) ) res['summary'] = docstring.lstrip().splitlines()[0] if want_version: res['version'] = check_version(version) return res def check_version(version): """ Check whether a given version string match PEP 440, and do normalisation. Raise InvalidVersion/NoVersionError with relevant information if version is invalid. Log a warning if the version is not canonical with respect to PEP 440. Returns the version in canonical PEP 440 format. """ if not version: raise NoVersionError('Cannot package module without a version string. 
' 'Please define a `__version__ = "x.y.z"` in your module.') if not isinstance(version, str): raise InvalidVersion('__version__ must be a string, not {}.' .format(type(version))) # Import here to avoid circular import version = normalise_version(version) return version script_template = """\ #!{interpreter} # -*- coding: utf-8 -*- import re import sys from {module} import {import_name} if __name__ == '__main__': sys.argv[0] = re.sub(r'(-script\\.pyw|\\.exe)?$', '', sys.argv[0]) sys.exit({func}()) """ def parse_entry_point(ep): """Check and parse a 'package.module:func' style entry point specification. Returns (modulename, funcname) """ if ':' not in ep: raise ValueError("Invalid entry point (no ':'): %r" % ep) mod, func = ep.split(':') for piece in func.split('.'): if not piece.isidentifier(): raise ValueError("Invalid entry point: %r is not an identifier" % piece) for piece in mod.split('.'): if not piece.isidentifier(): raise ValueError("Invalid entry point: %r is not a module path" % piece) return mod, func def write_entry_points(d, fp): """Write entry_points.txt from a two-level dict Sorts on keys to ensure results are reproducible. """ for group_name in sorted(d): fp.write(u'[{}]\n'.format(group_name)) group = d[group_name] for name in sorted(group): val = group[name] fp.write(u'{}={}\n'.format(name, val)) fp.write(u'\n') def hash_file(path, algorithm='sha256'): with open(path, 'rb') as f: h = hashlib.new(algorithm, f.read()) return h.hexdigest() def normalize_file_permissions(st_mode): """Normalize the permission bits in the st_mode field from stat to 644/755 Popular VCSs only track whether a file is executable or not. The exact permissions can vary on systems with different umasks. Normalising to 644 (non executable) or 755 (executable) makes builds more reproducible. 
""" # Set 644 permissions, leaving higher bits of st_mode unchanged new_mode = (st_mode | 0o644) & ~0o133 if st_mode & 0o100: new_mode |= 0o111 # Executable: 644 -> 755 return new_mode class Metadata(object): summary = None home_page = None author = None author_email = None maintainer = None maintainer_email = None license = None license_expression = None description = None keywords = None download_url = None requires_python = None description_content_type = None platform = () supported_platform = () classifiers = () provides = () requires = () obsoletes = () project_urls = () provides_dist = () requires_dist = () obsoletes_dist = () requires_external = () provides_extra = () license_files = () dynamic = () metadata_version = "2.4" def __init__(self, data): data = data.copy() self.name = data.pop('name') self.version = data.pop('version') for k, v in data.items(): assert hasattr(self, k), "data does not have attribute '{}'".format(k) setattr(self, k, v) def _normalise_field_name(self, n): return n.lower().replace('-', '_') def _extract_extras(self, req): match = re.search(r'\[([^]]*)\]', req) if match: list_str = match.group(1) return [item.strip() for item in list_str.split(',')] else: return None def _normalise_requires_dist(self, req): extras = self._extract_extras(req) if extras: normalised_extras = [normalise_core_metadata_name(extra) for extra in extras] normalised_extras_str = ', '.join(normalised_extras) normalised_req = re.sub(r'\[([^]]*)\]', f"[{normalised_extras_str}]", req) return normalised_req else: return req def write_metadata_file(self, fp): """Write out metadata in the email headers format""" fields = [ 'Metadata-Version', 'Name', 'Version', ] optional_fields = [ 'Summary', 'Home-page', 'Keywords', 'Author', 'Author-email', 'Maintainer', 'Maintainer-email', 'Requires-Python', 'Description-Content-Type', ] for field in fields: value = getattr(self, self._normalise_field_name(field)) fp.write(u"{}: {}\n".format(field, value)) for field in 
optional_fields: value = getattr(self, self._normalise_field_name(field)) if value is not None: # TODO: verify which fields can be multiline # The spec has multiline examples for Author, Maintainer & # License (& Description, but we put that in the body) # Indent following lines with 8 spaces: value = '\n '.join(value.splitlines()) fp.write(u"{}: {}\n".format(field, value)) license_expr = getattr(self, self._normalise_field_name("License-Expression")) license = getattr(self, self._normalise_field_name("License")) if license_expr: fp.write(u'License-Expression: {}\n'.format(license_expr)) elif license: # Deprecated, superseded by License-Expression fp.write(u'License: {}\n'.format(license)) for clsfr in self.classifiers: fp.write(u'Classifier: {}\n'.format(clsfr)) for file in self.license_files: fp.write(u'License-File: {}\n'.format(file)) for req in self.requires_dist: normalised_req = self._normalise_requires_dist(req) fp.write(u'Requires-Dist: {}\n'.format(normalised_req)) for url in self.project_urls: fp.write(u'Project-URL: {}\n'.format(url)) for extra in self.provides_extra: normalised_extra = normalise_core_metadata_name(extra) fp.write(u'Provides-Extra: {}\n'.format(normalised_extra)) if self.description is not None: fp.write(u'\n' + self.description + u'\n') @property def supports_py2(self): """Return True if Requires-Python indicates Python 2 support.""" for part in (self.requires_python or "").split(","): if re.search(r"^\s*(>=?|~=|===?)?\s*[3-9]", part): return False return True def make_metadata(module, ini_info): md_dict = {'name': module.name, 'provides': [module.name]} md_dict.update(get_info_from_module(module, ini_info.dynamic_metadata)) md_dict.update(ini_info.metadata) return Metadata(md_dict) def normalise_core_metadata_name(name): """Normalise a project or extra name (as in PEP 503, also PEP 685)""" return re.sub(r"[-_.]+", "-", name).lower() def normalize_dist_name(name: str, version: str) -> str: """Normalizes a name and a PEP 440 version The 
resulting string is valid as dist-info folder name and as first part of a wheel filename See https://packaging.python.org/specifications/binary-distribution-format/#escaping-and-unicode """ normalized_name = re.sub(r'[-_.]+', '_', name, flags=re.UNICODE).lower() assert check_version(version) == version assert '-' not in version, 'Normalized versions can’t have dashes' return '{}-{}'.format(normalized_name, version) def dist_info_name(distribution, version): """Get the correct name of the .dist-info folder""" return normalize_dist_name(distribution, version) + '.dist-info' def walk_data_dir(data_directory): """Iterate over the files in the given data directory. Yields paths prefixed with data_directory - caller may want to make them relative to that. Excludes any __pycache__ subdirectories. """ if data_directory is None: return for dirpath, dirs, files in os.walk(data_directory): for file in sorted(files): full_path = os.path.join(dirpath, file) yield full_path dirs[:] = [d for d in sorted(dirs) if d != '__pycache__'] ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5045574 flit-3.12.0/flit_core/flit_core/config.py0000644000000000000000000010177114770461472015237 0ustar00import difflib from email.headerregistry import Address import errno import logging import os import os.path as osp from pathlib import Path import re try: import tomllib except ImportError: try: from .vendor import tomli as tomllib # Some downstream distributors remove the vendored tomli. # When that is removed, import tomli from the regular location. 
except ImportError: import tomli as tomllib from ._spdx_data import licenses from .common import normalise_core_metadata_name from .versionno import normalise_version log = logging.getLogger(__name__) class ConfigError(ValueError): pass metadata_list_fields = { 'classifiers', 'requires', 'dev-requires' } metadata_allowed_fields = { 'module', 'author', 'author-email', 'maintainer', 'maintainer-email', 'home-page', 'license', 'keywords', 'requires-python', 'dist-name', 'description-file', 'requires-extra', } | metadata_list_fields metadata_required_fields = { 'module', 'author', } pep621_allowed_fields = { 'name', 'version', 'description', 'readme', 'requires-python', 'license', 'license-files', 'authors', 'maintainers', 'keywords', 'classifiers', 'urls', 'scripts', 'gui-scripts', 'entry-points', 'dependencies', 'optional-dependencies', 'dynamic', } default_license_files_globs = ['COPYING*', 'LICEN[CS]E*'] license_files_allowed_chars = re.compile(r'^[\w\-\.\/\*\?\[\]]+$') def read_flit_config(path): """Read and check the `pyproject.toml` file with data about the package. """ d = tomllib.loads(path.read_text('utf-8')) return prep_toml_config(d, path) class EntryPointsConflict(ConfigError): def __str__(self): return ('Please specify console_scripts entry points, or [scripts] in ' 'flit config, not both.') def prep_toml_config(d, path): """Validate config loaded from pyproject.toml and prepare common metadata Returns a LoadedConfig object. """ dtool = d.get('tool', {}).get('flit', {}) if 'project' in d: # Metadata in [project] table (PEP 621) if 'metadata' in dtool: raise ConfigError( "Use [project] table for metadata or [tool.flit.metadata], not both." ) if ('scripts' in dtool) or ('entrypoints' in dtool): raise ConfigError( "Don't mix [project] metadata with [tool.flit.scripts] or " "[tool.flit.entrypoints]. Use [project.scripts]," "[project.gui-scripts] or [project.entry-points] as replacements." 
) loaded_cfg = read_pep621_metadata(d['project'], path) module_tbl = dtool.get('module', {}) if 'name' in module_tbl: loaded_cfg.module = module_tbl['name'] elif 'metadata' in dtool: # Metadata in [tool.flit.metadata] (pre PEP 621 format) if 'module' in dtool: raise ConfigError( "Use [tool.flit.module] table with new-style [project] metadata, " "not [tool.flit.metadata]" ) loaded_cfg = _prep_metadata(dtool['metadata'], path) loaded_cfg.dynamic_metadata = ['version', 'description'] if 'entrypoints' in dtool: loaded_cfg.entrypoints = flatten_entrypoints(dtool['entrypoints']) if 'scripts' in dtool: loaded_cfg.add_scripts(dict(dtool['scripts'])) else: raise ConfigError( "Neither [project] nor [tool.flit.metadata] found in pyproject.toml" ) unknown_sections = set(dtool) - { 'metadata', 'module', 'scripts', 'entrypoints', 'sdist', 'external-data' } unknown_sections = [s for s in unknown_sections if not s.lower().startswith('x-')] if unknown_sections: raise ConfigError('Unexpected tables in pyproject.toml: ' + ', '.join( '[tool.flit.{}]'.format(s) for s in unknown_sections )) if 'sdist' in dtool: unknown_keys = set(dtool['sdist']) - {'include', 'exclude'} if unknown_keys: raise ConfigError( "Unknown keys in [tool.flit.sdist]:" + ", ".join(unknown_keys) ) loaded_cfg.sdist_include_patterns = _check_glob_patterns( dtool['sdist'].get('include', []), 'include' ) exclude = [ "**/__pycache__", "**.pyc", ] + dtool['sdist'].get('exclude', []) loaded_cfg.sdist_exclude_patterns = _check_glob_patterns( exclude, 'exclude' ) data_dir = dtool.get('external-data', {}).get('directory', None) if data_dir is not None: toml_key = "tool.flit.external-data.directory" if not isinstance(data_dir, str): raise ConfigError(f"{toml_key} must be a string") normp = osp.normpath(data_dir) if isabs_ish(normp): raise ConfigError(f"{toml_key} cannot be an absolute path") if normp.startswith('..' 
+ os.sep): raise ConfigError( f"{toml_key} cannot point outside the directory containing pyproject.toml" ) if normp == '.': raise ConfigError( f"{toml_key} cannot refer to the directory containing pyproject.toml" ) loaded_cfg.data_directory = path.parent / data_dir if not loaded_cfg.data_directory.is_dir(): raise ConfigError(f"{toml_key} must refer to a directory") return loaded_cfg def flatten_entrypoints(ep): """Flatten nested entrypoints dicts. Entry points group names can include dots. But dots in TOML make nested dictionaries: [entrypoints.a.b] # {'entrypoints': {'a': {'b': {}}}} The proper way to avoid this is: [entrypoints."a.b"] # {'entrypoints': {'a.b': {}}} But since there isn't a need for arbitrarily nested mappings in entrypoints, flit allows you to use the former. This flattens the nested dictionaries from loading pyproject.toml. """ def _flatten(d, prefix): d1 = {} for k, v in d.items(): if isinstance(v, dict): for flattened in _flatten(v, prefix+'.'+k): yield flattened else: d1[k] = v if d1: yield prefix, d1 res = {} for k, v in ep.items(): res.update(_flatten(v, k)) return res def _check_glob_patterns(pats, clude): """Check and normalise glob patterns for sdist include/exclude""" if not isinstance(pats, list): raise ConfigError("sdist {} patterns must be a list".format(clude)) # Windows filenames can't contain these (nor * or ?, but they are part of # glob patterns) - https://stackoverflow.com/a/31976060/434217 bad_chars = re.compile(r'[\000-\037<>:"\\]') normed = [] for p in pats: if bad_chars.search(p): raise ConfigError( '{} pattern {!r} contains bad characters (<>:\"\\ or control characters)' .format(clude, p) ) normp = osp.normpath(p) if isabs_ish(normp): raise ConfigError( f'{clude} pattern {p!r} is an absolute path' ) if normp.startswith('..' 
+ os.sep): raise ConfigError( '{} pattern {!r} points out of the directory containing pyproject.toml' .format(clude, p) ) normed.append(normp) return normed class LoadedConfig(object): def __init__(self): self.module = None self.metadata = {} self.reqs_by_extra = {} self.entrypoints = {} self.referenced_files = [] self.sdist_include_patterns = [] self.sdist_exclude_patterns = [] self.dynamic_metadata = [] self.data_directory = None def add_scripts(self, scripts_dict): if scripts_dict: if 'console_scripts' in self.entrypoints: raise EntryPointsConflict else: self.entrypoints['console_scripts'] = scripts_dict readme_ext_to_content_type = { '.rst': 'text/x-rst', '.md': 'text/markdown', '.txt': 'text/plain', } def description_from_file(rel_path: str, proj_dir: Path, guess_mimetype=True): if isabs_ish(rel_path): raise ConfigError("Readme path must be relative") desc_path = proj_dir / rel_path try: with desc_path.open('r', encoding='utf-8') as f: raw_desc = f.read() except IOError as e: if e.errno == errno.ENOENT: raise ConfigError( "Description file {} does not exist".format(desc_path) ) raise if guess_mimetype: ext = desc_path.suffix.lower() try: mimetype = readme_ext_to_content_type[ext] except KeyError: log.warning("Unknown extension %r for description file.", ext) log.warning(" Recognised extensions: %s", " ".join(readme_ext_to_content_type)) mimetype = None else: mimetype = None return raw_desc, mimetype def _prep_metadata(md_sect, path): """Process & verify the metadata from a config file - Pull out the module name we're packaging. - Read description-file and check that it's valid rst - Convert dashes in key names to underscores (e.g. 
home-page in config -> home_page in metadata) """ if not set(md_sect).issuperset(metadata_required_fields): missing = metadata_required_fields - set(md_sect) raise ConfigError("Required fields missing: " + '\n'.join(missing)) res = LoadedConfig() res.module = md_sect.get('module') if not all([m.isidentifier() for m in res.module.split(".")]): raise ConfigError("Module name %r is not a valid identifier" % res.module) md_dict = res.metadata # Description file if 'description-file' in md_sect: desc_path = md_sect.get('description-file') res.referenced_files.append(desc_path) desc_content, mimetype = description_from_file(desc_path, path.parent) md_dict['description'] = desc_content md_dict['description_content_type'] = mimetype if 'urls' in md_sect: project_urls = md_dict['project_urls'] = [] for label, url in sorted(md_sect.pop('urls').items()): project_urls.append("{}, {}".format(label, url)) for key, value in md_sect.items(): if key in {'description-file', 'module'}: continue if key not in metadata_allowed_fields: closest = difflib.get_close_matches(key, metadata_allowed_fields, n=1, cutoff=0.7) msg = "Unrecognised metadata key: {!r}".format(key) if closest: msg += " (did you mean {!r}?)".format(closest[0]) raise ConfigError(msg) k2 = key.replace('-', '_') md_dict[k2] = value if key in metadata_list_fields: if not isinstance(value, list): raise ConfigError('Expected a list for {} field, found {!r}' .format(key, value)) if not all(isinstance(a, str) for a in value): raise ConfigError('Expected a list of strings for {} field' .format(key)) elif key == 'requires-extra': if not isinstance(value, dict): raise ConfigError('Expected a dict for requires-extra field, found {!r}' .format(value)) if not all(isinstance(e, list) for e in value.values()): raise ConfigError('Expected a dict of lists for requires-extra field') for e, reqs in value.items(): if not all(isinstance(a, str) for a in reqs): raise ConfigError('Expected a string list for requires-extra. 
(extra {})' .format(e)) else: if not isinstance(value, str): raise ConfigError('Expected a string for {} field, found {!r}' .format(key, value)) # What we call requires in the ini file is technically requires_dist in # the metadata. if 'requires' in md_dict: md_dict['requires_dist'] = md_dict.pop('requires') # And what we call dist-name is name in the metadata if 'dist_name' in md_dict: md_dict['name'] = md_dict.pop('dist_name') # Move dev-requires into requires-extra reqs_noextra = md_dict.pop('requires_dist', []) reqs_extra = md_dict.pop('requires_extra', {}) extra_names_by_normed = {} for e, reqs in reqs_extra.items(): if not all(isinstance(a, str) for a in reqs): raise ConfigError( f'Expected a string list for requires-extra group {e}' ) if not name_is_valid(e): raise ConfigError( f'requires-extra group name {e!r} is not valid' ) enorm = normalise_core_metadata_name(e) extra_names_by_normed.setdefault(enorm, set()).add(e) res.reqs_by_extra[enorm] = reqs clashing_extra_names = [ g for g in extra_names_by_normed.values() if len(g) > 1 ] if clashing_extra_names: fmted = ['/'.join(sorted(g)) for g in clashing_extra_names] raise ConfigError( f"requires-extra group names clash: {'; '.join(fmted)}" ) dev_requires = md_dict.pop('dev_requires', None) if dev_requires is not None: if 'dev' in res.reqs_by_extra: raise ConfigError( 'dev-requires occurs together with its replacement requires-extra.dev.') else: log.warning( '"dev-requires = ..." is obsolete. Use "requires-extra = {"dev" = ...}" instead.') res.reqs_by_extra['dev'] = dev_requires # Add requires-extra requirements into requires_dist md_dict['requires_dist'] = \ reqs_noextra + list(_expand_requires_extra(res.reqs_by_extra)) md_dict['provides_extra'] = sorted(res.reqs_by_extra.keys()) # For internal use, record the main requirements as a '.none' extra. 
res.reqs_by_extra['.none'] = reqs_noextra if path: license_files = sorted( _license_files_from_globs( path.parent, default_license_files_globs, warn_no_files=False ) ) res.referenced_files.extend(license_files) md_dict['license_files'] = license_files return res def _expand_requires_extra(re): for extra, reqs in sorted(re.items()): for req in reqs: if ';' in req: name, envmark = req.split(';', 1) yield '{} ; extra == "{}" and ({})'.format(name, extra, envmark) else: yield '{} ; extra == "{}"'.format(req, extra) def _license_files_from_globs(project_dir: Path, globs, warn_no_files = True): license_files = set() for pattern in globs: if isabs_ish(pattern): raise ConfigError( "Invalid glob pattern for [project.license-files]: '{}'. " "Pattern must not start with '/'.".format(pattern) ) if ".." in pattern: raise ConfigError( "Invalid glob pattern for [project.license-files]: '{}'. " "Pattern must not contain '..'".format(pattern) ) if license_files_allowed_chars.match(pattern) is None: raise ConfigError( "Invalid glob pattern for [project.license-files]: '{}'. " "Pattern contains invalid characters. " "https://packaging.python.org/en/latest/specifications/pyproject-toml/#license-files" ) try: files = [ file.relative_to(project_dir).as_posix() for file in project_dir.glob(pattern) if file.is_file() ] except ValueError as ex: raise ConfigError( "Invalid glob pattern for [project.license-files]: '{}'. 
{}".format(pattern, ex.args[0]) ) if not files and warn_no_files: raise ConfigError( "No files found for [project.license-files]: '{}' pattern".format(pattern) ) license_files.update(files) return license_files def _check_type(d, field_name, cls): if not isinstance(d[field_name], cls): raise ConfigError( "{} field should be {}, not {}".format(field_name, cls, type(d[field_name])) ) def _check_types(d, field_name, cls_list) -> None: if not isinstance(d[field_name], cls_list): raise ConfigError( "{} field should be {}, not {}".format( field_name, ' or '.join(map(str, cls_list)), type(d[field_name]) ) ) def _check_list_of_str(d, field_name): if not isinstance(d[field_name], list) or not all( isinstance(e, str) for e in d[field_name] ): raise ConfigError( "{} field should be a list of strings".format(field_name) ) def read_pep621_metadata(proj, path) -> LoadedConfig: lc = LoadedConfig() md_dict = lc.metadata if 'name' not in proj: raise ConfigError('name must be specified in [project] table') _check_type(proj, 'name', str) if not name_is_valid(proj['name']): raise ConfigError(f"name {proj['name']} is not valid") md_dict['name'] = proj['name'] lc.module = md_dict['name'].replace('-', '_') unexpected_keys = proj.keys() - pep621_allowed_fields if unexpected_keys: log.warning("Unexpected names under [project]: %s", ', '.join(unexpected_keys)) if 'version' in proj: _check_type(proj, 'version', str) md_dict['version'] = normalise_version(proj['version']) if 'description' in proj: _check_type(proj, 'description', str) md_dict['summary'] = proj['description'] if 'readme' in proj: readme = proj['readme'] if isinstance(readme, str): lc.referenced_files.append(readme) desc_content, mimetype = description_from_file(readme, path.parent) elif isinstance(readme, dict): unrec_keys = set(readme.keys()) - {'text', 'file', 'content-type'} if unrec_keys: raise ConfigError( "Unrecognised keys in [project.readme]: {}".format(unrec_keys) ) if 'content-type' in readme: mimetype = 
readme['content-type'] mtype_base = mimetype.split(';')[0].strip() # e.g. text/x-rst if mtype_base not in readme_ext_to_content_type.values(): raise ConfigError( "Unrecognised readme content-type: {!r}".format(mtype_base) ) # TODO: validate content-type parameters (charset, md variant)? else: raise ConfigError( "content-type field required in [project.readme] table" ) if 'file' in readme: if 'text' in readme: raise ConfigError( "[project.readme] should specify file or text, not both" ) lc.referenced_files.append(readme['file']) desc_content, _ = description_from_file( readme['file'], path.parent, guess_mimetype=False ) elif 'text' in readme: desc_content = readme['text'] else: raise ConfigError( "file or text field required in [project.readme] table" ) else: raise ConfigError( "project.readme should be a string or a table" ) md_dict['description'] = desc_content md_dict['description_content_type'] = mimetype if 'requires-python' in proj: md_dict['requires_python'] = proj['requires-python'] license_files = set() if 'license' in proj: _check_types(proj, 'license', (str, dict)) if isinstance(proj['license'], str): licence_expr = proj['license'] md_dict['license_expression'] = normalise_compound_license_expr(licence_expr) else: license_tbl = proj['license'] unrec_keys = set(license_tbl.keys()) - {'text', 'file'} if unrec_keys: raise ConfigError( "Unrecognised keys in [project.license]: {}".format(unrec_keys) ) # The 'License' field in packaging metadata is a brief description of # a license, not the full text or a file path. if 'file' in license_tbl: if 'text' in license_tbl: raise ConfigError( "[project.license] should specify file or text, not both" ) license_f = osp.normpath(license_tbl['file']) if isabs_ish(license_f): raise ConfigError( f"License file path ({license_tbl['file']}) cannot be an absolute path" ) if license_f.startswith('..' 
+ os.sep): raise ConfigError( f"License file path ({license_tbl['file']}) cannot contain '..'" ) license_p = path.parent / license_f if not license_p.is_file(): raise ConfigError(f"License file {license_tbl['file']} does not exist") license_f = license_p.relative_to(path.parent).as_posix() license_files.add(license_f) elif 'text' in license_tbl: pass else: raise ConfigError( "file or text field required in [project.license] table" ) if 'license-files' in proj: _check_type(proj, 'license-files', list) globs = proj['license-files'] license_files = _license_files_from_globs(path.parent, globs) if isinstance(proj.get('license'), dict): raise ConfigError( "license-files cannot be used with a license table, " "use 'project.license' with a license expression instead" ) else: license_files.update( _license_files_from_globs( path.parent, default_license_files_globs, warn_no_files=False ) ) license_files_sorted = sorted(license_files) lc.referenced_files.extend(license_files_sorted) md_dict['license_files'] = license_files_sorted if 'authors' in proj: _check_type(proj, 'authors', list) md_dict.update(pep621_people(proj['authors'])) if 'maintainers' in proj: _check_type(proj, 'maintainers', list) md_dict.update(pep621_people(proj['maintainers'], group_name='maintainer')) if 'keywords' in proj: _check_list_of_str(proj, 'keywords') md_dict['keywords'] = ",".join(proj['keywords']) if 'classifiers' in proj: _check_list_of_str(proj, 'classifiers') classifiers = proj['classifiers'] license_expr = md_dict.get('license_expression', None) if license_expr: for cl in classifiers: if not cl.startswith('License :: '): continue raise ConfigError( "License classifiers are deprecated in favor of the license expression. 
" "Remove the '{}' classifier".format(cl) ) md_dict['classifiers'] = proj['classifiers'] if 'urls' in proj: _check_type(proj, 'urls', dict) project_urls = md_dict['project_urls'] = [] for label, url in sorted(proj['urls'].items()): project_urls.append("{}, {}".format(label, url)) if 'entry-points' in proj: _check_type(proj, 'entry-points', dict) for grp in proj['entry-points'].values(): if not isinstance(grp, dict): raise ConfigError( "projects.entry-points should only contain sub-tables" ) if not all(isinstance(k, str) for k in grp.values()): raise ConfigError( "[projects.entry-points.*] tables should have string values" ) if set(proj['entry-points'].keys()) & {'console_scripts', 'gui_scripts'}: raise ConfigError( "Scripts should be specified in [project.scripts] or " "[project.gui-scripts], not under [project.entry-points]" ) lc.entrypoints = proj['entry-points'] if 'scripts' in proj: _check_type(proj, 'scripts', dict) if not all(isinstance(k, str) for k in proj['scripts'].values()): raise ConfigError( "[projects.scripts] table should have string values" ) lc.entrypoints['console_scripts'] = proj['scripts'] if 'gui-scripts' in proj: _check_type(proj, 'gui-scripts', dict) if not all(isinstance(k, str) for k in proj['gui-scripts'].values()): raise ConfigError( "[projects.gui-scripts] table should have string values" ) lc.entrypoints['gui_scripts'] = proj['gui-scripts'] if 'dependencies' in proj: _check_list_of_str(proj, 'dependencies') reqs_noextra = proj['dependencies'] else: reqs_noextra = [] if 'optional-dependencies' in proj: _check_type(proj, 'optional-dependencies', dict) optdeps = proj['optional-dependencies'] if not all(isinstance(e, list) for e in optdeps.values()): raise ConfigError( 'Expected a dict of lists in optional-dependencies field' ) extra_names_by_normed = {} for e, reqs in optdeps.items(): if not all(isinstance(a, str) for a in reqs): raise ConfigError( 'Expected a string list for optional-dependencies ({})'.format(e) ) if not name_is_valid(e): 
raise ConfigError( f'optional-dependencies group name {e!r} is not valid' ) enorm = normalise_core_metadata_name(e) extra_names_by_normed.setdefault(enorm, set()).add(e) lc.reqs_by_extra[enorm] = reqs clashing_extra_names = [ g for g in extra_names_by_normed.values() if len(g) > 1 ] if clashing_extra_names: fmted = ['/'.join(sorted(g)) for g in clashing_extra_names] raise ConfigError( f"optional-dependencies group names clash: {'; '.join(fmted)}" ) md_dict['provides_extra'] = sorted(lc.reqs_by_extra.keys()) md_dict['requires_dist'] = \ reqs_noextra + list(_expand_requires_extra(lc.reqs_by_extra)) # For internal use, record the main requirements as a '.none' extra. if reqs_noextra: lc.reqs_by_extra['.none'] = reqs_noextra if 'dynamic' in proj: _check_list_of_str(proj, 'dynamic') dynamic = set(proj['dynamic']) unrec_dynamic = dynamic - {'version', 'description'} if unrec_dynamic: raise ConfigError( "flit only supports dynamic metadata for 'version' & 'description'" ) if dynamic.intersection(proj): raise ConfigError( "keys listed in project.dynamic must not be in [project] table" ) lc.dynamic_metadata = dynamic if ('version' not in proj) and ('version' not in lc.dynamic_metadata): raise ConfigError( "version must be specified under [project] or listed as a dynamic field" ) if ('description' not in proj) and ('description' not in lc.dynamic_metadata): raise ConfigError( "description must be specified under [project] or listed as a dynamic field" ) return lc def name_is_valid(name) -> bool: return bool(re.match( r"^([A-Z0-9]|[A-Z0-9][A-Z0-9._-]*[A-Z0-9])$", name, re.IGNORECASE )) def pep621_people(people, group_name='author') -> dict: """Convert authors/maintainers from PEP 621 to core metadata fields""" names, emails = [], [] for person in people: if not isinstance(person, dict): raise ConfigError("{} info must be list of dicts".format(group_name)) unrec_keys = set(person.keys()) - {'name', 'email'} if unrec_keys: raise ConfigError( "Unrecognised keys in {} info: 
{}".format(group_name, unrec_keys) ) if 'email' in person: email = person['email'] if 'name' in person: email = str(Address(person['name'], addr_spec=email)) emails.append(email) elif 'name' in person: names.append(person['name']) res = {} if names: res[group_name] = ", ".join(names) if emails: res[group_name + '_email'] = ", ".join(emails) return res def isabs_ish(path): """Like os.path.isabs(), but Windows paths from a drive root count as absolute isabs() worked this way up to Python 3.12 (inclusive), and where we reject absolute paths, we also want to reject these odd halfway paths. """ return os.path.isabs(path) or path.startswith(('/', '\\')) def normalise_compound_license_expr(s: str) -> str: """Validate and normalise a compund SPDX license expression. Per the specification, licence expression operators (AND, OR and WITH) are matched case-sensitively. The WITH operator is not currently supported. Spec: https://spdx.github.io/spdx-spec/v2.2.2/SPDX-license-expressions/ """ invalid_msg = "'{s}' is not a valid SPDX license expression: {reason}" if not s or s.isspace(): raise ConfigError(f"The SPDX license expression must not be empty") stack = 0 parts = [] try: for part in filter(None, re.split(r' +|([()])', s)): if part.upper() == 'WITH': # provide a sensible error message for the WITH operator raise ConfigError(f"The SPDX 'WITH' operator is not yet supported!") elif part in {'AND', 'OR'}: if not parts or parts[-1] in {' AND ', ' OR ', ' WITH ', '('}: reason = f"a license ID is missing before '{part}'" raise ConfigError(invalid_msg.format(s=s, reason=reason)) parts.append(f' {part} ') elif part.lower() in {'and', 'or', 'with'}: # provide a sensible error message for non-uppercase operators reason = f"operators must be uppercase, not '{part}'" raise ConfigError(invalid_msg.format(s=s, reason=reason)) elif part == '(': if parts and parts[-1] not in {' AND ', ' OR ', '('}: reason = f"'(' must follow either AND, OR, or '('" raise ConfigError(invalid_msg.format(s=s, 
reason=reason)) stack += 1 parts.append(part) elif part == ')': if not parts or parts[-1] in {' AND ', ' OR ', ' WITH ', '('}: reason = f"a license ID is missing before '{part}'" raise ConfigError(invalid_msg.format(s=s, reason=reason)) stack -= 1 if stack < 0: reason = 'unbalanced brackets' raise ConfigError(invalid_msg.format(s=s, reason=reason)) parts.append(part) else: if parts and parts[-1] not in {' AND ', ' OR ', '('}: reason = f"a license ID must follow either AND, OR, or '('" raise ConfigError(invalid_msg.format(s=s, reason=reason)) simple_expr = normalise_simple_license_expr(part) parts.append(simple_expr) if stack != 0: reason = 'unbalanced brackets' raise ConfigError(invalid_msg.format(s=s, reason=reason)) if parts[-1] in {' AND ', ' OR ', ' WITH '}: last_part = parts[-1].strip() reason = f"a license ID or expression should follow '{last_part}'" raise ConfigError(invalid_msg.format(s=s, reason=reason)) except ConfigError: if os.environ.get('FLIT_ALLOW_INVALID'): log.warning(f"Invalid license ID {s!r} allowed by FLIT_ALLOW_INVALID") return s raise return ''.join(parts) def normalise_simple_license_expr(s: str) -> str: """Normalise a simple SPDX license expression. https://spdx.github.io/spdx-spec/v2.2.2/SPDX-license-expressions/#d3-simple-license-expressions """ ls = s.lower() if ls.startswith('licenseref-'): ref = s[11:] if re.fullmatch(r'[a-zA-Z0-9\-.]+', ref): # Normalise case of LicenseRef, leave the rest alone return f"LicenseRef-{ref}" raise ConfigError( "LicenseRef- license expression can only contain ASCII letters " "& digits, - and ." 
) or_later = ls.endswith('+') if or_later: ls = ls[:-1] try: normalised_id = licenses[ls]['id'] except KeyError: raise ConfigError(f"{s!r} is not a recognised SPDX license ID") if or_later: return f'{normalised_id}+' return normalised_id ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5045574 flit-3.12.0/flit_core/flit_core/sdist.py0000644000000000000000000001610514770461472015114 0ustar00from collections import defaultdict from copy import copy from glob import glob from gzip import GzipFile import io import logging import os import os.path as osp from pathlib import Path from posixpath import join as pjoin import tarfile from . import common log = logging.getLogger(__name__) def clean_tarinfo(ti, mtime=None): """Clean metadata from a TarInfo object to make it more reproducible. - Set uid & gid to 0 - Set uname and gname to "" - Normalise permissions to 644 or 755 - Set mtime if not None """ ti = copy(ti) ti.uid = 0 ti.gid = 0 ti.uname = '' ti.gname = '' ti.mode = common.normalize_file_permissions(ti.mode) if mtime is not None: ti.mtime = mtime return ti class FilePatterns: """Manage a set of file inclusion/exclusion patterns relative to basedir""" def __init__(self, patterns, basedir): self.basedir = basedir self.dirs = set() self.files = set() for pattern in patterns: for path in sorted(glob(osp.join(basedir, pattern), recursive=True)): rel = osp.relpath(path, basedir) if osp.isdir(path): self.dirs.add(rel) else: self.files.add(rel) def match_file(self, rel_path): if rel_path in self.files: return True return any(rel_path.startswith(d + os.sep) for d in self.dirs) def match_dir(self, rel_path): if rel_path in self.dirs: return True # Check if it's a subdirectory of any directory in the list return any(rel_path.startswith(d + os.sep) for d in self.dirs) class SdistBuilder: """Builds a minimal sdist These minimal sdists should work for PEP 517. 
The class is extended in flit.sdist to make a more 'full fat' sdist, which is what should normally be published to PyPI. """ def __init__(self, module, metadata, cfgdir, reqs_by_extra, entrypoints, extra_files, data_directory, include_patterns=(), exclude_patterns=()): self.module = module self.metadata = metadata self.cfgdir = cfgdir self.reqs_by_extra = reqs_by_extra self.entrypoints = entrypoints self.extra_files = extra_files self.data_directory = data_directory self.includes = FilePatterns(include_patterns, str(cfgdir)) self.excludes = FilePatterns(exclude_patterns, str(cfgdir)) @classmethod def from_ini_path(cls, ini_path: Path): # Local import so bootstrapping doesn't try to load toml from .config import read_flit_config ini_info = read_flit_config(ini_path) srcdir = ini_path.parent module = common.Module(ini_info.module, srcdir) metadata = common.make_metadata(module, ini_info) extra_files = [ini_path.name] + ini_info.referenced_files return cls( module, metadata, srcdir, ini_info.reqs_by_extra, ini_info.entrypoints, extra_files, ini_info.data_directory, ini_info.sdist_include_patterns, ini_info.sdist_exclude_patterns, ) def prep_entry_points(self): # Reformat entry points from dict-of-dicts to dict-of-lists res = defaultdict(list) for groupname, group in self.entrypoints.items(): for name, ep in sorted(group.items()): res[groupname].append('{} = {}'.format(name, ep)) return dict(res) def select_files(self): """Pick which files from the source tree will be included in the sdist This is overridden in flit itself to use information from a VCS to include tests, docs, etc. for a 'gold standard' sdist. 
""" cfgdir_s = str(self.cfgdir) return [ osp.relpath(p, cfgdir_s) for p in self.module.iter_files() ] + [ osp.relpath(p, cfgdir_s) for p in common.walk_data_dir(self.data_directory) ] + self.extra_files def apply_includes_excludes(self, files): cfgdir_s = str(self.cfgdir) files = {f for f in files if not self.excludes.match_file(f)} for f_rel in self.includes.files: if not self.excludes.match_file(f_rel): files.add(f_rel) for rel_d in self.includes.dirs: for dirpath, dirs, dfiles in os.walk(osp.join(cfgdir_s, rel_d)): for file in dfiles: f_abs = osp.join(dirpath, file) f_rel = osp.relpath(f_abs, cfgdir_s) if not self.excludes.match_file(f_rel): files.add(f_rel) # Filter subdirectories before os.walk scans them dirs[:] = [d for d in dirs if not self.excludes.match_dir( osp.relpath(osp.join(dirpath, d), cfgdir_s) )] crucial_files = set( self.extra_files + [str(self.module.file.relative_to(self.cfgdir))] ) missing_crucial = crucial_files - files if missing_crucial: raise Exception("Crucial files were excluded from the sdist: {}" .format(", ".join(missing_crucial))) return sorted(files) def add_setup_py(self, files_to_add, target_tarfile): """No-op here; overridden in flit to generate setup.py""" pass @property def dir_name(self): return common.normalize_dist_name(self.metadata.name, self.metadata.version) def build(self, target_dir, gen_setup_py=True): os.makedirs(str(target_dir), exist_ok=True) target = target_dir / '{}.tar.gz'.format(self.dir_name) source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH', '') mtime = int(source_date_epoch) if source_date_epoch else None # For the gzip timestamp, default to 2016-1-1 00:00 (UTC) # This makes the sdist reproducible even without SOURCE_DATE_EPOCH, # if the source file mtimes don't change, i.e. from the same checkout. 
gz = GzipFile(str(target), mode='wb', mtime=(mtime or 1451606400)) tf = tarfile.TarFile(str(target), mode='w', fileobj=gz, format=tarfile.PAX_FORMAT) try: files_to_add = self.apply_includes_excludes(self.select_files()) for relpath in files_to_add: path = str(self.cfgdir / relpath) ti = tf.gettarinfo(path, arcname=pjoin(self.dir_name, relpath)) ti = clean_tarinfo(ti, mtime) if ti.isreg(): with open(path, 'rb') as f: tf.addfile(ti, f) else: tf.addfile(ti) # Symlinks & ? if gen_setup_py: self.add_setup_py(files_to_add, tf) stream = io.StringIO() self.metadata.write_metadata_file(stream) pkg_info = stream.getvalue().encode() ti = tarfile.TarInfo(pjoin(self.dir_name, 'PKG-INFO')) ti.size = len(pkg_info) tf.addfile(ti, io.BytesIO(pkg_info)) finally: tf.close() gz.close() log.info("Built sdist: %s", target) return target ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5045574 flit-3.12.0/flit_core/flit_core/vendor/README0000644000000000000000000000152014770461472015564 0ustar00flit_core bundles the 'tomli' TOML parser, to avoid a [bootstrapping problem](https://github.com/pypa/packaging-problems/issues/342). tomli is packaged using Flit, so there would be a dependency cycle when building from source. Vendoring a copy of tomli avoids this. The code in tomli is under the MIT license, and the LICENSE file is in the .dist-info folder. If you want to unbundle tomli and rely on it as a separate package, you can replace the package with Python code doing 'from tomli import *'. You will probably need to work around the dependency cycle between flit_core and tomli. Bundling a TOML parser should be a special case - I don't plan on bundling anything else in flit_core (or depending on any other packages). I hope that a TOML parser will be added to the Python standard library, and then this bundled parser will go away. 
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5045574 flit-3.12.0/flit_core/flit_core/vendor/__init__.py0000644000000000000000000000000014770461472017005 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5045574 flit-3.12.0/flit_core/flit_core/vendor/tomli-1.2.3.dist-info/LICENSE0000644000000000000000000000206014770461472021367 0ustar00MIT License Copyright (c) 2021 Taneli Hukkinen Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5045574 flit-3.12.0/flit_core/flit_core/vendor/tomli-1.2.3.dist-info/METADATA0000644000000000000000000002160014770461472021466 0ustar00Metadata-Version: 2.1 Name: tomli Version: 1.2.3 Summary: A lil' TOML parser Keywords: toml Author-email: Taneli Hukkinen Requires-Python: >=3.6 Description-Content-Type: text/markdown Classifier: License :: OSI Approved :: MIT License Classifier: Operating System :: MacOS Classifier: Operating System :: Microsoft :: Windows Classifier: Operating System :: POSIX :: Linux Classifier: Programming Language :: Python :: 3 :: Only Classifier: Programming Language :: Python :: 3.6 Classifier: Programming Language :: Python :: 3.7 Classifier: Programming Language :: Python :: 3.8 Classifier: Programming Language :: Python :: 3.9 Classifier: Programming Language :: Python :: 3.10 Classifier: Programming Language :: Python :: Implementation :: CPython Classifier: Programming Language :: Python :: Implementation :: PyPy Classifier: Topic :: Software Development :: Libraries :: Python Modules Classifier: Typing :: Typed Project-URL: Changelog, https://github.com/hukkin/tomli/blob/master/CHANGELOG.md Project-URL: Homepage, https://github.com/hukkin/tomli [![Build Status](https://github.com/hukkin/tomli/workflows/Tests/badge.svg?branch=master)](https://github.com/hukkin/tomli/actions?query=workflow%3ATests+branch%3Amaster+event%3Apush) [![codecov.io](https://codecov.io/gh/hukkin/tomli/branch/master/graph/badge.svg)](https://codecov.io/gh/hukkin/tomli) [![PyPI version](https://img.shields.io/pypi/v/tomli)](https://pypi.org/project/tomli) # Tomli > A lil' TOML parser **Table of Contents** *generated with [mdformat-toc](https://github.com/hukkin/mdformat-toc)* - [Intro](#intro) - [Installation](#installation) - [Usage](#usage) - [Parse a TOML string](#parse-a-toml-string) - [Parse a TOML file](#parse-a-toml-file) - [Handle invalid TOML](#handle-invalid-toml) 
- [Construct `decimal.Decimal`s from TOML floats](#construct-decimaldecimals-from-toml-floats) - [FAQ](#faq) - [Why this parser?](#why-this-parser) - [Is comment preserving round-trip parsing supported?](#is-comment-preserving-round-trip-parsing-supported) - [Is there a `dumps`, `write` or `encode` function?](#is-there-a-dumps-write-or-encode-function) - [How do TOML types map into Python types?](#how-do-toml-types-map-into-python-types) - [Performance](#performance) ## Intro Tomli is a Python library for parsing [TOML](https://toml.io). Tomli is fully compatible with [TOML v1.0.0](https://toml.io/en/v1.0.0). ## Installation ```bash pip install tomli ``` ## Usage ### Parse a TOML string ```python import tomli toml_str = """ gretzky = 99 [kurri] jari = 17 """ toml_dict = tomli.loads(toml_str) assert toml_dict == {"gretzky": 99, "kurri": {"jari": 17}} ``` ### Parse a TOML file ```python import tomli with open("path_to_file/conf.toml", "rb") as f: toml_dict = tomli.load(f) ``` The file must be opened in binary mode (with the `"rb"` flag). Binary mode will enforce decoding the file as UTF-8 with universal newlines disabled, both of which are required to correctly parse TOML. Support for text file objects is deprecated for removal in the next major release. ### Handle invalid TOML ```python import tomli try: toml_dict = tomli.loads("]] this is invalid TOML [[") except tomli.TOMLDecodeError: print("Yep, definitely not valid.") ``` Note that while the `TOMLDecodeError` type is public API, error messages of raised instances of it are not. Error messages should not be assumed to stay constant across Tomli versions. ### Construct `decimal.Decimal`s from TOML floats ```python from decimal import Decimal import tomli toml_dict = tomli.loads("precision-matters = 0.982492", parse_float=Decimal) assert toml_dict["precision-matters"] == Decimal("0.982492") ``` Note that `decimal.Decimal` can be replaced with another callable that converts a TOML float from string to a Python type. 
The `decimal.Decimal` is, however, a practical choice for use cases where float inaccuracies can not be tolerated. Illegal types include `dict`, `list`, and anything that has the `append` attribute. Parsing floats into an illegal type results in undefined behavior. ## FAQ ### Why this parser? - it's lil' - pure Python with zero dependencies - the fastest pure Python parser [\*](#performance): 15x as fast as [tomlkit](https://pypi.org/project/tomlkit/), 2.4x as fast as [toml](https://pypi.org/project/toml/) - outputs [basic data types](#how-do-toml-types-map-into-python-types) only - 100% spec compliant: passes all tests in [a test set](https://github.com/toml-lang/compliance/pull/8) soon to be merged to the official [compliance tests for TOML](https://github.com/toml-lang/compliance) repository - thoroughly tested: 100% branch coverage ### Is comment preserving round-trip parsing supported? No. The `tomli.loads` function returns a plain `dict` that is populated with builtin types and types from the standard library only. Preserving comments requires a custom type to be returned so will not be supported, at least not by the `tomli.loads` and `tomli.load` functions. Look into [TOML Kit](https://github.com/sdispater/tomlkit) if preservation of style is what you need. ### Is there a `dumps`, `write` or `encode` function? [Tomli-W](https://github.com/hukkin/tomli-w) is the write-only counterpart of Tomli, providing `dump` and `dumps` functions. The core library does not include write capability, as most TOML use cases are read-only, and Tomli intends to be minimal. ### How do TOML types map into Python types? 
| TOML type | Python type | Details | | ---------------- | ------------------- | ------------------------------------------------------------ | | Document Root | `dict` | | | Key | `str` | | | String | `str` | | | Integer | `int` | | | Float | `float` | | | Boolean | `bool` | | | Offset Date-Time | `datetime.datetime` | `tzinfo` attribute set to an instance of `datetime.timezone` | | Local Date-Time | `datetime.datetime` | `tzinfo` attribute set to `None` | | Local Date | `datetime.date` | | | Local Time | `datetime.time` | | | Array | `list` | | | Table | `dict` | | | Inline Table | `dict` | | ## Performance The `benchmark/` folder in this repository contains a performance benchmark for comparing the various Python TOML parsers. The benchmark can be run with `tox -e benchmark-pypi`. Running the benchmark on my personal computer output the following: ```console foo@bar:~/dev/tomli$ tox -e benchmark-pypi benchmark-pypi installed: attrs==19.3.0,click==7.1.2,pytomlpp==1.0.2,qtoml==0.3.0,rtoml==0.7.0,toml==0.10.2,tomli==1.1.0,tomlkit==0.7.2 benchmark-pypi run-test-pre: PYTHONHASHSEED='2658546909' benchmark-pypi run-test: commands[0] | python -c 'import datetime; print(datetime.date.today())' 2021-07-23 benchmark-pypi run-test: commands[1] | python --version Python 3.8.10 benchmark-pypi run-test: commands[2] | python benchmark/run.py Parsing data.toml 5000 times: ------------------------------------------------------ parser | exec time | performance (more is better) -----------+------------+----------------------------- rtoml | 0.901 s | baseline (100%) pytomlpp | 1.08 s | 83.15% tomli | 3.89 s | 23.15% toml | 9.36 s | 9.63% qtoml | 11.5 s | 7.82% tomlkit | 56.8 s | 1.59% ``` The parsers are ordered from fastest to slowest, using the fastest parser as baseline. Tomli performed the best out of all pure Python TOML parsers, losing only to pytomlpp (wraps C++) and rtoml (wraps Rust). 
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5045574 flit-3.12.0/flit_core/flit_core/vendor/tomli/__init__.py0000644000000000000000000000044614770461472020147 0ustar00"""A lil' TOML parser.""" __all__ = ("loads", "load", "TOMLDecodeError") __version__ = "1.2.3" # DO NOT EDIT THIS LINE MANUALLY. LET bump2version UTILITY DO IT from ._parser import TOMLDecodeError, load, loads # Pretend this exception was created here. TOMLDecodeError.__module__ = "tomli" ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5045574 flit-3.12.0/flit_core/flit_core/vendor/tomli/_parser.py0000644000000000000000000005222114770461472020041 0ustar00import string from types import MappingProxyType from typing import Any, BinaryIO, Dict, FrozenSet, Iterable, NamedTuple, Optional, Tuple import warnings from ._re import ( RE_DATETIME, RE_LOCALTIME, RE_NUMBER, match_to_datetime, match_to_localtime, match_to_number, ) from ._types import Key, ParseFloat, Pos ASCII_CTRL = frozenset(chr(i) for i in range(32)) | frozenset(chr(127)) # Neither of these sets include quotation mark or backslash. They are # currently handled as separate cases in the parser functions. 
ILLEGAL_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t") ILLEGAL_MULTILINE_BASIC_STR_CHARS = ASCII_CTRL - frozenset("\t\n") ILLEGAL_LITERAL_STR_CHARS = ILLEGAL_BASIC_STR_CHARS ILLEGAL_MULTILINE_LITERAL_STR_CHARS = ILLEGAL_MULTILINE_BASIC_STR_CHARS ILLEGAL_COMMENT_CHARS = ILLEGAL_BASIC_STR_CHARS TOML_WS = frozenset(" \t") TOML_WS_AND_NEWLINE = TOML_WS | frozenset("\n") BARE_KEY_CHARS = frozenset(string.ascii_letters + string.digits + "-_") KEY_INITIAL_CHARS = BARE_KEY_CHARS | frozenset("\"'") HEXDIGIT_CHARS = frozenset(string.hexdigits) BASIC_STR_ESCAPE_REPLACEMENTS = MappingProxyType( { "\\b": "\u0008", # backspace "\\t": "\u0009", # tab "\\n": "\u000A", # linefeed "\\f": "\u000C", # form feed "\\r": "\u000D", # carriage return '\\"': "\u0022", # quote "\\\\": "\u005C", # backslash } ) class TOMLDecodeError(ValueError): """An error raised if a document is not valid TOML.""" def load(fp: BinaryIO, *, parse_float: ParseFloat = float) -> Dict[str, Any]: """Parse TOML from a binary file object.""" s_bytes = fp.read() try: s = s_bytes.decode() except AttributeError: warnings.warn( "Text file object support is deprecated in favor of binary file objects." ' Use `open("foo.toml", "rb")` to open the file in binary mode.', DeprecationWarning, stacklevel=2, ) s = s_bytes # type: ignore[assignment] return loads(s, parse_float=parse_float) def loads(s: str, *, parse_float: ParseFloat = float) -> Dict[str, Any]: # noqa: C901 """Parse TOML from a string.""" # The spec allows converting "\r\n" to "\n", even in string # literals. Let's do so to simplify parsing. src = s.replace("\r\n", "\n") pos = 0 out = Output(NestedDict(), Flags()) header: Key = () # Parse one statement at a time # (typically means one line in TOML source) while True: # 1. Skip line leading whitespace pos = skip_chars(src, pos, TOML_WS) # 2. Parse rules. 
Expect one of the following: # - end of file # - end of line # - comment # - key/value pair # - append dict to list (and move to its namespace) # - create dict (and move to its namespace) # Skip trailing whitespace when applicable. try: char = src[pos] except IndexError: break if char == "\n": pos += 1 continue if char in KEY_INITIAL_CHARS: pos = key_value_rule(src, pos, out, header, parse_float) pos = skip_chars(src, pos, TOML_WS) elif char == "[": try: second_char: Optional[str] = src[pos + 1] except IndexError: second_char = None if second_char == "[": pos, header = create_list_rule(src, pos, out) else: pos, header = create_dict_rule(src, pos, out) pos = skip_chars(src, pos, TOML_WS) elif char != "#": raise suffixed_err(src, pos, "Invalid statement") # 3. Skip comment pos = skip_comment(src, pos) # 4. Expect end of line or end of file try: char = src[pos] except IndexError: break if char != "\n": raise suffixed_err( src, pos, "Expected newline or end of document after a statement" ) pos += 1 return out.data.dict class Flags: """Flags that map to parsed keys/namespaces.""" # Marks an immutable namespace (inline array or inline table). FROZEN = 0 # Marks a nest that has been explicitly created and can no longer # be opened using the "[table]" syntax. 
EXPLICIT_NEST = 1 def __init__(self) -> None: self._flags: Dict[str, dict] = {} def unset_all(self, key: Key) -> None: cont = self._flags for k in key[:-1]: if k not in cont: return cont = cont[k]["nested"] cont.pop(key[-1], None) def set_for_relative_key(self, head_key: Key, rel_key: Key, flag: int) -> None: cont = self._flags for k in head_key: if k not in cont: cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}} cont = cont[k]["nested"] for k in rel_key: if k in cont: cont[k]["flags"].add(flag) else: cont[k] = {"flags": {flag}, "recursive_flags": set(), "nested": {}} cont = cont[k]["nested"] def set(self, key: Key, flag: int, *, recursive: bool) -> None: # noqa: A003 cont = self._flags key_parent, key_stem = key[:-1], key[-1] for k in key_parent: if k not in cont: cont[k] = {"flags": set(), "recursive_flags": set(), "nested": {}} cont = cont[k]["nested"] if key_stem not in cont: cont[key_stem] = {"flags": set(), "recursive_flags": set(), "nested": {}} cont[key_stem]["recursive_flags" if recursive else "flags"].add(flag) def is_(self, key: Key, flag: int) -> bool: if not key: return False # document root has no flags cont = self._flags for k in key[:-1]: if k not in cont: return False inner_cont = cont[k] if flag in inner_cont["recursive_flags"]: return True cont = inner_cont["nested"] key_stem = key[-1] if key_stem in cont: cont = cont[key_stem] return flag in cont["flags"] or flag in cont["recursive_flags"] return False class NestedDict: def __init__(self) -> None: # The parsed content of the TOML document self.dict: Dict[str, Any] = {} def get_or_create_nest( self, key: Key, *, access_lists: bool = True, ) -> dict: cont: Any = self.dict for k in key: if k not in cont: cont[k] = {} cont = cont[k] if access_lists and isinstance(cont, list): cont = cont[-1] if not isinstance(cont, dict): raise KeyError("There is no nest behind this key") return cont def append_nest_to_list(self, key: Key) -> None: cont = self.get_or_create_nest(key[:-1]) last_key = 
key[-1] if last_key in cont: list_ = cont[last_key] try: list_.append({}) except AttributeError: raise KeyError("An object other than list found behind this key") else: cont[last_key] = [{}] class Output(NamedTuple): data: NestedDict flags: Flags def skip_chars(src: str, pos: Pos, chars: Iterable[str]) -> Pos: try: while src[pos] in chars: pos += 1 except IndexError: pass return pos def skip_until( src: str, pos: Pos, expect: str, *, error_on: FrozenSet[str], error_on_eof: bool, ) -> Pos: try: new_pos = src.index(expect, pos) except ValueError: new_pos = len(src) if error_on_eof: raise suffixed_err(src, new_pos, f"Expected {expect!r}") from None if not error_on.isdisjoint(src[pos:new_pos]): while src[pos] not in error_on: pos += 1 raise suffixed_err(src, pos, f"Found invalid character {src[pos]!r}") return new_pos def skip_comment(src: str, pos: Pos) -> Pos: try: char: Optional[str] = src[pos] except IndexError: char = None if char == "#": return skip_until( src, pos + 1, "\n", error_on=ILLEGAL_COMMENT_CHARS, error_on_eof=False ) return pos def skip_comments_and_array_ws(src: str, pos: Pos) -> Pos: while True: pos_before_skip = pos pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) pos = skip_comment(src, pos) if pos == pos_before_skip: return pos def create_dict_rule(src: str, pos: Pos, out: Output) -> Tuple[Pos, Key]: pos += 1 # Skip "[" pos = skip_chars(src, pos, TOML_WS) pos, key = parse_key(src, pos) if out.flags.is_(key, Flags.EXPLICIT_NEST) or out.flags.is_(key, Flags.FROZEN): raise suffixed_err(src, pos, f"Can not declare {key} twice") out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) try: out.data.get_or_create_nest(key) except KeyError: raise suffixed_err(src, pos, "Can not overwrite a value") from None if not src.startswith("]", pos): raise suffixed_err(src, pos, 'Expected "]" at the end of a table declaration') return pos + 1, key def create_list_rule(src: str, pos: Pos, out: Output) -> Tuple[Pos, Key]: pos += 2 # Skip "[[" pos = skip_chars(src, 
pos, TOML_WS) pos, key = parse_key(src, pos) if out.flags.is_(key, Flags.FROZEN): raise suffixed_err(src, pos, f"Can not mutate immutable namespace {key}") # Free the namespace now that it points to another empty list item... out.flags.unset_all(key) # ...but this key precisely is still prohibited from table declaration out.flags.set(key, Flags.EXPLICIT_NEST, recursive=False) try: out.data.append_nest_to_list(key) except KeyError: raise suffixed_err(src, pos, "Can not overwrite a value") from None if not src.startswith("]]", pos): raise suffixed_err(src, pos, 'Expected "]]" at the end of an array declaration') return pos + 2, key def key_value_rule( src: str, pos: Pos, out: Output, header: Key, parse_float: ParseFloat ) -> Pos: pos, key, value = parse_key_value_pair(src, pos, parse_float) key_parent, key_stem = key[:-1], key[-1] abs_key_parent = header + key_parent if out.flags.is_(abs_key_parent, Flags.FROZEN): raise suffixed_err( src, pos, f"Can not mutate immutable namespace {abs_key_parent}" ) # Containers in the relative path can't be opened with the table syntax after this out.flags.set_for_relative_key(header, key, Flags.EXPLICIT_NEST) try: nest = out.data.get_or_create_nest(abs_key_parent) except KeyError: raise suffixed_err(src, pos, "Can not overwrite a value") from None if key_stem in nest: raise suffixed_err(src, pos, "Can not overwrite a value") # Mark inline table and array namespaces recursively immutable if isinstance(value, (dict, list)): out.flags.set(header + key, Flags.FROZEN, recursive=True) nest[key_stem] = value return pos def parse_key_value_pair( src: str, pos: Pos, parse_float: ParseFloat ) -> Tuple[Pos, Key, Any]: pos, key = parse_key(src, pos) try: char: Optional[str] = src[pos] except IndexError: char = None if char != "=": raise suffixed_err(src, pos, 'Expected "=" after a key in a key/value pair') pos += 1 pos = skip_chars(src, pos, TOML_WS) pos, value = parse_value(src, pos, parse_float) return pos, key, value def parse_key(src: str, 
pos: Pos) -> Tuple[Pos, Key]: pos, key_part = parse_key_part(src, pos) key: Key = (key_part,) pos = skip_chars(src, pos, TOML_WS) while True: try: char: Optional[str] = src[pos] except IndexError: char = None if char != ".": return pos, key pos += 1 pos = skip_chars(src, pos, TOML_WS) pos, key_part = parse_key_part(src, pos) key += (key_part,) pos = skip_chars(src, pos, TOML_WS) def parse_key_part(src: str, pos: Pos) -> Tuple[Pos, str]: try: char: Optional[str] = src[pos] except IndexError: char = None if char in BARE_KEY_CHARS: start_pos = pos pos = skip_chars(src, pos, BARE_KEY_CHARS) return pos, src[start_pos:pos] if char == "'": return parse_literal_str(src, pos) if char == '"': return parse_one_line_basic_str(src, pos) raise suffixed_err(src, pos, "Invalid initial character for a key part") def parse_one_line_basic_str(src: str, pos: Pos) -> Tuple[Pos, str]: pos += 1 return parse_basic_str(src, pos, multiline=False) def parse_array(src: str, pos: Pos, parse_float: ParseFloat) -> Tuple[Pos, list]: pos += 1 array: list = [] pos = skip_comments_and_array_ws(src, pos) if src.startswith("]", pos): return pos + 1, array while True: pos, val = parse_value(src, pos, parse_float) array.append(val) pos = skip_comments_and_array_ws(src, pos) c = src[pos : pos + 1] if c == "]": return pos + 1, array if c != ",": raise suffixed_err(src, pos, "Unclosed array") pos += 1 pos = skip_comments_and_array_ws(src, pos) if src.startswith("]", pos): return pos + 1, array def parse_inline_table(src: str, pos: Pos, parse_float: ParseFloat) -> Tuple[Pos, dict]: pos += 1 nested_dict = NestedDict() flags = Flags() pos = skip_chars(src, pos, TOML_WS) if src.startswith("}", pos): return pos + 1, nested_dict.dict while True: pos, key, value = parse_key_value_pair(src, pos, parse_float) key_parent, key_stem = key[:-1], key[-1] if flags.is_(key, Flags.FROZEN): raise suffixed_err(src, pos, f"Can not mutate immutable namespace {key}") try: nest = nested_dict.get_or_create_nest(key_parent, 
access_lists=False) except KeyError: raise suffixed_err(src, pos, "Can not overwrite a value") from None if key_stem in nest: raise suffixed_err(src, pos, f"Duplicate inline table key {key_stem!r}") nest[key_stem] = value pos = skip_chars(src, pos, TOML_WS) c = src[pos : pos + 1] if c == "}": return pos + 1, nested_dict.dict if c != ",": raise suffixed_err(src, pos, "Unclosed inline table") if isinstance(value, (dict, list)): flags.set(key, Flags.FROZEN, recursive=True) pos += 1 pos = skip_chars(src, pos, TOML_WS) def parse_basic_str_escape( # noqa: C901 src: str, pos: Pos, *, multiline: bool = False ) -> Tuple[Pos, str]: escape_id = src[pos : pos + 2] pos += 2 if multiline and escape_id in {"\\ ", "\\\t", "\\\n"}: # Skip whitespace until next non-whitespace character or end of # the doc. Error if non-whitespace is found before newline. if escape_id != "\\\n": pos = skip_chars(src, pos, TOML_WS) try: char = src[pos] except IndexError: return pos, "" if char != "\n": raise suffixed_err(src, pos, 'Unescaped "\\" in a string') pos += 1 pos = skip_chars(src, pos, TOML_WS_AND_NEWLINE) return pos, "" if escape_id == "\\u": return parse_hex_char(src, pos, 4) if escape_id == "\\U": return parse_hex_char(src, pos, 8) try: return pos, BASIC_STR_ESCAPE_REPLACEMENTS[escape_id] except KeyError: if len(escape_id) != 2: raise suffixed_err(src, pos, "Unterminated string") from None raise suffixed_err(src, pos, 'Unescaped "\\" in a string') from None def parse_basic_str_escape_multiline(src: str, pos: Pos) -> Tuple[Pos, str]: return parse_basic_str_escape(src, pos, multiline=True) def parse_hex_char(src: str, pos: Pos, hex_len: int) -> Tuple[Pos, str]: hex_str = src[pos : pos + hex_len] if len(hex_str) != hex_len or not HEXDIGIT_CHARS.issuperset(hex_str): raise suffixed_err(src, pos, "Invalid hex value") pos += hex_len hex_int = int(hex_str, 16) if not is_unicode_scalar_value(hex_int): raise suffixed_err(src, pos, "Escaped character is not a Unicode scalar value") return pos, 
chr(hex_int) def parse_literal_str(src: str, pos: Pos) -> Tuple[Pos, str]: pos += 1 # Skip starting apostrophe start_pos = pos pos = skip_until( src, pos, "'", error_on=ILLEGAL_LITERAL_STR_CHARS, error_on_eof=True ) return pos + 1, src[start_pos:pos] # Skip ending apostrophe def parse_multiline_str(src: str, pos: Pos, *, literal: bool) -> Tuple[Pos, str]: pos += 3 if src.startswith("\n", pos): pos += 1 if literal: delim = "'" end_pos = skip_until( src, pos, "'''", error_on=ILLEGAL_MULTILINE_LITERAL_STR_CHARS, error_on_eof=True, ) result = src[pos:end_pos] pos = end_pos + 3 else: delim = '"' pos, result = parse_basic_str(src, pos, multiline=True) # Add at maximum two extra apostrophes/quotes if the end sequence # is 4 or 5 chars long instead of just 3. if not src.startswith(delim, pos): return pos, result pos += 1 if not src.startswith(delim, pos): return pos, result + delim pos += 1 return pos, result + (delim * 2) def parse_basic_str(src: str, pos: Pos, *, multiline: bool) -> Tuple[Pos, str]: if multiline: error_on = ILLEGAL_MULTILINE_BASIC_STR_CHARS parse_escapes = parse_basic_str_escape_multiline else: error_on = ILLEGAL_BASIC_STR_CHARS parse_escapes = parse_basic_str_escape result = "" start_pos = pos while True: try: char = src[pos] except IndexError: raise suffixed_err(src, pos, "Unterminated string") from None if char == '"': if not multiline: return pos + 1, result + src[start_pos:pos] if src.startswith('"""', pos): return pos + 3, result + src[start_pos:pos] pos += 1 continue if char == "\\": result += src[start_pos:pos] pos, parsed_escape = parse_escapes(src, pos) result += parsed_escape start_pos = pos continue if char in error_on: raise suffixed_err(src, pos, f"Illegal character {char!r}") pos += 1 def parse_value( # noqa: C901 src: str, pos: Pos, parse_float: ParseFloat ) -> Tuple[Pos, Any]: try: char: Optional[str] = src[pos] except IndexError: char = None # Basic strings if char == '"': if src.startswith('"""', pos): return parse_multiline_str(src, 
pos, literal=False) return parse_one_line_basic_str(src, pos) # Literal strings if char == "'": if src.startswith("'''", pos): return parse_multiline_str(src, pos, literal=True) return parse_literal_str(src, pos) # Booleans if char == "t": if src.startswith("true", pos): return pos + 4, True if char == "f": if src.startswith("false", pos): return pos + 5, False # Dates and times datetime_match = RE_DATETIME.match(src, pos) if datetime_match: try: datetime_obj = match_to_datetime(datetime_match) except ValueError as e: raise suffixed_err(src, pos, "Invalid date or datetime") from e return datetime_match.end(), datetime_obj localtime_match = RE_LOCALTIME.match(src, pos) if localtime_match: return localtime_match.end(), match_to_localtime(localtime_match) # Integers and "normal" floats. # The regex will greedily match any type starting with a decimal # char, so needs to be located after handling of dates and times. number_match = RE_NUMBER.match(src, pos) if number_match: return number_match.end(), match_to_number(number_match, parse_float) # Arrays if char == "[": return parse_array(src, pos, parse_float) # Inline tables if char == "{": return parse_inline_table(src, pos, parse_float) # Special floats first_three = src[pos : pos + 3] if first_three in {"inf", "nan"}: return pos + 3, parse_float(first_three) first_four = src[pos : pos + 4] if first_four in {"-inf", "+inf", "-nan", "+nan"}: return pos + 4, parse_float(first_four) raise suffixed_err(src, pos, "Invalid value") def suffixed_err(src: str, pos: Pos, msg: str) -> TOMLDecodeError: """Return a `TOMLDecodeError` where error message is suffixed with coordinates in source.""" def coord_repr(src: str, pos: Pos) -> str: if pos >= len(src): return "end of document" line = src.count("\n", 0, pos) + 1 if line == 1: column = pos + 1 else: column = pos - src.rindex("\n", 0, pos) return f"line {line}, column {column}" return TOMLDecodeError(f"{msg} (at {coord_repr(src, pos)})") def is_unicode_scalar_value(codepoint: int) 
-> bool: return (0 <= codepoint <= 55295) or (57344 <= codepoint <= 1114111) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5045574 flit-3.12.0/flit_core/flit_core/vendor/tomli/_re.py0000644000000000000000000000537514770461472017163 0ustar00from datetime import date, datetime, time, timedelta, timezone, tzinfo from functools import lru_cache import re from typing import Any, Optional, Union from ._types import ParseFloat # E.g. # - 00:32:00.999999 # - 00:32:00 _TIME_RE_STR = r"([01][0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])(?:\.([0-9]{1,6})[0-9]*)?" RE_NUMBER = re.compile( r""" 0 (?: x[0-9A-Fa-f](?:_?[0-9A-Fa-f])* # hex | b[01](?:_?[01])* # bin | o[0-7](?:_?[0-7])* # oct ) | [+-]?(?:0|[1-9](?:_?[0-9])*) # dec, integer part (?P (?:\.[0-9](?:_?[0-9])*)? # optional fractional part (?:[eE][+-]?[0-9](?:_?[0-9])*)? # optional exponent part ) """, flags=re.VERBOSE, ) RE_LOCALTIME = re.compile(_TIME_RE_STR) RE_DATETIME = re.compile( fr""" ([0-9]{{4}})-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01]) # date, e.g. 1988-10-27 (?: [Tt ] {_TIME_RE_STR} (?:([Zz])|([+-])([01][0-9]|2[0-3]):([0-5][0-9]))? # optional time offset )? """, flags=re.VERBOSE, ) def match_to_datetime(match: "re.Match") -> Union[datetime, date]: """Convert a `RE_DATETIME` match to `datetime.datetime` or `datetime.date`. Raises ValueError if the match does not correspond to a valid date or datetime. 
""" ( year_str, month_str, day_str, hour_str, minute_str, sec_str, micros_str, zulu_time, offset_sign_str, offset_hour_str, offset_minute_str, ) = match.groups() year, month, day = int(year_str), int(month_str), int(day_str) if hour_str is None: return date(year, month, day) hour, minute, sec = int(hour_str), int(minute_str), int(sec_str) micros = int(micros_str.ljust(6, "0")) if micros_str else 0 if offset_sign_str: tz: Optional[tzinfo] = cached_tz( offset_hour_str, offset_minute_str, offset_sign_str ) elif zulu_time: tz = timezone.utc else: # local date-time tz = None return datetime(year, month, day, hour, minute, sec, micros, tzinfo=tz) @lru_cache(maxsize=None) def cached_tz(hour_str: str, minute_str: str, sign_str: str) -> timezone: sign = 1 if sign_str == "+" else -1 return timezone( timedelta( hours=sign * int(hour_str), minutes=sign * int(minute_str), ) ) def match_to_localtime(match: "re.Match") -> time: hour_str, minute_str, sec_str, micros_str = match.groups() micros = int(micros_str.ljust(6, "0")) if micros_str else 0 return time(int(hour_str), int(minute_str), int(sec_str), micros) def match_to_number(match: "re.Match", parse_float: "ParseFloat") -> Any: if match.group("floatpart"): return parse_float(match.group()) return int(match.group(), 0) ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5045574 flit-3.12.0/flit_core/flit_core/vendor/tomli/_types.py0000644000000000000000000000017614770461472017713 0ustar00from typing import Any, Callable, Tuple # Type annotations ParseFloat = Callable[[str], Any] Key = Tuple[str, ...] 
Pos = int ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5045574 flit-3.12.0/flit_core/flit_core/vendor/tomli/py.typed0000644000000000000000000000003214770461472017524 0ustar00# Marker file for PEP 561 ././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5045574 flit-3.12.0/flit_core/flit_core/versionno.py0000644000000000000000000001125414770461472016010 0ustar00"""Normalise version number according to PEP 440""" import logging import os import re log = logging.getLogger(__name__) # Regex below from packaging, via PEP 440. BSD License: # Copyright (c) Donald Stufft and individual contributors. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are met: # # 1. Redistributions of source code must retain the above copyright notice, # this list of conditions and the following disclaimer. # # 2. Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND # ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED # WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE # DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE # FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL # DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR # SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, # OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
VERSION_PERMISSIVE = re.compile(r""" \s*v? (?: (?:(?P[0-9]+)!)? # epoch (?P[0-9]+(?:\.[0-9]+)*) # release segment (?P
                                          # pre-release
            [-_\.]?
            (?P(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P[0-9]+)?
        )?
        (?P                                         # post release
            (?:-(?P[0-9]+))
            |
            (?:
                [-_\.]?
                (?Ppost|rev|r)
                [-_\.]?
                (?P[0-9]+)?
            )
        )?
        (?P                                          # dev release
            [-_\.]?
            (?Pdev)
            [-_\.]?
            (?P[0-9]+)?
        )?
    )
    (?:\+(?P[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
\s*$""", re.VERBOSE)

# Canonical PEP 440 spellings for pre-release labels: every accepted
# alias maps to one of the normalised forms 'a', 'b' or 'rc'.
pre_spellings = {
    'a': 'a', 'alpha': 'a',
    'b': 'b', 'beta': 'b',
    'rc': 'rc', 'c': 'rc', 'pre': 'rc', 'preview': 'rc',
}

def normalise_version(orig_version):
    """Normalise version number according to rules in PEP 440

    Raises InvalidVersion if the version does not match PEP 440. This can be
    overridden with the FLIT_ALLOW_INVALID environment variable.

    https://www.python.org/dev/peps/pep-0440/#normalization
    """
    lowered = orig_version.lower()
    match = VERSION_PERMISSIVE.match(lowered)
    if match is None:
        # Not PEP 440 at all: either let it through (escape hatch) or refuse.
        if os.environ.get('FLIT_ALLOW_INVALID'):
            log.warning("Invalid version number {!r} allowed by FLIT_ALLOW_INVALID"
                        .format(orig_version))
            return lowered
        from .common import InvalidVersion
        raise InvalidVersion("Version number {!r} does not match PEP 440 rules"
                             .format(orig_version))

    # Build the normalised version piece by piece.
    parts = []

    epoch, release = match.group('epoch', 'release')
    if epoch is not None:
        # Epoch: strip leading zeros, keep the '!' separator.
        parts.append(str(int(epoch)) + '!')
    # Release segment: strip leading zeros from each numeric component.
    parts.append('.'.join(str(int(seg)) for seg in release.split('.')))

    pre_l, pre_n = match.group('pre_l', 'pre_n')
    if pre_l is not None:
        # Pre-release: canonical label (a/b/rc) directly followed by a number,
        # defaulting to 0 when no number was given.
        label = pre_spellings[pre_l]
        number = '0' if pre_n is None else str(int(pre_n))
        parts.append(label + number)

    post_n1, post_l, post_n2 = match.group('post_n1', 'post_l', 'post_n2')
    if post_n1 is not None:
        # Implicit post release spelled "-N"
        parts.append('.post' + str(int(post_n1)))
    elif post_l is not None:
        # Explicit post/rev/r label, number defaults to 0.
        number = '0' if post_n2 is None else str(int(post_n2))
        parts.append('.post' + number)

    dev_l, dev_n = match.group('dev_l', 'dev_n')
    if dev_l is not None:
        # Dev release: always spelled ".devN", number defaults to 0.
        parts.append('.dev' + ('0' if dev_n is None else str(int(dev_n))))

    local = match.group('local')
    if local is not None:
        # Local version: '-' and '_' become '.', numeric parts lose zeros.
        segments = local.replace('-', '.').replace('_', '.').split('.')
        normalised = [str(int(seg)) if seg.isdigit() else seg
                      for seg in segments]
        parts.append('+' + '.'.join(normalised))

    version = ''.join(parts)
    if version != orig_version:
        log.warning("Version number normalised: {!r} -> {!r} (see PEP 440)"
                    .format(orig_version, version))
    return version
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5045574
flit-3.12.0/flit_core/flit_core/wheel.py0000644000000000000000000002205314770461472015071 0ustar00import argparse
from base64 import urlsafe_b64encode
import contextlib
from datetime import datetime, timezone
import hashlib
import io
import logging
import os
import os.path as osp
import stat
import tempfile
from pathlib import Path
from types import SimpleNamespace
from typing import Optional
import zipfile

from flit_core import __version__
from . import common

# Module-level logger for wheel-building messages.
log = logging.getLogger(__name__)

# Shared header of the WHEEL metadata file. Tag lines are appended later by
# _write_wheel_file(); everything flit builds is pure Python (purelib).
wheel_file_template = u"""\
Wheel-Version: 1.0
Generator: flit {version}
Root-Is-Purelib: true
""".format(version=__version__)

def _write_wheel_file(f, supports_py2=False):
    """Write the contents of the WHEEL metadata file to ``f``.

    Emits the shared header from ``wheel_file_template`` followed by the
    compatibility tag lines; a py2 tag is included only when requested.
    """
    tag_lines = [u"Tag: py2-none-any\n"] if supports_py2 else []
    tag_lines.append(u"Tag: py3-none-any\n")
    f.write(wheel_file_template)
    for line in tag_lines:
        f.write(line)


def _set_zinfo_mode(zinfo, mode):
    # Set the bits for the mode
    zinfo.external_attr = mode << 16


def zip_timestamp_from_env() -> Optional[tuple]:
    """Prepare a timestamp from $SOURCE_DATE_EPOCH, if set"""
    try:
        # If SOURCE_DATE_EPOCH is set (e.g. by Debian), it's used for
        # timestamps inside the zip file.
        stamp = datetime.fromtimestamp(
            int(os.environ['SOURCE_DATE_EPOCH']), timezone.utc
        )
    except (KeyError, ValueError):
        # Otherwise, we'll use the mtime of files, and generated files will
        # default to 2016-1-1 00:00:00
        return None

    if stamp.year < 1980:
        # The zip format cannot represent timestamps before 1980; clamp.
        log.info("SOURCE_DATE_EPOCH is below the minimum for zip file timestamps")
        log.info("Zip timestamps will be 1980-01-01 00:00:00")
        return 1980, 1, 1, 0, 0, 0

    log.info("Zip timestamps will be from SOURCE_DATE_EPOCH: %s", stamp)
    # zipfile expects a 6-tuple, not a datetime object
    return stamp.year, stamp.month, stamp.day, stamp.hour, stamp.minute, stamp.second


class WheelBuilder:
    """Writes a wheel (zip) file for a flit project, recording every entry
    for the RECORD file as it goes. Call :meth:`build` to produce the wheel.
    """
    def __init__(
            self, directory, module, metadata, entrypoints, target_fp, data_directory
    ):
        """Build a wheel from a module/package
        """
        self.directory = directory
        self.module = module
        self.metadata = metadata
        self.entrypoints = entrypoints
        self.data_directory = data_directory

        # (rel_path, sha256 digest, size) tuples for the RECORD file
        self.records = []
        # 6-tuple from SOURCE_DATE_EPOCH for reproducible builds, or None
        self.source_time_stamp = zip_timestamp_from_env()

        # Open the zip file ready to write
        self.wheel_zip = zipfile.ZipFile(target_fp, 'w',
                             compression=zipfile.ZIP_DEFLATED)

    @classmethod
    def from_ini_path(cls, ini_path, target_fp):
        """Construct a WheelBuilder from a pyproject.toml path and an open
        binary file object to write the wheel into."""
        from .config import read_flit_config
        directory = ini_path.parent
        ini_info = read_flit_config(ini_path)
        entrypoints = ini_info.entrypoints
        module = common.Module(ini_info.module, directory)
        metadata = common.make_metadata(module, ini_info)
        return cls(
            directory, module, metadata, entrypoints, target_fp, ini_info.data_directory
        )

    @property
    def dist_info(self):
        """Name of the .dist-info directory inside the wheel."""
        return common.dist_info_name(self.metadata.name, self.metadata.version)

    @property
    def wheel_filename(self):
        """Filename for the wheel, e.g. ``pkg-1.0-py3-none-any.whl``."""
        dist_name = common.normalize_dist_name(self.metadata.name, self.metadata.version)
        tag = ('py2.' if self.metadata.supports_py2 else '') + 'py3-none-any'
        return '{}-{}.whl'.format(dist_name, tag)

    def _add_file(self, full_path, rel_path):
        """Copy a file from disk into the wheel at rel_path, hashing it for
        RECORD and normalising timestamps and permission bits."""
        log.debug("Adding %s to zip file", full_path)
        full_path, rel_path = str(full_path), str(rel_path)
        if os.sep != '/':
            # We always want to have /-separated paths in the zip file and in
            # RECORD
            rel_path = rel_path.replace(os.sep, '/')

        if self.source_time_stamp is None:
            zinfo = zipfile.ZipInfo.from_file(full_path, rel_path)
        else:
            # Set timestamps in zipfile for reproducible build
            zinfo = zipfile.ZipInfo(rel_path, self.source_time_stamp)

        # Normalize permission bits to either 755 (executable) or 644
        st_mode = os.stat(full_path).st_mode
        new_mode = common.normalize_file_permissions(st_mode)
        _set_zinfo_mode(zinfo, new_mode & 0xFFFF)  # Unix attributes

        if stat.S_ISDIR(st_mode):
            zinfo.external_attr |= 0x10  # MS-DOS directory flag

        zinfo.compress_type = zipfile.ZIP_DEFLATED

        # Stream the file into the zip in 8 KiB chunks, hashing as we go so
        # the file is only read once.
        hashsum = hashlib.sha256()
        with open(full_path, 'rb') as src, self.wheel_zip.open(zinfo, 'w') as dst:
            while True:
                buf = src.read(1024 * 8)
                if not buf:
                    break
                hashsum.update(buf)
                dst.write(buf)

        size = os.stat(full_path).st_size
        # RECORD uses urlsafe base64 of the sha256 digest, without '=' padding
        hash_digest = urlsafe_b64encode(hashsum.digest()).decode('ascii').rstrip('=')
        self.records.append((rel_path, hash_digest, size))

    @contextlib.contextmanager
    def _write_to_zip(self, rel_path, mode=0o644):
        """Context manager yielding a text buffer; on exit, its contents are
        written into the wheel at rel_path and recorded for RECORD."""
        sio = io.StringIO()
        yield sio

        log.debug("Writing data to %s in zip file", rel_path)
        # The default is a fixed timestamp rather than the current time, so
        # that building a wheel twice on the same computer can automatically
        # give you the exact same result.
        date_time = self.source_time_stamp or (2016, 1, 1, 0, 0, 0)
        zi = zipfile.ZipInfo(rel_path, date_time)
        # Also sets bit 0x8000 for "regular file" (S_IFREG)
        _set_zinfo_mode(zi, mode | stat.S_IFREG)
        b = sio.getvalue().encode('utf-8')
        hashsum = hashlib.sha256(b)
        hash_digest = urlsafe_b64encode(hashsum.digest()).decode('ascii').rstrip('=')
        self.wheel_zip.writestr(zi, b, compress_type=zipfile.ZIP_DEFLATED)
        self.records.append((rel_path, hash_digest, len(b)))

    def copy_module(self):
        """Add all of the module/package's files to the wheel."""
        log.info('Copying package file(s) from %s', self.module.path)
        source_dir = str(self.module.source_dir)

        for full_path in self.module.iter_files():
            rel_path = osp.relpath(full_path, source_dir)
            self._add_file(full_path, rel_path)

    def add_pth(self):
        """Write a .pth file pointing at the source directory (for editable
        installs) instead of copying the module."""
        with self._write_to_zip(self.module.name + ".pth") as f:
            f.write(str(self.module.source_dir.resolve()))

    def add_data_directory(self):
        """Add files from the external data directory under
        ``<dist>.data/data/`` in the wheel."""
        dir_in_whl = '{}.data/data/'.format(
            common.normalize_dist_name(self.metadata.name, self.metadata.version)
        )
        for full_path in common.walk_data_dir(self.data_directory):
            rel_path = os.path.relpath(full_path, self.data_directory)
            self._add_file(full_path, dir_in_whl + rel_path)

    def write_metadata(self):
        """Write the .dist-info files: entry_points.txt (if any), license
        files, WHEEL and METADATA."""
        log.info('Writing metadata files')

        if self.entrypoints:
            with self._write_to_zip(self.dist_info + '/entry_points.txt') as f:
                common.write_entry_points(self.entrypoints, f)

        for file in self.metadata.license_files:
            self._add_file(self.directory / file, '%s/licenses/%s' % (self.dist_info, file))

        with self._write_to_zip(self.dist_info + '/WHEEL') as f:
            _write_wheel_file(f, supports_py2=self.metadata.supports_py2)

        with self._write_to_zip(self.dist_info + '/METADATA') as f:
            self.metadata.write_metadata_file(f)

    def write_record(self):
        """Write the RECORD file listing every entry added to the wheel."""
        log.info('Writing the record of files')
        # Write a record of the files in the wheel
        with self._write_to_zip(self.dist_info + '/RECORD') as f:
            for path, hash, size in self.records:
                f.write(u'{},sha256={},{}\n'.format(path, hash, size))
            # RECORD itself is recorded with no hash or size
            f.write(self.dist_info + '/RECORD,,\n')

    def build(self, editable=False):
        """Assemble the wheel: module files (or a .pth for editable builds),
        data directory, metadata, then RECORD. Always closes the zip."""
        try:
            if editable:
                self.add_pth()
            else:
                self.copy_module()
            self.add_data_directory()
            self.write_metadata()
            self.write_record()
        finally:
            self.wheel_zip.close()

def make_wheel_in(ini_path, wheel_directory, editable=False):
    """Build a wheel for the project described by *ini_path*.

    Parameters
    ----------
    ini_path : Path
        Path to the project's pyproject.toml (or legacy flit.ini).
    wheel_directory : Path
        Directory the finished wheel is placed in.
    editable : bool
        If True, build an editable (.pth-based) wheel.

    Returns
    -------
    SimpleNamespace
        With ``builder`` (the WheelBuilder used) and ``file`` (the final
        wheel path).
    """
    # We don't know the final filename until metadata is loaded, so write to
    # a temporary file, and rename it afterwards.
    (fd, temp_path) = tempfile.mkstemp(suffix='.whl', dir=str(wheel_directory))
    try:
        with open(fd, 'w+b') as fp:
            wb = WheelBuilder.from_ini_path(ini_path, fp)
            wb.build(editable)

        wheel_path = wheel_directory / wb.wheel_filename
        os.replace(temp_path, str(wheel_path))
    except BaseException:
        # Deliberately catch everything (including KeyboardInterrupt) so the
        # temp file never leaks; the exception is re-raised unchanged.
        os.unlink(temp_path)
        raise

    log.info("Built wheel: %s", wheel_path)
    return SimpleNamespace(builder=wb, file=wheel_path)


def main(argv=None):
    """Command-line entry point: build a wheel from a source directory."""
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'srcdir',
        type=Path,
        nargs='?',
        default=Path.cwd(),
        help='source directory (defaults to current directory)',
    )

    parser.add_argument(
        '--outdir',
        '-o',
        help='output directory (defaults to {srcdir}/dist)',
    )
    args = parser.parse_args(argv)
    if args.outdir is None:
        outdir = args.srcdir / 'dist'
    else:
        outdir = Path(args.outdir)
    print("Building wheel from", args.srcdir)
    pyproj_toml = args.srcdir / 'pyproject.toml'
    outdir.mkdir(parents=True, exist_ok=True)
    info = make_wheel_in(pyproj_toml, outdir)
    print("Wheel built", outdir / info.file.name)

if __name__ == "__main__":
    main()
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5045574
flit-3.12.0/flit_core/pyproject.toml0000644000000000000000000000136114770461472014360 0ustar00[build-system]
requires = []
build-backend = "flit_core.buildapi"
backend-path = ["."]

[project]
name="flit_core"
authors=[
    {name = "Thomas Kluyver & contributors", email = "thomas@kluyver.me.uk"},
]
description = "Distribution-building parts of Flit. See flit package for more information"
dependencies = []
requires-python = '>=3.6'
readme = "README.rst"
license = "BSD-3-Clause"
license-files = ["LICENSE*", "flit_core/vendor/**/LICENSE*"]
classifiers = [
    "Topic :: Software Development :: Libraries :: Python Modules",
]
dynamic = ["version"]

[project.urls]
Documentation = "https://flit.pypa.io"
Source = "https://github.com/pypa/flit"

[tool.flit.sdist]
include = [
    "bootstrap_install.py",
    "build_dists.py",
    "tests_core/",
]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5045574
flit-3.12.0/flit_core/tests_core/__init__.py0000644000000000000000000000000014770461472015714 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5045574
flit-3.12.0/flit_core/tests_core/samples/EG_README.rst0000644000000000000000000000024514770461472017324 0ustar00This is an example long description for tests to load.

This file is `valid reStructuredText
`_.
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/annotated_version/module1.py0000644000000000000000000000013214770461472022717 0ustar00
"""This module has a __version__ that has a type annotation"""

__version__: str = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/annotated_version/pyproject.toml0000644000000000000000000000041214770461472023714 0ustar00[build-system]
requires = ["flit_core >=2,<4"]
build-backend = "flit_core.buildapi"

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
requires = [
    "numpy >=1.16.0",
]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/bad-description-ext.toml0000644000000000000000000000033514770461472022024 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
description-file = "module1.py"  # WRONG
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/conflicting_modules/module1.py0000644000000000000000000000000014770461472023216 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/conflicting_modules/pyproject.toml0000644000000000000000000000026414770461472024226 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/conflicting_modules/src/module1.py0000644000000000000000000000000014770461472024005 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/constructed_version/module1.py0000644000000000000000000000016314770461472023303 0ustar00
"""This module has a __version__ that requires runtime interpretation"""

__version__ = ".".join(["1", "2", "3"])
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/constructed_version/pyproject.toml0000644000000000000000000000041214770461472024274 0ustar00[build-system]
requires = ["flit_core >=2,<4"]
build-backend = "flit_core.buildapi"

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
requires = [
    "numpy >=1.16.0",
]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/extras-dev-conflict.toml0000644000000000000000000000046114770461472022040 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
description-file = "EG_README.rst"
dev-requires = ["apackage"]

[tool.flit.metadata.requires-extra]
dev = ["anotherpackage"]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/extras-newstyle.toml0000644000000000000000000000044014770461472021332 0ustar00[build-system]
requires = ["flit_core >=2,<4"]
build-backend = "flit_core.buildapi"

[project]
name = "module1"
version = "0.1"
description = "Example for testing"
dependencies = ["toml"]

[project.optional-dependencies]
test = ["pytest"]
cus__Tom = ["requests"]  # To test normalisation
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/extras.toml0000644000000000000000000000060414770461472017464 0ustar00[build-system]
requires = ["flit_core >=2,<4"]
build-backend = "flit_core.buildapi"

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
description-file = "EG_README.rst"
requires = ["toml"]

[tool.flit.metadata.requires-extra]
test = ["pytest"]
cus__Tom = ["requests"]  # To test normalisation
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/imported_version/package1/__init__.py0000644000000000000000000000021314770461472024432 0ustar00"""This module has a __version__ that requires a relative import"""

from ._version import __version__

import a_package_that_doesnt_exist
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/imported_version/package1/_version.py0000644000000000000000000000010714770461472024521 0ustar00"""Imposter docstring that shouldn't be used"""

__version__ = '0.5.8'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/imported_version/pyproject.toml0000644000000000000000000000033414770461472023565 0ustar00[build-system]
requires = ["flit_core >=3.2,<4"]
build-backend = "flit_core.buildapi"

[project]
name = "package1"
authors = [
    {name = "Sir Röbin", email = "robin@camelot.uk"}
]
dynamic = ["version", "description"]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/inclusion/LICENSES/README0000644000000000000000000000014414770461472021350 0ustar00This directory will match the LICENSE* glob which Flit uses to add license
files to wheel metadata.
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/inclusion/doc/subdir/subsubdir/test.md0000644000000000000000000000000014770461472024612 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/inclusion/doc/subdir/test.txt0000644000000000000000000000006014770461472023035 0ustar00sdists should include this (see pyproject.toml)
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/inclusion/doc/test.rst0000644000000000000000000000006014770461472021536 0ustar00sdists should include this (see pyproject.toml)
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/inclusion/doc/test.txt0000644000000000000000000000006014770461472021545 0ustar00sdists should exclude this (see pyproject.toml)
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/inclusion/module1.py0000644000000000000000000000004514770461472021203 0ustar00"""For tests"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/inclusion/pyproject.toml0000644000000000000000000000035714770461472022205 0ustar00[build-system]
requires = ["flit"]
build-backend = "flit.buildapi"

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"

[tool.flit.sdist]
include = ["doc"]
exclude = ["doc/*.txt", "doc/**/*.md"]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/invalid_version1.py0000644000000000000000000000014014770461472021102 0ustar00"""Sample module with invalid __version__ string"""

__version__ = "not starting with a number"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/missing-description-file.toml0000644000000000000000000000037614770461472023073 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "missingdescriptionfile"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/missingdescriptionfile"
description-file = "definitely-missing.rst"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/misspelled-key.toml0000644000000000000000000000045314770461472021107 0ustar00[build-system]
requires = ["flit_core >=2,<4"]
build-backend = "flit_core.buildapi"

[tool.flit.metadata]
module = "package1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
descryption-file = "my-description.rst"  # Deliberate typo for test
home-page = "http://github.com/sirrobin/package1"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/module1-pkg.ini0000644000000000000000000000016614770461472020112 0ustar00[metadata]
module=module1
author=Sir Robin
author-email=robin@camelot.uk
home-page=http://github.com/sirrobin/module1
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/module1-pkg.toml0000644000000000000000000000044014770461472020301 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
description-file = "EG_README.rst"

[tool.flit.metadata.urls]
Documentation = "https://example.com/module1"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5055573
flit-3.12.0/flit_core/tests_core/samples/module1.py0000644000000000000000000000005214770461472017176 0ustar00"""Example module"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/module2.py0000644000000000000000000000027314770461472017204 0ustar00"""
Docstring formatted like this.
"""

a = {}
# An assignment to a subscript (a['test']) broke introspection
# https://github.com/pypa/flit/issues/343
a['test'] = 6

__version__ = '7.0'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/moduleunimportable.py0000644000000000000000000000012014770461472021533 0ustar00
"""
A sample unimportable module
"""

raise ImportError()

__version__ = "0.1"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/moduleunimportabledouble.py0000644000000000000000000000016114770461472022733 0ustar00
"""
A sample unimportable module with double assignment
"""

raise ImportError()

VERSION = __version__ = "0.1"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/my-description.rst0000644000000000000000000000003514770461472020757 0ustar00Sample description for test.
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/no_docstring-pkg.toml0000644000000000000000000000045714770461472021433 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "no_docstring"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/no_docstring"
description-file = "EG_README.rst"

[tool.flit.metadata.urls]
Documentation = "https://example.com/no_docstring"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/no_docstring.py0000644000000000000000000000002414770461472020317 0ustar00__version__ = '7.0'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/normalization/my_python_module.py0000644000000000000000000000000014770461472024102 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/normalization/pyproject.toml0000644000000000000000000000051614770461472023065 0ustar00[build-system]
requires = ["flit_core >=3.8,<4"]
build-backend = "flit_core.buildapi"

[project]
name = "my-python-module"
version = "0.0.1"
description = "Hyphenated package name, inferred import name"
authors = [
    {name = "Sir Robin", email = "robin@camelot.uk"}
]

[project.urls]
homepage = "http://github.com/me/python-module"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/ns1-pkg/EG_README.rst0000644000000000000000000000024514770461472020604 0ustar00This is an example long description for tests to load.

This file is `valid reStructuredText
`_.
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/ns1-pkg/ns1/pkg/__init__.py0000644000000000000000000000011314770461472022127 0ustar00"""
==================
ns1.pkg
==================
"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/ns1-pkg/pyproject.toml0000644000000000000000000000041214770461472021452 0ustar00[build-system]
requires = ["flit_core >=3.5,<4"]
build-backend = "flit_core.buildapi"

[tool.flit.metadata]
module = "ns1.pkg"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
description-file = "EG_README.rst"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/package1.toml0000644000000000000000000000046714770461472017641 0ustar00[build-system]
requires = ["flit_core >=2,<4"]
build-backend = "flit_core.buildapi"

[tool.flit.metadata]
module = "package1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
description-file = "my-description.rst"
home-page = "http://github.com/sirrobin/package1"

[scripts]
pkg_script = "package1:main"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/package1/__init__.py0000644000000000000000000000012414770461472021043 0ustar00"""A sample package"""

__version__ = '0.1'

def main():
    print("package1 main")
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/package1/data_dir/foo.sh0000644000000000000000000000004314770461472021620 0ustar00#!/bin/sh
echo "Example data file"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/package1/foo.py0000644000000000000000000000000614770461472020066 0ustar00a = 1
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/package1/subpkg/__init__.py0000644000000000000000000000000014770461472022327 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/package1/subpkg/sp_data_dir/test.json0000644000000000000000000000002214770461472024345 0ustar00{"example": true}
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/package1/subpkg2/__init__.py0000644000000000000000000000000014770461472022411 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/pep517/LICENSE0000644000000000000000000000004414770461472017305 0ustar00This file should be added to wheels
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/pep517/README.rst0000644000000000000000000000004514770461472017770 0ustar00This contains a nön-ascii character
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/pep517/module1.py0000644000000000000000000000005214770461472020217 0ustar00"""Example module"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/pep517/pyproject.toml0000644000000000000000000000057414770461472021224 0ustar00[build-system]
requires = ["flit_core >=2,<4"]
build-backend = "flit_core.buildapi"

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
description-file = "README.rst"
requires = [
    "requests >= 2.18",
    "docutils",
]

[tool.flit.entrypoints.flit_test_example]
foo = "module1:main"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5065572
flit-3.12.0/flit_core/tests_core/samples/pep621/LICENSE0000644000000000000000000000004414770461472017301 0ustar00This file should be added to wheels
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5075574
flit-3.12.0/flit_core/tests_core/samples/pep621/README.rst0000644000000000000000000000004514770461472017764 0ustar00This contains a nön-ascii character
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5075574
flit-3.12.0/flit_core/tests_core/samples/pep621/module1a.py0000644000000000000000000000005214770461472020354 0ustar00"""Example module"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5075574
flit-3.12.0/flit_core/tests_core/samples/pep621/pyproject.toml0000644000000000000000000000126614770461472021217 0ustar00[build-system]
requires = ["flit_core >=3.2,<4"]
build-backend = "flit_core.buildapi"

[project]
name = "module1"
authors = [
    {name = "Sir Röbin", email = "robin@camelot.uk"}
]
maintainers = [
    {name = "Sir Galahad"}
]
readme = "README.rst"
license = {file = "LICENSE"}
requires-python = ">=3.7"
dependencies = [
    "requests >= 2.18",
    "docutils",
]
keywords = ["example", "test"]
dynamic = [
    "version",
    "description",
]

[project.optional-dependencies]
test = [
  "pytest",
  "mock; python_version<'3.6'"
]

[project.urls]
homepage = "http://github.com/sirrobin/module1"

[project.entry-points.flit_test_example]
foo = "module1:main"

[tool.flit.module]
name = "module1a"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5075574
flit-3.12.0/flit_core/tests_core/samples/pep621_license_files/LICENSE0000644000000000000000000000004414770461472022165 0ustar00This file should be added to wheels
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5075574
flit-3.12.0/flit_core/tests_core/samples/pep621_license_files/README.rst0000644000000000000000000000000714770461472022646 0ustar00Readme
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5075574
flit-3.12.0/flit_core/tests_core/samples/pep621_license_files/module/vendor/LICENSE_VENDOR0000644000000000000000000000004414770461472026024 0ustar00This file should be added to wheels
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5075574
flit-3.12.0/flit_core/tests_core/samples/pep621_license_files/module1a.py0000644000000000000000000000005214770461472023240 0ustar00"""Example module"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5075574
flit-3.12.0/flit_core/tests_core/samples/pep621_license_files/pyproject.toml0000644000000000000000000000127114770461472024077 0ustar00[build-system]
requires = ["flit_core >=3.2,<4"]
build-backend = "flit_core.buildapi"

[project]
name = "module1"
authors = [
    {name = "Sir Röbin", email = "robin@camelot.uk"}
]
maintainers = [
    {name = "Sir Galahad"}
]
readme = "README.rst"
license-files = ["**/LICENSE*"]
requires-python = ">=3.7"
dependencies = [
    "requests >= 2.18",
    "docutils",
]
keywords = ["example", "test"]
dynamic = [
    "version",
    "description",
]

[project.optional-dependencies]
test = [
  "pytest",
  "mock; python_version<'3.6'"
]

[project.urls]
homepage = "http://github.com/sirrobin/module1"

[project.entry-points.flit_test_example]
foo = "module1:main"

[tool.flit.module]
name = "module1a"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5075574
flit-3.12.0/flit_core/tests_core/samples/pep621_nodynamic/README.rst0000644000000000000000000000004514770461472022025 0ustar00This contains a nön-ascii character
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5075574
flit-3.12.0/flit_core/tests_core/samples/pep621_nodynamic/module1.py0000644000000000000000000000000014770461472022245 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5075574
flit-3.12.0/flit_core/tests_core/samples/pep621_nodynamic/pyproject.toml0000644000000000000000000000121214770461472023247 0ustar00[build-system]
requires = ["flit_core >=3.2,<4"]
build-backend = "flit_core.buildapi"

[project]
name = "module1"
version = "0.03"
description = "Statically specified description"
authors = [
    {name = "Sir Robin", email = "robin@camelot.uk"}
]
readme = {file = "README.rst", content-type = "text/x-rst"}
classifiers = [
    "Topic :: Internet :: WWW/HTTP",
]
dependencies = [
    "requests >= 2.18",
    "docutils",
]  # N.B. Using this to check behaviour with dependencies but no optional deps

[project.urls]
homepage = "http://github.com/sirrobin/module1"

[project.scripts]
foo = "module1:main"

[project.gui-scripts]
foo-gui = "module1:main"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5075574
flit-3.12.0/flit_core/tests_core/samples/requires-dev.toml0000644000000000000000000000046714770461472020600 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
description-file = "EG_README.rst"
# This should generate a warning tell you to use requires-extra.dev
dev-requires = ["apackage"]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5075574
flit-3.12.0/flit_core/tests_core/samples/requires-envmark.toml0000644000000000000000000000037414770461472021462 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
requires = [
    "requests",
    "pathlib2; python_version == '2.7'",
]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5075574
flit-3.12.0/flit_core/tests_core/samples/requires-extra-envmark.toml0000644000000000000000000000045314770461472022601 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
description-file = "EG_README.rst"

[tool.flit.metadata.requires-extra]
test = ["pathlib2; python_version == \"2.7\""]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5075574
flit-3.12.0/flit_core/tests_core/samples/requires-requests.toml0000644000000000000000000000035714770461472021673 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
description-file = "EG_README.rst"
requires = ["requests"]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5075574
flit-3.12.0/flit_core/tests_core/samples/with_data_dir/LICENSE0000644000000000000000000000004414770461472021066 0ustar00This file should be added to wheels
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5075574
flit-3.12.0/flit_core/tests_core/samples/with_data_dir/README.rst0000644000000000000000000000004514770461472021551 0ustar00This contains a nön-ascii character
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5085573
flit-3.12.0/flit_core/tests_core/samples/with_data_dir/data/share/man/man1/foo.10000644000000000000000000000002214770461472024344 0ustar00Example data file
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5085573
flit-3.12.0/flit_core/tests_core/samples/with_data_dir/module1.py0000644000000000000000000000005214770461472022000 0ustar00"""Example module"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5085573
flit-3.12.0/flit_core/tests_core/samples/with_data_dir/pyproject.toml0000644000000000000000000000070414770461472023000 0ustar00[build-system]
requires = ["flit_core >=3.2,<4"]
build-backend = "flit_core.buildapi"

[project]
name = "module1"
authors = [
    {name = "Sir Röbin", email = "robin@camelot.uk"}
]
readme = "README.rst"
license = {file = "LICENSE"}
requires-python = ">=3.7"
dependencies = [
    "requests >= 2.18",
    "docutils",
]
dynamic = [
    "version",
    "description",
]

[project.scripts]
foo = "module1:main"

[tool.flit.external-data]
directory = "data"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5085573
flit-3.12.0/flit_core/tests_core/test_build_thyself.py0000644000000000000000000000302014770461472020056 0ustar00"""Tests of flit_core building itself"""
import os
import os.path as osp
import pytest
import tarfile
from testpath import assert_isdir, assert_isfile
import zipfile

from flit_core import buildapi

@pytest.fixture()
def cwd_project():
    """Fixture: chdir into the flit_core source tree for the test's duration.

    Skips the test when flit_core is not running from a source checkout
    (i.e. no pyproject.toml sits next to the package directory).
    """
    proj_dir = osp.dirname(osp.dirname(osp.abspath(buildapi.__file__)))
    if not osp.isfile(osp.join(proj_dir, 'pyproject.toml')):
        pytest.skip("need flit_core source directory")

    saved_cwd = os.getcwd()
    try:
        os.chdir(proj_dir)
        yield
    finally:
        # Restore the original working directory even if the test failed
        os.chdir(saved_cwd)


def test_prepare_metadata(tmp_path, cwd_project):
    """prepare_metadata_for_build_wheel creates a .dist-info with WHEEL and METADATA."""
    target = str(tmp_path)
    name = buildapi.prepare_metadata_for_build_wheel(target)

    assert name.startswith('flit_core')
    assert name.endswith('.dist-info')

    info_dir = osp.join(target, name)
    assert_isdir(info_dir)
    for metadata_file in ('WHEEL', 'METADATA'):
        assert_isfile(osp.join(info_dir, metadata_file))


def test_wheel(tmp_path, cwd_project):
    """build_wheel produces a valid zip archive named flit_core*.whl."""
    filename = buildapi.build_wheel(str(tmp_path))

    assert filename.startswith('flit_core')
    assert filename.endswith('.whl')
    whl_path = osp.join(str(tmp_path), filename)
    assert_isfile(whl_path)
    assert zipfile.is_zipfile(whl_path)


def test_sdist(tmp_path, cwd_project):
    """build_sdist produces a valid tarball named flit_core*.tar.gz."""
    filename = buildapi.build_sdist(str(tmp_path))

    assert filename.startswith('flit_core')
    assert filename.endswith('.tar.gz')
    sdist_path = osp.join(str(tmp_path), filename)
    assert_isfile(sdist_path)
    assert tarfile.is_tarfile(sdist_path)
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5085573
flit-3.12.0/flit_core/tests_core/test_buildapi.py0000644000000000000000000000753614770461472017032 0ustar00from contextlib import contextmanager
import os
import os.path as osp
import tarfile
from testpath import assert_isfile, assert_isdir
from testpath.tempdir import TemporaryDirectory
import zipfile

from flit_core import buildapi

samples_dir = osp.join(osp.dirname(__file__), 'samples')

@contextmanager
def cwd(directory):
    """Context manager: run the enclosed block with *directory* as the cwd.

    The previous working directory is restored on exit, even on error.
    """
    saved = os.getcwd()
    os.chdir(directory)
    try:
        yield
    finally:
        os.chdir(saved)

def test_get_build_requires():
    """The pep517 sample can be inspected (docstring & __version__) without
    importing it, so all three build hooks report no build dependencies."""
    with cwd(osp.join(samples_dir, 'pep517')):
        for hook in (buildapi.get_requires_for_build_wheel,
                     buildapi.get_requires_for_build_editable,
                     buildapi.get_requires_for_build_sdist):
            assert hook() == []

def test_get_build_requires_pep621_nodynamic():
    """Static version & description in pyproject.toml mean the module is not
    inspected at all, so there are no build dependencies."""
    with cwd(osp.join(samples_dir, 'pep621_nodynamic')):
        for hook in (buildapi.get_requires_for_build_wheel,
                     buildapi.get_requires_for_build_editable,
                     buildapi.get_requires_for_build_sdist):
            assert hook() == []

def test_get_build_requires_import():
    """A module that must be imported to get its version reports its runtime
    dependencies as build dependencies too."""
    with cwd(osp.join(samples_dir, 'constructed_version')):
        for hook in (buildapi.get_requires_for_build_wheel,
                     buildapi.get_requires_for_build_editable,
                     buildapi.get_requires_for_build_sdist):
            assert hook() == ["numpy >=1.16.0"]

def test_build_wheel():
    """A regular wheel contains the module source, not a .pth redirect."""
    with TemporaryDirectory() as outdir:
        with cwd(osp.join(samples_dir, 'pep517')):
            filename = buildapi.build_wheel(outdir)
        assert filename.endswith('.whl'), filename
        whl = osp.join(outdir, filename)
        assert_isfile(whl)
        assert zipfile.is_zipfile(whl)
        with zipfile.ZipFile(whl) as zf:
            names = zf.namelist()
        assert "module1.py" in names
        assert "module1.pth" not in names

def test_build_wheel_pep621():
    """build_wheel works for a project using the [project] (PEP 621) table."""
    with TemporaryDirectory() as outdir:
        with cwd(osp.join(samples_dir, 'pep621')):
            filename = buildapi.build_wheel(outdir)
        assert filename.endswith('.whl'), filename
        built = osp.join(outdir, filename)
        assert_isfile(built)
        assert zipfile.is_zipfile(built)

def test_build_editable():
    """An editable wheel carries a .pth redirect instead of the module."""
    with TemporaryDirectory() as outdir:
        with cwd(osp.join(samples_dir, 'pep517')):
            filename = buildapi.build_editable(outdir)
        assert filename.endswith('.whl'), filename
        whl = osp.join(outdir, filename)
        assert_isfile(whl)
        assert zipfile.is_zipfile(whl)
        with zipfile.ZipFile(whl) as zf:
            names = zf.namelist()
        assert "module1.py" not in names
        assert "module1.pth" in names

def test_build_sdist():
    """build_sdist produces a .tar.gz that tarfile recognises."""
    with TemporaryDirectory() as outdir:
        with cwd(osp.join(samples_dir, 'pep517')):
            filename = buildapi.build_sdist(outdir)
        assert filename.endswith('.tar.gz'), filename
        archive = osp.join(outdir, filename)
        assert_isfile(archive)
        assert tarfile.is_tarfile(archive)

def test_prepare_metadata_for_build_wheel():
    """The wheel-metadata hook creates a .dist-info dir with METADATA."""
    with TemporaryDirectory() as outdir:
        with cwd(osp.join(samples_dir, 'pep517')):
            dirname = buildapi.prepare_metadata_for_build_wheel(outdir)
        assert dirname.endswith('.dist-info'), dirname
        assert_isdir(osp.join(outdir, dirname))
        assert_isfile(osp.join(outdir, dirname, 'METADATA'))

def test_prepare_metadata_for_build_editable():
    """The editable-metadata hook creates a .dist-info dir with METADATA."""
    with TemporaryDirectory() as outdir:
        with cwd(osp.join(samples_dir, 'pep517')):
            dirname = buildapi.prepare_metadata_for_build_editable(outdir)
        assert dirname.endswith('.dist-info'), dirname
        assert_isdir(osp.join(outdir, dirname))
        assert_isfile(osp.join(outdir, dirname, 'METADATA'))
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5085573
flit-3.12.0/flit_core/tests_core/test_common.py0000644000000000000000000002121014770461472016512 0ustar00import email.parser
import email.policy
from io import StringIO
from pathlib import Path
import pytest
from unittest import TestCase

from flit_core import config
from flit_core.common import (
    Module, get_info_from_module, InvalidVersion, NoVersionError, check_version,
    normalize_file_permissions, Metadata, make_metadata,
)

samples_dir = Path(__file__).parent / 'samples'

class ModuleTests(TestCase):
    """Tests for Module: locating a module/package on disk and extracting
    its summary and version via get_info_from_module."""

    def test_ns_package_importable(self):
        """A dotted name resolves to a package inside a namespace package."""
        i = Module('ns1.pkg', samples_dir / 'ns1-pkg')
        assert i.path == Path(samples_dir, 'ns1-pkg', 'ns1', 'pkg')
        assert i.file == Path(samples_dir, 'ns1-pkg', 'ns1', 'pkg', '__init__.py')
        assert i.is_package

        assert i.in_namespace_package
        assert i.namespace_package_name == 'ns1'

    def test_package_importable(self):
        """A plain package directory is found along with its __init__.py."""
        i = Module('package1', samples_dir)
        assert i.path == samples_dir / 'package1'
        assert i.file == samples_dir / 'package1' / '__init__.py'
        assert i.is_package

    def test_module_importable(self):
        """A single-file module resolves to its .py file."""
        i = Module('module1', samples_dir)
        assert i.path == samples_dir / 'module1.py'
        assert not i.is_package

    def test_missing_name(self):
        """A name matching nothing in the directory raises ValueError."""
        with self.assertRaises(ValueError):
            i = Module('doesnt_exist', samples_dir)

    def test_conflicting_modules(self):
        """Both a module file and a package dir for one name is an error."""
        with pytest.raises(ValueError, match="Multiple"):
            Module('module1', samples_dir / 'conflicting_modules')

    def test_get_info_from_module(self):
        """get_info_from_module extracts the docstring summary and
        __version__ from a variety of module layouts, including ones that
        cannot be imported directly."""
        info = get_info_from_module(Module('module1', samples_dir))
        self.assertEqual(info, {'summary': 'Example module',
                                'version': '0.1'}
                         )

        info = get_info_from_module(Module('module2', samples_dir))
        self.assertEqual(info, {'summary': 'Docstring formatted like this.',
                                'version': '7.0'}
                         )

        # for_fields limits which items are looked up (and hence returned)
        pkg1 = Module('package1', samples_dir)
        info = get_info_from_module(pkg1)
        self.assertEqual(info, {'summary': 'A sample package',
                                'version': '0.1'}
                         )
        info = get_info_from_module(pkg1, for_fields=['version'])
        self.assertEqual(info, {'version': '0.1'})
        info = get_info_from_module(pkg1, for_fields=['description'])
        self.assertEqual(info, {'summary': 'A sample package'})
        info = get_info_from_module(pkg1, for_fields=[])
        self.assertEqual(info, {})

        # Modules that fail to import can still be inspected statically
        info = get_info_from_module(Module('moduleunimportable', samples_dir))
        self.assertEqual(info, {'summary': 'A sample unimportable module',
                                'version': '0.1'}
                         )

        info = get_info_from_module(Module('moduleunimportabledouble', samples_dir))
        self.assertEqual(info, {'summary': 'A sample unimportable module with double assignment',
                                'version': '0.1'}
                         )

        info = get_info_from_module(Module('module1', samples_dir / 'annotated_version'))
        self.assertEqual(info, {'summary': 'This module has a __version__ that has a type annotation',
                                'version': '0.1'}
                         )

        info = get_info_from_module(Module('module1', samples_dir / 'constructed_version'))
        self.assertEqual(info, {'summary': 'This module has a __version__ that requires runtime interpretation',
                                'version': '1.2.3'}
                         )

        info = get_info_from_module(Module('package1', samples_dir / 'imported_version'))
        self.assertEqual(info, {'summary': 'This module has a __version__ that requires a relative import',
                                'version': '0.5.8'}
                         )

        with self.assertRaises(InvalidVersion):
            get_info_from_module(Module('invalid_version1', samples_dir))

    def test_version_raise(self):
        """check_version rejects malformed/missing versions and normalises
        acceptable non-canonical ones."""
        with pytest.raises(InvalidVersion):
            check_version('a.1.0.beta0')

        with pytest.raises(InvalidVersion):
            check_version('3!')

        # Non-string versions are rejected outright
        with pytest.raises(InvalidVersion):
            check_version((1, 2))

        with pytest.raises(NoVersionError):
            check_version(None)

        assert check_version('4.1.0beta1') == '4.1.0b1'
        assert check_version('v1.2') == '1.2'

def test_normalize_file_permissions():
    """Group/other write bits are stripped from the mode."""
    # regular file: 0o664 -> 0o644
    assert normalize_file_permissions(0o100664) == 0o100644
    # directory: 0o775 -> 0o755
    assert normalize_file_permissions(0o40775) == 0o40755

@pytest.mark.parametrize(
    ("requires_python", "expected_result"),
    [
        ("", True),
        (">2.7", True),
        ("3", False),
        (">= 3.7", False),
        ("<4, > 3.2", False),
        (">3.4", False),
        (">=2.7, !=3.0.*, !=3.1.*, !=3.2.*", True),
        ("== 3.9", False),
        ("~=2.7", True),
        ("~=3.9", False),
    ],
)
def test_supports_py2(requires_python, expected_result):
    """supports_py2 reflects whether requires_python admits any 2.x version."""
    # Bypass __init__ so only the single attribute the property reads is set.
    metadata = object.__new__(Metadata)
    metadata.requires_python = requires_python
    result = metadata.supports_py2
    assert result == expected_result

def test_make_metadata():
    """make_metadata combines module info with static pyproject metadata.

    Uses the pep621_nodynamic sample, where version and description are
    given statically in pyproject.toml.
    """
    project_dir = samples_dir / 'pep621_nodynamic'
    ini_info = config.read_flit_config(project_dir / 'pyproject.toml')
    module = Module(ini_info.module, project_dir)
    md = make_metadata(module, ini_info)
    # Both values come straight from [project]; nothing is dynamic here
    assert md.version == '0.3'
    assert md.summary == "Statically specified description"

def test_metadata_multiline(tmp_path):
    """A multi-line field value survives an email-format round trip."""
    # Example from: https://packaging.python.org/specifications/core-metadata/#author
    author = ('C. Schultz, Universal Features Syndicate\n'
              'Los Angeles, CA ')
    fields = {
        'name': 'foo',
        'version': '1.0',
        'author': author,
    }
    buf = StringIO()
    Metadata(fields).write_metadata_file(buf)
    buf.seek(0)

    msg = email.parser.Parser(policy=email.policy.compat32).parse(buf)
    assert msg['Name'] == fields['name']
    assert msg['Version'] == fields['version']
    # Continuation lines are indented in the file; strip before comparing
    assert [line.lstrip() for line in msg['Author'].splitlines()] == author.splitlines()
    assert not msg.defects

@pytest.mark.parametrize(
    ("requires_dist", "expected_result"),
    [
        # Extra names inside [...] are normalised: runs of '_', '.', '-'
        # collapse to a single '-'
        ('foo [extra_1, extra.2, extra-3, extra__4, extra..5, extra--6]', 'foo [extra-1, extra-2, extra-3, extra-4, extra-5, extra-6]'),
        ('foo', 'foo'),
        ('foo[bar]', 'foo[bar]'),
        # https://packaging.python.org/en/latest/specifications/core-metadata/#requires-dist-multiple-use
        ('pkginfo', 'pkginfo'),
        ('zope.interface (>3.5.0)', 'zope.interface (>3.5.0)'),
        ("pywin32 >1.0; sys_platform == 'win32'", "pywin32 >1.0; sys_platform == 'win32'"),
    ],
)
def test_metadata_2_3_requires_dist(requires_dist, expected_result):
    """Requires-Dist entries are written out with extras normalised, and
    otherwise unchanged, per Metadata 2.3."""
    d = {
        'name': 'foo',
        'version': '1.0',
        'requires_dist': [requires_dist],
    }
    md = Metadata(d)
    sio = StringIO()
    md.write_metadata_file(sio)
    sio.seek(0)

    # Re-parse the emitted METADATA text and check the header value
    msg = email.parser.Parser(policy=email.policy.compat32).parse(sio)
    assert msg['Requires-Dist'] == expected_result
    assert not msg.defects

@pytest.mark.parametrize(
    ("provides_extra", "expected_result"),
    [
        ('foo', 'foo'),
        ('foo__bar..baz', 'foo-bar-baz'),
    ],
)
def test_metadata_2_3_provides_extra(provides_extra, expected_result):
    """Provides-Extra names are normalised (runs of _ . - collapse to '-')."""
    md = Metadata({
        'name': 'foo',
        'version': '1.0',
        'provides_extra': [provides_extra],
    })
    buf = StringIO()
    md.write_metadata_file(buf)
    buf.seek(0)

    parsed = email.parser.Parser(policy=email.policy.compat32).parse(buf)
    assert parsed['Provides-Extra'] == expected_result
    assert not parsed.defects

@pytest.mark.parametrize(
    ('value', 'expected_license', 'expected_license_expression'),
    [
        # Free-text 'license' goes to the License header...
        ({'license': 'MIT'}, 'MIT', None),
        ({'license': 'MIT OR Apache-2.0'}, 'MIT OR Apache-2.0', None),
        ({'license': 'MIT AND Apache-2.0'}, 'MIT AND Apache-2.0', None),
        # ...while 'license_expression' goes to License-Expression instead
        ({'license_expression': 'MIT'}, None, 'MIT'),
        ({'license_expression': 'Apache-2.0'}, None, 'Apache-2.0'),
        ({'license_expression': 'MIT OR Apache-2.0'}, None, 'MIT OR Apache-2.0'),
        ({'license_expression': 'MIT AND Apache-2.0'}, None, 'MIT AND Apache-2.0'),
    ],
)
def test_metadata_license(value, expected_license, expected_license_expression):
    """Only one of License / License-Expression is emitted, matching which
    input field was supplied."""
    d = {
        'name': 'foo',
        'version': '1.0',
        **value,
    }
    md = Metadata(d)
    sio = StringIO()
    md.write_metadata_file(sio)
    sio.seek(0)

    msg = email.parser.Parser(policy=email.policy.compat32).parse(sio)
    assert msg.get('License') == expected_license
    assert msg.get('License-Expression') == expected_license_expression
    assert not msg.defects
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5085573
flit-3.12.0/flit_core/tests_core/test_config.py0000644000000000000000000003714314770461472016503 0ustar00import logging
import re
import sys
from pathlib import Path
import pytest

from flit_core import config

samples_dir = Path(__file__).parent / 'samples'

def test_flatten_entrypoints():
    """Nested entry-point tables flatten to dotted group names."""
    nested = {'a': {'b': {'c': 'd'}, 'e': {'f': {'g': 'h'}}, 'i': 'j'}}
    flat = config.flatten_entrypoints(nested)
    assert flat == {'a': {'i': 'j'}, 'a.b': {'c': 'd'}, 'a.e.f': {'g': 'h'}}

def test_load_toml():
    """Old-style [tool.flit] TOML yields the module name and metadata."""
    loaded = config.read_flit_config(samples_dir / 'module1-pkg.toml')
    assert loaded.module == 'module1'
    assert loaded.metadata['home_page'] == 'http://github.com/sirrobin/module1'

def test_load_toml_ns():
    """A namespaced module name (dotted) loads correctly."""
    loaded = config.read_flit_config(samples_dir / 'ns1-pkg' / 'pyproject.toml')
    assert loaded.module == 'ns1.pkg'
    assert loaded.metadata['home_page'] == 'http://github.com/sirrobin/module1'

def test_load_normalization():
    """Distribution name is normalised while the import name keeps underscores."""
    loaded = config.read_flit_config(samples_dir / 'normalization' / 'pyproject.toml')
    assert loaded.module == 'my_python_module'
    assert loaded.metadata['name'] == 'my-python-module'

def test_load_pep621():
    """A full [project] table loads: name, readme content type, dependencies
    (including extras with merged markers), authors, entry points, and the
    declared dynamic fields."""
    inf = config.read_flit_config(samples_dir / 'pep621' / 'pyproject.toml')
    assert inf.module == 'module1a'
    assert inf.metadata['name'] == 'module1'
    assert inf.metadata['description_content_type'] == 'text/x-rst'
    # Remove all whitespace from requirements so we don't check exact format:
    assert {r.replace(' ', '') for r in inf.metadata['requires_dist']} == {
        'docutils',
        'requests>=2.18',
        'pytest;extra=="test"',  # from [project.optional-dependencies]
        'mock;extra=="test"and(python_version<\'3.6\')',
    }
    # NOTE(review): expected value ends with a trailing space and no address;
    # looks like an email in angle brackets may have been lost — verify
    # against the sample's pyproject.toml.
    assert inf.metadata['author_email'] == "Sir Röbin "
    assert inf.entrypoints['flit_test_example']['foo'] == 'module1:main'
    assert set(inf.dynamic_metadata) == {'version', 'description'}

def test_load_pep621_nodynamic():
    """With no dynamic fields, version & summary come from pyproject.toml."""
    loaded = config.read_flit_config(samples_dir / 'pep621_nodynamic' / 'pyproject.toml')
    assert loaded.module == 'module1'
    assert loaded.metadata['name'] == 'module1'
    assert loaded.metadata['version'] == '0.3'
    assert loaded.metadata['summary'] == 'Statically specified description'
    assert set(loaded.dynamic_metadata) == set()

    # Filling reqs_by_extra when dependencies were specified but no optional
    # dependencies was a bug.
    assert loaded.reqs_by_extra == {'.none':  ['requests >= 2.18', 'docutils']}

def test_misspelled_key():
    """A near-miss key name is echoed back in the config error message."""
    with pytest.raises(config.ConfigError) as excinfo:
        config.read_flit_config(samples_dir / 'misspelled-key.toml')

    assert 'description-file' in str(excinfo.value)

def test_description_file():
    """The referenced description file supplies text and content type."""
    loaded = config.read_flit_config(samples_dir / 'package1.toml')
    assert loaded.metadata['description'] == "Sample description for test.\n"
    assert loaded.metadata['description_content_type'] == 'text/x-rst'

def test_missing_description_file():
    """A nonexistent description file is a config error, not a crash."""
    missing = samples_dir / 'missing-description-file.toml'
    with pytest.raises(config.ConfigError, match=r"Description file .* does not exist"):
        config.read_flit_config(missing)

def test_bad_description_extension(caplog):
    """An unknown description extension warns and leaves content type unset."""
    loaded = config.read_flit_config(samples_dir / 'bad-description-ext.toml')
    assert loaded.metadata['description_content_type'] is None
    warnings = [r for r in caplog.records
                if r.levelno == logging.WARN and "Unknown extension" in r.msg]
    assert warnings

def test_extras():
    """Old-style requires-extra entries become marker-qualified requirements."""
    loaded = config.read_flit_config(samples_dir / 'extras.toml')
    assert set(loaded.metadata['requires_dist']) == {
        'toml',
        'pytest ; extra == "test"',
        'requests ; extra == "cus-tom"',
    }
    assert set(loaded.metadata['provides_extra']) == {'test', 'cus-tom'}

def test_extras_newstyle():
    """As test_extras, but with the new-style [project] table."""
    loaded = config.read_flit_config(samples_dir / 'extras-newstyle.toml')
    assert set(loaded.metadata['requires_dist']) == {
        'toml',
        'pytest ; extra == "test"',
        'requests ; extra == "cus-tom"',
    }
    assert set(loaded.metadata['provides_extra']) == {'test', 'cus-tom'}

def test_extras_dev_conflict():
    """Specifying both dev-requires and a 'dev' extra is rejected."""
    with pytest.raises(config.ConfigError, match=r'dev-requires'):
        config.read_flit_config(samples_dir / 'extras-dev-conflict.toml')

def test_extras_dev_warning(caplog):
    """dev-requires still works but emits an obsolescence warning."""
    loaded = config.read_flit_config(samples_dir / 'requires-dev.toml')
    assert '"dev-requires = ..." is obsolete' in caplog.text
    assert set(loaded.metadata['requires_dist']) == {'apackage ; extra == "dev"'}

def test_requires_extra_env_marker():
    """An environment marker on a requires-extra entry is preserved."""
    loaded = config.read_flit_config(samples_dir / 'requires-extra-envmark.toml')
    assert loaded.metadata['requires_dist'][0].startswith('pathlib2 ;')

@pytest.mark.parametrize(('erroneous', 'match'), [
    ({'requires-extra': None}, r'Expected a dict for requires-extra field'),
    ({'requires-extra': dict(dev=None)}, r'Expected a dict of lists for requires-extra field'),
    ({'requires-extra': dict(dev=[1])}, r'Expected a string list for requires-extra'),
])
def test_faulty_requires_extra(erroneous, match):
    """Malformed requires-extra values each produce a specific error message."""
    # Minimal valid old-style metadata, with one bad field mixed in
    metadata = {'module': 'mymod', 'author': '', 'author-email': ''}
    with pytest.raises(config.ConfigError, match=match):
        config._prep_metadata(dict(metadata, **erroneous), None)

@pytest.mark.parametrize(('path', 'err_match'), [
    ('../bar', 'out of the directory'),
    ('foo/../../bar', 'out of the directory'),
    ('/home', 'absolute path'),
    ('foo:bar', 'bad character'),
])
def test_bad_include_paths(path, err_match):
    """sdist include patterns escaping the project dir, absolute paths, or
    paths with forbidden characters are rejected."""
    toml_cfg = {'tool': {'flit': {
        'metadata': {'module': 'xyz', 'author': 'nobody'},
        'sdist': {'include': [path]}
    }}}

    with pytest.raises(config.ConfigError, match=err_match):
        config.prep_toml_config(toml_cfg, None)

@pytest.mark.parametrize(('proj_bad', 'err_match'), [
    ({'version': 1}, r'\bstr\b'),
    ({'license': {'fromage': 2}}, '[Uu]nrecognised'),
    ({'license': {'file': 'LICENSE', 'text': 'xyz'}}, 'both'),
    (
        {'license': {'file': '/LICENSE'}},
        re.escape("License file path (/LICENSE) cannot be an absolute path"),
    ),
    (
        {'license': {'file': '../LICENSE'}},
        re.escape("License file path (../LICENSE) cannot contain '..'"),
    ),
    ({'license': {}}, 'required'),
    # NOTE(review): this expected message reads oddly ("should be  or , not")
    # — possibly angle-bracketed type names were lost in extraction; confirm
    # against the upstream test file.
    ({'license': 1}, "license field should be  or , not "),
    # ({'license': "MIT License"}, "Invalid license expression: 'MIT License'"),  # TODO
    (
        {'license': 'MIT', 'classifiers': ['License :: OSI Approved :: MIT License']},
        "License classifiers are deprecated in favor of the license expression",
    ),
    ({'license-files': 1}, r"\blist\b"),
    ({'license-files': ["/LICENSE"]}, r"'/LICENSE'.+must not start with '/'"),
    ({'license-files': ["../LICENSE"]}, r"'../LICENSE'.+must not contain '..'"),
    ({'license-files': ["NOT_FOUND"]}, r"No files found.+'NOT_FOUND'"),
    ({'license-files': ["(LICENSE | LICENCE)"]}, "Pattern contains invalid characters"),
    # glob pattern validity differs between Python versions, hence the skips
    pytest.param(
        {'license-files': ["**LICENSE"]}, r"'\*\*LICENSE'.+Invalid pattern",
        marks=[pytest.mark.skipif(
            sys.version_info >= (3, 13), reason="Pattern is valid for 3.13+"
        )]
    ),
    pytest.param(
        {'license-files': ["./"]}, r"'./'.+Unacceptable pattern",
        marks=[pytest.mark.skipif(
            sys.version_info < (3, 13), reason="Pattern started to raise ValueError in 3.13"
        )]
    ),
    (
        {'license': {'file': 'LICENSE'}, 'license-files': ["LICENSE"]},
        "license-files cannot be used with a license table",
    ),
    ({'keywords': 'foo'}, 'list'),
    ({'keywords': ['foo', 7]}, 'strings'),
    ({'entry-points': {'foo': 'module1:main'}}, 'entry-point.*tables'),
    ({'entry-points': {'group': {'foo': 7}}}, 'entry-point.*string'),
    ({'entry-points': {'gui_scripts': {'foo': 'a:b'}}}, r'\[project\.gui-scripts\]'),
    ({'scripts': {'foo': 7}}, 'scripts.*string'),
    ({'gui-scripts': {'foo': 7}}, 'gui-scripts.*string'),
    ({'optional-dependencies': {'test': 'requests'}}, 'list.*optional-dep'),
    ({'optional-dependencies': {'test': [7]}}, 'string.*optional-dep'),
    ({'dynamic': ['classifiers']}, 'dynamic'),
    ({'dynamic': ['version']}, r'dynamic.*\[project\]'),
    ({'authors': ['thomas']}, r'author.*\bdict'),
    ({'maintainers': [{'title': 'Dr'}]}, r'maintainer.*title'),
    ({'name': 'mödule1'}, r'not valid'),
    ({'name': 'module1_'}, r'not valid'),
    ({'optional-dependencies': {'x_': []}}, r'not valid'),
    ({'optional-dependencies': {'x_a': [], 'X--a': []}}, r'clash'),
])
def test_bad_pep621_info(proj_bad, err_match):
    """Each invalid [project] field raises ConfigError with a message
    matching the given pattern."""
    # Start from a minimal valid project table, then inject one bad field
    proj = {'name': 'module1', 'version': '1.0', 'description': 'x'}
    proj.update(proj_bad)
    with pytest.raises(config.ConfigError, match=err_match):
        config.read_pep621_metadata(proj, samples_dir / 'pep621' / 'pyproject.toml')

@pytest.mark.parametrize(('readme', 'err_match'), [
    ({'file': 'README.rst'}, 'required'),
    ({'file': 'README.rst', 'content-type': 'text/x-python'}, 'content-type'),
    ('/opt/README.rst', 'relative'),
    ({'file': 'README.rst', 'text': '', 'content-type': 'text/x-rst'}, 'both'),
    ({'content-type': 'text/x-rst'}, 'required'),
    ({'file': 'README.rst', 'content-type': 'text/x-rst', 'a': 'b'}, '[Uu]nrecognised'),
    (5, r'readme.*string'),
])
def test_bad_pep621_readme(readme, err_match):
    """Invalid [project] readme values (missing content type, absolute path,
    file+text together, unknown keys, wrong type) are each rejected."""
    proj = {
        'name': 'module1', 'version': '1.0', 'description': 'x', 'readme': readme
    }
    with pytest.raises(config.ConfigError, match=err_match):
        config.read_pep621_metadata(proj, samples_dir / 'pep621' / 'pyproject.toml')


@pytest.mark.parametrize(('value', 'license_expression'), [
    # Accept and normalize valid SPDX expressions for 'license = ...'
    ("mit",  "MIT"),
    ("apache-2.0", "Apache-2.0"),
    ("APACHE-2.0+", "Apache-2.0+"),
    ("mit AND (apache-2.0 OR bsd-2-clause)", "MIT AND (Apache-2.0 OR BSD-2-Clause)"),
    ("(mit)", "(MIT)"),
    ("MIT OR Apache-2.0", "MIT OR Apache-2.0"),
    ("MIT AND Apache-2.0", "MIT AND Apache-2.0"),
    ("MIT AND Apache-2.0+ OR 0BSD", "MIT AND Apache-2.0+ OR 0BSD"),
    ("MIT AND (Apache-2.0+ OR (0BSD))", "MIT AND (Apache-2.0+ OR (0BSD))"),
    ("MIT OR(mit)", "MIT OR (MIT)"),
    ("(mit)AND mit", "(MIT) AND MIT"),
    ("MIT OR (MIT OR ( MIT )) AND ((MIT) AND MIT) OR MIT", "MIT OR (MIT OR (MIT)) AND ((MIT) AND MIT) OR MIT"),
    ("LICENSEREF-Public-Domain OR cc0-1.0 OR unlicense", "LicenseRef-Public-Domain OR CC0-1.0 OR Unlicense"),
    ("mit  AND  ( apache-2.0+  OR  mpl-2.0+ )", "MIT AND (Apache-2.0+ OR MPL-2.0+)"),
    # LicenseRef expressions: only the LicenseRef is normalised
    ("LiceNseref-Public-DoMain", "LicenseRef-Public-DoMain"),
])
def test_license_expr(value, license_expression):
    """A valid SPDX expression in 'license = ...' is normalised and stored as
    license_expression (the plain 'license' key is not set)."""
    proj = {
        'name': 'module1', 'version': '1.0', 'description': 'x', 'license': value
    }
    info = config.read_pep621_metadata(proj, samples_dir / 'pep621' / 'pyproject.toml')
    assert 'license' not in info.metadata
    assert info.metadata['license_expression'] == license_expression

@pytest.mark.parametrize('invalid_expr', [
    "LicenseRef-foo_bar",
    "LicenseRef-foo~bar",
    "LicenseRef-foo:bar",
    "LicenseRef-foo[bar]",
    "LicenseRef-foo-bar+",
])
def test_license_expr_error_licenseref(invalid_expr: str):
    """LicenseRef identifiers with characters outside the allowed set fail."""
    bad_proj = {
        'name': 'module1', 'version': '1.0', 'description': 'x',
        'license': invalid_expr,
    }
    with pytest.raises(config.ConfigError, match="can only contain"):
        config.read_pep621_metadata(bad_proj, samples_dir / 'pep621' / 'pyproject.toml')


@pytest.mark.parametrize('invalid_expr', [
    # Not a real licence
    "BSD-33-Clause",
    "MIT OR BSD-33-Clause",
    "MIT OR (MIT AND BSD-33-Clause)",
])
def test_license_expr_error_not_recognised(invalid_expr: str):
    """Unknown SPDX license identifiers anywhere in the expression fail."""
    bad_proj = {
        'name': 'module1', 'version': '1.0', 'description': 'x',
        'license': invalid_expr,
    }
    with pytest.raises(config.ConfigError, match="recognised"):
        config.read_pep621_metadata(bad_proj, samples_dir / 'pep621' / 'pyproject.toml')


@pytest.mark.parametrize('invalid_expr', [
    # No operator
    "MIT MIT",
    "MIT OR (MIT MIT)",
    # Only operator
    "AND",
    "OR",
    "AND AND AND",
    "OR OR OR",
    "OR AND OR",
    "AND OR OR AND OR OR AND",
    # Too many operators
    "MIT AND AND MIT",
    "MIT OR OR OR MIT",
    "MIT AND OR MIT",
    # Mixed case operator
    "MIT aND MIT",
    "MIT oR MIT",
    "MIT AND MIT oR MIT",
    # Missing operand
    "MIT AND",
    "AND MIT",
    "MIT OR",
    "OR MIT",
    "MIT (AND MIT)",
    "(MIT OR) MIT",
    # Unbalanced brackets
    ")(",
    "(",
    ")",
    "MIT OR ()",
    ") AND MIT",
    "MIT OR (",
    "MIT OR (MIT))",
    # Only brackets
    "()",
    "()()",
    "()(())",
    "(  )",
    "  (  )",
    "(  )  ",
    "  (  )  ",
])
def test_license_expr_error(invalid_expr: str):
    """Structurally malformed SPDX expressions (bad operators, missing
    operands, unbalanced or empty brackets) are rejected as invalid."""
    proj = {
        'name': 'module1', 'version': '1.0', 'description': 'x',
        'license': invalid_expr,
    }
    with pytest.raises(config.ConfigError, match="is not a valid"):
        config.read_pep621_metadata(proj, samples_dir / 'pep621' / 'pyproject.toml')


@pytest.mark.parametrize('invalid_expr', [
    "",
    " ",
    "\t",
    "\r",
    "\n",
    "\f",
    " \t \n \r \f ",
])
def test_license_expr_error_empty(invalid_expr: str):
    """Empty or whitespace-only license expressions are rejected."""
    bad_proj = {
        'name': 'module1', 'version': '1.0', 'description': 'x',
        'license': invalid_expr,
    }
    with pytest.raises(config.ConfigError, match="must not be empty"):
        config.read_pep621_metadata(bad_proj, samples_dir / 'pep621' / 'pyproject.toml')


@pytest.mark.parametrize('invalid_expr', [
    "mit or mit",
    "or",
    "and",
    "MIT and MIT",
    "MIT AND MIT or MIT",
    "MIT AND (MIT or MIT)",
])
def test_license_expr_error_lowercase(invalid_expr: str):
    """Lowercase or mixed-case AND/OR operators are rejected."""
    bad_proj = {
        'name': 'module1', 'version': '1.0', 'description': 'x',
        'license': invalid_expr,
    }
    with pytest.raises(config.ConfigError, match="must be uppercase"):
        config.read_pep621_metadata(bad_proj, samples_dir / 'pep621' / 'pyproject.toml')


@pytest.mark.parametrize('invalid_expr', [
    "WITH",
    "with",
    "WiTh",
    "wiTH",
    "MIT WITH MIT-Exception",
    "(MIT WITH MIT-Exception)",
    "MIT OR MIT WITH MIT-Exception",
    "MIT WITH MIT-Exception OR (MIT AND MIT)",
])
def test_license_expr_error_unsupported_with(invalid_expr: str):
    """The SPDX WITH operator is reported as not yet supported."""
    bad_proj = {
        'name': 'module1', 'version': '1.0', 'description': 'x',
        'license': invalid_expr,
    }
    with pytest.raises(config.ConfigError, match="not yet supported"):
        config.read_pep621_metadata(bad_proj, samples_dir / 'pep621' / 'pyproject.toml')


def test_license_file_defaults_with_old_metadata():
    """Old-style metadata still picks up the default license-file patterns."""
    old_meta = {'module': 'mymod', 'author': ''}
    result = config._prep_metadata(
        old_meta, samples_dir / 'pep621_license_files' / 'pyproject.toml'
    )
    assert result.metadata['license_files'] == ["LICENSE"]


@pytest.mark.parametrize(('proj_license_files', 'files'), [
    ({}, ["LICENSE"]),  # Only match default patterns
    ({'license-files': []}, []),
    ({'license-files': ["LICENSE"]}, ["LICENSE"]),
    ({'license-files': ["LICENSE*"]}, ["LICENSE"]),
    ({'license-files': ["LICEN[CS]E*"]}, ["LICENSE"]),
    ({'license-files': ["**/LICENSE*"]}, ["LICENSE", "module/vendor/LICENSE_VENDOR"]),
    ({'license-files': ["module/vendor/LICENSE*"]}, ["module/vendor/LICENSE_VENDOR"]),
    ({'license-files': ["LICENSE", "module/**/LICENSE*"]}, ["LICENSE", "module/vendor/LICENSE_VENDOR"]),
    # Add project.license.file + match default patterns
    ({'license': {'file': 'module/vendor/LICENSE_VENDOR'}}, ["LICENSE", "module/vendor/LICENSE_VENDOR"]),
])
def test_pep621_license_files(proj_license_files, files):
    """license-files glob patterns resolve to the expected file lists against
    the pep621_license_files sample project."""
    proj = {'name': 'module1', 'version': '1.0', 'description': 'x'}
    proj.update(proj_license_files)
    info = config.read_pep621_metadata(proj, samples_dir / 'pep621_license_files' / 'pyproject.toml')
    assert info.metadata['license_files'] == files
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5085573
flit-3.12.0/flit_core/tests_core/test_sdist.py0000644000000000000000000000423214770461472016355 0ustar00from io import BytesIO
import os.path as osp
from pathlib import Path
import tarfile
from testpath import assert_isfile

from flit_core import sdist

samples_dir = Path(__file__).parent / 'samples'

def test_make_sdist(tmp_path):
    """Smoke test: a complete sdist can be built from an old-style config."""
    sdist.SdistBuilder.from_ini_path(samples_dir / 'package1.toml').build(tmp_path)
    assert_isfile(tmp_path / 'package1-0.1.tar.gz')


def test_make_sdist_pep621(tmp_path):
    """Building from a [project]-style config returns the archive path."""
    builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'pep621' / 'pyproject.toml')
    built = builder.build(tmp_path)
    assert built == tmp_path / 'module1-0.1.tar.gz'
    assert_isfile(built)


def test_make_sdist_pep621_nodynamic(tmp_path):
    """A fully static [project] config builds with its declared version."""
    builder = sdist.SdistBuilder.from_ini_path(
        samples_dir / 'pep621_nodynamic' / 'pyproject.toml'
    )
    built = builder.build(tmp_path)
    assert built == tmp_path / 'module1-0.3.tar.gz'
    assert_isfile(built)


def test_clean_tarinfo():
    """clean_tarinfo zeroes ownership and pins mtime for reproducibility."""
    with tarfile.open(mode='w', fileobj=BytesIO()) as tf:
        info = tf.gettarinfo(str(samples_dir / 'module1.py'))
    scrubbed = sdist.clean_tarinfo(info, mtime=42)
    assert scrubbed.uid == 0
    assert scrubbed.uname == ''
    assert scrubbed.mtime == 42


def test_include_exclude():
    """sdist include/exclude patterns select exactly the configured files."""
    builder = sdist.SdistBuilder.from_ini_path(
        samples_dir / 'inclusion' / 'pyproject.toml'
    )
    selected = builder.apply_includes_excludes(builder.select_files())

    for included in (osp.join('doc', 'test.rst'),
                     osp.join('doc', 'subdir', 'test.txt')):
        assert included in selected
    for excluded in (osp.join('doc', 'test.txt'),
                     osp.join('doc', 'subdir', 'subsubdir', 'test.md')):
        assert excluded not in selected


def test_data_dir():
    """Files under the external data directory are selected for the sdist."""
    builder = sdist.SdistBuilder.from_ini_path(
        samples_dir / 'with_data_dir' / 'pyproject.toml'
    )
    selected = builder.apply_includes_excludes(builder.select_files())
    assert osp.join('data', 'share', 'man', 'man1', 'foo.1') in selected


def test_pep625(tmp_path):
    """The sdist filename uses the PEP 625 normalised project name."""
    builder = sdist.SdistBuilder.from_ini_path(
        samples_dir / 'normalization' / 'pyproject.toml'
    )
    built = builder.build(tmp_path)
    assert built == tmp_path / 'my_python_module-0.0.1.tar.gz'
    assert_isfile(built)
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5085573
flit-3.12.0/flit_core/tests_core/test_versionno.py0000644000000000000000000000222214770461472017246 0ustar00import pytest

from flit_core.common import InvalidVersion
from flit_core.versionno import normalise_version

def test_normalise_version():
    """normalise_version canonicalises version strings per PEP 440 rules:
    case, separators, leading zeros, pre/post/dev labels and local segments;
    unparseable input raises InvalidVersion."""
    nv = normalise_version  # short alias for readability below
    # Already-canonical versions pass through unchanged
    assert nv('4.3.1') == '4.3.1'
    assert nv('1.0b2') == '1.0b2'
    assert nv('2!1.3') == '2!1.3'

    # Prereleases
    assert nv('1.0B2') == '1.0b2'
    assert nv('1.0.b2') == '1.0b2'
    assert nv('1.0beta2') == '1.0b2'
    assert nv('1.01beta002') == '1.1b2'
    assert nv('1.0-preview2') == '1.0rc2'
    assert nv('1.0_c') == '1.0rc0'

    # Post releases
    assert nv('1.0post-2') == '1.0.post2'
    assert nv('1.0post') == '1.0.post0'
    assert nv('1.0-rev3') == '1.0.post3'
    assert nv('1.0-2') == '1.0.post2'

    # Development versions
    assert nv('1.0dev-2') == '1.0.dev2'
    assert nv('1.0dev') == '1.0.dev0'
    assert nv('1.0-dev3') == '1.0.dev3'

    # Local version segments, leading 'v', surrounding whitespace, combos
    assert nv('1.0+ubuntu-01') == '1.0+ubuntu.1'
    assert nv('v1.3-pre2') == '1.3rc2'
    assert nv(' 1.2.5.6\t') == '1.2.5.6'
    assert nv('1.0-alpha3-post02+ubuntu_xenial_5') == '1.0a3.post2+ubuntu.xenial.5'

    # Things that can't be normalised are rejected
    with pytest.raises(InvalidVersion):
        nv('3!')

    with pytest.raises(InvalidVersion):
        nv('abc')
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5085573
flit-3.12.0/flit_core/tests_core/test_wheel.py0000644000000000000000000000403014770461472016327 0ustar00from pathlib import Path
from zipfile import ZipFile

from testpath import assert_isfile

from flit_core.wheel import make_wheel_in, main

samples_dir = Path(__file__).parent / 'samples'

def test_licenses_dir(tmp_path):
    # Smoketest for https://github.com/pypa/flit/issues/399
    pyproject = samples_dir / 'inclusion' / 'pyproject.toml'
    wheel_info = make_wheel_in(pyproject, tmp_path)
    assert_isfile(wheel_info.file)


def test_source_date_epoch(tmp_path, monkeypatch):
    # SOURCE_DATE_EPOCH (reproducible-builds convention) should control the
    # timestamps recorded in the wheel. 1633007882 is 2021-09-30 (UTC).
    monkeypatch.setenv('SOURCE_DATE_EPOCH', '1633007882')
    wheel_info = make_wheel_in(samples_dir / 'pep621' / 'pyproject.toml', tmp_path)
    assert_isfile(wheel_info.file)
    with ZipFile(wheel_info.file, 'r') as zf:
        recorded = zf.getinfo('module1a.py').date_time
    assert recorded[:3] == (2021, 9, 30)


def test_zero_timestamp(tmp_path, monkeypatch):
    # SOURCE_DATE_EPOCH=0 predates the zip format's minimum timestamp, so
    # file entries should be clamped to the minimum, 1980-01-01 00:00:00.
    monkeypatch.setenv('SOURCE_DATE_EPOCH', '0')
    wheel_info = make_wheel_in(samples_dir / 'pep621' / 'pyproject.toml', tmp_path)
    assert_isfile(wheel_info.file)
    with ZipFile(wheel_info.file, 'r') as zf:
        recorded = zf.getinfo('module1a.py').date_time
    assert recorded == (1980, 1, 1, 0, 0, 0)


def test_main(tmp_path):
    # Exercise the command-line entry point: it should build exactly one
    # wheel into --outdir, and the wheel should contain the sample module.
    main(['--outdir', str(tmp_path), str(samples_dir / 'pep621')])
    wheels = list(tmp_path.glob('*.whl'))
    assert len(wheels) == 1
    with ZipFile(wheels[0], 'r') as zf:
        assert 'module1a.py' in zf.namelist()


def test_data_dir(tmp_path):
    # Files from the sample's data directory should land under the wheel's
    # <name>-<version>.data/data/ tree.
    wheel_info = make_wheel_in(samples_dir / 'with_data_dir' / 'pyproject.toml', tmp_path)
    assert_isfile(wheel_info.file)
    with ZipFile(wheel_info.file, 'r') as zf:
        contents = zf.namelist()
    assert 'module1-0.1.data/data/share/man/man1/foo.1' in contents


def test_license_files(tmp_path):
    # Declared license files should be copied into the wheel under
    # <dist-info>/licenses/, preserving their relative paths.
    wheel_info = make_wheel_in(
        samples_dir / 'pep621_license_files' / 'pyproject.toml', tmp_path
    )
    assert_isfile(wheel_info.file)
    with ZipFile(wheel_info.file, 'r') as zf:
        contents = zf.namelist()
    assert 'module1-0.1.dist-info/licenses/LICENSE' in contents
    assert 'module1-0.1.dist-info/licenses/module/vendor/LICENSE_VENDOR' in contents
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5085573
flit-3.12.0/flit_core/update-vendored-tomli.sh0000755000000000000000000000105314770461472016211 0ustar00#!/bin/bash
# Update the vendored copy of tomli
# Usage: update-vendored-tomli.sh <tomli-version>
set -euo pipefail

version=$1
echo "Bundling tomli version $version"

# Remove any previously vendored copy (both the package dir and .dist-info)
# before installing the requested version into the vendor directory.
rm -rf flit_core/vendor/tomli*
pip install --target flit_core/vendor/ "tomli==$version"

# Convert absolute imports to relative (from tomli.foo -> from .foo)
for file in flit_core/vendor/tomli/*.py; do
  sed -i -E 's/((from|import)[[:space:]]+)tomli\./\1\./' "$file"
done

# Delete some files that aren't useful in this context.
# Leave LICENSE & METADATA present.
rm flit_core/vendor/tomli*.dist-info/{INSTALLER,RECORD,REQUESTED,WHEEL}
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5085573
flit-3.12.0/prepare_license_list.py0000644000000000000000000000124514770461472014244 0ustar00# Call with path to SPDX license-list-data repo, cloned from:
#   https://github.com/spdx/license-list-data

import json
import pprint
import sys
from pathlib import Path

# First CLI argument: path to a clone of
# https://github.com/spdx/license-list-data
list_data_repo = Path(sys.argv[1])
with (list_data_repo / 'json' / 'licenses.json').open('rb') as f:
    licenses_json = json.load(f)

# Map lowercased licence ID -> {'id': canonical ID}, enabling
# case-insensitive lookup. Deprecated SPDX IDs are excluded.
condensed = {
    l['licenseId'].lower(): {'id': l['licenseId']}
    for l in licenses_json['licenses']
    if not l['isDeprecatedLicenseId']
}

# Write the condensed table out as a generated Python module.
with Path('flit_core', 'flit_core', '_spdx_data.py').open('w') as f:
    f.write("# This file is generated from SPDX license data; don't edit it manually.\n\n")

    f.write("licenses = \\\n")
    pprint.pprint(condensed, f)
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5085573
flit-3.12.0/pyproject.toml0000644000000000000000000000174414770461472012417 0ustar00[build-system]
requires = ["flit_core >=3.11,<4"]
build-backend = "flit_core.buildapi"

[project]
name = "flit"
authors = [
    {name = "Thomas Kluyver", email = "thomas@kluyver.me.uk"},
]
dependencies = [
    "flit_core >=3.12.0",
    "requests",
    "docutils",
    "tomli-w",
    "pip",
]
requires-python = ">=3.8"
readme = "README.rst"
license = "BSD-3-Clause"
license-files = ["LICENSE"]
classifiers = ["Intended Audience :: Developers",
    "Programming Language :: Python :: 3",
    "Topic :: Software Development :: Libraries :: Python Modules",
]
dynamic = ['version', 'description']

[project.optional-dependencies]
test = [
	"testpath",
	"responses",
	"pytest>=2.7.3",
	"pytest-cov",
	"tomli",
]
doc = [
	"sphinx",
	"sphinxcontrib_github_alt",
	"pygments-github-lexers",  # TOML highlighting
]

[project.urls]
Documentation = "https://flit.pypa.io"
Source = "https://github.com/pypa/flit"
Changelog = "https://flit.pypa.io/en/stable/history.html"

[project.scripts]
flit = "flit:main"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5085573
flit-3.12.0/tests/__init__.py0000644000000000000000000000000014770461472012736 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5085573
flit-3.12.0/tests/conftest.py0000644000000000000000000000056414770461472013043 0ustar00from pathlib import Path
import pytest
from shutil import copytree

samples_dir = Path(__file__).parent / 'samples'

@pytest.fixture
def copy_sample(tmp_path):
    """Copy a subdirectory from the samples dir to a temp dir"""
    def _copy(dirname):
        destination = tmp_path / dirname
        # str() conversion keeps this working with older copytree signatures
        copytree(str(samples_dir / dirname), str(destination))
        return destination

    return _copy
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5095572
flit-3.12.0/tests/samples/EG_README.rst0000644000000000000000000000024514770461472014346 0ustar00This is an example long description for tests to load.

This file is `valid reStructuredText
`_.
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5095572
flit-3.12.0/tests/samples/altdistname/package1/__init__.py0000644000000000000000000000012414770461472020372 0ustar00"""A sample package"""

__version__ = '0.1'

def main():
    print("package1 main")
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5095572
flit-3.12.0/tests/samples/altdistname/package1/data_dir/foo.sh0000644000000000000000000000004314770461472021147 0ustar00#!/bin/sh
echo "Example data file"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5095572
flit-3.12.0/tests/samples/altdistname/package1/foo.py0000644000000000000000000000000614770461472017415 0ustar00a = 1
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5095572
flit-3.12.0/tests/samples/altdistname/package1/subpkg/__init__.py0000644000000000000000000000000014770461472021656 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5095572
flit-3.12.0/tests/samples/altdistname/package1/subpkg/sp_data_dir/test.json0000644000000000000000000000002214770461472023674 0ustar00{"example": true}
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5095572
flit-3.12.0/tests/samples/altdistname/package1/subpkg2/__init__.py0000644000000000000000000000000014770461472021740 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5095572
flit-3.12.0/tests/samples/altdistname/pyproject.toml0000644000000000000000000000040314770461472017521 0ustar00[build-system]
requires = ["flit_core >=2,<4"]
build-backend = "flit_core.buildapi"

[tool.flit.metadata]
module = "package1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/package1"
dist-name = "package-Dist1"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5095572
flit-3.12.0/tests/samples/bad-description-ext.toml0000644000000000000000000000033514770461472017046 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
description-file = "module1.py"  # WRONG
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5095572
flit-3.12.0/tests/samples/entrypoints_conflict/console_entry_points.txt0000644000000000000000000000003614770461472023601 0ustar00[console_scripts]
foo=bar:baz
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5105574
flit-3.12.0/tests/samples/entrypoints_conflict/package1/__init__.py0000644000000000000000000000012414770461472022344 0ustar00"""A sample package"""

__version__ = '0.1'

def main():
    print("package1 main")
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5105574
flit-3.12.0/tests/samples/entrypoints_conflict/package1/data_dir/foo.sh0000644000000000000000000000004314770461472023121 0ustar00#!/bin/sh
echo "Example data file"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5105574
flit-3.12.0/tests/samples/entrypoints_conflict/package1/foo.py0000644000000000000000000000000614770461472021367 0ustar00a = 1
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5105574
flit-3.12.0/tests/samples/entrypoints_conflict/package1/subpkg/__init__.py0000644000000000000000000000000014770461472023630 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5105574
flit-3.12.0/tests/samples/entrypoints_conflict/package1/subpkg/sp_data_dir/test.json0000644000000000000000000000002214770461472025646 0ustar00{"example": true}
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5105574
flit-3.12.0/tests/samples/entrypoints_conflict/package1/subpkg2/__init__.py0000644000000000000000000000000014770461472023712 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5105574
flit-3.12.0/tests/samples/entrypoints_conflict/pyproject.toml0000644000000000000000000000056014770461472021477 0ustar00[build-system]
requires = ["flit_core >=2,<4"]
build-backend = "flit_core.buildapi"

[tool.flit.metadata]
module = "package1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/package1"

# The sections below conflict
[tool.flit.scripts]
pkg_script = "package1:main"

[tool.flit.entrypoints.console_scripts]
foo = "bar:baz"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5105574
flit-3.12.0/tests/samples/entrypoints_valid/package1/__init__.py0000644000000000000000000000012414770461472021642 0ustar00"""A sample package"""

__version__ = '0.1'

def main():
    print("package1 main")
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5105574
flit-3.12.0/tests/samples/entrypoints_valid/package1/data_dir/foo.sh0000644000000000000000000000004314770461472022417 0ustar00#!/bin/sh
echo "Example data file"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5105574
flit-3.12.0/tests/samples/entrypoints_valid/package1/foo.py0000644000000000000000000000000614770461472020665 0ustar00a = 1
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5105574
flit-3.12.0/tests/samples/entrypoints_valid/package1/subpkg/__init__.py0000644000000000000000000000000014770461472023126 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5105574
flit-3.12.0/tests/samples/entrypoints_valid/package1/subpkg/sp_data_dir/test.json0000644000000000000000000000002214770461472025144 0ustar00{"example": true}
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5115573
flit-3.12.0/tests/samples/entrypoints_valid/package1/subpkg2/__init__.py0000644000000000000000000000000014770461472023210 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5115573
flit-3.12.0/tests/samples/entrypoints_valid/pyproject.toml0000644000000000000000000000052714770461472021000 0ustar00[build-system]
requires = ["flit_core >=2,<4"]
build-backend = "flit_core.buildapi"

[tool.flit.metadata]
module = "package1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/package1"

[tool.flit.scripts]
pkg_script = "package1:main"

[tool.flit.entrypoints.myplugins]
package1 = "package1:main"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5115573
flit-3.12.0/tests/samples/extras-dev-conflict.toml0000644000000000000000000000046114770461472017062 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
description-file = "EG_README.rst"
dev-requires = ["apackage"]

[tool.flit.metadata.requires-extra]
dev = ["anotherpackage"]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5115573
flit-3.12.0/tests/samples/extras/module1.py0000644000000000000000000000005214770461472015526 0ustar00"""Example module"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5115573
flit-3.12.0/tests/samples/extras/pyproject.toml0000644000000000000000000000042514770461472016526 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
requires = ["toml"]

[tool.flit.metadata.requires-extra]
test = ["pytest"]
custom = ["requests"]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5115573
flit-3.12.0/tests/samples/invalid_classifier.toml0000644000000000000000000000056214770461472017035 0ustar00[build-system]
requires = ["flit_core >=2,<4"]
build-backend = "flit_core.buildapi"

[tool.flit.metadata]
module = "package1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
description-file = "my-description.rst"
home-page = "http://github.com/sirrobin/package1"
classifiers = [
    "License :: OSI Approved :: BSD License",
    "Intended Audience :: Pacman",
]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5115573
flit-3.12.0/tests/samples/invalid_version1.py0000644000000000000000000000014014770461472016124 0ustar00"""Sample module with invalid __version__ string"""

__version__ = "not starting with a number"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5115573
flit-3.12.0/tests/samples/missing-description-file.toml0000644000000000000000000000037614770461472020115 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "missingdescriptionfile"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/missingdescriptionfile"
description-file = "definitely-missing.rst"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5115573
flit-3.12.0/tests/samples/module1.py0000644000000000000000000000005214770461472014220 0ustar00"""Example module"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5115573
flit-3.12.0/tests/samples/module1_ini/flit.ini0000644000000000000000000000016614770461472016152 0ustar00[metadata]
module=module1
author=Sir Robin
author-email=robin@camelot.uk
home-page=http://github.com/sirrobin/module1
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5115573
flit-3.12.0/tests/samples/module1_ini/module1.py0000644000000000000000000000005214770461472016425 0ustar00"""Example module"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5115573
flit-3.12.0/tests/samples/module1_toml/EG_README.rst0000644000000000000000000000024514770461472016747 0ustar00This is an example long description for tests to load.

This file is `valid reStructuredText
`_.
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5115573
flit-3.12.0/tests/samples/module1_toml/module1.py0000644000000000000000000000005214770461472016621 0ustar00"""Example module"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5115573
flit-3.12.0/tests/samples/module1_toml/pyproject.toml0000644000000000000000000000044014770461472017616 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
description-file = "EG_README.rst"

[tool.flit.metadata.urls]
Documentation = "https://example.com/module1"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5115573
flit-3.12.0/tests/samples/module2.py0000644000000000000000000000007414770461472014225 0ustar00"""
Docstring formatted like this.
"""

__version__ = '7.0'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5115573
flit-3.12.0/tests/samples/module3/LICENSE0000644000000000000000000000005414770461472014657 0ustar00Dummy license - check that it gets packaged
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5115573
flit-3.12.0/tests/samples/module3/pyproject.toml0000644000000000000000000000034514770461472016571 0ustar00[build-system]
requires = ["flit_core >=2,<4"]
build-backend = "flit_core.buildapi"

[tool.flit.metadata]
module = "module3"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module3"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5125573
flit-3.12.0/tests/samples/module3/src/module3.py0000644000000000000000000000005214770461472016361 0ustar00"""Example module"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5125573
flit-3.12.0/tests/samples/moduleunimportable.py0000644000000000000000000000012014770461472016555 0ustar00
"""
A sample unimportable module
"""

raise ImportError()

__version__ = "0.1"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5125573
flit-3.12.0/tests/samples/modulewithconstructedversion.py0000644000000000000000000000016314770461472020722 0ustar00
"""This module has a __version__ that requires runtime interpretation"""

__version__ = ".".join(["1", "2", "3"])
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5125573
flit-3.12.0/tests/samples/modulewithlocalversion/modulewithlocalversion.py0000644000000000000000000000011714770461472024260 0ustar00"""
A module with a local version specifier
"""

__version__ = "0.1.dev0+test"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5125573
flit-3.12.0/tests/samples/modulewithlocalversion/pyproject.toml0000644000000000000000000000040314770461472022016 0ustar00[build-system]
requires = ["flit_core >=2,<4"]
build-backend = "flit_core.buildapi"

[tool.flit.metadata]
module = "modulewithlocalversion"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/modulewithlocalversion"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5125573
flit-3.12.0/tests/samples/my-description.rst0000644000000000000000000000003514770461472016001 0ustar00Sample description for test.
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5125573
flit-3.12.0/tests/samples/no_docstring-pkg.toml0000644000000000000000000000045714770461472016455 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "no_docstring"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/no_docstring"
description-file = "EG_README.rst"

[tool.flit.metadata.urls]
Documentation = "https://example.com/no_docstring"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5125573
flit-3.12.0/tests/samples/no_docstring.py0000644000000000000000000000002414770461472015341 0ustar00__version__ = '7.0'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5125573
flit-3.12.0/tests/samples/ns1-pkg-mod/ns1/module.py0000644000000000000000000000012114770461472016672 0ustar00"""An example single file module in a namespace package
"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5125573
flit-3.12.0/tests/samples/ns1-pkg-mod/pyproject.toml0000644000000000000000000000023214770461472017251 0ustar00[build-system]
requires = ["flit_core >=3.5,<4"]
build-backend = "flit_core.buildapi"

[project]
name = "ns1.module"
dynamic = ["version", "description"]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5125573
flit-3.12.0/tests/samples/ns1-pkg/EG_README.rst0000644000000000000000000000024514770461472015626 0ustar00This is an example long description for tests to load.

This file is `valid reStructuredText
`_.
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5125573
flit-3.12.0/tests/samples/ns1-pkg/ns1/pkg/__init__.py0000644000000000000000000000011314770461472017151 0ustar00"""
==================
ns1.pkg
==================
"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5125573
flit-3.12.0/tests/samples/ns1-pkg/pyproject.toml0000644000000000000000000000041214770461472016474 0ustar00[build-system]
requires = ["flit_core >=3.5,<4"]
build-backend = "flit_core.buildapi"

[tool.flit.metadata]
module = "ns1.pkg"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
description-file = "EG_README.rst"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5125573
flit-3.12.0/tests/samples/ns1-pkg2/EG_README.rst0000644000000000000000000000024514770461472015710 0ustar00This is an example long description for tests to load.

This file is `valid reStructuredText
`_.
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5135572
flit-3.12.0/tests/samples/ns1-pkg2/ns1/pkg2/__init__.py0000644000000000000000000000011414770461472017316 0ustar00"""
==================
ns1.pkg2
==================
"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5135572
flit-3.12.0/tests/samples/ns1-pkg2/pyproject.toml0000644000000000000000000000041314770461472016557 0ustar00[build-system]
requires = ["flit_core >=3.5,<4"]
build-backend = "flit_core.buildapi"

[tool.flit.metadata]
module = "ns1.pkg2"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
description-file = "EG_README.rst"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5135572
flit-3.12.0/tests/samples/package1/my-description.rst0000644000000000000000000000003514770461472017455 0ustar00Sample description for test.
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5135572
flit-3.12.0/tests/samples/package1/package1/__init__.py0000644000000000000000000000012414770461472017541 0ustar00"""A sample package"""

__version__ = '0.1'

def main():
    print("package1 main")
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5135572
flit-3.12.0/tests/samples/package1/package1/data_dir/foo.sh0000644000000000000000000000004314770461472020316 0ustar00#!/bin/sh
echo "Example data file"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5135572
flit-3.12.0/tests/samples/package1/package1/foo.py0000644000000000000000000000000614770461472016564 0ustar00a = 1
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5135572
flit-3.12.0/tests/samples/package1/package1/subpkg/__init__.py0000644000000000000000000000000014770461472021025 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5135572
flit-3.12.0/tests/samples/package1/package1/subpkg/sp_data_dir/test.json0000644000000000000000000000002214770461472023043 0ustar00{"example": true}
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5135572
flit-3.12.0/tests/samples/package1/package1/subpkg2/__init__.py0000644000000000000000000000000014770461472021107 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5135572
flit-3.12.0/tests/samples/package1/pyproject.toml0000644000000000000000000000050114770461472016667 0ustar00[build-system]
requires = ["flit_core >=2,<4"]
build-backend = "flit_core.buildapi"

[tool.flit.metadata]
module = "package1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
description-file = "my-description.rst"
home-page = "http://github.com/sirrobin/package1"

[tool.flit.scripts]
pkg_script = "package1:main"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5135572
flit-3.12.0/tests/samples/package2/package2-pkg.ini0000644000000000000000000000023414770461472016714 0ustar00[metadata]
module=package2
author=Sir Robin
author-email=robin@camelot.uk
home-page=http://github.com/sirrobin/package2

[scripts]
pkg_script=package2:main
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5135572
flit-3.12.0/tests/samples/package2/pyproject.toml0000644000000000000000000000041714770461472016676 0ustar00[build-system]
requires = ["flit_core >=2,<4"]
build-backend = "flit_core.buildapi"

[tool.flit.metadata]
module = "package2"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/package2"

[scripts]
pkg_script = "package2:main"
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5145574
flit-3.12.0/tests/samples/package2/src/package2/__init__.py0000644000000000000000000000012414770461472020332 0ustar00"""A sample package"""

__version__ = '0.1'

def main():
    print("package1 main")
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5145574
flit-3.12.0/tests/samples/package2/src/package2/foo.py0000644000000000000000000000000614770461472017355 0ustar00a = 1
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5145574
flit-3.12.0/tests/samples/packageinsrc/pyproject.toml0000644000000000000000000000034214770461472017650 0ustar00[build-system]
requires = ["flit"]
build-backend = "flit.buildapi"

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
requires = []
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5145574
flit-3.12.0/tests/samples/packageinsrc/src/module1.py0000644000000000000000000000005214770461472017441 0ustar00"""Example module"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5145574
flit-3.12.0/tests/samples/pep517/module1.py0000644000000000000000000000005214770461472015241 0ustar00"""Example module"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5145574
flit-3.12.0/tests/samples/pep517/pyproject.toml0000644000000000000000000000041314770461472016236 0ustar00[build-system]
requires = ["flit"]
build-backend = "flit.buildapi"

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
requires = [
    "requests >= 2.18",
    "docutils",
]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5145574
flit-3.12.0/tests/samples/requires-dev.toml0000644000000000000000000000046714770461472015622 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
description-file = "EG_README.rst"
# This should generate a warning tell you to use requires-extra.dev
dev-requires = ["apackage"]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5145574
flit-3.12.0/tests/samples/requires-envmark/module1.py0000644000000000000000000000005214770461472017520 0ustar00"""Example module"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5145574
flit-3.12.0/tests/samples/requires-envmark/pyproject.toml0000644000000000000000000000037414770461472020523 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
requires = [
    "requests",
    "pathlib2; python_version == '2.7'",
]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5145574
flit-3.12.0/tests/samples/requires-extra-envmark/module1.py0000644000000000000000000000005214770461472020641 0ustar00"""Example module"""

__version__ = '0.1'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5145574
flit-3.12.0/tests/samples/requires-extra-envmark/pyproject.toml0000644000000000000000000000041014770461472021633 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"

[tool.flit.metadata.requires-extra]
test = ["pathlib2; python_version == \"2.7\""]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5145574
flit-3.12.0/tests/samples/requires-requests.toml0000644000000000000000000000035714770461472016715 0ustar00[build-system]
requires = ["flit"]

[tool.flit.metadata]
module = "module1"
author = "Sir Robin"
author-email = "robin@camelot.uk"
home-page = "http://github.com/sirrobin/module1"
description-file = "EG_README.rst"
requires = ["requests"]
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5145574
flit-3.12.0/tests/samples/with_flit_ini/flit.ini0000644000000000000000000000030414770461472016567 0ustar00[metadata]
module=package1
author=Sir Robin
author-email=robin@camelot.uk
home-page=http://github.com/sirrobin/package1
entry-points-file=some_entry_points.txt

[scripts]
pkg_script=package1:main
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5155573
flit-3.12.0/tests/samples/with_flit_ini/package1/__init__.py0000644000000000000000000000012414770461472020715 0ustar00"""A sample package"""

__version__ = '0.1'

def main():
    print("package1 main")
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5155573
flit-3.12.0/tests/samples/with_flit_ini/package1/foo.py0000644000000000000000000000000614770461472017740 0ustar00a = 1
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5155573
flit-3.12.0/tests/samples/with_flit_ini/package1/subpkg/__init__.py0000644000000000000000000000000014770461472022201 0ustar00././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5155573
flit-3.12.0/tests/samples/with_flit_ini/some_entry_points.txt0000644000000000000000000000004314770461472021451 0ustar00[myplugins]
package1=package1:main
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5155573
flit-3.12.0/tests/test_build.py0000644000000000000000000000556414770461472013361 0ustar00from pathlib import Path
import pytest
import shutil
import sys
from tempfile import TemporaryDirectory
from testpath import assert_isdir, MockCommand

from flit_core import common
from flit import build

samples_dir = Path(__file__).parent / 'samples'

# Shell-script template used with MockCommand to stand in for git:
# it prints a fixed NUL-separated file list (the format of
# ``git ls-files -z``) unless invoked with ``--deleted``.
# Placeholders: {python} -> interpreter path, {module} -> module filename.
LIST_FILES_TEMPLATE = """\
#!{python}
import sys
from os.path import join
if '--deleted' not in sys.argv:
    files = ['pyproject.toml', '{module}', 'EG_README.rst']
    print('\\0'.join(files), end='\\0')
"""

def test_build_main(copy_sample):
    """Building with default formats produces both a wheel and an sdist."""
    project = copy_sample('module1_toml')
    (project / '.git').mkdir()  # Pretend this is a git checkout

    fake_git = LIST_FILES_TEMPLATE.format(python=sys.executable, module='module1.py')
    with MockCommand('git', fake_git):
        result = build.main(project / 'pyproject.toml')

    assert result.wheel.file.suffix == '.whl'
    assert result.sdist.file.name.endswith('.tar.gz')
    assert_isdir(project / 'dist')

def test_build_sdist_only(copy_sample):
    """With formats={'sdist'}, no wheel is built and dist/ holds only the sdist."""
    project = copy_sample('module1_toml')
    (project / '.git').mkdir()  # Pretend this is a git checkout

    fake_git = LIST_FILES_TEMPLATE.format(python=sys.executable, module='module1.py')
    with MockCommand('git', fake_git):
        result = build.main(project / 'pyproject.toml', formats={'sdist'})

    assert result.wheel is None

    # Compare str paths to work around pathlib/pathlib2 mismatch on Py 3.5
    dist_contents = [str(p) for p in (project / 'dist').iterdir()]
    assert dist_contents == [str(result.sdist.file)]

def test_build_wheel_only(copy_sample):
    """With formats={'wheel'}, no sdist is built and dist/ holds only the wheel."""
    project = copy_sample('module1_toml')
    (project / '.git').mkdir()  # Pretend this is a git checkout

    fake_git = LIST_FILES_TEMPLATE.format(python=sys.executable, module='module1.py')
    with MockCommand('git', fake_git):
        result = build.main(project / 'pyproject.toml', formats={'wheel'})

    assert result.sdist is None

    # Compare str paths to work around pathlib/pathlib2 mismatch on Py 3.5
    dist_contents = [str(p) for p in (project / 'dist').iterdir()]
    assert dist_contents == [str(result.wheel.file)]

def test_build_ns_main(copy_sample):
    """A native namespace package builds both a wheel and an sdist."""
    project = copy_sample('ns1-pkg')
    (project / '.git').mkdir()  # Pretend this is a git checkout

    fake_git = LIST_FILES_TEMPLATE.format(
        python=sys.executable, module='ns1/pkg/__init__.py')
    with MockCommand('git', fake_git):
        result = build.main(project / 'pyproject.toml')

    assert result.wheel.file.suffix == '.whl'
    assert result.sdist.file.name.endswith('.tar.gz')
    assert_isdir(project / 'dist')


def test_build_module_no_docstring():
    """Building a module with no docstring raises NoDocstringError naming the file."""
    with TemporaryDirectory() as td:
        pyproject = Path(td, 'pyproject.toml')
        for source, destination in [
            (samples_dir / 'no_docstring-pkg.toml', pyproject),
            (samples_dir / 'no_docstring.py', td),
            (samples_dir / 'EG_README.rst', td),
        ]:
            shutil.copy(str(source), str(destination))
        Path(td, '.git').mkdir()  # Pretend this is a git repo

        fake_git = LIST_FILES_TEMPLATE.format(
            python=sys.executable, module='no_docstring.py')
        with MockCommand('git', fake_git):
            with pytest.raises(common.NoDocstringError) as exc_info:
                build.main(pyproject)
            assert 'no_docstring.py' in str(exc_info.value)
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5155573
flit-3.12.0/tests/test_command.py0000644000000000000000000000071414770461472013670 0ustar00from subprocess import Popen, PIPE, STDOUT
import sys

def test_flit_help():
    """`python -m flit --help` output should mention the build command."""
    proc = Popen([sys.executable, '-m', 'flit', '--help'],
                 stdout=PIPE, stderr=STDOUT)
    output, _ = proc.communicate()
    assert 'Build wheel' in output.decode('utf-8', 'replace')

def test_flit_usage():
    """Running flit with no arguments prints usage and exits with status 1."""
    proc = Popen([sys.executable, '-m', 'flit'], stdout=PIPE, stderr=STDOUT)
    output, _ = proc.communicate()
    assert 'Build wheel' in output.decode('utf-8', 'replace')
    assert proc.poll() == 1
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5155573
flit-3.12.0/tests/test_config.py0000644000000000000000000000042614770461472013517 0ustar00from pathlib import Path
import pytest

from flit.config import read_flit_config, ConfigError

samples_dir = Path(__file__).parent / 'samples'

def test_invalid_classifier():
    # A pyproject.toml listing an unrecognised classifier must be rejected
    # with ConfigError when the config is read.
    with pytest.raises(ConfigError):
        read_flit_config(samples_dir / 'invalid_classifier.toml')
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5155573
flit-3.12.0/tests/test_find_python_executable.py0000644000000000000000000000234714770461472017000 0ustar00from os.path import isabs, basename, dirname
import os
import re
import sys
import venv

import pytest

from flit import PythonNotFoundError, find_python_executable


def test_default():
    # With no python specified, the current interpreter is returned.
    assert find_python_executable(None) == sys.executable


def test_self():
    # Passing the current interpreter's path returns it unchanged.
    assert find_python_executable(sys.executable) == sys.executable


def test_abs():
    # Any absolute path is returned as-is (platform-appropriate example;
    # the file does not need to exist for this case).
    abs_path = "C:\\PythonXY\\python.exe" if os.name == 'nt' else '/usr/bin/python'
    assert find_python_executable(abs_path) == abs_path


def test_find_in_path():
    # A bare command name is resolved through PATH to an absolute path.
    assert isabs(find_python_executable("python"))


def test_env(tmp_path):
    """Passing a virtualenv directory resolves to the interpreter inside it."""
    env_dir = tmp_path / "venv"
    venv.create(env_dir)

    executable = find_python_executable(env_dir)
    # <venv>/bin/python (or Scripts\python.exe): grandparent dir is the env
    assert basename(dirname(dirname(executable))) == "venv"


def test_env_abs(tmp_path, monkeypatch):
    """A relative virtualenv path is resolved to an absolute interpreter path."""
    env_dir = tmp_path / "venv"
    venv.create(env_dir)

    monkeypatch.chdir(tmp_path)
    resolved = find_python_executable("venv")
    assert isabs(resolved)


@pytest.mark.parametrize("bad_python_name", ["pyhton", "ls", "."])
def test_exception(bad_python_name: str):
    """Test that an appropriate exception (that contains the error string) is raised."""
    # Covers a misspelling, a real non-Python command, and a directory-like name.
    with pytest.raises(PythonNotFoundError, match=re.escape(bad_python_name)):
        find_python_executable(bad_python_name)
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5155573
flit-3.12.0/tests/test_init.py0000644000000000000000000002032114770461472013211 0ustar00import builtins
from contextlib import contextmanager
from pathlib import Path
from tempfile import TemporaryDirectory
from testpath import assert_isfile
from unittest.mock import patch
import pytest

try:
    import tomllib
except ImportError:
    import tomli as tomllib

from flit import init


@contextmanager
def patch_data_dir():
    # Redirect flit's data directory to a throwaway temp dir so the tests
    # never read or write the real user configuration. Yields the dir path.
    with TemporaryDirectory() as td:
        with patch.object(init, 'get_data_dir', lambda: Path(td)):
            yield td

def test_store_defaults():
    """Defaults start out empty and round-trip through store/get."""
    with patch_data_dir():
        assert init.get_defaults() == {}
        defaults = {'author': 'Test'}
        init.store_defaults(defaults)
        assert init.get_defaults() == defaults

def fake_input(entries):
    """Return an input() replacement yielding *entries* in order.

    Once the entries are exhausted, further calls raise EOFError, mirroring
    real input() at end of stream. The prompt argument is ignored.
    """
    remaining = iter(entries)

    def replacement(prompt):
        try:
            return next(remaining)
        except StopIteration:
            raise EOFError

    return replacement

def faking_input(entries):
    # Context manager patching builtins.input to return *entries* in sequence.
    return patch.object(builtins, 'input', fake_input(entries))

def test_prompt_options():
    """Out-of-range selections are re-prompted; empty input picks the default."""
    ti = init.TerminalIniter()
    options = [('A', 'Apple'), ('B', 'Banana')]

    # '4' is not a valid choice, so the initer asks again and gets '1' -> 'A'
    with faking_input(['4', '1']):
        chosen = ti.prompt_options('Pick one', options)
    assert chosen == 'A'

    # An empty response falls back to the supplied default
    with faking_input(['']):
        chosen = ti.prompt_options('Pick one', options, default='B')
    assert chosen == 'B'

@contextmanager
def make_dir(files=(), dirs=()):
    """Yield a temp directory path pre-populated with empty *files* and *dirs*.

    Directories are created first, so *files* may name paths inside them.
    """
    with TemporaryDirectory() as td:
        root = Path(td)
        for name in dirs:
            (root / name).mkdir()
        for name in files:
            (root / name).touch()
        yield td

def test_guess_module_name():
    """guess_module_name picks the one plausible module, or None when ambiguous.

    Helper scripts (test_*.py, setup.py) and non-importable names (foo-bar.py)
    are ignored; src/ layouts and package dirs with __init__.py are handled.
    """
    cases = [
        # (files, dirs, expected guess)
        (['foo.py', 'foo-bar.py', 'test_foo.py', 'setup.py'], (), 'foo'),
        (['baz/__init__.py', 'tests/__init__.py'], ['baz', 'tests'], 'baz'),
        (['src/foo.py', 'src/foo-bar.py', 'test_foo.py', 'setup.py'],
         ['src'], 'foo'),
        (['src/baz/__init__.py', 'tests/__init__.py'],
         ['src', 'src/baz', 'tests'], 'baz'),
        (['foo.py', 'bar.py'], (), None),            # two candidates: ambiguous
        (['src/foo.py', 'src/bar.py'], ['src'], None),
    ]
    for files, dirs, expected in cases:
        with make_dir(files, dirs) as td:
            assert init.IniterBase(td).guess_module_name() == expected

def test_write_license():
    # write_license should create a LICENSE file in the target directory.
    with TemporaryDirectory() as td:
        ib = init.IniterBase(td)
        ib.write_license('mit', 'Thomas Kluyver')
        assert_isfile(Path(td, 'LICENSE'))

def test_init():
    """A full interactive run writes pyproject.toml and a filled-in MIT LICENSE."""
    responses = [
        'foo',                   # module name
        'Test Author',           # author
        'test@example.com',      # author email
        'http://example.com/',   # home page
        '1',                     # license choice (1 -> MIT)
    ]
    with TemporaryDirectory() as td, \
          patch_data_dir(), \
          faking_input(responses):
        init.TerminalIniter(td).initialise()

        generated = Path(td) / 'pyproject.toml'
        assert_isfile(generated)
        with generated.open('rb') as f:
            data = tomllib.load(f)
        assert data['project']['authors'][0]['email'] == "test@example.com"
        assert data['project']['license'] == 'MIT'
        assert data['project']['license-files'] == ['LICENSE']

        license_path = Path(td) / 'LICENSE'
        assert_isfile(license_path)
        with license_path.open() as f:
            license_text = f.read()
        # Template placeholders must be substituted, not left verbatim
        assert license_text.startswith("The MIT License (MIT)")
        assert "{year}" not in license_text
        assert "Test Author" in license_text

def test_init_homepage_and_license_are_optional():
    """Skipping home page and license leaves both out of the generated metadata."""
    responses = [
        'test_module_name',
        'Test Author',
        'test_email@example.com',
        '',   # home page omitted
        '4',  # skip - choose a license later
    ]
    with TemporaryDirectory() as td, \
          patch_data_dir(), \
          faking_input(responses):
        init.TerminalIniter(td).initialise()
        with Path(td, 'pyproject.toml').open('rb') as f:
            data = tomllib.load(f)
        assert not Path(td, 'LICENSE').exists()

    expected = {
        'authors': [{'name': 'Test Author', 'email': 'test_email@example.com'}],
        'name': 'test_module_name',
        'dynamic': ['version', 'description'],
    }
    assert data['project'] == expected

def test_init_homepage_validator():
    """A home page without a scheme is rejected and the user is re-prompted."""
    responses = [
        'test_module_name',
        'Test Author',
        'test_email@example.com',
        'www.uh-oh-spagghetti-o.com',  # fails validation
        'https://www.example.org',     # passes
        '4',  # skip - choose a license later
    ]
    with TemporaryDirectory() as td, \
          patch_data_dir(), \
          faking_input(responses):
        init.TerminalIniter(td).initialise()
        with Path(td, 'pyproject.toml').open('rb') as f:
            data = tomllib.load(f)

    expected = {
        'authors': [{'name': 'Test Author', 'email': 'test_email@example.com'}],
        'name': 'test_module_name',
        'urls': {'Home': 'https://www.example.org'},
        'dynamic': ['version', 'description'],
    }
    assert data['project'] == expected

def test_author_email_field_is_optional():
    """An omitted author email leaves only the name in the authors table."""
    responses = [
        'test_module_name',
        'Test Author',
        '',  # author-email field is skipped
        'https://www.example.org',
        '4',
    ]
    with TemporaryDirectory() as td, \
          patch_data_dir(), \
          faking_input(responses):
        init.TerminalIniter(td).initialise()
        with Path(td, 'pyproject.toml').open('rb') as f:
            data = tomllib.load(f)
        assert not Path(td, 'LICENSE').exists()

    expected = {
        'authors': [{'name': 'Test Author'}],
        'name': 'test_module_name',
        'urls': {'Home': 'https://www.example.org'},
        'dynamic': ['version', 'description'],
    }
    assert data['project'] == expected


@pytest.mark.parametrize(
    "readme_file",
    ["readme.md", "README.MD", "README.md",
     "Readme.md", "readme.MD", "readme.rst",
     "readme.txt"])
def test_find_readme(readme_file):
    # find_readme should match README files case-insensitively, in any of
    # the supported formats (.md, .rst, .txt), returning the actual filename.
    with make_dir([readme_file]) as td:
        ib = init.IniterBase(td)
        assert ib.find_readme() == readme_file


def test_find_readme_not_found():
    # An empty directory has no readme; find_readme returns None.
    with make_dir() as td:
        ib = init.IniterBase(td)
        assert ib.find_readme() is None


def test_init_readme_found_yes_choosen():
    """A readme detected in the directory is recorded in the generated metadata."""
    responses = [
        'test_module_name',
        'Test Author',
        'test_email@example.com',
        '',   # home page omitted
        '4',  # skip - choose a license later
    ]
    with make_dir(["readme.md"]) as td, \
          patch_data_dir(), \
          faking_input(responses):
        init.TerminalIniter(td).initialise()
        with Path(td, 'pyproject.toml').open('rb') as f:
            data = tomllib.load(f)

    expected = {
        'authors': [{'name': 'Test Author', 'email': 'test_email@example.com'}],
        'name': 'test_module_name',
        'readme': 'readme.md',
        'dynamic': ['version', 'description'],
    }
    assert data['project'] == expected


def test_init_non_ascii_author_name():
    """Non-ASCII author names are written as UTF-8 text, not escape sequences."""
    responses = [
        'foo',          # module name
        'Test Authôr',  # author
        '',             # author email omitted
        '',             # home page omitted
        '1',            # license choice (1 -> MIT)
    ]
    with TemporaryDirectory() as td, \
          patch_data_dir(), \
          faking_input(responses):
        init.TerminalIniter(td).initialise()

        generated = Path(td) / 'pyproject.toml'
        assert_isfile(generated)
        with generated.open('r', encoding='utf-8') as f:
            raw_text = f.read()
        print(raw_text)
        assert "Test Authôr" in raw_text
        assert "\\u00f4" not in raw_text

        license_path = Path(td) / 'LICENSE'
        assert_isfile(license_path)
        with license_path.open(encoding='utf-8') as f:
            license_text = f.read()
        assert "Test Authôr" in license_text
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5155573
flit-3.12.0/tests/test_install.py0000644000000000000000000003704614770461472013730 0ustar00import json
import os
import pathlib
import sys
import tempfile
from unittest import TestCase, SkipTest, skipIf
from unittest.mock import patch

import pytest
from testpath import (
    assert_isfile, assert_isdir, assert_islink, assert_not_path_exists, MockCommand
)

from flit import install
from flit.install import Installer, _requires_dist_to_pip_requirement, DependencyError

tests_dir = pathlib.Path(__file__).parent
samples_dir = tests_dir / 'samples'
core_samples_dir = tests_dir.parent / 'flit_core' / 'tests_core' / 'samples'

class InstallTests(TestCase):
    """Tests for flit's Installer: direct installs, symlink/pth editable
    installs, namespace packages, entry points, and delegation to pip.

    setUp patches flit.install.get_dirs so that scripts, purelib and data
    all land in a per-test temporary directory instead of the real
    environment; self.tmpdir points at that directory.
    """
    def setUp(self):
        # Redirect the install locations (scripts/purelib/data) into a
        # fresh temp dir for the duration of each test.
        td = tempfile.TemporaryDirectory()
        self.addCleanup(td.cleanup)
        self.get_dirs_patch = patch('flit.install.get_dirs',
                return_value={
                    'scripts': os.path.join(td.name, 'scripts'),
                    'purelib': os.path.join(td.name, 'site-packages'),
                    'data': os.path.join(td.name, 'data'),
                })
        self.get_dirs_patch.start()
        self.tmpdir = pathlib.Path(td.name)

    def tearDown(self):
        self.get_dirs_patch.stop()

    def _assert_direct_url(self, directory, package, version, expected_editable):
        # Check the direct_url.json written into the dist-info directory:
        # its URL must point at the source directory, and its dir_info must
        # record whether the install was editable.
        direct_url_file = (
            self.tmpdir
            / 'site-packages'
            / '{}-{}.dist-info'.format(package, version)
            / 'direct_url.json'
        )
        assert_isfile(direct_url_file)
        with direct_url_file.open() as f:
            direct_url = json.load(f)
            assert direct_url['url'].startswith('file:///')
            assert direct_url['url'] == directory.as_uri()
            assert direct_url['dir_info'].get('editable') is expected_editable

    def test_install_module(self):
        Installer.from_ini_path(samples_dir / 'module1_toml' / 'pyproject.toml').install_directly()
        assert_isfile(self.tmpdir / 'site-packages' / 'module1.py')
        assert_isdir(self.tmpdir / 'site-packages' / 'module1-0.1.dist-info')
        self._assert_direct_url(
            samples_dir / 'module1_toml', 'module1', '0.1', expected_editable=False
        )

    @skipIf(not core_samples_dir.is_dir(), "Missing flit_core samples")
    def test_install_module_pep621(self):
        Installer.from_ini_path(
            core_samples_dir / 'pep621_nodynamic' / 'pyproject.toml',
        ).install_directly()
        assert_isfile(self.tmpdir / 'site-packages' / 'module1.py')
        assert_isdir(self.tmpdir / 'site-packages' / 'module1-0.3.dist-info')
        self._assert_direct_url(
            core_samples_dir / 'pep621_nodynamic', 'module1', '0.3',
            expected_editable=False
        )

    def test_install_package(self):
        # Installing from a relative pyproject.toml path (cwd = project dir).
        oldcwd = os.getcwd()
        os.chdir(str(samples_dir / 'package1'))
        try:
            Installer.from_ini_path(pathlib.Path('pyproject.toml')).install_directly()
        finally:
            os.chdir(oldcwd)
        assert_isdir(self.tmpdir / 'site-packages' / 'package1')
        assert_isdir(self.tmpdir / 'site-packages' / 'package1-0.1.dist-info')
        assert_isfile(self.tmpdir / 'scripts' / 'pkg_script')
        # The generated script's shebang must point at this interpreter.
        with (self.tmpdir / 'scripts' / 'pkg_script').open() as f:
            assert f.readline().strip() == "#!" + sys.executable
        self._assert_direct_url(
            samples_dir / 'package1', 'package1', '0.1', expected_editable=False
        )

    def test_install_module_in_src(self):
        oldcwd = os.getcwd()
        os.chdir(samples_dir / 'packageinsrc')
        try:
            Installer.from_ini_path(pathlib.Path('pyproject.toml')).install_directly()
        finally:
            os.chdir(oldcwd)
        assert_isfile(self.tmpdir / 'site-packages' / 'module1.py')
        assert_isdir(self.tmpdir / 'site-packages' / 'module1-0.1.dist-info')

    def test_install_ns_package_native(self):
        # Native namespace package: the namespace dir gets no __init__.py.
        Installer.from_ini_path(samples_dir / 'ns1-pkg' / 'pyproject.toml').install_directly()
        assert_isdir(self.tmpdir / 'site-packages' / 'ns1')
        assert_isfile(self.tmpdir / 'site-packages' / 'ns1' / 'pkg' / '__init__.py')
        assert_not_path_exists(self.tmpdir / 'site-packages' / 'ns1' / '__init__.py')
        assert_isdir(self.tmpdir / 'site-packages' / 'ns1_pkg-0.1.dist-info')

    def test_install_ns_package_module_native(self):
        Installer.from_ini_path(samples_dir / 'ns1-pkg-mod' / 'pyproject.toml').install_directly()
        assert_isfile(self.tmpdir / 'site-packages' / 'ns1' / 'module.py')
        assert_not_path_exists(self.tmpdir / 'site-packages' / 'ns1' / '__init__.py')

    def test_install_ns_package_native_symlink(self):
        # Three distributions sharing the ns1 namespace, each symlinked:
        # the shared ns1 dir is real, the leaves are links to the sources.
        if os.name == 'nt':
            raise SkipTest('symlink')
        Installer.from_ini_path(
            samples_dir / 'ns1-pkg' / 'pyproject.toml', symlink=True
        ).install_directly()
        Installer.from_ini_path(
            samples_dir / 'ns1-pkg2' / 'pyproject.toml', symlink=True
        ).install_directly()
        Installer.from_ini_path(
            samples_dir / 'ns1-pkg-mod' / 'pyproject.toml', symlink=True
        ).install_directly()
        assert_isdir(self.tmpdir / 'site-packages' / 'ns1')
        assert_isdir(self.tmpdir / 'site-packages' / 'ns1' / 'pkg')
        assert_islink(self.tmpdir / 'site-packages' / 'ns1' / 'pkg',
                      to=str(samples_dir / 'ns1-pkg' / 'ns1' / 'pkg'))
        assert_isdir(self.tmpdir / 'site-packages' / 'ns1_pkg-0.1.dist-info')

        assert_isdir(self.tmpdir / 'site-packages' / 'ns1' / 'pkg2')
        assert_islink(self.tmpdir / 'site-packages' / 'ns1' / 'pkg2',
                      to=str(samples_dir / 'ns1-pkg2' / 'ns1' / 'pkg2'))
        assert_isdir(self.tmpdir / 'site-packages' / 'ns1_pkg2-0.1.dist-info')

        assert_islink(self.tmpdir / 'site-packages' / 'ns1' / 'module.py',
                      to=samples_dir / 'ns1-pkg-mod' / 'ns1' / 'module.py')
        assert_isdir(self.tmpdir / 'site-packages' / 'ns1_module-0.1.dist-info')

    def test_install_ns_package_pth_file(self):
        Installer.from_ini_path(
            samples_dir / 'ns1-pkg' / 'pyproject.toml', pth=True
        ).install_directly()

        # The .pth file must contain the project directory path.
        pth_file = self.tmpdir / 'site-packages' / 'ns1.pkg.pth'
        assert_isfile(pth_file)
        assert pth_file.read_text('utf-8').strip() == str(samples_dir / 'ns1-pkg')

    def test_symlink_package(self):
        if os.name == 'nt':
            raise SkipTest("symlink")
        Installer.from_ini_path(samples_dir / 'package1' / 'pyproject.toml', symlink=True).install()
        assert_islink(self.tmpdir / 'site-packages' / 'package1',
                      to=samples_dir / 'package1' / 'package1')
        assert_isfile(self.tmpdir / 'scripts' / 'pkg_script')
        with (self.tmpdir / 'scripts' / 'pkg_script').open() as f:
            assert f.readline().strip() == "#!" + sys.executable
        self._assert_direct_url(
            samples_dir / 'package1', 'package1', '0.1', expected_editable=True
        )

    @skipIf(not core_samples_dir.is_dir(), "Missing flit_core samples")
    def test_symlink_module_pep621(self):
        if os.name == 'nt':
            raise SkipTest("symlink")
        Installer.from_ini_path(
            core_samples_dir / 'pep621_nodynamic' / 'pyproject.toml', symlink=True
        ).install_directly()
        assert_islink(self.tmpdir / 'site-packages' / 'module1.py',
                      to=core_samples_dir / 'pep621_nodynamic' / 'module1.py')
        assert_isdir(self.tmpdir / 'site-packages' / 'module1-0.3.dist-info')
        self._assert_direct_url(
            core_samples_dir / 'pep621_nodynamic', 'module1', '0.3',
            expected_editable=True
        )

    def test_symlink_module_in_src(self):
        if os.name == 'nt':
            raise SkipTest("symlink")
        oldcwd = os.getcwd()
        os.chdir(samples_dir / 'packageinsrc')
        try:
            Installer.from_ini_path(
                pathlib.Path('pyproject.toml'), symlink=True
            ).install_directly()
        finally:
            os.chdir(oldcwd)
        assert_islink(self.tmpdir / 'site-packages' / 'module1.py',
                      to=(samples_dir / 'packageinsrc' / 'src' / 'module1.py'))
        assert_isdir(self.tmpdir / 'site-packages' / 'module1-0.1.dist-info')

    def test_pth_package(self):
        Installer.from_ini_path(samples_dir / 'package1' / 'pyproject.toml', pth=True).install()
        assert_isfile(self.tmpdir / 'site-packages' / 'package1.pth')
        with open(str(self.tmpdir / 'site-packages' / 'package1.pth')) as f:
            assert f.read() == str(samples_dir / 'package1')
        assert_isfile(self.tmpdir / 'scripts' / 'pkg_script')
        self._assert_direct_url(
            samples_dir / 'package1', 'package1', '0.1', expected_editable=True
        )

    def test_pth_module_in_src(self):
        oldcwd = os.getcwd()
        os.chdir(samples_dir / 'packageinsrc')
        try:
            Installer.from_ini_path(
                pathlib.Path('pyproject.toml'), pth=True
            ).install_directly()
        finally:
            os.chdir(oldcwd)
        # For a src/ layout, the .pth file points at the src directory.
        pth_path = self.tmpdir / 'site-packages' / 'module1.pth'
        assert_isfile(pth_path)
        assert pth_path.read_text('utf-8').strip() == str(
            samples_dir / 'packageinsrc' / 'src'
        )
        assert_isdir(self.tmpdir / 'site-packages' / 'module1-0.1.dist-info')

    def test_dist_name(self):
        # The distribution name (package_dist1) may differ from the module name.
        Installer.from_ini_path(samples_dir / 'altdistname' / 'pyproject.toml').install_directly()
        assert_isdir(self.tmpdir / 'site-packages' / 'package1')
        assert_isdir(self.tmpdir / 'site-packages' / 'package_dist1-0.1.dist-info')

    def test_entry_points(self):
        Installer.from_ini_path(samples_dir / 'entrypoints_valid' / 'pyproject.toml').install_directly()
        assert_isfile(self.tmpdir / 'site-packages' / 'package1-0.1.dist-info' / 'entry_points.txt')

    def test_pip_install(self):
        # With user=False and a different python, install() shells out to
        # `<python> -m pip install <project dir>`.
        ins = Installer.from_ini_path(samples_dir / 'package1' / 'pyproject.toml', python='mock_python',
                        user=False)

        with MockCommand('mock_python') as mock_py:
            ins.install()

        calls = mock_py.get_calls()
        assert len(calls) == 1
        cmd = calls[0]['argv']
        assert cmd[1:4] == ['-m', 'pip', 'install']
        assert cmd[4].endswith('package1')

    def test_symlink_other_python(self):
        # Symlink-install into a *different* interpreter: flit queries the
        # target python for its install dirs, so we mock it twice with
        # scripts that answer those queries for site-packages2/scripts2.
        if os.name == 'nt':
            raise SkipTest('symlink')
        (self.tmpdir / 'site-packages2').mkdir()
        (self.tmpdir / 'scripts2').mkdir()

        # Called by Installer._auto_user() :
        script1 = ("#!{python}\n"
                   "import sysconfig\n"
                   "print(True)\n"   # site.ENABLE_USER_SITE
                   "print({purelib!r})"  # sysconfig.get_path('purelib')
                  ).format(python=sys.executable,
                           purelib=str(self.tmpdir / 'site-packages2'))

        # Called by Installer._get_dirs() :
        script2 = ("#!{python}\n"
                   "import json, sys\n"
                   "json.dump({{'purelib': {purelib!r}, 'scripts': {scripts!r}, 'data': {data!r} }}, "
                   "sys.stdout)"
                  ).format(python=sys.executable,
                           purelib=str(self.tmpdir / 'site-packages2'),
                           scripts=str(self.tmpdir / 'scripts2'),
                           data=str(self.tmpdir / 'data'),
                  )

        with MockCommand('mock_python', content=script1):
            ins = Installer.from_ini_path(samples_dir / 'package1' / 'pyproject.toml', python='mock_python',
                      symlink=True)
        with MockCommand('mock_python', content=script2):
            ins.install()

        assert_islink(self.tmpdir / 'site-packages2' / 'package1',
                      to=samples_dir / 'package1' / 'package1')
        assert_isfile(self.tmpdir / 'scripts2' / 'pkg_script')
        with (self.tmpdir / 'scripts2' / 'pkg_script').open() as f:
            assert f.readline().strip() == "#!mock_python"

    def test_install_requires(self):
        # Requirements are installed via `<python> -m pip install -r <file>`.
        ins = Installer.from_ini_path(samples_dir / 'requires-requests.toml',
                        user=False, python='mock_python')

        with MockCommand('mock_python') as mockpy:
            ins.install_requirements()
        calls = mockpy.get_calls()
        assert len(calls) == 1
        assert calls[0]['argv'][1:5] == ['-m', 'pip', 'install', '-r']

    @skipIf(not core_samples_dir.is_dir(), "Missing flit_core samples")
    def test_install_reqs_my_python_if_needed_pep621(self):
        ins = Installer.from_ini_path(
            core_samples_dir / 'pep621_nodynamic' / 'pyproject.toml',
            deps='none',
        )

        # This shouldn't try to get version & docstring from the module
        ins.install_reqs_my_python_if_needed()

    def test_extras_error(self):
        # Requesting extras while deps='none' is contradictory -> error.
        with pytest.raises(DependencyError):
            Installer.from_ini_path(samples_dir / 'requires-requests.toml',
                            user=False, deps='none', extras='dev')

    @skipIf(not core_samples_dir.is_dir(), "Missing flit_core samples")
    def test_install_data_dir(self):
        Installer.from_ini_path(
            core_samples_dir / 'with_data_dir' / 'pyproject.toml',
        ).install_directly()
        assert_isfile(self.tmpdir / 'site-packages' / 'module1.py')
        assert_isfile(self.tmpdir / 'data' / 'share' / 'man' / 'man1' / 'foo.1')

    @skipIf(not core_samples_dir.is_dir(), "Missing flit_core samples")
    def test_symlink_data_dir(self):
        if os.name == 'nt':
            raise SkipTest("symlink")
        Installer.from_ini_path(
            core_samples_dir / 'with_data_dir' / 'pyproject.toml', symlink=True
        ).install_directly()
        assert_isfile(self.tmpdir / 'site-packages' / 'module1.py')
        assert_islink(
            self.tmpdir / 'data' / 'share' / 'man' / 'man1' / 'foo.1',
            to=core_samples_dir / 'with_data_dir' / 'data' / 'share' / 'man' / 'man1' / 'foo.1'
        )

# Each scenario: the `deps` category selected, the extras requested, and the
# exact requirement lines expected to be handed to pip.
@pytest.mark.parametrize(('deps', 'extras', 'installed'), [
    ('none', [], set()),
    ('develop', [], {'pytest ;', 'toml ;'}),
    ('production', [], {'toml ;'}),
    ('all', [], {'toml ;', 'pytest ;', 'requests ;'}),
])
def test_install_requires_extra(deps, extras, installed):
    # Reuses InstallTests' setUp/tearDown to get the patched install dirs.
    it = InstallTests()
    try:
        it.setUp()
        ins = Installer.from_ini_path(samples_dir / 'extras' / 'pyproject.toml', python='mock_python',
                        user=False, deps=deps, extras=extras)

        # Fake python/pip that copies the requirements file it is given
        # (last argv entry) into MockCommand's recording file, so we can
        # inspect exactly which requirements would have been installed.
        cmd = MockCommand('mock_python')
        get_reqs = (
            "#!{python}\n"
            "import sys\n"
            "with open({recording_file!r}, 'wb') as w, open(sys.argv[-1], 'rb') as r:\n"
            "    w.write(r.read())"
        ).format(python=sys.executable, recording_file=cmd.recording_file)
        cmd.content = get_reqs

        with cmd as mock_py:
            ins.install_requirements()
        with open(mock_py.recording_file) as f:
            str_deps = f.read()
        deps = str_deps.split('\n') if str_deps else []

        assert set(deps) == installed
    finally:
        it.tearDown()

def test_requires_dist_to_pip_requirement():
    # Metadata Requires-Dist syntax (parenthesised version, ';' marker) is
    # converted to the form pip accepts in a requirements file.
    rd = 'pathlib2 (>=2.3); python_version == "2.7"'
    assert _requires_dist_to_pip_requirement(rd) == \
        'pathlib2>=2.3 ; python_version == "2.7"'

def test_test_writable_dir_win():
    # _test_writable_dir_win reports whether a directory accepts new files.
    with tempfile.TemporaryDirectory() as td:
        assert install._test_writable_dir_win(td) is True

        # Ironically, I don't know how to make a non-writable dir on Windows,
        # so although the functionality is for Windows, the test is for Posix
        if os.name != 'posix':
            return

        # Remove write permissions from the directory
        os.chmod(td, 0o444)
        try:
            assert install._test_writable_dir_win(td) is False
        finally:
            # NOTE(review): restores 0o644 rather than the usual 0o755 for a
            # directory; the dir is empty so cleanup still succeeds — confirm
            # if files are ever added here.
            os.chmod(td, 0o644)
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5155573
flit-3.12.0/tests/test_sdist.py0000644000000000000000000001246314770461472013404 0ustar00import ast
from os.path import join as pjoin
from pathlib import Path
import pytest
from shutil import which, copytree
import sys
import tarfile
from tempfile import TemporaryDirectory
from testpath import assert_isfile, MockCommand

from flit import sdist, common

samples_dir = Path(__file__).parent / 'samples'

def test_auto_packages():
    """auto_packages discovers subpackages and per-package data-file globs."""
    module = common.Module('package1', samples_dir / 'package1')
    packages, pkg_data = sdist.auto_packages(module)

    assert packages == ['package1', 'package1.subpkg', 'package1.subpkg2']
    expected_data = {
        '': ['*'],
        'package1': ['data_dir/*'],
        'package1.subpkg': ['sp_data_dir/*'],
    }
    assert pkg_data == expected_data

def test_make_sdist():
    """Smoke test: a complete sdist is produced and includes setup.py."""
    if not which('git'):
        pytest.skip("requires git")
    builder = sdist.SdistBuilder.from_ini_path(
        samples_dir / 'package1' / 'pyproject.toml')
    with TemporaryDirectory() as td:
        out_dir = Path(td)
        builder.build(out_dir)
        sdist_file = out_dir / 'package1-0.1.tar.gz'
        assert_isfile(sdist_file)

        with tarfile.open(str(sdist_file)) as tf:
            assert 'package1-0.1/setup.py' in tf.getnames()


def test_sdist_no_setup_py():
    """With gen_setup_py=False, the sdist is built without a setup.py."""
    if not which('git'):
        pytest.skip("requires git")
    builder = sdist.SdistBuilder.from_ini_path(
        samples_dir / 'package1' / 'pyproject.toml')
    with TemporaryDirectory() as td:
        out_dir = Path(td)
        builder.build(out_dir, gen_setup_py=False)
        sdist_file = out_dir / 'package1-0.1.tar.gz'
        assert_isfile(sdist_file)

        with tarfile.open(str(sdist_file)) as tf:
            assert 'package1-0.1/setup.py' not in tf.getnames()


# Shell-script template faking a VCS file-listing command. In 'git' mode it
# prints NUL-separated paths (like ``git ls-files -z``); in 'hg' mode it
# prints one path per line. When invoked with --deleted it prints nothing.
LIST_FILES = """\
#!{python}
import sys
from os.path import join
if '--deleted' not in sys.argv:
    files = [
        'foo',
        join('dir1', 'bar'),
        join('dir1', 'subdir', 'qux'),
        join('dir2', 'abc'),
        join('dist', 'def'),
    ]
    mode = '{vcs}'
    if mode == 'git':
        print('\\0'.join(files), end='\\0')
    elif mode == 'hg':
        for f in files:
            print(f)
"""

# Concrete scripts for each VCS flavour, bound to this interpreter.
LIST_FILES_GIT = LIST_FILES.format(python=sys.executable, vcs='git')
LIST_FILES_HG = LIST_FILES.format(python=sys.executable, vcs='hg')


def test_get_files_list_git(copy_sample):
    # With the mocked 'git', select_files returns the listed paths; the
    # dist/ entry from the mock output does not appear in the result.
    project = copy_sample('module1_toml')
    (project / '.git').mkdir()

    builder = sdist.SdistBuilder.from_ini_path(project / 'pyproject.toml')
    with MockCommand('git', LIST_FILES_GIT):
        selected = set(builder.select_files())

    expected = {
        'foo',
        pjoin('dir1', 'bar'),
        pjoin('dir1', 'subdir', 'qux'),
        pjoin('dir2', 'abc'),
    }
    assert selected == expected

def test_get_files_list_hg(tmp_path):
    # The mocked 'hg' lists paths relative to the repo root (tmp_path), one
    # level above the project dir, so only entries under dir1 survive,
    # re-relativized to the project directory.
    project = tmp_path / 'dir1'
    copytree(str(samples_dir / 'module1_toml'), str(project))
    (tmp_path / '.hg').mkdir()
    builder = sdist.SdistBuilder.from_ini_path(project / 'pyproject.toml')
    with MockCommand('hg', LIST_FILES_HG):
        selected = set(builder.select_files())

    assert selected == {'bar', pjoin('subdir', 'qux')}

def get_setup_assigns(setup):
    """Parse setup.py source, execute only its assignment statements, and
    return the resulting namespace dict."""
    tree = ast.parse(setup)
    # Strip everything except plain assignments (imports, calls, etc.)
    tree.body = [node for node in tree.body if isinstance(node, ast.Assign)]
    namespace = {}
    code = compile(tree, filename="setup.py", mode="exec")
    exec(code, namespace)
    return namespace

def test_make_setup_py():
    builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'package1' / 'pyproject.toml')
    assigns = get_setup_assigns(builder.make_setup_py())
    assert assigns['packages'] == ['package1', 'package1.subpkg', 'package1.subpkg2']
    assert 'install_requires' not in assigns
    expected_eps = {'console_scripts': ['pkg_script = package1:main']}
    assert assigns['entry_points'] == expected_eps

def test_make_setup_py_reqs():
    builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'extras' / 'pyproject.toml')
    assigns = get_setup_assigns(builder.make_setup_py())
    assert assigns['install_requires'] == ['toml']
    expected_extras = {'test': ['pytest'], 'custom': ['requests']}
    assert assigns['extras_require'] == expected_extras

def test_make_setup_py_reqs_envmark():
    # A requirement with an environment marker becomes a ':marker' extra.
    builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'requires-envmark' / 'pyproject.toml')
    assigns = get_setup_assigns(builder.make_setup_py())
    assert assigns['install_requires'] == ['requests']
    assert assigns['extras_require'] == {":python_version == '2.7'": ['pathlib2']}

def test_make_setup_py_reqs_extra_envmark():
    # An extra combined with an environment marker uses 'extra:marker' syntax.
    builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'requires-extra-envmark' / 'pyproject.toml')
    assigns = get_setup_assigns(builder.make_setup_py())
    assert assigns['extras_require'] == {'test:python_version == "2.7"': ['pathlib2']}

def test_make_setup_py_package_dir_src():
    # A src-layout project should map the root package dir to 'src'.
    builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'packageinsrc' / 'pyproject.toml')
    assigns = get_setup_assigns(builder.make_setup_py())
    assert assigns['package_dir'] == {'': 'src'}

def test_make_setup_py_ns_pkg():
    builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'ns1-pkg' / 'pyproject.toml')
    generated = builder.make_setup_py()
    assigns = get_setup_assigns(generated)
    assert assigns['packages'] == ['ns1', 'ns1.pkg']

def test_make_setup_py_ns_pkg_mod():
    builder = sdist.SdistBuilder.from_ini_path(samples_dir / 'ns1-pkg-mod' / 'pyproject.toml')
    generated = builder.make_setup_py()
    assigns = get_setup_assigns(generated)
    assert assigns['packages'] == ['ns1']
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5155573
flit-3.12.0/tests/test_tomlify.py0000644000000000000000000000136214770461472013735 0ustar00from pathlib import Path
try:
    import tomllib
except ImportError:
    import tomli as tomllib
from testpath import assert_isfile

from flit import tomlify

samples_dir = Path(__file__).parent / 'samples'

def test_tomlify(copy_sample, monkeypatch):
    # Converting a flit.ini project produces a pyproject.toml containing the
    # expected [tool.flit] sub-tables.
    project = copy_sample('with_flit_ini')
    monkeypatch.chdir(project)

    tomlify.main(argv=[])

    result = project / 'pyproject.toml'
    assert_isfile(result)

    with result.open('rb') as fh:
        parsed = tomllib.load(fh)

    assert 'build-system' in parsed
    assert 'tool' in parsed
    assert 'flit' in parsed['tool']
    flit_table = parsed['tool']['flit']
    for section in ('metadata', 'scripts', 'entrypoints'):
        assert section in flit_table
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5155573
flit-3.12.0/tests/test_upload.py0000644000000000000000000001062214770461472013535 0ustar00from contextlib import contextmanager
from tempfile import NamedTemporaryFile
import os
import io
import pathlib

import pytest
import responses
from testpath import modified_env
from unittest.mock import patch

from flit import upload
from flit.build import ALL_FORMATS
from flit.upload import get_repository, RepoDetails

samples_dir = pathlib.Path(__file__).parent / 'samples'

# Canned repository details used to stub out get_repository() in upload tests.
repo_settings = upload.RepoDetails(
    url=upload.PYPI, username='user', password='pw'
)

# Minimal .pypirc content with full credentials for the default 'pypi' index.
pypirc1 = """
[distutils]
index-servers =
    pypi

[pypi]
username: fred
password: s3cret
"""
# That's not a real password. Well, hopefully not.

@contextmanager
def temp_pypirc(content):
    """Write *content* to a named temp file, yield its path, remove it on exit.

    The file is closed before yielding so it can be re-opened on Windows.
    """
    tmp = NamedTemporaryFile("w+", delete=False)
    path = tmp.name
    try:
        tmp.write(content)
        tmp.close()
        yield path
    finally:
        os.unlink(path)


@responses.activate
def test_upload(copy_sample):
    responses.add(responses.POST, upload.PYPI, status=200)
    project = copy_sample('module1_toml')

    with temp_pypirc(pypirc1) as pypirc_path:
        with patch('flit.upload.get_repository', return_value=repo_settings):
            upload.main(project / 'pyproject.toml', repo_name='pypi',
                        pypirc_path=pypirc_path)

    # One POST per built distribution (sdist + wheel)
    assert len(responses.calls) == 2

def test_get_repository():
    # Credentials come straight from the pypirc file for the default index.
    with temp_pypirc(pypirc1) as path:
        details = upload.get_repository(pypirc_path=path, project_name='foo')
        assert details.url == upload.PYPI
        assert (details.username, details.password) == ('fred', 's3cret')

def test_get_repository_env():
    env = {
        'FLIT_INDEX_URL': 'https://pypi.example.com',
        'FLIT_USERNAME': 'alice',
        'FLIT_PASSWORD': 'p4ssword',  # Also not a real password
    }
    with temp_pypirc(pypirc1) as path, modified_env(env):
        details = upload.get_repository(pypirc_path=path, project_name='foo')
        # Because we haven't specified a repo name, environment variables should
        # have higher priority than the config file.
        assert details.url == 'https://pypi.example.com'
        assert details.username == 'alice'
        assert details.password == 'p4ssword'

@contextmanager
def _fake_keyring(d):
    class FakeKeyring:
        @staticmethod
        def get_password(service_name, username):
            return d.get(service_name, {}).get(username, None)

    class FakeKeyringErrMod:
        class KeyringError(Exception):
            pass

    with patch.dict('sys.modules', {
        'keyring': FakeKeyring(), 'keyring.errors': FakeKeyringErrMod(),
    }):
        yield

# Like pypirc1 but with no password, so the code under test has to fall back
# to a keyring lookup for credentials.
pypirc2 = """
[distutils]
index-servers =
    pypi

[pypi]
username: fred
"""

def test_get_repository_keyring(monkeypatch):
    monkeypatch.delenv('FLIT_PASSWORD', raising=False)

    # Password stored in the keyring under the plain username
    with _fake_keyring({upload.PYPI: {'fred': 'tops3cret'}}):
        details = get_repository(pypirc_path=io.StringIO(pypirc2), project_name='foo')
    assert details == RepoDetails(upload.PYPI, username='fred', password='tops3cret')

    # Tokens stored under pypi_token:* keys switch the username to __token__
    for token_key in ('pypi_token:project:foo', 'pypi_token:user:fred'):
        with _fake_keyring({upload.PYPI: {token_key: 'xyz'}}):
            details = get_repository(pypirc_path=io.StringIO(pypirc2), project_name='foo')
        assert details == RepoDetails(upload.PYPI, username='__token__', password='xyz')


# Settings for a custom index, exercised through a real pypirc file.
pypirc3_repo = "https://invalid-repo.inv"
pypirc3_user = "test"
pypirc3_pass = "not_a_real_password"
# NOTE: the original had a stray ' =' after '[distutils]', which is not a
# well-formed INI section header; CPython's lenient configparser happened to
# accept it, but the header should be plain '[distutils]'.
pypirc3 = f"""
[distutils]
index-servers =
    test123

[test123]
repository: {pypirc3_repo}
username: {pypirc3_user}
password: {pypirc3_pass}
"""


def test_upload_pypirc_file(copy_sample):
    # Repository details for a named repo should be read from the pypirc file.
    with temp_pypirc(pypirc3) as path, \
            patch("flit.upload.upload_file") as upload_file:
        project = copy_sample("module1_toml")
        one_format = set(list(ALL_FORMATS)[:1])
        upload.main(
            project / "pyproject.toml",
            formats=one_format,
            repo_name="test123",
            pypirc_path=path,
        )
        _, _, repo = upload_file.call_args[0]

        assert repo.url == pypirc3_repo
        assert repo.username == pypirc3_user
        assert repo.password == pypirc3_pass


def test_upload_invalid_pypirc_file(copy_sample):
    # A nonexistent pypirc path must raise rather than being silently ignored.
    with patch("flit.upload.upload_file"):
        project = copy_sample("module1_toml")
        one_format = set(list(ALL_FORMATS)[:1])
        with pytest.raises(FileNotFoundError):
            upload.main(
                project / "pyproject.toml",
                formats=one_format,
                repo_name="test123",
                pypirc_path="./file.invalid",
            )
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5155573
flit-3.12.0/tests/test_validate.py0000644000000000000000000001650414770461472014047 0ustar00import errno
import pytest
import responses

from flit import validate as fv

def test_validate_entrypoints():
    validate = fv.validate_entrypoints
    assert validate({'console_scripts': {'flit': 'flit:main'}}) == []
    assert validate({'some.group': {'flit': 'flit.buildapi'}}) == []

    # Two colons in an object reference is reported as one problem
    problems = validate({'some.group': {'flit': 'a:b:c'}})
    assert len(problems) == 1

def test_validate_name():
    def problems_for(name):
        return fv.validate_name({'name': name})

    assert problems_for('foo.bar_baz') == []
    assert problems_for('5minus6') == []

    assert len(problems_for('_foo')) == 1    # Must start with alphanumeric
    assert len(problems_for('foo.')) == 1    # Must end with alphanumeric
    assert len(problems_for('Bücher')) == 1  # ASCII only

def test_validate_requires_python():
    assert fv.validate_requires_python({}) == []  # Field is optional

    def problems_for(spec):
        return fv.validate_requires_python({'requires_python': spec})

    assert problems_for('>=3') == []
    assert problems_for('>=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*') == []

    assert len(problems_for('3')) == 1
    assert len(problems_for('@12')) == 1
    # Specifiers are comma separated, not semicolon
    assert len(problems_for('>=2.7; !=3.0.*')) == 1

def test_validate_requires_dist():
    assert fv.validate_requires_dist({}) == []  # Field is optional

    def problems_for(spec):
        return fv.validate_requires_dist({'requires_dist': [spec]})

    valid_specs = [
        'requests',
        'requests[extra-foo]',
        'requests (>=2.14)',  # parentheses allowed but not recommended
        'requests >=2.14',
        'pexpect; sys_platform == "win32"',
        # Altogether now
        'requests[extra-foo] >=2.14; python_version < "3.0"',
        # URL specifiers
        'requests @ https://example.com/requests.tar.gz',
        'requests @ https://example.com/requests.tar.gz ; python_version < "3.8"',
    ]
    for spec in valid_specs:
        assert problems_for(spec) == []

    # Problems
    assert len(problems_for('Bücher')) == 1
    assert len(problems_for('requests 2.14')) == 1
    assert len(problems_for('pexpect; sys.platform == "win32"')) == 1  # '.' -> '_'
    assert len(problems_for('requests >=2.14 @ https://example.com/requests.tar.gz')) == 1
    # Several problems in one requirement
    assert len(problems_for('pexpect[_foo] =3; sys.platform == "win32"')) == 3

def test_validate_environment_marker():
    vem = fv.validate_environment_marker

    assert vem('python_version >= "3" and os_name == \'posix\'') == []

    def assert_single_problem(marker, prefix):
        problems = vem(marker)
        assert len(problems) == 1
        assert problems[0].startswith(prefix)

    assert_single_problem('python_version >= "3', "Invalid string")    # Unclosed string
    assert_single_problem('python_verson >= "3"', "Invalid variable")  # Misspelled name
    assert_single_problem("os_name is 'posix'", "Invalid expression")  # No 'is' comparisons
    # No chained comparisons
    assert_single_problem("'2' < python_version < '4'", "Invalid expression")

    # Bad variable name and unclosed string: two problems at once
    assert len(vem('os.name == "linux\'')) == 2

def test_validate_url():
    assert fv.validate_url("https://github.com/pypa/flit") == []

    # Missing scheme, and scheme with nothing after it
    assert len(fv.validate_url("github.com/pypa/flit")) == 1
    assert len(fv.validate_url("https://")) == 1


def test_validate_project_urls():
    vpu = fv.validate_project_urls
    assert vpu({}) == []  # Field is optional

    def problems_for(entry):
        return vpu({'project_urls': [entry]})

    assert problems_for('Documentation, https://flit.readthedocs.io/') == []

    bad_entries = [
        'Documentation, flit.readthedocs.io',  # Missing https://
        'A, B, flit.readthedocs.io',           # Double comma
        ', https://flit.readthedocs.io/',      # No name
        # Name longer than 32 chars
        'Supercalifragilisticexpialidocious, https://flit.readthedocs.io/',
    ]
    for entry in bad_entries:
        assert len(problems_for(entry)) == 1


def test_read_classifiers_cached(monkeypatch, tmp_path):
    # Point the cache directory at tmp_path, pre-populated with a classifiers
    # file, and check the cached values are read back as a set.
    def fake_cache_dir():
        cache_file = tmp_path / "classifiers.lst"
        with cache_file.open("w") as fh:
            fh.write("A\nB\nC")
        return tmp_path

    monkeypatch.setattr(fv, "get_cache_dir", fake_cache_dir)

    assert fv._read_classifiers_cached() == {'A', 'B', 'C'}


@responses.activate
def test_download_and_cache_classifiers(monkeypatch, tmp_path):
    # The classifier list is fetched from PyPI and returned as a set.
    responses.add(
        responses.GET,
        'https://pypi.org/pypi?%3Aaction=list_classifiers',
        body="A\nB\nC")

    monkeypatch.setattr(fv, "get_cache_dir", lambda: tmp_path)

    assert fv._download_and_cache_classifiers() == {"A", "B", "C"}


def test_validate_classifiers_private(monkeypatch):
    """
    Test that ``Private :: Do Not Upload`` is considered a valid classifier.

    This is a special case: it is not listed as a trove classifier, but it is
    a recognised way to make sure a private package does not get uploaded to
    PyPI by accident.

    Implementation on the PyPI side:
        https://github.com/pypa/warehouse/pull/5440
    Issue about officially documenting the trick:
        https://github.com/pypa/packaging.python.org/issues/643
    """
    monkeypatch.setattr(fv, "_read_classifiers_cached", lambda: set())

    problems = fv.validate_classifiers({'invalid'})
    assert problems == ["Unrecognised classifier: 'invalid'"]

    assert fv.validate_classifiers({'Private :: Do Not Upload'}) == []


@responses.activate
@pytest.mark.parametrize("error", [PermissionError, OSError(errno.EROFS, "")])
def test_download_and_cache_classifiers_with_unacessible_dir(monkeypatch, error):
    # Even when the cache directory cannot be created or written to, the
    # download itself should still succeed and return the classifiers.
    responses.add(
        responses.GET,
        'https://pypi.org/pypi?%3Aaction=list_classifiers',
        body="A\nB\nC")

    class UnwritableCacheDir:
        def mkdir(self, parents):
            raise error

        def __truediv__(self, other):
            raise error

    monkeypatch.setattr(fv, "get_cache_dir", UnwritableCacheDir)

    assert fv._download_and_cache_classifiers() == {"A", "B", "C"}


def test_verify_classifiers_valid_classifiers():
    # A subset of the valid classifiers yields no problems.
    problems = fv._verify_classifiers({"A"}, {"A", "B"})
    assert problems == []

def test_verify_classifiers_invalid_classifiers():
    # A classifier missing from the valid set is reported by name.
    problems = fv._verify_classifiers({"A", "B"}, {"A"})
    assert problems == ["Unrecognised classifier: 'B'"]

def test_validate_readme_rst():
    bad_rst = {
        'description_content_type': 'text/x-rst',
        'description': "Invalid ``rst'",
    }
    problems = fv.validate_readme_rst(bad_rst)
    assert len(problems) == 2  # 1 message that rst is invalid + 1 with details
    assert "valid rst" in problems[0]

    # Markdown content should be ignored entirely
    markdown = {
        'description_content_type': 'text/markdown',
        'description': "Invalid `rst'",
    }
    assert fv.validate_readme_rst(markdown) == []

# rst sample containing a code-block directive; used to check that validating
# it does not require pygments for syntax highlighting.
RST_WITH_CODE = """
Code snippet:

.. code-block:: python

   a = [i ** 2 for i in range(5)]
"""

def test_validate_readme_rst_code():
    # Syntax highlighting shouldn't require pygments
    metadata = {
        'description_content_type': 'text/x-rst',
        'description': RST_WITH_CODE,
    }
    problems = fv.validate_readme_rst(metadata)
    for problem in problems:
        print(problem)

    assert problems == []
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5165572
flit-3.12.0/tests/test_vcs.py0000644000000000000000000000112114770461472013036 0ustar00from contextlib import contextmanager
import os
from pathlib import Path
from tempfile import TemporaryDirectory

from flit import vcs

@contextmanager
def cwd(path):
    """Temporarily chdir into *path* (str or Path); restore the old cwd on exit."""
    target = str(path) if isinstance(path, Path) else path
    previous = os.getcwd()
    os.chdir(target)
    try:
        yield
    finally:
        os.chdir(previous)

def test_identify_git_parent():
    # A .git directory in an ancestor should still identify the repo as git.
    with TemporaryDirectory() as tmp:
        root = Path(tmp)
        (root / '.git').mkdir()
        nested = root / 'subdir'
        nested.mkdir()
        with cwd(nested):
            assert vcs.identify_vcs(Path('.')).name == 'git'
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5165572
flit-3.12.0/tests/test_wheel.py0000644000000000000000000002062614770461472013362 0ustar00import configparser
import os
import stat
from pathlib import Path
import tempfile
from unittest import skipIf
import zipfile

import pytest
from testpath import assert_isfile, assert_isdir, assert_not_path_exists

from flit.wheel import WheelBuilder, make_wheel_in
from flit.config import EntryPointsConflict

samples_dir = Path(__file__).parent / 'samples'


def unpack(path):
    """Extract the zip/wheel at *path* into a fresh temporary directory.

    Returns the TemporaryDirectory object; the caller is responsible for its
    cleanup (the `with unpack(...)` pattern used by the tests handles this).

    Fix: the original left the ZipFile open, leaking a file handle (which
    also keeps the archive locked on Windows); use a context manager.
    """
    tmp = tempfile.TemporaryDirectory()
    with zipfile.ZipFile(str(path)) as zf:
        zf.extractall(tmp.name)
    return tmp

def test_wheel_module(copy_sample):
    project = copy_sample('module1_toml')
    make_wheel_in(project / 'pyproject.toml', project)
    assert_isfile(project / 'module1-0.1-py2.py3-none-any.whl')

def test_editable_wheel_module(copy_sample):
    project = copy_sample('module1_toml')
    make_wheel_in(project / 'pyproject.toml', project, editable=True)
    wheel = project / 'module1-0.1-py2.py3-none-any.whl'
    assert_isfile(wheel)
    with unpack(wheel) as unpacked:
        # An editable wheel ships a .pth file pointing back at the source dir
        pth = Path(unpacked, 'module1.pth')
        assert_isfile(pth)
        assert pth.read_text() == str(project)
        assert_isdir(Path(unpacked, 'module1-0.1.dist-info'))

def test_editable_wheel_has_absolute_pth(copy_sample):
    # Building from relative paths must still record an absolute source
    # directory in the .pth file.
    project = copy_sample('module1_toml')
    previous_cwd = os.getcwd()
    os.chdir(str(project))
    try:
        make_wheel_in(Path('pyproject.toml'), Path('.'), editable=True)
        wheel = 'module1-0.1-py2.py3-none-any.whl'
        assert_isfile(wheel)
        with unpack(wheel) as unpacked:
            pth = Path(unpacked, 'module1.pth')
            assert_isfile(pth)
            recorded = pth.read_text()
            assert Path(recorded).is_absolute()
            assert recorded == str(project.resolve())
            assert_isdir(Path(unpacked, 'module1-0.1.dist-info'))
    finally:
        os.chdir(previous_cwd)

def test_wheel_package(copy_sample):
    project = copy_sample('package1')
    make_wheel_in(project / 'pyproject.toml', project)
    assert_isfile(project / 'package1-0.1-py2.py3-none-any.whl')

def test_editable_wheel_package(copy_sample):
    project = copy_sample('package1')
    make_wheel_in(project / 'pyproject.toml', project, editable=True)
    wheel = project / 'package1-0.1-py2.py3-none-any.whl'
    assert_isfile(wheel)
    with unpack(wheel) as unpacked:
        pth = Path(unpacked, 'package1.pth')
        assert_isfile(pth)
        assert pth.read_text() == str(project)
        assert_isdir(Path(unpacked, 'package1-0.1.dist-info'))

def test_editable_wheel_namespace_package(copy_sample):
    project = copy_sample('ns1-pkg')
    make_wheel_in(project / 'pyproject.toml', project, editable=True)
    wheel = project / 'ns1_pkg-0.1-py2.py3-none-any.whl'
    assert_isfile(wheel)
    with unpack(wheel) as unpacked:
        # The .pth file is named after the dotted package path
        pth = Path(unpacked, 'ns1.pkg.pth')
        assert_isfile(pth)
        assert pth.read_text() == str(project)
        assert_isdir(Path(unpacked, 'ns1_pkg-0.1.dist-info'))

def test_wheel_src_module(copy_sample):
    project = copy_sample('module3')
    make_wheel_in(project / 'pyproject.toml', project)

    wheel = project / 'module3-0.1-py2.py3-none-any.whl'
    assert_isfile(wheel)
    with unpack(wheel) as unpacked:
        assert_isfile(Path(unpacked, 'module3.py'))
        dist_info = Path(unpacked, 'module3-0.1.dist-info')
        assert_isdir(dist_info)
        assert_isfile(dist_info / 'licenses' / 'LICENSE')

def test_editable_wheel_src_module(copy_sample):
    project = copy_sample('module3')
    make_wheel_in(project / 'pyproject.toml', project, editable=True)
    wheel = project / 'module3-0.1-py2.py3-none-any.whl'
    assert_isfile(wheel)
    with unpack(wheel) as unpacked:
        # For src layout, the .pth must point at the src/ directory
        pth = Path(unpacked, 'module3.pth')
        assert_isfile(pth)
        assert pth.read_text() == str(project / "src")
        assert_isdir(Path(unpacked, 'module3-0.1.dist-info'))

def test_wheel_src_package(copy_sample):
    project = copy_sample('package2')
    make_wheel_in(project / 'pyproject.toml', project)

    wheel = project / 'package2-0.1-py2.py3-none-any.whl'
    assert_isfile(wheel)
    with unpack(wheel) as unpacked:
        print(os.listdir(unpacked))
        assert_isfile(Path(unpacked, 'package2', '__init__.py'))

def test_editable_wheel_src_package(copy_sample):
    project = copy_sample('package2')
    make_wheel_in(project / 'pyproject.toml', project, editable=True)
    wheel = project / 'package2-0.1-py2.py3-none-any.whl'
    assert_isfile(wheel)
    with unpack(wheel) as unpacked:
        pth = Path(unpacked, 'package2.pth')
        assert_isfile(pth)
        assert pth.read_text() == str(project / "src")
        assert_isdir(Path(unpacked, 'package2-0.1.dist-info'))


def test_wheel_ns_package(copy_sample):
    project = copy_sample('ns1-pkg')
    result = make_wheel_in(project / 'pyproject.toml', project)
    assert result.file == project / 'ns1_pkg-0.1-py2.py3-none-any.whl'
    assert_isfile(result.file)
    with unpack(result.file) as unpacked:
        assert_isdir(Path(unpacked, 'ns1_pkg-0.1.dist-info'))
        assert_isfile(Path(unpacked, 'ns1', 'pkg', '__init__.py'))
        # The namespace level itself must not gain an __init__.py
        assert_not_path_exists(Path(unpacked, 'ns1', '__init__.py'))

def test_dist_name(copy_sample):
    # A distribution name differing from the module name is used for the
    # wheel filename and the dist-info directory.
    project = copy_sample('altdistname')
    make_wheel_in(project / 'pyproject.toml', project)
    wheel = project / 'package_dist1-0.1-py2.py3-none-any.whl'
    assert_isfile(wheel)
    with unpack(wheel) as unpacked:
        assert_isdir(Path(unpacked, 'package_dist1-0.1.dist-info'))

def test_entry_points(copy_sample):
    project = copy_sample('entrypoints_valid')
    make_wheel_in(project / 'pyproject.toml', project)
    wheel = project / 'package1-0.1-py2.py3-none-any.whl'
    assert_isfile(wheel)
    with unpack(wheel) as unpacked:
        ep_file = Path(unpacked, 'package1-0.1.dist-info', 'entry_points.txt')
        assert_isfile(ep_file)
        parser = configparser.ConfigParser()
        parser.read(str(ep_file))
        assert 'console_scripts' in parser.sections()
        assert 'myplugins' in parser.sections()

def test_entry_points_conflict(copy_sample):
    project = copy_sample('entrypoints_conflict')
    with pytest.raises(EntryPointsConflict):
        make_wheel_in(project / 'pyproject.toml', project)

def test_wheel_builder():
    # Slightly lower level interface than make_wheel_in
    with tempfile.TemporaryDirectory() as tmp:
        target = Path(tmp, 'sample.whl')
        with target.open('wb') as fp:
            builder = WheelBuilder.from_ini_path(
                samples_dir / 'package1' / 'pyproject.toml', fp)
            builder.build()

        assert zipfile.is_zipfile(str(target))
        assert builder.wheel_filename == 'package1-0.1-py2.py3-none-any.whl'

@skipIf(os.name == 'nt', 'Windows does not preserve necessary permissions')
def test_permissions_normed(copy_sample):
    """Wheel members get normalised permissions: 644 for a plain file, 755
    when any execute bit was set on the source file."""
    td = copy_sample('module1_toml')

    (td / 'module1.py').chmod(0o620)
    make_wheel_in(td / 'pyproject.toml', td)

    whl = td / 'module1-0.1-py2.py3-none-any.whl'
    assert_isfile(whl)
    with zipfile.ZipFile(str(whl)) as zf:
        info = zf.getinfo('module1.py')
        # Unix mode bits live in the top 16 bits of external_attr
        perms = (info.external_attr >> 16) & 0o777
        assert perms == 0o644, oct(perms)
    whl.unlink()

    # This time with executable bit set
    (td / 'module1.py').chmod(0o720)
    make_wheel_in(td / 'pyproject.toml', td)

    assert_isfile(whl)
    with zipfile.ZipFile(str(whl)) as zf:
        info = zf.getinfo('module1.py')
        perms = (info.external_attr >> 16) & 0o777
        assert perms == 0o755, oct(perms)

        # Generated metadata files get the plain 644 mode
        info = zf.getinfo('module1-0.1.dist-info/METADATA')
        perms = (info.external_attr >> 16) & 0o777
        assert perms == 0o644, oct(perms)

        # RECORD should at least be marked as a regular file (S_IFREG)
        info = zf.getinfo('module1-0.1.dist-info/RECORD')
        perms = (info.external_attr >> 16) & stat.S_IFREG
        assert perms

def test_compression(tmp_path):
    # Wheel members should be deflate-compressed, not merely stored.
    result = make_wheel_in(samples_dir / 'module1_toml' / 'pyproject.toml', tmp_path)
    assert_isfile(result.file)
    with zipfile.ZipFile(str(result.file)) as zf:
        for member in ('module1.py', 'module1-0.1.dist-info/METADATA'):
            assert zf.getinfo(member).compress_type == zipfile.ZIP_DEFLATED

def test_wheel_module_local_version(copy_sample):
    """A local version specifier is preserved in the wheel filename and in the
    dist-info directory name."""
    project = copy_sample('modulewithlocalversion')
    make_wheel_in(project / 'pyproject.toml', project)

    wheel = project / 'modulewithlocalversion-0.1.dev0+test-py2.py3-none-any.whl'
    assert_isfile(wheel)
    with unpack(wheel) as unpacked:
        assert_isfile(Path(unpacked, 'modulewithlocalversion.py'))
        assert_isdir(Path(unpacked, 'modulewithlocalversion-0.1.dev0+test.dist-info'))
././@PaxHeader0000000000000000000000000000003400000000000010212 xustar0028 mtime=1742889785.5165572
flit-3.12.0/tox.ini0000644000000000000000000000220514770461472011007 0ustar00[tox]
envlist = py{314,313,312,311,310,39,38,37,36},bootstrap
skip_missing_interpreters = true

[gh-actions]
python =
    3.6: py36
    3.7: py37
    3.8: py38, bootstrap
    3.9: py39
    3.10: py310
    3.11: py311
    3.12: py312
    3.13: py313
    3.14: py314

[testenv]
deps =
    requests
    testpath
    responses
    docutils
    tomli;python_version < "3.11"
    tomli-w
    pytest>=2.7.3
    pytest-cov

skip_install=true

setenv =
    PYTHONPATH = flit_core

commands =
    python -m pytest --cov=flit --cov=flit_core/flit_core {posargs}

# Python 3.6: only test flit_core
[testenv:py36]
commands =
    python -m pytest --cov=flit_core/flit_core flit_core {posargs}

# Python 3.7: only test flit_core
[testenv:py37]
commands =
    python -m pytest --cov=flit_core/flit_core flit_core {posargs}

[testenv:bootstrap]
skip_install = true
# Make the install step a no-op, so nothing gets installed in the env
install_command = true {packages}
allowlist_externals = true
changedir = flit_core
commands =
    python -c "from flit_core.buildapi import build_wheel;\
               from tempfile import mkdtemp;\
               build_wheel(mkdtemp())"
flit-3.12.0/PKG-INFO0000644000000000000000000000716400000000000010527 0ustar00Metadata-Version: 2.4
Name: flit
Version: 3.12.0
Summary: A simple packaging tool for simple packages.
Author-email: Thomas Kluyver <thomas@kluyver.me.uk>
Requires-Python: >=3.8
Description-Content-Type: text/x-rst
License-Expression: BSD-3-Clause
Classifier: Intended Audience :: Developers
Classifier: Programming Language :: Python :: 3
Classifier: Topic :: Software Development :: Libraries :: Python Modules
License-File: LICENSE
Requires-Dist: flit_core >=3.12.0
Requires-Dist: requests
Requires-Dist: docutils
Requires-Dist: tomli-w
Requires-Dist: pip
Requires-Dist: sphinx ; extra == "doc"
Requires-Dist: sphinxcontrib_github_alt ; extra == "doc"
Requires-Dist: pygments-github-lexers ; extra == "doc"
Requires-Dist: testpath ; extra == "test"
Requires-Dist: responses ; extra == "test"
Requires-Dist: pytest>=2.7.3 ; extra == "test"
Requires-Dist: pytest-cov ; extra == "test"
Requires-Dist: tomli ; extra == "test"
Project-URL: Changelog, https://flit.pypa.io/en/stable/history.html
Project-URL: Documentation, https://flit.pypa.io
Project-URL: Source, https://github.com/pypa/flit
Provides-Extra: doc
Provides-Extra: test

**Flit** is a simple way to put Python packages and modules on PyPI.
It tries to require less thought about packaging and help you avoid common
mistakes.
See `Why use Flit? <https://flit.pypa.io/en/stable/rationale.html>`_ for
more about how it compares to other Python packaging tools.

Install
-------

::

    $ python3 -m pip install flit

Flit requires Python 3 and therefore needs to be installed using the Python 3
version of pip.

Python 2 modules can be distributed using Flit, but need to be importable on
Python 3 without errors.

Usage
-----

Say you're writing a module ``foobar`` — either as a single file ``foobar.py``,
or as a directory — and you want to distribute it.

1. Make sure that foobar's docstring starts with a one-line summary of what
   the module is, and that it has a ``__version__``:

   .. code-block:: python

       """An amazing sample package!"""

       __version__ = "0.1"

2. Install flit if you don't already have it::

       python3 -m pip install flit

3. Run ``flit init`` in the directory containing the module to create a
   ``pyproject.toml`` file. It will look something like this:

   .. code-block:: ini

       [build-system]
       requires = ["flit_core >=3.2,<4"]
       build-backend = "flit_core.buildapi"

       [project]
       name = "foobar"
       authors = [{name = "Sir Robin", email = "robin@camelot.uk"}]
       dynamic = ["version", "description"]

       [project.urls]
       Home = "https://github.com/sirrobin/foobar"

   You can edit this file to add other metadata, for example to set up
   command line scripts. See the
   `pyproject.toml page <https://flit.pypa.io/en/stable/pyproject_toml.html>`_
   of the documentation.

   If you have already got a ``flit.ini`` file to use with older versions of
   Flit, convert it to ``pyproject.toml`` by running ``python3 -m flit.tomlify``.

4. Run this command to upload your code to PyPI::

       flit publish

Once your package is published, people can install it using *pip* just like
any other package. In most cases, pip will download a 'wheel' package, a
standard format it knows how to install. If you specifically ask pip to install
an 'sdist' package, it will install and use Flit in a temporary environment.


To install a package locally for development, run::

    flit install [--symlink] [--python path/to/python]

Flit packages a single importable module or package at a time, using the import
name as the name on PyPI. All subpackages and data files within a package are
included automatically.