pax_global_header00006660000000000000000000000064147654446200014526gustar00rootroot0000000000000052 comment=6aa47c20bfbf38d6b78cb7026b29c16e5ec21858 nvchecker-2.17/000077500000000000000000000000001476544462000134275ustar00rootroot00000000000000nvchecker-2.17/.github/000077500000000000000000000000001476544462000147675ustar00rootroot00000000000000nvchecker-2.17/.github/workflows/000077500000000000000000000000001476544462000170245ustar00rootroot00000000000000nvchecker-2.17/.github/workflows/codeql.yml000066400000000000000000000066661476544462000210340ustar00rootroot00000000000000# For most projects, this workflow file will not need changing; you simply need # to commit it to your repository. # # You may wish to alter this file to override the set of languages analyzed, # or to provide custom queries or build logic. # # ******** NOTE ******** # We have attempted to detect the languages in your repository. Please check # the `language` matrix defined below to confirm you have the correct set of # supported CodeQL languages. # name: "CodeQL" on: push: branches: [ "master" ] pull_request: branches: [ "master" ] schedule: - cron: '34 14 * * 4' jobs: analyze: name: Analyze # Runner size impacts CodeQL analysis time. To learn more, please see: # - https://gh.io/recommended-hardware-resources-for-running-codeql # - https://gh.io/supported-runners-and-hardware-resources # - https://gh.io/using-larger-runners # Consider using larger runners for possible analysis time improvements. runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }} timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }} permissions: # required for all workflows security-events: write # only required for workflows in private repositories actions: read contents: read strategy: fail-fast: false matrix: language: [ 'python' ] # CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ] # Use only 'java-kotlin' to analyze code written in Java, Kotlin or both # Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support steps: - name: Checkout repository uses: actions/checkout@v4 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL uses: github/codeql-action/init@v3 with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. # By default, queries listed here will override any specified in a config file. # Prefix the list here with "+" to use these queries and those in the config file. # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs # queries: security-extended,security-and-quality # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift). # If this step fails, then you should remove it and run the build manually (see below) - name: Autobuild uses: github/codeql-action/autobuild@v3 # ℹ️ Command-line programs to run using the OS shell. # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun # If the Autobuild fails above, remove it and uncomment the following three lines. # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance. 
# - run: | # echo "Run, Build Application using script" # ./location_of_script_within_repo/buildscript.sh - name: Perform CodeQL Analysis uses: github/codeql-action/analyze@v3 with: category: "/language:${{matrix.language}}" nvchecker-2.17/.github/workflows/mypy.yaml000066400000000000000000000021271476544462000207100ustar00rootroot00000000000000name: run mypy on: [push, pull_request] jobs: build: name: run mypy runs-on: ubuntu-latest steps: - name: Checkout Code uses: actions/checkout@v4 - name: Setup Cache uses: actions/cache@v4 env: cache-name: cache-pipmypy with: path: ~/.cache/pip key: ${{ runner.os }}-${{ env.cache-name }}-${{ hashFiles('setup.py') }} restore-keys: | ${{ runner.os }}-${{ env.cache-name }}- ${{ runner.os }}-cache-pip- - name: Install deps run: pip3 install -U tornado pytest pytest-asyncio pytest-httpbin pytest-rerunfailures structlog tomli platformdirs aiohttp httpx mypy awesomeversion - name: Run mypy for --install-types run: PATH=$HOME/.local/bin:$PATH mypy --namespace-packages --explicit-package-bases nvchecker nvchecker_source tests continue-on-error: true - name: Install types run: PATH=$HOME/.local/bin:$PATH yes | mypy --install-types - name: Run mypy run: PATH=$HOME/.local/bin:$PATH mypy --namespace-packages --explicit-package-bases nvchecker nvchecker_source tests nvchecker-2.17/.github/workflows/tests.yaml000066400000000000000000000064721476544462000210630ustar00rootroot00000000000000name: run tests on: [push, pull_request] jobs: tests: runs-on: ubuntu-latest strategy: fail-fast: false matrix: python-version: - "3.8" - "3.9" - "3.10" - "3.11" - "3.12" - "3.13" # pypy fails in some cases but we don't care much about that # with github actions we can't mark some jobs to not affect the overall # conclusion so we have to omit "allow-failure" tests. # See https://github.com/actions/toolkit/issues/399 # - pypy-3.7 deps: - tornado pycurl # timer runs when loop is closed, see https://github.com/lilydjwg/nvchecker/actions/runs/11650699759/job/32439742210 # - aiohttp - tornado - httpx[http2]>=0.14.0 exclude: [] steps: - name: Checkout code uses: actions/checkout@v4 - name: Setup Python ${{ matrix.python-version }} uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - name: Setup Cache uses: actions/cache@v4 env: cache-name: cache-pip with: path: ~/.cache/pip key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.deps }}-${{ hashFiles('pyproject.toml', 'setup.cfg') }} restore-keys: | ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.deps }}- ${{ runner.os }}-${{ env.cache-name }}- - name: Install pycurl deps if: ${{ contains(matrix.deps, 'pycurl') }} run: | sudo apt update sudo apt install -y libcurl4-openssl-dev # werkzeug is pinned for httpbin compatibility https://github.com/postmanlabs/httpbin/issues/673 - name: Install Python deps env: # use env to avoid `>` being redirection deps: ${{ matrix.deps }} run: pip install -U $deps pytest 'pytest-asyncio>=0.24' pytest-httpbin pytest-rerunfailures structlog tomli platformdirs lxml jq 'werkzeug<2.1' awesomeversion # don't use binary distribution because: # hardcoded cacert path doesn't work on Ubuntu (should have been resolved?) 
# limited compression support (only deflate & gzip) - name: Install pycurl if: ${{ contains(matrix.deps, 'pycurl') }} run: | pip uninstall -y pycurl pip install -U pycurl --no-binary :all: - name: Decrypt keys env: KEY: ${{ secrets.KEY }} run: if [[ -n $KEY ]]; then openssl enc -d -aes-256-ctr -pbkdf2 -k $KEY -in keyfile.toml.enc -out keyfile.toml; fi - name: Setup mitmproxy cache uses: actions/cache@v4 env: cache-name: cache-mitm with: path: ~/.mitmproxy key: ${{ env.cache-name }} restore-keys: | ${{ env.cache-name }}- - name: Install mitmproxy run: | /usr/bin/python -m venv --system-site-packages ~/.mitmproxy/venv . ~/.mitmproxy/venv/bin/activate pip install -U mitmproxy # https://github.com/DevToys-app/DevToys/issues/1373#issuecomment-2599820594 sudo sysctl -w kernel.apparmor_restrict_unprivileged_unconfined=0 sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0 # - name: Setup upterm session # uses: lhotari/action-upterm@v1 - name: Run pytest env: mitmdump: /home/runner/.mitmproxy/venv/bin/mitmdump run: scripts/run_cached_tests nvchecker-2.17/.gitignore000066400000000000000000000001611476544462000154150ustar00rootroot00000000000000*.egg-info/ __pycache__/ /build/ /dist/ .cache/ .eggs/ *.pyc *.pyo .travis.pub .pytest_cache/ .tox/ keyfile.toml nvchecker-2.17/.readthedocs.yaml000066400000000000000000000002421476544462000166540ustar00rootroot00000000000000version: 2 build: os: ubuntu-22.04 tools: python: "3.11" sphinx: configuration: docs/conf.py python: install: - requirements: docs/requirements.txt nvchecker-2.17/.typos.toml000066400000000000000000000000431476544462000155550ustar00rootroot00000000000000[default.extend-words] mis = "mis" nvchecker-2.17/LICENSE000066400000000000000000000021131476544462000144310ustar00rootroot00000000000000MIT License Copyright (c) 2013-2017 lilydjwg , et al. Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. nvchecker-2.17/README.rst000066400000000000000000000033651476544462000151250ustar00rootroot00000000000000**nvchecker** (short for *new version checker*) is for checking if a new version of some software has been released. This is the version 2.0 branch. For the old version 1.x, please switch to the ``v1.x`` branch. .. image:: https://github.com/lilydjwg/nvchecker/workflows/run%20tests/badge.svg?branch=master :alt: Test Status :target: https://github.com/lilydjwg/nvchecker/actions?query=workflow%3A%22run+tests%22 .. image:: https://badge.fury.io/py/nvchecker.svg :alt: PyPI version :target: https://badge.fury.io/py/nvchecker .. 
image:: https://readthedocs.org/projects/nvchecker/badge/?version=latest :target: https://nvchecker.readthedocs.io/en/latest/?badge=latest :alt: Documentation Status | .. image:: https://repology.org/badge/vertical-allrepos/nvchecker.svg :alt: Packaging status :target: https://repology.org/metapackage/nvchecker/versions .. contents:: :local: Dependency ---------- - Python 3.8+ - Python library: structlog, platformdirs, tomli (on Python < 3.11) - One of these Python library combinations (ordered by preference): * tornado + pycurl * aiohttp * httpx with http2 support (experimental; only latest version is supported) * tornado - All commands used in your software version configuration files Install and Run --------------- To install:: pip3 install nvchecker To use the latest code, you can also clone this repository and run:: pip install . To see available options:: nvchecker --help Run with one or more software version files:: nvchecker -c config_file You normally will like to specify some "version record files"; see below. Documentation ------------- For detailed documentation, see `https://nvchecker.readthedocs.io/en/latest/ `_. nvchecker-2.17/docs/000077500000000000000000000000001476544462000143575ustar00rootroot00000000000000nvchecker-2.17/docs/.gitignore000066400000000000000000000000101476544462000163360ustar00rootroot00000000000000_build/ nvchecker-2.17/docs/Makefile000066400000000000000000000013611476544462000160200ustar00rootroot00000000000000# Minimal makefile for Sphinx documentation # # You can set these variables from the command line, and also # from the environment for the first two. SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build SOURCEDIR = . BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) .PHONY: help Makefile man man: $(BUILDDIR)/man/nvchecker.1 $(BUILDDIR)/man/nvchecker.1: usage.rst mkdir -p $(BUILDDIR)/man ./myrst2man.py $< > $@ # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) nvchecker-2.17/docs/api.rst000066400000000000000000000014551476544462000156670ustar00rootroot00000000000000``nvchecker.api`` --- The source plugin API =========================================== .. automodule:: nvchecker.api :members: :imported-members: :undoc-members: .. py:data:: session :type: nvchecker.httpclient.base.BaseSession The object to send out HTTP requests, respecting various options in the configuration entry. .. automodule:: nvchecker.httpclient.base :members: BaseSession, Response :undoc-members: .. autodata:: nvchecker.api.proxy .. autodata:: nvchecker.api.user_agent .. autodata:: nvchecker.api.tries .. autodata:: nvchecker.api.verify_cert .. py:data:: nvchecker.api.entry_waiter :type: contextvars.ContextVar This :class:`ContextVar ` contains an :class:`EntryWaiter ` instance for waiting on other entries. nvchecker-2.17/docs/conf.py000066400000000000000000000022671476544462000156650ustar00rootroot00000000000000import os import sys sys.path.insert(0, os.path.abspath("..")) import nvchecker master_doc = "index" project = "nvchecker" copyright = "lilydjwg, et al." 
version = release = nvchecker.__version__ extensions = [ "sphinx.ext.autodoc", "sphinx.ext.doctest", "sphinx.ext.intersphinx", "sphinx.ext.viewcode", ] primary_domain = "py" default_role = "py:obj" autodoc_member_order = "bysource" autoclass_content = "both" autodoc_inherit_docstrings = False # Without this line sphinx includes a copy of object.__init__'s docstring # on any class that doesn't define __init__. # https://bitbucket.org/birkenfeld/sphinx/issue/1337/autoclass_content-both-uses-object__init__ autodoc_docstring_signature = False intersphinx_mapping = {"python": ("https://docs.python.org/3", None)} html_theme = "sphinx_rtd_theme" on_rtd = os.environ.get("READTHEDOCS", None) == "True" # On RTD we can't import sphinx_rtd_theme, but it will be applied by # default anyway. This block will use the same theme when building locally # as on RTD. if not on_rtd: import sphinx_rtd_theme html_theme_path = [sphinx_rtd_theme.get_html_theme_path()] html_theme_options = { 'collapse_navigation': False, } nvchecker-2.17/docs/index.rst000066400000000000000000000006761476544462000162310ustar00rootroot00000000000000.. nvchecker documentation master file, created by sphinx-quickstart on Thu Sep 3 00:19:02 2020. You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. Welcome to nvchecker's documentation! ===================================== .. toctree:: :maxdepth: 2 usage plugin api Indices and tables ================== * :ref:`genindex` * :ref:`modindex` * :ref:`search` nvchecker-2.17/docs/make.bat000066400000000000000000000014331476544462000157650ustar00rootroot00000000000000@ECHO OFF pushd %~dp0 REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) set SOURCEDIR=. set BUILDDIR=_build if "%1" == "" goto help %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. echo.The 'sphinx-build' command was not found. Make sure you have Sphinx echo.installed, then set the SPHINXBUILD environment variable to point echo.to the full path of the 'sphinx-build' executable. Alternatively you echo.may add the Sphinx directory to PATH. echo. 
echo.If you don't have Sphinx installed, grab it from echo.http://sphinx-doc.org/ exit /b 1 ) %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% goto end :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% :end popd nvchecker-2.17/docs/myrst2man.py000077500000000000000000000040341476544462000166710ustar00rootroot00000000000000#!/usr/bin/python3 import time import locale import os import sys try: locale.setlocale(locale.LC_ALL, '') except: pass sys.path.insert(0, '..') import nvchecker from docutils.core import publish_cmdline, default_description from docutils import nodes from docutils.writers import manpage from docutils.parsers.rst import roles def ref_role( role, rawtext, text, lineno, inliner, options={}, content=[], ): node = nodes.reference(rawtext, text.title(), **options) return [node], [] def doc_role( role, rawtext, text, lineno, inliner, options={}, content=[], ): node = nodes.reference(rawtext, text, **options) return [node], [] roles.register_local_role('ref', ref_role) roles.register_local_role('doc', doc_role) class MyTranslator(manpage.Translator): def visit_image(self, node): raise nodes.SkipNode def visit_topic(self, node): self.body.append('\n') raise nodes.SkipNode def visit_title(self, node): try: super().visit_title(node) except nodes.SkipNode: if self.section_level == 0: self._docinfo['title'] = 'nvchecker' self._docinfo['subtitle'] = 'New version checker for software releases' self._docinfo['title_upper'] = 'nvchecker'.upper() self._docinfo['manual_section'] = '1' # Make the generated man page reproducible. Based on the patch from # https://sourceforge.net/p/docutils/patches/132/#5333 source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH') if source_date_epoch: self._docinfo['date'] = time.strftime('%Y-%m-%d', time.gmtime(int(source_date_epoch))) else: self._docinfo['date'] = time.strftime('%Y-%m-%d') self._docinfo['version'] = nvchecker.__version__ raise class MyWriter(manpage.Writer): def __init__(self): super().__init__() self.translator_class = MyTranslator def main(): description = ("Generates plain unix manual documents. " + default_description) publish_cmdline(writer=MyWriter(), description=description) if __name__ == '__main__': main() nvchecker-2.17/docs/plugin.rst000066400000000000000000000067061476544462000164200ustar00rootroot00000000000000How to develop a source plugin for nvchecker ============================================ .. contents:: :local: Source plugins enable nvchecker to discover software version strings in additional ways. Where to put the plugins ------------------------ They are Python modules put in any directories named ``nvchecker_source`` in ``sys.path``. This is called namespace packages introduced by `PEP 420 `_. For local use, ``~/.local/lib/pythonX.Y/site-packages/nvchecker_source`` is a good place, or you can define the ``PYTHONPATH`` environment variable and put nvchecker source plugins there inside a ``nvchecker_source`` directory. Plugins are referenced by their names in the configuration file (``source = "xxx"``). If multiple plugins have the same name, the first one in ``sys.path`` will be used. How to write a simple plugin ---------------------------- For simple situations, you need to define an async function with the following signature:: async def get_version( name: str, conf: Entry, *, cache: AsyncCache, keymanager: KeyManager, **kwargs, ) -> VersionResult: ... Those types are imported from :mod:`nvchecker.api`. 
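For example, a complete (if trivial) plugin file could look like the sketch below. This is illustrative only: the module name ``example`` and the ``url`` entry option are hypothetical, not a built-in source. The individual parameters are described next. ::

  # nvchecker_source/example.py -- a minimal sketch, not a built-in source
  from nvchecker.api import (
    AsyncCache, Entry, KeyManager, VersionResult,
  )

  async def get_version(
    name: str, conf: Entry, *,
    cache: AsyncCache, keymanager: KeyManager,
    **kwargs,
  ) -> VersionResult:
    # "url" is a hypothetical option of the configuration entry;
    # fall back to a made-up URL derived from the entry name
    url = conf.get('url', f'https://example.org/{name}.json')
    # fetch the URL and parse it as JSON; results are cached so
    # entries requesting the same URL share one request
    data = await cache.get_json(url)
    # the string returned here is reported as the version
    return data['version']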
``name`` is the table keys in the configuration file, and ``conf`` is a dict of the content of that table. You should not modify this dict. ``cache`` is an :class:`AsyncCache ` object that caches results for you. Every plugin has its own ``cache`` object so that cache keys won't conflict. ``keymanager`` is a :class:`KeyManager ` object that you can call :meth:`.get_key(name) ` to get the key (token) from the keyfile. There may be additional keyword arguments in the future so ``**kwargs`` should be used. If you want to send an HTTP request, it's preferred to use :meth: `cache.get_json ` or the :data: `nvchecker.api.session` object. It will use the auto-selected HTTP backend and handle the ``proxy`` option automatically. For details about these objects, see :mod:`the API documentation `, or take existing source plugins as examples. How to write a more powerful plugin ----------------------------------- You may want more control in your source plugin, e.g. to do batch requests. To do this, you provide a class instead:: class Worker(BaseWorker): async def run(self) -> None: ... You will have the following in the attributes:: token_q: Queue[bool], result_q: Queue[RawResult], tasks: List[Tuple[str, Entry]], keymanager: KeyManager, You are expected to process :attr:`tasks ` and put results in :attr:`result_q `. See ``nvchecker_source/none.py`` for the simplest example, and ``nvchecker_source/aur.py`` for a complete, batching example. For details about these objects, see :mod:`the API documentation `. You can also receive a configuration section from the configuration as ``__config__.source.SOURCE_NAME``, where ``SOURCE_NAME`` is what your plugin is called. This can be used to specify a mirror site for your plugin to use, e.g. the ``npm`` plugin accepts the following config:: [__config__.source.npm] registry = "https://registry.npm.taobao.org" When such a configuration exists for your plugin, you need to define a function named ``configure`` to receive it:: def configure(config): '''use the "config" dict in some way''' ... nvchecker-2.17/docs/requirements.txt000066400000000000000000000002021476544462000176350ustar00rootroot00000000000000structlog platformdirs tornado>=6 sphinx>=3.2 # <5 has strange bottom margins for p, and no list indicators sphinx-rtd-theme>=0.5 nvchecker-2.17/docs/usage.rst000066400000000000000000001052101476544462000162140ustar00rootroot00000000000000Usage of nvchecker commands =========================== **nvchecker** (short for *new version checker*) is for checking if a new version of some software has been released. This is the version 2.0 branch. For the old version 1.x, please switch to the ``v1.x`` branch. .. image:: https://github.com/lilydjwg/nvchecker/workflows/run%20tests/badge.svg?branch=master :alt: Test Status :target: https://github.com/lilydjwg/nvchecker/actions?query=workflow%3A%22run+tests%22 .. image:: https://badge.fury.io/py/nvchecker.svg :alt: PyPI version :target: https://badge.fury.io/py/nvchecker .. 
contents:: :local: Dependency ---------- - Python 3.8+ - Python library: structlog, platformdirs, tomli (on Python < 3.11) - One of these Python library combinations (ordered by preference): * tornado + pycurl * aiohttp * httpx with http2 support (experimental; only latest version is supported) * tornado - All commands used in your software version configuration files Install and Run --------------- To install:: pip3 install nvchecker To use the latest code, you can also clone this repository and run:: pip install . To see available options:: nvchecker --help Run with one or more software version files:: nvchecker -c config_file.toml A simple config file may look like: .. code-block:: toml [nvchecker] source = "github" github = "lilydjwg/nvchecker" [python-toml] source = "pypi" pypi = "toml" You will normally want to specify some "version record files"; see below. JSON logging ~~~~~~~~~~~~ With ``--logger=json`` or ``--logger=both``, you can get structured logging for programmatic consumption. You can use ``--json-log-fd=FD`` to specify the file descriptor to send logs to (take care to do line buffering). The logging level option (``-l`` or ``--logging``) doesn't take effect with this. The JSON log is one JSON string per line. The following documented events and fields are stable; undocumented ones may change without notice. event=updated An update is detected. Fields ``name``, ``revision``, ``old_version`` and ``version`` are available. ``old_version`` may be ``null`` and ``revision`` may be absent. event=up-to-date There is no update. Fields ``name`` and ``version`` are available. event=no-result No version is detected. There may be an error. The field ``name`` is available. level=error There is an error. Fields ``name`` and ``exc_info`` may be available to give further information. Upgrade from 1.x version ~~~~~~~~~~~~~~~~~~~~~~~~ There are several backward-incompatible changes from the previous 1.x version. 1. Version 2.x requires Python 3.7+ to run. 2. The command syntax changes a bit. You need to use a ``-c`` switch to specify your software version configuration file (or use the default). 3. The configuration file format has been changed from ini to `toml`_. You can use the ``nvchecker-ini2toml`` script to convert your old configuration files. However, comments and formatting will be lost, and some options may not be converted correctly. 4. Several options have been renamed. ``max_concurrent`` is now ``max_concurrency``, and all option names have their ``-`` replaced with ``_``. 5. All software configuration tables need a ``source`` option to specify which source is to be used rather than having it figured out from the option names in use. This enables additional source plugins to be discovered. 6. The version record files have been changed to use JSON format (the old format will be converted on writing). 7. The ``vcs`` source is removed. (It's available inside `lilac `_ at the moment.) A ``git`` source is provided. 8. ``include_tags_pattern`` and ``ignored_tags`` are removed. Use :ref:`list options` instead. Version Record Files -------------------- Version record files record which versions of the software are known or available. They are a simple JSON object mapping software names to known versions. The ``nvtake`` Command ~~~~~~~~~~~~~~~~~~~~~~ This command helps to manage version record files. It reads both old and new version record files, and a list of names given on the commandline. It then updates the versions of those names in the old version record file.
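For example, assuming your configuration file points at your version record files, you could mark the ``nvchecker`` entry as handled with::

  $ nvtake -c config_file.toml nvchecker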
This helps when you have already handled some of the updated software, but not all. You can tell nvchecker that via this command instead of editing the file by hand. This command will help most if you specify where your version record files are in your config file. See below for how to use a config file. The ``nvcmp`` Command ~~~~~~~~~~~~~~~~~~~~~ This command compares the ``newver`` file with the ``oldver`` one and prints out any differences as updates, e.g.:: $ nvcmp -c sample_source.toml Sparkle Test App None -> 2.0 test 0.0 -> 0.1 Configuration Files ------------------- The software version source files are in `toml`_ format. The *key name* is the name of the software. The following fields are used to tell nvchecker how to determine the current version of that software. See `sample_source.toml `_ for an example. Configuration Table ~~~~~~~~~~~~~~~~~~~ A special table named ``__config__`` provides some configuration options. Relative paths are relative to the source files, and ``~`` and environment variables are expanded. Currently supported options are: oldver Specify a version record file containing the old version info. newver Specify a version record file to store the new version info. proxy The HTTP proxy to use. The format is ``proto://host:port``, e.g. ``http://localhost:8087``. Different backends have different levels of support for this, e.g. with ``pycurl`` you can use ``socks5h://host:port`` proxies. max_concurrency Max number of concurrent jobs. Default: 20. http_timeout Time in seconds to wait for HTTP requests. Default: 20. keyfile Specify a toml config file containing key (token) information. This file should contain a ``keys`` table, mapping key names to key values. See the specific source for the key name(s) to use. Sample ``keyfile.toml``: .. code-block:: toml [keys] # https://github.com/settings/tokens # scope: repo -> public_repo github = "ghp_" Global Options ~~~~~~~~~~~~~~ The following options apply to every check source. You can use them in any item in your configuration file. prefix Strip the prefix string if the version string starts with it. Otherwise the version string is returned as-is. If both ``prefix`` and ``from_pattern``/``to_pattern`` are used, ``prefix`` is applied first. from_pattern, to_pattern Both are Python-compatible regular expressions. If ``from_pattern`` is found in the version string, it will be replaced with ``to_pattern``. If ``from_pattern`` is not found, the version string remains unchanged and no error is emitted. missing_ok Suppress warnings and errors if a version checking module finds nothing. Not all sources support it. proxy The HTTP proxy to use. The format is ``proto://host:port``, e.g. ``http://localhost:8087``. Different backends have different levels of support for this, e.g. with ``pycurl`` you can use ``socks5h://host:port`` proxies. Set it to ``""`` (empty string) to override the global setting. This only works when the source implementation uses the builtin HTTP client, and doesn't work with the ``aur`` source because it's batched (however the global proxy config still applies). user_agent The user agent string to use for HTTP requests. tries Try the specified number of times when a network error occurs. Default is ``1``. This only works when the source implementation uses the builtin HTTP client. httptoken A personal authorization token used to fetch the URL with the ``Authorization`` header. The type of token depends on the authorization required. 
- For Bearer token set \: ``Bearer `` - For Basic token set \: ``Basic `` In the keyfile add ``httptoken_{name}`` token. verify_cert Whether to verify the HTTPS certificate or not. Default is ``true``. .. _list options: List Options ~~~~~~~~~~~~ The following options apply to sources that return a list. See individual source tables to determine whether they are supported. include_regex Only consider version strings that match the given regex. The whole string should match the regex. Be sure to use ``.*`` when you mean it! exclude_regex Don't consider version strings that match the given regex. The whole string should match the regex. Be sure to use ``.*`` when you mean it! This option has higher precedence that ``include_regex``; that is, if matched by this one, it's excluded even it's also matched by ``include_regex``. sort_version_key Sort the version string using this key function. Choose among ``parse_version``, ``vercmp`` and ``awesomeversion``. Default value is ``parse_version``. ``parse_version`` uses an old version of ``pkg_resources.parse_version``. ``vercmp`` uses ``pyalpm.vercmp``. ``awesomeversion`` uses `awesomeversion `_. ignored Version strings that are explicitly ignored, separated by whitespace. This can be useful to avoid some known mis-named versions, so newer ones won't be "overridden" by the old broken ones. Search in a Webpage ~~~~~~~~~~~~~~~~~~~ :: source = "regex" Search through a specific webpage for the version string. This type of version finding has these fields: url The URL of the webpage to fetch. encoding (*Optional*) The character encoding of the webpage, if ``latin1`` is not appropriate. regex A regular expression used to find the version string. It can have zero or one capture group. The capture group or the whole match is the version string. When multiple version strings are found, the maximum of those is chosen. post_data (*Optional*) When present, a ``POST`` request (instead of a ``GET``) will be used. The value should be a string containing the full body of the request. The encoding of the string can be specified using the ``post_data_type`` option. post_data_type (*Optional*) Specifies the ``Content-Type`` of the request body (``post_data``). By default, this is ``application/x-www-form-urlencoded``. This source supports :ref:`list options`. Search in an HTTP header ~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "httpheader" Send an HTTP request and search through a specific header. url The URL of the HTTP request. header (*Optional*) The header to look at. Default is ``Location``. Another useful header is ``Content-Disposition``. regex A regular expression used to find the version string. It can have zero or one capture group. The capture group or the whole match is the version string. When multiple version strings are found, the maximum of those is chosen. method (*Optional*) The HTTP method to use. Default is ``HEAD``. follow_redirects (*Optional*) Whether to follow 3xx HTTP redirects. Default is ``false``. If you are looking at a ``Location`` header, you shouldn't change this. Search with an HTML Parser ~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "htmlparser" Send an HTTP request and search through the body a specific xpath. url The URL of the HTTP request. xpath An xpath expression used to find the version string. post_data (*Optional*) When present, a ``POST`` request (instead of a ``GET``) will be used. The value should be a string containing the full body of the request. The encoding of the string can be specified using the ``post_data_type`` option. 
post_data_type (*Optional*) Specifies the ``Content-Type`` of the request body (``post_data``). By default, this is ``application/x-www-form-urlencoded``. This source can also work with XML to some extent, e.g. it can parse an RSS feed like this: .. code-block:: toml [ProxmoxVE] source = "htmlparser" url = "https://my.proxmox.com/en/announcements/tag/proxmox-ve/rss" xpath = "//item/title" from_pattern = 'Proxmox VE ([\d.]+) released!' to_pattern = '\1' .. note:: An additional dependency "lxml" is required. You can use ``pip install 'nvchecker[htmlparser]'``. Search with a JSON Parser (jq) ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "jq" Send an HTTP request and search through the body with a specific ``jq`` filter. url The URL of the HTTP request. filter A ``jq`` filter used to find the version string. post_data (*Optional*) When present, a ``POST`` request (instead of a ``GET``) will be used. The value should be a string containing the full body of the request. The encoding of the string can be specified using the ``post_data_type`` option. post_data_type (*Optional*) Specifies the ``Content-Type`` of the request body (``post_data``). By default, this is ``application/json``. This source supports :ref:`list options`. .. note:: An additional dependency "jq" is required. Find with a Command ~~~~~~~~~~~~~~~~~~~ :: source = "cmd" Use a shell command line to get the version. The output is stripped first, so trailing newlines are not a problem. cmd The command line to use. This will run with the system's standard shell (i.e. ``/bin/sh``). Check AUR ~~~~~~~~~ :: source = "aur" Check `Arch User Repository `_ for updates. Per-item proxy setting doesn't work for this because several items will be batched into one request. aur The package name in AUR. If empty, use the name of the software (the *table name*). strip_release Strip the release part. use_last_modified Append the last modified time to the version. Check GitHub ~~~~~~~~~~~~ :: source = "github" Check `GitHub `_ for updates. The version returned is in date format ``%Y%m%d.%H%M%S``, e.g. ``20130701.012212``, unless ``use_latest_release``, ``use_max_tag`` or ``use_max_release`` is used. See below. github The github repository, with author, e.g. ``lilydjwg/nvchecker``. branch Which branch to track? Default: the repository's default. path Only commits containing this file path will be returned. host Hostname for self-hosted GitHub instance. use_latest_release Set this to ``true`` to check for the latest release on GitHub. GitHub releases are not the same as git tags. You'll see big version names and descriptions on the release page for such releases, e.g. `zfsonlinux/zfs's `_, and those small ones like `nvchecker's `_ are only git tags that should use ``use_max_tag`` below. Will return the release's tag name instead of date. (For historical reasons it doesn't return the release name. See below to change.) use_max_release Set this to ``true`` to check for the max release on GitHub. This option returns the largest one sorted by the ``sort_version_key`` option. Will return the tag name instead of date. use_release_name When ``use_latest_release`` or ``use_max_release`` is ``true``, setting this to ``true`` will cause nvchecker to return the release name instead of the tag name. include_prereleases When ``use_latest_release`` or ``use_max_release`` is ``true``, set this to ``true`` to take prereleases into account. This returns the release names (not the tag names). This requires a token because it's using the v4 GraphQL API. 
use_latest_tag Set this to ``true`` to check for the latest tag on GitHub. This requires a token because it's using the v4 GraphQL API. query When ``use_latest_tag`` is ``true``, this sets a query for the tag. The exact matching method is not documented by GitHub. use_max_tag Set this to ``true`` to check for the max tag on GitHub. Unlike ``use_max_release``, this option includes both annotated tags and lightweight ones, and return the largest one sorted by the ``sort_version_key`` option. Will return the tag name instead of date. token A personal authorization token used to call the API. An authorization token may be needed in order to use ``use_latest_tag``, ``include_prereleases`` or to request more frequently than anonymously. To set an authorization token, you can set: - the token option - an entry in the keyfile for the host (e.g. ``github.com``) - an entry in your ``netrc`` file for the host This source supports :ref:`list options` when ``use_max_tag`` or ``use_max_release`` is set. Check Gitea ~~~~~~~~~~~ :: source = "gitea" Check `Gitea `_ for updates. The version returned is in date format ``%Y%m%d``, e.g. ``20130701``, unless ``use_max_tag`` is used. See below. gitea The gitea repository, with author, e.g. ``gitea/tea``. branch Which branch to track? Default: the repository's default. use_max_tag Set this to ``true`` to check for the max tag on Gitea. Will return the biggest one sorted by old ``pkg_resources.parse_version``. Will return the tag name instead of date. host Hostname for self-hosted Gitea instance. token Gitea authorization token used to call the API. To set an authorization token, you can set: - the token option - an entry in the keyfile for the host (e.g. ``gitea.com``) - an entry in your ``netrc`` file for the host This source supports :ref:`list options` when ``use_max_tag`` is set. Check Gogs / Forgejo / Codeberg ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Please use the above "gitea" source. Gitea is a fork of `Gogs `_. `Forgejo `_ is a fork of Gitea. Codeberg is a code hosting provider that uses Forgejo. They share the same API endpoints nvchecker uses. Alternatively, you can try the generic "git" source. Check BitBucket ~~~~~~~~~~~~~~~ :: source = "bitbucket" Check `BitBucket `_ for updates. The version returned is in date format ``%Y%m%d``, e.g. ``20130701``, unless ``use_max_tag`` is used. See below. bitbucket The bitbucket repository, with author, e.g. ``lilydjwg/dotvim``. branch Which branch to track? Default: the repository's default. use_max_tag Set this to ``true`` to check for the max tag on BitBucket. Will return the biggest one sorted by old ``pkg_resources.parse_version``. Will return the tag name instead of date. use_sorted_tags If ``true``, tags are queried and sorted according to the ``query`` and ``sort`` keys. Will return the tag name instead of the date. query A query string use to filter tags when ``use_sorted_tags`` set (see `here `__ for examples). The string does not need to be escaped. sort A field used to sort the tags when ``use_sorted_tags`` is set (see `here `__ for examples). Defaults to ``-target.date`` (sorts tags in descending order by date). max_page How many pages do we search for the max tag? Default is 3. This works when ``use_max_tag`` is set. This source supports :ref:`list options` when ``use_max_tag`` or ``use_sorted_tags`` is set. Check GitLab ~~~~~~~~~~~~ :: source = "gitlab" Check `GitLab `_ for updates. The version returned is in date format ``%Y%m%d``, e.g. ``20130701``, unless ``use_max_tag`` is used. See below. 
gitlab The gitlab repository, with author, e.g. ``Deepin/deepin-music``. branch Which branch to track? use_max_tag Set this to ``true`` to check for the max tag on GitLab. Will return the biggest one sorted by old ``pkg_resources.parse_version``. Will return the tag name instead of date. host Hostname for self-hosted GitLab instance. token GitLab authorization token used to call the API. To set an authorization token, you can set: - the token option - an entry in the keyfile for the host (e.g. ``gitlab.com``) - an entry in your ``netrc`` file for the host This source supports :ref:`list options` when ``use_max_tag`` is set. Check PyPI ~~~~~~~~~~ :: source = "pypi" Check `PyPI `_ for updates. Yanked releases are ignored. pypi The name used on PyPI, e.g. ``PySide``. use_pre_release Whether to accept pre release. Default is false. This source supports :ref:`list options`. .. note:: An additional dependency "packaging" is required. You can use ``pip install 'nvchecker[pypi]'``. Check RubyGems ~~~~~~~~~~~~~~ :: source = "gems" Check `RubyGems `_ for updates. gems The name used on RubyGems, e.g. ``sass``. This source supports :ref:`list options`. Check NPM Registry ~~~~~~~~~~~~~~~~~~ :: source = "npm" Check `NPM Registry `_ for updates. npm The name used on NPM Registry, e.g. ``coffee-script``. To configure which registry to query, a source plugin option is available. You can specify like this:: [__config__.source.npm] registry = "https://registry.npm.taobao.org" Check Hackage ~~~~~~~~~~~~~ :: source = "hackage" Check `Hackage `_ for updates. hackage The name used on Hackage, e.g. ``pandoc``. Check CPAN ~~~~~~~~~~ :: source = "cpan" Check `MetaCPAN `_ for updates. cpan The name used on CPAN, e.g. ``YAML``. Check CRAN ~~~~~~~~~~ :: source = "cran" Check `CRAN `_ for updates. cran The name used on CRAN, e.g. ``xml2``. Check Packagist ~~~~~~~~~~~~~~~ :: source = "packagist" Check `Packagist `_ for updates. packagist The name used on Packagist, e.g. ``monolog/monolog``. Check crates.io ~~~~~~~~~~~~~~~ :: source = "cratesio" Check `crates.io `_ for updates. Yanked releases are ignored. cratesio The crate name on crates.io, e.g. ``tokio``. use_pre_release Whether to accept pre release. Default is false. This source supports :ref:`list options`. Check Local Pacman Database ~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "pacman" This is used when you run ``nvchecker`` on an Arch Linux system and the program always keeps up with a package in your configured repositories for `Pacman`_. pacman The package name to reference to. strip_release Strip the release part. Check Arch Linux official packages ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "archpkg" This enables you to track the update of `Arch Linux official packages `_, without needing of pacman and an updated local Pacman databases. archpkg Name of the Arch Linux package. strip_release Strip the release part, only return part before ``-``. provided Instead of the package version, return the version this package provides. Its value is what the package provides, and ``strip_release`` takes effect too. This is best used with libraries. Check Debian Linux official packages ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "debianpkg" This enables you to track the update of `Debian Linux official packages `_, without needing of apt and an updated local APT database. debianpkg Name of the Debian Linux source package. suite Name of the Debian release (jessie, wheezy, etc, defaults to sid) strip_release Strip the release part. 
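A minimal entry may look like this (the package name and suite here are only an illustration):

.. code-block:: toml

   [apt]
   source = "debianpkg"
   debianpkg = "apt"
   suite = "sid"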
Check Ubuntu Linux official packages ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "ubuntupkg" This enables you to track the update of `Ubuntu Linux official packages `_, without needing of apt and an updated local APT database. ubuntupkg Name of the Ubuntu Linux source package. suite Name of the Ubuntu release (xenial, zesty, etc, defaults to None, which means no limit on suite) strip_release Strip the release part. Check Repology ~~~~~~~~~~~~~~ :: source = "repology" This enables you to track updates from `Repology `_ (repology.org). repology Name of the ``project`` to check. repo Check the version in this repo. This field is required. subrepo Check the version in this subrepo. This field is optional. When omitted all subrepos are queried. This source supports :ref:`list options`. Check Anitya ~~~~~~~~~~~~ :: source = "anitya" This enables you to track updates from `Anitya `_ (release-monitoring.org). anitya ``distro/package``, where ``distro`` can be a lot of things like "fedora", "arch linux", "gentoo", etc. ``package`` is the package name of the chosen distribution. anitya_id The identifier of the project/package in anitya. Note that either anitya or anitya_id needs to be specified, anitya_id is preferred when both specified. Check Android SDK ~~~~~~~~~~~~~~~~~ :: source = "android_sdk" This enables you to track updates of Android SDK packages listed in ``sdkmanager --list``. android_sdk The package path prefix. This value is matched against the ``path`` attribute in all nodes in an SDK manifest XML. The first match is used for version comparisons. repo Should be one of ``addon`` or ``package``. Packages in ``addon2-1.xml`` use ``addon`` and packages in ``repository2-1.xml`` use ``package``. channel Choose the target channel from one of ``stable``, ``beta``, ``dev`` or ``canary``. This option also accepts a comma-separated list to pick from multiple channels. For example, the latest unstable version is picked with ``beta,dev,canary``. The default is ``stable``. host_os Choose the target OS for the tracked package from one of ``linux``, ``macosx``, ``windows``. The default is ``linux``. For OS-independent packages (e.g., Java JARs), this field is ignored. This source supports :ref:`list options`. Check Sparkle framework ~~~~~~~~~~~~~~~~~~~~~~~ :: source = "sparkle" This enables you to track updates of macOS applications which using `Sparkle framework `_. sparkle The url of the sparkle appcast. release_notes_language The language of release notes to return when localized release notes are available (defaults to ``en`` for English, the unlocalized release notes are used as a fallback) Check Pagure ~~~~~~~~~~~~ :: source = "pagure" This enables you to check updates from `Pagure `_. pagure The project name, optionally with a namespace. host Hostname of alternative instance like src.fedoraproject.org. This source returns tags and supports :ref:`list options`. Check APT repository ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "apt" This enables you to track the update of an arbitrary APT repository, without needing of apt and an updated local APT database. pkg Name of the APT binary package. srcpkg Name of the APT source package. mirror URL of the repository. suite Name of the APT repository release (jessie, wheezy, etc) repo Name of the APT repository (main, contrib, etc, defaults to main) arch Architecture of the repository (i386, amd64, etc, defaults to amd64) strip_release Strip the release part. 
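As a sketch, an entry tracking a binary package could look like this (the mirror URL, package name and suite are illustrative):

.. code-block:: toml

   [fcitx5]
   source = "apt"
   pkg = "fcitx5"
   mirror = "https://deb.debian.org/debian/"
   suite = "sid"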
Note that either pkg or srcpkg needs to be specified (but not both) or the item name will be used as pkg. Check RPM repository ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "rpmrepo" This enables you to check latest package versions in an arbitrary RPM repository in `repomd` format used by package managers such as ``dnf`` (Fedora, RHEL, AlmaLinux etc.) or ``zypper`` (openSUSE) without the need for native RPM tools. pkg Name of the RPM package (you can also use ``rpmrepo`` as with other sources, but ``pkg`` is preferred for clarity) repo URL of the repository (required, ``repodata/repomd.xml`` should be there) arch Architecture of the RPM package (``binary``, ``src``, ``any``, ``x86_64``, ``aarch64``, etc, defaults to ``binary``) This source supports :ref:`list options`. .. note:: An additional dependency "lxml" is required. You can use ``pip install 'nvchecker[rpmrepo]'``. Check Git repository ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "git" This enables you to check tags or branch commits of an arbitrary git repository, also useful for scenarios like a github project having too many tags. git URL of the Git repository. use_commit Return a commit hash instead of tags. branch When ``use_commit`` is true, return the commit on the specified branch instead of the default one. When this source returns tags (``use_commit`` is not true) it supports :ref:`list options`. Check Mercurial repository ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "mercurial" This enables you to check tags of an arbitrary mercurial (hg) repository. mercurial URL of the Mercurial repository. This source returns tags and supports :ref:`list options`. Check container registry ~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "container" This enables you to check tags of images on a container registry like Docker. container The path (and tag) for the container image. For official Docker images, use namespace ``library/`` (e.g. ``library/python``). If no tag is given, it checks latest available tag (sort by tag name), otherwise, it checks the tag's update time. registry The container registry host. Default: ``docker.io`` ``registry`` and ``container`` are the host and the path used in the pull command. Note that the ``docker`` command allows omitting some parts of the container name while this plugin requires the full name. If the host part is omitted, use ``docker.io``, and if there is no slash in the path, prepend ``library/`` to the path. 
Here are some examples: +-----------------------------------------------------+-----------+---------------------------------+ | Pull command | registry | container | +=====================================================+===========+=================================+ | docker pull quay.io/prometheus/node-exporter | quay.io | prometheus/node-exporter | +-----------------------------------------------------+-----------+---------------------------------+ | docker pull quay.io/prometheus/node-exporter:master | quay.io | prometheus/node-exporter:master | +-----------------------------------------------------+-----------+---------------------------------+ | docker pull openeuler/openeuler | docker.io | openeuler/openeuler | +-----------------------------------------------------+-----------+---------------------------------+ | docker pull openeuler/openeuler:20.03-lts | docker.io | openeuler/openeuler:20.03-lts | +-----------------------------------------------------+-----------+---------------------------------+ | docker pull python | docker.io | library/python | +-----------------------------------------------------+-----------+---------------------------------+ | docker pull python:3.11 | docker.io | library/python:3.11 | +-----------------------------------------------------+-----------+---------------------------------+ If no tag is given, this source returns tags and supports :ref:`list options`. Check ALPM database ~~~~~~~~~~~~~~~~~~~ :: source = "alpm" Check package updates in a local ALPM database. alpm Name of the package. repo Name of the package repository in which the package resides. If not provided, nvchecker will use ``repos`` value, see below. repos An array of possible repositories in which the package may reside in, nvchecker will use the first repository which contains the package. If not provided, ``core``, ``extra`` and ``multilib`` will be used, in that order. dbpath Path to the ALPM database directory. Default: ``/var/lib/pacman``. You need to update the database yourself. strip_release Strip the release part, only return the part before ``-``. provided Instead of the package version, return the version this package provides. Its value is what the package provides, and ``strip_release`` takes effect too. This is best used with libraries. .. note:: An additional dependency "pyalpm" is required. Check ALPM files database ~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "alpmfiles" Search package files in a local ALPM files database. The package does not need to be installed. This can be useful for checking shared library versions if a package does not list them in its ``provides``. pkgname Name of the package. filename Regular expression for the file path. If it contains one matching group, that group is returned. Otherwise return the whole file path. Paths do not have an initial slash. For example, ``usr/lib/libuv\\.so\\.([^.]+)`` matches the major shared library version of libuv. repo Name of the package repository in which the package resides. If not provided, search all repositories. strip_dir Strip directory from the path before matching. Defaults to ``false``. dbpath Path to the ALPM database directory. Default: ``/var/lib/pacman``. You need to update the database yourself with ``pacman -Fy``. Check Open Vsx ~~~~~~~~~~~~~~~ :: source = "openvsx" Check `Open Vsx `_ for updates. openvsx The extension's Unique Identifier on open-vsx.org, e.g. ``ritwickdey.LiveServer``. 
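For example (the table name is arbitrary):

.. code-block:: toml

   [live-server]
   source = "openvsx"
   openvsx = "ritwickdey.LiveServer"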
Check Visual Studio Code Marketplace ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "vsmarketplace" Check `Visual Studio Code Marketplace `_ for updates. vsmarketplace The extension's Unique Identifier on marketplace.visualstudio.com/vscode, e.g. ``ritwickdey.LiveServer``. Check Go packages and modules ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "go" Check `Go packages and modules `_ for updates. go The name of Go package or module, e.g. ``github.com/caddyserver/caddy/v2/cmd``. Check opam repository ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "opam" This enables you to check latest package versions in an arbitrary `opam repository ` without the need for the opam command line tool. pkg Name of the opam package repo URL of the repository (optional, the default ``https://opam.ocaml.org`` repository is used if not specified) This source supports :ref:`list options`. Check Snapcraft ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ :: source = "snapcraft" This source allows you to check the latest package versions in the `Snapcraft `_. snap Name of the snap package. channel Name of the channel. Combine others' results ~~~~~~~~~~~~~~~~~~~~~~~ :: source = "combiner" This source can combine results from other entries. from A list of entry names to wait results for. format A format string to combine the results into the final string. Example: .. code-block:: toml [entry-1] source = "cmd" cmd = "echo 1" [entry-2] source = "cmd" cmd = "echo 2" [entry-3] source = "combiner" from = ["entry-1", "entry-2"] format = "$1-$2" Manually updating ~~~~~~~~~~~~~~~~~ :: source = "manual" This enables you to manually specify the version (maybe because you want to approve each release before it gets to the script). manual The version string. Extending ~~~~~~~~~ It's possible to extend the supported sources by writing plugins. See :doc:`plugin` for documentation. .. _Pacman: https://wiki.archlinux.org/title/Pacman .. _toml: https://toml.io/ nvchecker-2.17/keyfile.toml.enc000066400000000000000000000003461476544462000165230ustar00rootroot00000000000000Salted__K](Jv4/Lԍ(ӓO|NT̒O @ 8Jz cO˲kd/Z)Q}4׵W@* W%BowDA\W$:ؖ ߅9߽0"1yE53Uw"0j!q^ 뽼J$onhu`0GNHXl53D nvchecker-2.17/mypy.ini000066400000000000000000000010261476544462000151250ustar00rootroot00000000000000[mypy] warn_unused_configs = True warn_redundant_casts = True warn_unused_ignores = True show_error_context = True show_column_numbers = True no_implicit_optional = True [mypy-structlog] ignore_missing_imports = True [mypy-pyalpm] ignore_missing_imports = True [mypy-flaky] ignore_missing_imports = True [mypy-pytest_httpbin] ignore_missing_imports = True [mypy-lxml] ignore_missing_imports = True [mypy-tomllib] ignore_missing_imports = True [mypy-jq] ignore_missing_imports = True [mypy-tomli] ignore_missing_imports = True nvchecker-2.17/nvchecker/000077500000000000000000000000001476544462000153775ustar00rootroot00000000000000nvchecker-2.17/nvchecker/__init__.py000066400000000000000000000001451476544462000175100ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2013-2025 lilydjwg , et al. __version__ = '2.17' nvchecker-2.17/nvchecker/__main__.py000077500000000000000000000067521476544462000175060ustar00rootroot00000000000000#!/usr/bin/env python3 # MIT licensed # Copyright (c) 2013-2024 lilydjwg , et al. from __future__ import annotations import sys import argparse import asyncio from typing import Coroutine, Tuple from pathlib import Path import structlog from . 
import core from .util import ResultData, RawResult, KeyManager, EntryWaiter from .ctxvars import proxy as ctx_proxy logger = structlog.get_logger(logger_name=__name__) def main() -> None: parser = argparse.ArgumentParser(description='New version checker for software') parser.add_argument('-k', '--keyfile', metavar='FILE', type=str, help='use specified keyfile (override the one in configuration file)') parser.add_argument('-t', '--tries', default=1, type=int, metavar='N', help='try N times when network errors occur') parser.add_argument('--failures', action='store_true', help='exit with code 3 if failures / errors happen during checking') parser.add_argument('-e', '--entry', type=str, help='only execute on specified entry (useful for debugging)') core.add_common_arguments(parser) args = parser.parse_args() if core.process_common_arguments(args): return try: entries, options = core.load_file( args.file, use_keymanager=not bool(args.keyfile)) if args.entry: if args.entry not in entries: sys.exit('Specified entry not found in config') entries = {args.entry: entries[args.entry]} if args.keyfile: keymanager = KeyManager(Path(args.keyfile)) else: keymanager = options.keymanager except core.FileLoadError as e: sys.exit(str(e)) if options.proxy is not None: ctx_proxy.set(options.proxy) task_sem = asyncio.Semaphore(options.max_concurrency) result_q: asyncio.Queue[RawResult] = asyncio.Queue() dispatcher = core.setup_httpclient( options.max_concurrency, options.httplib, options.http_timeout, ) entry_waiter = EntryWaiter() try: futures = dispatcher.dispatch( entries, task_sem, result_q, keymanager, entry_waiter, args.tries, options.source_configs, ) except ModuleNotFoundError as e: sys.exit(f'Error: {e}') if options.ver_files is not None: oldvers = core.read_verfile(options.ver_files[0]) else: oldvers = {} result_coro = core.process_result(oldvers, result_q, entry_waiter, verbose=bool(args.entry)) runner_coro = core.run_tasks(futures) if sys.version_info >= (3, 10): # Python 3.10 has deprecated asyncio.get_event_loop results, has_failures = asyncio.run(run(result_coro, runner_coro)) else: # Python < 3.10 will create an eventloop when asyncio.Queue is initialized results, has_failures = asyncio.get_event_loop().run_until_complete(run(result_coro, runner_coro)) if options.ver_files is not None: newverf = options.ver_files[1] if args.entry: # don't remove other entries when only one entry is specified on cmdline vers = core.read_verfile(newverf) else: vers = {} vers.update(results) core.write_verfile(newverf, vers) if args.failures and has_failures: sys.exit(3) async def run( result_coro: Coroutine[None, None, Tuple[ResultData, bool]], runner_coro: Coroutine[None, None, None], ) -> Tuple[ResultData, bool]: result_fu = asyncio.create_task(result_coro) runner_fu = asyncio.create_task(runner_coro) await runner_fu result_fu.cancel() return await result_fu if __name__ == '__main__': main() nvchecker-2.17/nvchecker/api.py000066400000000000000000000006171476544462000165260ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2020 lilydjwg , et al. 
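# This module re-exports the names that make up the public plugin API;
# source plugins should import these from ``nvchecker.api``.
# See docs/plugin.rst and docs/api.rst for documentation.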
from .httpclient import session, TemporaryError, HTTPError from .util import ( Entry, BaseWorker, RawResult, VersionResult, RichResult, AsyncCache, KeyManager, GetVersionError, EntryWaiter, ) from .sortversion import sort_version_keys from .ctxvars import tries, proxy, user_agent, httptoken, entry_waiter, verify_cert nvchecker-2.17/nvchecker/core.py000066400000000000000000000314201476544462000167010ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2013-2020, 2024 lilydjwg , et al. from __future__ import annotations import os import sys import asyncio from asyncio import Queue import logging import argparse from typing import ( Tuple, NamedTuple, Optional, List, Union, cast, Dict, Awaitable, Sequence, Any, TYPE_CHECKING, ) import types from pathlib import Path from importlib import import_module import re import contextvars import json import dataclasses import structlog if TYPE_CHECKING: import tomli as tomllib else: try: import tomllib except ModuleNotFoundError: import tomli as tomllib import platformdirs from .lib import nicelogger from . import slogconf from .util import ( Entry, Entries, KeyManager, RawResult, RichResult, ResultData, FunctionWorker, GetVersionError, FileLoadError, EntryWaiter, ) from . import __version__ from .sortversion import sort_version_keys from .ctxvars import tries as ctx_tries from .ctxvars import entry_waiter as ctx_entry_waiter from . import httpclient logger = structlog.get_logger(logger_name=__name__) def get_default_config() -> str: confdir = platformdirs.user_config_dir(appname='nvchecker') file = os.path.join(confdir, 'nvchecker.toml') return file def add_common_arguments(parser: argparse.ArgumentParser) -> None: parser.add_argument('-l', '--logging', choices=('debug', 'info', 'warning', 'error'), default='info', help='logging level (default: info)') parser.add_argument('--logger', default='pretty', choices=['pretty', 'json', 'both'], help='select which logger to use') parser.add_argument('--json-log-fd', metavar='FD', type=lambda fd: os.fdopen(int(fd), mode='w'), help='specify fd to send json logs to. 
stdout by default') parser.add_argument('-V', '--version', action='store_true', help='show version and exit') default_config = get_default_config() parser.add_argument('-c', '--file', metavar='FILE', type=str, default=default_config, help=f'software version configuration file [default: {default_config}]') def process_common_arguments(args: argparse.Namespace) -> bool: '''return True if should stop''' processors = [ slogconf.exc_info, slogconf.filter_exc, slogconf.filter_nones, slogconf.filter_taskname, ] logger_factory = None if args.logger in ['pretty', 'both']: slogconf.fix_logging() nicelogger.enable_pretty_logging( getattr(logging, args.logging.upper())) processors.append(slogconf.stdlib_renderer) if args.logger == 'pretty': logger_factory=structlog.PrintLoggerFactory( file=open(os.devnull, 'w'), ) processors.append(slogconf.null_renderer) if args.logger in ['json', 'both']: processors.extend([ structlog.processors.format_exc_info, slogconf.json_renderer, ]) if logger_factory is None: logfile = args.json_log_fd or sys.stdout logger_factory = structlog.PrintLoggerFactory(file=logfile) structlog.configure( processors = processors, logger_factory = logger_factory, ) if args.version: progname = os.path.basename(sys.argv[0]) print(f'{progname} v{__version__}') return True return False def safe_overwrite(file: Path, data: Union[bytes, str], *, method: str = 'write', mode: str = 'w', encoding: Optional[str] = None) -> None: # FIXME: directory has no read perm # FIXME: hard links resolved_path = file.resolve() tmpname = str(resolved_path) + '.tmp' # if not using "with", write can fail without exception with open(tmpname, mode, encoding=encoding) as f: getattr(f, method)(data) # see also: https://thunk.org/tytso/blog/2009/03/15/dont-fear-the-fsync/ f.flush() os.fsync(f.fileno()) # if the above write failed (because disk is full etc), the old data should be kept os.rename(tmpname, resolved_path) def read_verfile(file: Path) -> ResultData: try: with open(file) as f: data = f.read() except FileNotFoundError: return {} try: v = json.loads(data) except json.decoder.JSONDecodeError: # old format v = {} for l in data.splitlines(): name, ver = l.rstrip().split(None, 1) v[name] = ver if v.get('version') is None: v = {k: RichResult(version=a) for k, a in v.items()} elif v['version'] == 2: v = {k: RichResult(**a) for k, a in v['data'].items()} else: raise Exception('unknown verfile version', v['version']) return v def write_verfile(file: Path, versions: ResultData) -> None: d = { 'version': 2, # sort and indent to make it friendly to human and git 'data': dict(sorted(versions.items())), } data = json.dumps( d, indent = 2, ensure_ascii = False, default = json_encode, ) + '\n' safe_overwrite(file, data) def json_encode(obj): if isinstance(obj, RichResult): d = {k: v for k, v in dataclasses.asdict(obj).items() if v is not None} return d raise TypeError(obj) class Options(NamedTuple): ver_files: Optional[Tuple[Path, Path]] max_concurrency: int proxy: Optional[str] keymanager: KeyManager source_configs: Dict[str, Dict[str, Any]] httplib: Optional[str] http_timeout: int def load_file( file: str, *, use_keymanager: bool, ) -> Tuple[Entries, Options]: try: with open(file, 'rb') as f: config = tomllib.load(f) except (OSError, tomllib.TOMLDecodeError) as e: raise FileLoadError('version configuration file', file, e) ver_files: Optional[Tuple[Path, Path]] = None keymanager = KeyManager(None) source_configs = {} if '__config__' in config: c = config.pop('__config__') d = Path(file).parent if 'oldver' in c and 'newver' 
in c: oldver_s = os.path.expandvars( os.path.expanduser(c.get('oldver'))) oldver = d / oldver_s newver_s = os.path.expandvars( os.path.expanduser(c.get('newver'))) newver = d / newver_s ver_files = oldver, newver if use_keymanager: keyfile = c.get('keyfile') if keyfile: keyfile_s = os.path.expandvars( os.path.expanduser(c.get('keyfile'))) keyfile = d / keyfile_s keymanager = KeyManager(keyfile) if 'source' in c: source_configs = c['source'] max_concurrency = c.get('max_concurrency', 20) proxy = c.get('proxy') httplib = c.get('httplib', None) http_timeout = c.get('http_timeout', 20) else: max_concurrency = 20 proxy = None httplib = None http_timeout = 20 return cast(Entries, config), Options( ver_files, max_concurrency, proxy, keymanager, source_configs, httplib, http_timeout, ) def setup_httpclient( max_concurrency: int = 20, httplib: Optional[str] = None, http_timeout: int = 20, ) -> Dispatcher: httplib_ = httplib or httpclient.find_best_httplib() httpclient.setup( httplib_, max_concurrency, http_timeout) return Dispatcher() class Dispatcher: def dispatch( self, entries: Entries, task_sem: asyncio.Semaphore, result_q: Queue[RawResult], keymanager: KeyManager, entry_waiter: EntryWaiter, tries: int, source_configs: Dict[str, Dict[str, Any]], ) -> List[asyncio.Future]: mods: Dict[str, Tuple[types.ModuleType, List]] = {} ctx_tries.set(tries) ctx_entry_waiter.set(entry_waiter) root_ctx = contextvars.copy_context() for name, entry in entries.items(): source = entry.get('source', 'none') if source not in mods: mod = import_module('nvchecker_source.' + source) tasks: List[Tuple[str, Entry]] = [] mods[source] = mod, tasks config = source_configs.get(source) if config and getattr(mod, 'configure'): mod.configure(config) else: tasks = mods[source][1] tasks.append((name, entry)) ret = [] for mod, tasks in mods.values(): if hasattr(mod, 'Worker'): worker_cls = mod.Worker else: worker_cls = FunctionWorker ctx = root_ctx.copy() worker = ctx.run( worker_cls, task_sem, result_q, tasks, keymanager, ) if worker_cls is FunctionWorker: func = mod.get_version ctx.run(worker.initialize, func) ret.append(ctx.run(worker._run_maynot_raise)) return ret def substitute_version( version: str, conf: Entry, ) -> str: ''' Substitute the version string via defined rules in the configuration file. See usage.rst#global-options for details. 
''' prefix = conf.get('prefix') if prefix: if version.startswith(prefix): version = version[len(prefix):] from_pattern = conf.get('from_pattern') if from_pattern: to_pattern = conf.get('to_pattern') if to_pattern is None: raise ValueError("from_pattern exists but to_pattern doesn't") version = re.sub(from_pattern, to_pattern, version) return version def apply_list_options( versions: List[Union[str, RichResult]], conf: Entry, name: str, ) -> Optional[Union[str, RichResult]]: pattern = conf.get('include_regex') if versions and pattern: re_pat = re.compile(pattern) versions2 = [x for x in versions if re_pat.fullmatch(str(x))] if not versions2: logger.warning('include_regex matched no versions', name=name, versions=versions, regex=pattern) return None versions = versions2 pattern = conf.get('exclude_regex') if pattern: re_pat = re.compile(pattern) versions = [x for x in versions if not re_pat.fullmatch(str(x))] ignored = set(conf.get('ignored', '').split()) if ignored: versions = [x for x in versions if str(x) not in ignored] if not versions: return None sort_version_key = sort_version_keys[ conf.get("sort_version_key", "parse_version")] versions.sort(key=lambda version: sort_version_key(str(version))) # type: ignore return versions[-1] def _process_result(r: RawResult) -> Union[RichResult, Exception]: version = r.version conf = r.conf name = r.name url = None revision = None gitref = None if isinstance(version, GetVersionError): kw = version.kwargs kw['name'] = name logger.error(version.msg, **kw) return version elif isinstance(version, Exception): logger.error('unexpected error happened', name=r.name, exc_info=r.version) return version elif isinstance(version, list): version_str = apply_list_options(version, conf, name) if isinstance(version_str, RichResult): url = version_str.url gitref = version_str.gitref revision = version_str.revision version_str = version_str.version elif isinstance(version, RichResult): version_str = version.version url = version.url gitref = version.gitref revision = version.revision else: version_str = version if version_str: version_str = version_str.replace('\n', ' ') try: version_str = substitute_version(version_str, conf) return RichResult( version = version_str, url = url, gitref = gitref, revision = revision, ) except (ValueError, re.error) as e: logger.exception('error occurred in version substitutions', name=name) return e else: return ValueError('no version returned') def check_version_update( oldvers: ResultData, name: str, r: RichResult, verbose: bool, ) -> None: if old_result := oldvers.get(name): oldver = old_result.version else: oldver = None if not oldver or oldver != r.version: logger.info( 'updated', name = name, version = r.version, revision = r.revision, old_version = oldver, url = r.url, ) else: # provide visible user feedback if it was the only entry level = logging.INFO if verbose else logging.DEBUG logger.log(level, 'up-to-date', name=name, version=r.version, url=r.url) async def process_result( oldvers: ResultData, result_q: Queue[RawResult], entry_waiter: EntryWaiter, verbose: bool = False, ) -> Tuple[ResultData, bool]: ret = {} has_failures = False try: while True: r = await result_q.get() try: r1 = _process_result(r) except Exception as e: logger.exception('error processing result', result=r) r1 = e if isinstance(r1, Exception): entry_waiter.set_exception(r.name, r1) # no versions are returned from "apply_list_options"? 
logger.error('no-result', name=r.name, error=repr(r1)) has_failures = True continue check_version_update(oldvers, r.name, r1, verbose) entry_waiter.set_result(r.name, r1.version) ret[r.name] = r1 except asyncio.CancelledError: return ret, has_failures async def run_tasks( futures: Sequence[Awaitable[None]] ) -> None: for fu in asyncio.as_completed(futures): await fu nvchecker-2.17/nvchecker/ctxvars.py000066400000000000000000000012371476544462000174460ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2020 lilydjwg , et al. from __future__ import annotations from contextvars import ContextVar from typing import Optional, TYPE_CHECKING from . import __version__ DEFAULT_USER_AGENT = f'lilydjwg/nvchecker {__version__}' if TYPE_CHECKING: from .util import EntryWaiter tries = ContextVar('tries', default=1) proxy: ContextVar[Optional[str]] = ContextVar('proxy', default=None) user_agent = ContextVar('user_agent', default=DEFAULT_USER_AGENT) httptoken = ContextVar('httptoken', default=None) entry_waiter: ContextVar[EntryWaiter] = ContextVar('entry_waiter') verify_cert = ContextVar('verify_cert', default=True) nvchecker-2.17/nvchecker/httpclient/000077500000000000000000000000001476544462000175555ustar00rootroot00000000000000nvchecker-2.17/nvchecker/httpclient/__init__.py000066400000000000000000000021411476544462000216640ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2013-2020 lilydjwg , et al. from typing import Optional from .base import TemporaryError, HTTPError class Proxy: _obj = None def set_obj(self, obj): super().__setattr__('_obj', obj) def __getattr__(self, name): return getattr(self._obj, name) def __setattr__(self, name, value): return setattr(self._obj, name, value) session = Proxy() def setup( which: Optional[str] = None, concurreny: int = 20, timeout: int = 20, ) -> None: if which is None: which = find_best_httplib() m = __import__( '%s_httpclient' % which, globals(), locals(), level=1) session.set_obj(m.session) session.setup(concurreny, timeout) def find_best_httplib() -> str: try: import tornado, pycurl # connection reuse, http/2 which = 'tornado' except ImportError: try: import aiohttp which = 'aiohttp' # connection reuse except ImportError: try: import httpx which = 'httpx' except ImportError: import tornado which = 'tornado' # fallback return which nvchecker-2.17/nvchecker/httpclient/aiohttp_httpclient.py000066400000000000000000000042741476544462000240440ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2013-2020 lilydjwg , et al. 
import asyncio from typing import Optional, Dict import structlog import aiohttp from .base import BaseSession, TemporaryError, Response, HTTPError __all__ = ['session'] logger = structlog.get_logger(logger_name=__name__) class AiohttpSession(BaseSession): session = None def setup( self, concurreny: int = 20, timeout: int = 20, ) -> None: self._concurreny = concurreny self._timeout = timeout async def request_impl( self, url: str, *, method: str, proxy: Optional[str] = None, headers: Dict[str, str] = {}, follow_redirects: bool = True, params = (), json = None, body = None, verify_cert: bool = True, ) -> Response: if self.session is None: # need to create in async context self.session = aiohttp.ClientSession( connector = aiohttp.TCPConnector(limit=self._concurreny), timeout = aiohttp.ClientTimeout(total=self._timeout), trust_env = True, ) kwargs = { 'headers': headers, 'params': params, 'allow_redirects': follow_redirects, } if not verify_cert: kwargs['ssl'] = False if proxy is not None: kwargs['proxy'] = proxy if body is not None: # Make sure all backends have the same default encoding for post data. if 'Content-Type' not in headers: headers = {**headers, 'Content-Type': 'application/x-www-form-urlencoded'} kwargs['headers'] = headers kwargs['data'] = body.encode() elif json is not None: kwargs['json'] = json try: logger.debug('send request', method=method, url=url, kwargs=kwargs) res = await self.session.request( method, url, **kwargs) except ( asyncio.TimeoutError, aiohttp.ClientConnectorError, ) as e: raise TemporaryError(599, repr(e), e) err_cls: Optional[type] = None if res.status >= 500: err_cls = TemporaryError elif res.status >= 400: err_cls = HTTPError if err_cls is not None: raise err_cls(res.status, res.reason, res) body = await res.content.read() return Response(res.headers, body) session = AiohttpSession() nvchecker-2.17/nvchecker/httpclient/base.py000066400000000000000000000057101476544462000210440ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2019-2020 lilydjwg , et al. import structlog from typing import Optional, Dict, Mapping import json as _json from ..ctxvars import tries, proxy, user_agent, httptoken, verify_cert logger = structlog.get_logger(logger_name=__name__) class Response: '''The response of an HTTP request. .. py:attribute:: body :type: bytes .. 
py:attribute:: headers :type: Mapping[str, str] ''' def __init__( self, headers: Mapping[str, str], body: bytes, ) -> None: self.headers = headers self.body = body def json(self): '''Convert response content to JSON.''' return _json.loads(self.body.decode('utf-8')) class BaseSession: '''The base class for different HTTP backend.''' def setup( self, concurreny: int = 20, timeout: int = 20, ) -> None: pass async def head(self, *args, **kwargs): '''Shortcut for ``HEAD`` request.''' return await self.request( method='HEAD', *args, **kwargs) async def get(self, *args, **kwargs): '''Shortcut for ``GET`` request.''' return await self.request( method='GET', *args, **kwargs) async def post(self, *args, **kwargs): '''Shortcut for ``POST`` request.''' return await self.request( method='POST', *args, **kwargs) async def request( self, url: str, *, method: str, headers: Dict[str, str] = {}, follow_redirects: bool = True, params = (), json = None, body = None, ) -> Response: t = tries.get() p = proxy.get() ua = user_agent.get() httpt = httptoken.get() verify = verify_cert.get() headers = headers.copy() headers.setdefault('User-Agent', ua) if httpt is not None: headers.setdefault('Authorization', httpt) for i in range(1, t+1): try: return await self.request_impl( url, method = method, headers = headers, params = params, follow_redirects = follow_redirects, json = json, body = body, proxy = p or None, verify_cert = verify, ) except TemporaryError as e: if i == t: raise else: logger.warning('temporary error, retrying', tries = i, exc_info = e) continue raise Exception('should not reach') async def request_impl( self, url: str, *, method: str, proxy: Optional[str] = None, headers: Dict[str, str] = {}, follow_redirects: bool = True, params = (), json = None, body = None, verify_cert: bool = True, ) -> Response: ''':meta private:''' raise NotImplementedError class BaseHTTPError(Exception): def __init__(self, code, message, response): self.code = code self.message = message self.response = response class TemporaryError(BaseHTTPError): '''A temporary error (e.g. network error) happens.''' class HTTPError(BaseHTTPError): '''An HTTP 4xx error happens''' nvchecker-2.17/nvchecker/httpclient/httpx_httpclient.py000066400000000000000000000040721476544462000235370ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2020-2022,2024 lilydjwg , et al. from typing import Dict, Optional, Tuple import httpx from .base import BaseSession, TemporaryError, Response, HTTPError __all__ = ['session'] class HttpxSession(BaseSession): def setup( self, concurreny: int = 20, timeout: int = 20, ) -> None: self.clients: Dict[Tuple[Optional[str], bool], httpx.AsyncClient] = {} self.timeout = timeout async def request_impl( self, url: str, *, method: str, proxy: Optional[str] = None, headers: Dict[str, str] = {}, follow_redirects: bool = True, params = (), json = None, body = None, verify_cert: bool = True, ) -> Response: client = self.clients.get((proxy, verify_cert)) if not client: client = httpx.AsyncClient( timeout = httpx.Timeout(self.timeout, pool=None), http2 = True, proxy = proxy, verify = verify_cert, ) self.clients[(proxy, verify_cert)] = client try: if body is not None: # Make sure all backends have the same default encoding for post data. 
if 'Content-Type' not in headers: headers = {**headers, 'Content-Type': 'application/x-www-form-urlencoded'} body = body.encode() r = await client.request( method, url, json = json, content = body, headers = headers, follow_redirects = follow_redirects, # httpx checks for None but not () params = params or None, ) err_cls: Optional[type] = None if r.status_code >= 500: err_cls = TemporaryError elif r.status_code >= 400: err_cls = HTTPError if err_cls is not None: raise err_cls( r.status_code, r.reason_phrase, r, ) except httpx.TransportError as e: raise TemporaryError(599, repr(e), e) body = await r.aread() return Response(r.headers, body) async def aclose(self): for client in self.clients.values(): await client.aclose() del self.clients session = HttpxSession() nvchecker-2.17/nvchecker/httpclient/tornado_httpclient.py000066400000000000000000000047701476544462000240430ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2013-2020 lilydjwg , et al. import json as _json from urllib.parse import urlencode from typing import Optional, Dict, Any import os from tornado.httpclient import AsyncHTTPClient, HTTPRequest try: import pycurl except ImportError: pycurl = None # type: ignore from .base import BaseSession, TemporaryError, Response, HTTPError __all__ = ['session'] HTTP2_AVAILABLE = None if pycurl else False SSL_CERT_FILE = os.environ.get('SSL_CERT_FILE') def setup_curl(curl): global HTTP2_AVAILABLE if HTTP2_AVAILABLE is None: try: curl.setopt(pycurl.HTTP_VERSION, 4) HTTP2_AVAILABLE = True except pycurl.error: HTTP2_AVAILABLE = False elif HTTP2_AVAILABLE: curl.setopt(pycurl.HTTP_VERSION, 4) if SSL_CERT_FILE: curl.setopt_string(pycurl.CAINFO, SSL_CERT_FILE) curl.setopt_string(pycurl.ACCEPT_ENCODING, "") class TornadoSession(BaseSession): def setup( self, concurreny: int = 20, timeout: int = 20, ) -> None: impl: Optional[str] if pycurl: impl = "tornado.curl_httpclient.CurlAsyncHTTPClient" else: impl = None AsyncHTTPClient.configure( impl, max_clients = concurreny) self.timeout = timeout async def request_impl( self, url: str, *, method: str, proxy: Optional[str] = None, headers: Dict[str, str] = {}, follow_redirects: bool = True, params = (), json = None, body = None, verify_cert: bool = True, ) -> Response: kwargs: Dict[str, Any] = { 'method': method, 'headers': headers, 'request_timeout': self.timeout, 'follow_redirects': follow_redirects, 'validate_cert': verify_cert, } if body: # By default the content type is already 'application/x-www-form-urlencoded' kwargs['body'] = body elif json: kwargs['body'] = _json.dumps(json) kwargs['prepare_curl_callback'] = setup_curl if proxy: host, port = proxy.rsplit(':', 1) kwargs['proxy_host'] = host kwargs['proxy_port'] = int(port) if params: q = urlencode(params) url += '?' 
+ q r = HTTPRequest(url, **kwargs) res = await AsyncHTTPClient().fetch( r, raise_error=False) err_cls: Optional[type] = None if res.code >= 500: err_cls = TemporaryError elif res.code >= 400: err_cls = HTTPError if err_cls is not None: raise err_cls( res.code, res.reason, res ) return Response(res.headers, res.body) session = TornadoSession() nvchecker-2.17/nvchecker/lib/000077500000000000000000000000001476544462000161455ustar00rootroot00000000000000nvchecker-2.17/nvchecker/lib/README.md000066400000000000000000000002741476544462000174270ustar00rootroot00000000000000This directory contains code from other places: * `nicelogger.py`: from my [winterpy](https://github.com/lilydjwg/winterpy) * `packaging_version.py`: from python-packaging 20.9, modified nvchecker-2.17/nvchecker/lib/__init__.py000066400000000000000000000000001476544462000202440ustar00rootroot00000000000000nvchecker-2.17/nvchecker/lib/nicelogger.py000066400000000000000000000071061476544462000206410ustar00rootroot00000000000000# MIT licensed # Copyright (c) 2013-2017 lilydjwg , et al. ''' A Tornado-inspired logging formatter, with displayed time with millisecond accuracy FYI: pyftpdlib also has a Tornado-style logger. ''' import sys import time import logging class Colors: def __init__(self, color=None): if color is None: color = support_color() if color: import curses curses.setupterm() if sys.hexversion < 0x30203f0: fg_color = str(curses.tigetstr("setaf") or curses.tigetstr("setf") or "", "ascii") else: fg_color = curses.tigetstr("setaf") or curses.tigetstr("setf") or b"" self.blue = str(curses.tparm(fg_color, 4), "ascii") self.yellow = str(curses.tparm(fg_color, 3), "ascii") self.green = str(curses.tparm(fg_color, 2), "ascii") self.red = str(curses.tparm(fg_color, 1), "ascii") self.bright_red = str(curses.tparm(fg_color, 9), "ascii") self.normal = str(curses.tigetstr("sgr0"), "ascii") else: self.blue = self.yellow = self.green = self.red = self.bright_red = self.normal = "" class TornadoLogFormatter(logging.Formatter): def __init__(self, color, *args, **kwargs): super().__init__(*args, **kwargs) self._color = color if color: colors = Colors(color=color) self._colors = { logging.DEBUG: colors.blue, logging.INFO: colors.green, logging.WARNING: colors.yellow, logging.ERROR: colors.red, logging.CRITICAL: colors.bright_red, } self._normal = colors.normal def format(self, record): try: record.message = record.getMessage() except Exception as e: record.message = "Bad message (%r): %r" % (e, record.__dict__) record.asctime = time.strftime( "%m-%d %H:%M:%S", self.converter(record.created)) prefix = '[%(levelname)1.1s %(asctime)s.%(msecs)03d %(module)s:%(lineno)d]' % \ record.__dict__ if self._color: prefix = (self._colors.get(record.levelno, self._normal) + prefix + self._normal) formatted = prefix + " " + record.message formatted += ''.join( ' %s=%s' % (k, v) for k, v in record.__dict__.items() if k not in { 'levelname', 'asctime', 'module', 'lineno', 'args', 'message', 'filename', 'exc_info', 'exc_text', 'created', 'funcName', 'processName', 'process', 'msecs', 'relativeCreated', 'thread', 'threadName', 'name', 'levelno', 'msg', 'pathname', 'stack_info', 'taskName', }) if record.exc_info: if not record.exc_text: record.exc_text = self.formatException(record.exc_info) if record.exc_text: formatted = formatted.rstrip() + "\n" + record.exc_text return formatted.replace("\n", "\n ") def support_color(stream=sys.stderr): if stream.isatty(): try: import curses curses.setupterm() if curses.tigetnum("colors") > 0: return True except: import 
traceback traceback.print_exc() return False def enable_pretty_logging(level=logging.DEBUG, handler=None, color=None): ''' handler: specify a handler instead of default StreamHandler color: boolean, force color to be on / off. Default to be on only when ``handler`` isn't specified and the term supports color ''' logger = logging.getLogger() if handler is None: h = logging.StreamHandler() else: h = handler if color is None and handler is None: color = support_color() formatter = TornadoLogFormatter(color=color) h.setLevel(level) h.setFormatter(formatter) logger.setLevel(level) logger.addHandler(h) nvchecker-2.17/nvchecker/lib/packaging_version.py000066400000000000000000000417511476544462000222200ustar00rootroot00000000000000# This file comes from python-packaging 20.9 and is modified # This file is dual licensed under the terms of the Apache License, Version # 2.0, and the BSD License. from __future__ import annotations import collections import itertools import re from typing import TYPE_CHECKING class InfinityType(object): def __repr__(self): # type: () -> str return "Infinity" def __hash__(self): # type: () -> int return hash(repr(self)) def __lt__(self, other): # type: (object) -> bool return False def __le__(self, other): # type: (object) -> bool return False def __eq__(self, other): # type: (object) -> bool return isinstance(other, self.__class__) def __ne__(self, other): # type: (object) -> bool return not isinstance(other, self.__class__) def __gt__(self, other): # type: (object) -> bool return True def __ge__(self, other): # type: (object) -> bool return True def __neg__(self): # type: (object) -> NegativeInfinityType return NegativeInfinity Infinity = InfinityType() class NegativeInfinityType(object): def __repr__(self): # type: () -> str return "-Infinity" def __hash__(self): # type: () -> int return hash(repr(self)) def __lt__(self, other): # type: (object) -> bool return True def __le__(self, other): # type: (object) -> bool return True def __eq__(self, other): # type: (object) -> bool return isinstance(other, self.__class__) def __ne__(self, other): # type: (object) -> bool return not isinstance(other, self.__class__) def __gt__(self, other): # type: (object) -> bool return False def __ge__(self, other): # type: (object) -> bool return False def __neg__(self): # type: (object) -> InfinityType return Infinity NegativeInfinity = NegativeInfinityType() if TYPE_CHECKING: # pragma: no cover from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union InfiniteTypes = Union[InfinityType, NegativeInfinityType] PrePostDevType = Union[InfiniteTypes, Tuple[str, int]] SubLocalType = Union[InfiniteTypes, int, str] LocalType = Union[ NegativeInfinityType, Tuple[ Union[ SubLocalType, Tuple[SubLocalType, str], Tuple[NegativeInfinityType, SubLocalType], ], ..., ], ] CmpKey = Tuple[ int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType ] LegacyCmpKey = Tuple[int, Tuple[str, ...]] VersionComparisonMethod = Callable[ [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool ] __all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"] _Version = collections.namedtuple( "_Version", ["epoch", "release", "dev", "pre", "post", "local"] ) def parse(version): # type: (str) -> Union[LegacyVersion, Version] """ Parse the given version string and return either a :class:`Version` object or a :class:`LegacyVersion` object depending on if the given version is a valid PEP 440 version or a legacy version. 
""" try: return Version(version) except InvalidVersion: return LegacyVersion(version) class InvalidVersion(ValueError): """ An invalid version was found, users should refer to PEP 440. """ class _BaseVersion(object): _key = None # type: Union[CmpKey, LegacyCmpKey] def __hash__(self): # type: () -> int return hash(self._key) # Please keep the duplicated `isinstance` check # in the six comparisons hereunder # unless you find a way to avoid adding overhead function calls. def __lt__(self, other): # type: (_BaseVersion) -> bool if not isinstance(other, _BaseVersion): return NotImplemented return self._key < other._key def __le__(self, other): # type: (_BaseVersion) -> bool if not isinstance(other, _BaseVersion): return NotImplemented return self._key <= other._key def __eq__(self, other): # type: (object) -> bool if not isinstance(other, _BaseVersion): return NotImplemented return self._key == other._key def __ge__(self, other): # type: (_BaseVersion) -> bool if not isinstance(other, _BaseVersion): return NotImplemented return self._key >= other._key def __gt__(self, other): # type: (_BaseVersion) -> bool if not isinstance(other, _BaseVersion): return NotImplemented return self._key > other._key def __ne__(self, other): # type: (object) -> bool if not isinstance(other, _BaseVersion): return NotImplemented return self._key != other._key class LegacyVersion(_BaseVersion): def __init__(self, version): # type: (str) -> None self._version = str(version) self._key = _legacy_cmpkey(self._version) def __str__(self): # type: () -> str return self._version def __repr__(self): # type: () -> str return "".format(repr(str(self))) @property def public(self): # type: () -> str return self._version @property def base_version(self): # type: () -> str return self._version @property def epoch(self): # type: () -> int return -1 @property def release(self): # type: () -> None return None @property def pre(self): # type: () -> None return None @property def post(self): # type: () -> None return None @property def dev(self): # type: () -> None return None @property def local(self): # type: () -> None return None @property def is_prerelease(self): # type: () -> bool return False @property def is_postrelease(self): # type: () -> bool return False @property def is_devrelease(self): # type: () -> bool return False _legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE) _legacy_version_replacement_map = { "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@", } def _parse_version_parts(s): # type: (str) -> Iterator[str] for part in _legacy_version_component_re.split(s): part = _legacy_version_replacement_map.get(part, part) if not part or part == ".": continue if part[:1] in "0123456789": # pad for numeric comparison yield part.zfill(8) else: yield "*" + part # ensure that alpha/beta/candidate are before final yield "*final" def _legacy_cmpkey(version): # type: (str) -> LegacyCmpKey # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch # greater than or equal to 0. This will effectively put the LegacyVersion, # which uses the defacto standard originally implemented by setuptools, # as before all PEP 440 versions. epoch = -1 # This scheme is taken from pkg_resources.parse_version setuptools prior to # it's adoption of the packaging library. 
    parts = []  # type: List[str]
    for part in _parse_version_parts(version.lower()):
        if part.startswith("*"):
            # remove "-" before a prerelease tag
            if part < "*final":
                while parts and parts[-1] == "*final-":
                    parts.pop()

            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1] == "00000000":
                parts.pop()

        parts.append(part)

    return epoch, tuple(parts)


# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""


class Version(_BaseVersion):

    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)

    def __init__(self, version):
        # type: (str) -> None

        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion("Invalid version: '{0}'".format(version))

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            post=_parse_letter_version(
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self):
        # type: () -> str
        return "".format(repr(str(self)))

    def __str__(self):
        # type: () -> str
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append("{0}!".format(self.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        # Pre-release
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))

        # Post-release
        if self.post is not None:
            parts.append(".post{0}".format(self.post))

        # Development release
        if self.dev is not None:
            parts.append(".dev{0}".format(self.dev))

        # Local version segment
        if self.local is not None:
            parts.append("+{0}".format(self.local))

        return "".join(parts)

    @property
    def epoch(self):
        # type: () -> int
        _epoch = self._version.epoch  # type: int
        return _epoch

    @property
    def release(self):
        # type: () -> Tuple[int, ...]
        _release = self._version.release  # type: Tuple[int, ...]
        return _release

    @property
    def pre(self):
        # type: () -> Optional[Tuple[str, int]]
        _pre = self._version.pre  # type: Optional[Tuple[str, int]]
        return _pre

    @property
    def post(self):
        # type: () -> Optional[int]
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self):
        # type: () -> Optional[int]
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self):
        # type: () -> Optional[str]
        if self._version.local:
            return ".".join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self):
        # type: () -> str
        return str(self).split("+", 1)[0]

    @property
    def base_version(self):
        # type: () -> str
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append("{0}!".format(self.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        return "".join(parts)

    @property
    def is_prerelease(self):
        # type: () -> bool
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self):
        # type: () -> bool
        return self.post is not None

    @property
    def is_devrelease(self):
        # type: () -> bool
        return self.dev is not None

    @property
    def major(self):
        # type: () -> int
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self):
        # type: () -> int
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self):
        # type: () -> int
        return self.release[2] if len(self.release) >= 3 else 0
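
# Usage sketch (illustrative, doctest-style):
#   >>> v = Version("1!2.3.4rc1.post2.dev3+local.5")
#   >>> v.epoch, v.release, v.pre, v.post, v.dev, v.local
#   (1, (2, 3, 4), ('rc', 1), 2, 3, 'local.5')
#   >>> Version("1.0.dev0") < Version("1.0a0") < Version("1.0") < Version("1.0.post1")
#   True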


def _parse_letter_version(
    letter,  # type: str
    number,  # type: Union[str, bytes, SupportsInt]
):
    # type: (...) -> Optional[Tuple[str, int]]

    if letter:
        # We consider there to be an implicit 0 in a pre-release if there is
        # not a numeral associated with it.
        if number is None:
            number = 0

        # We normalize any letters to their lower case form
        letter = letter.lower()

        # We consider some words to be alternate spellings of other words and
        # in those cases we want to normalize the spellings to our preferred
        # spelling.
        if letter == "alpha":
            letter = "a"
        elif letter == "beta":
            letter = "b"
        elif letter in ["c", "pre", "preview"]:
            letter = "rc"
        elif letter in ["rev", "r"]:
            letter = "post"

        return letter, int(number)
    if not letter and number:
        # We assume if we are given a number, but we are not given a letter
        # then this is using the implicit post release syntax (e.g. 1.0-1)
        letter = "post"

        return letter, int(number)

    return None
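
# Normalization examples (illustrative): ("alpha", None) -> ("a", 0),
# ("preview", "4") -> ("rc", 4), and (None, "1") -> ("post", 1) for the
# implicit post-release spelling "1.0-1".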


_local_version_separators = re.compile(r"[\._-]")


def _parse_local_version(local):
    # type: (str) -> Optional[LocalType]
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is not None:
        return tuple(
            part.lower() if not part.isdigit() else int(part)
            for part in _local_version_separators.split(local)
        )
    return None


def _cmpkey(
    epoch,  # type: int
    release,  # type: Tuple[int, ...]
    pre,  # type: Optional[Tuple[str, int]]
    post,  # type: Optional[Tuple[str, int]]
    dev,  # type: Optional[Tuple[str, int]]
    local,  # type: Optional[Tuple[SubLocalType]]
):
    # type: (...) -> CmpKey

    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non zero, then take the rest
    # re-reverse it back into the correct order and make it a tuple and use
    # that for our sorting key.
    _release = tuple(
        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        _pre = NegativeInfinity  # type: PrePostDevType
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        _pre = Infinity
    else:
        _pre = pre

    # Versions without a post segment should sort before those with one.
    if post is None:
        _post = NegativeInfinity  # type: PrePostDevType

    else:
        _post = post

    # Versions without a development segment should sort after those with one.
    if dev is None:
        _dev = Infinity  # type: PrePostDevType

    else:
        _dev = dev

    if local is None:
        # Versions without a local segment should sort before those with one.
        _local = NegativeInfinity  # type: LocalType
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        _local = tuple(
            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
        )

    return epoch, _release, _pre, _post, _dev, _local
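
# Net effect (illustrative): the keys sort versions as PEP 440 requires,
# e.g. 1.0.dev0 < 1.0a0 < 1.0rc1 < 1.0 < 1.0+local < 1.0.post1 < 1.1.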
nvchecker-2.17/nvchecker/slogconf.py000066400000000000000000000071741476544462000175740ustar00rootroot00000000000000# vim: se sw=2:
# MIT licensed
# Copyright (c) 2018-2020,2023-2024 lilydjwg , et al.

import logging
import os
import io
import traceback
import sys

import structlog

from .httpclient import TemporaryError

def _console_msg(event):
  evt = event['event']
  if evt == 'up-to-date':
    msg = 'up-to-date, version %s' % event['version']
    del event['version']
  elif evt == 'updated':
    if event.get('old_version'):
      msg = 'updated from %(old_version)s to %(version)s' % event
    else:
      msg = 'updated to %(version)s' % event
    del event['version'], event['old_version']
  else:
    msg = evt

  if 'revision' in event and not event['revision']:
    del event['revision']

  if 'name' in event:
    msg = f"{event['name']}: {msg}"
    del event['name']

  event['msg'] = msg

  return event
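
# Illustrative transformation: an event dict like
#   {'event': 'updated', 'name': 'foo', 'old_version': '1.0', 'version': '1.1'}
# comes out with event['msg'] == 'foo: updated from 1.0 to 1.1'.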

def exc_info(logger, level, event):
  if level == 'exception':
    event['exc_info'] = True
  return event

def filter_nones(logger, level, event):
  if 'url' in event and event['url'] is None:
    del event['url']
  return event

def filter_taskname(logger, level, event):
  # added in Python 3.12, not useful to us, but appears as a normal KV.
  if 'taskName' in event:
    del event['taskName']
  return event

def filter_exc(logger, level, event):
  exc_info = event.get('exc_info')
  if not exc_info:
    return event

  if exc_info is True:
    exc = sys.exc_info()[1]
  else:
    exc = exc_info

  if isinstance(exc, TemporaryError):
    if exc.code == 599: # network issues
      del event['exc_info']
  event['error'] = exc
  return event

def stdlib_renderer(logger, level, event):
  # return event unchanged for further processing
  std_event = _console_msg(event.copy())
  try:
    logger = logging.getLogger(std_event.pop('logger_name'))
  except KeyError:
    logger = logging.getLogger()
  msg = std_event.pop('msg', std_event.pop('event'))
  exc_info = std_event.pop('exc_info', None)
  if 'error' in std_event:
    std_event['error'] = repr(std_event['error'])
  getattr(logger, level)(
    msg, exc_info = exc_info, extra=std_event,
  )
  return event

_renderer = structlog.processors.JSONRenderer(ensure_ascii=False)
def json_renderer(logger, level, event):
  event['level'] = level
  return _renderer(logger, level, event)

def null_renderer(logger, level, event):
  return ''

class _Logger(logging.Logger):
  _my_srcfile = os.path.normcase(
    stdlib_renderer.__code__.co_filename)

  _structlog_dir = os.path.dirname(structlog.__file__)

  def findCaller(self, stack_info=False, stacklevel=1):
    """
    Find the stack frame of the caller so that we can note the source
    file name, line number and function name.
    """
    f = logging.currentframe()
    #On some versions of IronPython, currentframe() returns None if
    #IronPython isn't run with -X:Frames.
    if f is not None:
      f = f.f_back
    orig_f = f
    while f and stacklevel > 1:
      f = f.f_back
      stacklevel -= 1
    if not f:
      f = orig_f
    rv = "(unknown file)", 0, "(unknown function)", None
    while hasattr(f, "f_code"):
      co = f.f_code
      filename = os.path.normcase(co.co_filename)
      if filename in [logging._srcfile, self._my_srcfile] \
         or filename.startswith(self._structlog_dir):
        f = f.f_back
        continue
      sinfo = None
      if stack_info:
        sio = io.StringIO()
        sio.write('Stack (most recent call last):\n')
        traceback.print_stack(f, file=sio)
        sinfo = sio.getvalue()
        if sinfo[-1] == '\n':
          sinfo = sinfo[:-1]
        sio.close()
      rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
      break
    return rv

def fix_logging():
  logging.setLoggerClass(_Logger)

nvchecker-2.17/nvchecker/sortversion.py000066400000000000000000000016111476544462000203450ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2021 lilydjwg , et al.

'''
Sort versions using the deprecated pkg_resources / packaging parse_version, pyalpm.vercmp, or awesomeversion
'''

__all__ = ["sort_version_keys"]

from .lib.packaging_version import parse as parse_version

try:
  import pyalpm
  from functools import cmp_to_key
  vercmp = cmp_to_key(pyalpm.vercmp)
  vercmp_available = True
except ImportError:
  def vercmp(k):
    raise NotImplementedError("Using vercmp but pyalpm can not be imported!")
  vercmp_available = False

try:
  from awesomeversion import AwesomeVersion
  awesomeversion_available = True
except ImportError:
  def AwesomeVersion(k): # type: ignore
    raise NotImplementedError("Using awesomeversion but it can not be imported!")
  awesomeversion_available = False

sort_version_keys = {
  "parse_version": parse_version,
  "vercmp": vercmp,
  "awesomeversion": AwesomeVersion,
}
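
# Behaviour sketch (illustrative): the three keys can disagree, e.g.
# parse_version("1.10") > parse_version("1.9") because release segments
# compare numerically, while pyalpm's vercmp additionally understands
# pkgrel-style suffixes such as "1.0-2" > "1.0-1" (when pyalpm is available).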
nvchecker-2.17/nvchecker/tools.py000066400000000000000000000135511476544462000171160ustar00rootroot00000000000000# vim: se sw=2:
# MIT licensed
# Copyright (c) 2013-2024 lilydjwg , et al.

import sys
import argparse
import shutil
import structlog
import json
import os.path

from . import core
from .util import RichResult

logger = structlog.get_logger(logger_name=__name__)

def take() -> None:
  parser = argparse.ArgumentParser(description='update version records of nvchecker')
  core.add_common_arguments(parser)
  parser.add_argument('--all', action='store_true',
                      help='take all updates')
  parser.add_argument('--ignore-nonexistent', action='store_true',
                      help='ignore nonexistent names')
  parser.add_argument('names', metavar='NAME', nargs='*',
                      help='software name to be updated. use NAME=VERSION to update '
                           'to a specific version instead of the new version.')
  args = parser.parse_args()
  if core.process_common_arguments(args):
    return

  opt = core.load_file(args.file, use_keymanager=False)[1]
  if opt.ver_files is None:
    logger.critical(
      "doesn't have 'oldver' and 'newver' set.",
      source=args.file,
    )
    sys.exit(2)
  else:
    oldverf = opt.ver_files[0]
    newverf = opt.ver_files[1]

  oldvers = core.read_verfile(oldverf)
  newvers = core.read_verfile(newverf)

  if args.all:
    oldvers.update(newvers)
  else:
    name: str
    for name in args.names:
      if "=" in name:
        name, newver = name.split("=")
        oldvers[name] = RichResult(version=newver)
      else:
        try:
          oldvers[name] = newvers[name]
        except KeyError:
          if args.ignore_nonexistent:
            logger.warning('nonexistent in newver, ignored', name=name)
            continue

          logger.critical(
            "doesn't exist in 'newver' set.", name=name,
          )
          sys.exit(2)

  try:
    if os.path.islink(oldverf):
      shutil.copy(oldverf, oldverf.with_name(oldverf.name + '~'))
    else:
      oldverf.rename(
        oldverf.with_name(oldverf.name + '~'),
      )
  except FileNotFoundError:
    pass
  core.write_verfile(oldverf, oldvers)
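
# Example invocations (a sketch; `nvtake` is the console entry point for
# take()):
#   nvtake -c nvchecker.toml foo          # accept foo's new version
#   nvtake -c nvchecker.toml bar=1.2.3    # record a specific version
#   nvtake -c nvchecker.toml --all        # accept all updates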

def cmp() -> None:
  parser = argparse.ArgumentParser(description='compare version records of nvchecker')
  core.add_common_arguments(parser)
  parser.add_argument('-j', '--json', action='store_true',
                      help='Output JSON array of dictionaries with {name, newver, oldver, [delta]} '
                           '(or array of names if --quiet)')
  parser.add_argument('-q', '--quiet', action='store_true',
                      help="Quiet mode, output only the names.")
  parser.add_argument('-a', '--all', action='store_true',
                      help="Include unchanged versions.")
  parser.add_argument('-s', '--sort',
                      choices=('parse_version', 'vercmp', 'awesomeversion', 'none'),
                      default='parse_version',
                      help='Version comparison method used to decide the direction of the arrow '
                           '(default: parse_version)')
  parser.add_argument('-n', '--newer', action='store_true',
                      help='Shows only the newer ones according to --sort.')
  parser.add_argument('--exit-status', action='store_true',
                      help="exit with status 4 if there are updates")
  args = parser.parse_args()
  if core.process_common_arguments(args):
    return

  opt = core.load_file(args.file, use_keymanager=False)[1]
  if opt.ver_files is None:
    logger.critical(
      "doesn't have 'oldver' and 'newver' set.",
      source=args.file,
    )
    sys.exit(2)
  else:
    oldverf = opt.ver_files[0]
    newverf = opt.ver_files[1]

  oldvers = {k: v.version for k, v in core.read_verfile(oldverf).items()}
  newvers = {k: v.version for k, v in core.read_verfile(newverf).items()}

  differences = []

  for name, newver in sorted(newvers.items()):  # accumulate differences
    oldver = oldvers.get(name, None)

    diff = {
      'name': name,
      'oldver': oldver,
      'newver': newver
    }

    if oldver is not None and newver is not None:
      if oldver == newver:
        diff['delta'] = 'equal'

      elif args.sort == "none":
        diff['delta'] = 'new'  # assume it's a new version if we're not comparing

      else:
        from .sortversion import sort_version_keys
        version = sort_version_keys[args.sort]

        if version(oldver) > version(newver): # type: ignore
          if args.newer:
            continue  # don't store this diff
          diff['delta'] = 'old'
        else:
          diff['delta'] = 'new'

    elif oldver is None:
      diff['delta'] = 'added'

    elif newver is None:
      if args.newer:
        continue  # don't store this diff
      diff['delta'] = 'gone'

    if args.all or diff['delta'] != 'equal':
      differences.append(diff)

  if args.json:
    if args.quiet:
      print(json.dumps([diff['name'] for diff in differences], separators=(',', ':')))
    else:
      print(json.dumps(differences, sort_keys=True, separators=(',', ':')))

  elif args.quiet:
    for diff in differences:
      print(diff['name'])

  else:
    from .lib.nicelogger import Colors, support_color
    c = Colors(support_color(sys.stdout))

    diffstyles = {
      'new': {
        'symbol': '->',
        'oldc': c.red
      },
      'old': {
        'symbol': f'{c.red}<-{c.normal}',
        'oldc': c.red
      },
      'added': {
        'symbol': '++',
        'oldc': c.red
      },
      'gone': {
        'symbol': f'{c.red}--{c.normal}',
        'oldc': c.green
      },
      'equal': {
        'symbol': '==',
        'oldc': c.green
      }
    }

    for diff in differences:
      style = diffstyles[diff.get('delta', 'equal')] # type: ignore # mypy has issues with this line

      print(f'{diff["name"]} {style["oldc"]}{diff["oldver"]}{c.normal} {style["symbol"]} {c.green}{diff["newver"]}{c.normal}')

  if args.exit_status and any(
    diff.get('delta') != 'equal' for diff in differences
  ):
    sys.exit(4)
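
# Output sketch (illustrative): `nvcmp -c nvchecker.toml` prints lines like
#   foo 1.0 -> 1.1
# and with --json emits an array such as
#   [{"delta":"new","name":"foo","newver":"1.1","oldver":"1.0"}]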
nvchecker-2.17/nvchecker/util.py000066400000000000000000000216621476544462000167350ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.

from __future__ import annotations

import sys
import asyncio
from asyncio import Queue
from typing import (
  Dict, Optional, List, NamedTuple, Union,
  Any, Tuple, Callable, Coroutine, Hashable,
  TYPE_CHECKING,
)
from pathlib import Path
import contextvars
import abc
import netrc
from dataclasses import dataclass

if TYPE_CHECKING:
  import tomli as tomllib
else:
  try:
    import tomllib
  except ModuleNotFoundError:
    import tomli as tomllib

import structlog

from .httpclient import session
from .ctxvars import tries as ctx_tries
from .ctxvars import proxy as ctx_proxy
from .ctxvars import user_agent as ctx_ua
from .ctxvars import httptoken as ctx_httpt
from .ctxvars import verify_cert as ctx_verify_cert

logger = structlog.get_logger(logger_name=__name__)

Entry = Dict[str, Any]
Entry.__doc__ = '''The configuration `dict` for an entry.'''
Entries = Dict[str, Entry]

if sys.version_info[:2] >= (3, 11):
  from typing import LiteralString
else:
  LiteralString = str

if sys.version_info[:2] >= (3, 10):
  @dataclass(kw_only=True)
  class RichResult:
    version: str
    gitref: Optional[str] = None
    revision: Optional[str] = None
    url: Optional[str] = None

    def __str__(self):
      return self.version
else:
  @dataclass
  class RichResult:
    version: str
    gitref: Optional[str] = None
    revision: Optional[str] = None
    url: Optional[str] = None

    def __str__(self):
      return self.version

VersionResult = Union[None, str, RichResult, List[Union[str, RichResult]], Exception]
VersionResult.__doc__ = '''The result of a `get_version` check.

* `None` - No version found.
* `str` - A single version string is found.
* `RichResult` - A version string with additional information.
* `List[Union[str, RichResult]]` - Multiple version strings with or without additional information are found. :ref:`list options` will be applied.
* `Exception` - An error occurred.
'''
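
# Illustrative returns from a plugin's get_version (a sketch, values invented):
#   return '1.2.3'
#   return RichResult(version='1.2.3', url='https://example.com/releases/1.2.3')
#   return ['1.2.3', '1.3.0rc1']  # list options are applied afterwards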

class FileLoadError(Exception):
  def __init__(self, kind, filename, exc):
    self.kind = kind
    self.filename = filename
    self.exc = exc

  def __str__(self):
    return f'failed to load {self.kind} {self.filename!r}: {self.exc}'

class KeyManager:
  '''Manages data in the keyfile.'''
  def __init__(
    self, file: Optional[Path],
  ) -> None:
    if file is not None:
      try:
        with file.open('rb') as f:
          keys = tomllib.load(f)['keys']
      except (OSError, tomllib.TOMLDecodeError) as e:
        raise FileLoadError('keyfile', str(file), e)
    else:
      keys = {}
    self.keys = keys
    try:
      netrc_file = netrc.netrc()
      netrc_hosts = netrc_file.hosts
    except (FileNotFoundError, netrc.NetrcParseError):
      netrc_hosts = {}
    self.netrc = netrc_hosts

  def get_key(self, name: str, legacy_name: Optional[str] = None) -> Optional[str]:
    '''Get the named key (token) in the keyfile.'''
    keyfile_token = self.keys.get(name) or self.keys.get(legacy_name)
    netrc_passwd = (e := self.netrc.get(name)) and e[2]
    return keyfile_token or netrc_passwd
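
# Lookup sketch (illustrative): given a keyfile containing
#   [keys]
#   github = "ghp_..."
# get_key('github') returns that token; if the keyfile has no entry, the
# password of a matching ~/.netrc machine entry is used as a fallback.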

class EntryWaiter:
  def __init__(self) -> None:
    self._waiting: Dict[str, asyncio.Future] = {}

  async def wait(self, name: str) -> str:
    '''Wait on the ``name`` entry and return its result (the version string)'''
    fu = self._waiting.get(name)
    if fu is None:
      fu = asyncio.Future()
      self._waiting[name] = fu
    return await fu

  def set_result(self, name: str, value: str) -> None:
    fu = self._waiting.get(name)
    if fu is not None:
      fu.set_result(value)

  def set_exception(self, name: str, e: Exception) -> None:
    fu = self._waiting.get(name)
    if fu is not None:
      fu.set_exception(e)
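
# Illustrative flow (hypothetical entry name): a waiter task must start
# first, because set_result only resolves futures that are already waited on.
#
#   task = asyncio.create_task(waiter.wait('entry-a'))
#   await asyncio.sleep(0)                # let wait() register its future
#   waiter.set_result('entry-a', '1.0')
#   assert await task == '1.0'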

class RawResult(NamedTuple):
  '''The unprocessed result from a check.'''
  name: str
  version: VersionResult
  conf: Entry

RawResult.name.__doc__ = 'The name (table name) of the entry.'
RawResult.version.__doc__ = 'The result from the check.'
RawResult.conf.__doc__ = 'The entry configuration (table content) of the entry.'

ResultData = Dict[str, RichResult]

class BaseWorker:
  '''The base class for defining `Worker` classes for source plugins.

  .. py:attribute:: task_sem
      :type: asyncio.Semaphore

      This is the rate-limiting semaphore. Workers should acquire it while doing one unit of work.

  .. py:attribute:: result_q
      :type: Queue[RawResult]

      Results should be put into this queue.

  .. py:attribute:: tasks
      :type: List[Tuple[str, Entry]]

      A list of tasks for the `Worker` to complete. Every task consists of
      a tuple for the task name (table name in the configuration file) and the
      content of that table (as a `dict`).

  .. py:attribute:: keymanager
      :type: KeyManager

      The `KeyManager` for retrieving keys from the keyfile.
  '''
  def __init__(
    self,
    task_sem: asyncio.Semaphore,
    result_q: Queue[RawResult],
    tasks: List[Tuple[str, Entry]],
    keymanager: KeyManager,
  ) -> None:
    self.task_sem = task_sem
    self.result_q = result_q
    self.keymanager = keymanager
    self.tasks = tasks

  @abc.abstractmethod
  async def run(self) -> None:
    '''Run the `tasks`. Subclasses should implement this method.'''
    raise NotImplementedError

  async def _run_maynot_raise(self) -> None:
    try:
      await self.run()
    except Exception:
      # don't let an exception tear down the whole process
      logger.exception('exception raised by Worker.run')

class AsyncCache:
  '''A cache for use with async functions.'''
  cache: Dict[Hashable, Any]
  lock: asyncio.Lock

  def __init__(self) -> None:
    self.cache = {}
    self.lock = asyncio.Lock()

  async def _get_json(
    self, key: Tuple[str, str, Tuple[Tuple[str, str], ...]],
  ) -> Any:
    _, url, headers = key
    res = await session.get(url, headers=dict(headers))
    return res.json()

  async def get_json(
    self, url: str, *,
    headers: Dict[str, str] = {},
  ) -> Any:
    '''Get specified ``url`` and return the response content as JSON.

    The returned data will be cached for reuse.
    '''
    key = '_jsonurl', url, tuple(sorted(headers.items()))
    return await self.get(
      key, self._get_json) # type: ignore

  async def get(
    self,
    key: Hashable,
    func: Callable[[Hashable], Coroutine[Any, Any, Any]],
  ) -> Any:
    '''Run async ``func`` and cache its return value by ``key``.

    The ``key`` should be hashable, and the function will be called with it as
    its sole argument. For multiple simultaneous calls with the same key, only
    one will actually be called, and others will wait and return the same
    (cached) value.
    '''
    async with self.lock:
      cached = self.cache.get(key)
      if cached is None:
        coro = func(key)
        fu = asyncio.create_task(coro)
        self.cache[key] = fu

    if asyncio.isfuture(cached): # pending
      return await cached
    elif cached is not None: # cached
      return cached
    else: # not cached
      r = await fu
      self.cache[key] = r
      return r
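
  # Usage sketch (hypothetical `fetch`): concurrent callers sharing a key
  # also share a single underlying call.
  #
  #   async def fetch(key):
  #     ...  # expensive I/O
  #
  #   r1, r2 = await asyncio.gather(
  #     cache.get('k', fetch),
  #     cache.get('k', fetch),
  #   )
  #   # fetch() ran once; r1 and r2 are the same cached value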

if TYPE_CHECKING:
  from typing_extensions import Protocol
  class GetVersionFunc(Protocol):
    async def __call__(
      self,
      name: str, conf: Entry,
      *,
      cache: AsyncCache,
      keymanager: KeyManager,
    ) -> VersionResult:
      ...
else:
  GetVersionFunc = Any

class FunctionWorker(BaseWorker):
  func: GetVersionFunc
  cache: AsyncCache

  def initialize(self, func: GetVersionFunc) -> None:
    self.func = func
    self.cache = AsyncCache()

  async def run(self) -> None:
    futures = []
    for name, entry in self.tasks:
      ctx = contextvars.copy_context()
      fu = ctx.run(self.run_one, name, entry)
      futures.append(fu)

    for fu2 in asyncio.as_completed(futures):
      await fu2

  async def run_one(
    self, name: str, entry: Entry,
  ) -> None:
    assert self.func is not None

    tries = entry.get('tries', None)
    if tries is not None:
      ctx_tries.set(tries)
    proxy = entry.get('proxy', None)
    if proxy is not None:
      ctx_proxy.set(proxy)
    ua = entry.get('user_agent', None)
    if ua is not None:
      ctx_ua.set(ua)
    httpt = entry.get('httptoken', None)
    if httpt is None:
      httpt = self.keymanager.get_key('httptoken_'+name)
    if httpt is not None:
      ctx_httpt.set(httpt)
    verify_cert = entry.get('verify_cert', None)
    if verify_cert is not None:
      ctx_verify_cert.set(verify_cert)

    try:
      async with self.task_sem:
        version = await self.func(
          name, entry,
          cache = self.cache,
          keymanager = self.keymanager,
        )
      await self.result_q.put(RawResult(name, version, entry))
    except Exception as e:
      await self.result_q.put(RawResult(name, e, entry))

class GetVersionError(Exception):
  '''An error occurred while getting version information.

  Raise this when a known bad situation happens.

  :param msg: The error message.
  :param kwargs: Arbitrary additional context for the error.
  '''
  def __init__(self, msg: LiteralString, **kwargs: Any) -> None:
    self.msg = msg
    self.kwargs = kwargs
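
# Raising with structured context (illustrative):
#
#   raise GetVersionError('package not found', pkg=pkgname)
#
# The keyword arguments are stored on the exception (`kwargs`) so reporting
# code can log them as structured fields.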
nvchecker-2.17/nvchecker_source/000077500000000000000000000000001476544462000167575ustar00rootroot00000000000000nvchecker-2.17/nvchecker_source/alpm.py000066400000000000000000000022701476544462000202630ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020-2021 DDoSolitary , et al.

from nvchecker.api import GetVersionError
from pyalpm import Handle


async def open_db(info):
  dbpath, repo = info
  handle = Handle('/', dbpath)
  db = handle.register_syncdb(repo, 0)
  return handle, db


async def get_version(name, conf, *, cache, **kwargs):
  pkgname = conf.get('alpm', name)
  dbpath = conf.get('dbpath', '/var/lib/pacman')
  strip_release = conf.get('strip_release', False)
  provided = conf.get('provided')

  repo = conf.get('repo')
  if repo is None:
    repos = conf.get('repos') or ['core', 'extra', 'multilib']
  else:
    repos = [repo]

  for repo in repos:
    db = (await cache.get((dbpath, repo), open_db))[1]
    pkg = db.get_pkg(pkgname)
    if pkg is not None:
      break

  if pkg is None:
    raise GetVersionError('package not found in the ALPM database')
  if provided is None:
    version = pkg.version
  else:
    provides = dict(x.split('=', 1) for x in pkg.provides if '=' in x)
    version = provides.get(provided)
    if version is None:
      raise GetVersionError('provides element not found')
  if strip_release:
    version = version.split('-', 1)[0]
  return version
nvchecker-2.17/nvchecker_source/alpmfiles.py000066400000000000000000000027711476544462000213140ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2023 Pekka Ristola , et al.

from asyncio import create_subprocess_exec
from asyncio.subprocess import PIPE
import re
from typing import Tuple, List

from nvchecker.api import GetVersionError

async def get_files(info: Tuple[str, str]) -> List[str]:
  dbpath, pkg = info
  # there's no pyalpm bindings for the file databases
  cmd = ['pacman', '-Flq', '--dbpath', dbpath, pkg]

  p = await create_subprocess_exec(*cmd, stdout = PIPE, stderr = PIPE)
  stdout, stderr = await p.communicate()

  if p.returncode == 0:
    return stdout.decode().splitlines()
  else:
    raise GetVersionError(
      'pacman failed to get file list',
      pkg = pkg,
      cmd = cmd,
      stdout = stdout.decode(errors='replace'),
      stderr = stderr.decode(errors='replace'),
      returncode = p.returncode,
    )

async def get_version(name, conf, *, cache, **kwargs):
  pkg = conf['pkgname']
  repo = conf.get('repo')
  if repo is not None:
    pkg = f'{repo}/{pkg}'
  dbpath = conf.get('dbpath', '/var/lib/pacman')
  regex = re.compile(conf['filename'])
  if regex.groups > 1:
    raise GetVersionError('multi-group regex')
  strip_dir = conf.get('strip_dir', False)

  files = await cache.get((dbpath, pkg), get_files)

  for f in files:
    fn = f.rsplit('/', 1)[-1] if strip_dir else f
    match = regex.fullmatch(fn)
    if match:
      groups = match.groups()
      return groups[0] if len(groups) > 0 else fn

  raise GetVersionError('no file matches specified regex')
nvchecker-2.17/nvchecker_source/android_sdk.py000066400000000000000000000042041476544462000216120ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017,2020 Chih-Hsuan Yen 

import os
import re
from xml.etree import ElementTree

from nvchecker.api import session

_ANDROID_REPO_MANIFESTS = {
  'addon': 'https://dl.google.com/android/repository/addon2-1.xml',
  'package': 'https://dl.google.com/android/repository/repository2-1.xml',
}

# See <channel> tags in Android SDK XML manifests
_CHANNEL_MAP = {
  'stable': 'channel-0',
  'beta': 'channel-1',
  'dev': 'channel-2',
  'canary': 'channel-3',
}
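
# e.g. `channel = "stable,beta"` in an entry selects channel-0 and channel-1.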

async def _get_repo_manifest(repo):
  repo_xml_url = _ANDROID_REPO_MANIFESTS[repo]

  res = await session.get(repo_xml_url)
  data = res.body.decode('utf-8')

  repo_manifest = ElementTree.fromstring(data)
  return repo_manifest

async def get_version(name, conf, *, cache, **kwargs):
  repo = conf['repo']
  pkg_path_prefix = conf['android_sdk']
  channels = [_CHANNEL_MAP[channel]
              for channel in conf.get('channel', 'stable').split(',')]

  repo_manifest = await cache.get(repo, _get_repo_manifest)

  versions = []

  for pkg in repo_manifest.findall('.//remotePackage'):
    if not pkg.attrib['path'].startswith(pkg_path_prefix):
      continue
    channelRef = pkg.find('./channelRef')
    if channelRef.attrib['ref'] not in channels:
      continue
    for archive in pkg.findall('./archives/archive'):
      host_os = archive.find('./host-os')
      if host_os is not None and host_os.text != conf.get('host_os', 'linux'):
        continue
      archive_url = archive.find('./complete/url').text
      # revision
      rev = pkg.find('./revision')
      rev_strs = []
      for part in ('major', 'minor', 'micro'):
        part_node = rev.find('./' + part)
        if part_node is not None:
          rev_strs.append(part_node.text)
      # release number
      filename, ext = os.path.splitext(archive_url)
      rel_str = filename.rsplit('-')[-1]
      mobj = re.match(r'r\d+', rel_str)
      if mobj:
        rev_strs.append(rel_str)
      versions.append('.'.join(rev_strs))
      # A package suitable for the target host OS is found - skip remaining
      break

  return versions
nvchecker-2.17/nvchecker_source/anitya.py000066400000000000000000000007601476544462000206210ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2017-2020 lilydjwg , et al.

from nvchecker.api import RichResult

URL = 'https://release-monitoring.org/api/project/{pkg}'

async def get_version(name, conf, *, cache, **kwargs):
  pkg = conf.get('anitya_id')
  if pkg is None:
    pkg = conf.get('anitya')
  url = URL.format(pkg = pkg)
  data = await cache.get_json(url)
  return RichResult(
    version = data['version'],
    url = f'https://release-monitoring.org/project/{data["id"]}/',
  )
nvchecker-2.17/nvchecker_source/apt.py000066400000000000000000000123451476544462000201220ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.

from __future__ import annotations

import re
import asyncio
from typing import Dict, Tuple
import itertools
import functools
from collections import defaultdict

from nvchecker.api import (
  session, GetVersionError, VersionResult,
  RichResult, Entry, AsyncCache, KeyManager,
)

APT_RELEASE_URL = "%s/dists/%s/Release"
APT_PACKAGES_PATH = "%s/binary-%s/Packages%s"
APT_PACKAGES_URL = "%s/dists/%s/%s"
APT_PACKAGES_SUFFIX_PREFER = (".xz", ".gz", "")

DpkgVersion = Tuple[int, str, str]

def parse_version(s: str) -> DpkgVersion:
  try:
    epoch_str, rest = s.split(':', 1)
  except ValueError:
    epoch = 0
    rest = s
  else:
    epoch = int(epoch_str)

  try:
    ver, rev = rest.split('-', 1)
  except ValueError:
    ver = rest
    rev = ''

  return epoch, ver, rev
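
# Illustrative splits into (epoch, upstream, revision):
#   parse_version('1.2-3')  -> (0, '1.2', '3')
#   parse_version('1:2.0')  -> (1, '2.0', '')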

def _compare_part(a: str, b: str) -> int:
  sa = re.split(r'(\d+)', a)
  sb = re.split(r'(\d+)', b)
  for idx, (pa, pb) in enumerate(itertools.zip_longest(sa, sb)):
    if pa is None:
      return -1
    elif pb is None:
      return 1

    if idx % 2 == 1:
      ret = int(pa) - int(pb)
      if ret != 0:
        return ret
    else:
      if pa < pb:
        return -1
      elif pa > pb:
        return 1

  return 0

def compare_version_parsed(a: DpkgVersion, b: DpkgVersion) -> int:
  ret = a[0] - b[0]
  if ret != 0:
    return ret
  ret = _compare_part(a[1], b[1])
  if ret != 0:
    return ret
  return _compare_part(a[2], b[2])

def compare_version(a: str, b: str) -> int:
  va = parse_version(a)
  vb = parse_version(b)
  return compare_version_parsed(va, vb)
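
# Digit runs compare numerically, the rest lexically (illustrative):
#   compare_version('1.10', '1.9')  > 0
#   compare_version('1:0.1', '2.0') > 0   # a nonzero epoch dominates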

def _decompress_data(url: str, data: bytes) -> str:
  if url.endswith(".xz"):
    import lzma
    data = lzma.decompress(data)
  elif url.endswith(".gz"):
    import gzip
    data = gzip.decompress(data)

  return data.decode('utf-8')

async def get_url(url: str) -> str:
  res = await session.get(url)
  data = res.body
  loop = asyncio.get_running_loop()
  return await loop.run_in_executor(
    None, _decompress_data,
    url, data)

async def parse_packages(key: Tuple[AsyncCache, str]) -> Tuple[Dict[str, str], Dict[str, str], Dict[str, str]]:
  cache, url = key
  apt_packages = await cache.get(url, get_url) # type: ignore

  pkg_map = defaultdict(list)
  srcpkg_map = defaultdict(list)
  pkg_to_src_map = defaultdict(list)

  pkg = None
  srcpkg = None
  for line in apt_packages.split('\n'):
    if line.startswith("Package: "):
      pkg = line[9:]
    elif line.startswith("Source: "):
      srcpkg = line[8:]
    elif line.startswith("Version: "):
      version = line[9:]
      if pkg is not None:
        pkg_map[pkg].append(version)
        pkg_to_src_map["%s/%s" % (pkg, version)] = srcpkg if srcpkg is not None else pkg
      if srcpkg is not None:
        srcpkg_map[srcpkg].append(version)
      pkg = srcpkg = None

  pkg_map_max = {pkg: max(vs, key=functools.cmp_to_key(compare_version))
                 for pkg, vs in pkg_map.items()}
  srcpkg_map_max = {pkg: max(vs, key=functools.cmp_to_key(compare_version))
                 for pkg, vs in srcpkg_map.items()}
  pkg_to_src_map_max = {pkg: pkg_to_src_map["%s/%s" % (pkg, vs)]
                 for pkg, vs in pkg_map_max.items()}

  return pkg_map_max, srcpkg_map_max, pkg_to_src_map_max

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  srcpkg = conf.get('srcpkg')
  pkg = conf.get('pkg')
  mirror = conf['mirror']
  suite = conf['suite']
  repo = conf.get('repo', 'main')
  arch = conf.get('arch', 'amd64')
  strip_release = conf.get('strip_release', False)

  if srcpkg and pkg:
    raise GetVersionError('Setting both srcpkg and pkg is ambiguous')
  elif not srcpkg and not pkg:
    pkg = name

  apt_release = await cache.get(
    APT_RELEASE_URL % (mirror, suite), get_url) # type: ignore
  for suffix in APT_PACKAGES_SUFFIX_PREFER:
    packages_path = APT_PACKAGES_PATH % (repo, arch, suffix)
    if " " + packages_path in apt_release:
      break
  else:
    raise GetVersionError('Packages file not found in APT repository')

  pkg_map, srcpkg_map, pkg_to_src_map = await cache.get(
    (cache, APT_PACKAGES_URL % (mirror, suite, packages_path)), parse_packages) # type: ignore

  if pkg and pkg in pkg_map:
    version = pkg_map[pkg]
    changelog_name = pkg_to_src_map[pkg]
  elif srcpkg and srcpkg in srcpkg_map:
    version = srcpkg_map[srcpkg]
    changelog_name = srcpkg
  else:
    raise GetVersionError('package not found in APT repository')

  # Get Changelogs field from the Release file
  changelogs_url = None
  for line in apt_release.split('\n'):
    if line.startswith('Changelogs: '):
      changelogs_url = line[12:]
      break

  # Build the changelog URL (see https://wiki.debian.org/DebianRepository/Format#Changelogs for spec)
  changelog = None
  if changelogs_url is not None and changelogs_url != 'no':
    changelog_section = changelog_name[:4] if changelog_name.startswith('lib') else changelog_name[:1]
    changelog = changelogs_url.replace('@CHANGEPATH@', f'{repo}/{changelog_section}/{changelog_name}/{changelog_name}_{version}')

  if strip_release:
    version = version.split("-")[0]

  if changelog is not None:
    return RichResult(
      version = version,
      url = changelog,
    )
  else:
    return version
nvchecker-2.17/nvchecker_source/archpkg.py000066400000000000000000000020751476544462000207540ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

from nvchecker.api import session, RichResult, GetVersionError

URL = 'https://archlinux.org/packages/search/json/'

async def request(pkg):
  res = await session.get(URL, params={"name": pkg})
  return res.json()

async def get_version(name, conf, *, cache, **kwargs):
  pkg = conf.get('archpkg') or name
  strip_release = conf.get('strip_release', False)
  provided = conf.get('provided')

  data = await cache.get(pkg, request)

  if not data['results']:
    raise GetVersionError('Arch package not found')

  r = [r for r in data['results'] if r['repo'] != 'testing'][0]

  if provided:
    provides = dict(x.split('=', 1) for x in r['provides'] if '=' in x)
    version = provides.get(provided)
    if version is None:
      # mirror alpm.py: fail clearly instead of crashing on None below
      raise GetVersionError('provides element not found')
    if strip_release:
      version = version.split('-', 1)[0]
  elif strip_release:
    version = r['pkgver']
  else:
    version = r['pkgver'] + '-' + r['pkgrel']

  return RichResult(
    version = version,
    url = f'https://archlinux.org/packages/{r["repo"]}/{r["arch"]}/{r["pkgname"]}/',
  )
nvchecker-2.17/nvchecker_source/aur.py000066400000000000000000000056521476544462000201300ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020,2024 lilydjwg , et al.

from datetime import datetime, timezone
import asyncio
from typing import Iterable, Dict, List, Tuple, Any, Optional

from nvchecker.api import (
  session, GetVersionError, VersionResult, RichResult,
  Entry, BaseWorker, RawResult,
)

AUR_URL = 'https://aur.archlinux.org/rpc/'

class AurResults:
  cache: Dict[str, Optional[Dict[str, Any]]]

  def __init__(self) -> None:
    self.cache = {}

  async def get_multiple(
    self,
    aurnames: Iterable[str],
  ) -> Dict[str, Optional[Dict[str, Any]]]:
    params = [('v', '5'), ('type', 'info')]
    params.extend(('arg[]', name) for name in aurnames
                  if name not in self.cache)
    res = await session.get(AUR_URL, params=params)
    data = res.json()
    new_results = {r['Name']: r for r in data['results']}

    cache = self.cache
    cache.update(new_results)
    cache.update(
      (name, None)
      for name in set(aurnames) - new_results.keys()
    )

    return {name: cache[name] for name in aurnames
            if name in cache}

class Worker(BaseWorker):
  # https://wiki.archlinux.org/index.php/Aurweb_RPC_interface#Limitations
  batch_size = 100

  async def run(self) -> None:
    tasks = self.tasks
    n_batch, left = divmod(len(tasks), self.batch_size)
    if left > 0:
      n_batch += 1

    aur_results = AurResults()

    ret = []
    for i in range(n_batch):
      s = i * self.batch_size
      batch = tasks[s : s+self.batch_size]
      fu = self._run_batch(batch, aur_results)
      ret.append(fu)

    await asyncio.gather(*ret)

  async def _run_batch(
    self,
    batch: List[Tuple[str, Entry]],
    aur_results: AurResults,
  ) -> None:
    task_by_name: Dict[str, Entry] = dict(self.tasks)

    async with self.task_sem:
      results = await _run_batch_impl(batch, aur_results)
      for name, version in results.items():
        r = RawResult(name, version, task_by_name[name])
        await self.result_q.put(r)

async def _run_batch_impl(
  batch: List[Tuple[str, Entry]],
  aur_results: AurResults,
) -> Dict[str, VersionResult]:
  aurnames = {conf.get('aur', name) for name, conf in batch}
  results = await aur_results.get_multiple(aurnames)

  ret: Dict[str, VersionResult] = {}

  for name, conf in batch:
    aurname = conf.get('aur', name)
    use_last_modified = conf.get('use_last_modified', False)
    strip_release = conf.get('strip_release', False)

    result = results.get(aurname)

    if result is None:
      ret[name] = GetVersionError('AUR upstream not found')
      continue

    version = result['Version']
    if use_last_modified:
      dt = datetime.fromtimestamp(result['LastModified'], timezone.utc)
      version += '-' + dt.strftime('%Y%m%d%H%M%S')
    if strip_release and '-' in version:
      version = version.rsplit('-', 1)[0]

    ret[name] = RichResult(
      version = version,
      # link to the AUR package page, which is keyed by the AUR name,
      # not the (possibly different) entry name
      url = f'https://aur.archlinux.org/packages/{aurname}',
    )

  return ret

nvchecker-2.17/nvchecker_source/bitbucket.py000066400000000000000000000042261476544462000213110ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

from typing import Any, List, Union
from urllib.parse import urlencode

from nvchecker.api import VersionResult, RichResult, Entry, AsyncCache

# doc: https://developer.atlassian.com/cloud/bitbucket/rest/api-group-commits/#api-repositories-workspace-repo-slug-commits-get
BITBUCKET_URL = 'https://bitbucket.org/api/2.0/repositories/%s/commits/%s'
# doc: https://developer.atlassian.com/cloud/bitbucket/rest/api-group-refs/#api-repositories-workspace-repo-slug-refs-tags-get
BITBUCKET_MAX_TAG = 'https://bitbucket.org/api/2.0/repositories/%s/refs/tags'

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache,
  **kwargs: Any,
) -> VersionResult:
  repo = conf['bitbucket']
  br = conf.get('branch', '')
  use_max_tag = conf.get('use_max_tag', False)
  use_sorted_tags = conf.get('use_sorted_tags', False)

  if use_sorted_tags or use_max_tag:
    parameters = {'fields': 'values.name,values.links.html.href,next'}

    if use_sorted_tags:
      parameters['sort'] = conf.get('sort', '-target.date')
      if 'query' in conf:
        parameters['q'] = conf['query']

  if use_sorted_tags:
    url = BITBUCKET_MAX_TAG % repo
    url += '?' + urlencode(parameters)

    return await _get_tags(url, max_page=1, cache=cache)

  elif use_max_tag:
    url = BITBUCKET_MAX_TAG % repo
    url += '?' + urlencode(parameters)

    max_page = conf.get('max_page', 3)
    return await _get_tags(url, max_page=max_page, cache=cache)

  else:
    url = BITBUCKET_URL % (repo, br)
    data = await cache.get_json(url)
    return RichResult(
      version = data['values'][0]['date'].split('T', 1)[0].replace('-', ''),
      url = data['values'][0]['links']['html']['href'],
    )

async def _get_tags(
  url: str, *,
  max_page: int,
  cache: AsyncCache,
) -> VersionResult:
  ret: List[Union[str, RichResult]] = []

  for _ in range(max_page):
    data = await cache.get_json(url)
    ret.extend([
      RichResult(
        version = tag['name'],
        url = tag['links']['html']['href'],
      ) for tag in data['values']
    ])
    if 'next' in data:
      url = data['next']
    else:
      break

  return ret
nvchecker-2.17/nvchecker_source/cmd.py000066400000000000000000000026461476544462000201040ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020,2025 lilydjwg , et al.

import asyncio

import structlog

from nvchecker.api import GetVersionError

logger = structlog.get_logger(logger_name=__name__)

async def run_cmd(cmd: str, timeout: int = 60) -> str:
  logger.debug('running cmd', cmd=cmd, timeout=timeout)
  p = await asyncio.create_subprocess_shell(
    cmd,
    stdout=asyncio.subprocess.PIPE,
    stderr=asyncio.subprocess.PIPE,
  )

  if hasattr(asyncio, 'timeout'):
    # Python 3.11+
    try:
      async with asyncio.timeout(timeout):
        output, error = await p.communicate()
        output_s = output.strip().decode('latin1')
        error_s = error.strip().decode(errors='replace')
    except TimeoutError:
      logger.warning('cmd timed out', cmd=cmd, timeout=timeout)
      p.terminate()
      await p.wait()
      # without raising here, output_s / error_s would be unbound below
      raise GetVersionError('command timed out', cmd=cmd, timeout=timeout)
  else:
    output, error = await p.communicate()
    output_s = output.strip().decode('latin1')
    error_s = error.strip().decode(errors='replace')

  if p.returncode != 0:
    raise GetVersionError(
      'command exited with error',
      cmd=cmd, error=error_s,
      returncode=p.returncode)
  elif not output_s:
    raise GetVersionError(
      'command exited without output',
      cmd=cmd, error=error_s,
      returncode=p.returncode)
  else:
    return output_s

async def get_version(
  name, conf, *, cache, keymanager=None
):
  cmd = conf['cmd']
  return await cache.get(cmd, run_cmd)
nvchecker-2.17/nvchecker_source/combiner.py000066400000000000000000000010331476544462000211240ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2021 lilydjwg , et al.

import asyncio
import string

from nvchecker.api import entry_waiter

class CombineFormat(string.Template):
  idpattern = '[0-9]+'
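
# Entry sketch this source expects (entry names illustrative; $1, $2, ...
# refer to the results of the `from` entries, in order):
#
#   [combined]
#   source = "combiner"
#   from = ["entry-a", "entry-b"]
#   format = "$1-$2"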

async def get_version(
  name, conf, *, cache, keymanager=None
):
  t = CombineFormat(conf['format'])
  from_ = conf['from']
  waiter = entry_waiter.get()
  entries = [waiter.wait(name) for name in from_]
  vers = await asyncio.gather(*entries)
  versdict = {str(i+1): v for i, v in enumerate(vers)}
  return t.substitute(versdict)
nvchecker-2.17/nvchecker_source/container.py000066400000000000000000000130351476544462000213150ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Chih-Hsuan Yen 

from typing import Dict, List, NamedTuple, Optional, Tuple
from urllib.request import parse_http_list
from urllib.parse import urljoin
import json

from nvchecker.api import session, HTTPError

class AuthInfo(NamedTuple):
  service: Optional[str]
  realm: str

def parse_www_authenticate_header(header: str) -> Tuple[str, Dict[str, str]]:
  '''
  Parse WWW-Authenticate header used in OAuth2 authentication for container
  registries. This is NOT RFC-compliant!

  Simplified from http.parse_www_authenticate_header in Werkzeug (BSD license)
  '''
  auth_type, auth_info = header.split(None, 1)
  result = {}
  for item in parse_http_list(auth_info):
    name, value = item.split("=", 1)
    if value[:1] == value[-1:] == '"':
      value = value[1:-1]
    result[name] = value
  return auth_type, result
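
# Example with a typical registry challenge (values illustrative):
#
#   parse_www_authenticate_header(
#     'Bearer realm="https://auth.docker.io/token",service="registry.docker.io"')
#   == ('Bearer', {'realm': 'https://auth.docker.io/token',
#                  'service': 'registry.docker.io'})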

# Inspired by https://stackoverflow.com/a/51921869
# Reference: https://github.com/containers/image/blob/v5.6.0/docker/docker_client.go

class UnsupportedAuthenticationError(NotImplementedError):
  def __init__(self):
    super().__init__('Only Bearer authentication supported for now')

async def get_registry_auth_info(registry_host: str) -> AuthInfo:
  auth_service = auth_realm = None

  try:
    await session.get(f'https://{registry_host}/v2/')
    raise UnsupportedAuthenticationError  # No authentication needed
  except HTTPError as e:
    if e.code != 401:
      raise

    auth_type, auth_info = parse_www_authenticate_header(e.response.headers['WWW-Authenticate'])
    if auth_type.lower() != 'bearer':
      raise UnsupportedAuthenticationError

    # Although 'service' is needed as per https://docs.docker.com/registry/spec/auth/token/,
    # ghcr.io (GitHub container registry) does not provide it
    auth_service = auth_info.get('service')
    auth_realm = auth_info['realm']

    return AuthInfo(auth_service, auth_realm)

async def get_container_tags(info: Tuple[str, str, AuthInfo]) -> List[str]:
  image_path, registry_host, auth_info = info
  token = await get_auth_token(auth_info, image_path)
  tags = []
  url = f'https://{registry_host}/v2/{image_path}/tags/list'

  while True:
    res = await session.get(url, headers={
      'Authorization': f'Bearer {token}',
      'Accept': 'application/json',
    })
    tags += res.json()['tags']
    link = res.headers.get('Link')
    if link is None:
      break
    else:
      url = urljoin(url, parse_next_link(link))

  return tags


async def get_auth_token(auth_info, image_path):
  auth_params = {
    'scope': f'repository:{image_path}:pull',
  }
  if auth_info.service:
    auth_params['service'] = auth_info.service
  res = await session.get(auth_info.realm, params=auth_params)
  token = res.json()['token']
  return token


def parse_next_link(value: str) -> str:
  ending = '>; rel="next"'
  if value.endswith(ending):
    return value[1:-len(ending)]
  else:
    raise ValueError(value)


async def get_container_tag_update_time(info: Tuple[str, str, str, AuthInfo]):
  '''
  Find the update time of a container tag.

  In fact, it's the creation time of the image ID referred by the tag. Tag itself does not have any update time.
  '''
  image_path, image_tag, registry_host, auth_info = info
  token = await get_auth_token(auth_info, image_path)

  # HTTP headers
  headers = {
    'Authorization': f'Bearer {token}',
    # Prefer Image Manifest Version 2, Schema 2: https://distribution.github.io/distribution/spec/manifest-v2-2/
    'Accept': ', '.join([
      'application/vnd.oci.image.manifest.v1+json',
      'application/vnd.oci.image.index.v1+json',
      'application/vnd.docker.distribution.manifest.v2+json',
      'application/vnd.docker.distribution.manifest.list.v2+json',
      'application/json',
    ]),
  }

  # Get tag manifest
  url = f'https://{registry_host}/v2/{image_path}/manifests/{image_tag}'
  res = await session.get(url, headers=headers)
  data = res.json()
  # Schema 1 returns the creation time in the response
  if data['schemaVersion'] == 1:
    return json.loads(data['history'][0]['v1Compatibility'])['created']

  # For schema 2, we have to fetch the config's blob
  # For multi-arch images, multiple manifests are bounded with the same tag. We should choose one and then request
  # the manifest's detail
  if data.get('manifests'):
    # It's quite hard to find the manifest matching with current CPU architecture and system.
    # For now we just choose the first and it should probably work for most cases
    image_digest = data['manifests'][0]['digest']
    url = f'https://{registry_host}/v2/{image_path}/manifests/{image_digest}'
    res = await session.get(url, headers=headers)
    data = res.json()

  digest = data['config']['digest']
  url = f'https://{registry_host}/v2/{image_path}/blobs/{digest}'
  res = await session.get(url, headers=headers)
  data = res.json()
  return data['created']


async def get_version(name, conf, *, cache, **kwargs):
  image_path = conf.get('container', name)
  image_tag = None
  # image tag is optional
  if ':' in image_path:
    image_path, image_tag = image_path.split(':', 1)
  registry_host = conf.get('registry', 'docker.io')
  if registry_host == 'docker.io':
    registry_host = 'registry-1.docker.io'

  auth_info = await cache.get(registry_host, get_registry_auth_info)

  # if a tag is given, return the tag's update time, otherwise return the image's tag list
  if image_tag:
    key = image_path, image_tag, registry_host, auth_info
    return await cache.get(key, get_container_tag_update_time)
  key = image_path, registry_host, auth_info
  return await cache.get(key, get_container_tags)
nvchecker-2.17/nvchecker_source/cpan.py000066400000000000000000000007111476544462000202510ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

from nvchecker.api import RichResult

# Using metacpan
CPAN_URL = 'https://fastapi.metacpan.org/release/%s'

async def get_version(name, conf, *, cache, **kwargs):
  key = conf.get('cpan', name)
  data = await cache.get_json(CPAN_URL % key)
  return RichResult(
    version = str(data['version']),
    url = f'https://metacpan.org/release/{data["author"]}/{data["name"]}',
  )
nvchecker-2.17/nvchecker_source/cran.py000066400000000000000000000014621476544462000202570ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2022 Pekka Ristola , et al.

from nvchecker.api import session, RichResult, GetVersionError

CRAN_URL = 'https://cran.r-project.org/package=%s/DESCRIPTION'
VERSION_FIELD = 'Version: '

async def request(pkg):
  url = CRAN_URL % pkg
  res = await session.get(url)
  return res.body.decode('utf-8', errors='ignore')

async def get_version(name, conf, *, cache, **kwargs):
  package = conf.get('cran', name)

  desc = await cache.get(package, request)

  for line in desc.splitlines():
    if line.startswith(VERSION_FIELD):
      version = line[len(VERSION_FIELD):]
      break
  else:
    raise GetVersionError('Invalid DESCRIPTION file')

  return RichResult(
    version = version,
    url = f'https://cran.r-project.org/web/packages/{package}/',
  )
nvchecker-2.17/nvchecker_source/cratesio.py000066400000000000000000000023761476544462000211520ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import re

import structlog

from nvchecker.api import RichResult

logger = structlog.get_logger(logger_name=__name__)


API_URL = 'https://crates.io/api/v1/crates/%s'
# https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string
VERSION_PATTERN = r'^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$'


async def get_version(name, conf, *, cache, **kwargs):
  name = conf.get('cratesio') or name
  use_pre_release = conf.get('use_pre_release', False)
  data = await cache.get_json(API_URL % name)
  results = []
  for v in data['versions']:
    if v['yanked']:
      continue
    version = v['num']
    match = re.fullmatch(VERSION_PATTERN, version)
    if match is None:
      logger.warning('ignoring invalid version', version=version)
      continue
    if not use_pre_release and match.group('prerelease'):
      continue
    results.append(
      RichResult(
        version=version,
        url=f'https://crates.io/crates/{name}/{version}',
      )
    )

  return results
nvchecker-2.17/nvchecker_source/debianpkg.py000066400000000000000000000015461476544462000212630ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017 Felix Yan , et al.

from nvchecker.api import RichResult, GetVersionError

URL = 'https://sources.debian.org/api/src/%(pkgname)s/?suite=%(suite)s'

async def get_version(name, conf, *, cache, **kwargs):
  pkg = conf.get('debianpkg') or name
  strip_release = conf.get('strip_release', False)
  suite = conf.get('suite') or "sid"
  url = URL % {"pkgname": pkg, "suite": suite}
  data = await cache.get_json(url)

  if not data.get('versions'):
    raise GetVersionError('Debian package not found')

  r = data['versions'][0]
  if strip_release:
    version = r['version'].split("-")[0]
  else:
    version = r['version']

  return RichResult(
    version = version,
    url = f'https://sources.debian.org/src/{data["package"]}/{r["version"]}/',
  )
nvchecker-2.17/nvchecker_source/gems.py000066400000000000000000000007251476544462000202700ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

from nvchecker.api import RichResult

GEMS_URL = 'https://rubygems.org/api/v1/versions/%s.json'

async def get_version(name, conf, *, cache, **kwargs):
  key = conf.get('gems', name)
  data = await cache.get_json(GEMS_URL % key)
  return [
    RichResult(
      version = item['number'],
      url = f'https://rubygems.org/gems/{key}/versions/{item["number"]}',
    ) for item in data
  ]
nvchecker-2.17/nvchecker_source/git.py000066400000000000000000000020061476544462000201120ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.

from .cmd import run_cmd

from nvchecker.api import RichResult

async def get_version(
  name, conf, *, cache, keymanager=None
):
  git = conf['git']

  use_commit = conf.get('use_commit', False)
  if use_commit:
    ref = conf.get('branch')
    if ref is None:
      ref = 'HEAD'
      gitref = None
    else:
      ref = 'refs/heads/' + ref
      gitref = ref
    cmd = f"git ls-remote {git} {ref}"
    data = await cache.get(cmd, run_cmd)
    version = data.split(None, 1)[0]
    return RichResult(
      version = version,
      revision = version,
      gitref = gitref,
    )
  else:
    cmd = f"git ls-remote --tags --refs {git}"
    data = await cache.get(cmd, run_cmd)
    versions = []
    for line in data.splitlines():
      revision, version = line.split("\trefs/tags/", 1)
      versions.append(RichResult(
        version = version,
        revision = revision,
        gitref = f"refs/tags/{version}",
      ))
    return versions
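
# `git ls-remote --tags --refs <url>` emits lines like (illustrative):
#   1234abcd...\trefs/tags/v1.2.3
# which the loop above splits into (revision, version) pairs.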
nvchecker-2.17/nvchecker_source/gitea.py000066400000000000000000000026621476544462000204300ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

from __future__ import annotations

import urllib.parse

GITEA_URL = 'https://%s/api/v1/repos/%s/commits'
GITEA_MAX_TAG = 'https://%s/api/v1/repos/%s/tags'

from nvchecker.api import (
  VersionResult, RichResult, Entry,
  AsyncCache, KeyManager,
)

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
) -> VersionResult:
  repo = urllib.parse.quote(conf['gitea'])
  br = conf.get('branch')
  host = conf.get('host', 'gitea.com')
  use_max_tag = conf.get('use_max_tag', False)

  if use_max_tag:
    url = GITEA_MAX_TAG % (host, repo)
  else:
    url = GITEA_URL % (host, repo)
    if br:
      url += '?sha=' + br

  # Load token from config
  token = conf.get('token')
  # Load token from keyman
  if token is None:
    token = keymanager.get_key(host.lower(), 'gitea_' + host.lower())

  # Set private token if token exists.
  headers = {}
  if token:
    headers["Authorization"] = f'token {token}'

  data = await cache.get_json(url, headers = headers)
  if use_max_tag:
    return [
      RichResult(
        version = tag['name'],
        revision = tag['id'],
        url = f'https://{host}/{conf["gitea"]}/releases/tag/{tag["name"]}',
      ) for tag in data
    ]
  else:
    return RichResult(
      version = data[0]['commit']['committer']['date'],
      revision = data[0]['sha'],
      url = data[0]['html_url'],
    )
nvchecker-2.17/nvchecker_source/github.py000066400000000000000000000171771476544462000206300ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020, 2024 lilydjwg , et al.

import time
from urllib.parse import urlencode
from typing import List, Tuple, Union, Optional
import asyncio

import structlog

from nvchecker.api import (
  VersionResult, Entry, AsyncCache, KeyManager,
  HTTPError, session, RichResult, GetVersionError,
)

logger = structlog.get_logger(logger_name=__name__)
ALLOW_REQUEST = None
RATE_LIMITED_ERROR = False

GITHUB_URL = 'https://api.%s/repos/%s/commits'
GITHUB_LATEST_RELEASE = 'https://api.%s/repos/%s/releases/latest'
# https://developer.github.com/v3/git/refs/#get-all-references
GITHUB_MAX_TAG = 'https://api.%s/repos/%s/git/refs/tags'
GITHUB_MAX_RELEASE = 'https://api.%s/repos/%s/releases'
GITHUB_GRAPHQL_URL = 'https://api.%s/graphql'

async def get_version(name, conf, **kwargs):
  global RATE_LIMITED_ERROR, ALLOW_REQUEST

  if RATE_LIMITED_ERROR:
    raise RuntimeError('rate limited')

  if ALLOW_REQUEST is None:
    ALLOW_REQUEST = asyncio.Event()
    ALLOW_REQUEST.set()

  for _ in range(2): # retry once
    try:
      await ALLOW_REQUEST.wait()
      return await get_version_real(name, conf, **kwargs)
    except HTTPError as e:
      if e.code in [403, 429]:
        if n := check_ratelimit(e, name):
          ALLOW_REQUEST.clear()
          await asyncio.sleep(n+1)
          ALLOW_REQUEST.set()
          continue
        RATE_LIMITED_ERROR = True
      raise

QUERY_LATEST_TAG = '''
{{
  repository(name: "{name}", owner: "{owner}") {{
    refs(refPrefix: "refs/tags/", first: 1,
         query: "{query}",
         orderBy: {{field: TAG_COMMIT_DATE, direction: DESC}}) {{
      edges {{
        node {{
          name
          target {{
            oid
          }}
        }}
      }}
    }}
  }}
}}
'''

QUERY_LATEST_RELEASE_WITH_PRERELEASES = '''
{{
  repository(name: "{name}", owner: "{owner}") {{
    releases(first: 1, orderBy: {{field: CREATED_AT, direction: DESC}}) {{
      edges {{
        node {{
          name
          url
          tag {{
            name
          }}
          tagCommit {{
            oid
          }}
        }}
      }}
    }}
  }}
}}
'''

async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult:
  host, repo, query, token = key
  owner, reponame = repo.split('/')
  headers = {
    'Authorization': f'bearer {token}',
    'Content-Type': 'application/json',
  }
  q = QUERY_LATEST_TAG.format(
    owner = owner,
    name = reponame,
    query = query,
  )

  res = await session.post(
    GITHUB_GRAPHQL_URL % host,
    headers = headers,
    json = {'query': q},
  )
  j = res.json()

  refs = j['data']['repository']['refs']['edges']
  if not refs:
    raise GetVersionError('no tag found')

  version = refs[0]['node']['name']
  revision = refs[0]['node']['target']['oid']
  return RichResult(
    version = version,
    gitref = f"refs/tags/{version}",
    revision = revision,
    url = f'https://github.com/{repo}/releases/tag/{version}',
  )

async def get_latest_release_with_prereleases(key: Tuple[str, str, str, str]) -> RichResult:
  host, repo, token, use_release_name = key
  owner, reponame = repo.split('/')
  headers = {
    'Authorization': f'bearer {token}',
    'Content-Type': 'application/json',
  }
  q = QUERY_LATEST_RELEASE_WITH_PRERELEASES.format(
    owner = owner,
    name = reponame,
  )

  res = await session.post(
    GITHUB_GRAPHQL_URL % host,
    headers = headers,
    json = {'query': q},
  )
  j = res.json()

  refs = j['data']['repository']['releases']['edges']
  if not refs:
    raise GetVersionError('no release found')

  tag_name = refs[0]['node']['tag']['name']
  if use_release_name:
    version = refs[0]['node']['name']
  else:
    version = tag_name

  return RichResult(
    version = version,
    gitref = f"refs/tags/{tag_name}",
    revision = refs[0]['node']['tagCommit']['oid'],
    url = refs[0]['node']['url'],
  )

async def get_version_real(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  repo = conf['github']
  host = conf.get('host', "github.com")

  # Load token from config
  token = conf.get('token')
  # Load token from keyman
  if token is None:
    token = keymanager.get_key(host.lower(), 'github')

  use_latest_tag = conf.get('use_latest_tag', False)
  if use_latest_tag:
    if not token:
      raise GetVersionError('token not given but it is required')

    query = conf.get('query', '')
    return await cache.get((host, repo, query, token), get_latest_tag) # type: ignore

  use_latest_release = conf.get('use_latest_release', False)
  include_prereleases = conf.get('include_prereleases', False)
  use_release_name = conf.get('use_release_name', False)
  if use_latest_release and include_prereleases:
    if not token:
      raise GetVersionError('token not given but it is required')

    return await cache.get(
      (host, repo, token, use_release_name),
      get_latest_release_with_prereleases) # type: ignore

  br = conf.get('branch')
  path = conf.get('path')
  use_max_tag = conf.get('use_max_tag', False)
  use_max_release = conf.get('use_max_release', False)
  if use_latest_release:
    url = GITHUB_LATEST_RELEASE % (host, repo)
  elif use_max_tag:
    url = GITHUB_MAX_TAG % (host, repo)
  elif use_max_release:
    url = GITHUB_MAX_RELEASE % (host, repo)
  else:
    url = GITHUB_URL % (host, repo)
    parameters = {}
    if br:
      parameters['sha'] = br
    if path:
      parameters['path'] = path
    url += '?' + urlencode(parameters)
  headers = {
    'Accept': 'application/vnd.github.quicksilver-preview+json',
  }
  if token:
    headers['Authorization'] = f'token {token}'

  data = await cache.get_json(url, headers = headers)

  if use_max_tag:
    tags: List[Union[str, RichResult]] = [
      RichResult(
        version = ref['ref'].split('/', 2)[-1],
        gitref = ref['ref'],
        revision = ref['object']['sha'],
        url = f'https://github.com/{repo}/releases/tag/{ref["ref"].split("/", 2)[-1]}',
      ) for ref in data
    ]
    if not tags:
      raise GetVersionError('No tag found in upstream repository.')
    return tags

  if use_max_release:
    releases: List[Union[str, RichResult]] = [
      RichResult(
        version = ref['name'] if use_release_name else ref['tag_name'],
        gitref = f"refs/tags/{ref['tag_name']}",
        url = ref['html_url'],
      ) for ref in data if include_prereleases or not ref['prerelease']
    ]
    if not releases:
      raise GetVersionError('No release found in upstream repository.')
    return releases

  if use_latest_release:
    if 'tag_name' not in data:
      raise GetVersionError('No release found in upstream repository.')

    if use_release_name:
      version = data['name']
    else:
      version = data['tag_name']

    return RichResult(
      version = version,
      gitref = f"refs/tags/{data['tag_name']}",
      url = data['html_url'],
    )

  else:
    return RichResult(
      # YYYYMMDD.HHMMSS
      version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'),
      revision = data[0]['sha'],
      url = data[0]['html_url'],
    )

def check_ratelimit(exc: HTTPError, name: str) -> Optional[int]:
  res = exc.response
  if not res:
    raise exc

  if v := res.headers.get('retry-after'):
    n = int(v)
    logger.warning('retry-after', n=n)
    return n

  # default -1 is used to re-raise the exception
  n = int(res.headers.get('X-RateLimit-Remaining', -1))
  if n == 0:
    reset = int(res.headers.get('X-RateLimit-Reset'))
    logger.error(f'rate limited, resetting at {time.ctime(reset)}. '
                  'Or get an API token to increase the allowance if not yet',
                 name = name,
                 reset = reset)
    return None

  raise exc
nvchecker-2.17/nvchecker_source/gitlab.py000066400000000000000000000041161476544462000205750ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import urllib.parse

import structlog

from nvchecker.api import (
  VersionResult, RichResult, Entry,
  AsyncCache, KeyManager, TemporaryError,
)

GITLAB_URL = 'https://%s/api/v4/projects/%s/repository/commits'
GITLAB_MAX_TAG = 'https://%s/api/v4/projects/%s/repository/tags'

logger = structlog.get_logger(logger_name=__name__)

async def get_version(name, conf, **kwargs):
  try:
    return await get_version_real(name, conf, **kwargs)
  except TemporaryError as e:
    check_ratelimit(e, name)

async def get_version_real(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  repo = urllib.parse.quote_plus(conf['gitlab'])
  br = conf.get('branch')
  host = conf.get('host', "gitlab.com")
  use_max_tag = conf.get('use_max_tag', False)

  if use_max_tag:
    url = GITLAB_MAX_TAG % (host, repo)
  else:
    url = GITLAB_URL % (host, repo)
    if br:
      url += '?ref_name=%s' % br

  # Load token from config
  token = conf.get('token')
  # Load token from keyman
  if token is None:
    token = keymanager.get_key(host.lower(), 'gitlab_' + host.lower())

  # Set private token if token exists.
  headers = {}
  if token:
    headers["PRIVATE-TOKEN"] = token

  data = await cache.get_json(url, headers = headers)
  if use_max_tag:
    return [
      RichResult(
        version = tag['name'],
        revision = tag['commit']['id'],
        url = f'https://{host}/{conf["gitlab"]}/-/tags/{tag["name"]}',
      ) for tag in data
    ]
  else:
    return RichResult(
      version = data[0]['created_at'].split('T', 1)[0].replace('-', ''),
      revision = data[0]['id'],
      url = data[0]['web_url'],
    )

def check_ratelimit(exc, name):
  res = exc.response
  if not res:
    raise

  # default -1 is used to re-raise the exception
  n = int(res.headers.get('RateLimit-Remaining', -1))
  if n == 0:
    logger.error('gitlab rate limited. Wait some time '
                 'or get an API token to increase the allowance if not yet',
                 name = name)
  else:
    raise
nvchecker-2.17/nvchecker_source/go.py000066400000000000000000000020401476544462000177320ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2024 bgme .

from lxml import html

from nvchecker.api import (
  RichResult, Entry, AsyncCache, KeyManager,
  session, GetVersionError,
)

GO_PKG_URL = 'https://pkg.go.dev/{pkg}?tab=versions'
GO_PKG_VERSION_URL = 'https://pkg.go.dev/{pkg}@{version}'


async def get_version(
    name: str, conf: Entry, *,
    cache: AsyncCache, keymanager: KeyManager,
    **kwargs,
) -> RichResult:
  key = tuple(sorted(conf.items()))
  return await cache.get(key, get_version_impl)


async def get_version_impl(info) -> RichResult:
  conf = dict(info)
  pkg_name = conf.get('go')

  url = GO_PKG_URL.format(pkg=pkg_name)
  res = await session.get(url)
  doc = html.fromstring(res.body.decode())

  elements = doc.xpath("//div[@class='Version-tag']/a/text()")
  try:
    version = elements[0] # type: ignore
    return RichResult(
      version = version, # type: ignore
      url = GO_PKG_VERSION_URL.format(pkg=pkg_name, version=version),
    )
  except IndexError:
    raise GetVersionError("parse error", pkg_name=pkg_name)
nvchecker-2.17/nvchecker_source/hackage.py000066400000000000000000000007371476544462000207230ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

from nvchecker.api import RichResult

HACKAGE_URL = 'https://hackage.haskell.org/package/%s/preferred.json'

async def get_version(name, conf, *, cache, **kwargs):
  key = conf.get('hackage', name)
  data = await cache.get_json(HACKAGE_URL % key)
  version = data['normal-version'][0]
  return RichResult(
    version = version,
    url = f'https://hackage.haskell.org/package/{key}-{version}',
  )
nvchecker-2.17/nvchecker_source/htmlparser.py000066400000000000000000000022771476544462000215220ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Ypsilik , et al.
# Copyright (c) 2013-2020 lilydjwg , et al.

from lxml import html, etree

from nvchecker.api import session, GetVersionError

async def get_version(name, conf, *, cache, **kwargs):
  key = tuple(sorted(conf.items()))
  return await cache.get(key, get_version_impl)

async def get_version_impl(info):
  conf = dict(info)

  encoding = conf.get('encoding')
  parser = html.HTMLParser(encoding=encoding)
  data = conf.get('post_data')
  if data is None:
    res = await session.get(conf['url'])
  else:
    res = await session.post(conf['url'], body = data, headers = {
        'Content-Type': conf.get('post_data_type', 'application/x-www-form-urlencoded')
      })
  doc = html.fromstring(res.body, base_url=conf['url'], parser=parser)

  try:
    els = doc.xpath(conf.get('xpath'))
  except ValueError:
    if not conf.get('missing_ok', False):
      raise GetVersionError('version string not found.')
    els = []  # missing_ok: yield no matches instead of an unbound name below
  except etree.XPathEvalError as e:
    raise GetVersionError('bad xpath', exc_info=e)

  version = [
    str(el)
    if isinstance(el, str)
    else str(el.text_content())
    for el in els
  ]
  return version
nvchecker-2.17/nvchecker_source/httpheader.py000066400000000000000000000020271476544462000214620ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2021 lilydjwg , et al.

import re

from nvchecker.api import session, GetVersionError

async def get_version(name, conf, *, cache, **kwargs):
  key = tuple(sorted(conf.items()))
  return await cache.get(key, get_version_impl)

async def get_version_impl(info):
  conf = dict(info)
  url = conf['url']
  header = conf.get('header', 'Location')
  follow_redirects = conf.get('follow_redirects', False)
  method = conf.get('method', 'HEAD')

  try:
    regex = re.compile(conf['regex'])
  except re.error as e:
    raise GetVersionError('bad regex', exc_info=e)

  res = await session.request(
    url,
    method = method,
    follow_redirects = follow_redirects,
  )

  header_value = res.headers.get(header)
  if not header_value:
    raise GetVersionError(
      'header not found or is empty',
      header = header,
      value = header_value,
    )

  try:
    version = regex.findall(header_value)
  except ValueError:
    raise GetVersionError('version string not found.')
  return version
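
# Entry sketch (values illustrative): extract a version from the Location
# header of a redirecting download URL.
#
#   [example]
#   source = "httpheader"
#   url = "https://example.com/download/latest"
#   regex = 'example-([\d.]+)\.tar\.gz'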
nvchecker-2.17/nvchecker_source/jq.py000066400000000000000000000022351476544462000177450ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2024 Rocket Aaron , et al.

import json
import jq

from nvchecker.api import session, GetVersionError

async def get_version(name, conf, *, cache, **kwargs):
  key = tuple(sorted(conf.items()))
  return await cache.get(key, get_version_impl)

async def get_version_impl(info):
  conf = dict(info)

  try:
    program = jq.compile(conf.get('filter', '.'))
  except ValueError as e:
    raise GetVersionError('bad jq filter', exc_info=e)

  data = conf.get('post_data')
  if data is None:
    res = await session.get(conf['url'])
  else:
    res = await session.post(conf['url'], body = data, headers = {
        'Content-Type': conf.get('post_data_type', 'application/json')
      })

  try:
    obj = json.loads(res.body)
  except json.decoder.JSONDecodeError as e:
    raise GetVersionError('bad json string', exc_info=e)

  try:
    version = program.input(obj).all()
    if version == [None] and not conf.get('missing_ok', False):
      raise GetVersionError('version string not found.')
    version = [str(v) for v in version]
  except ValueError as e:
    raise GetVersionError('failed to filter json', exc_info=e)

  return version
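
# Entry sketch (URL and filter illustrative):
#
#   [example]
#   source = "jq"
#   url = "https://example.com/releases.json"
#   filter = ".releases[].version"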
nvchecker-2.17/nvchecker_source/launchpad.py000066400000000000000000000011551476544462000212720ustar00rootroot00000000000000# MIT Licensed
# Copyright (c) 2024 Bert Peters , et al.
from __future__ import annotations
from nvchecker.api import AsyncCache, Entry, RichResult

PROJECT_INFO_URL = "https://api.launchpad.net/1.0/{launchpad}"

async def get_version(name: str, conf: Entry, *, cache: AsyncCache, **kwargs):
  launchpad = conf["launchpad"]

  project_data = await cache.get_json(PROJECT_INFO_URL.format(launchpad=launchpad))
  data = await cache.get_json(project_data['releases_collection_link'])

  return [
    RichResult(version=entry["version"], url=entry["web_link"])
    for entry in data["entries"]
  ]



nvchecker-2.17/nvchecker_source/manual.py000066400000000000000000000002561476544462000206110ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

async def get_version(name, conf, **kwargs):
  return str(conf.get('manual')).strip() or None
nvchecker-2.17/nvchecker_source/mercurial.py000066400000000000000000000004321476544462000213130ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.

async def get_version(name, conf, *, cache, **kwargs):
  url = conf['mercurial'] + '/json-tags'

  data = await cache.get_json(url)

  version = [tag['tag'] for tag in data['tags']]
  return version
nvchecker-2.17/nvchecker_source/none.py000066400000000000000000000006531476544462000202740ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.

from __future__ import annotations

from nvchecker.api import (
  BaseWorker, GetVersionError, RawResult,
)

class Worker(BaseWorker):
  async def run(self) -> None:
    exc = GetVersionError('no source specified')
    async with self.task_sem:
      for name, conf in self.tasks:
        await self.result_q.put(
          RawResult(name, exc, conf))
nvchecker-2.17/nvchecker_source/npm.py000066400000000000000000000020301476544462000201160ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import json
import re
from nvchecker.api import session, RichResult

NPM_URL = 'https://registry.npmjs.org/%s'

def configure(config):
  global NPM_URL
  url = config.get('registry')
  if url:
    NPM_URL = f'{url.rstrip("/")}/%s'
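
# Source-level configuration sketch (assuming the standard `__config__`
# table; the URL is illustrative):
#
#   [__config__.source.npm]
#   registry = "https://registry.npmjs.org"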

async def get_first_1k(url):
  headers = {
    "Accept": "application/vnd.npm.install-v1+json",
    "Range": "bytes=0-1023",
  }
  res = await session.get(url, headers=headers)
  return res.body
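
# NOTE: this relies on the abbreviated metadata placing "dist-tags" within
# the first kilobyte; see the Range header above.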

async def get_version(name, conf, *, cache, **kwargs):
  key = conf.get('npm', name)
  data = await cache.get(NPM_URL % key, get_first_1k)

  dist_tags = json.loads(re.search(b'"dist-tags":({.*?})', data).group(1))
  version = dist_tags['latest']

  # There is no standardised URL scheme, so we only return an URL for the default registry
  if NPM_URL.startswith('https://registry.npmjs.org/'):
    return RichResult(
      version = version,
      url = f'https://www.npmjs.com/package/{key}/v/{version}',
    )
  else:
    return version
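
# Example configuration (a sketch; the package name is illustrative, and the
# [__config__.source.npm] table shown for overriding the registry is an
# assumption about how configure() above receives its config):
#
#   [example]
#   source = "npm"
#   npm = "example"
#
#   [__config__.source.npm]
#   registry = "https://registry.npmjs.org"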
nvchecker-2.17/nvchecker_source/opam.py000066400000000000000000000045031476544462000202670ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2024 Daniel Peukert , et al.

import asyncio
from io import BytesIO
import tarfile
from typing import List

from nvchecker.api import (
  session, VersionResult,
  Entry, AsyncCache,
  KeyManager, RichResult
)

OPAM_REPO_INDEX_URL = "%s/index.tar.gz"
OPAM_VERSION_PATH_PREFIX = "packages/%s/%s."
OPAM_VERSION_PATH_SUFFIX = "/opam"

OPAM_DEFAULT_REPO = 'https://opam.ocaml.org'
OPAM_DEFAULT_REPO_VERSION_URL = "%s/packages/%s/%s.%s"

def _decompress_and_list_files(data: bytes) -> List[str]:
  # Convert the bytes to a file object and get a list of files
  archive = tarfile.open(mode='r', fileobj=BytesIO(data))
  return archive.getnames()

async def get_files(url: str) -> List[str]:
  # Download the file and get its contents
  res = await session.get(url)
  data = res.body

  # Get the file list of the archive
  loop = asyncio.get_running_loop()
  return await loop.run_in_executor(None, _decompress_and_list_files, data)

async def get_package_versions(files: List[str], pkg: str) -> List[str]:
  # Prepare the filename prefix based on the package name
  prefix = OPAM_VERSION_PATH_PREFIX % (pkg, pkg)

  # Only keep opam files that are relevant to the package we're working with
  filtered_files = []

  for filename in files:
    if filename.startswith(prefix) and filename.endswith(OPAM_VERSION_PATH_SUFFIX):
      filtered_files.append(filename[len(prefix):-1*len(OPAM_VERSION_PATH_SUFFIX)])

  return filtered_files

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
):
  pkg = conf.get('pkg', name)
  repo = conf.get('repo', OPAM_DEFAULT_REPO).rstrip('/')

  # Get the list of files in the repo index (see https://opam.ocaml.org/doc/Manual.html#Repositories for repo structure)
  files = await cache.get(OPAM_REPO_INDEX_URL % repo, get_files) # type: ignore

  # Parse the version strings from the file names
  raw_versions = await get_package_versions(files, pkg)

  # Convert the version strings into RichResults
  versions = []
  for version in raw_versions:
    versions.append(RichResult(
      version = version,
      # There is no standardised URL scheme, so we only return a URL for the default repository
      url = OPAM_DEFAULT_REPO_VERSION_URL % (repo, pkg, pkg, version) if repo == OPAM_DEFAULT_REPO else None,
    ))
  return versions
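
# Example configuration (a sketch; the package name is illustrative; repo
# defaults to the official https://opam.ocaml.org repository when omitted):
#
#   [ocamlfind]
#   source = "opam"
#   pkg = "ocamlfind"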
nvchecker-2.17/nvchecker_source/openvsx.py000066400000000000000000000010771476544462000210400ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2021 Th3Whit3Wolf , et al.

from nvchecker.api import RichResult

API_URL = 'https://open-vsx.org/api/%s/%s'

async def get_version(name, conf, *, cache, **kwargs):
  name = conf.get('openvsx') or name
  splitName = name.split('.')
  publisher = splitName[0]
  extension = splitName[1]
  data = await cache.get_json(API_URL % (publisher, extension))
  version = data['version']
  return RichResult(
    version = version,
    url = f'https://open-vsx.org/extension/{publisher}/{extension}/{version}',
  )
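
# Example configuration (a sketch; the extension ID is illustrative and takes
# the "publisher.extension" form expected above):
#
#   [example]
#   source = "openvsx"
#   openvsx = "vscodevim.vim"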
nvchecker-2.17/nvchecker_source/packagist.py000066400000000000000000000012571476544462000213040ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

from nvchecker.api import RichResult

PACKAGIST_URL = 'https://packagist.org/packages/%s.json'

async def get_version(name, conf, *, cache, **kwargs):
  key = conf.get('packagist', name)
  data = await cache.get_json(PACKAGIST_URL % key)

  versions = {
    version: details
    for version, details in data["package"]['versions'].items()
    if version != "dev-master"
  }

  if versions:
    version = max(versions, key=lambda version: versions[version]["time"])
    return RichResult(
      version = version,
      url = f'https://packagist.org/packages/{data["package"]["name"]}#{version}',
    )
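
# Example configuration (a sketch; the package name is illustrative):
#
#   [example]
#   source = "packagist"
#   packagist = "monolog/monolog"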
nvchecker-2.17/nvchecker_source/pacman.py000066400000000000000000000007771476544462000206030ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

from nvchecker_source import cmd

async def get_version(name, conf, **kwargs):
  pkg = conf.get('pacman') or name
  c = "LANG=C pacman -Si %s | grep -F Version | awk '{print $3}' | head -n 1" % pkg
  conf['cmd'] = c
  strip_release = conf.get('strip_release', False)

  version = await cmd.get_version(name, conf, **kwargs)

  if strip_release and '-' in version:
    version = version.rsplit('-', 1)[0]
  return version
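
# Example configuration (a sketch; requires pacman and a synced local sync database):
#
#   [linux]
#   source = "pacman"
#   strip_release = true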
nvchecker-2.17/nvchecker_source/pagure.py000066400000000000000000000013721476544462000206170ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.

import urllib.parse

import structlog

from nvchecker.api import (
  VersionResult, RichResult, Entry, AsyncCache, KeyManager,
)

PAGURE_URL = 'https://%s/api/0/%s/git/tags?with_commits=true'

logger = structlog.get_logger(logger_name=__name__)

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  repo = conf['pagure']
  host = conf.get('host', "pagure.io")

  url = PAGURE_URL % (host, repo)

  data = await cache.get_json(url)
  return [
    RichResult(
      version = version,
      url = f'https://{host}/{repo}/tree/{version_hash}',
    ) for version, version_hash in data["tags"].items()
  ]
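
# Example configuration (a sketch; the project name is illustrative; host
# defaults to pagure.io when omitted):
#
#   [example]
#   source = "pagure"
#   pagure = "pagure"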
nvchecker-2.17/nvchecker_source/pypi.py000066400000000000000000000022671476544462000203210ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2021,2023-2024 lilydjwg , et al.

import structlog
from packaging.version import Version, InvalidVersion

from nvchecker.api import RichResult

logger = structlog.get_logger(logger_name=__name__)

async def get_version(name, conf, *, cache, **kwargs):
  ret = []

  package = conf.get('pypi') or name
  use_pre_release = conf.get('use_pre_release', False)

  url = 'https://pypi.org/pypi/{}/json'.format(package)

  data = await cache.get_json(url)

  for version in data['releases'].keys():
    # Skip versions that are marked as yanked.
    if (vers := data['releases'][version]) and vers[0]['yanked']:
      continue

    try:
      parsed_version = Version(version)
    except InvalidVersion:
      if data['releases'][version]:
        # emit a warning if there is something under the invalid version
        # sympy has an empty "0.5.13-hg" version
        logger.warning('ignoring invalid version', version=version)
      continue

    if not use_pre_release and parsed_version.is_prerelease:
      continue

    ret.append(RichResult(
      version = version,
      url = f'https://pypi.org/project/{package}/{version}/',
    ))

  return ret
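
# Example configuration (same shape as the [PySide] entry in sample_config.toml):
#
#   [PySide]
#   source = "pypi"
#   pypi = "nvchecker"
#   use_pre_release = false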
nvchecker-2.17/nvchecker_source/regex.py000066400000000000000000000020371476544462000204450ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import re

from nvchecker.api import session, GetVersionError

async def get_version(name, conf, *, cache, **kwargs):
  try:
    regex = re.compile(conf['regex'])
  except re.error as e:
    raise GetVersionError('bad regex', exc_info=e)
  if regex.groups > 1:
    raise GetVersionError('multi-group regex')

  key = (
    conf['url'],
    conf.get('encoding', 'latin1'),
    conf.get('post_data'),
    conf.get('post_data_type', 'application/x-www-form-urlencoded'),
  )
  body = await cache.get(key, get_url)

  versions = regex.findall(body)
  if not versions and not conf.get('missing_ok', False):
    raise GetVersionError('version string not found.')
  return versions

async def get_url(info):
  url, encoding, post_data, post_data_type = info

  if post_data is None:
    res = await session.get(url)
  else:
    res = await session.post(url, body = post_data, headers = {
      'Content-Type': post_data_type,
    })
  body = res.body.decode(encoding)
  return body
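
# Example configuration (same as the [ssed] entry in sample_config.toml):
#
#   [ssed]
#   source = "regex"
#   regex = "The current version is ([\\d.]+)\\."
#   url = "https://sed.sourceforge.net/grabbag/ssed/"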
nvchecker-2.17/nvchecker_source/repology.py000066400000000000000000000017411476544462000211740ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2019 lilydjwg , et al.

from nvchecker.api import RichResult, GetVersionError

API_URL = 'https://repology.org/api/v1/project/{}'

async def get_version(name, conf, *, cache, **kwargs):
  project = conf.get('repology') or name
  repo = conf.get('repo')
  subrepo = conf.get('subrepo')
  if not repo:
    raise GetVersionError('repo field is required for repology source')

  url = API_URL.format(project)
  data = await cache.get_json(url)

  pkgs = [pkg for pkg in data if pkg['repo'] == repo]
  if not pkgs:
    raise GetVersionError('package is not found', repo=repo)

  if subrepo:
    pkgs = [pkg for pkg in pkgs if pkg.get('subrepo') == subrepo]
    if not pkgs:
      raise GetVersionError('package is not found in subrepo',
                            repo=repo, subrepo=subrepo)

  return [
    RichResult(
      version = pkg['version'],
      url = f'https://repology.org/project/{project}/packages',
    ) for pkg in pkgs
  ]
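
# Example configuration (a sketch; the project and repo values are
# illustrative, but the repo field itself is required):
#
#   [example]
#   source = "repology"
#   repology = "nvchecker"
#   repo = "aur"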
nvchecker-2.17/nvchecker_source/rpmrepo.py000066400000000000000000000047611476544462000210250ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2024 Jakub Ružička , et al.

import asyncio
import gzip
import pathlib
import urllib
from typing import Set

import lxml.etree
from nvchecker.api import session, AsyncCache, Entry, KeyManager, VersionResult


# XML namespaces used in repodata (the URLs are namespace identifiers; the links themselves are dead)
NS = {
    'common': 'http://linux.duke.edu/metadata/common',
    'repo':   'http://linux.duke.edu/metadata/repo',
    'rpm':    'http://linux.duke.edu/metadata/rpm'
}


async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  repo = conf['repo']
  arch = conf.get('arch', 'binary')
  pkg = conf.get('pkg')
  if not pkg:
    pkg = conf.get('rpmrepo', name)

  repo_url = urllib.parse.urlparse(repo)
  repo_path = pathlib.PurePosixPath(repo_url.path)

  # get the url of repomd.xml
  repomd_path = repo_path / 'repodata' / 'repomd.xml'
  repomd_url = repo_url._replace(path=str(repomd_path)).geturl()
  # download repomd.xml (use cache)
  repomd_body = await cache.get(repomd_url, get_file) # type: ignore
  # parse repomd.xml
  repomd_xml = lxml.etree.fromstring(repomd_body)

  # get the url of *primary.xml.gz
  primary_element = repomd_xml.find('repo:data[@type="primary"]/repo:location', namespaces=NS)
  primary_path = repo_path / primary_element.get('href') # type: ignore
  primary_url = repo_url._replace(path=str(primary_path)).geturl()
  # download and decompress *primary.xml.gz (use cache)
  primary_body = await cache.get(primary_url, get_file_gz) # type: ignore
  # parse *primary.xml metadata
  metadata = lxml.etree.fromstring(primary_body)

  # use set to eliminate duplication
  versions_set: Set[str] = set()
  # iterate package metadata
  for el in metadata.findall(f'common:package[common:name="{pkg}"]', namespaces=NS):
    pkg_arch = el.findtext('common:arch', namespaces=NS)

    # filter by arch
    if arch == 'binary':
      if pkg_arch == 'src':
        continue
    elif arch != 'any':
      if pkg_arch != arch:
        continue

    version_info = el.find('common:version', namespaces=NS)
    version = version_info.get('ver') # type: ignore
    versions_set.add(version) # type: ignore

  versions = list(versions_set)
  return versions # type: ignore


async def get_file(url: str) -> bytes:
  res = await session.get(url)
  return res.body


async def get_file_gz(url: str) -> bytes:
  res = await session.get(url)
  loop = asyncio.get_running_loop()
  return await loop.run_in_executor(
    None, gzip.decompress, res.body)
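
# Example configuration (a sketch; the repo URL and package name are illustrative):
#
#   [example]
#   source = "rpmrepo"
#   pkg = "gnome-shell"
#   repo = "https://dl.fedoraproject.org/pub/fedora/linux/releases/41/Everything/x86_64/os/"
#   arch = "x86_64"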
nvchecker-2.17/nvchecker_source/snapcraft.py000066400000000000000000000014051476544462000213120ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2025 Maxim Slipenko , et al.

from nvchecker.api import (
  GetVersionError
)
from nvchecker.httpclient.base import HTTPError

URL="https://api.snapcraft.io/v2/snaps/info/%(snap)s"

async def get_version(
  name: str, conf, *,
  cache, keymanager,
  **kwargs,
):
  try:
    snap = conf.get("snap")
    channel = conf.get("channel")

    result = await cache.get_json(
      URL % { "snap": snap },
      headers={
        "Snap-Device-Series": "16",
      },
    )
  except HTTPError:
    raise GetVersionError(f"Failed to request snap info for {snap}")

  for c in result['channel-map']:
    if c['channel']['name'] == channel:
      return c['version']

  raise GetVersionError(f"Failed to find version for {snap}")
nvchecker-2.17/nvchecker_source/sparkle.py000066400000000000000000000037411476544462000207770ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2020 Sunlei 

from xml.etree import ElementTree

from nvchecker.api import session, RichResult

XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'
SPARKLE_NAMESPACE = 'http://www.andymatuschak.org/xml-namespaces/sparkle'

async def get_version(name, conf, *, cache, **kwargs):
  sparkle = conf['sparkle']
  release_notes_language = conf.get('release_notes_language', 'en')
  return await cache.get((sparkle, release_notes_language), get_version_impl)


async def get_version_impl(info):
  sparkle, release_notes_language = info
  res = await session.get(sparkle)
  root = ElementTree.fromstring(res.body).find('./channel/item[1]')
  item = root.find('./enclosure')

  version_string = item.get(f'{{{SPARKLE_NAMESPACE}}}shortVersionString')
  build_number = item.get(f'{{{SPARKLE_NAMESPACE}}}version')

  # Some feeds put the build number in shortVersionString and the marketing
  # version in version; swap them back if the fields look reversed.
  if (version_string and version_string.isdigit()) and (
    build_number and not build_number.isdigit()
  ):
    version_string, build_number = build_number, version_string

  version = []

  if version_string:
    version.append(version_string)
  if build_number and (build_number not in version):
    version.append(build_number)

  version_str = '-'.join(version) if version else None

  release_notes_link = None
  for release_notes in root.findall(f'./{{{SPARKLE_NAMESPACE}}}releaseNotesLink'):
    language = release_notes.get(f'{{{XML_NAMESPACE}}}lang')

    # If the release notes have no language set, store them, but keep looking for our preferred language
    if language is None:
      release_notes_link = release_notes.text.strip()

    # If the release notes match our preferred language, store them and stop looking
    if language == release_notes_language:
      release_notes_link = release_notes.text.strip()
      break

  if release_notes_link is not None:
    return RichResult(
      version = version_str,
      url = release_notes_link,
    )
  else:
    return version_str
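
# Example configuration (a sketch; the appcast URL is illustrative):
#
#   [example]
#   source = "sparkle"
#   sparkle = "https://example.org/appcast.xml"
#   release_notes_language = "en"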
nvchecker-2.17/nvchecker_source/ubuntupkg.py000066400000000000000000000025601476544462000213600ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017 Felix Yan , et al.

from nvchecker.api import RichResult, GetVersionError

URL = 'https://api.launchpad.net/1.0/ubuntu/+archive/primary?ws.op=getPublishedSources&source_name=%s&exact_match=true'

async def get_version(name, conf, *, cache, **kwargs):
  pkg = conf.get('ubuntupkg') or name
  strip_release = conf.get('strip_release', False)
  suite = conf.get('suite')
  url = URL % pkg

  if suite:
    suite = "https://api.launchpad.net/1.0/ubuntu/" + suite

  releases = []

  while not releases:
    data = await cache.get_json(url)

    if not data.get('entries'):
      raise GetVersionError('Ubuntu package not found')

    releases = [r for r in data["entries"] if r["status"] == "Published"]

    if suite:
      releases = [r for r in releases if r["distro_series_link"] == suite]

    if "next_collection_link" not in data:
      break

    url = data["next_collection_link"]

  if not releases:
    raise GetVersionError('Ubuntu package not found')

  if strip_release:
    version = releases[0]['source_package_version'].split("-")[0]
  else:
    version = releases[0]['source_package_version']

  return RichResult(
    version = version,
    url = f'https://packages.ubuntu.com/{releases[0]["distro_series_link"].rsplit("/", 1)[-1]}/{pkg}',
  )
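
# Example configuration (a sketch; the package and suite are illustrative):
#
#   [example]
#   source = "ubuntupkg"
#   ubuntupkg = "nginx"
#   suite = "noble"
#   strip_release = true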
nvchecker-2.17/nvchecker_source/vsmarketplace.py000066400000000000000000000024601476544462000221740ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2021 Th3Whit3Wolf , et al.

from nvchecker.api import (
  VersionResult, Entry, AsyncCache, KeyManager,
  TemporaryError, session, RichResult, GetVersionError,
)

API_URL = 'https://marketplace.visualstudio.com/_apis/public/gallery/extensionquery'

HEADERS = {
  'Accept': 'application/json;api-version=6.1-preview.1',
  'Content-Type': 'application/json'
}

async def get_version(name: str, conf: Entry, *, cache: AsyncCache, **kwargs):
  name = conf.get('vsmarketplace') or name

  q = {
    'filters': [
      {
        'criteria': [
          {
            'filterType': 8,
            'value': 'Microsoft.VisualStudio.Code'
          },
          {
            'filterType': 7,
            'value': name
          },
          {
            'filterType': 12,
            'value': '4096'
          }
        ],
        'pageNumber': 1,
        'pageSize': 2,
        'sortBy': 0,
        'sortOrder': 0
      }
    ],
    'assetTypes': [],
    'flags': 946
  }

  res = await session.post(
    API_URL,
    headers = HEADERS,
    json = q,
  )
  j = res.json()

  version = j['results'][0]['extensions'][0]['versions'][0]['version']
  return RichResult(
    version = version,
    url = f'https://marketplace.visualstudio.com/items?itemName={name}',
  )
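
# Example configuration (a sketch; the extension ID is illustrative and takes
# the "publisher.extension" form used in the marketplace):
#
#   [example]
#   source = "vsmarketplace"
#   vsmarketplace = "vscodevim.vim"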
nvchecker-2.17/pyproject.toml000066400000000000000000000004701476544462000163440ustar00rootroot00000000000000[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"

[tool.pytest.ini_options]
# addopts = -n auto
asyncio_mode = "strict"
asyncio_default_fixture_loop_scope = "session"

# build and upload
# rm -rf dist && python -m build --no-isolation && twine check dist/* && twine upload dist/*
nvchecker-2.17/sample_config.toml000066400000000000000000000012071476544462000171320ustar00rootroot00000000000000[__config__]
oldver = "old_ver.json"
newver = "new_ver.json"

[google-chrome]
source = "cmd"
cmd = '''wget -qO- http://dl.google.com/linux/chrome/rpm/stable/x86_64/repodata/other.xml.gz | zgrep -A1 "google-chrome-stable" | awk -F\" '/version/ {print $4"-"$6}' '''

[fbcat]
source = "aur"

[winterpy]
source = "github"
github = "lilydjwg/winterpy"

[nvchecker]
source = "github"
github = "lilydjwg/nvchecker"

[ssed]
source = "regex"
regex = "The current version is ([\\d.]+)\\."
url = "https://sed.sourceforge.net/grabbag/ssed/"
proxy = "http://localhost:8087"

[PySide]
source = "pypi"
pypi = "nvchecker"

[test]
source = "manual"
manual = "0.1"
nvchecker-2.17/scripts/000077500000000000000000000000001476544462000151165ustar00rootroot00000000000000nvchecker-2.17/scripts/README.rst000066400000000000000000000000451476544462000166040ustar00rootroot00000000000000Additional scripts may help someone.
nvchecker-2.17/scripts/nvchecker-ini2toml000077500000000000000000000037401476544462000205530ustar00rootroot00000000000000#!/usr/bin/python3
# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.

import argparse

import configparser
import toml

_handler_precedence = (
  'github', 'aur', 'pypi', 'archpkg', 'debianpkg', 'ubuntupkg',
  'gems', 'pacman',
  'cmd', 'bitbucket', 'regex', 'manual', 'vcs',
  'cratesio', 'npm', 'hackage', 'cpan', 'gitlab', 'packagist',
  'repology', 'anitya', 'android_sdk', 'sparkle', 'gitea'
)

BOOL_KEYS = [
  'strip_release', 'use_last_modified',
  'use_latest_release', 'use_latest_tag',
  'use_max_release', 'use_max_tag', 'use_pre_release',
]

INT_KEYS = [
  'max_page',
]

def main():
  parser = argparse.ArgumentParser(description='convert 1.x ini file to 2.x toml file')
  parser.add_argument('ini', type=argparse.FileType(),
                      help='the old ini file')
  parser.add_argument('toml', type=argparse.FileType(mode='w'),
                      help='the new toml file')
  args = parser.parse_args()

  old = configparser.ConfigParser(
    dict_type=dict, allow_no_value=True, interpolation=None,
  )
  old.read_file(args.ini)

  if '__config__' in old:
    c = old['__config__']
    newconf = dict(c)
    x = newconf.pop('max_concurrent', None)
    if x is not None:
      newconf['max_concurrency'] = x
    confs = {'__config__': newconf}
  else:
    confs = {}

  for section in old.sections():
    if section == '__config__':
      continue

    conf = old[section]
    newconf = {}

    for key in _handler_precedence:
      if key not in conf:
        continue
      newconf['source'] = key
      if conf.get(key):
        newconf[key] = conf.get(key)
      break

    dconf = dict(conf)

    for k, v in dconf.items():
      if '-' in k:
        k = k.replace('-', '_')

      if k in BOOL_KEYS:
        newconf[k] = conf.getboolean(k)
      elif k in INT_KEYS:
        newconf[k] = conf.getint(k)
      elif v != '':
        newconf[k] = v

    confs[section] = newconf

  toml.dump(confs, args.toml)
  args.toml.flush()

if __name__ == '__main__':
  main()
nvchecker-2.17/scripts/nvchecker-notify000077500000000000000000000037751476544462000203360ustar00rootroot00000000000000#!/usr/bin/env python3
# MIT licensed
# Copyright (c) 2020,2022 lilydjwg , et al.

'''
A simple wrapper to show desktop notifications while running nvchecker.
'''

import os
import subprocess
import json

import gi
try:
  gi.require_version('Notify', '0.8')
except ValueError:
  gi.require_version('Notify', '0.7')
from gi.repository import Notify

def get_args():
  import argparse
  parser = argparse.ArgumentParser(description='show desktop notifications while running nvchecker')
  parser.add_argument('-c', '--file',
                      metavar='FILE', type=str,
                      help='software version configuration file if not default')
  parser.add_argument('-k', '--keyfile',
                      metavar='FILE', type=str,
                      help='use specified keyfile (override the one in configuration file)')
  parser.add_argument('-t', '--tries', default=1, type=int, metavar='N',
                      help='try N times when network errors occur')
  parser.add_argument('--failures', action='store_true',
                      help='exit with code 3 if failures / errors happen during checking')

  return parser.parse_args()

def main():
  args = get_args()

  Notify.init('nvchecker')
  notif = Notify.Notification()
  updates = []

  rfd, wfd = os.pipe()
  cmd = [
    'nvchecker', '--logger', 'both', '--json-log-fd', str(wfd),
  ]
  if args.file:
    cmd.extend(['-c', args.file])
  if args.keyfile:
    cmd.extend(['-k', args.keyfile])
  if args.tries:
    cmd.extend(['-t', str(args.tries)])
  if args.failures:
    cmd.append('--failures')

  process = subprocess.Popen(cmd, pass_fds=(wfd,))
  os.close(wfd)

  output = os.fdopen(rfd)
  for l in output:
    j = json.loads(l)
    event = j['event']
    if event == 'updated':
      updates.append('%(name)s updated to version %(version)s' % j)
      notif.update('nvchecker', '\n'.join(updates))
      notif.show()

  ret = process.wait()
  if ret != 0:
    raise subprocess.CalledProcessError(ret, cmd)

if __name__ == '__main__':
  main()
nvchecker-2.17/scripts/nvtake.bash_completion000066400000000000000000000010651476544462000215000ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.

_nvtake() {
    local cur _nvchecker_conf_mtime
    _init_completion || return

    _nvchecker_conf_mtime="$(stat -c %Y $HOME/.config/nvchecker/*)"
    if [ -z "$_nvtake_completion_cache" -o "$_nvchecker_conf_mtime" != "$_nvchecker_conf_mtime_cached" ]; then
        _nvtake_completion_cache="$(nvcmp -q)"
        _nvchecker_conf_mtime_cached="$_nvchecker_conf_mtime"
    fi

    COMPREPLY=( $(compgen -W "$_nvtake_completion_cache" -- "$cur") )
} &&
    complete -F _nvtake nvtake
nvchecker-2.17/scripts/run_cached_tests000077500000000000000000000015341476544462000203640ustar00rootroot00000000000000#!/bin/bash -e

mitmdump=${mitmdump:-mitmdump}

if [[ -f ~/.mitmproxy/nvdump ]]; then
  $mitmdump -S ~/.mitmproxy/nvdump -p 7890 --ignore-hosts '127\.0\.0\.1' --server-replay-reuse --server-replay-extra=forward -w newdump >mitmdump_output &
else
  $mitmdump -w ~/.mitmproxy/nvdump -p 7890 --ignore-hosts '127\.0\.0\.1' >mitmdump_output &
fi

mitm_pid=$!

on_exit () {
  kill -INT $mitm_pid

  if [[ -s newdump ]]; then
    cat newdump >> ~/.mitmproxy/nvdump
  fi

  cat mitmdump_output
}

trap on_exit EXIT

if [[ -f keyfile.toml ]]; then 
  export KEYFILE=keyfile.toml
fi

for _ in {1..10}; do
  if [[ -s ~/.mitmproxy/mitmproxy-ca-cert.pem ]]; then
    break
  fi
  sleep 1
done

export SSL_CERT_FILE=$HOME/.mitmproxy/mitmproxy-ca-cert.pem
export GIT_SSL_CAINFO=$SSL_CERT_FILE
export http_proxy=http://localhost:7890 https_proxy=http://localhost:7890

pytest
nvchecker-2.17/setup.cfg000066400000000000000000000035461476544462000152600ustar00rootroot00000000000000# The complex upload command:
# rm -rf dist && python -m build --sdist && twine check dist/* && twine upload -s dist/*

[metadata]
name = nvchecker
version = attr: nvchecker.__version__
author = lilydjwg
author_email = lilydjwg@gmail.com
description = New version checker for software
license = MIT
keywords = new, version, build, check
url = https://github.com/lilydjwg/nvchecker
long_description = file: README.rst
long_description_content_type = text/x-rst
platforms = any

classifiers =
  Development Status :: 5 - Production/Stable
  Environment :: Console
  Intended Audience :: Developers
  Intended Audience :: System Administrators
  License :: OSI Approved :: MIT License
  Operating System :: OS Independent
  Programming Language :: Python
  Programming Language :: Python :: 3
  Programming Language :: Python :: 3 :: Only
  Programming Language :: Python :: 3.8
  Programming Language :: Python :: 3.9
  Programming Language :: Python :: 3.10
  Programming Language :: Python :: 3.11
  Programming Language :: Python :: 3.12
  Programming Language :: Python :: 3.13
  Topic :: Internet
  Topic :: Internet :: WWW/HTTP
  Topic :: Software Development
  Topic :: System :: Archiving :: Packaging
  Topic :: System :: Software Distribution
  Topic :: Utilities

[options]
zip_safe = True
python_requires = >=3.8

packages = find_namespace:
install_requires =
  tomli; python_version<"3.11"
  structlog
  platformdirs
  tornado>=6
  pycurl
scripts =
  scripts/nvchecker-ini2toml
  scripts/nvchecker-notify

[options.packages.find]
exclude = tests, build*, docs*

[options.extras_require]
vercmp =
  pyalpm
awesomeversion =
  awesomeversion
pypi =
  packaging
htmlparser =
  lxml
rpmrepo =
  lxml
jq =
  jq

[options.entry_points]
console_scripts =
  nvchecker = nvchecker.__main__:main
  nvtake = nvchecker.tools:take
  nvcmp = nvchecker.tools:cmp

[flake8]
ignore = E111, E302, E501
nvchecker-2.17/tests/000077500000000000000000000000001476544462000145715ustar00rootroot00000000000000nvchecker-2.17/tests/__init__.py000066400000000000000000000001131476544462000166750ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.

nvchecker-2.17/tests/conftest.py000066400000000000000000000053761476544462000170030ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020, 2024 lilydjwg , et al.

import asyncio
import structlog
import os
from pathlib import Path
from typing import TYPE_CHECKING, Dict

if TYPE_CHECKING:
  import tomli as tomllib
else:
  try:
    import tomllib
  except ModuleNotFoundError:
    import tomli as tomllib

import pytest
import pytest_asyncio

from nvchecker import core
from nvchecker import __main__ as main
from nvchecker.util import Entries, ResultData, RawResult

use_keyfile = False

async def run(
  entries: Entries, max_concurrency: int = 20,
) -> Dict[str, str]:
  task_sem = asyncio.Semaphore(max_concurrency)
  result_q: asyncio.Queue[RawResult] = asyncio.Queue()
  keyfile = os.environ.get('KEYFILE')
  if use_keyfile and keyfile:
    filepath = Path(keyfile)
    keymanager = core.KeyManager(filepath)
  else:
    keymanager = core.KeyManager(None)

  dispatcher = core.setup_httpclient()
  entry_waiter = core.EntryWaiter()
  futures = dispatcher.dispatch(
    entries, task_sem, result_q,
    keymanager, entry_waiter, 1, {},
  )

  oldvers: ResultData = {}
  result_coro = core.process_result(oldvers, result_q, entry_waiter)
  runner_coro = core.run_tasks(futures)

  results, _has_failures = await main.run(result_coro, runner_coro)
  return {k: r.version for k, r in results.items()}

@pytest_asyncio.fixture(scope="session")
async def get_version():
  async def __call__(name, config):
    entries = {name: config}
    newvers = await run(entries)
    return newvers.get(name)

  return __call__

@pytest_asyncio.fixture(scope="session")
async def run_str():
  async def __call__(conf_str):
    entries = tomllib.loads(conf_str)
    newvers = await run(entries)
    return newvers.popitem()[1]

  return __call__

@pytest_asyncio.fixture(scope="session")
async def run_str_multi():
  async def __call__(conf_str):
    entries = tomllib.loads(conf_str)
    newvers = await run(entries)
    return newvers

  return __call__

@pytest.fixture(scope="session", autouse=True)
def raise_on_logger_msg():
  def proc(logger, method_name, event_dict):
    if method_name in ('warning', 'error'):
      if 'exc_info' in event_dict:
        exc = event_dict['exc_info']
        if isinstance(exc, Exception):
          raise exc
        else: # exc_info=True
          raise
      if not event_dict['event'].startswith(('rate limited', 'no-result')):
        raise RuntimeError(event_dict['event'])
    return event_dict['event']

  structlog.configure([proc])

def pytest_configure(config):
  # register an additional marker
  config.addinivalue_line(
    'markers', 'needs_net: mark test to require Internet access',
  )

@pytest.fixture
def keyfile():
  global use_keyfile
  if 'KEYFILE' not in os.environ:
    pytest.skip('KEYFILE not set')
    return

  use_keyfile = True
  yield
  use_keyfile = False
nvchecker-2.17/tests/test_alpm.py000066400000000000000000000053161476544462000171400ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 DDoSolitary , et al.

import pathlib
import shutil
import subprocess
import tempfile

import pytest

pytestmark = [
  pytest.mark.asyncio,
  pytest.mark.skipif(shutil.which('makepkg') is None, reason='requires makepkg command'),
  pytest.mark.skipif(shutil.which('repo-add') is None, reason='requires repo-add command')
]

global temp_dir, db_path


def setup_module(module):
  global temp_dir, db_path
  temp_dir = tempfile.TemporaryDirectory()
  temp_path = pathlib.Path(temp_dir.name)
  pkg_path = temp_path / 'test-pkg'
  pkg_path.mkdir()
  with (pkg_path / 'PKGBUILD').open('w') as f:
    f.write(
      'pkgname=test-pkg\n'
      'pkgver=1.2.3\n'
      'pkgrel=4\n'
      'arch=(any)\n'
      'provides=("test-provides=5.6-7" "test-provides-unversioned")\n'
      'options=(!debug)\n'
    )
  subprocess.check_call(['makepkg', '--nosign'], cwd=pkg_path)
  pkg_file = subprocess.check_output(['makepkg', '--packagelist'], cwd=pkg_path, text=True).strip()
  db_path = pkg_path / 'test-db'
  db_path.mkdir()
  repo_path = db_path / 'sync'
  repo_path.mkdir()
  subprocess.check_call([
    'repo-add',
    repo_path / 'test-repo.db.tar.gz',
    pkg_path / pkg_file
  ])


def teardown_module(module):
  temp_dir.cleanup()


async def test_alpm(get_version):
  assert await get_version('test-pkg', {
    'source': 'alpm',
    'dbpath': str(db_path),
    'repo': 'test-repo'
  }) == '1.2.3-4'


async def test_alpm_strip(get_version):
  assert await get_version('test-pkg', {
    'source': 'alpm',
    'dbpath': str(db_path),
    'repo': 'test-repo',
    'strip_release': True
  }) == '1.2.3'


async def test_alpm_provided(get_version):
  assert await get_version('test-pkg', {
    'source': 'alpm',
    'dbpath': str(db_path),
    'repo': 'test-repo',
    'provided': 'test-provides'
  }) == '5.6-7'


async def test_alpm_provided_strip(get_version):
  assert await get_version('test-pkg', {
    'source': 'alpm',
    'dbpath': str(db_path),
    'repo': 'test-repo',
    'provided': 'test-provides',
    'strip_release': True
  }) == '5.6'


async def test_alpm_missing_repo(get_version):
  with pytest.raises(RuntimeError):
    await get_version('test-pkg', {
      'source': 'alpm',
      'dbpath': str(db_path),
      'repo': 'wrong-repo'
    })


async def test_alpm_missing_pkg(get_version):
  with pytest.raises(RuntimeError):
    await get_version('wrong-pkg', {
      'source': 'alpm',
      'dbpath': str(db_path),
      'repo': 'test-repo'
    })


async def test_alpm_missing_provides(get_version):
  with pytest.raises(RuntimeError):
    await get_version('test-pkg', {
      'source': 'alpm',
      'dbpath': str(db_path),
      'repo': 'test-repo',
      'provided': 'wrong-provides'
    })
nvchecker-2.17/tests/test_alpmfiles.py000066400000000000000000000023251476544462000201600ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2023 Pekka Ristola , et al.

import pathlib
import shutil
import subprocess
import tempfile

import pytest

pytestmark = [
  pytest.mark.asyncio,
  pytest.mark.skipif(shutil.which('pacman') is None, reason='requires pacman command'),
  pytest.mark.skipif(shutil.which('fakeroot') is None, reason='requires fakeroot command'),
]

global temp_dir, db_path


def setup_module(module):
  global temp_dir, db_path

  temp_dir = tempfile.TemporaryDirectory()
  temp_path = pathlib.Path(temp_dir.name)
  db_path = temp_path / 'test-db'

  db_path.mkdir(exist_ok=True)

  cmd = ['fakeroot', 'pacman', '-Fy', '--dbpath', db_path]
  subprocess.check_call(cmd)


def teardown_module(module):
  temp_dir.cleanup()


async def test_alpmfiles(get_version):
  assert await get_version('test', {
    'source': 'alpmfiles',
    'pkgname': 'libuv',
    'filename': 'usr/lib/libuv\\.so\\.([^.]+)',
    'dbpath': db_path,
  }) == '1'

async def test_alpmfiles_strip(get_version):
  assert await get_version('test', {
    'source': 'alpmfiles',
    'pkgname': 'glibc',
    'repo': 'core',
    'filename': 'libc\\.so\\.[^.]+',
    'strip_dir': True,
    'dbpath': db_path,
  }) == 'libc.so.6'
nvchecker-2.17/tests/test_android_sdk.py000066400000000000000000000033331476544462000204650ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017 Chih-Hsuan Yen 

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@pytest.mark.flaky(reruns=10)
async def test_android_addon(get_version):
    assert await get_version("android-google-play-apk-expansion", {
        "source": "android_sdk",
        "android_sdk": "extras;google;market_apk_expansion",
        "repo": "addon",
    }) == "1.r03"

async def test_android_package(get_version):
    version = await get_version("android-sdk-cmake", {
        "source": "android_sdk",
        "android_sdk": "cmake;",
        "repo": "package",
    })
    assert version.startswith("3.")


async def test_android_package_channel(get_version):
    assert await get_version("android-sdk-cmake", {
        "source": "android_sdk",
        "android_sdk": "ndk;",
        "repo": "package",
        "channel": "beta,dev,canary",
    }) == "26.0.10636728"

async def test_android_list(get_version):
    assert await get_version("android-sdk-cmake-older", {
        "source": "android_sdk",
        "android_sdk": "cmake;",
        "repo": "package",
        "include_regex": r"3\.10.*",
    }) == "3.10.2"

async def test_android_package_os(get_version):
    assert await get_version("android-usb-driver", {
        "source": "android_sdk",
        "android_sdk": "extras;google;usb_driver",
        "repo": "addon",
        "host_os": "windows"
    }) == "13"

async def test_android_package_os_missing(get_version):
    assert await get_version("android-usb-driver", {
        "source": "android_sdk",
        "android_sdk": "extras;google;usb_driver",
        "repo": "addon",
        "host_os": "linux"
    }) is None
nvchecker-2.17/tests/test_anitya.py000066400000000000000000000011101476544462000174600ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017 Felix Yan , et al.

import re

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_anitya(get_version):
  version = await get_version("shutter", {
    "source": "anitya",
    "anitya": "fedora/shutter",
  })
  assert re.match(r"[0-9.]+", version)

async def test_anitya_by_id(get_version):
  version = await get_version("shutter", {
    "source": "anitya",
    "anitya_id": "4813",
  })
  assert re.match(r"[0-9.]+", version)
nvchecker-2.17/tests/test_apt.py000066400000000000000000000025101476544462000167640ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020-2021 lilydjwg , et al.
# Copyright (c) 2017 Felix Yan , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@pytest.mark.flaky(reruns=10)
async def test_apt(get_version):
    assert await get_version("sigrok-firmware-fx2lafw", {
        "source": "apt",
        "mirror": "http://deb.debian.org/debian/",
        "suite": "sid",
    }) == "0.1.7-3"

@pytest.mark.flaky(reruns=10)
async def test_apt_srcpkg(get_version):
    ver = await get_version("test", {
        "source": "apt",
        "srcpkg": "golang-github-dataence-porter2",
        "mirror": "http://deb.debian.org/debian/",
        "suite": "sid",
    })
    assert ver.startswith("0.0~git20150829.56e4718-")

@pytest.mark.flaky(reruns=10)
async def test_apt_strip_release(get_version):
    assert await get_version("sigrok-firmware-fx2lafw", {
        "source": "apt",
        "mirror": "http://deb.debian.org/debian/",
        "suite": "sid",
        "strip_release": 1,
    }) == "0.1.7"

@pytest.mark.skip
@pytest.mark.flaky(reruns=10)
async def test_apt_deepin(get_version):
    assert await get_version("sigrok-firmware-fx2lafw", {
        "source": "apt",
        "mirror": "https://community-packages.deepin.com/deepin",
        "suite": "apricot",
    }) == "0.1.6-1"

nvchecker-2.17/tests/test_archpkg.py000066400000000000000000000015611476544462000176240ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@pytest.mark.flaky
async def test_archpkg(get_version):
    assert await get_version("base", {
        "source": "archpkg",
    }) == "3-2"

@pytest.mark.flaky
async def test_archpkg_strip_release(get_version):
    assert await get_version("base", {
        "source": "archpkg",
        "strip_release": True,
    }) == "3"

@pytest.mark.flaky
async def test_archpkg_provided(get_version):
    assert await get_version("dbus", {
        "source": "archpkg",
        "provided": "libdbus-1.so",
    }) == "3-64"

@pytest.mark.flaky
async def test_archpkg_provided_strip(get_version):
    int(await get_version("jsoncpp", {
        "source": "archpkg",
        "provided": "libjsoncpp.so",
        "strip_release": True,
    }))

nvchecker-2.17/tests/test_aur.py000066400000000000000000000013131476544462000167670ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio,
              pytest.mark.needs_net]

@pytest.mark.flaky(reruns=10)
async def test_aur(get_version):
    assert await get_version("ssed", {
        "source": "aur",
    }) == "3.62-2"

@pytest.mark.flaky(reruns=10)
async def test_aur_strip_release(get_version):
    assert await get_version("ssed", {
        "source": "aur",
        "strip_release": 1,
    }) == "3.62"

@pytest.mark.flaky(reruns=10)
async def test_aur_use_last_modified(get_version):
    assert await get_version("ssed", {
        "source": "aur",
        'use_last_modified': True,
    }) == "3.62-2-20150725052412"
nvchecker-2.17/tests/test_bitbucket.py000066400000000000000000000026631476544462000201650ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_bitbucket(get_version):
    assert await get_version("example", {
        "source": "bitbucket",
        "bitbucket": "prawee/git-tag",
    }) == "20150303"

async def test_bitbucket_max_tag(get_version):
    assert await get_version("example", {
        "source": "bitbucket",
        "bitbucket": "prawee/git-tag",
        "use_max_tag": True,
    }) == "1.7.0"

async def test_bitbucket_max_tag_with_ignored(get_version):
    assert await get_version("example", {
        "source": "bitbucket",
        "bitbucket": "prawee/git-tag",
        "use_max_tag": True,
        "ignored": "1.6.0 1.7.0",
    }) == "v1.5"

async def test_bitbucket_sorted_tags(get_version):
    assert await get_version("example", {
        "source": "bitbucket",
        "bitbucket": "prawee/git-tag",
        "use_sorted_tags": True,
    }) == "1.7.0"

    assert await get_version("example", {
        "source": "bitbucket",
        "bitbucket": "prawee/git-tag",
        "use_sorted_tags": True,
        "query": 'name~"v"',
    }) == "v1.5"

    assert await get_version("example", {
        "source": "bitbucket",
        "bitbucket": "berkeleylab/gasnet",
        "use_sorted_tags": True,
        "query": 'name~"CVS/BERKELEY_UPC" AND name!~"rc"',
        "prefix": "CVS/BERKELEY_UPC_",
    }) == "2_18_0"
nvchecker-2.17/tests/test_cache.py000066400000000000000000000005521476544462000172470ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.

import pytest
pytestmark = pytest.mark.asyncio

async def test_cache(run_str_multi):
  conf = r'''
[cache-1]
source = "cmd"
cmd = "bash -c 'echo $RANDOM'"

[cache-2]
source = "cmd"
cmd = "bash -c 'echo $RANDOM'"
'''

  r = await run_str_multi(conf)
  assert r['cache-1'] == r['cache-2']
nvchecker-2.17/tests/test_cmd.py000066400000000000000000000012521476544462000167450ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import time
import pytest
pytestmark = pytest.mark.asyncio

async def test_cmd(get_version):
    assert await get_version("example", {
        "source": "cmd",
        "cmd": "echo Meow",
    }) == "Meow"

async def test_cmd_complex(get_version):
    assert await get_version("example", {
        "source": "cmd",
        "cmd": "echo Meow | sed 's/meow/woof/i'",
    }) == "woof"

async def test_cmd_with_percent(run_str):
    test_conf = '''\
[example]
source = "cmd"
cmd = "date +%Y-%m-%d"'''
    date = await run_str(test_conf)
    expected = time.strftime('%Y-%m-%d')
    assert date == expected

nvchecker-2.17/tests/test_combiner.py000066400000000000000000000006441476544462000200040ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2021 lilydjwg , et al.

import pytest
pytestmark = pytest.mark.asyncio

async def test_combiner(run_str_multi):
  conf = r'''
[entry-1]
source = "cmd"
cmd = "echo 1"

[entry-2]
source = "cmd"
cmd = "echo 2"

[entry-3]
source = "combiner"
from = ["entry-1", "entry-2", "entry-2"]
format = "$1-$2-$3"
'''

  r = await run_str_multi(conf)
  assert r['entry-3'] == '1-2-2'
nvchecker-2.17/tests/test_container.py000066400000000000000000000035571476544462000201760ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Chih-Hsuan Yen 

import os
import datetime

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net,
             pytest.mark.skipif(bool(os.environ.get('GITHUB_RUN_ID')), reason="400 very often")]

async def test_container(get_version):
  assert await get_version("hello-world", {
    "source": "container",
    "container": "library/hello-world",
    "include_regex": "linux",
  }) == "linux"

async def test_container_with_tag(get_version):
  update_time = await get_version("bitnami/mongodb:5.0", {
    "source": "container",
    "container": "bitnami/mongodb:5.0",
  })
  # the update time changes occasionally, so we cannot compare the exact time, otherwise the test would fail in the future
  assert datetime.date.fromisoformat(update_time.split('T')[0]) > datetime.date(2023, 12, 1)

async def test_container_with_tag_and_multi_arch(get_version):
  update_time = await get_version("hello-world:linux", {
    "source": "container",
    "container": "library/hello-world:linux",
  })
  # the update time changes occasionally, so we cannot compare the exact time, otherwise the test would fail in the future
  assert datetime.date.fromisoformat(update_time.split('T')[0]) > datetime.date(2023, 1, 1)

async def test_container_with_tag_and_registry(get_version):
  update_time = await get_version("hello-world-nginx:v1.0", {
    "source": "container",
    "registry": "quay.io",
    "container": "redhattraining/hello-world-nginx:v1.0",
  })
  # the update time probably won't change
  assert datetime.date.fromisoformat(update_time.split('T')[0]) == datetime.date(2019, 6, 26)

async def test_container_paging(get_version):
  assert await get_version("prometheus-operator", {
    "source": "container",
    "registry": "quay.io",
    "container": "redhattraining/hello-world-nginx",
  }) == "v1.0"
nvchecker-2.17/tests/test_cpan.py000066400000000000000000000004511476544462000171230ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_cpan(get_version):
    assert await get_version("POE-Component-Server-HTTPServer", {
        "source": "cpan",
    }) == "0.9.2"
nvchecker-2.17/tests/test_cran.py000066400000000000000000000004631476544462000171300ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2022 Pekka Ristola , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_cran(get_version):
    ver = await get_version("xml2", {
        "source": "cran",
    })
    assert ver.startswith("1.3.")
nvchecker-2.17/tests/test_cratesio.py000066400000000000000000000016211476544462000200130ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_cratesio(get_version):
    assert await get_version("example", {
        "source": "cratesio",
    }) == "1.1.0"

async def test_cratesio_list(get_version):
    assert await get_version("example", {
        "source": "cratesio",
        "include_regex": r"^1\.0.*",
    }) == "1.0.2"

async def test_cratesio_skip_prerelease(get_version):
    with pytest.raises(RuntimeError, match='include_regex matched no versions'):
        await get_version("cargo-lock", {
            "source": "cratesio",
            "include_regex": r".*-.*",
        })

async def test_cratesio_use_prerelease(get_version):
    await get_version("cargo-lock", {
        "source": "cratesio",
        "use_pre_release": "true",
        "include_regex": r".*-.*",
    })
nvchecker-2.17/tests/test_debianpkg.py000066400000000000000000000014701476544462000201300ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017 Felix Yan , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@pytest.mark.flaky(reruns=10)
async def test_debianpkg(get_version):
    assert await get_version("sigrok-firmware-fx2lafw", {
        "source": "debianpkg",
    }) == "0.1.7-3"

@pytest.mark.flaky(reruns=10)
async def test_debianpkg_strip_release(get_version):
    assert await get_version("sigrok-firmware-fx2lafw", {
        "source": "debianpkg",
        "strip_release": 1,
    }) == "0.1.7"

@pytest.mark.flaky(reruns=10)
async def test_debianpkg_suite(get_version):
    assert await get_version("sigrok-firmware-fx2lafw", {
        "source": "debianpkg",
        "suite": "buster",
    }) == "0.1.6-1"
nvchecker-2.17/tests/test_gems.py000066400000000000000000000004211476544462000171320ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_gems(get_version):
    assert await get_version("example", {
        "source": "gems",
    }) == "1.0.2"
nvchecker-2.17/tests/test_git.py000066400000000000000000000015721476544462000167720ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_git(get_version):
    assert await get_version("example", {
        "source": "git",
        "git": "https://gitlab.com/gitlab-org/gitlab-test.git",
    }) == "v1.1.1"

async def test_git_commit(get_version):
    assert await get_version("example", {
        "source": "git",
        "git": "https://gitlab.com/gitlab-org/gitlab-test.git",
        "use_commit": True,
    }) == "ddd0f15ae83993f5cb66a927a28673882e99100b"

async def test_git_commit_branch(get_version):
    assert await get_version("example", {
        "source": "git",
        "git": "https://gitlab.com/gitlab-org/gitlab-test.git",
        "use_commit": True,
        "branch": "with-executables",
    }) == "6b8dc4a827797aa025ff6b8f425e583858a10d4f"
nvchecker-2.17/tests/test_gitea.py000066400000000000000000000012201476544462000172660ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio,
              pytest.mark.needs_net]

@pytest.mark.flaky(reruns=10)
async def test_gitea(get_version):
    ver = await get_version("example", {
        "source": "gitea",
        "gitea": "gitea/tea"})
    assert ver.startswith('20')
    assert 'T' in ver

@pytest.mark.flaky(reruns=10)
async def test_gitea_max_tag_with_include(get_version):
    assert await get_version("example", {
        "source": "gitea",
        "gitea": "gitea/tea",
        "use_max_tag": True,
        "include_regex": r'v0\.3.*',
    }) == "v0.3.1"
nvchecker-2.17/tests/test_github.py000066400000000000000000000075731476544462000175000ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import re

import pytest

pytestmark = [pytest.mark.asyncio,
              pytest.mark.needs_net,
              pytest.mark.usefixtures('keyfile')]

async def test_github(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "harry-sanabria/ReleaseTestRepo",
    }) == "20140122.012101"

async def test_github_default_not_master(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "MariaDB/server",
    }) is not None

async def test_github_latest_release(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "dpeukert/ReleaseTestRepo",
        "use_latest_release": True,
    }) == "v0.0.0"

async def test_github_latest_release_include_prereleases(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "dpeukert/ReleaseTestRepo",
        "use_latest_release": True,
        "include_prereleases": True,
    }) == "v0.0.1-pre"

async def test_github_max_tag(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "harry-sanabria/ReleaseTestRepo",
        "use_max_tag": True,
    }) == "second_release"

async def test_github_max_release(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "harry-sanabria/ReleaseTestRepo",
        "use_max_release": True,
    }) == "second_release"

    assert await get_version("example", {
        "source": "github",
        "github": "harry-sanabria/ReleaseTestRepo",
        "use_max_release": True,
        "use_release_name": True,
    }) == "second_release"

async def test_github_max_tag_with_ignored(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "harry-sanabria/ReleaseTestRepo",
        "use_max_tag": True,
        "ignored": "second_release release3",
    }) == "first_release"

async def test_github_max_release_with_ignored(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "harry-sanabria/ReleaseTestRepo",
        "use_max_release": True,
        "ignored": "second_release release3",
    }) == "first_release"
    assert await get_version("example", {
        "source": "github",
        "github": "harry-sanabria/ReleaseTestRepo",
        "use_max_release": True,
        "ignored": "second_release",
        "use_release_name": True,
    }) == "release #3"

async def test_github_with_path(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "petronny/ReleaseTestRepo",
        "path": "test_directory",
    }) == "20140122.012101"

async def test_github_with_path_and_branch(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "petronny/ReleaseTestRepo",
        "branch": "test",
        "path": "test_directory/test_directory",
    }) == "20190128.113201"

async def test_github_max_tag_with_include(get_version):
    version = await get_version("example", {
        "source": "github",
        "github": "EFForg/https-everywhere",
        "use_max_tag": True,
        "include_regex": r"chrome-\d.*",
    })
    assert re.match(r'chrome-[\d.]+', version)

async def test_github_max_release_with_include(get_version):
    version = await get_version("example", {
        "source": "github",
        "github": "EFForg/https-everywhere",
        "use_max_release": True,
        "use_release_name": True,
        "include_regex": r"Release \d.*",
    })
    assert re.match(r'Release [\d.]+', version)

async def test_github_latest_tag(get_version):
    assert await get_version("example", {
        "source": "github",
        "github": "harry-sanabria/ReleaseTestRepo",
        "use_latest_tag": True,
    }) == "release3"

nvchecker-2.17/tests/test_gitlab.py000066400000000000000000000024541476544462000174510ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_gitlab(get_version):
    ver = await get_version("example", {
        "source": "gitlab",
        "gitlab": "gitlab-org/gitlab-test",
    })
    assert len(ver) == 8
    assert ver.isdigit()

async def test_gitlab_blm(get_version):
    # repo with a custom main branch
    ver = await get_version("example", {
        "source": "gitlab",
        "gitlab": "asus-linux/asusctl",
    })
    assert len(ver) == 8
    assert ver.isdigit()

async def test_gitlab_max_tag(get_version):
    assert await get_version("example", {
        "source": "gitlab",
        "gitlab": "gitlab-org/gitlab-test",
        "use_max_tag": True,
    }) == "v1.1.1"

async def test_gitlab_max_tag_with_include(get_version):
    assert await get_version("example", {
        "source": "gitlab",
        "gitlab": "gitlab-org/gitlab-test",
        "use_max_tag": True,
        "include_regex": r'v1\.0.*',
    }) == "v1.0.0"

async def test_gitlab_max_tag_with_ignored(get_version):
    assert await get_version("example", {
        "source": "gitlab",
        "gitlab": "gitlab-org/gitlab-test",
        "use_max_tag": True,
        "ignored": "v1.1.0 v1.1.1",
    }) == "v1.0.0"

nvchecker-2.17/tests/test_go.py000066400000000000000000000014251476544462000166110ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2024 bgme .

import pytest

from nvchecker.api import HTTPError

try:
  import lxml
  lxml_available = True
except ImportError:
  lxml_available = False

pytestmark = [
  pytest.mark.asyncio,
  pytest.mark.needs_net,
  pytest.mark.skipif(not lxml_available, reason="needs lxml")
]


async def test_go(get_version):
  ver = await get_version("one version", {
    "source": "go",
    "go": "github.com/caddyserver/replace-response",
  })

  assert ver.startswith("v0.0.0-")

  assert await get_version("multiple version", {
    "source": "go",
    "go": "github.com/corazawaf/coraza-caddy",
  }) == "v1.2.2"

  with pytest.raises(HTTPError):
    await get_version("not found", {
      "source": "go",
      "go": "github.com/asdas/sadfasdf",
    })
nvchecker-2.17/tests/test_hackage.py000066400000000000000000000004721476544462000175700ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@pytest.mark.flaky(reruns=10)
async def test_hackage(get_version):
    assert await get_version("sessions", {
        "source": "hackage",
    }) == "2008.7.18"
nvchecker-2.17/tests/test_htmlparser.py000066400000000000000000000015141476544462000203640ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2021 ypsilik , et al.

import pytest

lxml_available = True
try:
  import lxml
except ImportError:
  lxml_available = False

pytestmark = [
  pytest.mark.asyncio,
  pytest.mark.needs_net,
  pytest.mark.skipif(not lxml_available, reason="needs lxml"),
]

async def test_xpath_ok(get_version):
    ver = await get_version("aur", {
        "source": "htmlparser",
        "url": "https://aur.archlinux.org/",
        "xpath": '//div[@id="footer"]/p[1]/a/text()',
    })
    assert ver.startswith('v')
    assert '.' in ver

async def test_xpath_element(get_version):
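    # the xpath selects an element node rather than text(); its text content is used as the version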
    ver = await get_version("aur", {
        "source": "htmlparser",
        "url": "https://aur.archlinux.org/",
        "xpath": '//div[@id="footer"]/p[1]/a',
    })
    assert ver.startswith('v')
    assert '.' in ver

nvchecker-2.17/tests/test_httpheader.py000066400000000000000000000016441476544462000203370ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2021,2024 lilydjwg , et al.

import pytest

httpbin_available = True
try:
  import pytest_httpbin
  assert pytest_httpbin # for pyflakes
except ImportError:
  httpbin_available = False

pytestmark = pytest.mark.asyncio

@pytest.mark.needs_net
async def test_redirection(get_version):
  assert await get_version("unifiedremote", {
    "source": "httpheader",
    "url": "https://www.unifiedremote.com/download/linux-x64-deb",
    "regex": r'urserver-([\d.]+).deb',
  }) is not None

@pytest.mark.skipif(not httpbin_available, reason="needs pytest_httpbin")
async def test_get_version_withtoken(get_version, httpbin):
  assert await get_version("unifiedremote", {
    "source": "httpheader",
    "url": httpbin.url + "/basic-auth/username/superpassword",
    "httptoken": "Basic dXNlcm5hbWU6c3VwZXJwYXNzd29yZA==",
    "header": "server",
    "regex": r'([0-9.]+)*',
  }) is not None
nvchecker-2.17/tests/test_jq.py000066400000000000000000000014541476544462000166200ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2024 Rocket Aaron , et al.

import pytest

jq_available = True
try:
  import jq
except ImportError:
  jq_available = False

pytestmark = [
  pytest.mark.asyncio,
  pytest.mark.needs_net,
  pytest.mark.skipif(not jq_available, reason="needs jq"),
]

async def test_jq(get_version):
    ver = await get_version("aur", {
        "source": "jq",
        "url": "https://aur.archlinux.org/rpc/v5/info?arg[]=nvchecker-git"
    })
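    # no filter configured: the default '.' passes the whole JSON document through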
    ver = ver.strip()
    assert ver.startswith("{")
    assert ver.endswith("}")

async def test_jq_filter(get_version):
    ver = await get_version("aur", {
        "source": "jq",
        "url": "https://aur.archlinux.org/rpc/v5/info?arg[]=nvchecker-git",
        "filter": '.results[0].PackageBase',
    })
    assert ver == "nvchecker-git"
nvchecker-2.17/tests/test_launchpad.py000066400000000000000000000005631476544462000201450ustar00rootroot00000000000000# MIT Licensed
# Copyright (c) 2024 Bert Peters , et al.
import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@pytest.mark.flaky(reruns=10)
async def test_launchpad(get_version):
  version = await get_version(
    "sakura",
    {
      "source": "launchpad",
      "launchpad": "sakura",
    }
  )

  assert version == '3.8.8'
nvchecker-2.17/tests/test_manual.py000066400000000000000000000004251476544462000174600ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = pytest.mark.asyncio

async def test_manual(get_version):
    assert await get_version("example", {
        "source": "manual",
        "manual": "Meow",
    }) == "Meow"
nvchecker-2.17/tests/test_mercurial.py000066400000000000000000000005661476544462000201740ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.

import pytest
pytestmark = [
  pytest.mark.asyncio,
  pytest.mark.needs_net,
]

@pytest.mark.skip
async def test_mercurial(get_version):
  assert await get_version("example", {
    "source": "mercurial",
    "mercurial": "https://repo.mercurial-scm.org/hg-website/json-tags",
  }) == "v1.0"
nvchecker-2.17/tests/test_npm.py000066400000000000000000000004171476544462000167760ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_npm(get_version):
    assert await get_version("example", {
        "source": "npm",
    }) == "0.0.0"
nvchecker-2.17/tests/test_opam.py000066400000000000000000000013301476544462000171330ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2024 Daniel Peukert , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_opam_official(get_version):
    assert await get_version("test", {
        "source": "opam",
        "pkg": "omigrate",
    }) == "0.3.2"

async def test_opam_coq(get_version):
    assert await get_version("test", {
        "source": "opam",
        "repo": "https://coq.inria.fr/opam/released",
        "pkg": "coq-abp",
    }) == "8.10.0"

async def test_opam_coq_trailing_slash(get_version):
    assert await get_version("test", {
        "source": "opam",
        "repo": "https://coq.inria.fr/opam/released/",
        "pkg": "coq-abp",
    }) == "8.10.0"
nvchecker-2.17/tests/test_openvsx.py000066400000000000000000000004751476544462000177120ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2021 Th3Whit3Wolf , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_openvsx(get_version):
    assert await get_version("usernamehw.indent-one-space", {
        "source": "openvsx",
    }) == "0.3.0"
nvchecker-2.17/tests/test_packagist.py000066400000000000000000000004651476544462000201550ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_packagist(get_version):
    assert await get_version("butterfly/example-web-application", {
        "source": "packagist",
    }) == "1.2.0"
nvchecker-2.17/tests/test_pacman.py000066400000000000000000000013441476544462000174430ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pathlib
import shutil
import pytest
pytestmark = [pytest.mark.asyncio,
              pytest.mark.skipif(shutil.which("pacman") is None,
                                 reason="requires pacman command"),
              pytest.mark.skipif(not pathlib.Path("/var/lib/pacman/sync/core.db").exists(),
                                 reason="requires synced pacman databases")]

async def test_pacman(get_version):
    assert await get_version("base", {
        "source": "pacman",
    }) == "3-2"

async def test_pacman_strip_release(get_version):
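    # strip_release drops the pkgrel ("-2"), leaving only the upstream pkgver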
    assert await get_version("base", {
        "source": "pacman",
        "strip_release": 1,
    }) == "3"
nvchecker-2.17/tests/test_pagure.py000066400000000000000000000014641476544462000174720ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_pagure(get_version):
    ver = await get_version("example", {
        "source": "pagure",
        "pagure": "nvchecker-test",
    })
    assert ver == "0.2"

async def test_pagure_with_ignored(get_version):
    ver = await get_version("example", {
        "source": "pagure",
        "pagure": "nvchecker-test",
        "ignored": "0.2",
    })
    assert ver == "0.1"

async def test_pagure_with_alternative_host(get_version):
    ver = await get_version("example", {
        "source": "pagure",
        "pagure": "rpms/glibc",
        "host": "src.fedoraproject.org",
        "include_regex": r"F-\d+-start",
    })
    assert ver == "F-13-start"
nvchecker-2.17/tests/test_pypi.py000066400000000000000000000021221476544462000171600ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_pypi(get_version):
    assert await get_version("example", {
        "source": "pypi",
    }) == "0.1.0"

async def test_pypi_release(get_version):
    assert await get_version("example-test-package", {
        "source": "pypi",
        "pypi": "example-test-package",
    }) == "1.0.0"

async def test_pypi_pre_release(get_version):
    assert await get_version("example-test-package", {
        "source": "pypi",
        "use_pre_release": 1,
    }) == "1.0.1a1"

async def test_pypi_list(get_version):
    assert await get_version("urllib3", {
        "source": "pypi",
        "include_regex": "^1\\..*",
    }) == "1.26.20"

async def test_pypi_invalid_version(get_version):
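    # sympy's release history includes versions that don't parse as PEP 440; just ensure this doesn't raise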
    await get_version("sympy", {
        "source": "pypi",
    })

async def test_pypi_yanked_version(get_version):
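    # the 2.0.0/2.0.1 matches are yanked and must be skipped, so 1.26.20 still wins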
    assert await get_version("urllib3", {
        "source": "pypi",
        "include_regex": "^(1\\..*)|(2\\.0\\.[0,1])",
    }) == "1.26.20"
nvchecker-2.17/tests/test_regex.py000066400000000000000000000101651476544462000173170ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020,2024 lilydjwg , et al.

import base64

import pytest

httpbin_available = True
try:
  import pytest_httpbin
  assert pytest_httpbin # for pyflakes
except ImportError:
  httpbin_available = False

pytestmark = [
  pytest.mark.asyncio,
  pytest.mark.skipif(not httpbin_available, reason="needs pytest_httpbin"),
]

def base64_encode(s):
  return base64.b64encode(s.encode('utf-8')).decode('ascii')

async def test_regex_httpbin_default_user_agent(get_version, httpbin):
  ua = await get_version("example", {
    "source": "regex",
    "url": httpbin.url + "/get",
    "regex": r'"User-Agent":\s*"([^"]+)"',
  })
  assert ua.startswith("lilydjwg/nvchecker")

async def test_regex_httpbin_user_agent(get_version, httpbin):
  assert await get_version("example", {
    "source": "regex",
    "url": httpbin.url + "/get",
    "regex": r'"User-Agent":\s*"(\w+)"',
    "user_agent": "Meow",
  }) == "Meow"

async def test_regex(get_version, httpbin):
  assert await get_version("example", {
    "source": "regex",
    "url": httpbin.url + "/base64/" + base64_encode("version 1.12 released"),
    "regex": r'version ([0-9.]+)',
  }) == "1.12"

async def test_missing_ok(get_version, httpbin):
  assert await get_version("example", {
    "source": "regex",
    "url": httpbin.url + "/base64/" + base64_encode("something not there"),
    "regex": "foobar",
    "missing_ok": True,
  }) is None

async def test_missing(get_version, httpbin):
  with pytest.raises(RuntimeError):
    await get_version("example", {
      "source": "regex",
      "url": httpbin.url + "/base64/" + base64_encode("something not there"),
      "regex": "foobar",
    })

async def test_multi_group(get_version, httpbin):
  with pytest.raises(RuntimeError):
    await get_version("example", {
      "source": "regex",
      "url": httpbin.url + "/base64/" + base64_encode("1.2"),
      "regex": r"(\d+)\.(\d+)",
    })

async def test_regex_with_tokenBasic(get_version, httpbin):
  assert await get_version("example", {
    "source": "regex",
    "url": httpbin.url + "/basic-auth/username/superpassword",
    "httptoken": "Basic dXNlcm5hbWU6c3VwZXJwYXNzd29yZA==",
    "regex": r'"user":\s*"([a-w]+)"',
  }) == "username"

async def test_regex_with_tokenBearer(get_version, httpbin):
  assert await get_version("example", {
    "source": "regex",
    "url": httpbin.url + "/bearer",
    "httptoken": "Bearer username:password",
    "regex": r'"token":\s*"([a-w]+):.*"',
  }) == "username"

async def test_regex_no_verify_ssl(get_version, httpbin_secure):
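  # httpbin_secure serves a self-signed certificate; verify_cert=False must accept it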
  assert await get_version("example", {
    "source": "regex",
    "url": httpbin_secure.url + "/base64/" + base64_encode("version 1.12 released"),
    "regex": r'version ([0-9.]+)',
    "verify_cert": False,
  }) == "1.12"

async def test_regex_bad_ssl(get_version, httpbin_secure):
  # with certificate verification left on, the self-signed cert must be rejected
  with pytest.raises(Exception):
    await get_version("example", {
      "source": "regex",
      "url": httpbin_secure.url + "/base64/" + base64_encode("version 1.12 released"),
      "regex": r'version ([0-9.]+)',
    })

async def test_regex_post(get_version, httpbin):
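  # without post_data_type, the body is sent as application/x-www-form-urlencoded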
  assert await get_version("example", {
    "source": "regex",
    "url": httpbin.url + "/post",
    "regex": r'"ABCDEF":\s*"(\w+)"',
    "post_data": "ABCDEF=234&CDEFG=xyz"
  }) == "234"

async def test_regex_post2(get_version, httpbin):
  assert await get_version("example", {
    "source": "regex",
    "url": httpbin.url + "/post",
    "regex": r'"CDEFG":\s*"(\w+)"',
    "post_data": "ABCDEF=234&CDEFG=xyz"
  }) == "xyz"

async def test_regex_post_json(get_version, httpbin):
  assert await get_version("example", {
    "source": "regex",
    "url": httpbin.url + "/post",
    "regex": r'"ABCDEF":\s*(\w+)',
    "post_data": '{"ABCDEF":234,"CDEFG":"xyz"}',
    "post_data_type": "application/json"
  }) == "234"

async def test_regex_post_json2(get_version, httpbin):
  assert await get_version("example", {
    "source": "regex",
    "url": httpbin.url + "/post",
    "regex": r'"CDEFG":\s*"(\w+)"',
    "post_data": '{"ABCDEF":234,"CDEFG":"xyz"}',
    "post_data_type": "application/json"
  }) == "xyz"
nvchecker-2.17/tests/test_repology.py000066400000000000000000000020721476544462000200430ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2019-2020 lilydjwg , et al.

import pytest
pytestmark = [pytest.mark.asyncio,
              pytest.mark.needs_net]

@pytest.mark.flaky(reruns=10)
async def test_repology(get_version):
  assert await get_version("ssed", {
        "source": "repology",
        "repo": "aur",
  }) == "3.62"

@pytest.mark.flaky(reruns=10)
async def test_repology_subrepo(get_version):
  assert await get_version("asciiquarium", {
        "source": "repology",
        "repo": "fedora_32",
        "subrepo": "release"
  }) == "1.1"

async def test_repology_bad_subrepo(get_version):
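  # both outcomes are acceptable: a None result, or a RuntimeError naming the bad subrepo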
  try:
    assert await get_version("asciiquarium", {
            "source": "repology",
            "repo": "fedora_32",
            "subrepo": "badsubrepo"
    }) is None
  except RuntimeError as e:
    assert "package is not found in subrepo" in str(e)

async def test_repology_no_repo(get_version):
  try:
    assert await get_version("ssed", {
        "source": "repology",
    }) is None
  except RuntimeError as e:
    assert "repo field is required" in str(e)
nvchecker-2.17/tests/test_rpmrepo.py000066400000000000000000000011701476544462000176650ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2024 Jakub Ružička , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_rpmrepo_fedora(get_version):
    assert await get_version("knot_fedora-39", {
        "source": "rpmrepo",
        "pkg": "knot",
        "repo": "http://ftp.sh.cvut.cz/fedora/linux/updates/39/Everything/x86_64/",
    }) == "3.3.9"

async def test_rpmrepo_alma(get_version):
    assert await get_version("knot_fedora-39", {
        "source": "rpmrepo",
        "pkg": "tmux",
        "repo": "http://ftp.sh.cvut.cz/almalinux/9.5/BaseOS/x86_64/os/",
    }) == "3.2a"
nvchecker-2.17/tests/test_simplerun.py000066400000000000000000000006531476544462000202240ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2022 lilydjwg , et al.

import sys
import tempfile
import subprocess

def test_simple_run():
  '''make sure the tool as a whole can run the simplest check'''
  with tempfile.NamedTemporaryFile(mode='w') as f:
    f.write('''\
[t]
source = "cmd"
cmd = "echo 1"
''')
    f.flush()
    subprocess.check_call([
      sys.executable, '-m', 'nvchecker',
      '-c', f.name,
    ])

nvchecker-2.17/tests/test_snapcraft.py000066400000000000000000000017371476544462000201730ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2025 Maxim Slipenko , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_snapcraft(get_version):
    assert await get_version("test", {
        "source": "snapcraft",
        "snap": "test-snapd-public",
        "channel": "edge",
    }) == "2.0"

async def test_snapcraft_non_existent_snap(get_version):
    with pytest.raises(RuntimeError, match='Failed to request snap info for not-existent-snap'):
        assert await get_version("test", {
            "source": "snapcraft",
            "snap": "not-existent-snap",
            "channel": "stable",
        })

async def test_snapcraft_non_existent_channel(get_version):
    with pytest.raises(RuntimeError, match='Failed to find version for test-snapd-public'):
        assert await get_version("test", {
            "source": "snapcraft",
            "snap": "test-snapd-public",
            "channel": "non-existent-channel",
        })
nvchecker-2.17/tests/test_sortversion.py000066400000000000000000000012531476544462000206000ustar00rootroot00000000000000import pytest

from nvchecker.sortversion import (
  parse_version,
  vercmp, vercmp_available,
  AwesomeVersion, awesomeversion_available,
)

def test_parse_version():
  assert parse_version("v6.0") < parse_version("6.1")
  assert parse_version("v6.0") > parse_version("v6.1-stable")

@pytest.mark.skipif(not vercmp_available,
                    reason="needs pyalpm")
def test_vercmp():
  assert vercmp("v6.0") < vercmp("v6.1-stable")

@pytest.mark.skipif(not awesomeversion_available,
                    reason="needs awesomeversion")
def test_awesomeversion():
  assert AwesomeVersion("v6.0") < AwesomeVersion("6.1")
  assert AwesomeVersion("v6.0") > AwesomeVersion("v6.0b0")

nvchecker-2.17/tests/test_sparkle.py000066400000000000000000000010451476544462000176430ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2020 Sunlei 

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_sparkle(get_version):
    assert await get_version('example', {
        'source': 'sparkle',
        'sparkle': (
            'https://raw.githubusercontent.com/sparkle-project/Sparkle/'
            'f453625573fc9a251760b65c74df59023b1471c1/Tests/Resources/'
            'testlocalizedreleasenotesappcast.xml'
        ),
    }) == '6.0'
nvchecker-2.17/tests/test_substitute.py000066400000000000000000000027511476544462000204220ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.

import pytest
pytestmark = pytest.mark.asyncio

async def test_substitute_prefix(get_version):
    assert await get_version("example", {
        "source": "manual",
        "manual": "v1.0",
        "prefix": "v",
    }) == "1.0"

async def test_substitute_prefix_missing_ok(get_version):
    assert await get_version("example", {
        "source": "manual",
        "manual": "1.0",
        "prefix": "v",
    }) == "1.0"

async def test_substitute_regex(get_version):
    assert await get_version("example", {
        "source": "manual",
        "manual": "r15c",
        "from_pattern": r"r(\d+)([a-z])",
        "to_pattern": r"r\1.\2",
    }) == "r15.c"

async def test_substitute_regex_missing_ok(get_version):
    assert await get_version("example", {
        "source": "manual",
        "manual": "r15",
        "from_pattern": r"r(\d+)([a-z])",
        "to_pattern": r"r\1.\2",
    }) == "r15"

async def test_substitute_regex_empty_to_pattern(get_version):
    assert await get_version("example", {
        "source": "manual",
        "manual": "15-debian",
        "from_pattern": r"-\w+$",
        "to_pattern": r"",
    }) == "15"

async def test_substitute_prefix_has_higher_priority(get_version):
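    # prefix is stripped first; from_pattern/to_pattern then rewrite what remains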
    assert await get_version("example", {
        "source": "manual",
        "manual": "Version 1.2 Beta 3",
        "prefix": "Version ",
        "from_pattern": r" Beta ",
        "to_pattern": r"b",
    }) == "1.2b3"
nvchecker-2.17/tests/test_ubuntupkg.py000066400000000000000000000020141476544462000202230ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020,2024 lilydjwg , et al.
# Copyright (c) 2017 Felix Yan , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@pytest.mark.flaky
async def test_ubuntupkg(get_version):
    v = await get_version("sigrok-firmware-fx2lafw", {
        "source": "ubuntupkg",
    })
    assert v.startswith("0.1.7-")

@pytest.mark.flaky
async def test_ubuntupkg_strip_release(get_version):
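    # strip_release drops the Debian revision, leaving the upstream version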
    assert await get_version("sigrok-firmware-fx2lafw", {
        "source": "ubuntupkg",
        "strip_release": True,
    }) == "0.1.7"

@pytest.mark.flaky
async def test_ubuntupkg_suite(get_version):
    assert await get_version("sigrok-firmware-fx2lafw", {
        "source": "ubuntupkg",
        "suite": "xenial",
    }) == "0.1.2-1"

@pytest.mark.flaky(reruns=10)
async def test_ubuntupkg_suite_with_paging(get_version):
    assert await get_version("ffmpeg", {
        "source": "ubuntupkg",
        "suite": "xenial",
    }) == "7:2.8.17-0ubuntu0.1"
nvchecker-2.17/tests/test_vsmarketplace.py000066400000000000000000000005111476544462000210400ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2021 Th3Whit3Wolf , et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_vsmarketplace(get_version):
    assert await get_version("usernamehw.indent-one-space", {
        "source": "vsmarketplace",
    }) == "1.0.0"
nvchecker-2.17/tox.ini000066400000000000000000000004321476544462000147410ustar00rootroot00000000000000[tox]
isolated_build = True
# you may find `tox --skip-missing-interpreters=true` helpful.
envlist = py3{8,9,10,11,12}

[testenv]
usedevelop = false
deps =
  pytest
  pytest-asyncio
  pytest-httpbin
  pytest-rerunfailures
extras =
  htmlparser
passenv = KEYFILE
commands = pytest -r fEs {posargs}