Mirror of https://github.com/lilydjwg/nvchecker.git, synced 2025-03-10 06:14:02 +00:00.

Compare commits (40 commits)
Commits (SHA1):

fc8d90f46b, e1882a2778, b83cbd2ead, d4be5189fe, cc9001ad88, fca89f7830,
92a424f946, d3bf5b2db5, bd72ea04d2, 50d5342586, 1e3ed1e4b9, 72c0730725,
6759d2f206, 17e351f825, d58638733e, 287282eb2c, fa9ca38690, 5c7918bf7a,
c03bec7452, 7cacd9edaf, af21f93bd1, ca011221cf, 745f0decdb, 6f5870787a,
4364759b29, dbf6c4601f, 4d5e29f26b, ad892b713e, 5803237d39, 71a0002624,
2722ccc7fe, 228139ddd4, 372fce4445, babb3aa74e, d44a50c273, c0d6568434,
2a64f7ab8f, af1a855fd5, eeddd56156, 29b0f63103
31 changed files with 531 additions and 51 deletions
.github/workflows/tests.yaml (vendored) | 43

@@ -42,11 +42,6 @@ jobs:
           restore-keys: |
             ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.deps }}-
             ${{ runner.os }}-${{ env.cache-name }}-
-      - name: workaround pycurl wheel
-        if: ${{ contains(matrix.deps, 'pycurl') }}
-        run: |
-          sudo mkdir -p /etc/pki/tls/certs
-          sudo ln -s /etc/ssl/certs/ca-certificates.crt /etc/pki/tls/certs/ca-bundle.crt
-
       - name: Install pycurl deps
         if: ${{ contains(matrix.deps, 'pycurl') }}
@@ -55,10 +50,44 @@ jobs:
           sudo apt install -y libcurl4-openssl-dev
       # werkzeug is pinned for httpbin compatibility https://github.com/postmanlabs/httpbin/issues/673
       - name: Install Python deps
-        run: pip install -U ${{ matrix.deps }} pytest 'pytest-asyncio>=0.24' pytest-httpbin pytest-rerunfailures structlog tomli platformdirs lxml jq 'werkzeug<2.1' awesomeversion
+        env:
+          # use env to avoid `>` being redirection
+          deps: ${{ matrix.deps }}
+        run: pip install -U $deps pytest 'pytest-asyncio>=0.24' pytest-httpbin pytest-rerunfailures structlog tomli platformdirs lxml jq 'werkzeug<2.1' awesomeversion
+      # don't use binary distribution because:
+      # hardcoded cacert path doesn't work on Ubuntu (should have been resolved?)
+      # limited compression support (only deflate & gzip)
+      - name: Install pycurl
+        if: ${{ contains(matrix.deps, 'pycurl') }}
+        run: |
+          pip uninstall -y pycurl
+          pip install -U pycurl --no-binary :all:
       - name: Decrypt keys
         env:
           KEY: ${{ secrets.KEY }}
         run: if [[ -n $KEY ]]; then openssl enc -d -aes-256-ctr -pbkdf2 -k $KEY -in keyfile.toml.enc -out keyfile.toml; fi
 
+      - name: Setup mitmproxy cache
+        uses: actions/cache@v4
+        env:
+          cache-name: cache-mitm
+        with:
+          path: ~/.mitmproxy
+          key: ${{ env.cache-name }}
+          restore-keys: |
+            ${{ env.cache-name }}-
+      - name: Install mitmproxy
+        run: |
+          /usr/bin/python -m venv --system-site-packages ~/.mitmproxy/venv
+          . ~/.mitmproxy/venv/bin/activate
+          pip install -U mitmproxy
+          # https://github.com/DevToys-app/DevToys/issues/1373#issuecomment-2599820594
+          sudo sysctl -w kernel.apparmor_restrict_unprivileged_unconfined=0
+          sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0
+
+      # - name: Setup upterm session
+      #   uses: lhotari/action-upterm@v1
       - name: Run pytest
-        run: if [[ -f keyfile.toml ]]; then KEYFILE=keyfile.toml pytest; else pytest; fi
+        env:
+          mitmdump: /home/runner/.mitmproxy/venv/bin/mitmdump
+        run: scripts/run_cached_tests
docs/usage.rst | 106
@@ -71,8 +71,8 @@ The JSON log is one JSON string per line. The following documented events and
 fields are stable, undocumented ones may change without notice.
 
 event=updated
-  An update is detected. Fields ``name``, ``old_version`` and ``version`` are
-  available. ``old_version`` maybe ``null``.
+  An update is detected. Fields ``name``, ``revision``, ``old_version`` and ``version`` are
+  available. ``old_version`` may be ``null`` and ``revision`` may be absent.
 
 event=up-to-date
   There is no update. Fields ``name`` and ``version`` are available.
@@ -322,12 +322,23 @@ post_data
 post_data_type
   (*Optional*) Specifies the ``Content-Type`` of the request body (``post_data``). By default, this is ``application/x-www-form-urlencoded``.
 
+This source can also work with XML to some extent, e.g. it can parse an RSS feed like this:
+
+.. code-block:: toml
+
+   [ProxmoxVE]
+   source = "htmlparser"
+   url = "https://my.proxmox.com/en/announcements/tag/proxmox-ve/rss"
+   xpath = "//item/title"
+   from_pattern = 'Proxmox VE ([\d.]+) released!'
+   to_pattern = '\1'
+
 .. note::
   An additional dependency "lxml" is required.
   You can use ``pip install 'nvchecker[htmlparser]'``.
 
 Search with an JSON Parser (jq)
-~~~~~~~~~~~~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 ::
 
   source = "jq"
@@ -388,8 +399,8 @@ Check GitHub
   source = "github"
 
 Check `GitHub <https://github.com/>`_ for updates. The version returned is in
-date format ``%Y%m%d.%H%M%S``, e.g. ``20130701.012212``, unless ``use_latest_release``
-or ``use_max_tag`` is used. See below.
+date format ``%Y%m%d.%H%M%S``, e.g. ``20130701.012212``, unless ``use_latest_release``,
+``use_max_tag`` or ``use_max_release`` is used. See below.
 
 github
   The github repository, with author, e.g. ``lilydjwg/nvchecker``.
@@ -415,13 +426,19 @@ use_latest_release
   Will return the release's tag name instead of date. (For historical reasons
   it doesn't return the release name. See below to change.)
 
+use_max_release
+  Set this to ``true`` to check for the max release on GitHub.
+  This option returns the largest one sorted by the
+  ``sort_version_key`` option. Will return the tag name instead of date.
+
 use_release_name
-  When ``use_latest_release`` is ``true``, setting this to ``true`` will cause
-  nvchecker to return the release name instead of the tag name.
+  When ``use_latest_release`` or ``use_max_release`` is ``true``,
+  setting this to ``true`` will cause nvchecker to return the release name
+  instead of the tag name.
 
 include_prereleases
-  When ``use_latest_release`` is ``true``, set this to ``true`` to take prereleases into
-  account.
+  When ``use_latest_release`` or ``use_max_release`` is ``true``,
+  set this to ``true`` to take prereleases into account.
 
   This returns the release names (not the tag names).
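As a quick illustration of the new option, a config entry might look like this (a sketch based on the documentation above; the repository name is the one used by the tests added in this change set)::

  [example]
  source = "github"
  github = "harry-sanabria/ReleaseTestRepo"
  use_max_release = true
  use_release_name = true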
@@ -438,7 +455,7 @@ query
 
 use_max_tag
   Set this to ``true`` to check for the max tag on GitHub. Unlike
-  ``use_latest_release``, this option includes both annotated tags and
+  ``use_max_release``, this option includes both annotated tags and
   lightweight ones, and return the largest one sorted by the
   ``sort_version_key`` option. Will return the tag name instead of date.
@@ -454,7 +471,8 @@ To set an authorization token, you can set:
 - an entry in the keyfile for the host (e.g. ``github.com``)
 - an entry in your ``netrc`` file for the host
 
-This source supports :ref:`list options` when ``use_max_tag`` is set.
+This source supports :ref:`list options` when ``use_max_tag`` or
+``use_max_release`` is set.
 
 Check Gitea
 ~~~~~~~~~~~
@@ -489,6 +507,15 @@ To set an authorization token, you can set:
 
 This source supports :ref:`list options` when ``use_max_tag`` is set.
 
+Check Gogs / Forgejo / Codeberg
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Please use the above "gitea" source. Gitea is a fork of `Gogs <https://gogs.io/>`_.
+`Forgejo <https://forgejo.org/>`_ is a fork of Gitea. Codeberg is a code
+hosting provider that uses Forgejo. They share the same API endpoints nvchecker uses.
+
+Alternatively, you can try the generic "git" source.
+
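For instance, a Codeberg-hosted project could be checked roughly like this (a sketch, not part of the docs change; the repository path is hypothetical, and it assumes the gitea source's option for pointing at a non-default instance applies here as it does for self-hosted Gitea)::

  [example]
  source = "gitea"
  gitea = "owner/repo"    # hypothetical repository path
  host = "codeberg.org"   # assumption: same host option as self-hosted Gitea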
 Check BitBucket
 ~~~~~~~~~~~~~~~
 ::
@@ -569,7 +596,7 @@ Check PyPI
 
   source = "pypi"
 
-Check `PyPI <https://pypi.python.org/>`_ for updates.
+Check `PyPI <https://pypi.python.org/>`_ for updates. Yanked releases are ignored.
 
 pypi
   The name used on PyPI, e.g. ``PySide``.
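The effect of the yanked-release handling can be seen with urllib3, mirroring the new test added below: urllib3 2.0.0 and 2.0.1 are yanked on PyPI, so this entry resolves to 1.26.20 (a sketch, not part of the docs change)::

  [urllib3]
  source = "pypi"
  include_regex = '^(1\..*)|(2\.0\.[0,1])'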
@@ -663,7 +690,7 @@ Check crates.io
 
   source = "cratesio"
 
-Check `crates.io <https://crates.io/>`_ for updates.
+Check `crates.io <https://crates.io/>`_ for updates. Yanked releases are ignored.
 
 cratesio
   The crate name on crates.io, e.g. ``tokio``.
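A minimal entry matching the description above::

  [tokio]
  source = "cratesio"
  cratesio = "tokio"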
@@ -857,6 +884,29 @@ strip_release
 
 Note that either pkg or srcpkg needs to be specified (but not both) or the item name will be used as pkg.
 
+Check RPM repository
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+::
+
+  source = "rpmrepo"
+
+This enables you to check latest package versions in an arbitrary RPM repository in `repomd` format used by package managers such as ``dnf`` (Fedora, RHEL, AlmaLinux etc.) or ``zypper`` (openSUSE) without the need for native RPM tools.
+
+pkg
+  Name of the RPM package (you can also use ``rpmrepo`` as with other sources, but ``pkg`` is preferred for clarity)
+
+repo
+  URL of the repository (required, ``repodata/repomd.xml`` should be there)
+
+arch
+  Architecture of the RPM package (``binary``, ``src``, ``any``, ``x86_64``, ``aarch64``, etc, defaults to ``binary``)
+
+This source supports :ref:`list options`.
+
+.. note::
+  An additional dependency "lxml" is required.
+  You can use ``pip install 'nvchecker[rpmrepo]'``.
+
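A working entry looks like this (values taken from the test suite added in this change set)::

  [knot]
  source = "rpmrepo"
  pkg = "knot"
  repo = "http://ftp.sh.cvut.cz/fedora/linux/updates/39/Everything/x86_64/"
  # arch defaults to "binary"; set e.g. "x86_64", "src" or "any" to change the filter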
 Check Git repository
 ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
 ::
@@ -1014,6 +1064,36 @@ Check `Go packages and modules <https://pkg.go.dev/>`_ for updates.
 go
   The name of Go package or module, e.g. ``github.com/caddyserver/caddy/v2/cmd``.
 
+Check opam repository
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+::
+
+  source = "opam"
+
+This enables you to check latest package versions in an arbitrary `opam repository <https://opam.ocaml.org/doc/Manual.html#Repositories>` without the need for the opam command line tool.
+
+pkg
+  Name of the opam package
+
+repo
+  URL of the repository (optional, the default ``https://opam.ocaml.org`` repository is used if not specified)
+
+This source supports :ref:`list options`.
+
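Example entries, taken from the tests added in this change set (the second one points at a non-default repository)::

  [omigrate]
  source = "opam"
  pkg = "omigrate"

  [coq-abp]
  source = "opam"
  repo = "https://coq.inria.fr/opam/released"
  pkg = "coq-abp"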
+Check Snapcraft
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+::
+
+  source = "snapcraft"
+
+This source allows you to check the latest package versions in the `Snapcraft <https://snapcraft.io>`_.
+
+snap
+  Name of the snap package.
+
+channel
+  Name of the channel.
+
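Example entry (snap and channel names are the ones exercised by the new test)::

  [test-snapd-public]
  source = "snapcraft"
  snap = "test-snapd-public"
  channel = "edge"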
 Combine others' results
 ~~~~~~~~~~~~~~~~~~~~~~~
 ::
mypy.ini | 3
@@ -26,3 +26,6 @@ ignore_missing_imports = True
 
 [mypy-jq]
 ignore_missing_imports = True
+
+[mypy-tomli]
+ignore_missing_imports = True
@@ -1,4 +1,4 @@
 # MIT licensed
 # Copyright (c) 2013-2024 lilydjwg <lilydjwg@gmail.com>, et al.
 
-__version__ = '2.16'
+__version__ = '2.17dev'
@@ -417,6 +417,7 @@ def check_version_update(
       'updated',
       name = name,
       version = r.version,
+      revision = r.revision,
       old_version = oldver,
       url = r.url,
     )
@@ -4,6 +4,7 @@
 import json as _json
 from urllib.parse import urlencode
 from typing import Optional, Dict, Any
+import os
 
 from tornado.httpclient import AsyncHTTPClient, HTTPRequest
 
@@ -17,8 +18,9 @@ from .base import BaseSession, TemporaryError, Response, HTTPError
 __all__ = ['session']
 
 HTTP2_AVAILABLE = None if pycurl else False
+SSL_CERT_FILE = os.environ.get('SSL_CERT_FILE')
 
-def try_use_http2(curl):
+def setup_curl(curl):
   global HTTP2_AVAILABLE
   if HTTP2_AVAILABLE is None:
     try:
@@ -29,6 +31,10 @@ def try_use_http2(curl):
   elif HTTP2_AVAILABLE:
     curl.setopt(pycurl.HTTP_VERSION, 4)
 
+  if SSL_CERT_FILE:
+    curl.setopt_string(pycurl.CAINFO, SSL_CERT_FILE)
+  curl.setopt_string(pycurl.ACCEPT_ENCODING, "")
+
 class TornadoSession(BaseSession):
   def setup(
     self,
@@ -68,7 +74,7 @@ class TornadoSession(BaseSession):
       kwargs['body'] = body
     elif json:
       kwargs['body'] = _json.dumps(json)
-    kwargs['prepare_curl_callback'] = try_use_http2
+    kwargs['prepare_curl_callback'] = setup_curl
 
     if proxy:
       host, port = proxy.rsplit(':', 1)
@@ -26,6 +26,9 @@ def _console_msg(event):
   else:
     msg = evt
 
+  if 'revision' in event and not event['revision']:
+    del event['revision']
+
   if 'name' in event:
     msg = f"{event['name']}: {msg}"
     del event['name']
@@ -3,7 +3,7 @@
 
 from nvchecker.api import session, RichResult, GetVersionError
 
-URL = 'https://www.archlinux.org/packages/search/json/'
+URL = 'https://archlinux.org/packages/search/json/'
 
 async def request(pkg):
   res = await session.get(URL, params={"name": pkg})
@@ -51,7 +51,7 @@ async def get_version(
       ]
     else:
       return RichResult(
-        version = data[0]['commit']['committer']['date'].split('T', 1)[0].replace('-', ''),
+        version = data[0]['commit']['committer']['date'],
         revision = data[0]['sha'],
         url = data[0]['html_url'],
       )
@@ -21,6 +21,7 @@ GITHUB_URL = 'https://api.%s/repos/%s/commits'
 GITHUB_LATEST_RELEASE = 'https://api.%s/repos/%s/releases/latest'
 # https://developer.github.com/v3/git/refs/#get-all-references
 GITHUB_MAX_TAG = 'https://api.%s/repos/%s/git/refs/tags'
+GITHUB_MAX_RELEASE = 'https://api.%s/repos/%s/releases'
 GITHUB_GRAPHQL_URL = 'https://api.%s/graphql'
 
 async def get_version(name, conf, **kwargs):
@@ -192,10 +193,13 @@ async def get_version_real(
   br = conf.get('branch')
   path = conf.get('path')
   use_max_tag = conf.get('use_max_tag', False)
+  use_max_release = conf.get('use_max_release', False)
   if use_latest_release:
     url = GITHUB_LATEST_RELEASE % (host, repo)
   elif use_max_tag:
     url = GITHUB_MAX_TAG % (host, repo)
+  elif use_max_release:
+    url = GITHUB_MAX_RELEASE % (host, repo)
   else:
     url = GITHUB_URL % (host, repo)
   parameters = {}
@@ -225,6 +229,18 @@ async def get_version_real(
       raise GetVersionError('No tag found in upstream repository.')
     return tags
 
+  if use_max_release:
+    releases: List[Union[str, RichResult]] = [
+      RichResult(
+        version = ref['name'] if use_release_name else ref['tag_name'],
+        gitref = f"refs/tags/{ref['tag_name']}",
+        url = ref['html_url'],
+      ) for ref in data if include_prereleases or not ref['prerelease']
+    ]
+    if not releases:
+      raise GetVersionError('No release found in upstream repository.')
+    return releases
+
   if use_latest_release:
     if 'tag_name' not in data:
       raise GetVersionError('No release found in upstream repository.')
@@ -31,9 +31,9 @@ async def get_version_impl(info) -> RichResult:
 
     elements = doc.xpath("//div[@class='Version-tag']/a/text()")
     try:
-      version = elements[0]
+      version = elements[0]  # type: ignore
       return RichResult(
-        version = version,
+        version = version,  # type: ignore
        url = GO_PKG_VERSION_URL.format(pkg=pkg_name, version=version),
       )
     except IndexError:
nvchecker_source/opam.py | 71 (new file)
@@ -0,0 +1,71 @@
# MIT licensed
# Copyright (c) 2024 Daniel Peukert <daniel@peukert.cc>, et al.

import asyncio
from io import BytesIO
import tarfile
from typing import List

from nvchecker.api import (
  session, VersionResult,
  Entry, AsyncCache,
  KeyManager, RichResult
)

OPAM_REPO_INDEX_URL = "%s/index.tar.gz"
OPAM_VERSION_PATH_PREFIX = "packages/%s/%s."
OPAM_VERSION_PATH_SUFFIX = "/opam"

OPAM_DEFAULT_REPO = 'https://opam.ocaml.org'
OPAM_DEFAULT_REPO_VERSION_URL = "%s/packages/%s/%s.%s"

def _decompress_and_list_files(data: bytes) -> List[str]:
  # Convert the bytes to a file object and get a list of files
  archive = tarfile.open(mode='r', fileobj=BytesIO(data))
  return archive.getnames()

async def get_files(url: str) -> List[str]:
  # Download the file and get its contents
  res = await session.get(url)
  data = res.body

  # Get the file list of the archive
  loop = asyncio.get_running_loop()
  return await loop.run_in_executor(None, _decompress_and_list_files, data)

async def get_package_versions(files: List[str], pkg: str) -> List[str]:
  # Prepare the filename prefix based on the package name
  prefix = OPAM_VERSION_PATH_PREFIX % (pkg, pkg)

  # Only keep opam files that are relevant to the package we're working with
  filtered_files = []

  for filename in files:
    if filename.startswith(prefix) and filename.endswith(OPAM_VERSION_PATH_SUFFIX):
      filtered_files.append(filename[len(prefix):-1*len(OPAM_VERSION_PATH_SUFFIX)])

  return filtered_files

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
):
  pkg = conf.get('pkg', name)
  repo = conf.get('repo', OPAM_DEFAULT_REPO).rstrip('/')

  # Get the list of files in the repo index (see https://opam.ocaml.org/doc/Manual.html#Repositories for repo structure)
  files = await cache.get(OPAM_REPO_INDEX_URL % repo, get_files)  # type: ignore

  # Parse the version strings from the file names
  raw_versions = await get_package_versions(files, pkg)

  # Convert the version strings into RichResults
  versions = []
  for version in raw_versions:
    versions.append(RichResult(
      version = version,
      # There is no standardised URL scheme, so we only return an URL for the default registry
      url = OPAM_DEFAULT_REPO_VERSION_URL % (repo, pkg, pkg, version) if repo == OPAM_DEFAULT_REPO else None,
    ))
  return versions
@@ -19,6 +19,10 @@ async def get_version(name, conf, *, cache, **kwargs):
   data = await cache.get_json(url)
 
   for version in data['releases'].keys():
+    # Skip versions that are marked as yanked.
+    if (vers := data['releases'][version]) and vers[0]['yanked']:
+      continue
+
     try:
       parsed_version = Version(version)
     except InvalidVersion:
nvchecker_source/rpmrepo.py | 84 (new file)
@@ -0,0 +1,84 @@
# MIT licensed
# Copyright (c) 2024 Jakub Ružička <jru@debian.org>, et al.

import asyncio
import gzip
import pathlib
import urllib
from typing import Set

import lxml.etree
from nvchecker.api import session, AsyncCache, Entry, KeyManager, VersionResult


# XML namespaces used in repodata (dead links haha)
NS = {
  'common': 'http://linux.duke.edu/metadata/common',
  'repo': 'http://linux.duke.edu/metadata/repo',
  'rpm': 'http://linux.duke.edu/metadata/rpm'
}


async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  repo = conf['repo']
  arch = conf.get('arch', 'binary')
  pkg = conf.get('pkg')
  if not pkg:
    pkg = conf.get('rpmrepo', name)

  repo_url = urllib.parse.urlparse(repo)
  repo_path = pathlib.PurePosixPath(repo_url.path)

  # get the url of repomd.xml
  repomd_path = repo_path / 'repodata' / 'repomd.xml'
  repomd_url = repo_url._replace(path=str(repomd_path)).geturl()
  # download repomd.xml (use cache)
  repomd_body = await cache.get(repomd_url, get_file)  # type: ignore
  # parse repomd.xml
  repomd_xml = lxml.etree.fromstring(repomd_body)

  # get the url of *primary.xml.gz
  primary_element = repomd_xml.find('repo:data[@type="primary"]/repo:location', namespaces=NS)
  primary_path = repo_path / primary_element.get('href')  # type: ignore
  primary_url = repo_url._replace(path=str(primary_path)).geturl()
  # download and decompress *primary.xml.gz (use cache)
  primary_body = await cache.get(primary_url, get_file_gz)  # type: ignore
  # parse *primary.xml metadata
  metadata = lxml.etree.fromstring(primary_body)

  # use set to eliminate duplication
  versions_set: Set[str] = set()
  # iterate package metadata
  for el in metadata.findall(f'common:package[common:name="{pkg}"]', namespaces=NS):
    pkg_arch = el.findtext('common:arch', namespaces=NS)

    # filter by arch
    if arch == 'binary':
      if pkg_arch == 'src':
        continue
    elif arch != 'any':
      if pkg_arch != arch:
        continue

    version_info = el.find('common:version', namespaces=NS)
    version = version_info.get('ver')  # type: ignore
    versions_set.add(version)  # type: ignore

  versions = list(versions_set)
  return versions  # type: ignore


async def get_file(url: str) -> bytes:
  res = await session.get(url)
  return res.body


async def get_file_gz(url: str) -> bytes:
  res = await session.get(url)
  loop = asyncio.get_running_loop()
  return await loop.run_in_executor(
    None, gzip.decompress, res.body)
nvchecker_source/snapcraft.py | 33 (new file)
@@ -0,0 +1,33 @@
# MIT licensed
# Copyright (c) 2025 Maxim Slipenko <maxim@slipenko.com>, et al.

from nvchecker.api import (
  GetVersionError
)
from nvchecker.httpclient.base import HTTPError

URL="https://api.snapcraft.io/v2/snaps/info/%(snap)s"

async def get_version(
  name: str, conf, *,
  cache, keymanager,
  **kwargs,
):
  try:
    snap = conf.get("snap")
    channel = conf.get("channel")

    result = await cache.get_json(
      URL % { "snap": snap },
      headers={
        "Snap-Device-Series": "16",
      },
    )
  except HTTPError:
    raise GetVersionError(f"Failed to request snap info for {snap}")

  for c in result['channel-map']:
    if c['channel']['name'] == channel:
      return c['version']

  raise GetVersionError(f"Failed to find version for {snap}")
@@ -2,11 +2,6 @@
 oldver = "old_ver.json"
 newver = "new_ver.json"
 
-[vim]
-source = "regex"
-regex = "7\\.3\\.\\d+"
-url = "http://ftp.vim.org/pub/vim/patches/7.3/"
-
 [google-chrome]
 source = "cmd"
 cmd = '''wget -qO- http://dl.google.com/linux/chrome/rpm/stable/x86_64/repodata/other.xml.gz | zgrep -A1 "google-chrome-stable" | awk -F\" '/version/ {print $4"-"$6}' '''
@@ -25,17 +20,13 @@ github = "lilydjwg/nvchecker"
 [ssed]
 source = "regex"
 regex = "The current version is ([\\d.]+)\\."
-url = "http://sed.sourceforge.net/grabbag/ssed/"
+url = "https://sed.sourceforge.net/grabbag/ssed/"
 proxy = "http://localhost:8087"
 
 [PySide]
 source = "pypi"
-pypi = "PySide"
+pypi = "nvchecker"
 
 [test]
 source = "manual"
 manual = "0.1"
 
-["Sparkle Test App"]
-source = "sparkle"
-sparkle = "https://sparkle-project.org/files/sparkletestcast.xml"
@@ -18,7 +18,7 @@ _handler_precedence = (
 BOOL_KEYS = [
   'strip_release', 'use_last_modified',
   'use_latest_release', 'use_latest_tag',
-  'use_max_tag', 'use_pre_release',
+  'use_max_release', 'use_max_tag', 'use_pre_release',
 ]
 
 INT_KEYS = [
scripts/run_cached_tests | 40 (new executable file)
@@ -0,0 +1,40 @@
#!/bin/bash -e

mitmdump=${mitmdump:-mitmdump}

if [[ -f ~/.mitmproxy/nvdump ]]; then
  $mitmdump -S ~/.mitmproxy/nvdump -p 7890 --ignore-hosts '127\.0\.0\.1' --server-replay-reuse --server-replay-extra=forward -w newdump >mitmdump_output &
else
  $mitmdump -w ~/.mitmproxy/nvdump -p 7890 --ignore-hosts '127\.0\.0\.1' >mitmdump_output &
fi

mitm_pid=$!

on_exit () {
  kill -INT $mitm_pid

  if [[ -s newdump ]]; then
    cat newdump >> ~/.mitmproxy/nvdump
  fi

  cat mitmdump_output
}

trap on_exit EXIT

if [[ -f keyfile.toml ]]; then
  export KEYFILE=keyfile.toml
fi

for _ in {1..10}; do
  if [[ -s ~/.mitmproxy/mitmproxy-ca-cert.pem ]]; then
    break
  fi
  sleep 1
done

export SSL_CERT_FILE=$HOME/.mitmproxy/mitmproxy-ca-cert.pem
export GIT_SSL_CAINFO=$SSL_CERT_FILE
export http_proxy=http://localhost:7890 https_proxy=http://localhost:7890

pytest
@@ -39,10 +39,10 @@ classifiers =
 
 [options]
 zip_safe = True
+python_requires = >=3.8
 
 packages = find_namespace:
 install_requires =
-  setuptools; python_version<"3.8"
   tomli; python_version<"3.11"
   structlog
   platformdirs
@@ -64,6 +64,8 @@ pypi =
   packaging
 htmlparser =
   lxml
+rpmrepo =
+  lxml
 jq =
   jq
@@ -5,6 +5,7 @@
 import pytest
 pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]
 
+@pytest.mark.flaky(reruns=10)
 async def test_android_addon(get_version):
   assert await get_version("android-google-play-apk-expansion", {
     "source": "android_sdk",
@@ -1,13 +1,9 @@
 # MIT licensed
 # Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.
 
-import os
-
 import pytest
 pytestmark = [pytest.mark.asyncio,
-              pytest.mark.needs_net,
-              pytest.mark.skipif(os.environ.get('TRAVIS') == 'true',
-                                 reason="fail too often")]
+              pytest.mark.needs_net]
 
 @pytest.mark.flaky(reruns=10)
 async def test_aur(get_version):
@@ -1,9 +1,12 @@
 # MIT licensed
 # Copyright (c) 2020 Chih-Hsuan Yen <yan12125 at gmail dot com>
 
-import pytest
+import os
 import datetime
-pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]
+
+import pytest
+pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net,
+              pytest.mark.skipif(bool(os.environ.get('GITHUB_RUN_ID')), reason="400 very often")]
 
 async def test_container(get_version):
   assert await get_version("hello-world", {
@@ -7,4 +7,4 @@ pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]
 async def test_cran(get_version):
   assert await get_version("xml2", {
     "source": "cran",
-  }) == "1.3.6"
+  }) == "1.3.7"
@@ -10,8 +10,8 @@ async def test_gitea(get_version):
   ver = await get_version("example", {
     "source": "gitea",
     "gitea": "gitea/tea"})
-  assert len(ver) == 8
-  assert ver.isdigit()
+  assert ver.startswith('20')
+  assert 'T' in ver
 
 @pytest.mark.flaky(reruns=10)
 async def test_gitea_max_tag_with_include(get_version):
@@ -59,6 +59,20 @@ async def test_github_max_tag(get_version):
     "use_max_tag": True,
   }) == "second_release"
 
+async def test_github_max_release(get_version):
+  assert await get_version("example", {
+    "source": "github",
+    "github": "harry-sanabria/ReleaseTestRepo",
+    "use_max_release": True,
+  }) == "second_release"
+
+  assert await get_version("example", {
+    "source": "github",
+    "github": "harry-sanabria/ReleaseTestRepo",
+    "use_max_release": True,
+    "use_release_name": True,
+  }) == "second_release"
+
 async def test_github_max_tag_with_ignored(get_version):
   assert await get_version("example", {
     "source": "github",
@@ -67,6 +81,21 @@ async def test_github_max_tag_with_ignored(get_version):
     "ignored": "second_release release3",
   }) == "first_release"
 
+async def test_github_max_release_with_ignored(get_version):
+  assert await get_version("example", {
+    "source": "github",
+    "github": "harry-sanabria/ReleaseTestRepo",
+    "use_max_release": True,
+    "ignored": "second_release release3",
+  }) == "first_release"
+
+  assert await get_version("example", {
+    "source": "github",
+    "github": "harry-sanabria/ReleaseTestRepo",
+    "use_max_release": True,
+    "ignored": "second_release",
+    "use_release_name": True,
+  }) == "release #3"
+
 async def test_github_with_path(get_version):
   assert await get_version("example", {
     "source": "github",
@@ -91,6 +120,16 @@ async def test_github_max_tag_with_include(get_version):
   })
   assert re.match(r'chrome-[\d.]+', version)
 
+async def test_github_max_release_with_include(get_version):
+  version = await get_version("example", {
+    "source": "github",
+    "github": "EFForg/https-everywhere",
+    "use_max_release": True,
+    "use_release_name": True,
+    "include_regex": r"Release \d.*",
+  })
+  assert re.match(r'Release [\d.]+', version)
+
 async def test_github_latest_tag(get_version):
   assert await get_version("example", {
     "source": "github",
@@ -3,6 +3,7 @@
 import pytest
 pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]
 
+@pytest.mark.flaky(reruns=10)
 async def test_launchpad(get_version):
   version = await get_version(
     "sakura",
tests/test_opam.py | 25 (new file)
@@ -0,0 +1,25 @@
# MIT licensed
# Copyright (c) 2024 Daniel Peukert <daniel@peukert.cc>, et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_opam_official(get_version):
  assert await get_version("test", {
    "source": "opam",
    "pkg": "omigrate",
  }) == "0.3.2"

async def test_opam_coq(get_version):
  assert await get_version("test", {
    "source": "opam",
    "repo": "https://coq.inria.fr/opam/released",
    "pkg": "coq-abp",
  }) == "8.10.0"

async def test_opam_coq_trailing_slash(get_version):
  assert await get_version("test", {
    "source": "opam",
    "repo": "https://coq.inria.fr/opam/released/",
    "pkg": "coq-abp",
  }) == "8.10.0"
@@ -32,3 +32,8 @@ async def test_pypi_invalid_version(get_version):
     "source": "pypi",
   })
 
+async def test_pypi_yanked_version(get_version):
+  assert await get_version("urllib3", {
+    "source": "pypi",
+    "include_regex": "^(1\\..*)|(2\\.0\\.[0,1])",
+  }) == "1.26.20"
tests/test_rpmrepo.py | 19 (new file)
@@ -0,0 +1,19 @@
# MIT licensed
# Copyright (c) 2024 Jakub Ružička <jru@debian.org>, et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_rpmrepo_fedora(get_version):
  assert await get_version("knot_fedora-39", {
    "source": "rpmrepo",
    "pkg": "knot",
    "repo": "http://ftp.sh.cvut.cz/fedora/linux/updates/39/Everything/x86_64/",
  }) == "3.3.9"

async def test_rpmrepo_alma(get_version):
  assert await get_version("knot_fedora-39", {
    "source": "rpmrepo",
    "pkg": "tmux",
    "repo": "http://ftp.sh.cvut.cz/almalinux/9.5/BaseOS/x86_64/os/",
  }) == "3.2a"
tests/test_snapcraft.py
Normal file
28
tests/test_snapcraft.py
Normal file
|
@ -0,0 +1,28 @@
|
||||||
|
# MIT licensed
|
||||||
|
# Copyright (c) 2025 Maxim Slipenko <maxim@slipenko.com>, et al.
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]
|
||||||
|
|
||||||
|
async def test_snapcraft(get_version):
|
||||||
|
assert await get_version("test", {
|
||||||
|
"source": "snapcraft",
|
||||||
|
"snap": "test-snapd-public",
|
||||||
|
"channel": "edge",
|
||||||
|
}) == "2.0"
|
||||||
|
|
||||||
|
async def test_snapcraft_non_existent_snap(get_version):
|
||||||
|
with pytest.raises(RuntimeError, match='Failed to request snap info for not-existent-snap'):
|
||||||
|
assert await get_version("test", {
|
||||||
|
"source": "snapcraft",
|
||||||
|
"snap": "not-existent-snap",
|
||||||
|
"channel": "stable",
|
||||||
|
})
|
||||||
|
|
||||||
|
async def test_snapcraft_non_existent_channel(get_version):
|
||||||
|
with pytest.raises(RuntimeError, match='Failed to find version for test-snapd-public'):
|
||||||
|
assert await get_version("test", {
|
||||||
|
"source": "snapcraft",
|
||||||
|
"snap": "test-snapd-public",
|
||||||
|
"channel": "non-existent-channel",
|
||||||
|
})
|
|
@@ -26,7 +26,7 @@ async def test_ubuntupkg_suite(get_version):
     "suite": "xenial",
   }) == "0.1.2-1"
 
-@pytest.mark.flaky
+@pytest.mark.flaky(reruns=10)
 async def test_ubuntupkg_suite_with_paging(get_version):
   assert await get_version("ffmpeg", {
     "source": "ubuntupkg",