mirror of https://github.com/lilydjwg/nvchecker.git
synced 2025-03-10 06:14:02 +00:00
Compare commits
62 commits
SHA1:
fc8d90f46b, e1882a2778, b83cbd2ead, d4be5189fe, cc9001ad88, fca89f7830,
92a424f946, d3bf5b2db5, bd72ea04d2, 50d5342586, 1e3ed1e4b9, 72c0730725,
6759d2f206, 17e351f825, d58638733e, 287282eb2c, fa9ca38690, 5c7918bf7a,
c03bec7452, 7cacd9edaf, af21f93bd1, ca011221cf, 745f0decdb, 6f5870787a,
4364759b29, dbf6c4601f, 4d5e29f26b, ad892b713e, 5803237d39, 71a0002624,
2722ccc7fe, 228139ddd4, 372fce4445, babb3aa74e, d44a50c273, c0d6568434,
2a64f7ab8f, af1a855fd5, eeddd56156, 29b0f63103, 3a87f920a5, af7acde251,
c7f571ae79, 256bb9dd3b, 28f1ab5144, e1a6517169, afad2f08f2, 5cead67640,
728dcca1bd, 89b4cd90ba, 4eb70a0fbe, 8d3b6adaaa, 5b561456ae, d5cc276000,
3abe5ad394, e4cff0b507, d4e27c22ac, 7848f0907d, a1ab77f92a, 5a3ca69c1c,
5677c9595e, 61ca3c95d0
69 changed files with 760 additions and 113 deletions
49  .github/workflows/tests.yaml  vendored

@@ -12,6 +12,7 @@ jobs:
- "3.10"
- "3.11"
- "3.12"
- "3.13"
# pypy fails in some cases but we don't care much about that
# with github actions we can't mark some jobs to not affect the overall
# conclusion so we have to omit "allow-failure" tests.

@@ -19,7 +20,8 @@ jobs:
# - pypy-3.7
deps:
- tornado pycurl
- aiohttp
# timer runs when loop is closed, see https://github.com/lilydjwg/nvchecker/actions/runs/11650699759/job/32439742210
# - aiohttp
- tornado
- httpx[http2]>=0.14.0
exclude: []

@@ -40,17 +42,52 @@ jobs:
restore-keys: |
${{ runner.os }}-${{ env.cache-name }}-${{ matrix.deps }}-
${{ runner.os }}-${{ env.cache-name }}-
- name: workaround pycurl wheel

- name: Install pycurl deps
if: ${{ contains(matrix.deps, 'pycurl') }}
run: |
sudo mkdir -p /etc/pki/tls/certs
sudo ln -s /etc/ssl/certs/ca-certificates.crt /etc/pki/tls/certs/ca-bundle.crt
sudo apt update
sudo apt install -y libcurl4-openssl-dev
# werkzeug is pinned for httpbin compatibility https://github.com/postmanlabs/httpbin/issues/673
- name: Install Python deps
run: pip install -U ${{ matrix.deps }} pytest 'pytest-asyncio>=0.23' pytest-httpbin pytest-rerunfailures structlog tomli platformdirs lxml jq 'werkzeug<2.1' awesomeversion
env:
# use env to avoid `>` being redirection
deps: ${{ matrix.deps }}
run: pip install -U $deps pytest 'pytest-asyncio>=0.24' pytest-httpbin pytest-rerunfailures structlog tomli platformdirs lxml jq 'werkzeug<2.1' awesomeversion
# don't use binary distribution because:
# hardcoded cacert path doesn't work on Ubuntu (should have been resolved?)
# limited compression support (only deflate & gzip)
- name: Install pycurl
if: ${{ contains(matrix.deps, 'pycurl') }}
run: |
pip uninstall -y pycurl
pip install -U pycurl --no-binary :all:
- name: Decrypt keys
env:
KEY: ${{ secrets.KEY }}
run: if [[ -n $KEY ]]; then openssl enc -d -aes-256-ctr -pbkdf2 -k $KEY -in keyfile.toml.enc -out keyfile.toml; fi

- name: Setup mitmproxy cache
uses: actions/cache@v4
env:
cache-name: cache-mitm
with:
path: ~/.mitmproxy
key: ${{ env.cache-name }}
restore-keys: |
${{ env.cache-name }}-
- name: Install mitmproxy
run: |
/usr/bin/python -m venv --system-site-packages ~/.mitmproxy/venv
. ~/.mitmproxy/venv/bin/activate
pip install -U mitmproxy
# https://github.com/DevToys-app/DevToys/issues/1373#issuecomment-2599820594
sudo sysctl -w kernel.apparmor_restrict_unprivileged_unconfined=0
sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0

# - name: Setup upterm session
#   uses: lhotari/action-upterm@v1
- name: Run pytest
run: if [[ -f keyfile.toml ]]; then KEYFILE=keyfile.toml pytest; else pytest; fi
env:
mitmdump: /home/runner/.mitmproxy/venv/bin/mitmdump
run: scripts/run_cached_tests
121  docs/usage.rst

@@ -71,8 +71,8 @@ The JSON log is one JSON string per line. The following documented events and
fields are stable, undocumented ones may change without notice.

event=updated
An update is detected. Fields ``name``, ``old_version`` and ``version`` are
available. ``old_version`` maybe ``null``.
An update is detected. Fields ``name``, ``revision``, ``old_version`` and ``version`` are
available. ``old_version`` may be ``null`` and ``revision`` may be absent.

event=up-to-date
There is no update. Fields ``name`` and ``version`` are available.
@@ -322,12 +322,23 @@ post_data
post_data_type
(*Optional*) Specifies the ``Content-Type`` of the request body (``post_data``). By default, this is ``application/x-www-form-urlencoded``.

This source can also work with XML to some extent, e.g. it can parse an RSS feed like this:

.. code-block:: toml

[ProxmoxVE]
source = "htmlparser"
url = "https://my.proxmox.com/en/announcements/tag/proxmox-ve/rss"
xpath = "//item/title"
from_pattern = 'Proxmox VE ([\d.]+) released!'
to_pattern = '\1'

.. note::
An additional dependency "lxml" is required.
You can use ``pip install 'nvchecker[htmlparser]'``.

Search with an JSON Parser (jq)
~~~~~~~~~~~~~~~~~~~~~~~~~~
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
::

source = "jq"
@@ -388,8 +399,8 @@ Check GitHub
source = "github"

Check `GitHub <https://github.com/>`_ for updates. The version returned is in
date format ``%Y%m%d.%H%M%S``, e.g. ``20130701.012212``, unless ``use_latest_release``
or ``use_max_tag`` is used. See below.
date format ``%Y%m%d.%H%M%S``, e.g. ``20130701.012212``, unless ``use_latest_release``,
``use_max_tag`` or ``use_max_release`` is used. See below.

github
The github repository, with author, e.g. ``lilydjwg/nvchecker``.
@@ -412,11 +423,24 @@ use_latest_release
small ones like `nvchecker's <https://github.com/lilydjwg/nvchecker/releases>`_
are only git tags that should use ``use_max_tag`` below.

Will return the release name instead of date.
Will return the release's tag name instead of date. (For historical reasons
it doesn't return the release name. See below to change.)

use_max_release
Set this to ``true`` to check for the max release on GitHub.
This option returns the largest one sorted by the
``sort_version_key`` option. Will return the tag name instead of date.

use_release_name
When ``use_latest_release`` or ``use_max_release`` is ``true``,
setting this to ``true`` will cause nvchecker to return the release name
instead of the tag name.

include_prereleases
When ``use_latest_release`` is ``true``, set this to ``true`` to take prereleases into
account.
When ``use_latest_release`` or ``use_max_release`` is ``true``,
set this to ``true`` to take prereleases into account.

This returns the release names (not the tag names).

This requires a token because it's using the v4 GraphQL API.
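For illustration, a configuration entry using the new ``use_max_release`` and ``use_release_name`` options described above might look like the sketch below; the entry name is a placeholder and the repository is the one the docs already use as an example::

  [nvchecker]
  source = "github"
  github = "lilydjwg/nvchecker"
  use_max_release = true
  use_release_name = true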
@@ -431,7 +455,7 @@ query

use_max_tag
Set this to ``true`` to check for the max tag on GitHub. Unlike
``use_latest_release``, this option includes both annotated tags and
``use_max_release``, this option includes both annotated tags and
lightweight ones, and return the largest one sorted by the
``sort_version_key`` option. Will return the tag name instead of date.
@@ -447,7 +471,8 @@ To set an authorization token, you can set:
- an entry in the keyfile for the host (e.g. ``github.com``)
- an entry in your ``netrc`` file for the host

This source supports :ref:`list options` when ``use_max_tag`` is set.
This source supports :ref:`list options` when ``use_max_tag`` or
``use_max_release`` is set.

Check Gitea
~~~~~~~~~~~
@@ -482,6 +507,15 @@ To set an authorization token, you can set:

This source supports :ref:`list options` when ``use_max_tag`` is set.

Check Gogs / Forgejo / Codeberg
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

Please use the above "gitea" source. Gitea is a fork of `Gogs <https://gogs.io/>`_.
`Forgejo <https://forgejo.org/>`_ is a fork of Gitea. Codeberg is a code
hosting provider that uses Forgejo. They share the same API endpoints nvchecker uses.

Alternatively, you can try the generic "git" source.

Check BitBucket
~~~~~~~~~~~~~~~
::
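As a rough, hedged sketch of the advice above, a Codeberg-hosted project could be checked through the "gitea" source along these lines; the ``host`` option and the ``forgejo/forgejo`` repository name are assumptions for the example and are not taken from this changeset::

  [forgejo]
  source = "gitea"
  host = "codeberg.org"
  gitea = "forgejo/forgejo"
  use_max_tag = true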
@@ -562,7 +596,7 @@ Check PyPI

source = "pypi"

Check `PyPI <https://pypi.python.org/>`_ for updates.
Check `PyPI <https://pypi.python.org/>`_ for updates. Yanked releases are ignored.

pypi
The name used on PyPI, e.g. ``PySide``.
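A minimal sketch of a PyPI entry, reusing the ``nvchecker`` package name that appears in this changeset's sample configuration (the entry name is a placeholder)::

  [nvchecker-pypi]
  source = "pypi"
  pypi = "nvchecker"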
@@ -656,11 +690,16 @@ Check crates.io

source = "cratesio"

Check `crates.io <https://crates.io/>`_ for updates.
Check `crates.io <https://crates.io/>`_ for updates. Yanked releases are ignored.

cratesio
The crate name on crates.io, e.g. ``tokio``.

use_pre_release
Whether to accept pre release. Default is false.

This source supports :ref:`list options`.

Check Local Pacman Database
~~~~~~~~~~~~~~~~~~~~~~~~~~~
::
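A minimal sketch of a crates.io entry with the new ``use_pre_release`` option; ``tokio`` is the example crate already named in the docs above::

  [tokio]
  source = "cratesio"
  cratesio = "tokio"
  use_pre_release = false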
@@ -757,6 +796,11 @@ This enables you to track updates from `Anitya <https://release-monitoring.org/>
anitya
``distro/package``, where ``distro`` can be a lot of things like "fedora", "arch linux", "gentoo", etc. ``package`` is the package name of the chosen distribution.

anitya_id
The identifier of the project/package in anitya.

Note that either anitya or anitya_id needs to be specified, anitya_id is preferred when both specified.

Check Android SDK
~~~~~~~~~~~~~~~~~
::
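A minimal sketch of an Anitya entry using the new ``anitya_id`` option; the id ``4813`` is the one exercised for the shutter project by this changeset's tests::

  [shutter]
  source = "anitya"
  anitya_id = "4813"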
@@ -840,6 +884,29 @@ strip_release

Note that either pkg or srcpkg needs to be specified (but not both) or the item name will be used as pkg.

Check RPM repository
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
::

source = "rpmrepo"

This enables you to check latest package versions in an arbitrary RPM repository in `repomd` format used by package managers such as ``dnf`` (Fedora, RHEL, AlmaLinux etc.) or ``zypper`` (openSUSE) without the need for native RPM tools.

pkg
Name of the RPM package (you can also use ``rpmrepo`` as with other sources, but ``pkg`` is preferred for clarity)

repo
URL of the repository (required, ``repodata/repomd.xml`` should be there)

arch
Architecture of the RPM package (``binary``, ``src``, ``any``, ``x86_64``, ``aarch64``, etc, defaults to ``binary``)

This source supports :ref:`list options`.

.. note::
An additional dependency "lxml" is required.
You can use ``pip install 'nvchecker[rpmrepo]'``.

Check Git repository
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
::
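A minimal sketch of an ``rpmrepo`` entry, reusing the Fedora repository and package exercised by this changeset's tests; ``arch`` is optional and shown only as an example value from the list above::

  [knot]
  source = "rpmrepo"
  pkg = "knot"
  repo = "http://ftp.sh.cvut.cz/fedora/linux/updates/39/Everything/x86_64/"
  arch = "x86_64"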
@@ -997,6 +1064,36 @@ Check `Go packages and modules <https://pkg.go.dev/>`_ for updates.
go
The name of Go package or module, e.g. ``github.com/caddyserver/caddy/v2/cmd``.

Check opam repository
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
::

source = "opam"

This enables you to check latest package versions in an arbitrary `opam repository <https://opam.ocaml.org/doc/Manual.html#Repositories>` without the need for the opam command line tool.

pkg
Name of the opam package

repo
URL of the repository (optional, the default ``https://opam.ocaml.org`` repository is used if not specified)

This source supports :ref:`list options`.

Check Snapcraft
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
::

source = "snapcraft"

This source allows you to check the latest package versions in the `Snapcraft <https://snapcraft.io>`_.

snap
Name of the snap package.

channel
Name of the channel.

Combine others' results
~~~~~~~~~~~~~~~~~~~~~~~
::
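Minimal sketches of the new ``opam`` and ``snapcraft`` sources documented above, reusing package and channel names that appear in this changeset's tests::

  [omigrate]
  source = "opam"
  pkg = "omigrate"

  [test-snapd-public]
  source = "snapcraft"
  snap = "test-snapd-public"
  channel = "edge"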
3  mypy.ini

@@ -26,3 +26,6 @@ ignore_missing_imports = True

[mypy-jq]
ignore_missing_imports = True

[mypy-tomli]
ignore_missing_imports = True
@@ -1,4 +1,4 @@
# MIT licensed
# Copyright (c) 2013-2024 lilydjwg <lilydjwg@gmail.com>, et al.

__version__ = '2.15'
__version__ = '2.17dev'
@@ -417,6 +417,7 @@ def check_version_update(
'updated',
name = name,
version = r.version,
revision = r.revision,
old_version = oldver,
url = r.url,
)

@@ -444,7 +445,7 @@ async def process_result(
if isinstance(r1, Exception):
entry_waiter.set_exception(r.name, r1)
# no versions are returned from "apply_list_options"?
logger.error('no-result', name=r.name, error=str(r1))
logger.error('no-result', name=r.name, error=repr(r1))
has_failures = True
continue
check_version_update(oldvers, r.name, r1, verbose)
@@ -12,7 +12,6 @@ from .base import BaseSession, TemporaryError, Response, HTTPError
__all__ = ['session']

logger = structlog.get_logger(logger_name=__name__)
connector = aiohttp.TCPConnector(limit=20)

class AiohttpSession(BaseSession):
session = None
@@ -49,7 +49,8 @@ class HttpxSession(BaseSession):
method, url, json = json, content = body,
headers = headers,
follow_redirects = follow_redirects,
params = params,
# httpx checks for None but not ()
params = params or None,
)
err_cls: Optional[type] = None
if r.status_code >= 500:
@@ -4,6 +4,7 @@
import json as _json
from urllib.parse import urlencode
from typing import Optional, Dict, Any
import os

from tornado.httpclient import AsyncHTTPClient, HTTPRequest

@@ -17,8 +18,9 @@ from .base import BaseSession, TemporaryError, Response, HTTPError
__all__ = ['session']

HTTP2_AVAILABLE = None if pycurl else False
SSL_CERT_FILE = os.environ.get('SSL_CERT_FILE')

def try_use_http2(curl):
def setup_curl(curl):
global HTTP2_AVAILABLE
if HTTP2_AVAILABLE is None:
try:

@@ -29,6 +31,10 @@ def try_use_http2(curl):
elif HTTP2_AVAILABLE:
curl.setopt(pycurl.HTTP_VERSION, 4)

if SSL_CERT_FILE:
curl.setopt_string(pycurl.CAINFO, SSL_CERT_FILE)
curl.setopt_string(pycurl.ACCEPT_ENCODING, "")

class TornadoSession(BaseSession):
def setup(
self,

@@ -68,7 +74,7 @@ class TornadoSession(BaseSession):
kwargs['body'] = body
elif json:
kwargs['body'] = _json.dumps(json)
kwargs['prepare_curl_callback'] = try_use_http2
kwargs['prepare_curl_callback'] = setup_curl

if proxy:
host, port = proxy.rsplit(':', 1)
@@ -26,6 +26,9 @@ def _console_msg(event):
else:
msg = evt

if 'revision' in event and not event['revision']:
del event['revision']

if 'name' in event:
msg = f"{event['name']}: {msg}"
del event['name']
@@ -6,7 +6,9 @@ from nvchecker.api import RichResult
URL = 'https://release-monitoring.org/api/project/{pkg}'

async def get_version(name, conf, *, cache, **kwargs):
pkg = conf.get('anitya')
pkg = conf.get('anitya_id')
if pkg is None:
pkg = conf.get('anitya')
url = URL.format(pkg = pkg)
data = await cache.get_json(url)
return RichResult(
@@ -3,7 +3,7 @@

from nvchecker.api import session, RichResult, GetVersionError

URL = 'https://www.archlinux.org/packages/search/json/'
URL = 'https://archlinux.org/packages/search/json/'

async def request(pkg):
res = await session.get(URL, params={"name": pkg})
@@ -1,15 +1,40 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import re

import structlog

from nvchecker.api import RichResult

logger = structlog.get_logger(logger_name=__name__)

API_URL = 'https://crates.io/api/v1/crates/%s'
# https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string
VERSION_PATTERN = r'^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$'

async def get_version(name, conf, *, cache, **kwargs):
name = conf.get('cratesio') or name
use_pre_release = conf.get('use_pre_release', False)
data = await cache.get_json(API_URL % name)
version = [v['num'] for v in data['versions'] if not v['yanked']][0]
return RichResult(
version = version,
url = f'https://crates.io/crates/{name}/{version}',
)
results = []
for v in data['versions']:
if v['yanked']:
continue
version = v['num']
match = re.fullmatch(VERSION_PATTERN, version)
if match is None:
logger.warning('ignoring invalid version', version=version)
continue
if not use_pre_release and match.group('prerelease'):
continue
results.append(
RichResult(
version=version,
url=f'https://crates.io/crates/{name}/{version}',
)
)

return results
@@ -51,7 +51,7 @@ async def get_version(
]
else:
return RichResult(
version = data[0]['commit']['committer']['date'].split('T', 1)[0].replace('-', ''),
version = data[0]['commit']['committer']['date'],
revision = data[0]['sha'],
url = data[0]['html_url'],
)
@@ -21,6 +21,7 @@ GITHUB_URL = 'https://api.%s/repos/%s/commits'
GITHUB_LATEST_RELEASE = 'https://api.%s/repos/%s/releases/latest'
# https://developer.github.com/v3/git/refs/#get-all-references
GITHUB_MAX_TAG = 'https://api.%s/repos/%s/git/refs/tags'
GITHUB_MAX_RELEASE = 'https://api.%s/repos/%s/releases'
GITHUB_GRAPHQL_URL = 'https://api.%s/graphql'

async def get_version(name, conf, **kwargs):

@@ -120,8 +121,8 @@ async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult:
url = f'https://github.com/{repo}/releases/tag/{version}',
)

async def get_latest_release_with_prereleases(key: Tuple[str, str, str]) -> RichResult:
host, repo, token = key
async def get_latest_release_with_prereleases(key: Tuple[str, str, str, str]) -> RichResult:
host, repo, token, use_release_name = key
owner, reponame = repo.split('/')
headers = {
'Authorization': f'bearer {token}',

@@ -143,9 +144,15 @@ async def get_latest_release_with_prereleases(key: Tuple[str, str, str]) -> Rich
if not refs:
raise GetVersionError('no release found')

tag_name = refs[0]['node']['tag']['name']
if use_release_name:
version = refs[0]['node']['name']
else:
version = tag_name

return RichResult(
version = refs[0]['node']['name'],
gitref = refs[0]['node']['tag']['name'],
version = version,
gitref = f"refs/tags/{tag_name}",
revision = refs[0]['node']['tagCommit']['oid'],
url = refs[0]['node']['url'],
)

@@ -174,19 +181,25 @@ async def get_version_real(

use_latest_release = conf.get('use_latest_release', False)
include_prereleases = conf.get('include_prereleases', False)
use_release_name = conf.get('use_release_name', False)
if use_latest_release and include_prereleases:
if not token:
raise GetVersionError('token not given but it is required')

return await cache.get((host, repo, token), get_latest_release_with_prereleases) # type: ignore
return await cache.get(
(host, repo, token, use_release_name),
get_latest_release_with_prereleases) # type: ignore

br = conf.get('branch')
path = conf.get('path')
use_max_tag = conf.get('use_max_tag', False)
use_max_release = conf.get('use_max_release', False)
if use_latest_release:
url = GITHUB_LATEST_RELEASE % (host, repo)
elif use_max_tag:
url = GITHUB_MAX_TAG % (host, repo)
elif use_max_release:
url = GITHUB_MAX_RELEASE % (host, repo)
else:
url = GITHUB_URL % (host, repo)
parameters = {}

@@ -216,11 +229,29 @@ async def get_version_real(
raise GetVersionError('No tag found in upstream repository.')
return tags

if use_max_release:
releases: List[Union[str, RichResult]] = [
RichResult(
version = ref['name'] if use_release_name else ref['tag_name'],
gitref = f"refs/tags/{ref['tag_name']}",
url = ref['html_url'],
) for ref in data if include_prereleases or not ref['prerelease']
]
if not releases:
raise GetVersionError('No release found in upstream repository.')
return releases

if use_latest_release:
if 'tag_name' not in data:
raise GetVersionError('No release found in upstream repository.')

if use_release_name:
version = data['name']
else:
version = data['tag_name']

return RichResult(
version = data['tag_name'],
version = version,
gitref = f"refs/tags/{data['tag_name']}",
url = data['html_url'],
)
@@ -31,9 +31,9 @@ async def get_version_impl(info) -> RichResult:

elements = doc.xpath("//div[@class='Version-tag']/a/text()")
try:
version = elements[0]
version = elements[0] # type: ignore
return RichResult(
version = version,
version = version, # type: ignore
url = GO_PKG_VERSION_URL.format(pkg=pkg_name, version=version),
)
except IndexError:
20  nvchecker_source/launchpad.py  Normal file

@@ -0,0 +1,20 @@
# MIT Licensed
# Copyright (c) 2024 Bert Peters <bertptrs@archlinux.org>, et al.
from __future__ import annotations
from nvchecker.api import AsyncCache, Entry, RichResult

PROJECT_INFO_URL = "https://api.launchpad.net/1.0/{launchpad}"

async def get_version(name: str, conf: Entry, *, cache: AsyncCache, **kwargs):
launchpad = conf["launchpad"]

project_data = await cache.get_json(PROJECT_INFO_URL.format(launchpad=launchpad))
data = await cache.get_json(project_data['releases_collection_link'])

return [
RichResult(version=entry["version"], url=entry["web_link"])
for entry in data["entries"]
]
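A minimal sketch of how the new ``launchpad`` source added above could be configured, reusing the project name from this changeset's test::

  [sakura]
  source = "launchpad"
  launchpad = "sakura"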
71  nvchecker_source/opam.py  Normal file

@@ -0,0 +1,71 @@
# MIT licensed
# Copyright (c) 2024 Daniel Peukert <daniel@peukert.cc>, et al.

import asyncio
from io import BytesIO
import tarfile
from typing import List

from nvchecker.api import (
session, VersionResult,
Entry, AsyncCache,
KeyManager, RichResult
)

OPAM_REPO_INDEX_URL = "%s/index.tar.gz"
OPAM_VERSION_PATH_PREFIX = "packages/%s/%s."
OPAM_VERSION_PATH_SUFFIX = "/opam"

OPAM_DEFAULT_REPO = 'https://opam.ocaml.org'
OPAM_DEFAULT_REPO_VERSION_URL = "%s/packages/%s/%s.%s"

def _decompress_and_list_files(data: bytes) -> List[str]:
# Convert the bytes to a file object and get a list of files
archive = tarfile.open(mode='r', fileobj=BytesIO(data))
return archive.getnames()

async def get_files(url: str) -> List[str]:
# Download the file and get its contents
res = await session.get(url)
data = res.body

# Get the file list of the archive
loop = asyncio.get_running_loop()
return await loop.run_in_executor(None, _decompress_and_list_files, data)

async def get_package_versions(files: List[str], pkg: str) -> List[str]:
# Prepare the filename prefix based on the package name
prefix = OPAM_VERSION_PATH_PREFIX % (pkg , pkg)

# Only keep opam files that are relevant to the package we're working with
filtered_files = []

for filename in files:
if filename.startswith(prefix) and filename.endswith(OPAM_VERSION_PATH_SUFFIX):
filtered_files.append(filename[len(prefix):-1*len(OPAM_VERSION_PATH_SUFFIX)])

return filtered_files

async def get_version(
name: str, conf: Entry, *,
cache: AsyncCache, keymanager: KeyManager,
**kwargs,
):
pkg = conf.get('pkg', name)
repo = conf.get('repo', OPAM_DEFAULT_REPO).rstrip('/')

# Get the list of files in the repo index (see https://opam.ocaml.org/doc/Manual.html#Repositories for repo structure)
files = await cache.get(OPAM_REPO_INDEX_URL % repo, get_files) # type: ignore

# Parse the version strings from the file names
raw_versions = await get_package_versions(files, pkg)

# Convert the version strings into RichResults
versions = []
for version in raw_versions:
versions.append(RichResult(
version = version,
# There is no standardised URL scheme, so we only return an URL for the default registry
url = OPAM_DEFAULT_REPO_VERSION_URL % (repo, pkg, pkg, version) if repo == OPAM_DEFAULT_REPO else None,
))
return versions
@@ -1,10 +1,13 @@
# MIT licensed
# Copyright (c) 2013-2021 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2013-2021,2023-2024 lilydjwg <lilydjwg@gmail.com>, et al.

from packaging.version import Version
import structlog
from packaging.version import Version, InvalidVersion

from nvchecker.api import RichResult

logger = structlog.get_logger(logger_name=__name__)

async def get_version(name, conf, *, cache, **kwargs):
ret = []

@@ -16,7 +19,18 @@ async def get_version(name, conf, *, cache, **kwargs):
data = await cache.get_json(url)

for version in data['releases'].keys():
parsed_version = Version(version)
# Skip versions that are marked as yanked.
if (vers := data['releases'][version]) and vers[0]['yanked']:
continue

try:
parsed_version = Version(version)
except InvalidVersion:
if data['releases'][version]:
# emit a warning if there is something under the invalid version
# sympy has an empty "0.5.13-hg" version
logger.warning('ignoring invalid version', version=version)
continue

if not use_pre_release and parsed_version.is_prerelease:
continue

84  nvchecker_source/rpmrepo.py  Normal file

@@ -0,0 +1,84 @@
# MIT licensed
# Copyright (c) 2024 Jakub Ružička <jru@debian.org>, et al.

import asyncio
import gzip
import pathlib
import urllib
from typing import Set

import lxml.etree
from nvchecker.api import session, AsyncCache, Entry, KeyManager, VersionResult

# XML namespaces used in repodata (dead links haha)
NS = {
'common': 'http://linux.duke.edu/metadata/common',
'repo': 'http://linux.duke.edu/metadata/repo',
'rpm': 'http://linux.duke.edu/metadata/rpm'
}

async def get_version(
name: str, conf: Entry, *,
cache: AsyncCache, keymanager: KeyManager,
**kwargs,
) -> VersionResult:
repo = conf['repo']
arch = conf.get('arch', 'binary')
pkg = conf.get('pkg')
if not pkg:
pkg = conf.get('rpmrepo', name)

repo_url = urllib.parse.urlparse(repo)
repo_path = pathlib.PurePosixPath(repo_url.path)

# get the url of repomd.xml
repomd_path = repo_path / 'repodata' / 'repomd.xml'
repomd_url = repo_url._replace(path=str(repomd_path)).geturl()
# download repomd.xml (use cache)
repomd_body = await cache.get(repomd_url, get_file) # type: ignore
# parse repomd.xml
repomd_xml = lxml.etree.fromstring(repomd_body)

# get the url of *primary.xml.gz
primary_element = repomd_xml.find('repo:data[@type="primary"]/repo:location', namespaces=NS)
primary_path = repo_path / primary_element.get('href') # type: ignore
primary_url = repo_url._replace(path=str(primary_path)).geturl()
# download and decompress *primary.xml.gz (use cache)
primary_body = await cache.get(primary_url, get_file_gz) # type: ignore
# parse *primary.xml metadata
metadata = lxml.etree.fromstring(primary_body)

# use set to eliminate duplication
versions_set: Set[str] = set()
# iterate package metadata
for el in metadata.findall(f'common:package[common:name="{pkg}"]', namespaces=NS):
pkg_arch = el.findtext('common:arch', namespaces=NS)

# filter bych arch
if arch == 'binary':
if pkg_arch == 'src':
continue
elif arch != 'any':
if pkg_arch != arch:
continue

version_info = el.find('common:version', namespaces=NS)
version = version_info.get('ver') # type: ignore
versions_set.add(version) # type: ignore

versions = list(versions_set)
return versions # type: ignore

async def get_file(url: str) -> bytes:
res = await session.get(url)
return res.body

async def get_file_gz(url: str) -> bytes:
res = await session.get(url)
loop = asyncio.get_running_loop()
return await loop.run_in_executor(
None, gzip.decompress, res.body)

33  nvchecker_source/snapcraft.py  Normal file

@@ -0,0 +1,33 @@
# MIT licensed
# Copyright (c) 2025 Maxim Slipenko <maxim@slipenko.com>, et al.

from nvchecker.api import (
GetVersionError
)
from nvchecker.httpclient.base import HTTPError

URL="https://api.snapcraft.io/v2/snaps/info/%(snap)s"

async def get_version(
name: str, conf, *,
cache, keymanager,
**kwargs,
):
try:
snap = conf.get("snap")
channel = conf.get("channel")

result = await cache.get_json(
URL % { "snap": snap },
headers={
"Snap-Device-Series": "16",
},
)
except HTTPError:
raise GetVersionError(f"Failed to request snap info for {snap}")

for c in result['channel-map']:
if c['channel']['name'] == channel:
return c['version']

raise GetVersionError(f"Failed to find version for {snap}")

@@ -5,6 +5,7 @@ build-backend = "setuptools.build_meta"
[tool.pytest.ini_options]
# addopts = -n auto
asyncio_mode = "strict"
asyncio_default_fixture_loop_scope = "session"

# build and upload
# rm -rf dist && python -m build --no-isolation && twine check dist/* && twine upload dist/*

@@ -2,11 +2,6 @@
oldver = "old_ver.json"
newver = "new_ver.json"

[vim]
source = "regex"
regex = "7\\.3\\.\\d+"
url = "http://ftp.vim.org/pub/vim/patches/7.3/"

[google-chrome]
source = "cmd"
cmd = '''wget -qO- http://dl.google.com/linux/chrome/rpm/stable/x86_64/repodata/other.xml.gz | zgrep -A1 "google-chrome-stable" | awk -F\" '/version/ {print $4"-"$6}' '''

@@ -25,17 +20,13 @@ github = "lilydjwg/nvchecker"
[ssed]
source = "regex"
regex = "The current version is ([\\d.]+)\\."
url = "http://sed.sourceforge.net/grabbag/ssed/"
url = "https://sed.sourceforge.net/grabbag/ssed/"
proxy = "http://localhost:8087"

[PySide]
source = "pypi"
pypi = "PySide"
pypi = "nvchecker"

[test]
source = "manual"
manual = "0.1"

["Sparkle Test App"]
source = "sparkle"
sparkle = "https://sparkle-project.org/files/sparkletestcast.xml"

@@ -18,7 +18,7 @@ _handler_precedence = (
BOOL_KEYS = [
'strip_release', 'use_last_modified',
'use_latest_release', 'use_latest_tag',
'use_max_tag', 'use_pre_release',
'use_max_release', 'use_max_tag', 'use_pre_release',
]

INT_KEYS = [

40  scripts/run_cached_tests  Executable file

@@ -0,0 +1,40 @@
#!/bin/bash -e

mitmdump=${mitmdump:-mitmdump}

if [[ -f ~/.mitmproxy/nvdump ]]; then
$mitmdump -S ~/.mitmproxy/nvdump -p 7890 --ignore-hosts '127\.0\.0\.1' --server-replay-reuse --server-replay-extra=forward -w newdump >mitmdump_output &
else
$mitmdump -w ~/.mitmproxy/nvdump -p 7890 --ignore-hosts '127\.0\.0\.1' >mitmdump_output &
fi

mitm_pid=$!

on_exit () {
kill -INT $mitm_pid

if [[ -s newdump ]]; then
cat newdump >> ~/.mitmproxy/nvdump
fi

cat mitmdump_output
}

trap on_exit EXIT

if [[ -f keyfile.toml ]]; then
export KEYFILE=keyfile.toml
fi

for _ in {1..10}; do
if [[ -s ~/.mitmproxy/mitmproxy-ca-cert.pem ]]; then
break
fi
sleep 1
done

export SSL_CERT_FILE=$HOME/.mitmproxy/mitmproxy-ca-cert.pem
export GIT_SSL_CAINFO=$SSL_CERT_FILE
export http_proxy=http://localhost:7890 https_proxy=http://localhost:7890

pytest

@@ -29,6 +29,7 @@ classifiers =
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Programming Language :: Python :: 3.12
Programming Language :: Python :: 3.13
Topic :: Internet
Topic :: Internet :: WWW/HTTP
Topic :: Software Development

@@ -38,10 +39,10 @@ classifiers =

[options]
zip_safe = True
python_requires = >=3.8

packages = find_namespace:
install_requires =
setuptools; python_version<"3.8"
tomli; python_version<"3.11"
structlog
platformdirs

@@ -63,6 +64,8 @@ pypi =
packaging
htmlparser =
lxml
rpmrepo =
lxml
jq =
jq

@@ -9,7 +9,7 @@ import tempfile
import pytest

pytestmark = [
pytest.mark.asyncio(scope="session"),
pytest.mark.asyncio,
pytest.mark.skipif(shutil.which('makepkg') is None, reason='requires makepkg command'),
pytest.mark.skipif(shutil.which('repo-add') is None, reason='requires repo-add command')
]

@@ -9,7 +9,7 @@ import tempfile
import pytest

pytestmark = [
pytest.mark.asyncio(scope="session"),
pytest.mark.asyncio,
pytest.mark.skipif(shutil.which('pacman') is None, reason='requires pacman command'),
pytest.mark.skipif(shutil.which('fakeroot') is None, reason='requires fakeroot command'),
]

@@ -3,8 +3,9 @@
# Copyright (c) 2017 Chih-Hsuan Yen <yan12125 at gmail dot com>

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@pytest.mark.flaky(reruns=10)
async def test_android_addon(get_version):
assert await get_version("android-google-play-apk-expansion", {
"source": "android_sdk",

@@ -13,11 +14,12 @@ async def test_android_addon(get_version):
}) == "1.r03"

async def test_android_package(get_version):
assert await get_version("android-sdk-cmake", {
version = await get_version("android-sdk-cmake", {
"source": "android_sdk",
"android_sdk": "cmake;",
"repo": "package",
}) == "3.22.1"
})
assert version.startswith("3.")

async def test_android_package_channel(get_version):

@@ -5,7 +5,7 @@
import re

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_anitya(get_version):
version = await get_version("shutter", {

@@ -13,3 +13,10 @@ async def test_anitya(get_version):
"anitya": "fedora/shutter",
})
assert re.match(r"[0-9.]+", version)

async def test_anitya_by_id(get_version):
version = await get_version("shutter", {
"source": "anitya",
"anitya_id": "4813",
})
assert re.match(r"[0-9.]+", version)

@@ -3,7 +3,7 @@
# Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@pytest.mark.flaky(reruns=10)
async def test_apt(get_version):

@@ -2,7 +2,7 @@
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@pytest.mark.flaky
async def test_archpkg(get_version):

@@ -26,9 +26,9 @@ async def test_archpkg_provided(get_version):

@pytest.mark.flaky
async def test_archpkg_provided_strip(get_version):
assert await get_version("jsoncpp", {
int(await get_version("jsoncpp", {
"source": "archpkg",
"provided": "libjsoncpp.so",
"strip_release": True,
}) == "25"
}))

@@ -1,13 +1,9 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import os

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"),
pytest.mark.needs_net,
pytest.mark.skipif(os.environ.get('TRAVIS') == 'true',
reason="fail too often")]
pytestmark = [pytest.mark.asyncio,
pytest.mark.needs_net]

@pytest.mark.flaky(reruns=10)
async def test_aur(get_version):

@@ -2,7 +2,7 @@
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_bitbucket(get_version):
assert await get_version("example", {

@@ -2,7 +2,7 @@
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.

import pytest
pytestmark = pytest.mark.asyncio(scope="session")
pytestmark = pytest.mark.asyncio

async def test_cache(run_str_multi):
conf = r'''

@@ -3,7 +3,7 @@

import time
import pytest
pytestmark = pytest.mark.asyncio(scope="session")
pytestmark = pytest.mark.asyncio

async def test_cmd(get_version):
assert await get_version("example", {

@@ -2,7 +2,7 @@
# Copyright (c) 2021 lilydjwg <lilydjwg@gmail.com>, et al.

import pytest
pytestmark = pytest.mark.asyncio(scope="session")
pytestmark = pytest.mark.asyncio

async def test_combiner(run_str_multi):
conf = r'''

@@ -1,9 +1,12 @@
# MIT licensed
# Copyright (c) 2020 Chih-Hsuan Yen <yan12125 at gmail dot com>

import pytest
import os
import datetime
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net,
pytest.mark.skipif(bool(os.environ.get('GITHUB_RUN_ID')), reason="400 very often")]

async def test_container(get_version):
assert await get_version("hello-world", {

@@ -2,7 +2,7 @@
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_cpan(get_version):
assert await get_version("POE-Component-Server-HTTPServer", {

@@ -2,9 +2,9 @@
# Copyright (c) 2022 Pekka Ristola <pekkarr [at] protonmail [dot] com>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_cran(get_version):
assert await get_version("xml2", {
"source": "cran",
}) == "1.3.6"
}) == "1.3.7"

@@ -2,9 +2,29 @@
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_cratesio(get_version):
assert await get_version("example", {
"source": "cratesio",
}) == "1.1.0"

async def test_cratesio_list(get_version):
assert await get_version("example", {
"source": "cratesio",
"include_regex": r"^1\.0.*",
}) == "1.0.2"

async def test_cratesio_skip_prerelease(get_version):
with pytest.raises(RuntimeError, match='include_regex matched no versions'):
await get_version("cargo-lock", {
"source": "cratesio",
"include_regex": r".*-.*",
})

async def test_cratesio_use_prerelease(get_version):
await get_version("cargo-lock", {
"source": "cratesio",
"use_pre_release": "true",
"include_regex": r".*-.*",
})

@@ -3,7 +3,7 @@
# Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@pytest.mark.flaky(reruns=10)
async def test_debianpkg(get_version):

@@ -2,7 +2,7 @@
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_gems(get_version):
assert await get_version("example", {

@@ -2,7 +2,7 @@
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_git(get_version):
assert await get_version("example", {

@@ -2,7 +2,7 @@
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"),
pytestmark = [pytest.mark.asyncio,
pytest.mark.needs_net]

@pytest.mark.flaky(reruns=10)

@@ -10,8 +10,8 @@ async def test_gitea(get_version):
ver = await get_version("example", {
"source": "gitea",
"gitea": "gitea/tea"})
assert len(ver) == 8
assert ver.isdigit()
assert ver.startswith('20')
assert 'T' in ver

@pytest.mark.flaky(reruns=10)
async def test_gitea_max_tag_with_include(get_version):

@@ -5,7 +5,7 @@ import re

import pytest

pytestmark = [pytest.mark.asyncio(scope="session"),
pytestmark = [pytest.mark.asyncio,
pytest.mark.needs_net,
pytest.mark.usefixtures('keyfile')]

@@ -36,6 +36,22 @@ async def test_github_latest_release_include_prereleases(get_version):
"include_prereleases": True,
}) == "v0.0.1-pre"

async def test_github_latest_release_with_release_name(get_version):
version = await get_version("example", {
"source": "github",
"github": "mamba-org/mamba",
"use_latest_release": True,
})
assert version.startswith('20') # tag name

version = await get_version("example", {
"source": "github",
"github": "mamba-org/mamba",
"use_latest_release": True,
"use_release_name": True,
})
assert not version.startswith('20') # release name

async def test_github_max_tag(get_version):
assert await get_version("example", {
"source": "github",

@@ -43,6 +59,20 @@ async def test_github_max_tag(get_version):
"use_max_tag": True,
}) == "second_release"

async def test_github_max_release(get_version):
assert await get_version("example", {
"source": "github",
"github": "harry-sanabria/ReleaseTestRepo",
"use_max_release": True,
}) == "second_release"

assert await get_version("example", {
"source": "github",
"github": "harry-sanabria/ReleaseTestRepo",
"use_max_release": True,
"use_release_name": True,
}) == "second_release"

async def test_github_max_tag_with_ignored(get_version):
assert await get_version("example", {
"source": "github",

@@ -51,6 +81,21 @@ async def test_github_max_tag_with_ignored(get_version):
"ignored": "second_release release3",
}) == "first_release"

async def test_github_max_release_with_ignored(get_version):
assert await get_version("example", {
"source": "github",
"github": "harry-sanabria/ReleaseTestRepo",
"use_max_release": True,
"ignored": "second_release release3",
}) == "first_release"
assert await get_version("example", {
"source": "github",
"github": "harry-sanabria/ReleaseTestRepo",
"use_max_release": True,
"ignored": "second_release",
"use_release_name": True,
}) == "release #3"

async def test_github_with_path(get_version):
assert await get_version("example", {
"source": "github",

@@ -75,6 +120,16 @@ async def test_github_max_tag_with_include(get_version):
})
assert re.match(r'chrome-[\d.]+', version)

async def test_github_max_release_with_include(get_version):
version = await get_version("example", {
"source": "github",
"github": "EFForg/https-everywhere",
"use_max_release": True,
"use_release_name": True,
"include_regex": r"Release \d.*",
})
assert re.match(r'Release [\d.]+', version)

async def test_github_latest_tag(get_version):
assert await get_version("example", {
"source": "github",

@@ -2,7 +2,7 @@
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_gitlab(get_version):
ver = await get_version("example", {

@@ -12,17 +12,19 @@ except ImportError:
lxml_available = False

pytestmark = [
pytest.mark.asyncio(scope="session"),
pytest.mark.asyncio,
pytest.mark.needs_net,
pytest.mark.skipif(not lxml_available, reason="needs lxml")
]

async def test_go(get_version):
assert await get_version("one version", {
ver = await get_version("one version", {
"source": "go",
"go": "github.com/caddyserver/replace-response",
}) == "v0.0.0-20231221003037-a85d4ddc11d6"
})

assert ver.startswith("v0.0.0-")

assert await get_version("multiple version", {
"source": "go",

@@ -2,7 +2,7 @@
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@pytest.mark.flaky(reruns=10)
async def test_hackage(get_version):

@@ -10,7 +10,7 @@ except ImportError:
lxml_available = False

pytestmark = [
pytest.mark.asyncio(scope="session"),
pytest.mark.asyncio,
pytest.mark.needs_net,
pytest.mark.skipif(not lxml_available, reason="needs lxml"),
]

@@ -10,7 +10,7 @@ try:
except ImportError:
httpbin_available = False

pytestmark = pytest.mark.asyncio(scope="session")
pytestmark = pytest.mark.asyncio

@pytest.mark.needs_net
async def test_redirection(get_version):

@@ -10,7 +10,7 @@ except ImportError:
jq_available = False

pytestmark = [
pytest.mark.asyncio(scope="session"),
pytest.mark.asyncio,
pytest.mark.needs_net,
pytest.mark.skipif(not jq_available, reason="needs jq"),
]

16  tests/test_launchpad.py  Normal file

@@ -0,0 +1,16 @@
# MIT Licensed
# Copyright (c) 2024 Bert Peters <bertptrs@archlinux.org>, et al.
import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@pytest.mark.flaky(reruns=10)
async def test_launchpad(get_version):
version = await get_version(
"sakura",
{
"source": "launchpad",
"launchpad": "sakura",
}
)

assert version == '3.8.8'

@@ -2,7 +2,7 @@
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import pytest
pytestmark = pytest.mark.asyncio(scope="session")
pytestmark = pytest.mark.asyncio

async def test_manual(get_version):
assert await get_version("example", {

@@ -3,10 +3,11 @@

import pytest
pytestmark = [
pytest.mark.asyncio(scope="session"),
pytest.mark.asyncio,
pytest.mark.needs_net,
]

@pytest.mark.skip
async def test_mercurial(get_version):
assert await get_version("example", {
"source": "mercurial",

@@ -2,7 +2,7 @@
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_npm(get_version):
assert await get_version("example", {

25  tests/test_opam.py  Normal file

@@ -0,0 +1,25 @@
# MIT licensed
# Copyright (c) 2024 Daniel Peukert <daniel@peukert.cc>, et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_opam_official(get_version):
assert await get_version("test", {
"source": "opam",
"pkg": "omigrate",
}) == "0.3.2"

async def test_opam_coq(get_version):
assert await get_version("test", {
"source": "opam",
"repo": "https://coq.inria.fr/opam/released",
"pkg": "coq-abp",
}) == "8.10.0"

async def test_opam_coq_trailing_slash(get_version):
assert await get_version("test", {
"source": "opam",
"repo": "https://coq.inria.fr/opam/released/",
"pkg": "coq-abp",
}) == "8.10.0"

@@ -2,7 +2,7 @@
# Copyright (c) 2013-2021 Th3Whit3Wolf <the.white.wolf.is.1337@gmail.com>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_openvsx(get_version):
assert await get_version("usernamehw.indent-one-space", {

@@ -2,7 +2,7 @@
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_packagist(get_version):
assert await get_version("butterfly/example-web-application", {

@@ -4,7 +4,7 @@
import pathlib
import shutil
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"),
pytestmark = [pytest.mark.asyncio,
pytest.mark.skipif(shutil.which("pacman") is None,
reason="requires pacman command"),
pytest.mark.skipif(not pathlib.Path("/var/lib/pacman/sync/core.db").exists(),

@@ -2,7 +2,7 @@
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_pagure(get_version):
ver = await get_version("example", {

@@ -2,7 +2,7 @@
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_pypi(get_version):
assert await get_version("example", {

@@ -25,4 +25,15 @@ async def test_pypi_list(get_version):
assert await get_version("urllib3", {
"source": "pypi",
"include_regex": "^1\\..*",
}) == "1.26.18"
}) == "1.26.20"

async def test_pypi_invalid_version(get_version):
await get_version("sympy", {
"source": "pypi",
})

async def test_pypi_yanked_version(get_version):
assert await get_version("urllib3", {
"source": "pypi",
"include_regex": "^(1\\..*)|(2\\.0\\.[0,1])",
}) == "1.26.20"

@@ -13,7 +13,7 @@ except ImportError:
httpbin_available = False

pytestmark = [
pytest.mark.asyncio(scope="session"),
pytest.mark.asyncio,
pytest.mark.skipif(not httpbin_available, reason="needs pytest_httpbin"),
]

@@ -2,7 +2,7 @@
# Copyright (c) 2019-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"),
pytestmark = [pytest.mark.asyncio,
pytest.mark.needs_net]

@pytest.mark.flaky(reruns=10)

19  tests/test_rpmrepo.py  Normal file

@@ -0,0 +1,19 @@
# MIT licensed
# Copyright (c) 2024 Jakub Ružička <jru@debian.org>, et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_rpmrepo_fedora(get_version):
assert await get_version("knot_fedora-39", {
"source": "rpmrepo",
"pkg": "knot",
"repo": "http://ftp.sh.cvut.cz/fedora/linux/updates/39/Everything/x86_64/",
}) == "3.3.9"

async def test_rpmrepo_alma(get_version):
assert await get_version("knot_fedora-39", {
"source": "rpmrepo",
"pkg": "tmux",
"repo": "http://ftp.sh.cvut.cz/almalinux/9.5/BaseOS/x86_64/os/",
}) == "3.2a"

28  tests/test_snapcraft.py  Normal file

@@ -0,0 +1,28 @@
# MIT licensed
# Copyright (c) 2025 Maxim Slipenko <maxim@slipenko.com>, et al.

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_snapcraft(get_version):
assert await get_version("test", {
"source": "snapcraft",
"snap": "test-snapd-public",
"channel": "edge",
}) == "2.0"

async def test_snapcraft_non_existent_snap(get_version):
with pytest.raises(RuntimeError, match='Failed to request snap info for not-existent-snap'):
assert await get_version("test", {
"source": "snapcraft",
"snap": "not-existent-snap",
"channel": "stable",
})

async def test_snapcraft_non_existent_channel(get_version):
with pytest.raises(RuntimeError, match='Failed to find version for test-snapd-public'):
assert await get_version("test", {
"source": "snapcraft",
"snap": "test-snapd-public",
"channel": "non-existent-channel",
})

@@ -3,7 +3,7 @@
# Copyright (c) 2020 Sunlei <guizaicn@gmail.com>

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_sparkle(get_version):
assert await get_version('example', {

@@ -2,7 +2,7 @@
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import pytest
pytestmark = pytest.mark.asyncio(scope="session")
pytestmark = pytest.mark.asyncio

async def test_substitute_prefix(get_version):
assert await get_version("example", {

@@ -3,7 +3,7 @@
# Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@pytest.mark.flaky
async def test_ubuntupkg(get_version):

@@ -26,7 +26,7 @@ async def test_ubuntupkg_suite(get_version):
"suite": "xenial",
}) == "0.1.2-1"

@pytest.mark.flaky
@pytest.mark.flaky(reruns=10)
async def test_ubuntupkg_suite_with_paging(get_version):
assert await get_version("ffmpeg", {
"source": "ubuntupkg",

@@ -2,7 +2,7 @@
# Copyright (c) 2013-2021 Th3Whit3Wolf <the.white.wolf.is.1337@gmail.com>, et al.

import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

async def test_vsmarketplace(get_version):
assert await get_version("usernamehw.indent-one-space", {