Mirror of https://github.com/lilydjwg/nvchecker.git, synced 2025-03-10 06:14:02 +00:00

Merge pull request #239 from dpeukert/feature/source-url-support
Implement URL result support for other sources

Commit c401d239b2 · 25 changed files with 254 additions and 88 deletions
@@ -761,6 +761,9 @@ This enables you to track updates of macOS applications which using `Sparkle framework`_.
 sparkle
   The url of the sparkle appcast.
 
+release_notes_language
+  The language of release notes to return when localized release notes are available (defaults to ``en`` for English, the unlocalized release notes are used as a fallback)
+
 Check Pagure
 ~~~~~~~~~~~~
 ::
@@ -298,30 +298,31 @@ def substitute_version(
   return version
 
 def apply_list_options(
-  versions: List[str], conf: Entry,
-) -> Optional[str]:
+  versions: List[Union[str, RichResult]], conf: Entry,
+) -> Optional[Union[str, RichResult]]:
   pattern = conf.get('include_regex')
   if pattern:
     re_pat = re.compile(pattern)
     versions = [x for x in versions
-                if re_pat.fullmatch(x)]
+                if re_pat.fullmatch(str(x))]
 
   pattern = conf.get('exclude_regex')
   if pattern:
     re_pat = re.compile(pattern)
     versions = [x for x in versions
-                if not re_pat.fullmatch(x)]
+                if not re_pat.fullmatch(str(x))]
 
   ignored = set(conf.get('ignored', '').split())
   if ignored:
-    versions = [x for x in versions if x not in ignored]
+    versions = [x for x in versions
+                if str(x) not in ignored]
 
   if not versions:
     return None
 
   sort_version_key = sort_version_keys[
     conf.get("sort_version_key", "parse_version")]
-  versions.sort(key=sort_version_key) # type: ignore
+  versions.sort(key=lambda version: sort_version_key(str(version))) # type: ignore
 
   return versions[-1]
 
@@ -342,6 +343,9 @@ def _process_result(r: RawResult) -> Union[Result, Exception]:
     return version
   elif isinstance(version, list):
     version_str = apply_list_options(version, conf)
+    if isinstance(version_str, RichResult):
+      url = version_str.url
+      version_str = version_str.version
   elif isinstance(version, RichResult):
     version_str = version.version
     url = version.url
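Editor's note on the hunk above: a version list may now mix plain strings and RichResult objects, so every regex match, ignore check, and sort key goes through str(), which RichResult.__str__ maps to the bare version string, while the selected element itself is returned unchanged so its URL survives. A small self-contained sketch of that behaviour, using a simplified stand-in class and a naive lexical sort rather than nvchecker's own sort machinery:

# Hypothetical, self-contained sketch; RichResult here is a simplified stand-in
# for nvchecker.api.RichResult, and the sort is a plain lexical sort.
import re
from dataclasses import dataclass
from typing import List, Optional, Union

@dataclass
class RichResult:
  version: str
  url: Optional[str] = None
  def __str__(self):
    return self.version

def pick_latest(versions: List[Union[str, RichResult]], include_regex: str) -> Optional[Union[str, RichResult]]:
  re_pat = re.compile(include_regex)
  # str() unwraps RichResult entries so the regex sees only the version text
  versions = [v for v in versions if re_pat.fullmatch(str(v))]
  if not versions:
    return None
  versions.sort(key=lambda v: str(v))  # naive lexical sort for the sketch
  return versions[-1]                  # the winner keeps its URL, if it had one

tags = [RichResult('1.2.0', 'https://example.org/tags/1.2.0'), '1.10.0-beta', '1.1.0']
latest = pick_latest(tags, r'\d+\.\d+\.\d+')
print(latest)      # "1.2.0" -- __str__ yields the bare version
print(latest.url)  # "https://example.org/tags/1.2.0" -- the extra info survives filtering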
@@ -45,19 +45,25 @@ if sys.version_info[:2] >= (3, 10):
   class RichResult:
     version: str
     url: Optional[str] = None
+
+    def __str__(self):
+      return self.version
 else:
   @dataclass
   class RichResult:
     version: str
     url: Optional[str] = None
 
-VersionResult = Union[None, str, List[str], RichResult, Exception]
+    def __str__(self):
+      return self.version
+
+VersionResult = Union[None, str, RichResult, List[Union[str, RichResult]], Exception]
 VersionResult.__doc__ = '''The result of a `get_version` check.
 
 * `None` - No version found.
 * `str` - A single version string is found.
-* `List[str]` - Multiple version strings are found. :ref:`list options` will be applied.
 * `RichResult` - A version string with additional information.
+* `List[Union[str, RichResult]]` - Multiple version strings with or without additional information are found. :ref:`list options` will be applied.
 * `Exception` - An error occurred.
 '''
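With __str__ in place, a RichResult can be dropped in anywhere a bare version string used to be. For a source plugin the pattern now looks like the following hypothetical module; the endpoint and JSON field names are invented, only the return shape follows this PR:

# Hypothetical example source (e.g. an imagined nvchecker_source/example.py);
# API_URL and the JSON fields are made up for illustration.
from nvchecker.api import RichResult

API_URL = 'https://example.org/api/%s/latest'   # made-up endpoint

async def get_version(name, conf, *, cache, **kwargs):
  pkg = conf.get('example', name)
  data = await cache.get_json(API_URL % pkg)
  # Returning RichResult instead of a plain string attaches a URL to the result;
  # str(result) still yields the version, so list options keep working.
  return RichResult(
    version = data['version'],
    url = data['homepage'],
  )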
@@ -1,10 +1,15 @@
 # MIT licensed
 # Copyright (c) 2017-2020 lilydjwg <lilydjwg@gmail.com>, et al.
 
+from nvchecker.api import RichResult
+
 URL = 'https://release-monitoring.org/api/project/{pkg}'
 
 async def get_version(name, conf, *, cache, **kwargs):
   pkg = conf.get('anitya')
   url = URL.format(pkg = pkg)
   data = await cache.get_json(url)
-  return data['version']
+  return RichResult(
+    version = data['version'],
+    url = f'https://release-monitoring.org/project/{data["id"]}/',
+  )
@@ -11,8 +11,8 @@ import functools
 from collections import defaultdict
 
 from nvchecker.api import (
-  session, GetVersionError,
-  VersionResult, Entry, AsyncCache, KeyManager,
+  session, GetVersionError, VersionResult,
+  RichResult, Entry, AsyncCache, KeyManager,
 )
 
 APT_RELEASE_URL = "%s/dists/%s/Release"
@@ -92,12 +92,13 @@ async def get_url(url: str) -> str:
     None, _decompress_data,
     url, data)
 
-async def parse_packages(key: Tuple[AsyncCache, str]) -> Tuple[Dict[str, str], Dict[str, str]]:
+async def parse_packages(key: Tuple[AsyncCache, str]) -> Tuple[Dict[str, str], Dict[str, str], Dict[str, str]]:
   cache, url = key
   apt_packages = await cache.get(url, get_url) # type: ignore
 
   pkg_map = defaultdict(list)
   srcpkg_map = defaultdict(list)
+  pkg_to_src_map = defaultdict(list)
 
   pkg = None
   srcpkg = None
@@ -110,6 +111,7 @@ async def parse_packages(key: Tuple[AsyncCache, str]) -> Tuple[Dict[str, str], D
       version = line[9:]
       if pkg is not None:
         pkg_map[pkg].append(version)
+        pkg_to_src_map["%s/%s" % (pkg, version)] = srcpkg if srcpkg is not None else pkg
       if srcpkg is not None:
         srcpkg_map[srcpkg].append(version)
       pkg = srcpkg = None
@@ -118,8 +120,10 @@ async def parse_packages(key: Tuple[AsyncCache, str]) -> Tuple[Dict[str, str], D
                  for pkg, vs in pkg_map.items()}
   srcpkg_map_max = {pkg: max(vs, key=functools.cmp_to_key(compare_version))
                     for pkg, vs in srcpkg_map.items()}
+  pkg_to_src_map_max = {pkg: pkg_to_src_map["%s/%s" % (pkg, vs)]
+                        for pkg, vs in pkg_map_max.items()}
 
-  return pkg_map_max, srcpkg_map_max
+  return pkg_map_max, srcpkg_map_max, pkg_to_src_map_max
 
 async def get_version(
   name: str, conf: Entry, *,
@@ -148,16 +152,38 @@ async def get_version(
   else:
     raise GetVersionError('Packages file not found in APT repository')
 
-  pkg_map, srcpkg_map = await cache.get(
+  pkg_map, srcpkg_map, pkg_to_src_map = await cache.get(
     (cache, APT_PACKAGES_URL % (mirror, suite, packages_path)), parse_packages) # type: ignore
 
   if pkg and pkg in pkg_map:
     version = pkg_map[pkg]
+    changelog_name = pkg_to_src_map[pkg]
   elif srcpkg and srcpkg in srcpkg_map:
     version = srcpkg_map[srcpkg]
+    changelog_name = srcpkg
   else:
     raise GetVersionError('package not found in APT repository')
 
+  # Get Changelogs field from the Release file
+  changelogs_url = None
+  for line in apt_release.split('\n'):
+    if line.startswith('Changelogs: '):
+      changelogs_url = line[12:]
+      break
+
+  # Build the changelog URL (see https://wiki.debian.org/DebianRepository/Format#Changelogs for spec)
+  changelog = None
+  if changelogs_url is not None and changelogs_url != 'no':
+    changelog_section = changelog_name[:4] if changelog_name.startswith('lib') else changelog_name[:1]
+    changelog = changelogs_url.replace('@CHANGEPATH@', f'{repo}/{changelog_section}/{changelog_name}/{changelog_name}_{version}')
+
   if strip_release:
     version = version.split("-")[0]
-  return version
+
+  if changelog is not None:
+    return RichResult(
+      version = version,
+      url = changelog,
+    )
+  else:
+    return version
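Editor's note on the APT hunk above: per the Debian repository format, the Changelogs field of the Release file is a URL template containing an @CHANGEPATH@ placeholder, which is filled with component/section/source/source_version, where the section is the first letter of the source name (or the first four characters for lib* packages). A standalone sketch of just that substitution; the template and package values below are illustrative, not fetched data:

# Standalone illustration of the @CHANGEPATH@ substitution; example values only.
def build_changelog_url(template: str, repo: str, srcpkg: str, version: str) -> str:
  # 'libfoo' -> 'libf', 'bash' -> 'b', per the Debian changelog path convention
  section = srcpkg[:4] if srcpkg.startswith('lib') else srcpkg[:1]
  return template.replace('@CHANGEPATH@', f'{repo}/{section}/{srcpkg}/{srcpkg}_{version}')

template = 'https://metadata.ftp-master.debian.org/changelogs/@CHANGEPATH@_changelog'  # assumed sample template
print(build_changelog_url(template, 'main', 'bash', '5.2.15-2'))
# -> https://metadata.ftp-master.debian.org/changelogs/main/b/bash/bash_5.2.15-2_changelog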
@@ -1,7 +1,7 @@
 # MIT licensed
 # Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.
 
-from nvchecker.api import session, GetVersionError
+from nvchecker.api import session, RichResult, GetVersionError
 
 URL = 'https://www.archlinux.org/packages/search/json/'
 
@@ -31,4 +31,7 @@ async def get_version(name, conf, *, cache, **kwargs):
   else:
     version = r['pkgver'] + '-' + r['pkgrel']
 
-  return version
+  return RichResult(
+    version = version,
+    url = f'https://archlinux.org/packages/{r["repo"]}/{r["arch"]}/{r["pkgname"]}/',
+  )
@@ -1,10 +1,10 @@
 # MIT licensed
 # Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.
 
-from typing import Any, List
+from typing import Any, List, Union
 from urllib.parse import urlencode
 
-from nvchecker.api import VersionResult, Entry, AsyncCache
+from nvchecker.api import VersionResult, RichResult, Entry, AsyncCache
 
 # doc: https://developer.atlassian.com/cloud/bitbucket/rest/api-group-commits/#api-repositories-workspace-repo-slug-commits-get
 BITBUCKET_URL = 'https://bitbucket.org/api/2.0/repositories/%s/commits/%s'
@@ -22,7 +22,7 @@ async def get_version(
   use_sorted_tags = conf.get('use_sorted_tags', False)
 
   if use_sorted_tags or use_max_tag:
-    parameters = {'fields': 'values.name,next'}
+    parameters = {'fields': 'values.name,values.links.html.href,next'}
 
     if use_sorted_tags:
       parameters['sort'] = conf.get('sort', '-target.date')
@@ -33,37 +33,41 @@
     url = BITBUCKET_MAX_TAG % repo
     url += '?' + urlencode(parameters)
 
-    version = await _get_tags(url, max_page=1, cache=cache)
+    return await _get_tags(url, max_page=1, cache=cache)
 
   elif use_max_tag:
     url = BITBUCKET_MAX_TAG % repo
     url += '?' + urlencode(parameters)
 
     max_page = conf.get('max_page', 3)
-    version = await _get_tags(url, max_page=max_page, cache=cache)
+    return await _get_tags(url, max_page=max_page, cache=cache)
 
   else:
     url = BITBUCKET_URL % (repo, br)
     data = await cache.get_json(url)
 
-    version = data['values'][0]['date'].split('T', 1)[0].replace('-', '')
-
-  return version
+    return RichResult(
+      version = data['values'][0]['date'].split('T', 1)[0].replace('-', ''),
+      url = data['values'][0]['links']['html']['href'],
+    )
 
 async def _get_tags(
   url: str, *,
   max_page: int,
   cache: AsyncCache,
-) -> List[str]:
-  ret: List[str] = []
+) -> VersionResult:
+  ret: List[Union[str, RichResult]] = []
 
   for _ in range(max_page):
     data = await cache.get_json(url)
-    ret.extend(x['name'] for x in data['values'])
+    ret.extend([
+      RichResult(
+        version = tag['name'],
+        url = tag['links']['html']['href'],
+      ) for tag in data['values']
+    ])
    if 'next' in data:
      url = data['next']
    else:
      break
 
   return ret
@@ -1,11 +1,15 @@
 # MIT licensed
 # Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.
 
+from nvchecker.api import RichResult
+
 # Using metacpan
 CPAN_URL = 'https://fastapi.metacpan.org/release/%s'
 
 async def get_version(name, conf, *, cache, **kwargs):
   key = conf.get('cpan', name)
   data = await cache.get_json(CPAN_URL % key)
-  return str(data['version'])
-
+  return RichResult(
+    version = str(data['version']),
+    url = f'https://metacpan.org/release/{data["author"]}/{data["name"]}',
+  )
@@ -1,7 +1,7 @@
 # MIT licensed
 # Copyright (c) 2022 Pekka Ristola <pekkarr [at] protonmail [dot] com>, et al.
 
-from nvchecker.api import session, GetVersionError
+from nvchecker.api import session, RichResult, GetVersionError
 
 CRAN_URL = 'https://cran.r-project.org/package=%s/DESCRIPTION'
 VERSION_FIELD = 'Version: '
@@ -23,4 +23,7 @@ async def get_version(name, conf, *, cache, **kwargs):
   else:
     raise GetVersionError('Invalid DESCRIPTION file')
 
-  return version
+  return RichResult(
+    version = version,
+    url = f'https://cran.r-project.org/web/packages/{package}/',
+  )
@@ -1,10 +1,15 @@
 # MIT licensed
 # Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.
 
+from nvchecker.api import RichResult
+
 API_URL = 'https://crates.io/api/v1/crates/%s'
 
 async def get_version(name, conf, *, cache, **kwargs):
   name = conf.get('cratesio') or name
   data = await cache.get_json(API_URL % name)
   version = [v['num'] for v in data['versions'] if not v['yanked']][0]
-  return version
+  return RichResult(
+    version = version,
+    url = f'https://crates.io/crates/{name}/{version}',
+  )
@@ -2,7 +2,7 @@
 # Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.
 # Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.
 
-from nvchecker.api import GetVersionError
+from nvchecker.api import RichResult, GetVersionError
 
 URL = 'https://sources.debian.org/api/src/%(pkgname)s/?suite=%(suite)s'
 
@@ -22,4 +22,7 @@ async def get_version(name, conf, *, cache, **kwargs):
   else:
     version = r['version']
 
-  return version
+  return RichResult(
+    version = version,
+    url = f'https://sources.debian.org/src/{data["package"]}/{r["version"]}/',
+  )
@@ -1,9 +1,16 @@
 # MIT licensed
 # Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.
 
+from nvchecker.api import RichResult
+
 GEMS_URL = 'https://rubygems.org/api/v1/versions/%s.json'
 
 async def get_version(name, conf, *, cache, **kwargs):
   key = conf.get('gems', name)
   data = await cache.get_json(GEMS_URL % key)
-  return [item['number'] for item in data]
+  return [
+    RichResult(
+      version = item['number'],
+      url = f'https://rubygems.org/gems/{key}/versions/{item["number"]}',
+    ) for item in data
+  ]
@@ -9,7 +9,8 @@ GITEA_URL = 'https://%s/api/v1/repos/%s/commits'
 GITEA_MAX_TAG = 'https://%s/api/v1/repos/%s/tags'
 
 from nvchecker.api import (
-  VersionResult, Entry, AsyncCache, KeyManager,
+  VersionResult, RichResult, Entry,
+  AsyncCache, KeyManager,
 )
 
 async def get_version(
@@ -42,7 +43,14 @@ async def get_version(
 
   data = await cache.get_json(url, headers = headers)
   if use_max_tag:
-    version = [tag["name"] for tag in data]
+    return [
+      RichResult(
+        version = tag['name'],
+        url = f'https://{host}/{conf["gitea"]}/releases/tag/{tag["name"]}',
+      ) for tag in data
+    ]
   else:
-    version = data[0]['commit']['committer']['date'].split('T', 1)[0].replace('-', '')
-  return version
+    return RichResult(
+      version = data[0]['commit']['committer']['date'].split('T', 1)[0].replace('-', ''),
+      url = data[0]['html_url'],
+    )
@@ -3,13 +3,13 @@
 
 import time
 from urllib.parse import urlencode
-from typing import Tuple
+from typing import List, Tuple, Union
 
 import structlog
 
 from nvchecker.api import (
   VersionResult, Entry, AsyncCache, KeyManager,
-  TemporaryError, session, GetVersionError,
+  TemporaryError, session, RichResult, GetVersionError,
 )
 
 logger = structlog.get_logger(logger_name=__name__)
@@ -49,6 +49,7 @@ QUERY_LATEST_RELEASE_WITH_PRERELEASES = '''
       edges {{
         node {{
           name
+          url
         }}
       }}
     }}
@@ -56,7 +57,7 @@ QUERY_LATEST_RELEASE_WITH_PRERELEASES = '''
 }}
 '''
 
-async def get_latest_tag(key: Tuple[str, str, str]) -> str:
+async def get_latest_tag(key: Tuple[str, str, str]) -> RichResult:
   repo, query, token = key
   owner, reponame = repo.split('/')
   headers = {
@@ -80,9 +81,13 @@ async def get_latest_tag(key: Tuple[str, str, str]) -> str:
   if not refs:
     raise GetVersionError('no tag found')
 
-  return refs[0]['node']['name']
+  version = refs[0]['node']['name']
+  return RichResult(
+    version = version,
+    url = f'https://github.com/{repo}/releases/tag/{version}',
+  )
 
-async def get_latest_release_with_prereleases(key: Tuple[str, str]) -> str:
+async def get_latest_release_with_prereleases(key: Tuple[str, str]) -> RichResult:
   repo, token = key
   owner, reponame = repo.split('/')
   headers = {
@@ -105,7 +110,10 @@ async def get_latest_release_with_prereleases(key: Tuple[str, str]) -> str:
   if not refs:
     raise GetVersionError('no release found')
 
-  return refs[0]['node']['name']
+  return RichResult(
+    version = refs[0]['node']['name'],
+    url = refs[0]['node']['url'],
+  )
 
 async def get_version_real(
   name: str, conf: Entry, *,
@@ -160,7 +168,12 @@ async def get_version_real(
   data = await cache.get_json(url, headers = headers)
 
   if use_max_tag:
-    tags = [ref['ref'].split('/', 2)[-1] for ref in data]
+    tags: List[Union[str, RichResult]] = [
+      RichResult(
+        version = ref['ref'].split('/', 2)[-1],
+        url = f'https://github.com/{repo}/releases/tag/{ref["ref"].split("/", 2)[-1]}',
+      ) for ref in data
+    ]
     if not tags:
       raise GetVersionError('No tag found in upstream repository.')
     return tags
@@ -168,14 +181,17 @@ async def get_version_real(
   if use_latest_release:
     if 'tag_name' not in data:
       raise GetVersionError('No release found in upstream repository.')
-    version = data['tag_name']
+    return RichResult(
+      version = data['tag_name'],
+      url = data['html_url'],
+    )
 
   else:
-    # YYYYMMDD.HHMMSS
-    version = data[0]['commit']['committer']['date'] \
-        .rstrip('Z').replace('-', '').replace(':', '').replace('T', '.')
-
-    return version
+    return RichResult(
+      # YYYYMMDD.HHMMSS
+      version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'),
+      url = data[0]['html_url'],
+    )
 
 def check_ratelimit(exc, name):
   res = exc.response
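Editor's note on the commit-based branch above: the version is simply the committer date squashed into a YYYYMMDD.HHMMSS string. A quick standalone illustration of that transformation, with an arbitrary sample timestamp:

# Standalone illustration of the YYYYMMDD.HHMMSS version string built above;
# the timestamp is an example value.
date = '2024-03-10T06:14:02Z'
version = date.rstrip('Z').replace('-', '').replace(':', '').replace('T', '.')
print(version)  # -> 20240310.061402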
@@ -6,8 +6,8 @@ import urllib.parse
 import structlog
 
 from nvchecker.api import (
-  VersionResult, Entry, AsyncCache, KeyManager,
-  TemporaryError,
+  VersionResult, RichResult, Entry,
+  AsyncCache, KeyManager, TemporaryError,
 )
 
 GITLAB_URL = 'https://%s/api/v4/projects/%s/repository/commits'
@@ -52,10 +52,17 @@ async def get_version_real(
 
   data = await cache.get_json(url, headers = headers)
   if use_max_tag:
-    version = [tag["name"] for tag in data]
+    return [
+      RichResult(
+        version = tag['name'],
+        url = f'https://{host}/{conf["gitlab"]}/-/tags/{tag["name"]}',
+      ) for tag in data
+    ]
   else:
-    version = data[0]['created_at'].split('T', 1)[0].replace('-', '')
-  return version
+    return RichResult(
+      version = data[0]['created_at'].split('T', 1)[0].replace('-', ''),
+      url = data[0]['web_url'],
+    )
 
 def check_ratelimit(exc, name):
   res = exc.response
@@ -1,10 +1,15 @@
 # MIT licensed
 # Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.
 
+from nvchecker.api import RichResult
+
 HACKAGE_URL = 'https://hackage.haskell.org/package/%s/preferred.json'
 
 async def get_version(name, conf, *, cache, **kwargs):
   key = conf.get('hackage', name)
   data = await cache.get_json(HACKAGE_URL % key)
-  return data['normal-version'][0]
-
+  version = data['normal-version'][0]
+  return RichResult(
+    version = version,
+    url = f'https://hackage.haskell.org/package/{key}-{version}',
+  )
@@ -3,7 +3,7 @@
 
 import json
 import re
-from nvchecker.api import session
+from nvchecker.api import session, RichResult
 
 NPM_URL = 'https://registry.npmjs.org/%s'
 
@@ -26,4 +26,13 @@ async def get_version(name, conf, *, cache, **kwargs):
   data = await cache.get(NPM_URL % key, get_first_1k)
 
   dist_tags = json.loads(re.search(b'"dist-tags":({.*?})', data).group(1))
-  return dist_tags['latest']
+  version = dist_tags['latest']
+
+  # There is no standardised URL scheme, so we only return an URL for the default registry
+  if NPM_URL.startswith('https://registry.npmjs.org/'):
+    return RichResult(
+      version = version,
+      url = f'https://www.npmjs.com/package/{key}/v/{version}',
+    )
+  else:
+    return version
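Editor's note on the npm hunk above: this source deliberately fetches only the first kilobyte of the registry document and pulls dist-tags out of it with a non-greedy byte regex instead of parsing the whole, potentially huge, JSON. A self-contained sketch of that trick on a fabricated slice of a registry response:

# Self-contained sketch of the dist-tags extraction; the payload below is
# a fabricated fragment, not a real registry fetch.
import json
import re

fake_first_1k = b'{"_id":"example","name":"example","dist-tags":{"latest":"2.3.1","next":"3.0.0-rc.1"},"versions":{'
match = re.search(b'"dist-tags":({.*?})', fake_first_1k)  # non-greedy: stops at the first closing brace
dist_tags = json.loads(match.group(1))
print(dist_tags['latest'])  # -> 2.3.1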
@@ -1,6 +1,8 @@
 # MIT licensed
 # Copyright (c) 2013-2021 Th3Whit3Wolf <the.white.wolf.is.1337@gmail.com>, et al.
 
+from nvchecker.api import RichResult
+
 API_URL = 'https://open-vsx.org/api/%s/%s'
 
 async def get_version(name, conf, *, cache, **kwargs):
@@ -10,4 +12,7 @@ async def get_version(name, conf, *, cache, **kwargs):
   extension = splitName[1]
   data = await cache.get_json(API_URL % (publisher, extension))
   version = data['version']
-  return version
+  return RichResult(
+    version = version,
+    url = f'https://open-vsx.org/extension/{publisher}/{extension}/{version}',
+  )
@@ -1,6 +1,8 @@
 # MIT licensed
 # Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.
 
+from nvchecker.api import RichResult
+
 PACKAGIST_URL = 'https://packagist.org/packages/%s.json'
 
 async def get_version(name, conf, *, cache, **kwargs):
@@ -14,4 +16,8 @@ async def get_version(name, conf, *, cache, **kwargs):
   }
 
   if len(versions):
-    return max(versions, key=lambda version: versions[version]["time"])
+    version = max(versions, key=lambda version: versions[version]["time"])
+    return RichResult(
+      version = version,
+      url = f'https://packagist.org/packages/{data["package"]["name"]}#{version}',
+    )
@@ -6,10 +6,10 @@ import urllib.parse
 import structlog
 
 from nvchecker.api import (
-  VersionResult, Entry, AsyncCache, KeyManager,
+  VersionResult, RichResult, Entry, AsyncCache, KeyManager,
 )
 
-PAGURE_URL = 'https://%s/api/0/%s/git/tags'
+PAGURE_URL = 'https://%s/api/0/%s/git/tags?with_commits=true'
 
 logger = structlog.get_logger(logger_name=__name__)
 
@@ -24,5 +24,9 @@ async def get_version(
   url = PAGURE_URL % (host, repo)
 
   data = await cache.get_json(url)
-  version = data["tags"]
-  return version
+  return [
+    RichResult(
+      version = version,
+      url = f'https://{host}/{repo}/tree/{version_hash}',
+    ) for version, version_hash in data["tags"].items()
+  ]
@@ -1,7 +1,7 @@
 # MIT licensed
 # Copyright (c) 2019 lilydjwg <lilydjwg@gmail.com>, et al.
 
-from nvchecker.api import GetVersionError
+from nvchecker.api import RichResult, GetVersionError
 
 API_URL = 'https://repology.org/api/v1/project/{}'
 
@@ -25,5 +25,9 @@ async def get_version(name, conf, *, cache, **kwargs):
     raise GetVersionError('package is not found in subrepo',
                           repo=repo, subrepo=subrepo)
 
-  versions = [pkg['version'] for pkg in pkgs]
-  return versions
+  return [
+    RichResult(
+      version = pkg['version'],
+      url = f'https://repology.org/project/{project}/packages',
+    ) for pkg in pkgs
+  ]
@@ -4,23 +4,25 @@
 
 from xml.etree import ElementTree
 
-from nvchecker.api import session
-
-NAMESPACE = 'http://www.andymatuschak.org/xml-namespaces/sparkle'
+from nvchecker.api import session, RichResult
+
+XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'
+SPARKLE_NAMESPACE = 'http://www.andymatuschak.org/xml-namespaces/sparkle'
 
 async def get_version(name, conf, *, cache, **kwargs):
   sparkle = conf['sparkle']
-  return await cache.get(sparkle, get_version_impl)
+  release_notes_language = conf.get('release_notes_language', 'en')
+  return await cache.get((sparkle, release_notes_language), get_version_impl)
 
 
-async def get_version_impl(sparkle):
+async def get_version_impl(info):
+  sparkle, release_notes_language = info
   res = await session.get(sparkle)
-  root = ElementTree.fromstring(res.body)
-  item = root.find('./channel/item[1]/enclosure')
+  root = ElementTree.fromstring(res.body).find('./channel/item[1]')
+  item = root.find('./enclosure')
 
-  version_string = item.get(f'{{{NAMESPACE}}}shortVersionString')
-  build_number = item.get(f'{{{NAMESPACE}}}version')
+  version_string = item.get(f'{{{SPARKLE_NAMESPACE}}}shortVersionString')
+  build_number = item.get(f'{{{SPARKLE_NAMESPACE}}}version')
 
   if (version_string and version_string.isdigit()) and (
     build_number and not build_number.isdigit()
@@ -34,4 +36,25 @@ async def get_version_impl(sparkle):
   if build_number and (build_number not in version):
     version.append(build_number)
 
-  return '-'.join(version) if version else None
+  version_str = '-'.join(version) if version else None
+
+  release_notes_link = None
+  for release_notes in root.findall(f'./{{{SPARKLE_NAMESPACE}}}releaseNotesLink'):
+    language = release_notes.get(f'{{{XML_NAMESPACE}}}lang')
+
+    # If the release notes have no language set, store them, but keep looking for our preferred language
+    if language is None:
+      release_notes_link = release_notes.text.strip()
+
+    # If the release notes match our preferred language, store them and stop looking
+    if language == release_notes_language:
+      release_notes_link = release_notes.text.strip()
+      break
+
+  if release_notes_link is not None:
+    return RichResult(
+      version = version_str,
+      url = release_notes_link,
+    )
+  else:
+    return version_str
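Editor's note on the Sparkle hunk above: the releaseNotesLink selection is plain ElementTree namespace handling. The xml:lang attribute lives in the standard XML namespace, the element itself in the Sparkle namespace, and an unlocalized link is kept as a fallback unless a localized one matches the preferred language. A self-contained sketch against a made-up appcast item:

# Self-contained sketch of the release-notes language selection; the <item>
# below is fabricated rather than fetched from a real appcast.
from xml.etree import ElementTree

XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'
SPARKLE_NAMESPACE = 'http://www.andymatuschak.org/xml-namespaces/sparkle'

item = ElementTree.fromstring('''
<item xmlns:sparkle="http://www.andymatuschak.org/xml-namespaces/sparkle">
  <sparkle:releaseNotesLink>https://example.org/notes.html</sparkle:releaseNotesLink>
  <sparkle:releaseNotesLink xml:lang="de">https://example.org/notes.de.html</sparkle:releaseNotesLink>
</item>
''')

preferred = 'de'
link = None
for notes in item.findall(f'./{{{SPARKLE_NAMESPACE}}}releaseNotesLink'):
  lang = notes.get(f'{{{XML_NAMESPACE}}}lang')
  if lang is None:
    link = notes.text.strip()   # unlocalized fallback
  if lang == preferred:
    link = notes.text.strip()   # preferred language wins
    break

print(link)  # -> https://example.org/notes.de.html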
@@ -2,7 +2,7 @@
 # Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.
 # Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.
 
-from nvchecker.api import GetVersionError
+from nvchecker.api import RichResult, GetVersionError
 
 URL = 'https://api.launchpad.net/1.0/ubuntu/+archive/primary?ws.op=getPublishedSources&source_name=%s&exact_match=true'
 
@@ -42,4 +42,7 @@ async def get_version(name, conf, *, cache, **kwargs):
   else:
     version = releases[0]['source_package_version']
 
-  return version
+  return RichResult(
+    version = version,
+    url = f'https://packages.ubuntu.com/{releases[0]["distro_series_link"].rsplit("/", 1)[-1]}/{pkg}',
+  )
@@ -3,7 +3,7 @@
 
 from nvchecker.api import (
   VersionResult, Entry, AsyncCache, KeyManager,
-  TemporaryError, session, GetVersionError,
+  TemporaryError, session, RichResult, GetVersionError,
 )
 
 API_URL = 'https://marketplace.visualstudio.com/_apis/public/gallery/extensionquery'
@@ -51,4 +51,7 @@ async def get_version(name: str, conf: Entry, *, cache: AsyncCache, **kwargs):
   j = res.json()
 
   version = j['results'][0]['extensions'][0]['versions'][0]['version']
-  return version
+  return RichResult(
+    version = version,
+    url = f'https://marketplace.visualstudio.com/items?itemName={name}',
+  )
@@ -13,7 +13,7 @@ pytestmark = [pytest.mark.asyncio,
 async def test_pacman(get_version):
   assert await get_version("base", {
     "source": "pacman",
-  }) == "3-1"
+  }) == "3-2"
 
 async def test_pacman_strip_release(get_version):
   assert await get_version("base", {