port more sources, add cache.get_json
commit 72d1d27f89 (parent 961c1315ef)
22 changed files with 195 additions and 255 deletions
@@ -1,4 +1,6 @@
TODO:
* pass `tries` to `httpclient`
* use contextvars for `tries` and `proxy` (passing to `httpclient`)
* update tests
* update README
* create source plugin documentation
* move things to a seperate `api.py`

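The contextvars item in the TODO above refers to Python's standard contextvars module. A minimal sketch of that pattern, using illustrative names (tries_var, run_entry) that are not nvchecker's actual API:

# Hypothetical sketch of the contextvars idea from the TODO above.
import asyncio
import contextvars

tries_var = contextvars.ContextVar('tries', default=1)
proxy_var = contextvars.ContextVar('proxy', default=None)

async def fetch(url):
  # a shared HTTP helper could read the per-entry settings here
  print(f'fetch {url} with tries={tries_var.get()} proxy={proxy_var.get()}')

async def run_entry(url, tries, proxy):
  tries_var.set(tries)
  proxy_var.set(proxy)
  await fetch(url)

async def main():
  # each asyncio task gets its own copy of the context,
  # so concurrent entries do not clobber each other's settings
  await asyncio.gather(
    run_entry('https://example.org/a', 3, None),
    run_entry('https://example.org/b', 1, 'http://localhost:8000'),
  )

asyncio.run(main())
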
@@ -1,19 +0,0 @@
# MIT licensed
# Copyright (c) 2017 lilydjwg <lilydjwg@gmail.com>, et al.

import structlog

from . import session

logger = structlog.get_logger(logger_name=__name__)

URL = 'https://release-monitoring.org/api/project/{pkg}'

async def get_version(name, conf, **kwargs):
  pkg = conf.get('anitya')
  url = URL.format(pkg = pkg)

  async with session.get(url) as res:
    data = await res.json()

  return data['version']

@@ -1,15 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2018 lilydjwg <lilydjwg@gmail.com>, et al.

from . import session, conf_cacheable_with_name

API_URL = 'https://crates.io/api/v1/crates/%s'

get_cacheable_conf = conf_cacheable_with_name('cratesio')

async def get_version(name, conf, **kwargs):
  name = conf.get('cratesio') or name
  async with session.get(API_URL % name) as res:
    data = await res.json()
  version = [v['num'] for v in data['versions'] if not v['yanked']][0]
  return version

@@ -1,26 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

from pkg_resources import parse_version

from . import conf_cacheable_with_name, session

get_cacheable_conf = conf_cacheable_with_name('pypi')

async def get_version(name, conf, **kwargs):
  package = conf.get('pypi') or name
  use_pre_release = conf.getboolean('use_pre_release', False)

  url = 'https://pypi.org/pypi/{}/json'.format(package)

  async with session.get(url) as res:
    data = await res.json()

  if use_pre_release:
    version = sorted(
      data['releases'].keys(),
      key = parse_version,
    )[-1]
  else:
    version = data['info']['version']
  return version

@@ -1,33 +0,0 @@
# MIT licensed
# Copyright (c) 2019 lilydjwg <lilydjwg@gmail.com>, et al.

import structlog
import functools

from . import session

logger = structlog.get_logger(logger_name=__name__)

API_URL = 'https://repology.org/api/v1/project/{}'

async def get_version(name, conf, **kwargs):
  project = conf.get('repology') or name
  repo = conf.get('repo')
  if not repo:
    logger.error('repo field is required for repology source', name = name)


  url = API_URL.format(project)
  data = await _request(url)

  versions = [pkg['version'] for pkg in data if pkg['repo'] == repo]
  if not versions:
    logger.error('package is not found', name=name, repo=repo)
    return

  return versions[0]

@functools.lru_cache()
async def _request(url):
  async with session.get(url) as res:
    return await res.json()

@@ -1,22 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

from . import session, conf_cacheable_with_name

def simple_json(urlpat, confkey, version_from_json):

  async def get_version(name, conf, **kwargs):
    repo = conf.get(confkey) or name
    url = urlpat % repo
    kwargs = {}
    if conf.get('proxy'):
      kwargs["proxy"] = conf.get('proxy')

    async with session.get(url, **kwargs) as res:
      data = await res.json(content_type=None)
      version = version_from_json(data)
      return version

  get_cacheable_conf = conf_cacheable_with_name(confkey)

  return get_version, get_cacheable_conf

@@ -1,63 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2018 lilydjwg <lilydjwg@gmail.com>, et al.

import asyncio
import os.path as _path

from pkg_resources import parse_version
import structlog

from . import conf_cacheable_with_name

logger = structlog.get_logger(logger_name=__name__)
_self_path = _path.dirname(_path.abspath(__file__))
_cmd_prefix = ['/bin/bash', _path.join(_self_path, 'vcs.sh')]

PROT_VER = 1

get_cacheable_conf = conf_cacheable_with_name('vcs')

def _parse_oldver(oldver):
  if oldver is None:
    return PROT_VER, 0, ''
  try:
    prot_ver, count, ver = oldver.split('.', maxsplit=2)
    prot_ver = int(prot_ver)
    count = int(count)
  except:
    return PROT_VER, 0, ''
  if prot_ver != PROT_VER:
    return PROT_VER, 0, ver
  return PROT_VER, count, ver

async def get_version(name, conf, **kwargs):
  vcs = conf['vcs'] or ''
  use_max_tag = conf.getboolean('use_max_tag', False)
  ignored_tags = conf.get("ignored_tags", "").split()
  oldver = conf.get('oldver')
  cmd = _cmd_prefix + [name, vcs]
  if use_max_tag:
    cmd += ["get_tags"]
  p = await asyncio.create_subprocess_exec(
    *cmd,
    stdout=asyncio.subprocess.PIPE,
    stderr=asyncio.subprocess.PIPE,
  )

  output, error = await asyncio.wait_for(p.communicate(), 20)
  output = output.strip().decode('latin1')
  error = error.strip().decode('latin1')

  if p.returncode != 0:
    logger.error('command exited with error', output=output,
                 name=name, returncode=p.returncode, error=error)
    return
  else:
    if use_max_tag:
      return [tag for tag in output.split("\n") if tag not in ignored_tags]
    else:
      oldvers = _parse_oldver(oldver)
      if output == oldvers[2]:
        return oldver
      else:
        return "%d.%d.%s" % (oldvers[0], oldvers[1] + 1, output)

@@ -16,6 +16,8 @@ from pathlib import Path
import toml
import structlog

from .httpclient import session

logger = structlog.get_logger(logger_name=__name__)

Entry = Dict[str, Any]

@@ -83,6 +85,15 @@ class AsyncCache(Generic[T, S]):
    self.cache = {}
    self.lock = asyncio.Lock()

  async def _get_json(self, key: Tuple[str, str]) -> Any:
    url = key[1]
    async with session.get(url) as res:
      return await res.json(content_type=None)

  async def get_json(self, url: str) -> Any:
    return await self.get(
      ('_jsonurl', url), self._get_json)

  async def get(
    self,
    key: T,

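The new get_json method keys the shared AsyncCache on ('_jsonurl', url), so concurrent sources asking for the same JSON document share one fetch. A simplified, standalone illustration of that deduplication idea (DemoCache and fake_fetch_json are stand-ins, not the real classes in core.py):

# Simplified illustration of the caching idea behind AsyncCache.get_json.
import asyncio

class DemoCache:
  def __init__(self):
    self.cache = {}
    self.lock = asyncio.Lock()

  async def get(self, key, func):
    # serialize lookups so a given key is only computed once
    async with self.lock:
      if key not in self.cache:
        self.cache[key] = await func(key)
      return self.cache[key]

async def fake_fetch_json(key):
  _, url = key
  print('fetching', url)            # printed only once per URL
  return {'version': '1.0', 'url': url}

async def main():
  cache = DemoCache()
  a, b = await asyncio.gather(
    cache.get(('_jsonurl', 'https://example.org/a.json'), fake_fetch_json),
    cache.get(('_jsonurl', 'https://example.org/a.json'), fake_fetch_json),
  )
  assert a is b                     # the second caller hit the cache

asyncio.run(main())
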
@@ -1,44 +1,32 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2017 Yen Chi Hsuan <yan12125 at gmail dot com>

from asyncio.locks import Lock
import os
import re
from xml.etree import ElementTree

from . import session
from nvchecker.httpclient import session

_ANDROID_REPO_MANIFESTS = {
  'addon': 'https://dl.google.com/android/repository/addon2-1.xml',
  'package': 'https://dl.google.com/android/repository/repository2-1.xml',
}

_repo_manifests_cache = {}
_repo_manifests_locks = {}

for repo in _ANDROID_REPO_MANIFESTS.keys():
  _repo_manifests_locks[repo] = Lock()

async def _get_repo_manifest(repo):
  async with _repo_manifests_locks[repo]:
    if repo in _repo_manifests_cache:
      return _repo_manifests_cache[repo]
    repo_xml_url = _ANDROID_REPO_MANIFESTS[repo]

  repo_xml_url = _ANDROID_REPO_MANIFESTS[repo]
  async with session.get(repo_xml_url) as res:
    data = (await res.read()).decode('utf-8')

  async with session.get(repo_xml_url) as res:
    data = (await res.read()).decode('utf-8')
  repo_manifest = ElementTree.fromstring(data)
  return repo_manifest

  repo_manifest = ElementTree.fromstring(data)
  _repo_manifests_cache[repo] = repo_manifest

  return repo_manifest

async def get_version(name, conf, **kwargs):
async def get_version(name, conf, *, cache, **kwargs):
  repo = conf['repo']
  pkg_path_prefix = conf['android_sdk']

  repo_manifest = await _get_repo_manifest(repo)
  repo_manifest = await cache.get(repo, _get_repo_manifest)

  for pkg in repo_manifest.findall('.//remotePackage'):
    if not pkg.attrib['path'].startswith(pkg_path_prefix):

nvchecker_source/anitya.py (new file, 10 lines)
@@ -0,0 +1,10 @@
# MIT licensed
# Copyright (c) 2017-2020 lilydjwg <lilydjwg@gmail.com>, et al.

URL = 'https://release-monitoring.org/api/project/{pkg}'

async def get_version(name, conf, *, cache, **kwargs):
  pkg = conf.get('anitya')
  url = URL.format(pkg = pkg)
  data = await cache.get_json(url)
  return data['version']

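After this change a source receives a keyword-only cache object instead of importing session directly. A hedged sketch of exercising such a plugin by hand with a stubbed cache (StubCache and the hard-coded payload are invented for illustration, not part of nvchecker):

# Hypothetical test harness for the new-style plugin signature above.
import asyncio

class StubCache:
  async def get_json(self, url):
    # pretend the network returned this payload
    return {'version': '1.2.3'}

async def get_version(name, conf, *, cache, **kwargs):
  # same shape as the new anitya source above
  pkg = conf.get('anitya')
  url = 'https://release-monitoring.org/api/project/{pkg}'.format(pkg=pkg)
  data = await cache.get_json(url)
  return data['version']

print(asyncio.run(get_version('shutter', {'anitya': 'shutter'}, cache=StubCache())))
# -> 1.2.3
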
@@ -1,14 +1,13 @@
# MIT licensed
# Copyright (c) 2013-2019 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from . import session
from ..sortversion import sort_version_keys
from nvchecker.sortversion import sort_version_keys

# doc: https://confluence.atlassian.com/display/BITBUCKET/commits+or+commit+Resource
BITBUCKET_URL = 'https://bitbucket.org/api/2.0/repositories/%s/commits/%s'
BITBUCKET_MAX_TAG = 'https://bitbucket.org/api/2.0/repositories/%s/refs/tags'

async def get_version(name, conf, **kwargs):
async def get_version(name, conf, *, cache, **kwargs):
  repo = conf.get('bitbucket')
  br = conf.get('branch', '')
  use_max_tag = conf.getboolean('use_max_tag', False)

@@ -18,12 +17,11 @@ async def get_version(name, conf, **kwargs):
  if use_max_tag:
    url = BITBUCKET_MAX_TAG % repo
    max_page = conf.getint('max_page', 3)
    data = await _get_tags(url, max_page=max_page)
    data = await _get_tags(url, max_page=max_page, cache=cache)

  else:
    url = BITBUCKET_URL % (repo, br)
    async with session.get(url) as res:
      data = await res.json()
    data = await cache.get_json(url)

  if use_max_tag:
    data = [tag for tag in data if tag not in ignored_tags]

@@ -33,12 +31,11 @@ async def get_version(name, conf, **kwargs):
  version = data['values'][0]['date'].split('T', 1)[0].replace('-', '')
  return version

async def _get_tags(url, *, max_page):
async def _get_tags(url, *, max_page, cache):
  ret = []

  for _ in range(max_page):
    async with session.get(url) as res:
      data = await res.json()
    data = await cache.get_json(url)
    ret.extend(x['name'] for x in data['values'])
    if 'next' in data:
      url = data['next']

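Since _get_tags now fetches each page through cache.get_json, every page URL becomes its own cache key. An illustrative, self-contained sketch of that pagination pattern (the stub cache and page data are invented, not Bitbucket's real responses):

# Sketch of paging through `next` links with a URL-keyed JSON cache.
import asyncio

PAGES = {
  'p1': {'values': [{'name': 'v1.0'}, {'name': 'v1.1'}], 'next': 'p2'},
  'p2': {'values': [{'name': 'v2.0'}]},
}

class StubCache:
  async def get_json(self, url):
    return PAGES[url]               # one cache entry per page URL in real use

async def get_tags(url, *, max_page, cache):
  ret = []
  for _ in range(max_page):
    data = await cache.get_json(url)
    ret.extend(x['name'] for x in data['values'])
    if 'next' in data:
      url = data['next']            # follow the pagination link
    else:
      break
  return ret

print(asyncio.run(get_tags('p1', max_page=3, cache=StubCache())))
# -> ['v1.0', 'v1.1', 'v2.0']
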
nvchecker_source/cratesio.py (new file, 10 lines)
@@ -0,0 +1,10 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

API_URL = 'https://crates.io/api/v1/crates/%s'

async def get_version(name, conf, *, cache, **kwargs):
  name = conf.get('cratesio') or name
  data = await cache.get_json(API_URL % name)
  version = [v['num'] for v in data['versions'] if not v['yanked']][0]
  return version

@@ -1,27 +1,20 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.

import structlog

from . import session, conf_cacheable_with_name

logger = structlog.get_logger(logger_name=__name__)
from nvchecker.util import GetVersionError

URL = 'https://sources.debian.org/api/src/%(pkgname)s/?suite=%(suite)s'

get_cacheable_conf = conf_cacheable_with_name('debianpkg')

async def get_version(name, conf, **kwargs):
async def get_version(name, conf, *, cache, **kwargs):
  pkg = conf.get('debianpkg') or name
  strip_release = conf.getboolean('strip-release', False)
  strip_release = conf.get('strip_release', False)
  suite = conf.get('suite') or "sid"
  url = URL % {"pkgname": pkg, "suite": suite}
  async with session.get(url) as res:
    data = await res.json()
  data = await cache.get_json(url)

  if not data.get('versions'):
    logger.error('Debian package not found', name=name)
    return
    raise GetVersionError('Debian package not found')

  r = data['versions'][0]
  if strip_release:

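Sources ported in this commit raise GetVersionError instead of logging and returning None, leaving reporting to the caller. A hedged sketch of how a caller might consume such failures (the GetVersionError class and run_one helper here are local stand-ins, not the ones in nvchecker.util):

# Illustration: handling GetVersionError-style failures in a caller.
import asyncio

class GetVersionError(Exception):
  def __init__(self, msg, **kwargs):
    super().__init__(msg)
    self.kwargs = kwargs            # structured context, e.g. repo=...

async def get_version(name, conf, *, cache, **kwargs):
  data = {'versions': []}           # pretend the API returned nothing
  if not data.get('versions'):
    raise GetVersionError('Debian package not found')
  return data['versions'][0]

async def run_one(name, conf, cache):
  try:
    return await get_version(name, conf, cache=cache)
  except GetVersionError as e:
    print('error for', name, '->', e, e.kwargs)   # report and keep going
    return None

print(asyncio.run(run_one('foo', {}, cache=None)))
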
@@ -1,5 +1,5 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

async def get_version(name, conf, **kwargs):
  return conf.get('manual').strip() or None

@@ -1,17 +1,15 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from . import cmd, conf_cacheable_with_name

get_cacheable_conf = conf_cacheable_with_name('pacman')
from . import cmd

async def get_version(name, conf, **kwargs):
  referree = conf.get('pacman') or name
  c = "LANG=C pacman -Si %s | grep -F Version | awk '{print $3}' | head -n 1" % referree
  conf['cmd'] = c
  strip_release = conf.getboolean('strip-release', False)
  strip_release = conf.get('strip_release', False)

  version = await cmd.get_version(name, conf)
  version = await cmd.get_version(name, conf, **kwargs)

  if strip_release and '-' in version:
    version = version.rsplit('-', 1)[0]

nvchecker_source/pypi.py (new file, 21 lines)
@@ -0,0 +1,21 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from pkg_resources import parse_version

async def get_version(name, conf, *, cache, **kwargs):
  package = conf.get('pypi') or name
  use_pre_release = conf.get('use_pre_release', False)

  url = 'https://pypi.org/pypi/{}/json'.format(package)

  data = await cache.get_json(url)

  if use_pre_release:
    version = sorted(
      data['releases'].keys(),
      key = parse_version,
    )[-1]
  else:
    version = data['info']['version']
  return version

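The use_pre_release branch relies on pkg_resources.parse_version giving release strings their PEP 440 ordering, so the last element of the sorted keys may be a pre-release. A small worked example:

# parse_version orders pre-releases below the corresponding final release.
from pkg_resources import parse_version

releases = ['1.0', '1.1b1', '1.1', '1.2a1']
print(sorted(releases, key=parse_version))
# ['1.0', '1.1b1', '1.1', '1.2a1'] -- so [-1] picks 1.2a1 when pre-releases
# are allowed, while data['info']['version'] would still report 1.1.
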
@@ -1,21 +1,27 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import re
import sre_constants

import structlog

from . import session
from nvchecker.httpclient import session
from nvchecker.util import GetVersionError

logger = structlog.get_logger(logger_name=__name__)

async def get_version(name, conf, **kwargs):
async def get_version(name, conf, *, cache, **kwargs):
  key = sorted(conf.items())
  return await cache.get(key, get_version_impl)

async def get_version_impl(info):
  conf = dict(info)

  try:
    regex = re.compile(conf['regex'])
  except sre_constants.error:
    logger.warning('bad regex, skipped.', name=name, exc_info=True)
    return
  except sre_constants.error as e:
    raise GetVersionError('bad regex', exc_info=e)

  encoding = conf.get('encoding', 'latin1')

@@ -32,6 +38,6 @@ async def get_version(name, conf, **kwargs):
    version = regex.findall(body)
  except ValueError:
    version = None
    if not conf.getboolean('missing_ok', False):
      logger.error('version string not found.', name=name)
    if not conf.get('missing_ok', False):
      raise GetVersionError('version string not found.')
  return version

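get_version above caches on the whole configuration by passing sorted(conf.items()) as the key and rebuilding the mapping with dict(info) inside get_version_impl. For a dict-backed cache the key has to be hashable; a small sketch of the usual tuple-of-sorted-items form (example values only, not taken from a real config):

# Turning a conf dict into a hashable, order-independent cache key.
conf = {'regex': r'v([\d.]+)', 'url': 'https://example.org/releases'}

key = tuple(sorted(conf.items()))   # hashable, usable as a dict key
print(key)

rebuilt = dict(key)                 # the fetch side can rebuild the mapping
assert rebuilt == conf
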
nvchecker_source/repology.py (new file, 21 lines)
@@ -0,0 +1,21 @@
# MIT licensed
# Copyright (c) 2019 lilydjwg <lilydjwg@gmail.com>, et al.

from nvchecker.util import GetVersionError

API_URL = 'https://repology.org/api/v1/project/{}'

async def get_version(name, conf, *, cache, **kwargs):
  project = conf.get('repology') or name
  repo = conf.get('repo')
  if not repo:
    raise GetVersionError('repo field is required for repology source')

  url = API_URL.format(project)
  data = await cache.get_json(url)

  versions = [pkg['version'] for pkg in data if pkg['repo'] == repo]
  if not versions:
    raise GetVersionError('package is not found', repo=repo)

  return versions[0]

@@ -1,14 +1,16 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2020 Sunlei <guizaicn@gmail.com>

from xml.etree import ElementTree

from . import session
from nvchecker.httpclient import session


async def get_version(name, conf, **kwargs):
async def get_version(name, conf, *, cache, **kwargs):
  sparkle = conf['sparkle']
  return await cache.get(sparkle, get_version_impl)

async def get_version_impl(sparkle):
  async with session.get(sparkle) as res:
    resp = await res.read()

@@ -1,19 +1,14 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.

import structlog

from . import session, conf_cacheable_with_name

logger = structlog.get_logger(logger_name=__name__)
from nvchecker.util import GetVersionError

URL = 'https://api.launchpad.net/1.0/ubuntu/+archive/primary?ws.op=getPublishedSources&source_name=%s&exact_match=true'

get_cacheable_conf = conf_cacheable_with_name('ubuntupkg')

async def get_version(name, conf, **kwargs):
async def get_version(name, conf, *, cache, **kwargs):
  pkg = conf.get('ubuntupkg') or name
  strip_release = conf.getboolean('strip-release', False)
  strip_release = conf.get('strip_release', False)
  suite = conf.get('suite')
  url = URL % pkg

@@ -23,12 +18,10 @@ async def get_version(name, conf, **kwargs):
  releases = []

  while not releases:
    async with session.get(url) as res:
      data = await res.json()
    data = await cache.get_json(url)

    if not data.get('entries'):
      logger.error('Ubuntu package not found', name=name)
      return
      raise GetVersionError('Ubuntu package not found')

    releases = [r for r in data["entries"] if r["status"] == "Published"]

@@ -41,7 +34,7 @@ async def get_version(name, conf, **kwargs):
    url = data["next_collection_link"]

  if not releases:
    logger.error('Ubuntu package not found', name=name)
    raise GetVersionError('Ubuntu package not found')
    return

  if strip_release:

nvchecker_source/vcs.py (new file, 66 lines)
@@ -0,0 +1,66 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import asyncio
import os.path as _path

import structlog

from nvchecker.util import GetVersionError

logger = structlog.get_logger(logger_name=__name__)
_self_path = _path.dirname(_path.abspath(__file__))
_cmd_prefix = ['/bin/bash', _path.join(_self_path, 'vcs.sh')]

PROT_VER = 1

def _parse_oldver(oldver):
  if oldver is None:
    return PROT_VER, 0, ''
  try:
    prot_ver, count, ver = oldver.split('.', maxsplit=2)
    prot_ver = int(prot_ver)
    count = int(count)
  except Exception:
    return PROT_VER, 0, ''
  if prot_ver != PROT_VER:
    return PROT_VER, 0, ver
  return PROT_VER, count, ver

async def get_version(name, conf, *, cache, **kwargs):
  vcs = conf['vcs'] or ''
  use_max_tag = conf.getboolean('use_max_tag', False)
  ignored_tags = conf.get("ignored_tags", "").split()
  oldver = conf.get('oldver')
  cmd = _cmd_prefix + [name, vcs]
  if use_max_tag:
    cmd += ["get_tags"]

  output = await cache.get(tuple(cmd), run_cmd)

  if use_max_tag:
    return [tag for tag in output.split("\n") if tag not in ignored_tags]
  else:
    oldvers = _parse_oldver(oldver)
    if output == oldvers[2]:
      return oldver
    else:
      return "%d.%d.%s" % (oldvers[0], oldvers[1] + 1, output)

async def run_cmd(cmd):
  p = await asyncio.create_subprocess_exec(
    *cmd,
    stdout=asyncio.subprocess.PIPE,
    stderr=asyncio.subprocess.PIPE,
  )

  output, error = await asyncio.wait_for(p.communicate(), 20)
  output = output.strip().decode('latin1')
  error = error.strip().decode('latin1')

  if p.returncode != 0:
    raise GetVersionError(
      'command exited with error', output=output,
      returncode=p.returncode, error=error)
  else:
    return output

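The oldver strings handled above encode PROT_VER, a bump counter, and the raw value reported by vcs.sh, so the stored version still increases whenever the underlying commit changes. A worked example of _parse_oldver plus that bump (the commit ids are made up):

# Worked example of the oldver encoding used above (protocol.count.value).
PROT_VER = 1

def _parse_oldver(oldver):
  if oldver is None:
    return PROT_VER, 0, ''
  try:
    prot_ver, count, ver = oldver.split('.', maxsplit=2)
    prot_ver = int(prot_ver)
    count = int(count)
  except Exception:
    return PROT_VER, 0, ''
  if prot_ver != PROT_VER:
    return PROT_VER, 0, ver
  return PROT_VER, count, ver

output = 'a1b2c3d'                        # new commit reported by vcs.sh
oldvers = _parse_oldver('1.4.deadbeef')   # previously recorded value
if output == oldvers[2]:
  newver = '1.4.deadbeef'                 # unchanged, keep the old string
else:
  newver = '%d.%d.%s' % (oldvers[0], oldvers[1] + 1, output)
print(newver)   # -> 1.5.a1b2c3d
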