version 2: MVP

This commit is contained in:
lilydjwg 2020-08-11 17:43:03 +08:00
parent fbdde9bf58
commit 14b3863f11
49 changed files with 537 additions and 403 deletions

21
NEW Normal file
View file

@ -0,0 +1,21 @@
source file
| parse
entries: name, conf, global options
| dedupe
entries: names, conf, global options
| dispatch
one future per module
| run; token queue; result queue
result task, runner task
| receive
(names, result)
| transform back
(name, result)
| runner task done
| write record file
file
TODO:
* dedupe & cache
* update tests
* update README

13
mypy.ini Normal file
View file

@ -0,0 +1,13 @@
[mypy]
warn_unused_configs = True
warn_redundant_casts = True
warn_unused_ignores = True
show_error_context = True
show_column_numbers = True
no_implicit_optional = True
[mypy-structlog]
ignore_missing_imports = True
[mypy-pyalpm]
ignore_missing_imports = True

View file

@ -0,0 +1,57 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.
from importlib import import_module
import structlog
logger = structlog.get_logger(logger_name=__name__)
_handler_precedence = (
'github', 'aur', 'pypi', 'archpkg', 'debianpkg', 'ubuntupkg',
'gems', 'pacman',
'cmd', 'bitbucket', 'regex', 'manual', 'vcs',
'cratesio', 'npm', 'hackage', 'cpan', 'gitlab', 'packagist',
'repology', 'anitya', 'android_sdk', 'sparkle', 'gitea'
)
_Task = namedtuple('_Task', 'batch_mode main args names')
class Dispatcher:
def __init__(self):
self.futures = []
self.mods = {}
self.tasks_dedupe = {}
def add_task(self, name, conf, **kwargs):
for key in _handler_precedence:
if key not in conf:
continue
value = self.mods.get(key)
if not value:
mod = import_module(
'.source.' + key, __package__)
batch_mode = getattr(mod, 'BATCH_MODE', False)
if batch_mode:
main = mod.Batcher()
else:
main = mod.get_version
get_cacheable_conf = getattr(mod, 'get_cacheable_conf', lambda name, conf: conf)
self.mods[key] = batch_mode, main, get_cacheable_conf
else:
batch_mode, main, get_cacheable_conf = value
cacheable_conf = get_cacheable_conf(name, conf)
cache_key = tuple(sorted(cacheable_conf.items()))
task = self.tasks_dedupe.get(cache_key)
if task is None:
self.tasks_dedupe[cache_key] = _Task(
batch_mode, main, (cacheable_conf, kwargs), [name])
else:
task.names.append(name)
else:
logger.error(
'no idea to get version info.', name=name)

View file

@ -1,4 +1,4 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.
__version__ = '1.8dev'
__version__ = '2.0dev'

View file

@ -1,33 +1,23 @@
#!/usr/bin/env python3
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.
from __future__ import annotations
import sys
import argparse
import asyncio
from typing import Coroutine
import structlog
from .lib import notify
from . import core
from .util import VersData, RawResult
logger = structlog.get_logger(logger_name=__name__)
notifications = []
args = None
class Source(core.Source):
def on_update(self, name, version, oldver):
if args.notify:
msg = '%s updated to version %s' % (name, version)
notifications.append(msg)
notify.update('nvchecker', '\n'.join(notifications))
def main():
global args
def main() -> None:
parser = argparse.ArgumentParser(description='New version checker for software')
parser.add_argument('-n', '--notify', action='store_true', default=False,
help='show desktop notifications when a new version is available')
parser.add_argument('-t', '--tries', default=1, type=int, metavar='N',
help='try N times when errors occur')
core.add_common_arguments(parser)
@ -36,12 +26,47 @@ def main():
return
if not args.file:
return
try:
file = open(core.get_default_config())
except FileNotFoundError:
sys.exit('version configuration file not given and default does not exist')
else:
file = args.file
s = Source(args.file, args.tries)
entries, options = core.load_file(file)
token_q = core.token_queue(options.max_concurrent)
result_q: asyncio.Queue[RawResult] = asyncio.Queue()
try:
futures = core.dispatch(
entries, token_q, result_q,
options.keymanager, args.tries,
)
except ModuleNotFoundError as e:
sys.exit(f'Error: {e}')
ioloop = asyncio.get_event_loop()
ioloop.run_until_complete(s.check())
if options.ver_files is not None:
oldvers = core.read_verfile(options.ver_files[0])
else:
oldvers = {}
result_coro = core.process_result(oldvers, result_q)
runner_coro = core.run_tasks(futures)
# asyncio.run doesn't work because it always creates new eventloops
loop = asyncio.get_event_loop()
newvers = loop.run_until_complete(run(result_coro, runner_coro))
if options.ver_files is not None:
core.write_verfile(options.ver_files[1], newvers)
async def run(
  result_coro: Coroutine[None, None, VersData],
  runner_coro: Coroutine[None, None, None],
) -> VersData:
  '''Drive the worker runner and the result collector together.

  The collector loops forever pulling from the result queue; once every
  worker is done we cancel it, which makes it return the version data it
  has accumulated so far.
  '''
  collector = asyncio.create_task(result_coro)
  workers = asyncio.create_task(runner_coro)
  # Wait until all workers have produced their results.
  await workers
  # Cancelling the collector makes it return its accumulated data.
  collector.cancel()
  return await collector
if __name__ == '__main__':
main()

View file

@ -1,25 +1,43 @@
# vim: se sw=2:
# MIT licensed
# Copyright (c) 2013-2018 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.
from __future__ import annotations
import os
import sys
import configparser
import asyncio
from asyncio import Queue
import logging
import argparse
from typing import (
TextIO, Tuple, NamedTuple, Optional, List, Union,
cast, Dict, Awaitable, Sequence,
)
import types
from pathlib import Path
from importlib import import_module
import re
import structlog
import toml
from .lib import nicelogger
from .get_version import get_version
from .source import session
from . import slogconf
from .util import (
Entry, Entries, KeyManager, RawResult, Result, VersData,
)
from . import __version__
from .sortversion import sort_version_keys
logger = structlog.get_logger(logger_name=__name__)
def add_common_arguments(parser):
def get_default_config() -> str:
  '''Return the default configuration file path.

  Honors ``$XDG_CONFIG_HOME`` (the variable actually named by the XDG
  Base Directory specification), falling back to the legacy
  ``$XDG_CONFIG_DIR`` this code used to read, then to ``~/.config``.
  '''
  confdir = (
    os.environ.get('XDG_CONFIG_HOME')
    or os.environ.get('XDG_CONFIG_DIR')  # legacy name, kept for compatibility
    or os.path.expanduser('~/.config')
  )
  return os.path.join(confdir, 'nvchecker/nvchecker.toml')
def add_common_arguments(parser: argparse.ArgumentParser) -> None:
parser.add_argument('-l', '--logging',
choices=('debug', 'info', 'warning', 'error'), default='info',
help='logging level (default: info)')
@ -31,10 +49,11 @@ def add_common_arguments(parser):
help='specify fd to send json logs to. stdout by default')
parser.add_argument('-V', '--version', action='store_true',
help='show version and exit')
parser.add_argument('file', metavar='FILE', nargs='?', type=open,
help='software version source file')
parser.add_argument('-c', '--file',
metavar='FILE', type=open,
help='software version configuration file [default: %s]' % get_default_config)
def process_common_arguments(args):
def process_common_arguments(args: argparse.Namespace) -> bool:
'''return True if should stop'''
processors = [
slogconf.exc_info,
@ -71,8 +90,10 @@ def process_common_arguments(args):
progname = os.path.basename(sys.argv[0])
print('%s v%s' % (progname, __version__))
return True
return False
def safe_overwrite(fname, data, *, method='write', mode='w', encoding=None):
def safe_overwrite(fname: str, data: Union[bytes, str], *,
method: str = 'write', mode: str = 'w', encoding: Optional[str] = None) -> None:
# FIXME: directory has no read perm
# FIXME: symlinks and hard links
tmpname = fname + '.tmp'
@ -85,7 +106,7 @@ def safe_overwrite(fname, data, *, method='write', mode='w', encoding=None):
# if the above write failed (because disk is full etc), the old data should be kept
os.rename(tmpname, fname)
def read_verfile(file):
def read_verfile(file: Path) -> VersData:
v = {}
try:
with open(file) as f:
@ -96,134 +117,199 @@ def read_verfile(file):
pass
return v
def write_verfile(file, versions):
# sort using only alphanums, as done by the sort command, and needed by
# comm command
def write_verfile(file: Path, versions: VersData) -> None:
# sort using only alphanums, as done by the sort command,
# and needed by comm command
data = ['%s %s\n' % item
for item in sorted(versions.items(), key=lambda i: (''.join(filter(str.isalnum, i[0])), i[1]))]
safe_overwrite(file, data, method='writelines')
safe_overwrite(
str(file), ''.join(data), method='writelines')
class Source:
oldver = newver = None
tries = 1
class Options(NamedTuple):
  # Global options parsed from a configuration file's __config__ table.
  # (oldver, newver) record-file paths, or None when not configured.
  ver_files: Optional[Tuple[Path, Path]]
  # Upper bound on concurrently running checks (size of the token queue).
  max_concurrent: int
  # Lookup for API keys read from the configured keyfile.
  keymanager: KeyManager
def __init__(self, file, tries=1):
self.config = config = configparser.ConfigParser(
dict_type=dict, allow_no_value=True, interpolation=None,
def load_file(
  file: TextIO,
) -> Tuple[Entries, Options]:
  '''Parse a TOML configuration file.

  Returns the software entries and the global options taken from the
  optional ``__config__`` table.  Relative paths in ``__config__``
  (oldver / newver / keyfile) are resolved against the file's directory.
  '''
  config = toml.load(file)

  ver_files: Optional[Tuple[Path, Path]] = None
  keyfile: Optional[Path] = None
  max_concurrent = 20

  if '__config__' in config:
    c = config.pop('__config__')
    d = Path(file.name).parent

    if 'oldver' in c and 'newver' in c:
      oldver_s = os.path.expandvars(
        os.path.expanduser(c.get('oldver')))
      newver_s = os.path.expandvars(
        os.path.expanduser(c.get('newver')))
      ver_files = d / oldver_s, d / newver_s

    keyfile_s = c.get('keyfile')
    if keyfile_s:
      # keyfile stays None when unset or empty, so KeyManager gets
      # Optional[Path] as declared (not a raw, possibly-empty str).
      keyfile = d / os.path.expandvars(
        os.path.expanduser(keyfile_s))

    # toml.load returns a plain dict; the configparser-style
    # c.getint(...) would raise AttributeError here.
    max_concurrent = int(c.get('max_concurrent', 20))

  return cast(Entries, config), Options(
    ver_files, max_concurrent, KeyManager(keyfile))
def token_queue(maxsize: int) -> Queue[bool]:
  '''Create a queue pre-filled with ``maxsize`` tokens.

  Workers take a token before doing work and put it back afterwards,
  which bounds overall concurrency to ``maxsize``.
  '''
  q: Queue[bool] = Queue(maxsize=maxsize)
  for token in [True] * maxsize:
    q.put_nowait(token)
  return q
def dispatch(
entries: Entries,
token_q: Queue[bool],
result_q: Queue[RawResult],
keymanager: KeyManager,
tries: int,
) -> List[asyncio.Future]:
mods: Dict[str, Tuple[types.ModuleType, List]] = {}
for name, entry in entries.items():
source = entry.get('source', 'none')
if source not in mods:
mod = import_module('nvchecker_source.' + source)
tasks: List[Tuple[str, Entry]] = []
mods[source] = mod, tasks
else:
tasks = mods[source][1]
tasks.append((name, entry))
ret = []
for mod, tasks in mods.values():
worker = mod.Worker( # type: ignore
token_q, result_q, tasks,
tries, keymanager,
)
self.name = file.name
self.tries = tries
config.read_file(file)
if '__config__' in config:
c = config['__config__']
ret.append(worker.run())
d = os.path.dirname(file.name)
if 'oldver' in c and 'newver' in c:
self.oldver = os.path.expandvars(os.path.expanduser(
os.path.join(d, c.get('oldver'))))
self.newver = os.path.expandvars(os.path.expanduser(
os.path.join(d, c.get('newver'))))
return ret
keyfile = c.get('keyfile')
if keyfile:
keyfile = os.path.expandvars(os.path.expanduser(
os.path.join(d, c.get('keyfile'))))
def substitute_version(
version: str, conf: Entry,
) -> str:
'''
Substitute the version string via defined rules in the configuration file.
See README.rst#global-options for details.
'''
prefix = conf.get('prefix')
if prefix:
if version.startswith(prefix):
version = version[len(prefix):]
return version
self.max_concurrent = c.getint('max_concurrent', 20)
self.keymanager = KeyManager(keyfile)
session.nv_config = config["__config__"]
from_pattern = conf.get('from_pattern')
if from_pattern:
to_pattern = conf.get('to_pattern')
if not to_pattern:
raise ValueError("from_pattern exists but to_pattern doesn't")
else:
self.max_concurrent = 20
self.keymanager = KeyManager(None)
return re.sub(from_pattern, to_pattern, version)
async def check(self):
if self.oldver:
self.oldvers = read_verfile(self.oldver)
else:
self.oldvers = {}
self.curvers = self.oldvers.copy()
# No substitution rules found. Just return the original version string.
return version
tries = self.tries
token_q = asyncio.Queue(maxsize=self.max_concurrent)
def apply_list_options(
versions: List[str], conf: Entry,
) -> Optional[str]:
pattern = conf.get('include_regex')
if pattern:
re_pat = re.compile(pattern)
versions = [x for x in versions
if re_pat.fullmatch(x)]
for _ in range(self.max_concurrent):
await token_q.put(True)
pattern = conf.get('exclude_regex')
if pattern:
re_pat = re.compile(pattern)
versions = [x for x in versions
if not re_pat.fullmatch(x)]
async def worker(name, conf):
await token_q.get()
try:
for i in range(tries):
try:
ret = await get_version(
name, conf, keyman=self.keymanager)
return name, ret
except Exception as e:
if i + 1 < tries:
logger.warning('failed, retrying',
name=name, exc_info=e)
await asyncio.sleep(i)
else:
return name, e
finally:
await token_q.put(True)
ignored = set(conf.get('ignored', '').split())
if ignored:
versions = [x for x in versions if x not in ignored]
config = self.config
futures = []
for name in config.sections():
if name == '__config__':
if not versions:
return None
sort_version_key = sort_version_keys[
conf.get("sort_version_key", "parse_version")]
versions.sort(key=sort_version_key)
return versions[-1]
def _process_result(r: RawResult) -> Optional[Result]:
  # Turn a raw worker result into a final Result, or None when it should
  # be dropped (worker error, list filtered to nothing, bad substitution).
  version = r.version
  conf = r.conf
  name = r.name
  if isinstance(version, Exception):
    # Workers deliver failures through the queue instead of raising.
    logger.error('unexpected error happened',
                 name=r.name, exc_info=r.version)
    return None
  elif isinstance(version, list):
    # Multiple candidate versions: filter and pick one per the entry's conf.
    version_str = apply_list_options(version, conf)
  else:
    version_str = version
  if version_str:
    # Newlines would corrupt the one-line-per-entry record file.
    version_str = version_str.replace('\n', ' ')
    try:
      version_str = substitute_version(version_str, conf)
      return Result(name, version_str, conf)
    except (ValueError, re.error):
      logger.exception('error occurred in version substitutions', name=name)
  return None
def check_version_update(
  oldvers: VersData, name: str, version: str,
) -> Optional[str]:
  # Compare against the previously recorded version; return the version
  # when it changed (or was never recorded before), else None.
  oldver = oldvers.get(name, None)
  if not oldver or oldver != version:
    logger.info('updated', name=name, version=version, old_version=oldver)
    return version
  else:
    logger.debug('up-to-date', name=name, version=version)
    return None
async def process_result(
  oldvers: VersData,
  result_q: Queue[RawResult],
) -> VersData:
  # Collect results from the queue until cancelled.  The caller cancels
  # this coroutine once all workers are done; the accumulated version
  # data is returned from the CancelledError handler.
  ret = {}
  try:
    while True:
      r = await result_q.get()
      r1 = _process_result(r)
      if r1 is None:
        continue
      v = check_version_update(
        oldvers, r1.name, r1.version)
      if v is not None:
        ret[r1.name] = v
  except asyncio.CancelledError:
    return ret
conf = config[name]
conf['oldver'] = self.oldvers.get(name, None)
fu = asyncio.ensure_future(worker(name, conf))
futures.append(fu)
for fu in asyncio.as_completed(futures):
name, result = await fu
if isinstance(result, Exception):
logger.error('unexpected error happened',
name=name, exc_info=result)
self.on_exception(name, result)
elif result is not None:
self.print_version_update(name, result)
else:
conf = config[name]
if not conf.getboolean('missing_ok', False):
logger.warning('no-result', name=name)
self.on_no_result(name)
if self.newver:
write_verfile(self.newver, self.curvers)
def print_version_update(self, name, version):
oldver = self.oldvers.get(name, None)
if not oldver or oldver != version:
logger.info('updated', name=name, version=version, old_version=oldver)
self.curvers[name] = version
self.on_update(name, version, oldver)
else:
logger.debug('up-to-date', name=name, version=version)
def on_update(self, name, version, oldver):
pass
def on_no_result(self, name):
pass
def on_exception(self, name, exc):
pass
def __repr__(self):
return '<Source from %r>' % self.name
class KeyManager:
  '''Look up API keys from the [keys] section of an ini-style keyfile.'''

  def __init__(self, file):
    parser = configparser.ConfigParser(dict_type=dict)
    if file is None:
      # No keyfile configured: keep an empty section so lookups still work.
      parser.add_section('keys')
    else:
      parser.read([file])
    self.config = parser

  def get_key(self, name):
    '''Return the key configured for *name*, or None when absent.'''
    return self.config.get('keys', name, fallback=None)
async def run_tasks(
  futures: Sequence[Awaitable[None]]
) -> None:
  '''Await every given awaitable, in completion order.

  An exception raised by any task propagates to the caller as soon as
  that task finishes.
  '''
  for done in asyncio.as_completed(futures):
    await done

View file

@ -1,105 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.
import re
from importlib import import_module
import structlog
from .sortversion import sort_version_keys
logger = structlog.get_logger(logger_name=__name__)
handler_precedence = (
'github', 'aur', 'pypi', 'archpkg', 'debianpkg', 'ubuntupkg',
'gems', 'pacman',
'cmd', 'bitbucket', 'regex', 'manual', 'vcs',
'cratesio', 'npm', 'hackage', 'cpan', 'gitlab', 'packagist',
'repology', 'anitya', 'android_sdk', 'sparkle', 'gitea'
)
def substitute_version(version, name, conf):
  '''
  Substitute the version string via defined rules in the configuration file.
  See README.rst#global-options for details.

  Rules are mutually exclusive: when ``prefix`` is configured it wins and
  ``from_pattern`` is not consulted.

  Raises ValueError if from_pattern is given without to_pattern.
  '''
  prefix = conf.get('prefix')
  if prefix:
    if version.startswith(prefix):
      version = version[len(prefix):]
    # prefix configured: never fall through to pattern substitution.
    return version

  from_pattern = conf.get('from_pattern')
  if from_pattern:
    to_pattern = conf.get('to_pattern')
    if not to_pattern:
      # was ValueError('%s: ...', name): the message was never
      # %-formatted and the name ended up as a second exception arg.
      raise ValueError(
        "%s: from_pattern exists but to_pattern doesn't" % name)
    return re.sub(from_pattern, to_pattern, version)

  # No substitution rules found. Just return the original version string.
  return version
def apply_list_options(versions, conf):
  '''Filter a list of candidate versions per the entry's configuration
  and return the greatest remaining one (None when nothing is left).'''
  include = conf.get('include_regex')
  if include:
    include_re = re.compile(include)
    versions = [v for v in versions if include_re.fullmatch(v)]

  exclude = conf.get('exclude_regex')
  if exclude:
    exclude_re = re.compile(exclude)
    versions = [v for v in versions if not exclude_re.fullmatch(v)]

  ignored = set(conf.get('ignored', '').split())
  if ignored:
    versions = [v for v in versions if v not in ignored]

  if not versions:
    return

  key_name = conf.get("sort_version_key", "parse_version")
  versions.sort(key=sort_version_keys[key_name])
  return versions[-1]
# Cache of computed versions, keyed by the entry's cacheable configuration,
# so identical entries are only fetched once per run.
_cache = {}

async def get_version(name, conf, **kwargs):
  # Find the first configured source handler, in precedence order.
  for key in handler_precedence:
    if key in conf:
      mod = import_module('.source.' + key, __package__)
      func = mod.get_version
      # Sources may normalize their conf for caching; default: unchanged.
      get_cacheable_conf = getattr(mod, 'get_cacheable_conf', lambda name, conf: conf)
      break
  else:
    logger.error('no idea to get version info.', name=name)
    return

  cacheable_conf = get_cacheable_conf(name, conf)
  cache_key = tuple(sorted(cacheable_conf.items()))
  if cache_key in _cache:
    version = _cache[cache_key]
    logger.debug('cache hit', name=name,
                 cache_key=cache_key, cached=version)
    return version

  version = await func(name, conf, **kwargs)

  if isinstance(version, list):
    # Multiple candidates: filter and pick the newest.
    version = apply_list_options(version, conf)

  if version:
    # Newlines would corrupt the one-line-per-entry record file.
    version = version.replace('\n', ' ')
    try:
      version = substitute_version(version, name, conf)
    except (ValueError, re.error):
      logger.exception('error occurred in version substitutions', name=name)

  if version is not None:
    _cache[cache_key] = version

  return version

View file

@ -19,10 +19,3 @@ m = __import__('%s_httpclient' % which, globals(), locals(), level=1)
__all__ = m.__all__
for x in __all__:
globals()[x] = getattr(m, x)
def conf_cacheable_with_name(key):
  '''Build a get_cacheable_conf that fills conf[key] with the entry name
  when it is unset or falsy, so equivalent entries share a cache key.'''
  def get_cacheable_conf(name, conf):
    merged = dict(conf)
    merged[key] = merged.get(key) or name
    return merged
  return get_cacheable_conf

View file

@ -11,7 +11,7 @@ try:
import pycurl
AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient", max_clients=20)
except ImportError:
pycurl = None
pycurl = None # type: ignore
from .httpclient import DEFAULT_USER_AGENT
@ -76,8 +76,8 @@ async def json_response(self, **kwargs):
async def read(self):
return self.body
HTTPResponse.json = json_response
HTTPResponse.read = read
HTTPResponse.json = json_response # type: ignore
HTTPResponse.read = read # type: ignore
session = Session()
NetworkErrors = ()

View file

@ -1 +1 @@
This directory belongs to modules from my [winterpy](https://github.com/lilydjwg/winterpy) and can be synced from there without care.
This directory contains modules from my [winterpy](https://github.com/lilydjwg/winterpy).

View file

@ -0,0 +1,3 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.

View file

@ -44,8 +44,7 @@ class TornadoLogFormatter(logging.Formatter):
record.message = "Bad message (%r): %r" % (e, record.__dict__)
record.asctime = time.strftime(
"%m-%d %H:%M:%S", self.converter(record.created))
record.asctime += '.%03d' % ((record.created % 1) * 1000)
prefix = '[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]' % \
prefix = '[%(levelname)1.1s %(asctime)s.%(msecs)03d %(module)s:%(lineno)d]' % \
record.__dict__
if self._color:
prefix = (self._colors.get(record.levelno, self._normal) +

View file

@ -1,102 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.
'''
调用 libnotify
'''
__all__ = ["set", "show", "update", "set_timeout", "set_urgency"]
from ctypes import *
from threading import Lock
import atexit
NOTIFY_URGENCY_LOW = 0
NOTIFY_URGENCY_NORMAL = 1
NOTIFY_URGENCY_CRITICAL = 2
UrgencyLevel = {NOTIFY_URGENCY_LOW, NOTIFY_URGENCY_NORMAL, NOTIFY_URGENCY_CRITICAL}
libnotify = None
gobj = None
libnotify_lock = Lock()
libnotify_inited = False
class obj: pass
notify_st = obj()
def set(summary=None, body=None, icon_str=None):
  # Prepare the notification's content without showing it.
  # Setting a new summary resets body and icon; body/icon given alone
  # update only that field.
  with libnotify_lock:
    init()
    if summary is not None:
      notify_st.summary = summary.encode()
      # A new summary starts a fresh notification: clear the old fields.
      notify_st.body = notify_st.icon_str = None
    if body is not None:
      notify_st.body = body.encode()
    if icon_str is not None:
      notify_st.icon_str = icon_str.encode()
    libnotify.notify_notification_update(
      notify_st.notify,
      notify_st.summary,
      notify_st.body,
      notify_st.icon_str,
    )
def show():
  # Display (or re-display) the prepared notification.
  libnotify.notify_notification_show(notify_st.notify, c_void_p())
def update(summary=None, body=None, icon_str=None):
  '''Change the notification's content and (re)display it.

  At least one of *summary* or *body* must be truthy.
  '''
  if not (summary or body):
    raise TypeError('at least one argument please')
  set(summary, body, icon_str)
  show()
def set_timeout(self, timeout):
  '''set `timeout' in milliseconds'''
  # NOTE(review): the stray `self` parameter looks like a leftover from a
  # method — a caller passing only the timeout gets a TypeError; confirm
  # the intended signature.
  libnotify.notify_notification_set_timeout(notify_st.notify, int(timeout))
def set_urgency(self, urgency):
  # Set the notification urgency; must be one of the NOTIFY_URGENCY_*
  # values collected in UrgencyLevel.
  # NOTE(review): stray `self` parameter, same as set_timeout — confirm.
  if urgency not in UrgencyLevel:
    raise ValueError
  libnotify.notify_notification_set_urgency(notify_st.notify, urgency)
def init():
  # Lazily load libnotify/gobject and create the single notification
  # object reused by every call; idempotent.
  global libnotify_inited, libnotify, gobj
  if libnotify_inited:
    return
  try:
    libnotify = CDLL('libnotify.so')
  except OSError:
    # Distros without the dev package only ship the versioned soname.
    libnotify = CDLL('libnotify.so.4')
  gobj = CDLL('libgobject-2.0.so')
  libnotify.notify_init('pynotify')
  libnotify_inited = True
  # Default restype is int; the handle must be kept as a pointer.
  libnotify.notify_notification_new.restype = c_void_p
  notify_st.notify = c_void_p(libnotify.notify_notification_new(
    c_void_p(), c_void_p(), c_void_p(),
  ))
  atexit.register(uninit)
def uninit():
  # Release the notification object and shut libnotify down; registered
  # with atexit by init() and safe to call more than once.
  global libnotify_inited
  try:
    if libnotify_inited:
      gobj.g_object_unref(notify_st.notify)
      libnotify.notify_uninit()
      libnotify_inited = False
  except AttributeError:
    # libnotify.so has already been unloaded
    pass
if __name__ == '__main__':
from time import sleep
notify = __import__('__main__')
notify.set('This is a test', '测试一下。')
notify.show()
sleep(1)
notify.update(body='再测试一下。')

View file

@ -10,7 +10,7 @@ import sys
import structlog
from .source import HTTPError, NetworkErrors
from .httpclient import HTTPError, NetworkErrors # type: ignore
def _console_msg(event):
evt = event['event']

View file

@ -1,31 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.
import structlog
from datetime import datetime
from . import session, conf_cacheable_with_name
logger = structlog.get_logger(logger_name=__name__)
AUR_URL = 'https://aur.archlinux.org/rpc/'
get_cacheable_conf = conf_cacheable_with_name('aur')
async def get_version(name, conf, **kwargs):
  # Query the AUR RPC (v5 "info") for a single package's version.
  aurname = conf.get('aur') or name
  use_last_modified = conf.getboolean('use_last_modified', False)
  strip_release = conf.getboolean('strip-release', False)
  async with session.get(AUR_URL, params={"v": 5, "type": "info", "arg[]": aurname}) as res:
    data = await res.json()
  if not data['results']:
    logger.error('AUR upstream not found', name=name)
    return
  version = data['results'][0]['Version']
  if use_last_modified:
    # Append the LastModified timestamp (UTC) so rebuilds are detected.
    version += '-' + datetime.utcfromtimestamp(data['results'][0]['LastModified']).strftime('%Y%m%d%H%M%S')
  if strip_release and '-' in version:
    version = version.rsplit('-', 1)[0]
  return version

72
nvchecker/util.py Normal file
View file

@ -0,0 +1,72 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.
from __future__ import annotations
from asyncio import Queue
import contextlib
from typing import (
  Any, AsyncGenerator, Callable, Dict, List,
  NamedTuple, Optional, Tuple, Union,
)
from pathlib import Path
import toml
Entry = Dict[str, Any]
Entries = Dict[str, Entry]
VersData = Dict[str, str]
class KeyManager:
  '''Holds API keys loaded from the [keys] table of a TOML keyfile.'''

  def __init__(
    self, file: Optional[Path],
  ) -> None:
    if file is None:
      self.keys = {}
    else:
      with file.open() as f:
        self.keys = toml.load(f)['keys']

  def get_key(self, name: str) -> Optional[str]:
    '''Return the key configured for *name*, or None.'''
    return self.keys.get(name)
class BaseWorker:
  '''Common machinery shared by all source workers: the task list and
  the queues used for concurrency control and result delivery.'''

  def __init__(
    self,
    token_q: Queue[bool],
    result_q: Queue[RawResult],
    tasks: List[Tuple[str, Entry]],
    tries: int,
    keymanager: KeyManager,
  ) -> None:
    self.token_q = token_q
    self.result_q = result_q
    self.tasks = tasks
    self.tries = tries
    self.keymanager = keymanager

  @contextlib.asynccontextmanager
  async def acquire_token(self) -> AsyncGenerator[None, None]:
    '''Hold one concurrency token for the duration of the block; it is
    returned to the queue even if the body raises.'''
    t = await self.token_q.get()
    try:
      yield
    finally:
      await self.token_q.put(t)
class RawResult(NamedTuple):
  # Unprocessed result as produced by a worker.
  name: str
  # The fetched version, a list of candidates, or the exception raised.
  version: Union[Exception, List[str], str]
  conf: Entry
class Result(NamedTuple):
  # Final result: a single version string after filtering/substitution.
  name: str
  version: str
  conf: Entry
def conf_cacheable_with_name(key: str) -> Callable[[str, Entry], Entry]:
  '''Return a ``get_cacheable_conf`` that defaults ``conf[key]`` to the
  entry name, so entries differing only in name share a cache slot.

  Annotated for consistency with the rest of this fully-typed module.
  '''
  def get_cacheable_conf(name: str, conf: Entry) -> Entry:
    conf = dict(conf)
    conf[key] = conf.get(key) or name
    return conf
  return get_cacheable_conf

84
nvchecker_source/aur.py Normal file
View file

@ -0,0 +1,84 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.
import structlog
from datetime import datetime
import asyncio
from typing import Iterable, Dict, List, Tuple, Any
from nvchecker.util import (
Entry, BaseWorker, RawResult,
conf_cacheable_with_name,
)
from nvchecker.httpclient import session # type: ignore
get_cacheable_conf = conf_cacheable_with_name('aur')
logger = structlog.get_logger(logger_name=__name__)
AUR_URL = 'https://aur.archlinux.org/rpc/'
class Worker(BaseWorker):
  '''Check AUR packages in batches through the aurweb RPC interface.'''

  # https://wiki.archlinux.org/index.php/Aurweb_RPC_interface#Limitations
  batch_size = 100

  async def run(self) -> None:
    '''Split tasks into RPC-sized batches and query them concurrently.'''
    tasks = self.tasks
    n_batch, left = divmod(len(tasks), self.batch_size)
    if left > 0:
      n_batch += 1

    batches = []
    for i in range(n_batch):
      s = i * self.batch_size
      batches.append(self._run_batch(tasks[s : s + self.batch_size]))

    # asyncio.wait() rejects bare coroutines on Python >= 3.11 and raises
    # on an empty set; gather handles both.  return_exceptions keeps one
    # failing batch from aborting (or leaking warnings about) the others,
    # matching the old wait() behavior.
    await asyncio.gather(*batches, return_exceptions=True)

  async def _run_batch(self, batch: List[Tuple[str, Entry]]) -> None:
    '''Resolve one batch and queue a RawResult per resolved entry.'''
    # Results come back keyed by entry name; self.tasks is a superset of
    # batch, so the lookup always succeeds.
    task_by_name: Dict[str, Entry] = dict(self.tasks)
    async with self.acquire_token():
      results = await _run_batch_impl(batch)
      for name, version in results.items():
        r = RawResult(name, version, task_by_name[name])
        await self.result_q.put(r)
async def _run_batch_impl(batch: List[Tuple[str, Entry]]) -> Dict[str, str]:
  # Resolve one batch of entries via a single AUR RPC request.
  # Returns entry name -> version; entries missing upstream are logged
  # and omitted from the result.
  aurnames = {conf.get('aur', name) for name, conf in batch}
  results = await _aur_get_multiple(aurnames)

  ret = {}
  for name, conf in batch:
    aurname = conf.get('aur', name)
    use_last_modified = conf.get('use_last_modified', False)
    strip_release = conf.get('strip-release', False)

    result = results.get(aurname)
    if result is None:
      logger.error('AUR upstream not found', name=name)
      continue

    version = result['Version']
    if use_last_modified:
      # Append the LastModified timestamp (UTC) so rebuilds are detected.
      version += '-' + datetime.utcfromtimestamp(result['LastModified']).strftime('%Y%m%d%H%M%S')
    if strip_release and '-' in version:
      version = version.rsplit('-', 1)[0]

    ret[name] = version

  return ret
async def _aur_get_multiple(
  aurnames: Iterable[str],
) -> Dict[str, Dict[str, Any]]:
  # Issue one AUR RPC v5 "info" request covering all given names and
  # return the raw result records keyed by their AUR package Name.
  params = [('v', '5'), ('type', 'info')]
  params.extend(('arg[]', name) for name in aurnames)
  async with session.get(AUR_URL, params=params) as res:
    data = await res.json()
  results = {r['Name']: r for r in data['results']}
  return results

15
nvchecker_source/none.py Normal file
View file

@ -0,0 +1,15 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.
from __future__ import annotations
import structlog
from nvchecker.util import BaseWorker
logger = structlog.get_logger(logger_name=__name__)
class Worker(BaseWorker):
  '''Fallback worker: every entry routed here lacks a usable source.'''

  async def run(self) -> None:
    async with self.acquire_token():
      for entry_name, _conf in self.tasks:
        logger.error('no source specified', name=entry_name)

View file

@ -21,7 +21,7 @@ setup(
zip_safe = False,
packages = find_packages(exclude=["tests"]),
install_requires = ['setuptools', 'structlog', 'tornado>=6', 'pycurl'],
install_requires = ['setuptools', 'toml', 'structlog', 'tornado>=6', 'pycurl'],
extras_require = {
'vercmp': ['pyalpm'],
},
@ -50,8 +50,6 @@ setup(
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3 :: Only",
"Programming Language :: Python :: 3.5",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Topic :: Internet",

View file

@ -0,0 +1,3 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.

View file

@ -1,3 +1,6 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.
import configparser
import pytest
import asyncio