Mirror of https://github.com/lilydjwg/archrepo2.git, synced 2025-03-10 12:02:43 +00:00

Compare commits (25 commits)

SHA1:
e7e8142d05
eaddcd33ba
954068b000
fa4f743045
5bc16468cb
668140131b
9187984b6b
c0031235a6
7a7ab1b4e4
c074e3b271
eef3c96242
bd1ac0962f
fe1e4b10cf
79494a5722
b3d473d79b
3c26780080
2ef72fe1a9
059e0bb581
db7e1797fc
05c6a7b233
763d1ec110
e00de1a616
65660f23c5
bcca713e5c
5a28db12f2

12 changed files with 293 additions and 266 deletions
@@ -13,8 +13,9 @@ DEPENDENCIES
 - Python, >= 3.3, with sqlite support
 - setuptools
-- tornado, > 2.4.1
+- tornado, > 3.1
 - pyinotify, tested with 0.9.4
+- pyalpm, tested with 0.10.6
 
 NOTE
 ====

@@ -25,11 +26,9 @@ NOTE
 TODO
 ====
 
-- [high] singleton daemon
 - [high] adding and then removing it before adding complete will result
   in not-in-database removing
 - [middle] specify what architectures we have and don't require others
-- [low] fork to background
 - [low] use one common command queue (now one each repo)
 - [low] verify packages
 
@@ -1 +1 @@
-__version__ = '0.5'
+__version__ = '0.6dev'
@@ -33,16 +33,18 @@ def main():
   config.read(conffile)
   repos = check_and_get_repos(config)
 
-  notifiers = [repomon(config[repo]) for repo in repos]
+  notifiers = []
+  for repo in repos:
+    notifiers.extend(repomon(config[repo]))
 
-  ioloop = IOLoop.instance()
+  ioloop = IOLoop.current()
   logger.info('starting archreposrv.')
   try:
     ioloop.start()
   except KeyboardInterrupt:
-    ioloop.close()
     for notifier in notifiers:
       notifier.stop()
+    ioloop.close()
     print()
 
 if __name__ == '__main__':
@@ -1,93 +1,68 @@
+from __future__ import annotations
+
 import os
-from collections import defaultdict, namedtuple
+from collections import namedtuple
 import subprocess
 import re
+from typing import List, Dict
 
-from pkg_resources import parse_version
+import pyalpm
 
 class PkgNameInfo(namedtuple('PkgNameInfo', 'name, version, release, arch')):
-  def __lt__(self, other):
+  def __lt__(self, other) -> bool:
     if self.name != other.name or self.arch != other.arch:
       return NotImplemented
     if self.version != other.version:
-      return parse_version(self.version) < parse_version(other.version)
+      return pyalpm.vercmp(self.version, other.version) < 0
     return float(self.release) < float(other.release)
 
-  def __gt__(self, other):
+  def __gt__(self, other) -> bool:
     # No, try the other side please.
     return NotImplemented
 
   @property
-  def fullversion(self):
+  def fullversion(self) -> str:
     return '%s-%s' % (self.version, self.release)
 
   @classmethod
-  def parseFilename(cls, filename):
+  def parseFilename(cls, filename: str) -> 'PkgNameInfo':
     return cls(*trimext(filename, 3).rsplit('-', 3))
 
-def trimext(name, num=1):
+def trimext(name: str, num: int = 1) -> str:
   for i in range(num):
     name = os.path.splitext(name)[0]
   return name
 
-def get_pkgname_with_bash(PKGBUILD):
+def get_pkgname_with_bash(PKGBUILD: str) -> List[str]:
   script = '''\
 . '%s'
 echo ${pkgname[*]}''' % PKGBUILD
   # Python 3.4 has 'input' arg for check_output
-  p = subprocess.Popen(['bash'], stdin=subprocess.PIPE,
-                       stdout=subprocess.PIPE)
-  output = p.communicate(script.encode('latin1'))[0].decode('latin1')
+  p = subprocess.Popen(
+    ['bwrap', '--unshare-all', '--ro-bind', '/', '/', '--tmpfs', '/home',
+     '--tmpfs', '/run', '--die-with-parent',
+     '--tmpfs', '/tmp', '--proc', '/proc', '--dev', '/dev', '/bin/bash'],
+    stdin=subprocess.PIPE, stdout=subprocess.PIPE,
+  )
+  output = p.communicate(script.encode())[0].decode()
   ret = p.wait()
   if ret != 0:
     raise subprocess.CalledProcessError(
       ret, ['bash'], output)
   return output.split()
 
-def _run_bash(script):
-  p = subprocess.Popen(['bash'], stdin=subprocess.PIPE)
-  p.communicate(script.encode('latin1'))
-  ret = p.wait()
-  if ret != 0:
-    raise subprocess.CalledProcessError(
-      ret, ['bash'])
+pkgfile_pat = re.compile(r'(?:^|/).+-[^-]+-[\d.]+-(?:\w+)\.pkg\.tar\.(?:xz|zst)$')
 
-def get_aur_pkgbuild_with_bash(name):
-  script = '''\
-. /usr/lib/yaourt/util.sh
-. /usr/lib/yaourt/aur.sh
-init_color
-aur_get_pkgbuild '%s' ''' % name
-  _run_bash(script)
-
-def get_abs_pkgbuild_with_bash(name):
-  script = '''\
-. /usr/lib/yaourt/util.sh
-. /usr/lib/yaourt/abs.sh
-init_paths
-init_color
-arg=$(pacman -Sp --print-format '%%r/%%n' '%s')
-RSYNCOPT="$RSYNCOPT -O"
-abs_get_pkgbuild "$arg" ''' % name
-  _run_bash(script)
-
-pkgfile_pat = re.compile(r'(?:^|/).+-[^-]+-[\d.]+-(?:\w+)\.pkg\.tar\.xz$')
-
-def _strip_ver(s):
+def _strip_ver(s: str) -> str:
   return re.sub(r'[<>=].*', '', s)
 
-def get_package_dependencies(name):
-  out = subprocess.check_output(["package-query", "-Sii", "-f", "%D", name])
-  out = out.decode('latin1')
-  return [_strip_ver(x) for x in out.split() if x != '-']
-
-def get_package_info(name, local=False):
+def get_package_info(name: str, local: bool = False) -> Dict[str, str]:
   old_lang = os.environ['LANG']
   os.environ['LANG'] = 'C'
   args = '-Qi' if local else '-Si'
   try:
-    out = subprocess.check_output(["pacman", args, name])
-    out = out.decode('latin1')
+    outb = subprocess.check_output(["pacman", args, name])
+    out = outb.decode('latin1')
   finally:
     os.environ['LANG'] = old_lang
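The switch from pkg_resources.parse_version to pyalpm.vercmp above matters because pacman's version ordering (epoch:version-release) does not follow PEP 440. A minimal sketch of the semantics the new comparison relies on; the version strings here are made up, not taken from the repo:

# Sketch only: pyalpm.vercmp() follows pacman's vercmp(8) rules and returns a
# negative, zero or positive integer, like a classic three-way comparison.
import pyalpm

assert pyalpm.vercmp('1.0', '1.0.1') < 0     # plain numeric components
assert pyalpm.vercmp('1:0.9', '2.0') > 0     # an epoch ("1:") outranks any un-epoched version
assert pyalpm.vercmp('1.0-2', '1.0-10') < 0  # the release part is compared numerically, not as a string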
@@ -104,14 +79,3 @@ def get_package_info(name, local=False):
       ret[key] += ' ' + l.strip()
   return ret
-
-def get_package_repository(name):
-  try:
-    out = subprocess.check_output(["package-query", "-Sii", "-f", "%r", name])
-    repo = out.strip().decode('latin1')
-  except subprocess.CalledProcessError:
-    repo = 'local'
-  return repo
-
-def is_official(name):
-  repo = get_package_repository(name)
-  return repo in ('core', 'extra', 'community', 'multilib', 'testing')
@@ -4,13 +4,15 @@ A Tornado-inspired logging formatter, with displayed time with millisecond accuracy
 FYI: pyftpdlib also has a Tornado-style logger.
 '''
 
+from __future__ import annotations
+
 import sys
 import time
 import logging
 
 class TornadoLogFormatter(logging.Formatter):
   def __init__(self, color, *args, **kwargs):
-    super().__init__(self, *args, **kwargs)
+    super().__init__(*args, **kwargs)
     self._color = color
     if color:
       import curses

@@ -41,13 +43,23 @@ class TornadoLogFormatter(logging.Formatter):
       record.message = "Bad message (%r): %r" % (e, record.__dict__)
     record.asctime = time.strftime(
       "%m-%d %H:%M:%S", self.converter(record.created))
-    record.asctime += '.%03d' % ((record.created % 1) * 1000)
-    prefix = '[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]' % \
+    prefix = '[%(levelname)1.1s %(asctime)s.%(msecs)03d %(module)s:%(lineno)d]' % \
         record.__dict__
     if self._color:
       prefix = (self._colors.get(record.levelno, self._normal) +
                 prefix + self._normal)
     formatted = prefix + " " + record.message
 
+    formatted += ''.join(
+      ' %s=%s' % (k, v) for k, v in record.__dict__.items()
+      if k not in {
+        'levelname', 'asctime', 'module', 'lineno', 'args', 'message',
+        'filename', 'exc_info', 'exc_text', 'created', 'funcName',
+        'processName', 'process', 'msecs', 'relativeCreated', 'thread',
+        'threadName', 'name', 'levelno', 'msg', 'pathname', 'stack_info',
+        'taskName',
+      })
+
     if record.exc_info:
       if not record.exc_text:
         record.exc_text = self.formatException(record.exc_info)
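The second hunk makes the formatter append any non-standard record attributes as key=value pairs after the message. A hedged usage sketch; the logger call and field names are invented, while enable_pretty_logging is the helper this module already exposes (it is also used by the migration script removed at the bottom of this compare):

# Sketch: fields passed via `extra=` land on the LogRecord and, since they are
# not in the exclusion set above, get appended to the formatted line.
import logging
from archrepo2.lib.nicelogger import enable_pretty_logging

enable_pretty_logging(logging.DEBUG)
logging.getLogger(__name__).info(
  'package added', extra={'pkg': 'vim', 'arch': 'x86_64'})
# prints something roughly like:
# [I 03-10 12:00:00.123 example:8] package added pkg=vim arch=x86_64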
@@ -1,44 +0,0 @@
-import tarfile
-import logging
-
-logger = logging.getLogger(__name__)
-
-multikeys = {'depend', 'makepkgopt', 'optdepend', 'replaces', 'conflict',
-             'provides', 'license', 'backup', 'group', 'makedepend', 'checkdepend'}
-
-def _add_to_dict(d, key, value):
-  if key in multikeys:
-    if key in d:
-      d[key].append(value)
-    else:
-      d[key] = [value]
-  else:
-    assert key not in d, 'unexpected multi-value key "%s"' % key
-    d[key] = value
-
-def readpkg(file):
-  tar = tarfile.open(file)
-  info = tar.next()
-  if not info or info.name != '.PKGINFO':
-    logger.warn('%s is not a nice package!', file)
-    info = '.PKGINFO' # have to look further
-  f = tar.extractfile(info)
-  data = f.read().decode()
-  tar.close()
-
-  d = {}
-  key = None
-  for l in data.split('\n'):
-    if l.startswith('#'):
-      continue
-    if not l:
-      continue
-    if '=' not in l:
-      value += l
-    else:
-      if key is not None:
-        _add_to_dict(d, key, value)
-      key, value = l.split(' = ', 1)
-  _add_to_dict(d, key, value)
-
-  return d
@@ -12,7 +12,6 @@ import sqlite3
 import socket
 import time
 import hashlib
-import pickle
 from os.path import relpath
 
 import pyinotify
@@ -21,14 +20,10 @@ from tornado.ioloop import IOLoop
 import tornado.process
 
 from .lib import archpkg
-from . import pkgreader
 from . import dbutil
 
 logger = logging.getLogger(__name__)
 
-# handles only x86_64, i686 and any arch packages
-_pkgfile_pat = re.compile(r'(?:^|/).+-[^-]+-[\d.]+-(?:x86_64|i686|any)\.pkg\.tar\.xz(?:\.sig)?$')
-
 def same_existent_file(a, b):
   try:
     return os.path.samefile(a, b)
@@ -53,10 +48,11 @@ class RepoMan:
   _cmd_queue = queue.Queue()
   _cmd_running = False
 
-  def __init__(self, config, base, ioloop=None):
+  def __init__(self, config, base, siteman):
     self.action = []
-    self._ioloop = ioloop or IOLoop.instance()
+    self._ioloop = IOLoop.current()
     self._base = base
+    self._siteman = siteman
 
     self._repo_dir = config.get('path')
     self.name = config.get('name')
@@ -69,34 +65,27 @@ class RepoMan:
     self._auto_rename = config.getboolean('auto-rename', True)
     self._symlink_any = config.getboolean('symlink-any', True)
 
-    notification_type = config.get('notification-type', 'null')
-    if notification_type != 'null':
-      self._notification_addrs = config.get('notification-addresses')
-      self._notification_secret = config.get('notification-secret')
-    self.send_notification = getattr(
-      self,
-      'send_notification_' + notification_type.replace('-', '_'),
-    )
-
   def queue_command(self, cmd, callbacks=None):
     self._cmd_queue.put((cmd, callbacks))
     if not self._cmd_running:
       self.run_command()
 
   def run_command(self):
+    if not self._cmd_running:
+      self._siteman.inc_running()
     self.__class__._cmd_running = True
     try:
       cmd, callbacks = self._cmd_queue.get_nowait()
     except queue.Empty:
-      self.send_notification()
       self.__class__._cmd_running = False
+      self._siteman.dec_running()
       return
 
     logger.info('Running cmd: %r', cmd)
-    # have to specify io_loop or we'll get error tracebacks in some versions
-    # of Tornado
+    # no longer have to specify io_loop in Tornado > 3.1. Let's drop them for
+    # Tornado >= 5
    try:
-      p = tornado.process.Subprocess(cmd, io_loop=self._ioloop)
+      p = tornado.process.Subprocess(cmd)
     except OSError:
       logger.error('failed to run command.', exc_info=True)
       self.run_command()
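RepoMan no longer sends notifications itself; it only reports activity to the siteman object it is constructed with (the EventHandler, as the later hunks show). A minimal sketch of that contract, using a stand-in class that is not part of the repo:

# Sketch: anything exposing inc_running()/dec_running() satisfies what
# RepoMan.run_command() needs; the real EventHandler implementation sends the
# notification and rewrites the lastupdate file when the counter hits zero.
class DummySiteMan:
  def __init__(self):
    self._n_running = 0

  def inc_running(self):
    self._n_running += 1

  def dec_running(self):
    self._n_running -= 1
    if self._n_running == 0:
      print('all queued repo-database commands have finished')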
@@ -151,52 +140,6 @@ class RepoMan:
       self.run,
     )
 
-  def send_notification_simple_udp(self):
-    msg = self._new_notification_msg()
-
-    socks = {}
-    for address, port in self._parse_notification_address_inet():
-      try:
-        af, socktype, proto, canonname, sockaddr = socket.getaddrinfo(
-          address, port, 0, socket.SOCK_DGRAM, 0, 0)[0]
-      except:
-        logger.exception('failed to create socket to %r for notification',
-                         (address, port))
-        continue
-
-      info = af, socktype, proto
-      if info not in socks:
-        sock = socket.socket(*info)
-        socks[info] = sock
-      else:
-        sock = socks[info]
-      sock.sendto(msg, sockaddr)
-      logger.info('simple udp notification sent to %s.', (address, port))
-
-  def _new_notification_msg(self):
-    s = 'update'
-    t = str(int(time.time()))
-    data = s + '|' + t
-    hashing = data + self._notification_secret
-    sig = hashlib.sha1(hashing.encode('utf-8')).hexdigest()
-    msg = data + '|' + sig
-    logger.info('new notification msg: %s.', msg)
-    return msg.encode('utf-8')
-
-  def _parse_notification_address_inet(self):
-    cached = self._notification_addrs
-    if isinstance(cached, str):
-      addresses = []
-      for addr in cached.split():
-        host, port = addr.rsplit(':', 1)
-        port = int(port)
-        addresses.append((host, port))
-      cached = self._notification_addrs = tuple(addresses)
-    return cached
-
-  def send_notification_null(self):
-    logger.info('null notification sent.')
-
   def run(self):
     self._timeout = None
     actions = self.action
@@ -221,14 +164,36 @@ class RepoMan:
       self._do_remove(toremove)
 
 class EventHandler(pyinotify.ProcessEvent):
-  def my_init(self, config, wm, ioloop=None):
+  _n_running = 0
+
+  def my_init(
+    self, filter_pkg, supported_archs, config, wm,
+  ):
+
+    notification_type = config.get(
+      'notification-type', 'null')
+    if notification_type != 'null':
+      self._notification_addrs = config.get(
+        'notification-addresses')
+      self._notification_secret = config.get(
+        'notification-secret')
+    self.send_notification = getattr(
+      self,
+      'send_notification_' +
+        notification_type.replace('-', '_'),
+    )
+
+    self.filter_pkg = filter_pkg
     self.moved_away = {}
+    self.created = {}
     self.repomans = {}
-    # TODO: use a expiring dict
+    # TODO: use an expiring dict
     self.our_links = set()
-    self._ioloop = ioloop or IOLoop.instance()
+    self._ioloop = IOLoop.current()
 
     base = config.get('path')
+    self._lastupdate_file = os.path.join(base, 'lastupdate')
 
     dbname = config.get('info-db', os.path.join(base, 'pkginfo.db'))
     new_db = not os.path.exists(dbname)
     self._db = sqlite3.connect(dbname, isolation_level=None) # isolation_level=None means autocommit
@@ -252,15 +217,19 @@ class EventHandler(pyinotify.ProcessEvent):
         (filename text unique,
          pkgrepo text)''')
 
-    dirs = [os.path.join(base, x) for x in ('any', 'i686', 'x86_64')]
+    self._supported_archs = supported_archs
+    dirs = [os.path.join(base, x) for x in self._supported_archs]
     self.files = files = set()
     for d in dirs:
+      os.makedirs(d, exist_ok=True)
       for f in os.listdir(d):
        p = os.path.join(d, f)
         if os.path.exists(p): # filter broken symlinks
           files.add(p)
-      wm.add_watch(d, pyinotify.ALL_EVENTS)
-      self.repomans[d] = RepoMan(config, d, self._ioloop)
+      wm.add_watch(d, pyinotify.IN_CLOSE_WRITE | pyinotify.IN_DELETE |
+                   pyinotify.IN_CREATE | pyinotify.IN_MOVED_FROM |
+                   pyinotify.IN_MOVED_TO | pyinotify.IN_OPEN)
+      self.repomans[d] = RepoMan(config, d, self)
       self.name = self.repomans[d].name
       self._auto_rename = self.repomans[d]._auto_rename
       self._symlink_any = self.repomans[d]._symlink_any
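The watcher keeps its state in the pkginfo and sigfiles tables created here, so the database can also be inspected out of band. A hedged sketch; the column names come from the insert statement further down in this diff, and the path is the example config's default info-db location:

# Sketch: list what the watcher currently records for this repository.
import sqlite3

db = sqlite3.connect('/home/lilydjwg/tmpfs/test/pkginfo.db')
for name, ver, arch in db.execute(
    'select pkgname, pkgver, forarch from pkginfo'):
  print(name, ver, arch)
db.close()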
@@ -272,10 +241,12 @@ class EventHandler(pyinotify.ProcessEvent):
     oldfiles.update(f[0] for f in self._db.execute('select filename from sigfiles where pkgrepo = ?', (self.name,)))
     oldfiles = {os.path.join(self._db_dir, f) for f in oldfiles}
 
-    for f in sorted(filterfalse(filterPkg, files - oldfiles), key=pkgsortkey):
+    for f in sorted(filterfalse(self.filter_pkg, files - oldfiles),
+                    key=pkgsortkey):
       self.dispatch(f, 'add')
 
-    for f in sorted(filterfalse(filterPkg, oldfiles - files), key=pkgsortkey):
+    for f in sorted(filterfalse(self.filter_pkg, oldfiles - files),
+                    key=pkgsortkey):
       self.dispatch(f, 'remove')
 
   def process_IN_CLOSE_WRITE(self, event):
@@ -301,9 +272,31 @@ class EventHandler(pyinotify.ProcessEvent):
       logger.debug('Symlinked: %s', file)
       self.dispatch(file, 'add')
       self.files.add(file)
+    else:
+      logger.debug('Created: %s', file)
+      self.created[file] = self._ioloop.add_timeout(
+        self._ioloop.time() + 0.1,
+        partial(self.linked, file),
+      )
+
+  def process_IN_OPEN(self, event):
+    file = event.pathname
+    try:
+      timeout = self.created.pop(file)
+    except KeyError:
+      return
+
+    self._ioloop.remove_timeout(timeout)
+
+  def linked(self, file):
+    logger.debug('Linked: %s', file)
+    del self.created[file]
+    self.dispatch(file, 'add')
+    self.files.add(file)
 
   def movedOut(self, event):
     logger.debug('Moved away: %s', event.pathname)
+    del self.moved_away[event.cookie]
     self.dispatch(event.pathname, 'remove')
 
   def process_IN_MOVED_FROM(self, event):
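The IN_CREATE/IN_OPEN handling above is a small debounce: a newly created regular file gets 0.1 s of grace; if something opens it in that window (it is still being written), the pending timeout is cancelled and the usual IN_CLOSE_WRITE path takes over; otherwise linked() presumably treats the file as having appeared complete (e.g. via a hard link) and dispatches it. A sketch of the Tornado calls involved, with a placeholder callback:

# Sketch of the add_timeout/remove_timeout pattern used by process_IN_CREATE
# and process_IN_OPEN above.
from functools import partial
from tornado.ioloop import IOLoop

ioloop = IOLoop.current()
handle = ioloop.add_timeout(
  ioloop.time() + 0.1,                    # fire 0.1 s from now
  partial(print, 'nobody opened the file, treat it as complete'),
)
# ... and if the file is opened for writing in the meantime:
ioloop.remove_timeout(handle)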
@@ -319,7 +312,7 @@ class EventHandler(pyinotify.ProcessEvent):
     self.files.add(event.pathname)
 
     if event.cookie in self.moved_away:
-      self._ioloop.remove_timeout(self.moved_away[event.cookie])
+      self._ioloop.remove_timeout(self.moved_away.pop(event.cookie))
     else:
       logger.debug('Moved here: %s', event.pathname)
       self.dispatch(event.pathname, 'add')
@@ -348,7 +341,7 @@ class EventHandler(pyinotify.ProcessEvent):
       d = newd
 
     if self._symlink_any and act.arch == 'any':
-      for newarch in ('i686', 'x86_64', 'any'):
+      for newarch in self._supported_archs:
         if newarch == arch:
           # this file itself
           continue
@@ -357,6 +350,7 @@ class EventHandler(pyinotify.ProcessEvent):
         if action == 'add':
           oldpath = os.path.join('..', arch, file)
           if not same_existent_file(oldpath, newpath):
+            os.makedirs(newd, exist_ok=True)
             try:
               self.our_links.add(newpath)
               os.symlink(oldpath, newpath)
@@ -387,13 +381,7 @@ class EventHandler(pyinotify.ProcessEvent):
     except KeyError:
       owner = 'uid_%d' % stat.st_uid
 
-    try:
-      info = pkgreader.readpkg(act.path)
-    except:
-      logger.error('failed to read info for package %s', act.path, exc_info=True)
-      info = None
-    info = pickle.dumps(info)
+    info = None
 
     self._db.execute(
       '''insert or replace into pkginfo
         (filename, pkgrepo, pkgname, pkgarch, pkgver, forarch, state, owner, mtime, info) values
@@ -448,10 +436,70 @@ class EventHandler(pyinotify.ProcessEvent):
     self._db.execute('''delete from sigfiles where filename = ? and pkgrepo = ?''',
                      (rpath, self.name))
 
-def filterPkg(path):
+  def dec_running(self):
+    self._n_running -= 1
+    if self._n_running == 0:
+      self.send_notification()
+      self.update_lastupdate()
+
+  def inc_running(self):
+    self._n_running += 1
+
+  def send_notification_simple_udp(self):
+    msg = self._new_notification_msg()
+
+    socks = {}
+    for address, port in self._parse_notification_address_inet():
+      try:
+        af, socktype, proto, canonname, sockaddr = socket.getaddrinfo(
+          address, port, 0, socket.SOCK_DGRAM, 0, 0)[0]
+      except:
+        logger.exception('failed to create socket to %r for notification',
+                         (address, port))
+        continue
+
+      info = af, socktype, proto
+      if info not in socks:
+        sock = socket.socket(*info)
+        socks[info] = sock
+      else:
+        sock = socks[info]
+      sock.sendto(msg, sockaddr)
+      logger.info('simple udp notification sent to %s.', (address, port))
+
+  def _new_notification_msg(self):
+    s = 'update'
+    t = str(int(time.time()))
+    data = s + '|' + t
+    hashing = data + self._notification_secret
+    sig = hashlib.sha1(hashing.encode('utf-8')).hexdigest()
+    msg = data + '|' + sig
+    logger.info('new notification msg: %s.', msg)
+    return msg.encode('utf-8')
+
+  def _parse_notification_address_inet(self):
+    cached = self._notification_addrs
+    if isinstance(cached, str):
+      addresses = []
+      for addr in cached.split():
+        host, port = addr.rsplit(':', 1)
+        port = int(port)
+        addresses.append((host, port))
+      cached = self._notification_addrs = tuple(addresses)
+    return cached
+
+  def send_notification_null(self):
+    logger.info('null notification sent.')
+
+  def update_lastupdate(self):
+    t = '%d\n' % time.time()
+    with open(self._lastupdate_file, 'w') as f:
+      f.write(t)
+
+def filter_pkg(regex, path):
   if isinstance(path, Event):
     path = path.pathname
-  return not _pkgfile_pat.search(path)
+  return not regex.search(path)
 
 def pkgsortkey(path):
   pkg = archpkg.PkgNameInfo.parseFilename(os.path.split(path)[1])

@@ -459,16 +507,100 @@ def pkgsortkey(path):
 
 def repomon(config):
   wm = pyinotify.WatchManager()
-  ioloop = IOLoop.instance()
+
+  supported_archs = config.get('supported-archs', 'i686 x86_64').split()
+  if 'any' not in supported_archs:
+    supported_archs.append('any')
+  # assume none of the archs has regex meta characters
+  regex = re.compile(r'(?:^|/)[^.].*-[^-]+-[\d.]+-(?:' + '|'.join(supported_archs) + r')\.pkg\.tar\.(?:xz|zst)(?:\.sig)?$')
 
+  filter_func = partial(filter_pkg, regex)
   handler = EventHandler(
-    filterPkg,
-    config=config,
-    wm=wm,
-    ioloop=ioloop,
+    filter_func,
+    filter_pkg = filter_func,
+    supported_archs = supported_archs,
+    config = config,
+    wm = wm,
   )
-  return pyinotify.TornadoAsyncNotifier(
+  ioloop = IOLoop.current()
+  ret = [pyinotify.TornadoAsyncNotifier(
     wm,
-    ioloop,
     default_proc_fun=handler,
+    ioloop = ioloop,
+  )]
+
+  if config.get('spool-directory'):
+    wm = pyinotify.WatchManager()
+    handler = SpoolHandler(
+      filter_func,
+      filter_pkg = filter_func,
+      path = config.get('spool-directory'),
+      dstpath = os.path.join(config.get('path'), 'any'),
+      wm = wm,
+    )
+    ret.append(pyinotify.TornadoAsyncNotifier(
+      wm, default_proc_fun=handler,
+      ioloop = ioloop,
+    ))
+
+  return ret
+
+class SpoolHandler(pyinotify.ProcessEvent):
+  def my_init(self, filter_pkg, path, dstpath, wm):
+    self.filter_pkg = filter_pkg
+    self.dstpath = dstpath
+    self._ioloop = IOLoop.current()
+    self.created = {}
+
+    files = set()
+    for f in os.listdir(path):
+      p = os.path.join(path, f)
+      if os.path.exists(p): # filter broken symlinks
+        files.add(p)
+
+    wm.add_watch(path, pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE |
+                 pyinotify.IN_MOVED_TO | pyinotify.IN_OPEN)
+    self._initial_update(files)
+
+  def _initial_update(self, files):
+    for f in sorted(filterfalse(self.filter_pkg, files),
+                    key=pkgsortkey):
+      self.dispatch(f)
+
+  def process_IN_CLOSE_WRITE(self, event):
+    logger.debug('Writing done: %s', event.pathname)
+    self.dispatch(event.pathname)
+
+  def process_IN_CREATE(self, event):
+    file = event.pathname
+    if os.path.islink(file):
+      logger.debug('Symlinked: %s', file)
+      self.dispatch(file)
+    else:
+      logger.debug('Created: %s', file)
+      self.created[file] = self._ioloop.add_timeout(
+        self._ioloop.time() + 0.1,
+        partial(self.linked, file),
+      )
+
+  def process_IN_OPEN(self, event):
+    file = event.pathname
+    try:
+      timeout = self.created.pop(file)
+    except KeyError:
+      return
+
+    self._ioloop.remove_timeout(timeout)
+
+  def linked(self, file):
+    logger.debug('Linked: %s', file)
+    del self.created[file]
+    self.dispatch(file)
+
+  def process_IN_MOVED_TO(self, event):
+    logger.debug('Moved here: %s', event.pathname)
+    self.dispatch(event.pathname)
+
+  def dispatch(self, path):
+    filename = os.path.basename(path)
+    os.rename(path, os.path.join(self.dstpath, filename))
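For reference, the simple-udp notification that EventHandler now sends when its running counter drops to zero is "update|<unix time>|<sha1 hex>", where the digest covers the first two fields plus the notification-secret. A hedged receiver-side sketch, not part of the repo; the port and secret are placeholders:

# Sketch: verify an archrepo2 "simple-udp" notification on the listening side.
import hashlib
import socket

SECRET = 'same value as notification-secret'   # placeholder
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(('0.0.0.0', 9900))                    # placeholder port

while True:
  msg, addr = sock.recvfrom(4096)
  data, sig = msg.decode('utf-8').rsplit('|', 1)
  if hashlib.sha1((data + SECRET).encode('utf-8')).hexdigest() == sig:
    event, timestamp = data.split('|', 1)       # event is always 'update'
    print('repo updated at', timestamp, 'reported by', addr)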
@@ -19,6 +19,11 @@ name: archlinuxcn
 # files in this directory, remember to update the configuration of inotify.
 path: /home/lilydjwg/tmpfs/test
 
+# If enabled, packages put into this directory will be moved into the repo.
+# This path should be on the same filesystem as the repo path
+# Should be used with auto-rename on
+spool-directory: /home/lilydjwg/tmpfs/spool
+
 # A database to store package info. Default to ${path}/pkginfo.db
 #info-db: /home/lilydjwg/tmpfs/test/pkginfo.db

@@ -31,8 +36,12 @@ path: /home/lilydjwg/tmpfs/test
 # directory. Default is on.
 #auto-rename: on
 
+# What archs we support? The default is i686 and x86_64. And you can add more
+# like arm, armv6h, aarch64. Archs are separated by spaces.
+#supported-archs: i686 x86_64 arm
+
 # By enabling symlink-any, the server will automatically symlink the package
-# files of 'any' architecture to 'i686' and 'x86_64'
+# files of 'any' architecture to supported archs.
 # Default is on.
 #symlink-any: on

misc/archrepo2.service (new file, 10 lines added)

@@ -0,0 +1,10 @@
+[Unit]
+Description=archrepo2 service for archlinuxcn repo
+
+[Service]
+Type=simple
+ExecStart=/usr/bin/archreposrv /etc/archrepo2.ini
+Restart=on-failure
+
+[Install]
+WantedBy=multi-user.target
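The two new options above map directly onto repomon(): supported-archs feeds both the filename regex and the per-arch directories, and spool-directory turns on the SpoolHandler. A small sketch of how the arch list is derived, mirroring the code in repomon(); the config dict here stands in for the repository section of the ini file:

# Sketch: how repomon() turns the option into the arch list it watches.
config = {'supported-archs': 'i686 x86_64 arm'}

supported_archs = config.get('supported-archs', 'i686 x86_64').split()
if 'any' not in supported_archs:
  supported_archs.append('any')
print(supported_archs)   # ['i686', 'x86_64', 'arm', 'any']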
@@ -1,11 +0,0 @@
-#!/usr/bin/env python3
-# vim:fileencoding=utf-8
-
-from subprocess import getoutput
-allpkgs = getoutput(r"locate -be --regex '\.pkg\.tar\.xz$'").split('\n')
-
-from archrepo2.pkgreader import readpkg
-for p in allpkgs:
-  print('reading package:', p)
-  d = readpkg(p)
-  print('desc:', d.get('pkgdesc', '(nothing)'))
@@ -1,46 +0,0 @@
-#!/usr/bin/env python3
-# vim:fileencoding=utf-8
-
-import os, sys
-import sqlite3
-import configparser
-import pickle
-import logging
-
-from archrepo2.lib.nicelogger import enable_pretty_logging
-enable_pretty_logging(logging.DEBUG)
-
-import archrepo2.pkgreader
-from archrepo2.dbutil import *
-
-def main(conffile):
-  config = configparser.ConfigParser()
-  config.read(conffile)
-  config = config['repository']
-
-  base = config.get('path')
-  dbname = config.get('info-db', os.path.join(base, 'pkginfo.db'))
-  db = sqlite3.connect(dbname, isolation_level=None)
-  assert getver(db) == '0.1', 'wrong database version'
-  input('Please stop the service and then press Enter.')
-  try:
-    db.execute('alter table pkginfo add info blob')
-  except sqlite3.OperationalError:
-    # the column is already there
-    pass
-  pkgs = [x[0] for x in db.execute('select filename from pkginfo')]
-  for p in pkgs:
-    try:
-      info = pkgreader.readpkg(p)
-    except:
-      logging.error('failed to read info for package %s', act.path)
-      info = None
-    info = pickle.dumps(info)
-    db.execute('update pkginfo set info=?', (info,))
-  setver(db, '0.2')
-  db.close()
-
-  input('Please re-start the service with new code and then press Enter.')
-
-if __name__ == '__main__':
-  main(sys.argv[1])

setup.py (2 changed lines)

@@ -7,7 +7,7 @@ setup(
   name = 'archrepo2',
   version = archrepo2.__version__,
   packages = find_packages(),
-  install_requires = ['tornado>2.4.1', 'pyinotify', 'setuptools'],
+  install_requires = ['tornado>2.4.1', 'pyinotify', 'pyalpm'],
   entry_points = {
     'console_scripts': [
       'archreposrv = archrepo2.archreposrv:main',