Compare commits

..

No commits in common. "master" and "v0.3.1" have entirely different histories.

17 changed files with 303 additions and 635 deletions

View file

@ -12,10 +12,9 @@ DEPENDENCIES
============
- Python, >= 3.3, with sqlite support
- setuptools
- tornado, > 3.1
- distribute
- tornado, > 2.4.1
- pyinotify, tested with 0.9.4
- pyalpm, tested with 0.10.6
NOTE
====
@ -29,6 +28,7 @@ TODO
- [high] adding a package and then removing it before the add completes will result
in a not-in-database removal
- [middle] specify what architectures we have and don't require others
- [low] fork to background
- [low] use one common command queue (now one each repo)
- [low] verify packages

View file

@ -19,11 +19,6 @@ name: archlinuxcn
# files in this directory, remember to update the configuration of inotify.
path: /home/lilydjwg/tmpfs/test
# If enabled, packages put into this directory will be moved into the repo.
# This path should be on the same filesystem as the repo path
# Should be used with auto-rename on
spool-directory: /home/lilydjwg/tmpfs/spool
# A database to store package info. Default to ${path}/pkginfo.db
#info-db: /home/lilydjwg/tmpfs/test/pkginfo.db
@ -36,12 +31,8 @@ spool-directory: /home/lilydjwg/tmpfs/spool
# directory. Default is on.
#auto-rename: on
# Which archs do we support? The default is i686 and x86_64. You can add more
# like arm, armv6h, aarch64. Archs are separated by spaces.
#supported-archs: i686 x86_64 arm
# By enabling symlink-any, the server will automatically symlink the package
# files of 'any' architecture to supported archs.
# files of 'any' architecture to 'i686' and 'x86_64'
# Default is on.
#symlink-any: on
@ -51,7 +42,7 @@ wait-time: 3
# Notification type to use when done. Currently available: simple-udp, null
notification-type: simple-udp
notification-addresses: 127.0.0.1:9900 ::1:9900
notification-address: 127.0.0.1:9900
notification-secret: JiUHuGY987G76djItfOskOj
# If for any reason, you don't want actual database creation or update:

View file

@ -1 +1,3 @@
__version__ = '0.6dev'
#!/usr/bin/env python3
__version__ = '0.3.1'

View file

@ -33,21 +33,17 @@ def main():
config.read(conffile)
repos = check_and_get_repos(config)
notifiers = []
for repo in repos:
notifiers.extend(repomon(config[repo]))
notifiers = [repomon(config[repo]) for repo in repos]
ioloop = IOLoop.current()
ioloop = IOLoop.instance()
logger.info('starting archreposrv.')
try:
ioloop.start()
except KeyboardInterrupt:
ioloop.close()
for notifier in notifiers:
notifier.stop()
ioloop.close()
print()
if __name__ == '__main__':
if sys.version_info[:2] < (3, 3):
raise OSError('Python 3.3+ required.')
main()

View file

@ -1,81 +1,36 @@
from __future__ import annotations
import os
from collections import namedtuple
import subprocess
import re
from typing import List, Dict
from collections import defaultdict, namedtuple
import pyalpm
from pkg_resources import parse_version
class PkgNameInfo(namedtuple('PkgNameInfo', 'name, version, release, arch')):
def __lt__(self, other) -> bool:
def __lt__(self, other):
if self.name != other.name or self.arch != other.arch:
return NotImplemented
if self.version != other.version:
return pyalpm.vercmp(self.version, other.version) < 0
return float(self.release) < float(other.release)
return parse_version(self.version) < parse_version(other.version)
return int(self.release) < int(other.release)
def __gt__(self, other) -> bool:
def __gt__(self, other):
# No, try the other side please.
return NotImplemented
@property
def fullversion(self) -> str:
def fullversion(self):
return '%s-%s' % (self.version, self.release)
@classmethod
def parseFilename(cls, filename: str) -> 'PkgNameInfo':
def parseFilename(cls, filename):
return cls(*trimext(filename, 3).rsplit('-', 3))
def trimext(name: str, num: int = 1) -> str:
def trimext(name, num=1):
for i in range(num):
name = os.path.splitext(name)[0]
return name
def get_pkgname_with_bash(PKGBUILD: str) -> List[str]:
script = '''\
. '%s'
echo ${pkgname[*]}''' % PKGBUILD
# Python 3.4 has 'input' arg for check_output
p = subprocess.Popen(
['bwrap', '--unshare-all', '--ro-bind', '/', '/', '--tmpfs', '/home',
'--tmpfs', '/run', '--die-with-parent',
'--tmpfs', '/tmp', '--proc', '/proc', '--dev', '/dev', '/bin/bash'],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
)
output = p.communicate(script.encode())[0].decode()
ret = p.wait()
if ret != 0:
raise subprocess.CalledProcessError(
ret, ['bash'], output)
return output.split()
pkgfile_pat = re.compile(r'(?:^|/).+-[^-]+-[\d.]+-(?:\w+)\.pkg\.tar\.(?:xz|zst)$')
def _strip_ver(s: str) -> str:
return re.sub(r'[<>=].*', '', s)
def get_package_info(name: str, local: bool = False) -> Dict[str, str]:
old_lang = os.environ['LANG']
os.environ['LANG'] = 'C'
args = '-Qi' if local else '-Si'
try:
outb = subprocess.check_output(["pacman", args, name])
out = outb.decode('latin1')
finally:
os.environ['LANG'] = old_lang
ret = {}
for l in out.splitlines():
if not l:
continue
if l[0] not in ' \t':
key, value = l.split(':', 1)
key = key.strip()
value = value.strip()
ret[key] = value
else:
ret[key] += ' ' + l.strip()
return ret
def finddups(pkgs, n=1):
ret = defaultdict(list)
for f in pkgs:
name, ver, build, arch = PkgNameInfo.parseFilename(os.path.split(f)[1])
ret[name].append('%s-%s' % (ver, build))
return {k: sorted(v) for k, v in ret.items() if len(v) > n}

View file

@ -1,23 +1,15 @@
'''
A Tornado-inspired logging formatter, with displayed time with millisecond accuracy
FYI: pyftpdlib also has a Tornado-style logger.
'''
from __future__ import annotations
import sys
import time
import logging
class TornadoLogFormatter(logging.Formatter):
def __init__(self, color, *args, **kwargs):
super().__init__(*args, **kwargs)
super().__init__(self, *args, **kwargs)
self._color = color
if color:
import curses
curses.setupterm()
if sys.hexversion < 0x30203f0:
if sys.hexversion < 50463728:
fg_color = str(curses.tigetstr("setaf") or
curses.tigetstr("setf") or "", "ascii")
else:
@ -31,8 +23,6 @@ class TornadoLogFormatter(logging.Formatter):
"ascii"),
logging.ERROR: str(curses.tparm(fg_color, 1), # Red
"ascii"),
logging.CRITICAL: str(curses.tparm(fg_color, 9), # Bright Red
"ascii"),
}
self._normal = str(curses.tigetstr("sgr0"), "ascii")
@ -43,23 +33,13 @@ class TornadoLogFormatter(logging.Formatter):
record.message = "Bad message (%r): %r" % (e, record.__dict__)
record.asctime = time.strftime(
"%m-%d %H:%M:%S", self.converter(record.created))
prefix = '[%(levelname)1.1s %(asctime)s.%(msecs)03d %(module)s:%(lineno)d]' % \
record.asctime += '.%03d' % ((record.created % 1) * 1000)
prefix = '[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]' % \
record.__dict__
if self._color:
prefix = (self._colors.get(record.levelno, self._normal) +
prefix + self._normal)
formatted = prefix + " " + record.message
formatted += ''.join(
' %s=%s' % (k, v) for k, v in record.__dict__.items()
if k not in {
'levelname', 'asctime', 'module', 'lineno', 'args', 'message',
'filename', 'exc_info', 'exc_text', 'created', 'funcName',
'processName', 'process', 'msecs', 'relativeCreated', 'thread',
'threadName', 'name', 'levelno', 'msg', 'pathname', 'stack_info',
'taskName',
})
if record.exc_info:
if not record.exc_text:
record.exc_text = self.formatException(record.exc_info)
@ -67,30 +47,23 @@ class TornadoLogFormatter(logging.Formatter):
formatted = formatted.rstrip() + "\n" + record.exc_text
return formatted.replace("\n", "\n ")
def enable_pretty_logging(level=logging.DEBUG, handler=None, color=None):
'''
handler: specify a handler instead of default StreamHandler
color: boolean, force color to be on / off. Default to be on only when
``handler`` isn't specified and the term supports color
'''
def enable_pretty_logging(level=logging.DEBUG):
logger = logging.getLogger()
if handler is None:
h = logging.StreamHandler()
else:
h = handler
if color is None:
h = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s:%(levelname)-7s:%(name)-12s:%(message)s')
try:
import curses
color = False
if handler is None and sys.stderr.isatty():
try:
import curses
curses.setupterm()
if curses.tigetnum("colors") > 0:
color = True
except:
import traceback
traceback.print_exc()
formatter = TornadoLogFormatter(color=color)
h.setLevel(level)
h.setFormatter(formatter)
logger.setLevel(level)
logger.addHandler(h)
curses.setupterm()
if curses.tigetnum("colors") > 0:
color = True
formatter = TornadoLogFormatter(color=color)
except:
import traceback
traceback.print_exc()
finally:
h.setLevel(level)
h.setFormatter(formatter)
logger.setLevel(level)
logger.addHandler(h)

44
archrepo2/pkgreader.py Normal file
View file

@ -0,0 +1,44 @@
import tarfile
import logging
logger = logging.getLogger(__name__)
multikeys = {'depend', 'makepkgopt', 'optdepend', 'replaces', 'conflict',
'provides', 'license', 'backup', 'group', 'makedepend', 'checkdepend'}
def _add_to_dict(d, key, value):
if key in multikeys:
if key in d:
d[key].append(value)
else:
d[key] = [value]
else:
assert key not in d, 'unexpected multi-value key "%s"' % key
d[key] = value
def readpkg(file):
tar = tarfile.open(file)
info = tar.next()
if not info or info.name != '.PKGINFO':
logger.warn('%s is not a nice package!', file)
info = '.PKGINFO' # have to look further
f = tar.extractfile(info)
data = f.read().decode()
tar.close()
d = {}
key = None
for l in data.split('\n'):
if l.startswith('#'):
continue
if not l:
continue
if '=' not in l:
value += l
else:
if key is not None:
_add_to_dict(d, key, value)
key, value = l.split(' = ', 1)
_add_to_dict(d, key, value)
return d

View file

@ -1,9 +1,9 @@
#!/usr/bin/env python3
# vim:fileencoding=utf-8
import os
import re
import pwd
import stat
from functools import partial
from itertools import filterfalse
import queue
@ -12,7 +12,7 @@ import sqlite3
import socket
import time
import hashlib
from os.path import relpath
import pickle
import pyinotify
Event = pyinotify.Event
@ -20,15 +20,13 @@ from tornado.ioloop import IOLoop
import tornado.process
from .lib import archpkg
from . import pkgreader
from . import dbutil
logger = logging.getLogger(__name__)
def same_existent_file(a, b):
try:
return os.path.samefile(a, b)
except OSError:
return False
# handles only x86_64, i686 and any arch packages
_pkgfile_pat = re.compile(r'(?:^|/)[a-z0-9_-]+-[^-]+-\d+-(?:x86_64|i686|any)\.pkg\.tar\.xz(?:\.sig)?$')
class ActionInfo(archpkg.PkgNameInfo):
def __new__(cls, path, action, four=None, five=None, pkgpath=None):
@ -48,11 +46,10 @@ class RepoMan:
_cmd_queue = queue.Queue()
_cmd_running = False
def __init__(self, config, base, siteman):
def __init__(self, config, base, ioloop=None):
self.action = []
self._ioloop = IOLoop.current()
self._ioloop = ioloop or IOLoop.instance()
self._base = base
self._siteman = siteman
self._repo_dir = config.get('path')
self.name = config.get('name')
@ -65,32 +62,36 @@ class RepoMan:
self._auto_rename = config.getboolean('auto-rename', True)
self._symlink_any = config.getboolean('symlink-any', True)
notification_type = config.get('notification-type', 'null')
if notification_type != 'null':
self._notification_addr = config.get('notification-address')
self._notification_secret = config.get('notification-secret')
self.send_notification = getattr(
self,
'send_notification_' + notification_type.replace('-', '_'),
)
def queue_command(self, cmd, callbacks=None):
self._cmd_queue.put((cmd, callbacks))
if not self._cmd_running:
self.run_command()
def run_command(self):
if not self._cmd_running:
self._siteman.inc_running()
self.__class__._cmd_running = True
try:
cmd, callbacks = self._cmd_queue.get_nowait()
except queue.Empty:
self.send_notification()
self.__class__._cmd_running = False
self._siteman.dec_running()
return
logger.info('Running cmd: %r', cmd)
# no longer have to specify io_loop in Tornado > 3.1. Let's drop them for
# Tornado >= 5
# have to specify io_loop or we'll get error tracebacks
try:
p = tornado.process.Subprocess(cmd)
p = tornado.process.Subprocess(cmd, io_loop=self._ioloop)
p.set_exit_callback(partial(self.command_done, callbacks))
except OSError:
logger.error('failed to run command.', exc_info=True)
self.run_command()
else:
p.set_exit_callback(partial(self.command_done, callbacks))
def command_done(self, callbacks, status):
if status == 0:
@ -105,7 +106,10 @@ class RepoMan:
def _do_cmd(self, cmd, items, callbacks):
cmd1 = [cmd, self._db_file]
cmd1.extend(items)
self.queue_command(cmd1, callbacks)
cmd2 = [cmd, '-f', self._files_name]
cmd2.extend(items)
self.queue_command(cmd1)
self.queue_command(cmd2, callbacks)
def _do_add(self, toadd):
if toadd:
@ -140,6 +144,41 @@ class RepoMan:
self.run,
)
def send_notification_simple_udp(self):
address, port = self._parse_notification_address_inet()
try:
af, socktype, proto, canonname, sockaddr = socket.getaddrinfo(
address, port, 0, socket.SOCK_DGRAM, 0, 0)[0]
except:
logger.error('failed to create socket for notification', exc_info=True)
return
sock = socket.socket(af, socktype, proto)
msg = self._new_notification_msg()
sock.sendto(msg, sockaddr)
sock.close()
logger.info('simple udp notification sent.')
def _new_notification_msg(self):
s = 'update'
t = str(int(time.time()))
part1 = s + '|' + t
part2 = hashlib.sha1(part1.encode('utf-8')).hexdigest()
msg = part1 + '|' + part2
logger.info('new notification msg: %s.', msg)
return msg.encode('utf-8')
def _parse_notification_address_inet(self):
cached = self._notification_addr
if isinstance(cached, str):
host, port = cached.rsplit(':', 1)
port = int(port)
cached = self._notification_addr = (host, port)
return cached
def send_notification_null(self):
logger.info('null notification sent.')
def run(self):
self._timeout = None
actions = self.action
@ -150,58 +189,32 @@ class RepoMan:
actiondict[act.name] = act
else:
oldact = actiondict[act.name]
if oldact != act:
# different packages, do the latter, but record the former
try:
actiondict[act.name].callback(state=0)
except:
logger.exception('failed to run action %r.', actiondict[act.name])
# same package, do the latter, and discard the former
actiondict[act.name] = act
if oldact == act and oldact.action != act.action:
# same package, opposite actions, do nothing
del actiondict[act.name]
else:
# take the later action
actiondict[act.name] = act
toadd = [(x.path, x.callback) for x in actiondict.values() if x.action == 'add']
toremove = [(x.name, x.callback) for x in actiondict.values() if x.action == 'remove']
self._do_add(toadd)
self._do_remove(toremove)
class EventHandler(pyinotify.ProcessEvent):
_n_running = 0
def my_init(
self, filter_pkg, supported_archs, config, wm,
):
notification_type = config.get(
'notification-type', 'null')
if notification_type != 'null':
self._notification_addrs = config.get(
'notification-addresses')
self._notification_secret = config.get(
'notification-secret')
self.send_notification = getattr(
self,
'send_notification_' +
notification_type.replace('-', '_'),
)
self.filter_pkg = filter_pkg
def my_init(self, config, wm, ioloop=None):
self.moved_away = {}
self.created = {}
self.repomans = {}
# TODO: use an expiring dict
self.our_links = set()
self._ioloop = IOLoop.current()
self._ioloop = ioloop or IOLoop.instance()
base = config.get('path')
self._lastupdate_file = os.path.join(base, 'lastupdate')
dbname = config.get('info-db', os.path.join(base, 'pkginfo.db'))
new_db = not os.path.exists(dbname)
self._db = sqlite3.connect(dbname, isolation_level=None) # isolation_level=None means autocommit
self._db_dir = os.path.dirname(dbname)
if new_db:
dbutil.setver(self._db, '0.4')
dbutil.setver(self._db, '0.3')
else:
assert dbutil.getver(self._db) == '0.4', 'wrong database version, please upgrade (see scripts directory)'
assert dbutil.getver(self._db) == '0.3', 'wrong database version, please upgrade (see scripts directory)'
self._db.execute('''create table if not exists pkginfo
(filename text unique,
pkgrepo text,
@ -217,19 +230,12 @@ class EventHandler(pyinotify.ProcessEvent):
(filename text unique,
pkgrepo text)''')
self._supported_archs = supported_archs
dirs = [os.path.join(base, x) for x in self._supported_archs]
dirs = [os.path.join(base, x) for x in ('any', 'i686', 'x86_64')]
self.files = files = set()
for d in dirs:
os.makedirs(d, exist_ok=True)
for f in os.listdir(d):
p = os.path.join(d, f)
if os.path.exists(p): # filter broken symlinks
files.add(p)
wm.add_watch(d, pyinotify.IN_CLOSE_WRITE | pyinotify.IN_DELETE |
pyinotify.IN_CREATE | pyinotify.IN_MOVED_FROM |
pyinotify.IN_MOVED_TO | pyinotify.IN_OPEN)
self.repomans[d] = RepoMan(config, d, self)
files.update(os.path.join(d, f) for f in os.listdir(d))
wm.add_watch(d, pyinotify.ALL_EVENTS)
self.repomans[d] = RepoMan(config, d, self._ioloop)
self.name = self.repomans[d].name
self._auto_rename = self.repomans[d]._auto_rename
self._symlink_any = self.repomans[d]._symlink_any
@ -239,14 +245,11 @@ class EventHandler(pyinotify.ProcessEvent):
def _initial_update(self, files):
oldfiles = {f[0] for f in self._db.execute('select filename from pkginfo where pkgrepo = ?', (self.name,))}
oldfiles.update(f[0] for f in self._db.execute('select filename from sigfiles where pkgrepo = ?', (self.name,)))
oldfiles = {os.path.join(self._db_dir, f) for f in oldfiles}
for f in sorted(filterfalse(self.filter_pkg, files - oldfiles),
key=pkgsortkey):
for f in sorted(filterfalse(filterPkg, files - oldfiles), key=pkgsortkey):
self.dispatch(f, 'add')
for f in sorted(filterfalse(self.filter_pkg, oldfiles - files),
key=pkgsortkey):
for f in sorted(filterfalse(filterPkg, oldfiles - files), key=pkgsortkey):
self.dispatch(f, 'remove')
def process_IN_CLOSE_WRITE(self, event):
@ -272,31 +275,9 @@ class EventHandler(pyinotify.ProcessEvent):
logger.debug('Symlinked: %s', file)
self.dispatch(file, 'add')
self.files.add(file)
else:
logger.debug('Created: %s', file)
self.created[file] = self._ioloop.add_timeout(
self._ioloop.time() + 0.1,
partial(self.linked, file),
)
def process_IN_OPEN(self, event):
file = event.pathname
try:
timeout = self.created.pop(file)
except KeyError:
return
self._ioloop.remove_timeout(timeout)
def linked(self, file):
logger.debug('Linked: %s', file)
del self.created[file]
self.dispatch(file, 'add')
self.files.add(file)
def movedOut(self, event):
logger.debug('Moved away: %s', event.pathname)
del self.moved_away[event.cookie]
self.dispatch(event.pathname, 'remove')
def process_IN_MOVED_FROM(self, event):
@ -312,7 +293,7 @@ class EventHandler(pyinotify.ProcessEvent):
self.files.add(event.pathname)
if event.cookie in self.moved_away:
self._ioloop.remove_timeout(self.moved_away.pop(event.cookie))
self._ioloop.remove_timeout(self.moved_away[event.cookie])
else:
logger.debug('Moved here: %s', event.pathname)
self.dispatch(event.pathname, 'add')
@ -320,7 +301,7 @@ class EventHandler(pyinotify.ProcessEvent):
def dispatch(self, path, action):
if path.endswith('.sig'):
act = ActionInfo(path, action, pkgpath=path[:-4])
callback = self._signature_changed
callback = self._record_signatures
else:
act = ActionInfo(path, action)
callback = self._real_dispatch
@ -332,31 +313,27 @@ class EventHandler(pyinotify.ProcessEvent):
if self._auto_rename and action == 'add' and act.arch != arch:
newd = os.path.join(base, act.arch)
newpath = os.path.join(newd, file)
if not same_existent_file(path, newpath):
os.rename(path, newpath)
os.rename(path, newpath)
act.path = newpath
path = newpath
arch = act.arch
d = newd
act.path = newpath
path = newpath
arch = act.arch
d = newd
if self._symlink_any and act.arch == 'any':
for newarch in self._supported_archs:
for newarch in ('i686', 'x86_64', 'any'):
if newarch == arch:
# this file itself
continue
newd = os.path.join(base, newarch)
newpath = os.path.join(newd, file)
if action == 'add':
oldpath = os.path.join('..', arch, file)
if not same_existent_file(oldpath, newpath):
os.makedirs(newd, exist_ok=True)
try:
self.our_links.add(newpath)
os.symlink(oldpath, newpath)
except FileExistsError:
pass
callback(newd, ActionInfo(newpath, action))
try:
self.our_links.add(newpath)
os.symlink(os.path.join('..', arch, file), newpath)
except FileExistsError:
pass
callback(newd, ActionInfo(newpath, action))
else:
try:
os.unlink(newpath)
@ -370,136 +347,63 @@ class EventHandler(pyinotify.ProcessEvent):
def _real_dispatch(self, d, act):
if act.action == 'add':
arch = os.path.split(d)[1]
def callback(stat, state=1):
def callback():
self._db.execute(
'update pkginfo set state = 0 where pkgname = ? and forarch = ? and pkgrepo = ?',
(act.name, arch, self.name)
)
stat = os.stat(act.path)
mtime = int(stat.st_mtime)
try:
owner = pwd.getpwuid(stat.st_uid).pw_name
except KeyError:
owner = 'uid_%d' % stat.st_uid
info = None
try:
info = pkgreader.readpkg(act.path)
except:
logger.error('failed to read info for package %s', act.path, exc_info=True)
info = None
info = pickle.dumps(info)
self._db.execute(
'''insert or replace into pkginfo
(filename, pkgrepo, pkgname, pkgarch, pkgver, forarch, state, owner, mtime, info) values
(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)''',
(relpath(act.path, start=self._db_dir),
self.name, act.name, act.arch, act.fullversion, arch, state, owner, mtime, info))
logger.info('Action %r done.', act)
(act.path, self.name, act.name, act.arch, act.fullversion, arch, 1, owner, mtime, info))
# stat path here, so that it is more unlikely to have disappeared since
callback = partial(callback, os.stat(act.path))
else:
rpath = relpath(act.path, start=self._db_dir)
res = self._db.execute(
'select state from pkginfo where filename = ? and state = 1 and pkgrepo = ? limit 1',
(rpath, self.name)
(act.path, self.name)
)
if tuple(res) == ():
# the file isn't in repo database, just delete from our info database
logger.debug('deleting entry for not-in-database package: %s', rpath)
self._db.execute('delete from pkginfo where filename = ? and pkgrepo = ?', (rpath, self.name))
logger.debug('deleting entry for not-in-database package: %s', act.path)
self._db.execute('delete from pkginfo where filename = ? and pkgrepo = ?', (act.path, self.name))
return
def callback(state=any):
'''``state`` is not used'''
self._db.execute('delete from pkginfo where filename = ? and pkgrepo = ?', (rpath, self.name))
def callback():
self._db.execute('delete from pkginfo where filename = ? and pkgrepo = ?', (act.path, self.name))
act.callback = callback
self.repomans[d].add_action(act)
def _signature_changed(self, d, action):
def _record_signatures(self, d, action):
path = action.path
action = action.action
logger.info('%s signature %s.', action, path)
# Touch the package file so that we'll repo-add it again to include the
# sig change later.
pkg = path[:-4]
try:
st = os.lstat(pkg)
if stat.S_ISREG(st.st_mode):
logger.info('touching %s.', pkg)
os.close(os.open(pkg, os.O_WRONLY))
os.utime(pkg)
except FileNotFoundError:
pass
rpath = relpath(path, start=self._db_dir)
if action == 'add':
self._db.execute('''insert or replace into sigfiles
(filename, pkgrepo) values (?, ?)''',
(rpath, self.name))
(path, self.name))
else:
self._db.execute('''delete from sigfiles where filename = ? and pkgrepo = ?''',
(rpath, self.name))
(path, self.name))
def dec_running(self):
self._n_running -= 1
if self._n_running == 0:
self.send_notification()
self.update_lastupdate()
def inc_running(self):
self._n_running += 1
def send_notification_simple_udp(self):
msg = self._new_notification_msg()
socks = {}
for address, port in self._parse_notification_address_inet():
try:
af, socktype, proto, canonname, sockaddr = socket.getaddrinfo(
address, port, 0, socket.SOCK_DGRAM, 0, 0)[0]
except:
logger.exception('failed to create socket to %r for notification',
(address, port))
continue
info = af, socktype, proto
if info not in socks:
sock = socket.socket(*info)
socks[info] = sock
else:
sock = socks[info]
sock.sendto(msg, sockaddr)
logger.info('simple udp notification sent to %s.', (address, port))
def _new_notification_msg(self):
s = 'update'
t = str(int(time.time()))
data = s + '|' + t
hashing = data + self._notification_secret
sig = hashlib.sha1(hashing.encode('utf-8')).hexdigest()
msg = data + '|' + sig
logger.info('new notification msg: %s.', msg)
return msg.encode('utf-8')
def _parse_notification_address_inet(self):
cached = self._notification_addrs
if isinstance(cached, str):
addresses = []
for addr in cached.split():
host, port = addr.rsplit(':', 1)
port = int(port)
addresses.append((host, port))
cached = self._notification_addrs = tuple(addresses)
return cached
def send_notification_null(self):
logger.info('null notification sent.')
def update_lastupdate(self):
t = '%d\n' % time.time()
with open(self._lastupdate_file, 'w') as f:
f.write(t)
def filter_pkg(regex, path):
def filterPkg(path):
if isinstance(path, Event):
path = path.pathname
return not regex.search(path)
return not _pkgfile_pat.search(path)
def pkgsortkey(path):
pkg = archpkg.PkgNameInfo.parseFilename(os.path.split(path)[1])
@ -507,100 +411,16 @@ def pkgsortkey(path):
def repomon(config):
wm = pyinotify.WatchManager()
ioloop = IOLoop.instance()
supported_archs = config.get('supported-archs', 'i686 x86_64').split()
if 'any' not in supported_archs:
supported_archs.append('any')
# assume none of the archs has regex meta characters
regex = re.compile(r'(?:^|/)[^.].*-[^-]+-[\d.]+-(?:' + '|'.join(supported_archs) + r')\.pkg\.tar\.(?:xz|zst)(?:\.sig)?$')
filter_func = partial(filter_pkg, regex)
handler = EventHandler(
filter_func,
filter_pkg = filter_func,
supported_archs = supported_archs,
config = config,
wm = wm,
filterPkg,
config=config,
wm=wm,
ioloop=ioloop,
)
ioloop = IOLoop.current()
ret = [pyinotify.TornadoAsyncNotifier(
return pyinotify.TornadoAsyncNotifier(
wm,
ioloop,
default_proc_fun=handler,
ioloop = ioloop,
)]
if config.get('spool-directory'):
wm = pyinotify.WatchManager()
handler = SpoolHandler(
filter_func,
filter_pkg = filter_func,
path = config.get('spool-directory'),
dstpath = os.path.join(config.get('path'), 'any'),
wm = wm,
)
ret.append(pyinotify.TornadoAsyncNotifier(
wm, default_proc_fun=handler,
ioloop = ioloop,
))
return ret
class SpoolHandler(pyinotify.ProcessEvent):
def my_init(self, filter_pkg, path, dstpath, wm):
self.filter_pkg = filter_pkg
self.dstpath = dstpath
self._ioloop = IOLoop.current()
self.created = {}
files = set()
for f in os.listdir(path):
p = os.path.join(path, f)
if os.path.exists(p): # filter broken symlinks
files.add(p)
wm.add_watch(path, pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE |
pyinotify.IN_MOVED_TO | pyinotify.IN_OPEN)
self._initial_update(files)
def _initial_update(self, files):
for f in sorted(filterfalse(self.filter_pkg, files),
key=pkgsortkey):
self.dispatch(f)
def process_IN_CLOSE_WRITE(self, event):
logger.debug('Writing done: %s', event.pathname)
self.dispatch(event.pathname)
def process_IN_CREATE(self, event):
file = event.pathname
if os.path.islink(file):
logger.debug('Symlinked: %s', file)
self.dispatch(file)
else:
logger.debug('Created: %s', file)
self.created[file] = self._ioloop.add_timeout(
self._ioloop.time() + 0.1,
partial(self.linked, file),
)
def process_IN_OPEN(self, event):
file = event.pathname
try:
timeout = self.created.pop(file)
except KeyError:
return
self._ioloop.remove_timeout(timeout)
def linked(self, file):
logger.debug('Linked: %s', file)
del self.created[file]
self.dispatch(file)
def process_IN_MOVED_TO(self, event):
logger.debug('Moved here: %s', event.pathname)
self.dispatch(event.pathname)
def dispatch(self, path):
filename = os.path.basename(path)
os.rename(path, os.path.join(self.dstpath, filename))
)

View file

@ -1,10 +0,0 @@
[Unit]
Description=archrepo2 service for archlinuxcn repo
[Service]
Type=simple
ExecStart=/usr/bin/archreposrv /etc/archrepo2.ini
Restart=on-failure
[Install]
WantedBy=multi-user.target

View file

@ -1,92 +0,0 @@
#!/usr/bin/env python3
import argparse
import hashlib
import logging
import os
import select
import socket
import subprocess
import sys
import time
from nicelogger import enable_pretty_logging
def run_command(command):
logging.info('running command %r', command)
try:
subprocess.check_call(command, shell=True)
except:
logging.exception('failed to run command %r', command)
def decode_msg(msg, secret):
act, t, sig = msg.split('|')
hashing = act + '|' + t + secret
mysig = hashlib.sha1(hashing.encode('utf-8')).hexdigest()
if mysig != sig:
raise ValueError('signature mismatch')
return act, int(t)
def main(args, secret):
af, socktype, proto, canonname, sockaddr = socket.getaddrinfo(
args.host, args.port, 0, socket.SOCK_DGRAM, 0, 0)[0]
sock = socket.socket(af, socktype, proto)
sock.bind((args.host, args.port))
last_run = 0
while True:
r, w, e = select.select([sock], [], [], args.timeout)
if r:
msg, remote = sock.recvfrom(4096)
try:
msg = msg.decode('utf-8')
act, t = decode_msg(msg, secret)
now = time.time()
if not (act == 'update' and abs(t - now) < args.threshold):
logging.warn('skipping unknown or expired msg %r from %r...',
msg, remote)
continue
if abs(now - last_run) < args.repeat_window:
logging.warn('refuse to run too frequently. last run: %r. msg %r from %r...',
time.ctime(last_run), msg, remote)
continue
last_run = now
run_command(args.command)
except:
logging.exception('error occurred, skipping msg %r from %r...',
msg, remote)
else:
run_command(args.command)
if __name__ == '__main__':
enable_pretty_logging('INFO')
parser = argparse.ArgumentParser(
description='run command on archrepo2 update notification',
add_help=False,
)
parser.add_argument('-h', '--host', default='0.0.0.0',
help='host to bind to. default: IPv4 wild address')
parser.add_argument('-p', '--port', type=int, required=True,
help='port to wait on')
parser.add_argument('-t', '--timeout', type=float,
help='timeout for waiting. will run command')
parser.add_argument('-r', '--threshold', type=int, default=60,
help='time threshold for message timestamp. default: 60')
parser.add_argument('-w', '--repeat-window', metavar='SECS', type=int, default=60,
help="don't repeat within this amount of seconds. default: 60")
parser.add_argument('--help', action='help',
help='show this help message and exit')
parser.add_argument('command',
help='command to run')
args = parser.parse_args()
secret = os.environ.get('REPO_SECRET', '')
if not secret:
logging.fatal('REPO_SECRET environment variable not set')
sys.exit(1)
logging.info('started')
try:
main(args, secret)
except KeyboardInterrupt:
logging.info('stopped')

11
scripts/test_readpkg.py Executable file
View file

@ -0,0 +1,11 @@
#!/usr/bin/env python3
# vim:fileencoding=utf-8
from subprocess import getoutput
allpkgs = getoutput(r"locate -be --regex '\.pkg\.tar\.xz$'").split('\n')
from archrepo2.pkgreader import readpkg
for p in allpkgs:
print('reading package:', p)
d = readpkg(p)
print('desc:', d.get('pkgdesc', '(nothing)'))

View file

@ -1,9 +0,0 @@
#!/bin/bash -e
for f; do
p=${f%.sig}
if [[ -f $p && $f -nt $p ]]; then
echo "touching $p."
touch "$p"
fi
done

View file

@ -0,0 +1,46 @@
#!/usr/bin/env python3
# vim:fileencoding=utf-8
import os, sys
import sqlite3
import configparser
import pickle
import logging
from archrepo2.lib.nicelogger import enable_pretty_logging
enable_pretty_logging(logging.DEBUG)
import archrepo2.pkgreader
from archrepo2.dbutil import *
def main(conffile):
config = configparser.ConfigParser()
config.read(conffile)
config = config['repository']
base = config.get('path')
dbname = config.get('info-db', os.path.join(base, 'pkginfo.db'))
db = sqlite3.connect(dbname, isolation_level=None)
assert getver(db) == '0.1', 'wrong database version'
input('Please stop the service and then press Enter.')
try:
db.execute('alter table pkginfo add info blob')
except sqlite3.OperationalError:
# the column is already there
pass
pkgs = [x[0] for x in db.execute('select filename from pkginfo')]
for p in pkgs:
try:
info = pkgreader.readpkg(p)
except:
logging.error('failed to read info for package %s', act.path)
info = None
info = pickle.dumps(info)
db.execute('update pkginfo set info=?', (info,))
setver(db, '0.2')
db.close()
input('Please re-start the service with new code and then press Enter.')
if __name__ == '__main__':
main(sys.argv[1])

View file

@ -1,44 +0,0 @@
#!/usr/bin/env python3
# vim:fileencoding=utf-8
import os, sys
import sqlite3
import pickle
import logging
from archrepo2.lib.nicelogger import enable_pretty_logging
enable_pretty_logging(logging.DEBUG)
from archrepo2.dbutil import *
def main(dbname):
  """Upgrade an archrepo2 info database from version 0.3 to 0.4.

  Rewrites the ``filename`` columns of the ``sigfiles`` and ``pkginfo``
  tables from absolute paths to paths relative to the directory that
  contains the database file.
  """
  db = sqlite3.connect(dbname, isolation_level=None)
  if getver(db) != '0.3':
    raise Exception('wrong database version')
  base_dir = os.path.dirname(dbname)
  input('Please stop the service and then press Enter.')
  # Take one sample stored path and compute its relative form; the shared
  # suffix of the two spellings is the part of the path that stays the same,
  # and everything before it is the prefix to rewrite.
  # NOTE(review): assumes all rows share the same absolute prefix as this
  # sample (i.e. everything lives under one repo path) — TODO confirm.
  p = db.execute('select filename from sigfiles limit 1').fetchone()[0]
  newp = os.path.relpath(p, start=base_dir)
  # Length of the common *suffix*: commonprefix over the reversed strings.
  suffix_len = len(os.path.commonprefix((newp[::-1], p[::-1])))
  old_prefix = p[:-suffix_len]
  new_prefix = newp[:-suffix_len]
  # OR REPLACE: if a rewritten filename collides with an existing row,
  # the existing row is replaced rather than raising a constraint error.
  db.execute('''
      UPDATE OR REPLACE sigfiles
      SET filename = REPLACE(filename, ?, ?)
      ''', (old_prefix, new_prefix))
  db.execute('''
      UPDATE OR REPLACE pkginfo
      SET filename = REPLACE(filename, ?, ?)
      ''', (old_prefix, new_prefix))
  setver(db, '0.4')
  db.close()
  input('Please re-start the service with new code and then press Enter.')
if __name__ == '__main__':
  if len(sys.argv) != 2:
    sys.exit('usage: upgrade_from_0.3_to_0.4.py info-database-file')
  main(*sys.argv[1:])

2
setup.py Executable file → Normal file
View file

@ -7,7 +7,7 @@ setup(
name = 'archrepo2',
version = archrepo2.__version__,
packages = find_packages(),
install_requires = ['tornado>2.4.1', 'pyinotify', 'pyalpm'],
install_requires = ['tornado>2.4.1', 'pyinotify', 'distribute'],
entry_points = {
'console_scripts': [
'archreposrv = archrepo2.archreposrv:main',

View file

@ -1,59 +1,51 @@
base_dir: /var/cache/pacman/pkg
# wait for server to start
wait
# x86_64 packages
# i686 and x86_64 packages
add: any zip-3.0-3-x86_64.pkg.tar.xz
add: x86_64 zip-3.0-3-i686.pkg.tar.xz
wait
checky: x86_64/zip-3.0-3-x86_64.pkg.tar.xz
checky: i686/zip-3.0-3-i686.pkg.tar.xz
checkp: x86_64 zip=3.0-3
checkp: i686 zip=3.0-3
# 'any' package
add: x86_64 youtube-dl-2014.01.28.1-1-any.pkg.tar.xz
add: i686 ydcv-0.3-1-any.pkg.tar.xz
wait
checky: any/youtube-dl-2014.01.28.1-1-any.pkg.tar.xz
checky: i686/youtube-dl-2014.01.28.1-1-any.pkg.tar.xz
checky: x86_64/youtube-dl-2014.01.28.1-1-any.pkg.tar.xz
checkp: x86_64 youtube-dl=2014.01.28.1-1
checkp: i686 youtube-dl=2014.01.28.1-1
checky: any/ydcv-0.3-1-any.pkg.tar.xz
checky: i686/ydcv-0.3-1-any.pkg.tar.xz
checky: x86_64/ydcv-0.3-1-any.pkg.tar.xz
checkp: x86_64 ydcv=0.3-1
checkp: i686 ydcv=0.3-1
# update a package
add: any youtube-dl-2014.01.29-1-any.pkg.tar.xz
add: any ydcv-0.3.2-1-any.pkg.tar.xz
wait
checky: any/youtube-dl-2014.01.29-1-any.pkg.tar.xz
checky: i686/youtube-dl-2014.01.29-1-any.pkg.tar.xz
checky: x86_64/youtube-dl-2014.01.29-1-any.pkg.tar.xz
checkp: x86_64 youtube-dl=2014.01.29-1
checkp: i686 youtube-dl=2014.01.29-1
checky: any/ydcv-0.3.2-1-any.pkg.tar.xz
checky: i686/ydcv-0.3.2-1-any.pkg.tar.xz
checky: x86_64/ydcv-0.3.2-1-any.pkg.tar.xz
checkp: x86_64 ydcv=0.3.2-1
checkp: i686 ydcv=0.3.2-1
# downgrade and remove
add: x86_64 youtube-dl-2014.01.29-1-any.pkg.tar.xz
remove: any youtube-dl-2014.01.28.1-1-any.pkg.tar.xz
add: i686 ydcv-0.3.1-1-any.pkg.tar.xz
remove: any ydcv-0.3-1-any.pkg.tar.xz
wait
checkn: any/youtube-dl-2014.01.28.1-1-any.pkg.tar.xz
checkn: i686/youtube-dl-2014.01.28.1-1-any.pkg.tar.xz
checkn: x86_64/youtube-dl-2014.01.28.1-1-any.pkg.tar.xz
checkp: x86_64 youtube-dl=2014.01.29-1
checkp: i686 youtube-dl=2014.01.29-1
checkn: any/ydcv-0.3-1-any.pkg.tar.xz
checkn: i686/ydcv-0.3-1-any.pkg.tar.xz
checkn: x86_64/ydcv-0.3-1-any.pkg.tar.xz
checkp: x86_64 ydcv=0.3.1-1
checkp: i686 ydcv=0.3.1-1
# completely remove packages
remove: any youtube-dl-2014.01.29-1-any.pkg.tar.xz
remove: x86_64 zip-3.0-3-x86_64.pkg.tar.xz
remove: any ydcv-0.3.1-1-any.pkg.tar.xz
remove: i686 zip-3.0-3-i686.pkg.tar.xz
wait
checkn: any/youtube-dl-2014.01.29-1-any.pkg.tar.xz
checkn: i686/youtube-dl-2014.01.29-1-any.pkg.tar.xz
checkn: x86_64/youtube-dl-2014.01.29-1-any.pkg.tar.xz
checkp: x86_64 youtube-dl=null
checkp: i686 youtube-dl=null
checkp: any youtube-dl=null
checkn: any/ydcv-0.3.1-1-any.pkg.tar.xz
checkn: i686/ydcv-0.3.1-1-any.pkg.tar.xz
checkn: x86_64/ydcv-0.3.1-1-any.pkg.tar.xz
checkp: x86_64 ydcv=null
checkp: i686 ydcv=null
checkp: any ydcv=null
checkp: i686 zip=null
checkp: x86_64 zip=null
# add a package, then remove it while the add is still being processed
# (a race condition test) — look at the log carefully!
add: x86_64 linux-3.12.8-1-x86_64.pkg.tar.xz
racing-wait
remove: x86_64 linux-3.12.8-1-x86_64.pkg.tar.xz
wait
checkn: x86_64/linux-3.12.8-1-x86_64.pkg.tar.xz
checkp: x86_64 linux=null
checkp: x86_64 zip=3.0-3

View file

@ -1,4 +1,5 @@
#!/usr/bin/env python3
# vim:fileencoding=utf-8
import os
import sys
@ -25,13 +26,6 @@ class WaitCommand(Command):
logging.info('waiting for %d seconds...', t)
time.sleep(t)
class RacingWaitCommand(Command):
cmd = 'racing-wait'
def run(self):
t = self.ctx['wait_time'] + 0.3
logging.info('Racing-waiting for %s seconds...', t)
time.sleep(t)
class BaseDirCommand(Command):
cmd = 'base_dir'
def run(self):
@ -116,9 +110,6 @@ def run_action_file(conf, actlines):
cmd = cmd.rstrip(':')
try:
cmdmap[cmd](ctx, args)
except KeyboardInterrupt:
logging.info('Interrupted.')
break
except:
logging.error('error running action: %s', l, exc_info=True)
logging.info('done running action file.')
@ -128,8 +119,10 @@ class Server:
self.conffile = conffile
def start(self):
server_path = os.path.join('.', os.path.normpath(os.path.join(__file__, '../../archreposrv')))
logging.debug('server path: %s', server_path)
logging.info('starting server...')
self.p = subprocess.Popen(['archreposrv', self.conffile])
self.p = subprocess.Popen([server_path, self.conffile])
def stop(self):
logging.info('quitting server...')