Compare commits


No commits in common. "master" and "v0.2.3" have entirely different histories.

24 changed files with 602 additions and 1206 deletions

README.mkd Normal file

@@ -0,0 +1,18 @@
USAGE
====
Edit a copy of `archrepo.ini` and then run `./archreposrv <config>`.
DEPENDENCIES
====
* Python, >= 3.3, with sqlite support
* distribute
* tornado, > 2.4.1
* pyinotify, tested with 0.9.4
* winterpy (add `pylib` to `$PYTHONPATH`)
TODO
====
* [high] adding a package and then removing it before the add completes will result in a not-in-database removal
* [low] support symlinking packages here
* [low] verify packages


@@ -1,34 +0,0 @@
USAGE
=====
Install::
python3 setup.py install
Edit a copy of ``archrepo.ini.example`` and then run
``archreposrv <config>``.
DEPENDENCIES
============
- Python, >= 3.3, with sqlite support
- setuptools
- tornado, > 3.1
- pyinotify, tested with 0.9.4
- pyalpm, tested with 0.10.6
NOTE
====
- relative symlinks may be broken when moving to the right architecture
directory
TODO
====
- [high] adding a package and then removing it before the add completes
  will result in a not-in-database removal
- [middle] specify what architectures we have and don't require others
- [low] use one common command queue (now one per repo)
- [low] verify packages

archrepo.ini Normal file

@@ -0,0 +1,30 @@
[repository]
# Name of the repository. In the example below, the Pacman repository db file
# name will be archlinuxcn.db.tar.gz
name: archlinuxcn
# Path to the repository - directory should normally contain any, i686 and
# x86_64. The server will monitor files in it with inotify. If you have lots of
# files in this directory, remember to update the configuration of inotify.
path: /home/lilydjwg/tmpfs/test
# A database to store package info. Defaults to ${path}/pkginfo.db
#info-db: /home/lilydjwg/tmpfs/test/pkginfo.db
# Specify where to find these commands
#command-add: repo-add
#command-remove: repo-remove
# By enabling auto-rename, the server will automatically rename the package
# files according to .PKGINFO, and move them under the correct architecture
# directory. Default is on.
#auto-rename: on
# Seconds before actually running the command. 10s by default.
#wait-time: 10
wait-time: 3
# Notification type to use when done. Currently available: simple-udp, null
notification-type: simple-udp
notification-address: 127.0.0.1:9900
notification-secret: JiUHuGY987G76djItfOskOj
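
For context (an illustrative sketch, not part of the diff): the server reads this file with Python's configparser, and the typed section getters supply the defaults noted in the comments above. For example:

import configparser

config = configparser.ConfigParser()
config.read('archrepo.ini')                  # path is illustrative
repo = config['repository']
print(repo.get('name'))                      # archlinuxcn
print(repo.getint('wait-time', 10))          # 3 here; 10 if the key were commented out
print(repo.getboolean('auto-rename', True))  # True, since the key above is commented out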


@@ -1 +0,0 @@
__version__ = '0.6dev'


@@ -1,53 +0,0 @@
#!/usr/bin/env python3
import sys
import configparser
import logging
from tornado.ioloop import IOLoop
from .lib.nicelogger import enable_pretty_logging
enable_pretty_logging(logging.DEBUG)
from .repomon import repomon
logger = logging.getLogger(__name__)
def check_and_get_repos(config):
repos = config['multi'].get('repos', 'repository')
for field in ('name', 'path'):
if config['multi'].get(field, None) is not None:
raise ValueError('config %r cannot have default value.' % field)
repos = {repo.strip() for repo in repos.split(',')}
for field in ('name', 'path'):
vals = [config[repo].get(field) for repo in repos]
if len(vals) != len(set(vals)):
raise ValueError('duplicate %s in different repositories.' % field)
return repos
def main():
conffile = sys.argv[1]
config = configparser.ConfigParser(default_section='multi')
config.read(conffile)
repos = check_and_get_repos(config)
notifiers = []
for repo in repos:
notifiers.extend(repomon(config[repo]))
ioloop = IOLoop.current()
logger.info('starting archreposrv.')
try:
ioloop.start()
except KeyboardInterrupt:
for notifier in notifiers:
notifier.stop()
ioloop.close()
print()
if __name__ == '__main__':
if sys.version_info[:2] < (3, 3):
raise OSError('Python 3.3+ required.')
main()


@@ -1,5 +0,0 @@
'''
modules in this directory are taken from `winterpy <https://github.com/lilydjwg/winterpy>`_.
last synced on 2013-08-23.
'''


@@ -1,81 +0,0 @@
from __future__ import annotations
import os
from collections import namedtuple
import subprocess
import re
from typing import List, Dict
import pyalpm
class PkgNameInfo(namedtuple('PkgNameInfo', 'name, version, release, arch')):
def __lt__(self, other) -> bool:
if self.name != other.name or self.arch != other.arch:
return NotImplemented
if self.version != other.version:
return pyalpm.vercmp(self.version, other.version) < 0
return float(self.release) < float(other.release)
def __gt__(self, other) -> bool:
# No, try the other side please.
return NotImplemented
@property
def fullversion(self) -> str:
return '%s-%s' % (self.version, self.release)
@classmethod
def parseFilename(cls, filename: str) -> 'PkgNameInfo':
return cls(*trimext(filename, 3).rsplit('-', 3))
def trimext(name: str, num: int = 1) -> str:
for i in range(num):
name = os.path.splitext(name)[0]
return name
def get_pkgname_with_bash(PKGBUILD: str) -> List[str]:
script = '''\
. '%s'
echo ${pkgname[*]}''' % PKGBUILD
# Python 3.4 has 'input' arg for check_output
p = subprocess.Popen(
['bwrap', '--unshare-all', '--ro-bind', '/', '/', '--tmpfs', '/home',
'--tmpfs', '/run', '--die-with-parent',
'--tmpfs', '/tmp', '--proc', '/proc', '--dev', '/dev', '/bin/bash'],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
)
output = p.communicate(script.encode())[0].decode()
ret = p.wait()
if ret != 0:
raise subprocess.CalledProcessError(
ret, ['bash'], output)
return output.split()
pkgfile_pat = re.compile(r'(?:^|/).+-[^-]+-[\d.]+-(?:\w+)\.pkg\.tar\.(?:xz|zst)$')
def _strip_ver(s: str) -> str:
return re.sub(r'[<>=].*', '', s)
def get_package_info(name: str, local: bool = False) -> Dict[str, str]:
old_lang = os.environ['LANG']
os.environ['LANG'] = 'C'
args = '-Qi' if local else '-Si'
try:
outb = subprocess.check_output(["pacman", args, name])
out = outb.decode('latin1')
finally:
os.environ['LANG'] = old_lang
ret = {}
for l in out.splitlines():
if not l:
continue
if l[0] not in ' \t':
key, value = l.split(':', 1)
key = key.strip()
value = value.strip()
ret[key] = value
else:
ret[key] += ' ' + l.strip()
return ret
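
A quick usage sketch (illustrative, not from the diff; pyalpm must be importable since the module imports it). parseFilename trims the three extensions of .pkg.tar.xz and splits the remaining stem on its last three dashes:

from archrepo2.lib.archpkg import PkgNameInfo

pkg = PkgNameInfo.parseFilename('zip-3.0-3-x86_64.pkg.tar.xz')
print(pkg.name, pkg.version, pkg.release, pkg.arch)  # zip 3.0 3 x86_64
print(pkg.fullversion)                               # 3.0-3
# ordering compares versions with pyalpm.vercmp, then releases numerically
assert PkgNameInfo.parseFilename('zip-3.0-2-x86_64.pkg.tar.xz') < pkg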


@@ -1,96 +0,0 @@
'''
A Tornado-inspired logging formatter that displays timestamps with millisecond accuracy
FYI: pyftpdlib also has a Tornado-style logger.
'''
from __future__ import annotations
import sys
import time
import logging
class TornadoLogFormatter(logging.Formatter):
def __init__(self, color, *args, **kwargs):
super().__init__(*args, **kwargs)
self._color = color
if color:
import curses
curses.setupterm()
if sys.hexversion < 0x30203f0:
fg_color = str(curses.tigetstr("setaf") or
curses.tigetstr("setf") or "", "ascii")
else:
fg_color = curses.tigetstr("setaf") or curses.tigetstr("setf") or b""
self._colors = {
logging.DEBUG: str(curses.tparm(fg_color, 4), # Blue
"ascii"),
logging.INFO: str(curses.tparm(fg_color, 2), # Green
"ascii"),
logging.WARNING: str(curses.tparm(fg_color, 3), # Yellow
"ascii"),
logging.ERROR: str(curses.tparm(fg_color, 1), # Red
"ascii"),
logging.CRITICAL: str(curses.tparm(fg_color, 9), # Bright Red
"ascii"),
}
self._normal = str(curses.tigetstr("sgr0"), "ascii")
def format(self, record):
try:
record.message = record.getMessage()
except Exception as e:
record.message = "Bad message (%r): %r" % (e, record.__dict__)
record.asctime = time.strftime(
"%m-%d %H:%M:%S", self.converter(record.created))
prefix = '[%(levelname)1.1s %(asctime)s.%(msecs)03d %(module)s:%(lineno)d]' % \
record.__dict__
if self._color:
prefix = (self._colors.get(record.levelno, self._normal) +
prefix + self._normal)
formatted = prefix + " " + record.message
formatted += ''.join(
' %s=%s' % (k, v) for k, v in record.__dict__.items()
if k not in {
'levelname', 'asctime', 'module', 'lineno', 'args', 'message',
'filename', 'exc_info', 'exc_text', 'created', 'funcName',
'processName', 'process', 'msecs', 'relativeCreated', 'thread',
'threadName', 'name', 'levelno', 'msg', 'pathname', 'stack_info',
'taskName',
})
if record.exc_info:
if not record.exc_text:
record.exc_text = self.formatException(record.exc_info)
if record.exc_text:
formatted = formatted.rstrip() + "\n" + record.exc_text
return formatted.replace("\n", "\n ")
def enable_pretty_logging(level=logging.DEBUG, handler=None, color=None):
'''
handler: specify a handler instead of default StreamHandler
color: boolean; force color on or off. Defaults to on only when
``handler`` isn't specified and the terminal supports color
'''
logger = logging.getLogger()
if handler is None:
h = logging.StreamHandler()
else:
h = handler
if color is None:
color = False
if handler is None and sys.stderr.isatty():
try:
import curses
curses.setupterm()
if curses.tigetnum("colors") > 0:
color = True
except:
import traceback
traceback.print_exc()
formatter = TornadoLogFormatter(color=color)
h.setLevel(level)
h.setFormatter(formatter)
logger.setLevel(level)
logger.addHandler(h)
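
Usage is a one-time call at startup, as archreposrv does; afterwards ordinary logging calls get the colored, millisecond-stamped format, with any extra fields appended as key=value pairs. A minimal sketch (import path as laid out in this tree):

import logging
from archrepo2.lib.nicelogger import enable_pretty_logging

enable_pretty_logging(logging.DEBUG)
logging.getLogger('demo').info('done', extra={'pkg': 'zip'})
# prints something like: [I 02-01 12:00:00.123 example:6] done pkg=zip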


@@ -1,606 +0,0 @@
#!/usr/bin/env python3
import os
import re
import pwd
import stat
from functools import partial
from itertools import filterfalse
import queue
import logging
import sqlite3
import socket
import time
import hashlib
from os.path import relpath
import pyinotify
Event = pyinotify.Event
from tornado.ioloop import IOLoop
import tornado.process
from .lib import archpkg
from . import dbutil
logger = logging.getLogger(__name__)
def same_existent_file(a, b):
try:
return os.path.samefile(a, b)
except OSError:
return False
class ActionInfo(archpkg.PkgNameInfo):
def __new__(cls, path, action, four=None, five=None, pkgpath=None):
if four is not None:
return super().__new__(cls, path, action, four, five)
file = os.path.split(pkgpath or path)[1]
self = cls.parseFilename(file)
self.action = action
self.path = path
return self
def __repr__(self):
return '<ActionInfo: %s %s>' % (self.action, self.path)
class RepoMan:
_timeout = None
_cmd_queue = queue.Queue()
_cmd_running = False
def __init__(self, config, base, siteman):
self.action = []
self._ioloop = IOLoop.current()
self._base = base
self._siteman = siteman
self._repo_dir = config.get('path')
self.name = config.get('name')
self._db_file = os.path.join(base, self.name + '.db.tar.gz')
self._files_name = os.path.join(base, self._db_file.replace('.db.tar.gz', '.files.tar.gz'))
self._command_add = config.get('command-add', 'repo-add')
self._command_remove = config.get('command-remove', 'repo-remove')
self._wait_time = config.getint('wait-time', 10)
self._without_db = config.getboolean('without-db', False)
self._auto_rename = config.getboolean('auto-rename', True)
self._symlink_any = config.getboolean('symlink-any', True)
def queue_command(self, cmd, callbacks=None):
self._cmd_queue.put((cmd, callbacks))
if not self._cmd_running:
self.run_command()
def run_command(self):
if not self._cmd_running:
self._siteman.inc_running()
self.__class__._cmd_running = True
try:
cmd, callbacks = self._cmd_queue.get_nowait()
except queue.Empty:
self.__class__._cmd_running = False
self._siteman.dec_running()
return
logger.info('Running cmd: %r', cmd)
# no longer have to specify io_loop in Tornado > 3.1. Let's drop them for
# Tornado >= 5
try:
p = tornado.process.Subprocess(cmd)
except OSError:
logger.error('failed to run command.', exc_info=True)
self.run_command()
else:
p.set_exit_callback(partial(self.command_done, callbacks))
def command_done(self, callbacks, status):
if status == 0:
if callbacks:
for cb in callbacks:
cb()
logger.info('previous command done.')
else:
logger.warn('previous command failed with status code %d.', status)
self.run_command()
def _do_cmd(self, cmd, items, callbacks):
cmd1 = [cmd, self._db_file]
cmd1.extend(items)
self.queue_command(cmd1, callbacks)
def _do_add(self, toadd):
if toadd:
files, callbacks = zip(*toadd)
if self._without_db:
self._do_callbacks(callbacks)
else:
self._do_cmd(self._command_add, files, callbacks)
def _do_remove(self, toremove):
if toremove:
files, callbacks = zip(*toremove)
if self._without_db:
self._do_callbacks(callbacks)
else:
self._do_cmd(self._command_remove, files, callbacks)
def _do_callbacks(self, callbacks):
for cb in callbacks:
cb()
def add_action(self, action):
logger.info('Adding action %r to db %r', action, self._db_file)
self._action_pending(action)
def _action_pending(self, act):
self.action.append(act)
if self._timeout:
self._ioloop.remove_timeout(self._timeout)
self._timeout = self._ioloop.add_timeout(
self._ioloop.time() + self._wait_time,
self.run,
)
def run(self):
self._timeout = None
actions = self.action
self.action = []
actiondict = {}
for act in actions:
if act.name not in actiondict:
actiondict[act.name] = act
else:
oldact = actiondict[act.name]
if oldact != act:
# different packages, do the latter, but record the former
try:
actiondict[act.name].callback(state=0)
except:
logger.exception('failed to run action %r.', actiondict[act.name])
# same package, do the latter, and discard the former
actiondict[act.name] = act
toadd = [(x.path, x.callback) for x in actiondict.values() if x.action == 'add']
toremove = [(x.name, x.callback) for x in actiondict.values() if x.action == 'remove']
self._do_add(toadd)
self._do_remove(toremove)
class EventHandler(pyinotify.ProcessEvent):
_n_running = 0
def my_init(
self, filter_pkg, supported_archs, config, wm,
):
notification_type = config.get(
'notification-type', 'null')
if notification_type != 'null':
self._notification_addrs = config.get(
'notification-addresses')
self._notification_secret = config.get(
'notification-secret')
self.send_notification = getattr(
self,
'send_notification_' +
notification_type.replace('-', '_'),
)
self.filter_pkg = filter_pkg
self.moved_away = {}
self.created = {}
self.repomans = {}
# TODO: use an expiring dict
self.our_links = set()
self._ioloop = IOLoop.current()
base = config.get('path')
self._lastupdate_file = os.path.join(base, 'lastupdate')
dbname = config.get('info-db', os.path.join(base, 'pkginfo.db'))
new_db = not os.path.exists(dbname)
self._db = sqlite3.connect(dbname, isolation_level=None) # isolation_level=None means autocommit
self._db_dir = os.path.dirname(dbname)
if new_db:
dbutil.setver(self._db, '0.4')
else:
assert dbutil.getver(self._db) == '0.4', 'wrong database version, please upgrade (see scripts directory)'
self._db.execute('''create table if not exists pkginfo
(filename text unique,
pkgrepo text,
pkgname text,
pkgarch text,
pkgver text,
forarch text,
owner text,
mtime int,
state int,
info blob)''')
self._db.execute('''create table if not exists sigfiles
(filename text unique,
pkgrepo text)''')
self._supported_archs = supported_archs
dirs = [os.path.join(base, x) for x in self._supported_archs]
self.files = files = set()
for d in dirs:
os.makedirs(d, exist_ok=True)
for f in os.listdir(d):
p = os.path.join(d, f)
if os.path.exists(p): # filter broken symlinks
files.add(p)
wm.add_watch(d, pyinotify.IN_CLOSE_WRITE | pyinotify.IN_DELETE |
pyinotify.IN_CREATE | pyinotify.IN_MOVED_FROM |
pyinotify.IN_MOVED_TO | pyinotify.IN_OPEN)
self.repomans[d] = RepoMan(config, d, self)
self.name = self.repomans[d].name
self._auto_rename = self.repomans[d]._auto_rename
self._symlink_any = self.repomans[d]._symlink_any
self._initial_update(files)
def _initial_update(self, files):
oldfiles = {f[0] for f in self._db.execute('select filename from pkginfo where pkgrepo = ?', (self.name,))}
oldfiles.update(f[0] for f in self._db.execute('select filename from sigfiles where pkgrepo = ?', (self.name,)))
oldfiles = {os.path.join(self._db_dir, f) for f in oldfiles}
for f in sorted(filterfalse(self.filter_pkg, files - oldfiles),
key=pkgsortkey):
self.dispatch(f, 'add')
for f in sorted(filterfalse(self.filter_pkg, oldfiles - files),
key=pkgsortkey):
self.dispatch(f, 'remove')
def process_IN_CLOSE_WRITE(self, event):
logger.debug('Writing done: %s', event.pathname)
self.dispatch(event.pathname, 'add')
self.files.add(event.pathname)
def process_IN_DELETE(self, event):
logger.debug('Removing: %s', event.pathname)
self.dispatch(event.pathname, 'remove')
try:
self.files.remove(event.pathname)
except KeyError:
# symlinks haven't been added
pass
def process_IN_CREATE(self, event):
file = event.pathname
if os.path.islink(file):
if file in self.our_links:
self.our_links.remove(file)
else:
logger.debug('Symlinked: %s', file)
self.dispatch(file, 'add')
self.files.add(file)
else:
logger.debug('Created: %s', file)
self.created[file] = self._ioloop.add_timeout(
self._ioloop.time() + 0.1,
partial(self.linked, file),
)
def process_IN_OPEN(self, event):
file = event.pathname
try:
timeout = self.created.pop(file)
except KeyError:
return
self._ioloop.remove_timeout(timeout)
def linked(self, file):
logger.debug('Linked: %s', file)
del self.created[file]
self.dispatch(file, 'add')
self.files.add(file)
def movedOut(self, event):
logger.debug('Moved away: %s', event.pathname)
del self.moved_away[event.cookie]
self.dispatch(event.pathname, 'remove')
def process_IN_MOVED_FROM(self, event):
self.moved_away[event.cookie] = self._ioloop.add_timeout(
self._ioloop.time() + 0.1,
partial(self.movedOut, event),
)
self.files.remove(event.pathname)
def process_IN_MOVED_TO(self, event):
if event.pathname in self.files:
logger.warn('Overwritten: %s', event.pathname)
self.files.add(event.pathname)
if event.cookie in self.moved_away:
self._ioloop.remove_timeout(self.moved_away.pop(event.cookie))
else:
logger.debug('Moved here: %s', event.pathname)
self.dispatch(event.pathname, 'add')
def dispatch(self, path, action):
if path.endswith('.sig'):
act = ActionInfo(path, action, pkgpath=path[:-4])
callback = self._signature_changed
else:
act = ActionInfo(path, action)
callback = self._real_dispatch
d, file = os.path.split(path)
base, arch = os.path.split(d)
# rename if a packager has added to a wrong directory
if self._auto_rename and action == 'add' and act.arch != arch:
newd = os.path.join(base, act.arch)
newpath = os.path.join(newd, file)
if not same_existent_file(path, newpath):
os.rename(path, newpath)
act.path = newpath
path = newpath
arch = act.arch
d = newd
if self._symlink_any and act.arch == 'any':
for newarch in self._supported_archs:
if newarch == arch:
# this file itself
continue
newd = os.path.join(base, newarch)
newpath = os.path.join(newd, file)
if action == 'add':
oldpath = os.path.join('..', arch, file)
if not same_existent_file(oldpath, newpath):
os.makedirs(newd, exist_ok=True)
try:
self.our_links.add(newpath)
os.symlink(oldpath, newpath)
except FileExistsError:
pass
callback(newd, ActionInfo(newpath, action))
else:
try:
os.unlink(newpath)
# this will be detected and handled later
except FileNotFoundError:
# someone deleted the file for us
pass
callback(d, act)
def _real_dispatch(self, d, act):
if act.action == 'add':
arch = os.path.split(d)[1]
def callback(stat, state=1):
self._db.execute(
'update pkginfo set state = 0 where pkgname = ? and forarch = ? and pkgrepo = ?',
(act.name, arch, self.name)
)
mtime = int(stat.st_mtime)
try:
owner = pwd.getpwuid(stat.st_uid).pw_name
except KeyError:
owner = 'uid_%d' % stat.st_uid
info = None
self._db.execute(
'''insert or replace into pkginfo
(filename, pkgrepo, pkgname, pkgarch, pkgver, forarch, state, owner, mtime, info) values
(?, ?, ?, ?, ?, ?, ?, ?, ?, ?)''',
(relpath(act.path, start=self._db_dir),
self.name, act.name, act.arch, act.fullversion, arch, state, owner, mtime, info))
logger.info('Action %r done.', act)
# stat the path here, so that it's less likely to have disappeared by then
callback = partial(callback, os.stat(act.path))
else:
rpath = relpath(act.path, start=self._db_dir)
res = self._db.execute(
'select state from pkginfo where filename = ? and state = 1 and pkgrepo = ? limit 1',
(rpath, self.name)
)
if tuple(res) == ():
# the file isn't in repo database, just delete from our info database
logger.debug('deleting entry for not-in-database package: %s', rpath)
self._db.execute('delete from pkginfo where filename = ? and pkgrepo = ?', (rpath, self.name))
return
def callback(state=any):
'''``state`` is not used'''
self._db.execute('delete from pkginfo where filename = ? and pkgrepo = ?', (rpath, self.name))
act.callback = callback
self.repomans[d].add_action(act)
def _signature_changed(self, d, action):
path = action.path
action = action.action
logger.info('%s signature %s.', action, path)
# Touch the package file so that we'll repo-add it again to include the
# sig change later.
pkg = path[:-4]
try:
st = os.lstat(pkg)
if stat.S_ISREG(st.st_mode):
logger.info('touching %s.', pkg)
os.close(os.open(pkg, os.O_WRONLY))
os.utime(pkg)
except FileNotFoundError:
pass
rpath = relpath(path, start=self._db_dir)
if action == 'add':
self._db.execute('''insert or replace into sigfiles
(filename, pkgrepo) values (?, ?)''',
(rpath, self.name))
else:
self._db.execute('''delete from sigfiles where filename = ? and pkgrepo = ?''',
(rpath, self.name))
def dec_running(self):
self._n_running -= 1
if self._n_running == 0:
self.send_notification()
self.update_lastupdate()
def inc_running(self):
self._n_running += 1
def send_notification_simple_udp(self):
msg = self._new_notification_msg()
socks = {}
for address, port in self._parse_notification_address_inet():
try:
af, socktype, proto, canonname, sockaddr = socket.getaddrinfo(
address, port, 0, socket.SOCK_DGRAM, 0, 0)[0]
except:
logger.exception('failed to create socket to %r for notification',
(address, port))
continue
info = af, socktype, proto
if info not in socks:
sock = socket.socket(*info)
socks[info] = sock
else:
sock = socks[info]
sock.sendto(msg, sockaddr)
logger.info('simple udp notification sent to %s.', (address, port))
def _new_notification_msg(self):
s = 'update'
t = str(int(time.time()))
data = s + '|' + t
hashing = data + self._notification_secret
sig = hashlib.sha1(hashing.encode('utf-8')).hexdigest()
msg = data + '|' + sig
logger.info('new notification msg: %s.', msg)
return msg.encode('utf-8')
def _parse_notification_address_inet(self):
cached = self._notification_addrs
if isinstance(cached, str):
addresses = []
for addr in cached.split():
host, port = addr.rsplit(':', 1)
port = int(port)
addresses.append((host, port))
cached = self._notification_addrs = tuple(addresses)
return cached
def send_notification_null(self):
logger.info('null notification sent.')
def update_lastupdate(self):
t = '%d\n' % time.time()
with open(self._lastupdate_file, 'w') as f:
f.write(t)
def filter_pkg(regex, path):
if isinstance(path, Event):
path = path.pathname
return not regex.search(path)
def pkgsortkey(path):
pkg = archpkg.PkgNameInfo.parseFilename(os.path.split(path)[1])
return (pkg.name, pkg.arch, pkg)
def repomon(config):
wm = pyinotify.WatchManager()
supported_archs = config.get('supported-archs', 'i686 x86_64').split()
if 'any' not in supported_archs:
supported_archs.append('any')
# assume none of the archs has regex meta characters
regex = re.compile(r'(?:^|/)[^.].*-[^-]+-[\d.]+-(?:' + '|'.join(supported_archs) + r')\.pkg\.tar\.(?:xz|zst)(?:\.sig)?$')
filter_func = partial(filter_pkg, regex)
handler = EventHandler(
filter_func,
filter_pkg = filter_func,
supported_archs = supported_archs,
config = config,
wm = wm,
)
ioloop = IOLoop.current()
ret = [pyinotify.TornadoAsyncNotifier(
wm,
default_proc_fun=handler,
ioloop = ioloop,
)]
if config.get('spool-directory'):
wm = pyinotify.WatchManager()
handler = SpoolHandler(
filter_func,
filter_pkg = filter_func,
path = config.get('spool-directory'),
dstpath = os.path.join(config.get('path'), 'any'),
wm = wm,
)
ret.append(pyinotify.TornadoAsyncNotifier(
wm, default_proc_fun=handler,
ioloop = ioloop,
))
return ret
class SpoolHandler(pyinotify.ProcessEvent):
def my_init(self, filter_pkg, path, dstpath, wm):
self.filter_pkg = filter_pkg
self.dstpath = dstpath
self._ioloop = IOLoop.current()
self.created = {}
files = set()
for f in os.listdir(path):
p = os.path.join(path, f)
if os.path.exists(p): # filter broken symlinks
files.add(p)
wm.add_watch(path, pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE |
pyinotify.IN_MOVED_TO | pyinotify.IN_OPEN)
self._initial_update(files)
def _initial_update(self, files):
for f in sorted(filterfalse(self.filter_pkg, files),
key=pkgsortkey):
self.dispatch(f)
def process_IN_CLOSE_WRITE(self, event):
logger.debug('Writing done: %s', event.pathname)
self.dispatch(event.pathname)
def process_IN_CREATE(self, event):
file = event.pathname
if os.path.islink(file):
logger.debug('Symlinked: %s', file)
self.dispatch(file)
else:
logger.debug('Created: %s', file)
self.created[file] = self._ioloop.add_timeout(
self._ioloop.time() + 0.1,
partial(self.linked, file),
)
def process_IN_OPEN(self, event):
file = event.pathname
try:
timeout = self.created.pop(file)
except KeyError:
return
self._ioloop.remove_timeout(timeout)
def linked(self, file):
logger.debug('Linked: %s', file)
del self.created[file]
self.dispatch(file)
def process_IN_MOVED_TO(self, event):
logger.debug('Moved here: %s', event.pathname)
self.dispatch(event.pathname)
def dispatch(self, path):
filename = os.path.basename(path)
os.rename(path, os.path.join(self.dstpath, filename))

archreposrv Executable file

@@ -0,0 +1,29 @@
#!/usr/bin/env python3
# vim:fileencoding=utf-8
import sys
import configparser
import logging
from tornado.ioloop import IOLoop
from myutils import enable_pretty_logging
enable_pretty_logging(logging.DEBUG)
from repomon import repomon
def main(conffile):
config = configparser.ConfigParser()
config.read(conffile)
notifier = repomon(config['repository'])
ioloop = IOLoop.instance()
try:
ioloop.start()
except KeyboardInterrupt:
ioloop.close()
notifier.stop()
if __name__ == '__main__':
main(sys.argv[1])


@@ -1,60 +0,0 @@
[multi]
# Names of the repository config sections, if you have multiple repositories.
# The names should be delimited by commas; this value defaults to
# "repository".
# Option names are the same as below, and act as defaults when not specified
# in that repository's config.
repos: repository
# You can specify an "info-db" here to have all repositories share the same info database
#info-db: /home/lilydjwg/tmpfs/test/pkginfo.db
[repository]
# Name of the repository. In the example below, the Pacman repository db file
# name will be archlinuxcn.db.tar.gz
name: archlinuxcn
# Path to the repository - directory should normally contain any, i686 and
# x86_64. The server will monitor files in it with inotify. If you have lots of
# files in this directory, remember to update the configuration of inotify.
path: /home/lilydjwg/tmpfs/test
# If enabled, packages put into this directory will be moved into the repo.
# This path should be on the same filesystem as the repo path, and should be
# used with auto-rename on.
spool-directory: /home/lilydjwg/tmpfs/spool
# A database to store package info. Defaults to ${path}/pkginfo.db
#info-db: /home/lilydjwg/tmpfs/test/pkginfo.db
# Specify where to find these commands
#command-add: repo-add
#command-remove: repo-remove
# By enabling auto-rename, the server will automatically rename the package
# files according to filenames, and move them under the correct architecture
# directory. Default is on.
#auto-rename: on
# Which archs do we support? The default is i686 and x86_64; you can add more,
# like arm, armv6h and aarch64. Archs are separated by spaces.
#supported-archs: i686 x86_64 arm
# By enabling symlink-any, the server will automatically symlink the package
# files of 'any' architecture to supported archs.
# Default is on.
#symlink-any: on
# Seconds before actually running the command. 10s by default.
#wait-time: 10
wait-time: 3
# Notification type to use when done. Currently available: simple-udp, null
notification-type: simple-udp
notification-addresses: 127.0.0.1:9900 ::1:9900
notification-secret: JiUHuGY987G76djItfOskOj
# If, for any reason, you don't want the database actually created or updated:
#without-db: true
# vim: se ft=dosini:
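
For reference, the wire format produced by _new_notification_msg above is update|<unix-time>|<hex sig>, where the signature is SHA-1 over 'update|<unix-time>' concatenated with notification-secret. The notification-runner script later in this diff is the full consumer; a condensed verification sketch (address, port and secret are the example values from this config):

import hashlib
import socket

SECRET = 'JiUHuGY987G76djItfOskOj'       # must match notification-secret

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(('127.0.0.1', 9900))           # one of the notification-addresses
while True:
    msg = sock.recv(4096).decode('utf-8')
    act, t, sig = msg.split('|')
    # recompute the signature over 'update|<timestamp>' plus the shared secret
    expected = hashlib.sha1((act + '|' + t + SECRET).encode('utf-8')).hexdigest()
    if sig == expected and act == 'update':
        print('repository updated at unix time', t)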


@@ -1,10 +0,0 @@
[Unit]
Description=archrepo2 service for archlinuxcn repo
[Service]
Type=simple
ExecStart=/usr/bin/archreposrv /etc/archrepo2.ini
Restart=on-failure
[Install]
WantedBy=multi-user.target

pkgreader.py Normal file

@@ -0,0 +1,44 @@
import tarfile
import logging
logger = logging.getLogger(__name__)
multikeys = {'depend', 'makepkgopt', 'optdepend', 'replaces', 'conflict',
'provides', 'license', 'backup', 'group', 'makedepend'}
def _add_to_dict(d, key, value):
if key in multikeys:
if key in d:
d[key].append(value)
else:
d[key] = [value]
else:
assert key not in d, 'unexpected multi-value key "%s"' % key
d[key] = value
def readpkg(file):
tar = tarfile.open(file)
info = tar.next()
if not info or info.name != '.PKGINFO':
logger.warn('%s is not a nice package!', file)
info = '.PKGINFO' # have to look further
f = tar.extractfile(info)
data = f.read().decode()
tar.close()
d = {}
key = None
for l in data.split('\n'):
if l.startswith('#'):
continue
if not l:
continue
if '=' not in l:
value += l
else:
if key is not None:
_add_to_dict(d, key, value)
key, value = l.split(' = ', 1)
_add_to_dict(d, key, value)
return d
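
In short, .PKGINFO is '#' comments plus key = value lines; keys listed in multikeys may repeat and are collected into lists, and continuation lines without ' = ' are appended to the previous value. An illustrative call (the filename is hypothetical):

d = readpkg('zip-3.0-3-x86_64.pkg.tar.xz')  # any package built by makepkg
print(d['pkgname'], d['pkgver'])            # zip 3.0-3
print(d.get('depend'))                      # a list, since 'depend' is in multikeys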

repomon.py Executable file

@@ -0,0 +1,385 @@
#!/usr/bin/env python3
# vim:fileencoding=utf-8
import os
import re
import pwd
from functools import partial
from itertools import filterfalse
import queue
import logging
import sqlite3
import socket
import time
import hashlib
import pickle
import pyinotify
Event = pyinotify.Event
from tornado.ioloop import IOLoop
import tornado.process
import archpkg
import pkgreader
import dbutil
logger = logging.getLogger(__name__)
# handles only x86_64, i686 and any arch packages
_pkgfile_pat = re.compile(r'(?:^|/)[a-z0-9_-]+-[a-z0-9_.]+-\d+-(?:x86_64|i686|any)\.pkg\.tar\.xz(?:\.sig)?$')
class ActionInfo(archpkg.PkgNameInfo):
def __new__(cls, path, action, four=None, five=None, pkgpath=None):
if four is not None:
return super().__new__(cls, path, action, four, five)
file = os.path.split(pkgpath or path)[1]
self = cls.parseFilename(file)
self.action = action
self.path = path
return self
def __repr__(self):
return '<ActionInfo: %s %s>' % (self.action, self.path)
class RepoMan:
_timeout = None
_cmd_queue = queue.Queue()
_cmd_running = False
def __init__(self, config, base, ioloop=None):
self.action = []
self._ioloop = ioloop or IOLoop.instance()
self._base = base
self._repo_dir = config.get('path')
self._db_name = os.path.join(base, config.get('name') + '.db.tar.gz')
self._files_name = os.path.join(base, self._db_name.replace('.db.tar.gz', '.files.tar.gz'))
self._command_add = config.get('command-add', 'repo-add')
self._command_remove = config.get('command-remove', 'repo-remove')
self._wait_time = config.getint('wait-time', 10)
notification_type = config.get('notification-type', 'null')
if notification_type != 'null':
self._notification_addr = config.get('notification-address')
self._notification_secret = config.get('notification-secret')
self.send_notification = getattr(
self,
'send_notification_' + notification_type.replace('-', '_'),
)
def queue_command(self, cmd, callbacks=None):
self._cmd_queue.put((cmd, callbacks))
if not self._cmd_running:
self.run_command()
def run_command(self):
self.__class__._cmd_running = True
try:
cmd, callbacks = self._cmd_queue.get_nowait()
except queue.Empty:
self.send_notification()
self.__class__._cmd_running = False
return
logger.info('Running cmd: %r', cmd)
# have to specify io_loop or we'll get error tracebacks
p = tornado.process.Subprocess(cmd, io_loop=self._ioloop)
p.set_exit_callback(partial(self.command_done, callbacks))
def command_done(self, callbacks, status):
if status == 0:
if callbacks:
for cb in callbacks:
cb()
logger.info('previous command done.')
else:
logger.warn('previous command failed with status code %d.', status)
self.run_command()
def _do_cmd(self, cmd, items, callbacks):
cmd1 = [cmd, self._db_name]
cmd1.extend(items)
cmd2 = [cmd, '-f', self._files_name]
cmd2.extend(items)
self.queue_command(cmd1)
self.queue_command(cmd2, callbacks)
def _do_add(self, toadd):
if toadd:
files, callbacks = zip(*toadd)
self._do_cmd(self._command_add, files, callbacks)
def _do_remove(self, toremove):
if toremove:
files, callbacks = zip(*toremove)
self._do_cmd(self._command_remove, files, callbacks)
def add_action(self, action):
logger.info('Adding action %r to db %r', action, self._db_name)
self._action_pending(action)
def _action_pending(self, act):
self.action.append(act)
if self._timeout:
self._ioloop.remove_timeout(self._timeout)
self._timeout = self._ioloop.add_timeout(
self._ioloop.time() + self._wait_time,
self.run,
)
def send_notification_simple_udp(self):
address, port = self._parse_notification_address_inet()
try:
af, socktype, proto, canonname, sockaddr = socket.getaddrinfo(
address, port, 0, socket.SOCK_DGRAM, 0, 0)[0]
except:
logger.error('failed to create socket for notification', exc_info=True)
return
sock = socket.socket(af, socktype, proto)
msg = self._new_notification_msg()
sock.sendto(msg, sockaddr)
sock.close()
logger.info('simple udp notification sent.')
def _new_notification_msg(self):
s = 'update'
t = str(int(time.time()))
part1 = s + '|' + t
part2 = hashlib.sha1(part1.encode('utf-8')).hexdigest()
msg = part1 + '|' + part2
logger.info('new notification msg: %s.', msg)
return msg.encode('utf-8')
def _parse_notification_address_inet(self):
cached = self._notification_addr
if isinstance(cached, str):
host, port = cached.rsplit(':', 1)
port = int(port)
cached = self._notification_addr = (host, port)
return cached
def send_notification_null(self):
logger.info('null notification sent.')
def run(self):
self._timeout = None
actions = self.action
self.action = []
actiondict = {}
for act in actions:
if act.name not in actiondict:
actiondict[act.name] = act
else:
oldact = actiondict[act.name]
if oldact == act and oldact.action != act.action:
# same package, opposite actions, do nothing
del actiondict[act.name]
else:
# take the later action
actiondict[act.name] = act
toadd = [(x.path, x.callback) for x in actiondict.values() if x.action == 'add']
toremove = [(x.name, x.callback) for x in actiondict.values() if x.action == 'remove']
self._do_add(toadd)
self._do_remove(toremove)
class EventHandler(pyinotify.ProcessEvent):
def my_init(self, config, wm, ioloop=None):
self.moved_away = {}
self.repomans = {}
self._ioloop = ioloop or IOLoop.instance()
base = config.get('path')
dbname = config.get('info-db', os.path.join(base, 'pkginfo.db'))
new_db = not os.path.exists(dbname)
self._db = sqlite3.connect(dbname, isolation_level=None) # isolation_level=None means autocommit
if new_db:
dbutil.setver(self._db, '0.2')
else:
assert dbutil.getver(self._db) == '0.2', 'wrong database version, please upgrade (see scripts directory)'
self._db.execute('''create table if not exists pkginfo
(filename text unique,
pkgname text,
pkgarch text,
pkgver text,
forarch text,
owner text,
mtime int,
state int,
info blob)''')
self._db.execute('''create table if not exists sigfiles
(filename text unique)''')
dirs = [os.path.join(base, x) for x in ('any', 'i686', 'x86_64')]
self.files = files = set()
for d in dirs:
files.update(os.path.join(d, f) for f in os.listdir(d))
wm.add_watch(d, pyinotify.ALL_EVENTS)
self.repomans[d] = RepoMan(config, d, self._ioloop)
self._initial_update(files)
def _initial_update(self, files):
oldfiles = {f[0] for f in self._db.execute('select filename from pkginfo')}
oldfiles.update(f[0] for f in self._db.execute('select filename from sigfiles'))
for f in sorted(filterfalse(filterPkg, files - oldfiles), key=pkgsortkey):
self.dispatch(f, 'add')
for f in sorted(filterfalse(filterPkg, oldfiles - files), key=pkgsortkey):
self.dispatch(f, 'remove')
def process_IN_CLOSE_WRITE(self, event):
logger.debug('Writing done: %s', event.pathname)
self.dispatch(event.pathname, 'add')
self.files.add(event.pathname)
def process_IN_DELETE(self, event):
logger.debug('Removing: %s', event.pathname)
self.dispatch(event.pathname, 'remove')
try:
self.files.remove(event.pathname)
except KeyError:
# symlinks haven't been added
pass
def movedOut(self, event):
logger.debug('Moved away: %s', event.pathname)
self.dispatch(event.pathname, 'remove')
def process_IN_MOVED_FROM(self, event):
self.moved_away[event.cookie] = self._ioloop.add_timeout(
self._ioloop.time() + 0.1,
partial(self.movedOut, event),
)
self.files.remove(event.pathname)
def process_IN_MOVED_TO(self, event):
if event.pathname in self.files:
logger.warn('Overwritten: %s', event.pathname)
self.files.add(event.pathname)
if event.cookie in self.moved_away:
self._ioloop.remove_timeout(self.moved_away[event.cookie])
else:
logger.debug('Moved here: %s', event.pathname)
self.dispatch(event.pathname, 'add')
def dispatch(self, path, action):
if path.endswith('.sig'):
act = ActionInfo(path, action, pkgpath=path[:-4])
callback = self._record_signatures
else:
act = ActionInfo(path, action)
callback = self._real_dispatch
d, file = os.path.split(path)
base, arch = os.path.split(d)
# rename if a packager has added to a wrong directory
# but not for a link that has arch=any, as that's what we created
if action == 'add' and act.arch != arch and not \
(os.path.islink(path) and act.arch == 'any'):
newd = os.path.join(base, act.arch)
newpath = os.path.join(newd, file)
os.rename(path, newpath)
act.path = newpath
path = newpath
arch = act.arch
d = newd
if arch == 'any':
for newarch in ('i686', 'x86_64'):
newd = os.path.join(base, newarch)
newpath = os.path.join(newd, file)
if action == 'add':
try:
os.symlink(os.path.join('..', arch, file), newpath)
except FileExistsError:
pass
callback(newd, ActionInfo(newpath, action))
else:
try:
os.unlink(newpath)
# this will be detected and handled later
except FileNotFoundError:
# someone deleted the file for us
pass
callback(d, act)
def _real_dispatch(self, d, act):
if act.action == 'add':
arch = os.path.split(d)[1]
def callback():
self._db.execute('update pkginfo set state = 0 where pkgname = ? and forarch = ?', (act.name, arch))
stat = os.stat(act.path)
mtime = int(stat.st_mtime)
try:
owner = pwd.getpwuid(stat.st_uid).pw_name
except KeyError:
owner = 'uid_%d' % stat.st_uid
try:
info = pkgreader.readpkg(act.path)
except:
logger.error('failed to read info for package %s', act.path, exc_info=True)
info = None
info = pickle.dumps(info)
self._db.execute(
'''insert or replace into pkginfo
(filename, pkgname, pkgarch, pkgver, forarch, state, owner, mtime, info) values
(?, ?, ?, ?, ?, ?, ?, ?, ?)''',
(act.path, act.name, act.arch, act.fullversion, arch, 1, owner, mtime, info))
else:
res = self._db.execute('select state from pkginfo where filename = ? and state = 1 limit 1', (act.path,))
if tuple(res) == ():
# the file isn't in repo database, just delete from our info database
logger.debug('deleting entry for not-in-database package: %s', act.path)
self._db.execute('delete from pkginfo where filename = ?', (act.path,))
return
def callback():
self._db.execute('delete from pkginfo where filename = ?', (act.path,))
act.callback = callback
self.repomans[d].add_action(act)
def _record_signatures(self, d, action):
path = action.path
action = action.action
logger.info('%s signature %s.', action, path)
if action == 'add':
self._db.execute('''insert or replace into sigfiles
(filename) values (?)''',
(path,))
else:
self._db.execute('''delete from sigfiles where filename = ?''',
(path,))
def filterPkg(path):
if isinstance(path, Event):
path = path.pathname
return not _pkgfile_pat.search(path)
def pkgsortkey(path):
pkg = archpkg.PkgNameInfo.parseFilename(os.path.split(path)[1])
return (pkg.name, pkg.arch, pkg)
def repomon(config):
wm = pyinotify.WatchManager()
ioloop = IOLoop.instance()
handler = EventHandler(
filterPkg,
config=config,
wm=wm,
ioloop=ioloop,
)
return pyinotify.TornadoAsyncNotifier(
wm,
ioloop,
default_proc_fun=handler,
)


@@ -1,92 +0,0 @@
#!/usr/bin/env python3
import argparse
import hashlib
import logging
import os
import select
import socket
import subprocess
import sys
import time
from nicelogger import enable_pretty_logging
def run_command(command):
logging.info('running command %r', command)
try:
subprocess.check_call(command, shell=True)
except:
logging.exception('failed to run command %r', command)
def decode_msg(msg, secret):
act, t, sig = msg.split('|')
hashing = act + '|' + t + secret
mysig = hashlib.sha1(hashing.encode('utf-8')).hexdigest()
if mysig != sig:
raise ValueError('signature mismatch')
return act, int(t)
def main(args, secret):
af, socktype, proto, canonname, sockaddr = socket.getaddrinfo(
args.host, args.port, 0, socket.SOCK_DGRAM, 0, 0)[0]
sock = socket.socket(af, socktype, proto)
sock.bind((args.host, args.port))
last_run = 0
while True:
r, w, e = select.select([sock], [], [], args.timeout)
if r:
msg, remote = sock.recvfrom(4096)
try:
msg = msg.decode('utf-8')
act, t = decode_msg(msg, secret)
now = time.time()
if not (act == 'update' and abs(t - now) < args.threshold):
logging.warn('skipping unknown or expired msg %r from %r...',
msg, remote)
continue
if abs(now - last_run) < args.repeat_window:
logging.warn('refuse to run too frequently. last run: %r. msg %r from %r...',
time.ctime(last_run), msg, remote)
continue
last_run = now
run_command(args.command)
except:
logging.exception('error occurred, skipping msg %r from %r...',
msg, remote)
else:
run_command(args.command)
if __name__ == '__main__':
enable_pretty_logging('INFO')
parser = argparse.ArgumentParser(
description='run command on archrepo2 update notification',
add_help=False,
)
parser.add_argument('-h', '--host', default='0.0.0.0',
help='host to bind to. default: the IPv4 wildcard address')
parser.add_argument('-p', '--port', type=int, required=True,
help='port to wait on')
parser.add_argument('-t', '--timeout', type=float,
help='timeout for waiting; the command also runs when it expires')
parser.add_argument('-r', '--threshold', type=int, default=60,
help='time threshold for message timestamp. default: 60')
parser.add_argument('-w', '--repeat-window', metavar='SECS', type=int, default=60,
help="don't repeat within this amount of seconds. default: 60")
parser.add_argument('--help', action='help',
help='show this help message and exit')
parser.add_argument('command',
help='command to run')
args = parser.parse_args()
secret = os.environ.get('REPO_SECRET', '')
if not secret:
logging.fatal('REPO_SECRET environment variable not set')
sys.exit(1)
logging.info('started')
try:
main(args, secret)
except KeyboardInterrupt:
logging.info('stopped')


@@ -1,9 +0,0 @@
#!/bin/bash -e
for f; do
p=${f%.sig}
if [[ -f $p && $f -nt $p ]]; then
echo "touching $p."
touch "$p"
fi
done

scripts/upgrade_from_0.1.py Executable file

@@ -0,0 +1,49 @@
#!/usr/bin/env python3
# vim:fileencoding=utf-8
import os, sys
import sqlite3
import configparser
import pickle
import logging
from myutils import enable_pretty_logging
enable_pretty_logging(logging.DEBUG)
top_dir = os.path.normpath(os.path.join(__file__, '../..'))
sys.path.append(top_dir)
import pkgreader
from dbutil import *
def main(conffile):
config = configparser.ConfigParser()
config.read(conffile)
config = config['repository']
base = config.get('path')
dbname = config.get('info-db', os.path.join(base, 'pkginfo.db'))
db = sqlite3.connect(dbname, isolation_level=None)
assert getver(db) == '0.1', 'wrong database version'
input('Please stop the service and then press Enter.')
try:
db.execute('alter table pkginfo add info blob')
except sqlite3.OperationalError:
# the column is already there
pass
pkgs = [x[0] for x in db.execute('select filename from pkginfo')]
for p in pkgs:
try:
info = pkgreader.readpkg(p)
except:
logging.error('failed to read info for package %s', p)
info = None
info = pickle.dumps(info)
db.execute('update pkginfo set info=? where filename=?', (info, p))
setver(db, '0.2')
db.close()
input('Please re-start the service with new code and then press Enter.')
if __name__ == '__main__':
main(sys.argv[1])


@@ -1,41 +0,0 @@
#!/usr/bin/env python3
# vim:fileencoding=utf-8
import os, sys
import sqlite3
import pickle
import logging
from archrepo2.lib.nicelogger import enable_pretty_logging
enable_pretty_logging(logging.DEBUG)
from archrepo2.dbutil import *
def main(dbname, reponame):
db = sqlite3.connect(dbname, isolation_level=None)
if getver(db) != '0.2':
raise Exception('wrong database version')
input('Please stop the service and then press Enter.')
try:
db.execute('alter table pkginfo add pkgrepo text')
db.execute('update pkginfo set pkgrepo = ?', (reponame,))
except sqlite3.OperationalError:
# the column is already there
pass
try:
db.execute('alter table sigfiles add pkgrepo text')
db.execute('update sigfiles set pkgrepo = ?', (reponame,))
except sqlite3.OperationalError:
# the column is already there
pass
setver(db, '0.3')
db.close()
input('Please re-start the service with new code and then press Enter.')
if __name__ == '__main__':
if len(sys.argv) != 3:
sys.exit('usage: upgrade_from_0.2_to_0.3.py info-database-file repository-name')
main(*sys.argv[1:])


@@ -1,44 +0,0 @@
#!/usr/bin/env python3
# vim:fileencoding=utf-8
import os, sys
import sqlite3
import pickle
import logging
from archrepo2.lib.nicelogger import enable_pretty_logging
enable_pretty_logging(logging.DEBUG)
from archrepo2.dbutil import *
def main(dbname):
db = sqlite3.connect(dbname, isolation_level=None)
if getver(db) != '0.3':
raise Exception('wrong database version')
base_dir = os.path.dirname(dbname)
input('Please stop the service and then press Enter.')
p = db.execute('select filename from sigfiles limit 1').fetchone()[0]
newp = os.path.relpath(p, start=base_dir)
suffix_len = len(os.path.commonprefix((newp[::-1], p[::-1])))
old_prefix = p[:-suffix_len]
new_prefix = newp[:-suffix_len]
db.execute('''
UPDATE OR REPLACE sigfiles
SET filename = REPLACE(filename, ?, ?)
''', (old_prefix, new_prefix))
db.execute('''
UPDATE OR REPLACE pkginfo
SET filename = REPLACE(filename, ?, ?)
''', (old_prefix, new_prefix))
setver(db, '0.4')
db.close()
input('Please re-start the service with new code and then press Enter.')
if __name__ == '__main__':
if len(sys.argv) != 2:
sys.exit('usage: upgrade_from_0.3_to_0.4.py info-database-file')
main(*sys.argv[1:])


@@ -1,23 +0,0 @@
#!/usr/bin/env python3
from setuptools import setup, find_packages
import archrepo2
setup(
name = 'archrepo2',
version = archrepo2.__version__,
packages = find_packages(),
install_requires = ['tornado>2.4.1', 'pyinotify', 'pyalpm'],
entry_points = {
'console_scripts': [
'archreposrv = archrepo2.archreposrv:main',
],
},
author = 'lilydjwg',
author_email = 'lilydjwg@gmail.com',
description = 'Arch Linux repository manager',
license = 'MIT',
keywords = 'archlinux linux',
url = 'https://github.com/lilydjwg/archrepo2',
)


@@ -1,59 +1,51 @@
base_dir: /var/cache/pacman/pkg
# wait for server to start
wait
# x86_64 packages
# i686 and x86_64 packages
add: any zip-3.0-3-x86_64.pkg.tar.xz
add: x86_64 zip-3.0-3-i686.pkg.tar.xz
wait
checky: x86_64/zip-3.0-3-x86_64.pkg.tar.xz
checky: i686/zip-3.0-3-i686.pkg.tar.xz
checkp: x86_64 zip=3.0-3
checkp: i686 zip=3.0-3
# 'any' package
add: x86_64 youtube-dl-2014.01.28.1-1-any.pkg.tar.xz
add: i686 ydcv-0.3-1-any.pkg.tar.xz
wait
checky: any/youtube-dl-2014.01.28.1-1-any.pkg.tar.xz
checky: i686/youtube-dl-2014.01.28.1-1-any.pkg.tar.xz
checky: x86_64/youtube-dl-2014.01.28.1-1-any.pkg.tar.xz
checkp: x86_64 youtube-dl=2014.01.28.1-1
checkp: i686 youtube-dl=2014.01.28.1-1
checky: any/ydcv-0.3-1-any.pkg.tar.xz
checky: i686/ydcv-0.3-1-any.pkg.tar.xz
checky: x86_64/ydcv-0.3-1-any.pkg.tar.xz
checkp: x86_64 ydcv=0.3-1
checkp: i686 ydcv=0.3-1
# update a package
add: any youtube-dl-2014.01.29-1-any.pkg.tar.xz
add: any ydcv-0.3.2-1-any.pkg.tar.xz
wait
checky: any/youtube-dl-2014.01.29-1-any.pkg.tar.xz
checky: i686/youtube-dl-2014.01.29-1-any.pkg.tar.xz
checky: x86_64/youtube-dl-2014.01.29-1-any.pkg.tar.xz
checkp: x86_64 youtube-dl=2014.01.29-1
checkp: i686 youtube-dl=2014.01.29-1
checky: any/ydcv-0.3.2-1-any.pkg.tar.xz
checky: i686/ydcv-0.3.2-1-any.pkg.tar.xz
checky: x86_64/ydcv-0.3.2-1-any.pkg.tar.xz
checkp: x86_64 ydcv=0.3.2-1
checkp: i686 ydcv=0.3.2-1
# downgrade and remove
add: x86_64 youtube-dl-2014.01.29-1-any.pkg.tar.xz
remove: any youtube-dl-2014.01.28.1-1-any.pkg.tar.xz
add: i686 ydcv-0.3.1-1-any.pkg.tar.xz
remove: any ydcv-0.3-1-any.pkg.tar.xz
wait
checkn: any/youtube-dl-2014.01.28.1-1-any.pkg.tar.xz
checkn: i686/youtube-dl-2014.01.28.1-1-any.pkg.tar.xz
checkn: x86_64/youtube-dl-2014.01.28.1-1-any.pkg.tar.xz
checkp: x86_64 youtube-dl=2014.01.29-1
checkp: i686 youtube-dl=2014.01.29-1
checkn: any/ydcv-0.3-1-any.pkg.tar.xz
checkn: i686/ydcv-0.3-1-any.pkg.tar.xz
checkn: x86_64/ydcv-0.3-1-any.pkg.tar.xz
checkp: x86_64 ydcv=0.3.1-1
checkp: i686 ydcv=0.3.1-1
# completely remove packages
remove: any youtube-dl-2014.01.29-1-any.pkg.tar.xz
remove: x86_64 zip-3.0-3-x86_64.pkg.tar.xz
remove: any ydcv-0.3.1-1-any.pkg.tar.xz
remove: i686 zip-3.0-3-i686.pkg.tar.xz
wait
checkn: any/youtube-dl-2014.01.29-1-any.pkg.tar.xz
checkn: i686/youtube-dl-2014.01.29-1-any.pkg.tar.xz
checkn: x86_64/youtube-dl-2014.01.29-1-any.pkg.tar.xz
checkp: x86_64 youtube-dl=null
checkp: i686 youtube-dl=null
checkp: any youtube-dl=null
checkn: any/ydcv-0.3.1-1-any.pkg.tar.xz
checkn: i686/ydcv-0.3.1-1-any.pkg.tar.xz
checkn: x86_64/ydcv-0.3.1-1-any.pkg.tar.xz
checkp: x86_64 ydcv=null
checkp: i686 ydcv=null
checkp: any ydcv=null
checkp: i686 zip=null
checkp: x86_64 zip=null
# add a package, then remove it while the add is still in progress
# look at the log carefully!
add: x86_64 linux-3.12.8-1-x86_64.pkg.tar.xz
racing-wait
remove: x86_64 linux-3.12.8-1-x86_64.pkg.tar.xz
wait
checkn: x86_64/linux-3.12.8-1-x86_64.pkg.tar.xz
checkp: x86_64 linux=null
checkp: x86_64 zip=3.0-3


@@ -1,4 +1,5 @@
#!/usr/bin/env python3
# vim:fileencoding=utf-8
import os
import sys
@@ -25,13 +26,6 @@ class WaitCommand(Command):
logging.info('waiting for %d seconds...', t)
time.sleep(t)
class RacingWaitCommand(Command):
cmd = 'racing-wait'
def run(self):
t = self.ctx['wait_time'] + 0.3
logging.info('Racing-waiting for %s seconds...', t)
time.sleep(t)
class BaseDirCommand(Command):
cmd = 'base_dir'
def run(self):
@@ -116,9 +110,6 @@ def run_action_file(conf, actlines):
cmd = cmd.rstrip(':')
try:
cmdmap[cmd](ctx, args)
except KeyboardInterrupt:
logging.info('Interrupted.')
break
except:
logging.error('error running action: %s', l, exc_info=True)
logging.info('done running action file.')
@@ -128,8 +119,10 @@ class Server:
self.conffile = conffile
def start(self):
server_path = os.path.join('.', os.path.normpath(os.path.join(__file__, '../../archreposrv')))
logging.debug('server path: %s', server_path)
logging.info('starting server...')
self.p = subprocess.Popen(['archreposrv', self.conffile])
self.p = subprocess.Popen([server_path, self.conffile])
def stop(self):
logging.info('quitting server...')

test_readpkg.py Executable file

@@ -0,0 +1,11 @@
#!/usr/bin/env python3
# vim:fileencoding=utf-8
from subprocess import getoutput
allpkgs = getoutput(r"locate -be --regex '\.pkg\.tar\.xz$'").split('\n')
from pkgreader import readpkg
for p in allpkgs:
print('reading package:', p)
d = readpkg(p)
print('desc:', d.get('pkgdesc', '(nothing)'))