Compare commits


25 commits
v0.5...master

Author SHA1 Message Date
lilydjwg
e7e8142d05 watch necessary events only
so that an overflow is less likely to happen.
2025-02-22 20:55:20 +08:00
lilydjwg
eaddcd33ba update nicelogger 2024-04-28 16:30:36 +08:00
lilydjwg
954068b000 sync archpkg.py 2023-08-26 10:32:38 +08:00
依云
fa4f743045
Merge pull request #9 from yingziwu/vercmp
fix bug on pkgver compare
2023-08-23 13:15:19 +08:00
bgme
5bc16468cb remove setuptools from setup.py install_requires 2023-08-23 01:24:18 +08:00
bgme
668140131b replace subprocess with pyalpm 2023-08-22 01:29:56 +08:00
bgme
9187984b6b fix bug on pkgver compare 2023-08-22 01:29:43 +08:00
lilydjwg
c0031235a6 distinguish between linking and creating new files
fixes https://github.com/archlinuxcn/lilac/issues/139
2019-11-27 16:36:38 +08:00
lilydjwg
7a7ab1b4e4 enable spool-directory in example config 2019-11-27 16:33:01 +08:00
lilydjwg
c074e3b271 db version 0.1 is gone 2019-10-17 15:55:40 +08:00
lilydjwg
eef3c96242 drop pkgreader
This field currently has no use, and even if we'll need it later, we
should implement it using pyalpm.

This change is made to support zstd-compressed packages.
2019-10-17 15:54:29 +08:00
lilydjwg
bd1ac0962f update modules from winterpy 2019-10-17 15:50:02 +08:00
lilydjwg
fe1e4b10cf write a lastupdate file in the repo 2019-02-26 15:50:14 +08:00
lilydjwg
79494a5722 repomon: move send_notification to EventHandler 2019-02-26 15:44:46 +08:00
lilydjwg
b3d473d79b rely on systemd service for daemonization things 2019-02-23 17:53:46 +08:00
lilydjwg
3c26780080 pkgreader: look further for a nice package 2018-08-28 17:30:48 +08:00
lilydjwg
2ef72fe1a9 pkgreader: better detection of "nice packages" 2018-08-28 17:05:02 +08:00
lilydjwg
059e0bb581 pyinotify needs the ioloop arg 2018-08-10 18:34:13 +08:00
lilydjwg
db7e1797fc remove one more ioloop arg 2018-08-10 16:54:13 +08:00
lilydjwg
05c6a7b233 remove explicit ioloop argument, supporting tornado >= 5
This also makes it no longer support tornado < 3.1, which was at least
five years old.

Fixes #4.
2018-08-10 15:40:52 +08:00
lilydjwg
763d1ec110 add systemd service file 2018-06-25 14:28:36 +08:00
lilydjwg
e00de1a616 add support for a spool directory 2017-08-08 14:54:32 +08:00
依云
65660f23c5 handle CREATE event for hard links 2016-10-08 12:38:14 +08:00
依云
bcca713e5c configurable supported archs 2016-08-22 17:47:31 +08:00
Yichao Yu
5a28db12f2
Add ARM (arm, armv6h, armv7h) and AArch64 support
Also make sure the directory is created before watching it/creating files in it.
2016-08-08 21:40:52 +08:00
12 changed files with 293 additions and 266 deletions

View file

@@ -13,8 +13,9 @@ DEPENDENCIES
- Python, >= 3.3, with sqlite support
- setuptools
- tornado, > 2.4.1
- tornado, > 3.1
- pyinotify, tested with 0.9.4
- pyalpm, tested with 0.10.6
NOTE
====
@@ -25,11 +26,9 @@ NOTE
TODO
====
- [high] singleton daemon
- [high] adding and then removing it before adding complete will result
in not-in-database removing
- [middle] specify what architectures we have and don't require others
- [low] fork to background
- [low] use one common command queue (now one each repo)
- [low] verify packages

View file

@@ -1 +1 @@
__version__ = '0.5'
__version__ = '0.6dev'

View file

@@ -33,16 +33,18 @@ def main():
config.read(conffile)
repos = check_and_get_repos(config)
notifiers = [repomon(config[repo]) for repo in repos]
notifiers = []
for repo in repos:
notifiers.extend(repomon(config[repo]))
ioloop = IOLoop.instance()
ioloop = IOLoop.current()
logger.info('starting archreposrv.')
try:
ioloop.start()
except KeyboardInterrupt:
ioloop.close()
for notifier in notifiers:
notifier.stop()
ioloop.close()
print()
if __name__ == '__main__':

View file

@@ -1,93 +1,68 @@
from __future__ import annotations
import os
from collections import defaultdict, namedtuple
from collections import namedtuple
import subprocess
import re
from typing import List, Dict
from pkg_resources import parse_version
import pyalpm
class PkgNameInfo(namedtuple('PkgNameInfo', 'name, version, release, arch')):
def __lt__(self, other):
def __lt__(self, other) -> bool:
if self.name != other.name or self.arch != other.arch:
return NotImplemented
if self.version != other.version:
return parse_version(self.version) < parse_version(other.version)
return pyalpm.vercmp(self.version, other.version) < 0
return float(self.release) < float(other.release)
def __gt__(self, other):
def __gt__(self, other) -> bool:
# No, try the other side please.
return NotImplemented
@property
def fullversion(self):
def fullversion(self) -> str:
return '%s-%s' % (self.version, self.release)
@classmethod
def parseFilename(cls, filename):
def parseFilename(cls, filename: str) -> 'PkgNameInfo':
return cls(*trimext(filename, 3).rsplit('-', 3))
def trimext(name, num=1):
def trimext(name: str, num: int = 1) -> str:
for i in range(num):
name = os.path.splitext(name)[0]
return name
def get_pkgname_with_bash(PKGBUILD):
def get_pkgname_with_bash(PKGBUILD: str) -> List[str]:
script = '''\
. '%s'
echo ${pkgname[*]}''' % PKGBUILD
# Python 3.4 has 'input' arg for check_output
p = subprocess.Popen(['bash'], stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
output = p.communicate(script.encode('latin1'))[0].decode('latin1')
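# bwrap runs bash in a throwaway sandbox: --unshare-all drops network
# access, and tmpfs mounts cover /home, /run and /tmp, so sourcing an
# untrusted PKGBUILD cannot modify the real system.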
p = subprocess.Popen(
['bwrap', '--unshare-all', '--ro-bind', '/', '/', '--tmpfs', '/home',
'--tmpfs', '/run', '--die-with-parent',
'--tmpfs', '/tmp', '--proc', '/proc', '--dev', '/dev', '/bin/bash'],
stdin=subprocess.PIPE, stdout=subprocess.PIPE,
)
output = p.communicate(script.encode())[0].decode()
ret = p.wait()
if ret != 0:
raise subprocess.CalledProcessError(
ret, ['bash'], output)
return output.split()
def _run_bash(script):
p = subprocess.Popen(['bash'], stdin=subprocess.PIPE)
p.communicate(script.encode('latin1'))
ret = p.wait()
if ret != 0:
raise subprocess.CalledProcessError(
ret, ['bash'])
pkgfile_pat = re.compile(r'(?:^|/).+-[^-]+-[\d.]+-(?:\w+)\.pkg\.tar\.(?:xz|zst)$')
def get_aur_pkgbuild_with_bash(name):
script = '''\
. /usr/lib/yaourt/util.sh
. /usr/lib/yaourt/aur.sh
init_color
aur_get_pkgbuild '%s' ''' % name
_run_bash(script)
def get_abs_pkgbuild_with_bash(name):
script = '''\
. /usr/lib/yaourt/util.sh
. /usr/lib/yaourt/abs.sh
init_paths
init_color
arg=$(pacman -Sp --print-format '%%r/%%n' '%s')
RSYNCOPT="$RSYNCOPT -O"
abs_get_pkgbuild "$arg" ''' % name
_run_bash(script)
pkgfile_pat = re.compile(r'(?:^|/).+-[^-]+-[\d.]+-(?:\w+)\.pkg\.tar\.xz$')
def _strip_ver(s):
def _strip_ver(s: str) -> str:
return re.sub(r'[<>=].*', '', s)
def get_package_dependencies(name):
out = subprocess.check_output(["package-query", "-Sii", "-f", "%D", name])
out = out.decode('latin1')
return [_strip_ver(x) for x in out.split() if x != '-']
def get_package_info(name, local=False):
def get_package_info(name: str, local: bool = False) -> Dict[str, str]:
old_lang = os.environ['LANG']
os.environ['LANG'] = 'C'
args = '-Qi' if local else '-Si'
try:
out = subprocess.check_output(["pacman", args, name])
out = out.decode('latin1')
outb = subprocess.check_output(["pacman", args, name])
out = outb.decode('latin1')
finally:
os.environ['LANG'] = old_lang
@@ -104,14 +79,3 @@ def get_package_info(name, local=False):
ret[key] += ' ' + l.strip()
return ret
def get_package_repository(name):
try:
out = subprocess.check_output(["package-query", "-Sii", "-f", "%r", name])
repo = out.strip().decode('latin1')
except subprocess.CalledProcessError:
repo = 'local'
return repo
def is_official(name):
repo = get_package_repository(name)
return repo in ('core', 'extra', 'community', 'multilib', 'testing')

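The parse_version-to-pyalpm.vercmp switch above is the actual fix for the pkgver comparison bug: pkg_resources applies PEP 440 rules, which disagree with pacman's ordering on common Arch version strings. A minimal sketch of the pacman semantics, following vercmp(8):

import pyalpm

# vercmp returns <0, 0 or >0, strcmp-style
assert pyalpm.vercmp('1.0', '1.0.1') < 0   # plain numeric ordering
assert pyalpm.vercmp('1.0rc', '1.0') < 0   # rc sorts before the release
assert pyalpm.vercmp('1:0.9', '2.0') > 0   # an epoch dominates everything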
View file

@@ -4,13 +4,15 @@ A Tornado-inspired logging formatter, with displayed time with millisecond accur
FYI: pyftpdlib also has a Tornado-style logger.
'''
from __future__ import annotations
import sys
import time
import logging
class TornadoLogFormatter(logging.Formatter):
def __init__(self, color, *args, **kwargs):
super().__init__(self, *args, **kwargs)
super().__init__(*args, **kwargs)
self._color = color
if color:
import curses
@@ -41,13 +43,23 @@ class TornadoLogFormatter(logging.Formatter):
record.message = "Bad message (%r): %r" % (e, record.__dict__)
record.asctime = time.strftime(
"%m-%d %H:%M:%S", self.converter(record.created))
record.asctime += '.%03d' % ((record.created % 1) * 1000)
prefix = '[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]' % \
prefix = '[%(levelname)1.1s %(asctime)s.%(msecs)03d %(module)s:%(lineno)d]' % \
record.__dict__
if self._color:
prefix = (self._colors.get(record.levelno, self._normal) +
prefix + self._normal)
formatted = prefix + " " + record.message
formatted += ''.join(
' %s=%s' % (k, v) for k, v in record.__dict__.items()
if k not in {
'levelname', 'asctime', 'module', 'lineno', 'args', 'message',
'filename', 'exc_info', 'exc_text', 'created', 'funcName',
'processName', 'process', 'msecs', 'relativeCreated', 'thread',
'threadName', 'name', 'levelno', 'msg', 'pathname', 'stack_info',
'taskName',
})
if record.exc_info:
if not record.exc_text:
record.exc_text = self.formatException(record.exc_info)

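Besides switching to %(msecs)03d, the block added above appends any non-standard LogRecord attributes as trailing key=value pairs, so callers can pass structured context through logging's standard extra argument. A small illustration (output shape approximate):

import logging

logger = logging.getLogger('repomon')
# with TornadoLogFormatter installed, the extra fields render after the
# message, roughly:
# [I 02-22 20:55:20.123 repomon:6] package added pkg=foo arch=x86_64
logger.info('package added', extra={'pkg': 'foo', 'arch': 'x86_64'})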
View file

@@ -1,44 +0,0 @@
import tarfile
import logging
logger = logging.getLogger(__name__)
multikeys = {'depend', 'makepkgopt', 'optdepend', 'replaces', 'conflict',
'provides', 'license', 'backup', 'group', 'makedepend', 'checkdepend'}
def _add_to_dict(d, key, value):
if key in multikeys:
if key in d:
d[key].append(value)
else:
d[key] = [value]
else:
assert key not in d, 'unexpected multi-value key "%s"' % key
d[key] = value
def readpkg(file):
tar = tarfile.open(file)
info = tar.next()
if not info or info.name != '.PKGINFO':
logger.warn('%s is not a nice package!', file)
info = '.PKGINFO' # have to look further
f = tar.extractfile(info)
data = f.read().decode()
tar.close()
d = {}
key = None
for l in data.split('\n'):
if l.startswith('#'):
continue
if not l:
continue
if '=' not in l:
value += l
else:
if key is not None:
_add_to_dict(d, key, value)
key, value = l.split(' = ', 1)
_add_to_dict(d, key, value)
return d

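pkgreader pulled .PKGINFO straight out of the tarball with the tarfile module, which is what zstd compression broke. If the metadata is ever needed again, the commit message above points at pyalpm; a hedged sketch of that route (the handle paths and package filename are illustrative):

import pyalpm

# libalpm itself reads the package file, so any compression pacman
# supports (including zstd) works here.
handle = pyalpm.Handle('/', '/var/lib/pacman')
pkg = handle.load_pkg('/srv/repo/x86_64/foo-1.0-1-x86_64.pkg.tar.zst')
print(pkg.name, pkg.version, pkg.arch, pkg.desc)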
View file

@@ -12,7 +12,6 @@ import sqlite3
import socket
import time
import hashlib
import pickle
from os.path import relpath
import pyinotify
@@ -21,14 +20,10 @@ from tornado.ioloop import IOLoop
import tornado.process
from .lib import archpkg
from . import pkgreader
from . import dbutil
logger = logging.getLogger(__name__)
# handles only x86_64, i686 and any arch packages
_pkgfile_pat = re.compile(r'(?:^|/).+-[^-]+-[\d.]+-(?:x86_64|i686|any)\.pkg\.tar\.xz(?:\.sig)?$')
def same_existent_file(a, b):
try:
return os.path.samefile(a, b)
@@ -53,10 +48,11 @@ class RepoMan:
_cmd_queue = queue.Queue()
_cmd_running = False
def __init__(self, config, base, ioloop=None):
def __init__(self, config, base, siteman):
self.action = []
self._ioloop = ioloop or IOLoop.instance()
self._ioloop = IOLoop.current()
self._base = base
self._siteman = siteman
self._repo_dir = config.get('path')
self.name = config.get('name')
@@ -69,34 +65,27 @@
self._auto_rename = config.getboolean('auto-rename', True)
self._symlink_any = config.getboolean('symlink-any', True)
notification_type = config.get('notification-type', 'null')
if notification_type != 'null':
self._notification_addrs = config.get('notification-addresses')
self._notification_secret = config.get('notification-secret')
self.send_notification = getattr(
self,
'send_notification_' + notification_type.replace('-', '_'),
)
def queue_command(self, cmd, callbacks=None):
self._cmd_queue.put((cmd, callbacks))
if not self._cmd_running:
self.run_command()
def run_command(self):
if not self._cmd_running:
self._siteman.inc_running()
self.__class__._cmd_running = True
try:
cmd, callbacks = self._cmd_queue.get_nowait()
except queue.Empty:
self.send_notification()
self.__class__._cmd_running = False
self._siteman.dec_running()
return
logger.info('Running cmd: %r', cmd)
# have to specify io_loop or we'll get error tracebacks in some versions
# of Tornado
# no longer have to specify io_loop in Tornado > 3.1. Let's drop them for
# Tornado >= 5
try:
p = tornado.process.Subprocess(cmd, io_loop=self._ioloop)
p = tornado.process.Subprocess(cmd)
except OSError:
logger.error('failed to run command.', exc_info=True)
self.run_command()
@@ -151,52 +140,6 @@ class RepoMan:
self.run,
)
def send_notification_simple_udp(self):
msg = self._new_notification_msg()
socks = {}
for address, port in self._parse_notification_address_inet():
try:
af, socktype, proto, canonname, sockaddr = socket.getaddrinfo(
address, port, 0, socket.SOCK_DGRAM, 0, 0)[0]
except:
logger.exception('failed to create socket to %r for notification',
(address, port))
continue
info = af, socktype, proto
if info not in socks:
sock = socket.socket(*info)
socks[info] = sock
else:
sock = socks[info]
sock.sendto(msg, sockaddr)
logger.info('simple udp notification sent to %s.', (address, port))
def _new_notification_msg(self):
s = 'update'
t = str(int(time.time()))
data = s + '|' + t
hashing = data + self._notification_secret
sig = hashlib.sha1(hashing.encode('utf-8')).hexdigest()
msg = data + '|' + sig
logger.info('new notification msg: %s.', msg)
return msg.encode('utf-8')
def _parse_notification_address_inet(self):
cached = self._notification_addrs
if isinstance(cached, str):
addresses = []
for addr in cached.split():
host, port = addr.rsplit(':', 1)
port = int(port)
addresses.append((host, port))
cached = self._notification_addrs = tuple(addresses)
return cached
def send_notification_null(self):
logger.info('null notification sent.')
def run(self):
self._timeout = None
actions = self.action
@@ -221,14 +164,36 @@
self._do_remove(toremove)
class EventHandler(pyinotify.ProcessEvent):
def my_init(self, config, wm, ioloop=None):
_n_running = 0
def my_init(
self, filter_pkg, supported_archs, config, wm,
):
notification_type = config.get(
'notification-type', 'null')
if notification_type != 'null':
self._notification_addrs = config.get(
'notification-addresses')
self._notification_secret = config.get(
'notification-secret')
self.send_notification = getattr(
self,
'send_notification_' +
notification_type.replace('-', '_'),
)
self.filter_pkg = filter_pkg
self.moved_away = {}
self.created = {}
self.repomans = {}
# TODO: use a expiring dict
# TODO: use an expiring dict
self.our_links = set()
self._ioloop = ioloop or IOLoop.instance()
self._ioloop = IOLoop.current()
base = config.get('path')
self._lastupdate_file = os.path.join(base, 'lastupdate')
dbname = config.get('info-db', os.path.join(base, 'pkginfo.db'))
new_db = not os.path.exists(dbname)
self._db = sqlite3.connect(dbname, isolation_level=None) # isolation_level=None means autocommit
@@ -252,15 +217,19 @@ class EventHandler(pyinotify.ProcessEvent):
(filename text unique,
pkgrepo text)''')
dirs = [os.path.join(base, x) for x in ('any', 'i686', 'x86_64')]
self._supported_archs = supported_archs
dirs = [os.path.join(base, x) for x in self._supported_archs]
self.files = files = set()
for d in dirs:
os.makedirs(d, exist_ok=True)
for f in os.listdir(d):
p = os.path.join(d, f)
if os.path.exists(p): # filter broken symlinks
files.add(p)
wm.add_watch(d, pyinotify.ALL_EVENTS)
self.repomans[d] = RepoMan(config, d, self._ioloop)
wm.add_watch(d, pyinotify.IN_CLOSE_WRITE | pyinotify.IN_DELETE |
pyinotify.IN_CREATE | pyinotify.IN_MOVED_FROM |
pyinotify.IN_MOVED_TO | pyinotify.IN_OPEN)
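# IN_OPEN is only watched to tell real file creation apart from hard
# links (see process_IN_OPEN below); the rest of the work is driven by
# the close/move/delete events.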
self.repomans[d] = RepoMan(config, d, self)
self.name = self.repomans[d].name
self._auto_rename = self.repomans[d]._auto_rename
self._symlink_any = self.repomans[d]._symlink_any
@@ -272,10 +241,12 @@
oldfiles.update(f[0] for f in self._db.execute('select filename from sigfiles where pkgrepo = ?', (self.name,)))
oldfiles = {os.path.join(self._db_dir, f) for f in oldfiles}
for f in sorted(filterfalse(filterPkg, files - oldfiles), key=pkgsortkey):
for f in sorted(filterfalse(self.filter_pkg, files - oldfiles),
key=pkgsortkey):
self.dispatch(f, 'add')
for f in sorted(filterfalse(filterPkg, oldfiles - files), key=pkgsortkey):
for f in sorted(filterfalse(self.filter_pkg, oldfiles - files),
key=pkgsortkey):
self.dispatch(f, 'remove')
def process_IN_CLOSE_WRITE(self, event):
@@ -301,9 +272,31 @@
logger.debug('Symlinked: %s', file)
self.dispatch(file, 'add')
self.files.add(file)
else:
logger.debug('Created: %s', file)
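# A freshly created regular file is immediately opened for writing,
# which delivers IN_OPEN; a new hard link never is. If no IN_OPEN
# cancels this timeout within 0.1s, the path is treated as a link.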
self.created[file] = self._ioloop.add_timeout(
self._ioloop.time() + 0.1,
partial(self.linked, file),
)
def process_IN_OPEN(self, event):
file = event.pathname
try:
timeout = self.created.pop(file)
except KeyError:
return
self._ioloop.remove_timeout(timeout)
def linked(self, file):
logger.debug('Linked: %s', file)
del self.created[file]
self.dispatch(file, 'add')
self.files.add(file)
def movedOut(self, event):
logger.debug('Moved away: %s', event.pathname)
del self.moved_away[event.cookie]
self.dispatch(event.pathname, 'remove')
def process_IN_MOVED_FROM(self, event):
@@ -319,7 +312,7 @@
self.files.add(event.pathname)
if event.cookie in self.moved_away:
self._ioloop.remove_timeout(self.moved_away[event.cookie])
self._ioloop.remove_timeout(self.moved_away.pop(event.cookie))
else:
logger.debug('Moved here: %s', event.pathname)
self.dispatch(event.pathname, 'add')
@@ -348,7 +341,7 @@
d = newd
if self._symlink_any and act.arch == 'any':
for newarch in ('i686', 'x86_64', 'any'):
for newarch in self._supported_archs:
if newarch == arch:
# this file itself
continue
@@ -357,6 +350,7 @@
if action == 'add':
oldpath = os.path.join('..', arch, file)
if not same_existent_file(oldpath, newpath):
os.makedirs(newd, exist_ok=True)
try:
self.our_links.add(newpath)
os.symlink(oldpath, newpath)
@@ -387,13 +381,7 @@
except KeyError:
owner = 'uid_%d' % stat.st_uid
try:
info = pkgreader.readpkg(act.path)
except:
logger.error('failed to read info for package %s', act.path, exc_info=True)
info = None
info = pickle.dumps(info)
info = None
self._db.execute(
'''insert or replace into pkginfo
(filename, pkgrepo, pkgname, pkgarch, pkgver, forarch, state, owner, mtime, info) values
@@ -448,10 +436,70 @@
self._db.execute('''delete from sigfiles where filename = ? and pkgrepo = ?''',
(rpath, self.name))
def filterPkg(path):
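# Running repo-add/repo-remove commands are reference-counted across
# all RepoMan instances; the notification and the lastupdate file
# only fire once the last queued command has finished.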
def dec_running(self):
self._n_running -= 1
if self._n_running == 0:
self.send_notification()
self.update_lastupdate()
def inc_running(self):
self._n_running += 1
def send_notification_simple_udp(self):
msg = self._new_notification_msg()
socks = {}
for address, port in self._parse_notification_address_inet():
try:
af, socktype, proto, canonname, sockaddr = socket.getaddrinfo(
address, port, 0, socket.SOCK_DGRAM, 0, 0)[0]
except:
logger.exception('failed to create socket to %r for notification',
(address, port))
continue
info = af, socktype, proto
if info not in socks:
sock = socket.socket(*info)
socks[info] = sock
else:
sock = socks[info]
sock.sendto(msg, sockaddr)
logger.info('simple udp notification sent to %s.', (address, port))
def _new_notification_msg(self):
s = 'update'
t = str(int(time.time()))
data = s + '|' + t
hashing = data + self._notification_secret
sig = hashlib.sha1(hashing.encode('utf-8')).hexdigest()
msg = data + '|' + sig
logger.info('new notification msg: %s.', msg)
return msg.encode('utf-8')
def _parse_notification_address_inet(self):
cached = self._notification_addrs
if isinstance(cached, str):
addresses = []
for addr in cached.split():
host, port = addr.rsplit(':', 1)
port = int(port)
addresses.append((host, port))
cached = self._notification_addrs = tuple(addresses)
return cached
def send_notification_null(self):
logger.info('null notification sent.')
def update_lastupdate(self):
t = '%d\n' % time.time()
with open(self._lastupdate_file, 'w') as f:
f.write(t)
def filter_pkg(regex, path):
if isinstance(path, Event):
path = path.pathname
return not _pkgfile_pat.search(path)
return not regex.search(path)
def pkgsortkey(path):
pkg = archpkg.PkgNameInfo.parseFilename(os.path.split(path)[1])
@@ -459,16 +507,100 @@ def pkgsortkey(path):
def repomon(config):
wm = pyinotify.WatchManager()
ioloop = IOLoop.instance()
supported_archs = config.get('supported-archs', 'i686 x86_64').split()
if 'any' not in supported_archs:
supported_archs.append('any')
# assume none of the archs has regex meta characters
regex = re.compile(r'(?:^|/)[^.].*-[^-]+-[\d.]+-(?:' + '|'.join(supported_archs) + r')\.pkg\.tar\.(?:xz|zst)(?:\.sig)?$')
filter_func = partial(filter_pkg, regex)
handler = EventHandler(
filterPkg,
config=config,
wm=wm,
ioloop=ioloop,
filter_func,
filter_pkg = filter_func,
supported_archs = supported_archs,
config = config,
wm = wm,
)
return pyinotify.TornadoAsyncNotifier(
ioloop = IOLoop.current()
ret = [pyinotify.TornadoAsyncNotifier(
wm,
ioloop,
default_proc_fun=handler,
)
ioloop = ioloop,
)]
if config.get('spool-directory'):
wm = pyinotify.WatchManager()
handler = SpoolHandler(
filter_func,
filter_pkg = filter_func,
path = config.get('spool-directory'),
dstpath = os.path.join(config.get('path'), 'any'),
wm = wm,
)
ret.append(pyinotify.TornadoAsyncNotifier(
wm, default_proc_fun=handler,
ioloop = ioloop,
))
return ret
class SpoolHandler(pyinotify.ProcessEvent):
def my_init(self, filter_pkg, path, dstpath, wm):
self.filter_pkg = filter_pkg
self.dstpath = dstpath
self._ioloop = IOLoop.current()
self.created = {}
files = set()
for f in os.listdir(path):
p = os.path.join(path, f)
if os.path.exists(p): # filter broken symlinks
files.add(p)
wm.add_watch(path, pyinotify.IN_CLOSE_WRITE | pyinotify.IN_CREATE |
pyinotify.IN_MOVED_TO | pyinotify.IN_OPEN)
self._initial_update(files)
def _initial_update(self, files):
for f in sorted(filterfalse(self.filter_pkg, files),
key=pkgsortkey):
self.dispatch(f)
def process_IN_CLOSE_WRITE(self, event):
logger.debug('Writing done: %s', event.pathname)
self.dispatch(event.pathname)
def process_IN_CREATE(self, event):
file = event.pathname
if os.path.islink(file):
logger.debug('Symlinked: %s', file)
self.dispatch(file)
else:
logger.debug('Created: %s', file)
self.created[file] = self._ioloop.add_timeout(
self._ioloop.time() + 0.1,
partial(self.linked, file),
)
def process_IN_OPEN(self, event):
file = event.pathname
try:
timeout = self.created.pop(file)
except KeyError:
return
self._ioloop.remove_timeout(timeout)
def linked(self, file):
logger.debug('Linked: %s', file)
del self.created[file]
self.dispatch(file)
def process_IN_MOVED_TO(self, event):
logger.debug('Moved here: %s', event.pathname)
self.dispatch(event.pathname)
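# os.rename cannot cross filesystems, which is why the example config
# requires the spool directory to be on the same filesystem as the repo.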
def dispatch(self, path):
filename = os.path.basename(path)
os.rename(path, os.path.join(self.dstpath, filename))

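For completeness, the simple-udp message built by _new_notification_msg above is easy to verify on the receiving end. A minimal sketch of a listener, assuming a shared secret and an illustrative port:

import hashlib
import socket

SECRET = 'changeme'  # must match notification-secret in the config

sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
sock.bind(('0.0.0.0', 9900))
while True:
    msg, addr = sock.recvfrom(4096)
    # wire format: 'update|<unix timestamp>|<sha1(data + secret)>'
    data, _, sig = msg.decode('utf-8').rpartition('|')
    if hashlib.sha1((data + SECRET).encode('utf-8')).hexdigest() == sig:
        print('repo updated at %s (from %r)' % (data.split('|')[1], addr))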
View file

@@ -19,6 +19,11 @@ name: archlinuxcn
# files in this directory, remember to update the configuration of inotify.
path: /home/lilydjwg/tmpfs/test
# If enabled, packages put into this directory will be moved into the repo.
# This path should be on the same filesystem as the repo path
# Should be used with auto-rename on
spool-directory: /home/lilydjwg/tmpfs/spool
# A database to store package info. Default to ${path}/pkginfo.db
#info-db: /home/lilydjwg/tmpfs/test/pkginfo.db
@@ -31,8 +36,12 @@ path: /home/lilydjwg/tmpfs/test
# directory. Default is on.
#auto-rename: on
# What archs we support? The default is i686 and x86_64. And you can add more
# like arm, armv6h, aarch64. Archs are separated by spaces.
#supported-archs: i686 x86_64 arm
# By enabling symlink-any, the server will automatically symlink the package
# files of 'any' architecture to 'i686' and 'x86_64'
# files of 'any' architecture to supported archs.
# Default is on.
#symlink-any: on
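Putting the new options together, a minimal configuration might look like this (a sketch; the section name and all paths are illustrative):

[repository]
name: archlinuxcn
path: /srv/repo
spool-directory: /srv/repo/spool
supported-archs: x86_64 aarch64
auto-rename: on
symlink-any: on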

misc/archrepo2.service (new file, 10 lines)
View file

@@ -0,0 +1,10 @@
[Unit]
Description=archrepo2 service for archlinuxcn repo
[Service]
Type=simple
ExecStart=/usr/bin/archreposrv /etc/archrepo2.ini
Restart=on-failure
[Install]
WantedBy=multi-user.target

View file

@@ -1,11 +0,0 @@
#!/usr/bin/env python3
# vim:fileencoding=utf-8
from subprocess import getoutput
allpkgs = getoutput(r"locate -be --regex '\.pkg\.tar\.xz$'").split('\n')
from archrepo2.pkgreader import readpkg
for p in allpkgs:
print('reading package:', p)
d = readpkg(p)
print('desc:', d.get('pkgdesc', '(nothing)'))

View file

@@ -1,46 +0,0 @@
#!/usr/bin/env python3
# vim:fileencoding=utf-8
import os, sys
import sqlite3
import configparser
import pickle
import logging
from archrepo2.lib.nicelogger import enable_pretty_logging
enable_pretty_logging(logging.DEBUG)
import archrepo2.pkgreader
from archrepo2.dbutil import *
def main(conffile):
config = configparser.ConfigParser()
config.read(conffile)
config = config['repository']
base = config.get('path')
dbname = config.get('info-db', os.path.join(base, 'pkginfo.db'))
db = sqlite3.connect(dbname, isolation_level=None)
assert getver(db) == '0.1', 'wrong database version'
input('Please stop the service and then press Enter.')
try:
db.execute('alter table pkginfo add info blob')
except sqlite3.OperationalError:
# the column is already there
pass
pkgs = [x[0] for x in db.execute('select filename from pkginfo')]
for p in pkgs:
try:
info = pkgreader.readpkg(p)
except:
logging.error('failed to read info for package %s', act.path)
info = None
info = pickle.dumps(info)
db.execute('update pkginfo set info=?', (info,))
setver(db, '0.2')
db.close()
input('Please re-start the service with new code and then press Enter.')
if __name__ == '__main__':
main(sys.argv[1])

View file

@@ -7,7 +7,7 @@ setup(
name = 'archrepo2',
version = archrepo2.__version__,
packages = find_packages(),
install_requires = ['tornado>2.4.1', 'pyinotify', 'setuptools'],
install_requires = ['tornado>2.4.1', 'pyinotify', 'pyalpm'],
entry_points = {
'console_scripts': [
'archreposrv = archrepo2.archreposrv:main',