mirror of
https://github.com/lilydjwg/nvchecker.git
synced 2025-03-10 06:14:02 +00:00
apt: run decompress in an executor to give other tasks time to run
This commit is contained in:
parent
450fa7de10
commit
f4983eaea3
1 changed file with 26 additions and 8 deletions
|
@ -1,27 +1,43 @@
|
||||||
# MIT licensed
|
# MIT licensed
|
||||||
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.
|
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.
|
||||||
|
|
||||||
from nvchecker.api import session, GetVersionError
|
import asyncio
|
||||||
|
|
||||||
|
from nvchecker.api import (
|
||||||
|
session, GetVersionError,
|
||||||
|
VersionResult, Entry, AsyncCache, KeyManager,
|
||||||
|
)
|
||||||
|
|
||||||
# URL/path templates for APT repository metadata.
APT_RELEASE_URL = "%s/dists/%s/Release"        # (mirror, suite)
APT_PACKAGES_PATH = "%s/binary-%s/Packages%s"  # (repo, arch, suffix)
APT_PACKAGES_URL = "%s/dists/%s/%s"            # (mirror, suite, packages_path)
# Preferred order when picking a Packages index from the Release file:
# try the compressed variants first, then the plain uncompressed file.
APT_PACKAGES_SUFFIX_PREFER = (".xz", ".gz", "")
|
|
||||||
async def get_url(url):
|
def _decompress_data(url: str, data: bytes) -> str:
|
||||||
res = await session.get(url)
|
|
||||||
data = res.body
|
|
||||||
|
|
||||||
if url.endswith(".xz"):
|
if url.endswith(".xz"):
|
||||||
import lzma
|
import lzma
|
||||||
data = lzma.decompress(data)
|
data = lzma.decompress(data)
|
||||||
elif url.endswith(".gz"):
|
elif url.endswith(".gz"):
|
||||||
import gzip
|
import gzip
|
||||||
data = gzip.decompress(data)
|
data = gzip.decompress(data)
|
||||||
|
else:
|
||||||
|
raise NotImplementedError(url)
|
||||||
|
|
||||||
return data.decode('utf-8')
|
return data.decode('utf-8')
|
||||||
|
|
||||||
async def get_version(name, conf, *, cache, **kwargs):
|
async def get_url(url: str) -> str:
  """Fetch *url* and return its body as decompressed, decoded text.

  The HTTP fetch is awaited on the event loop; the CPU-bound
  decompression is handed off to the default executor so concurrent
  tasks are not starved while a large Packages index is unpacked.
  """
  response = await session.get(url)
  raw = response.body
  loop = asyncio.get_running_loop()
  return await loop.run_in_executor(None, _decompress_data, url, raw)
|
||||||
|
|
||||||
|
async def get_version(
|
||||||
|
name: str, conf: Entry, *,
|
||||||
|
cache: AsyncCache, keymanager: KeyManager,
|
||||||
|
**kwargs,
|
||||||
|
) -> VersionResult:
|
||||||
srcpkg = conf.get('srcpkg')
|
srcpkg = conf.get('srcpkg')
|
||||||
pkg = conf.get('pkg')
|
pkg = conf.get('pkg')
|
||||||
mirror = conf['mirror']
|
mirror = conf['mirror']
|
||||||
|
@ -35,7 +51,8 @@ async def get_version(name, conf, *, cache, **kwargs):
|
||||||
elif not srcpkg and not pkg:
|
elif not srcpkg and not pkg:
|
||||||
pkg = name
|
pkg = name
|
||||||
|
|
||||||
apt_release = await cache.get(APT_RELEASE_URL % (mirror, suite), get_url)
|
apt_release = await cache.get(
|
||||||
|
APT_RELEASE_URL % (mirror, suite), get_url) # type: ignore
|
||||||
for suffix in APT_PACKAGES_SUFFIX_PREFER:
|
for suffix in APT_PACKAGES_SUFFIX_PREFER:
|
||||||
packages_path = APT_PACKAGES_PATH % (repo, arch, suffix)
|
packages_path = APT_PACKAGES_PATH % (repo, arch, suffix)
|
||||||
if " " + packages_path in apt_release:
|
if " " + packages_path in apt_release:
|
||||||
|
@ -43,7 +60,8 @@ async def get_version(name, conf, *, cache, **kwargs):
|
||||||
else:
|
else:
|
||||||
raise GetVersionError('Packages file not found in APT repository')
|
raise GetVersionError('Packages file not found in APT repository')
|
||||||
|
|
||||||
apt_packages = await cache.get(APT_PACKAGES_URL % (mirror, suite, packages_path), get_url)
|
apt_packages = await cache.get(
|
||||||
|
APT_PACKAGES_URL % (mirror, suite, packages_path), get_url) # type: ignore
|
||||||
|
|
||||||
pkg_found = False
|
pkg_found = False
|
||||||
for line in apt_packages.split("\n"):
|
for line in apt_packages.split("\n"):
|
||||||
|
|
Loading…
Add table
Reference in a new issue