Mirror of https://github.com/lilydjwg/nvchecker.git, synced 2025-03-10 06:14:02 +00:00.
Commit 0232d0fb4f ("aur: optimize requests"), parent 14b3863f11.
This commit changed 1 file, with 40 additions and 15 deletions.
|
@ -18,6 +18,31 @@ logger = structlog.get_logger(logger_name=__name__)
|
||||||
|
|
||||||
AUR_URL = 'https://aur.archlinux.org/rpc/'


class AurResults:
  """Cache of AUR RPC v5 'info' lookups, shared across request batches.

  Results are cached by package name.  Names the AUR returns nothing for
  are cached as ``None`` (a negative cache) so they are not re-queried.
  """

  def __init__(self) -> None:
    # name -> info dict from the AUR, or None when a queried name was
    # absent from the AUR's response (negative cache)
    self.cache = {}

  async def get_multiple(
    self,
    aurnames: Iterable[str],
  ) -> Dict[str, Dict[str, Any]]:
    """Return AUR info for *aurnames*, querying only uncached names.

    Names unknown to the AUR appear in the result mapped to ``None``.
    """
    cache = self.cache
    # Only names we have never resolved need to go over the wire.
    uncached = [name for name in aurnames if name not in cache]
    if uncached:
      # Previously a request was sent even when every name was cached
      # (empty arg[] list), and the negative-cache step below iterated
      # over *all* requested names, clobbering previously cached values
      # back to None.  Restricting both to `uncached` fixes both issues.
      params = [('v', '5'), ('type', 'info')]
      params.extend(('arg[]', name) for name in uncached)
      async with session.get(AUR_URL, params=params) as res:
        data = await res.json()
      new_results = {r['Name']: r for r in data['results']}
      cache.update(new_results)
      # Negative-cache only the names we actually asked about but the
      # AUR did not return, so hits from earlier batches survive.
      cache.update(
        (name, None)
        for name in set(uncached) - new_results.keys()
      )
    return {name: cache[name] for name in aurnames
            if name in cache}
class Worker(BaseWorker):
|
class Worker(BaseWorker):
|
||||||
# https://wiki.archlinux.org/index.php/Aurweb_RPC_interface#Limitations
|
# https://wiki.archlinux.org/index.php/Aurweb_RPC_interface#Limitations
|
||||||
batch_size = 100
|
batch_size = 100
|
||||||
|
@ -28,26 +53,36 @@ class Worker(BaseWorker):
|
||||||
if left > 0:
|
if left > 0:
|
||||||
n_batch += 1
|
n_batch += 1
|
||||||
|
|
||||||
|
aur_results = AurResults()
|
||||||
|
|
||||||
ret = []
|
ret = []
|
||||||
for i in range(n_batch):
|
for i in range(n_batch):
|
||||||
s = i * self.batch_size
|
s = i * self.batch_size
|
||||||
batch = tasks[s : s+self.batch_size]
|
batch = tasks[s : s+self.batch_size]
|
||||||
fu = self._run_batch(batch)
|
fu = self._run_batch(batch, aur_results)
|
||||||
ret.append(fu)
|
ret.append(fu)
|
||||||
|
|
||||||
await asyncio.wait(ret)
|
await asyncio.wait(ret)
|
||||||
|
|
||||||
  async def _run_batch(
    self,
    batch: List[Tuple[str, Entry]],
    aur_results: AurResults,
  ) -> None:
    # Resolve one batch of entries through the shared AurResults cache
    # and push one RawResult per resolved name onto the result queue.
    #
    # Parameters:
    #   batch: (name, entry-config) pairs for this slice of the tasks.
    #   aur_results: per-run cache shared across all batches so a
    #     package queried by one batch is not re-fetched by another.
    task_by_name: Dict[str, Entry] = dict(self.tasks)
    # NOTE(review): the queue puts below happen while still holding the
    # concurrency token from acquire_token() — confirm this is intended
    # (it serializes result delivery along with the RPC call).
    async with self.acquire_token():
      results = await _run_batch_impl(batch, aur_results)
      for name, version in results.items():
        r = RawResult(name, version, task_by_name[name])
        await self.result_q.put(r)
||||||
async def _run_batch_impl(batch: List[Tuple[str, Entry]]) -> Dict[str, str]:
|
async def _run_batch_impl(
|
||||||
|
batch: List[Tuple[str, Entry]],
|
||||||
|
aur_results: AurResults,
|
||||||
|
) -> Dict[str, str]:
|
||||||
aurnames = {conf.get('aur', name) for name, conf in batch}
|
aurnames = {conf.get('aur', name) for name, conf in batch}
|
||||||
results = await _aur_get_multiple(aurnames)
|
results = await aur_results.get_multiple(aurnames)
|
||||||
|
|
||||||
ret = {}
|
ret = {}
|
||||||
|
|
||||||
|
@ -72,13 +107,3 @@ async def _run_batch_impl(batch: List[Tuple[str, Entry]]) -> Dict[str, str]:
|
||||||
|
|
||||||
return ret
|
return ret
|
||||||
|
|
||||||
async def _aur_get_multiple(
  aurnames: Iterable[str],
) -> Dict[str, Dict[str, Any]]:
  """Query the AUR RPC v5 'info' endpoint for every name in *aurnames*.

  Returns a mapping from package name to its AUR info record; names the
  AUR does not know are simply absent from the result.
  """
  query = [('v', '5'), ('type', 'info')]
  for pkgname in aurnames:
    query.append(('arg[]', pkgname))
  async with session.get(AUR_URL, params=query) as res:
    payload = await res.json()
  return {entry['Name']: entry for entry in payload['results']}
|
|
||||||
|
|
||||||
|
|
Loading…
Add table
Reference in a new issue