mirror of https://github.com/lilydjwg/nvchecker.git
synced 2025-03-10 06:14:02 +00:00
Compare commits
623 commits
172 changed files with 7899 additions and 2384 deletions
.github/workflows/codeql.yml | 84 (vendored, new file)
@@ -0,0 +1,84 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ "master" ]
  pull_request:
    branches: [ "master" ]
  schedule:
    - cron: '34 14 * * 4'

jobs:
  analyze:
    name: Analyze
    # Runner size impacts CodeQL analysis time. To learn more, please see:
    #   - https://gh.io/recommended-hardware-resources-for-running-codeql
    #   - https://gh.io/supported-runners-and-hardware-resources
    #   - https://gh.io/using-larger-runners
    # Consider using larger runners for possible analysis time improvements.
    runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
    timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }}
    permissions:
      # required for all workflows
      security-events: write

      # only required for workflows in private repositories
      actions: read
      contents: read

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]
        # CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ]
        # Use only 'java-kotlin' to analyze code written in Java, Kotlin or both
        # Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support

    steps:
    - name: Checkout repository
      uses: actions/checkout@v4

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v3
      with:
        languages: ${{ matrix.language }}
        # If you wish to specify custom queries, you can do so here or in a config file.
        # By default, queries listed here will override any specified in a config file.
        # Prefix the list here with "+" to use these queries and those in the config file.

        # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
        # queries: security-extended,security-and-quality


    # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
    # If this step fails, then you should remove it and run the build manually (see below)
    - name: Autobuild
      uses: github/codeql-action/autobuild@v3

    # ℹ️ Command-line programs to run using the OS shell.
    # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun

    #   If the Autobuild fails above, remove it and uncomment the following three lines.
    #   modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.

    # - run: |
    #     echo "Run, Build Application using script"
    #     ./location_of_script_within_repo/buildscript.sh

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v3
      with:
        category: "/language:${{matrix.language}}"

.github/workflows/mypy.yaml | 28 (vendored, new file)
@@ -0,0 +1,28 @@
name: run mypy
on: [push, pull_request]
jobs:
  build:
    name: run mypy
    runs-on: ubuntu-latest
    steps:
    - name: Checkout Code
      uses: actions/checkout@v4
    - name: Setup Cache
      uses: actions/cache@v4
      env:
        cache-name: cache-pipmypy
      with:
        path: ~/.cache/pip
        key: ${{ runner.os }}-${{ env.cache-name }}-${{ hashFiles('setup.py') }}
        restore-keys: |
          ${{ runner.os }}-${{ env.cache-name }}-
          ${{ runner.os }}-cache-pip-
    - name: Install deps
      run: pip3 install -U tornado pytest pytest-asyncio pytest-httpbin pytest-rerunfailures structlog tomli platformdirs aiohttp httpx mypy awesomeversion
    - name: Run mypy for --install-types
      run: PATH=$HOME/.local/bin:$PATH mypy --namespace-packages --explicit-package-bases nvchecker nvchecker_source tests
      continue-on-error: true
    - name: Install types
      run: PATH=$HOME/.local/bin:$PATH yes | mypy --install-types
    - name: Run mypy
      run: PATH=$HOME/.local/bin:$PATH mypy --namespace-packages --explicit-package-bases nvchecker nvchecker_source tests

.github/workflows/tests.yaml | 93 (vendored, new file)
@@ -0,0 +1,93 @@
name: run tests
on: [push, pull_request]
jobs:
  tests:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version:
          - "3.8"
          - "3.9"
          - "3.10"
          - "3.11"
          - "3.12"
          - "3.13"
          # pypy fails in some cases but we don't care much about that
          # with github actions we can't mark some jobs to not affect the overall
          # conclusion so we have to omit "allow-failure" tests.
          # See https://github.com/actions/toolkit/issues/399
          # - pypy-3.7
        deps:
          - tornado pycurl
          # timer runs when loop is closed, see https://github.com/lilydjwg/nvchecker/actions/runs/11650699759/job/32439742210
          # - aiohttp
          - tornado
          - httpx[http2]>=0.14.0
        exclude: []
    steps:
    - name: Checkout code
      uses: actions/checkout@v4
    - name: Setup Python ${{ matrix.python-version }}
      uses: actions/setup-python@v5
      with:
        python-version: ${{ matrix.python-version }}
    - name: Setup Cache
      uses: actions/cache@v4
      env:
        cache-name: cache-pip
      with:
        path: ~/.cache/pip
        key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.deps }}-${{ hashFiles('pyproject.toml', 'setup.cfg') }}
        restore-keys: |
          ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.deps }}-
          ${{ runner.os }}-${{ env.cache-name }}-

    - name: Install pycurl deps
      if: ${{ contains(matrix.deps, 'pycurl') }}
      run: |
        sudo apt update
        sudo apt install -y libcurl4-openssl-dev
    # werkzeug is pinned for httpbin compatibility https://github.com/postmanlabs/httpbin/issues/673
    - name: Install Python deps
      env:
        # use env to avoid `>` being redirection
        deps: ${{ matrix.deps }}
      run: pip install -U $deps pytest 'pytest-asyncio>=0.24' pytest-httpbin pytest-rerunfailures structlog tomli platformdirs lxml jq 'werkzeug<2.1' awesomeversion
    # don't use binary distribution because:
    # hardcoded cacert path doesn't work on Ubuntu (should have been resolved?)
    # limited compression support (only deflate & gzip)
    - name: Install pycurl
      if: ${{ contains(matrix.deps, 'pycurl') }}
      run: |
        pip uninstall -y pycurl
        pip install -U pycurl --no-binary :all:
    - name: Decrypt keys
      env:
        KEY: ${{ secrets.KEY }}
      run: if [[ -n $KEY ]]; then openssl enc -d -aes-256-ctr -pbkdf2 -k $KEY -in keyfile.toml.enc -out keyfile.toml; fi

    - name: Setup mitmproxy cache
      uses: actions/cache@v4
      env:
        cache-name: cache-mitm
      with:
        path: ~/.mitmproxy
        key: ${{ env.cache-name }}
        restore-keys: |
          ${{ env.cache-name }}-
    - name: Install mitmproxy
      run: |
        /usr/bin/python -m venv --system-site-packages ~/.mitmproxy/venv
        . ~/.mitmproxy/venv/bin/activate
        pip install -U mitmproxy
        # https://github.com/DevToys-app/DevToys/issues/1373#issuecomment-2599820594
        sudo sysctl -w kernel.apparmor_restrict_unprivileged_unconfined=0
        sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0

    # - name: Setup upterm session
    #   uses: lhotari/action-upterm@v1
    - name: Run pytest
      env:
        mitmdump: /home/runner/.mitmproxy/venv/bin/mitmdump
      run: scripts/run_cached_tests

.gitignore | 4 (vendored)
@@ -1,10 +1,12 @@
records/
*.egg-info/
__pycache__/
/build/
/dist/
.cache/
.eggs/
*.pyc
*.pyo
.travis.pub
.pytest_cache/
.tox/
keyfile.toml

.readthedocs.yaml | 10 (new file)
@@ -0,0 +1,10 @@
version: 2
build:
  os: ubuntu-22.04
  tools:
    python: "3.11"
sphinx:
  configuration: docs/conf.py
python:
  install:
    - requirements: docs/requirements.txt

.travis.yml | 30 (deleted)
@@ -1,30 +0,0 @@
sudo: false
dist: xenial
language: python
cache: pip
python:
  - "3.5"
  - "3.6"
  - "3.7"
  - "nightly"
  - "pypy3.5"
install: pip install -U $DEPS pytest pytest-asyncio pytest-xdist flaky structlog
script: pytest
env:
  global:
    # github
    - secure: "JNuxbHbO+Qj88r0So+FKp8GBVmobGlBNi0hkZIyOH4cBXtuiM1Jo6FtRYInfTUH5TcgfMQml1a8p9g8n1fbRcTsxPt3kkT0ZleW1fJNudOHJFOmDooM4gC2/A+6aMl3xdnLCQ9cXxqsXjIUBie3GhqC4ufInU7VshxOn7KZADbI3zDuLuw9gdsBQf/OADY4oO3y1URxdnWjssP8pwfDFRSEkuLKNDtsYrhkmp3jRAq5DMtMXTEyHly9CJHow7yMyoBHa6Q/J7+C57pI4JsO8c0nJWy/wQUnqw9EeLE/9gAHY1sHlEpjZtJrV45kRd+KC6x4FtoFjvngxymK2A0zmecBI3DRTWBAZedPPVatAD9nlDmwAacBtwvuZJkt6fMUBWMY1I1NEiwdYxceBiqrnvU48FfNOylXE6KuarCQZik/VWk8olIQjXIukMu8EQ58pnEuLZB7wbwNzMLheomuVMEK1nfLOltKaytztl/7cKlsx6SmxY5rQI/x7QInd+rq9OxDDwCo+jEofPKvAcCbUJj6SqfB7QAUxJwwD/ER4/Bji9KSz3BoCu+x7h/ILcskNqLlg4LDCcpxqMOyxePk7A30sSop1E5YLWo0lmS9s88mEz89tzCWSDVIzwQrdMghNBe6JFMzOoKDRDhEkMrs3MAK+FUJkbteGhHrdC86EidU="
    # gitlab
    - secure: "ZmD5E59cLbGylhId+uYsuaM7HCcuP7E3DVZUHtSKQ7ZtiDIPG2EFCl+WlcPBS2JhdyEJ1v7PbxSCq6/jkSK2EGVcWaHMDIyZRu8TFY+l8mqghHi18l0jeegE7FSqkW2JMWmB3y6jsv7EV6YffrSuvHiNgHyZhhIRkbV/B4N9KvJwNYoyVxGYGoJRe5yuvE+2Xkwc9y0ddxh/p+nILU+Vt0i3Z+yVfg4jul7zN1KhK8I8ax4wpAq+0V1PpWbF6/4UK5Xc/1UMEyWE0f8aEzn4kdC6UetOKacWycq7ag3W1vWKVYJvXyRKjGWmef+Al7z8KbwBkU6KR0Hb2OZWKF3SsCvv8bQEjbqcIeGKQT9J2LTgqTxgFtCMmKWXM3BxLASac/WEdQyyZmQq3XHI/OyJ/1hsSLCgvpexIueITo68LkOigrRRiobSER6KK1CaA1AQFWnPnEwrC3QCtzYUIHPT70nTy1Dx0PiOeQrG/stUoPQ9V0FCBf2tKYg2tUDlJbk7avt8rsmLht1uGx8I75qgg3Di/03N19wEBf6V50H9T23wYXRJ/q2mqPiBCBWIlHwE8NbLZgRfBvCFyUBRvggNAyvZaEOmWRl3U9JEipcqrAQtddzDP1dUbr6SYJChR6yfMkWXXsUvy3FxrOOILeaBT2j0goSaye8aLncUqArgWSw="
  matrix:
    - DEPS=aiohttp
    - DEPS=tornado pycurl
    - DEPS=tornado
    - DEPS="tornado<5" pycurl
    - DEPS="tornado<5"
matrix:
  fast_finish: true
  allow_failures:
    # doesn't work well, see https://travis-ci.org/lilydjwg/nvchecker/jobs/376326582
    - python: pypy3.5
      env: DEPS=aiohttp

.typos.toml | 2 (new file)
@@ -0,0 +1,2 @@
[default.extend-words]
mis = "mis"

README.rst | 482
@@ -1,496 +1,60 @@
**nvchecker** (short for *new version checker*) is for checking if a new version of some software has been released.

.. image:: https://travis-ci.org/lilydjwg/nvchecker.svg
   :alt: Build Status
   :target: https://travis-ci.org/lilydjwg/nvchecker
This is the version 2.0 branch. For the old version 1.x, please switch to the ``v1.x`` branch.

.. image:: https://github.com/lilydjwg/nvchecker/workflows/run%20tests/badge.svg?branch=master
   :alt: Test Status
   :target: https://github.com/lilydjwg/nvchecker/actions?query=workflow%3A%22run+tests%22
.. image:: https://badge.fury.io/py/nvchecker.svg
   :alt: PyPI version
   :target: https://badge.fury.io/py/nvchecker
.. image:: https://readthedocs.org/projects/nvchecker/badge/?version=latest
   :target: https://nvchecker.readthedocs.io/en/latest/?badge=latest
   :alt: Documentation Status

.. image:: https://repology.org/badge/vertical-allrepos/nvchecker.svg
   :alt: Packaging status
   :target: https://repology.org/metapackage/nvchecker/versions

Contents
========

* `Dependency <#dependency>`_
* `Install and Run <#running>`_

  * `JSON logging <#json-logging>`_

* `Version Record Files <#version-record-files>`_

  * `The nvtake Command <#the-nvtake-command>`_

* `Version Source Files <#version-source-files>`_

  * `Configuration Section <#configuration-section>`_
  * `Global Options <#global-options>`_
  * `Search in a Webpage <#search-in-a-webpage>`_
  * `Find with a Command <#find-with-a-command>`_
  * `Check AUR <#check-aur>`_
  * `Check GitHub <#check-github>`_
  * `Check BitBucket <#check-bitbucket>`_
  * `Check GitLab <#check-gitlab>`_
  * `Check PyPI <#check-pypi>`_
  * `Check RubyGems <#check-rubygems>`_
  * `Check NPM Registry <#check-npm-registry>`_
  * `Check Hackage <#check-hackage>`_
  * `Check CPAN <#check-cpan>`_
  * `Check Packagist <#check-packagist>`_
  * `Check Local Pacman Database <#check-local-pacman-database>`_
  * `Check Arch Linux official packages <#check-arch-linux-official-packages>`_
  * `Check Debian Linux official packages <#check-debian-linux-official-packages>`_
  * `Check Ubuntu Linux official packages <#check-ubuntu-linux-official-packages>`_
  * `Check Anitya (release-monitoring.org) <#check-anitya>`_
  * `Check Android SDK <#check-android-sdk>`_
  * `Manually updating <#manually-updating>`_
  * `Version Control System (VCS) (git, hg, svn, bzr) <#version-control-system-vcs-git-hg-svn-bzr>`_
  * `Other <#other>`_

* `Bugs <#bugs>`_
* `Footnotes <#footnotes>`_
.. contents::
   :local:

Dependency
==========
- Python 3.5+
- Python library: structlog
----------
- Python 3.8+
- Python library: structlog, platformdirs, tomli (on Python < 3.11)
- One of these Python library combinations (ordered by preference):

  * tornado + pycurl
  * aiohttp
  * httpx with http2 support (experimental; only latest version is supported)
  * tornado

- All commands used in your version source files
- All commands used in your software version configuration files

Install and Run
===============
---------------
To install::

  pip3 install nvchecker

To use the latest code, you can also clone this repository and run::

  python3 setup.py install
  pip install .

To see available options::

  nvchecker --help

Run with one or more software version source files::
Run with one or more software version files::

  nvchecker source_file
  nvchecker -c config_file

You will normally want to specify some "version record files"; see below.

JSON logging
------------
With ``--logger=json`` or ``--logger=both``, you can get structured logging
for programmatic consumption. You can use ``--json-log-fd=N`` to specify the
file descriptor to send logs to (take care to do line buffering). The logging
level option (``-l`` or ``--logging``) doesn't take effect with this.

The JSON log is one JSON string per line. The following documented events and
fields are stable; undocumented ones may change without notice.

event=updated
  An update is detected. Fields ``name``, ``old_version`` and ``version`` are
  available. ``old_version`` may be ``null``.

event=up-to-date
  There is no update. Fields ``name`` and ``version`` are available.

event=no-result
  No version is detected. There may be an error. Field ``name`` is available.

level=error
  There is an error. Fields ``name`` and ``exc_info`` may be available to give
  further information.

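For illustration, an ``updated`` event might look like the following line (values are made up; only the documented fields are shown)::

  {"event": "updated", "name": "vim", "old_version": "7.3.1024", "version": "7.4.160"}
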
Version Record Files
====================
Version record files record which version of the software you know or is available. They are simple key-value pairs of ``(name, version)`` separated by a space::

  fcitx 4.2.7
  google-chrome 27.0.1453.93-200836
  vim 7.3.1024

Say you've got a version record file called ``old_ver.txt`` which records all your watched software and their versions, as well as some configuration entries. To update it using ``nvchecker``::

  nvchecker source.ini

See what has been updated with ``nvcmp``::

  nvcmp source.ini

Manually compare the two files for updates (assuming they are sorted alphabetically; files generated by ``nvchecker`` are already sorted)::

  comm -13 old_ver.txt new_ver.txt
  # or say that in English:
  comm -13 old_ver.txt new_ver.txt | awk '{print $1 " has updated to version " $2 "."}'
  # show both old and new versions
  join old_ver.txt new_ver.txt | awk '$2 != $3'

The ``nvtake`` Command
----------------------
This command helps to manage version record files. It reads both the old and new version record files, and a list of names given on the command line. It then updates the versions of those names in the old version record file.

This helps when you have known (and processed) some of the updated software, but not all. You can tell nvchecker that via this command instead of editing the file by hand.

This command will help most if you specify where your version record files are in your config file. See below for how to use a config file.

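For example, if you have handled the ``vim`` and ``fcitx`` updates but not the others, something like the following records just those two (a sketch; it assumes your config file specifies the version record files)::

  nvtake -c source.toml vim fcitx
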
Version Source Files
====================
The software version source files are in ini format. *Section names* are the names of the software. The following fields are used to tell nvchecker how to determine the current version of that software.

See ``sample_source.ini`` for an example.

Configuration Section
---------------------
A special section named ``__config__`` provides some configuration options.

Relative paths are relative to the source files, and ``~`` and environment variables are expanded.

Currently supported options are:

oldver
  Specify a version record file containing the old version info.

newver
  Specify a version record file to store the new version info.

proxy
  The HTTP proxy to use. The format is ``host:port``, e.g. ``localhost:8087``.

max_concurrent
  Max number of concurrent jobs. Default: 20.

keyfile
  Specify an ini config file containing key (token) information. This file should contain a ``keys`` section, mapping key names to key values. See the specific source for the key name(s) to use.

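A minimal keyfile might look like this (the token value is a placeholder)::

  [keys]
  github = 0123456789abcdef0123456789abcdef01234567
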
Global Options
--------------
The following options apply to all checkers.

prefix
  Strip the prefix string if the version string starts with it. Otherwise the
  version string is returned as-is.

from_pattern, to_pattern
  Both are Python-compatible regular expressions. If ``from_pattern`` is found
  in the version string, it will be replaced with ``to_pattern``.

missing_ok
  Suppress warnings and errors if a version checking module finds nothing.
  Currently only ``regex`` supports it.

If both ``prefix`` and ``from_pattern``/``to_pattern`` are used,
``from_pattern``/``to_pattern`` are ignored. If you want to strip the prefix
and then do something special, just use ``from_pattern``/``to_pattern``. For
example, the transformation of ``v1_1_0`` => ``1.1.0`` can be achieved with
``from_pattern = v(\d+)_(\d+)_(\d+)`` and ``to_pattern = \1.\2.\3``.

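Put into an entry, that transformation might look like this (a sketch; the section name, URL and regex are hypothetical)::

  [foo]
  url = https://example.com/foo/tags
  regex = foo-(v\d+_\d+_\d+)\.tar\.gz
  from_pattern = v(\d+)_(\d+)_(\d+)
  to_pattern = \1.\2.\3
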
Search in a Webpage
-------------------
Search through a specific webpage for the version string. This type of version finding has these fields:

url
  The URL of the webpage to fetch.

encoding
  (*Optional*) The character encoding of the webpage, if ``latin1`` is not appropriate.

regex
  A regular expression used to find the version string.

  It can have zero or one capture group. The capture group or the whole match is the version string.

  When multiple version strings are found, the maximum of those is chosen.

proxy
  The HTTP proxy to use. The format is ``host:port``, e.g. ``localhost:8087``.

user_agent
  The ``User-Agent`` header value to use. Some sites (SourceForge in Europe, for example) serve a cookie-policy page instead of the real one to browser-like user agents, so a tool-like value such as ``curl/7.40.0`` may be needed to get through.

sort_version_key
  Sort the version string using this key function. Choose between ``parse_version`` and
  ``vercmp``. The default is ``parse_version``. ``parse_version`` uses
  ``pkg_resources.parse_version``; ``vercmp`` uses ``pyalpm.vercmp``.

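A complete entry of this type might look like the following sketch (the section name and URL are hypothetical)::

  [hello]
  url = https://example.com/hello/download.html
  regex = hello-([\d.]+)\.tar\.gz
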
Find with a Command
-------------------
Use a shell command line to get the version. The output is stripped first, so trailing newlines are not a problem.

cmd
  The command line to use. This will run with the system's standard shell (i.e. ``/bin/sh``).

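For example, to track the locally installed git (``git --version`` prints ``git version 2.x.y``, so the third word is the version)::

  [git]
  cmd = git --version | awk '{print $3}'
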
Check AUR
---------
Check `Arch User Repository <https://aur.archlinux.org/>`_ for updates.

aur
  The package name in AUR. If empty, use the name of the software (the *section name*).

strip-release
  Strip the release part.

use_last_modified
  Append the last modified time to the version.

Check GitHub
------------
Check `GitHub <https://github.com/>`_ for updates. The version returned is in date format ``%Y%m%d``, e.g. ``20130701``.

github
  The github repository, with author, e.g. ``lilydjwg/nvchecker``.

branch
  Which branch to track? Default: ``master``.

use_latest_release
  Set this to ``true`` to check for the latest release on GitHub. An annotated
  tag creates a "release" on GitHub. This is not the same as git tags, which
  include both annotated tags and lightweight ones.

use_max_tag
  Set this to ``true`` to check for the max tag on GitHub. Unlike ``use_latest_release``,
  this option includes both annotated tags and lightweight ones, and returns the biggest one
  sorted by ``pkg_resources.parse_version``.

include_tags_pattern
  Only include tags matching this pattern. This option must be used together
  with ``use_max_tag``.

ignored_tags
  Ignore certain tags while computing the max tag. Tags are separated by
  whitespace. This option must be used together with ``use_max_tag``. This can
  be useful to avoid some known badly versioned tags, so the newer tags won't
  be "overridden" by the old broken ones.

sort_version_key
  Sort the version string using this key function. Choose between ``parse_version`` and
  ``vercmp``. The default is ``parse_version``. ``parse_version`` uses
  ``pkg_resources.parse_version``; ``vercmp`` uses ``pyalpm.vercmp``.

proxy
  The HTTP proxy to use. The format is ``host:port``, e.g. ``localhost:8087``.

An environment variable ``NVCHECKER_GITHUB_TOKEN`` or a key named ``github``
can be set to a GitHub OAuth token in order to request more frequently than
anonymously.

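A sketch of a GitHub entry using tags (the ``ignored_tags`` value is hypothetical)::

  [nvchecker]
  github = lilydjwg/nvchecker
  use_max_tag = true
  ignored_tags = badtag-1.0
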
Check BitBucket
---------------
Check `BitBucket <https://bitbucket.org/>`_ for updates. The version returned
is in date format ``%Y%m%d``, e.g. ``20130701``.

bitbucket
  The bitbucket repository, with author, e.g. ``lilydjwg/dotvim``.

branch
  Which branch to track? Default is the repository's default.

use_max_tag
  Set this to ``true`` to check for the max tag on BitBucket. Will return the biggest one
  sorted by ``pkg_resources.parse_version``.

ignored_tags
  Ignore certain tags while computing the max tag. Tags are separated by
  whitespace. This option must be used together with ``use_max_tag``. This can
  be useful to avoid some known badly versioned tags, so the newer tags won't
  be "overridden" by the old broken ones.

sort_version_key
  Sort the version string using this key function. Choose between ``parse_version`` and
  ``vercmp``. The default is ``parse_version``. ``parse_version`` uses
  ``pkg_resources.parse_version``; ``vercmp`` uses ``pyalpm.vercmp``.

Check GitLab
Documentation
-------------
Check `GitLab <https://gitlab.com/>`_ for updates. The version returned is in date format ``%Y%m%d``, e.g. ``20130701``.

gitlab
  The gitlab repository, with author, e.g. ``Deepin/deepin-music``.

branch
  Which branch to track? Default: ``master``.

use_max_tag
  Set this to ``true`` to check for the max tag on GitLab. Will return the biggest one
  sorted by ``pkg_resources.parse_version``.

ignored_tags
  Ignore certain tags while computing the max tag. Tags are separated by
  whitespace. This option must be used together with ``use_max_tag``. This can
  be useful to avoid some known badly versioned tags, so the newer tags won't
  be "overridden" by the old broken ones.

sort_version_key
  Sort the version string using this key function. Choose between ``parse_version`` and
  ``vercmp``. The default is ``parse_version``. ``parse_version`` uses
  ``pkg_resources.parse_version``; ``vercmp`` uses ``pyalpm.vercmp``.

host
  Hostname for self-hosted GitLab instance.

token
  GitLab authorization token used to call the API.

  Authenticated only.

To set an authorization token, you can set:

- a key named ``gitlab_{host}`` in the keyfile (where ``host`` is formed the
  same as the environment variable, but all lowercased).
- an environment variable ``NVCHECKER_GITLAB_TOKEN_{host}``, which must provide that
  token. The ``host`` part is the uppercased version of the ``host`` setting,
  with dots (``.``) and slashes (``/``) replaced by underscores (``_``), e.g.
  ``NVCHECKER_GITLAB_TOKEN_GITLAB_COM``.
- the ``token`` option

Check PyPI
----------
Check `PyPI <https://pypi.python.org/>`_ for updates.

pypi
  The name used on PyPI, e.g. ``PySide``.

Check RubyGems
--------------
Check `RubyGems <https://rubygems.org/>`_ for updates.

gems
  The name used on RubyGems, e.g. ``sass``.

Check NPM Registry
------------------
Check `NPM Registry <https://registry.npmjs.org/>`_ for updates.

npm
  The name used on NPM Registry, e.g. ``coffee-script``.

Check Hackage
-------------
Check `Hackage <https://hackage.haskell.org/>`_ for updates.

hackage
  The name used on Hackage, e.g. ``pandoc``.

Check CPAN
----------
Check `MetaCPAN <https://metacpan.org/>`_ for updates.

cpan
  The name used on CPAN, e.g. ``YAML``.

proxy
  The HTTP proxy to use. The format is ``host:port``, e.g. ``localhost:8087``.

Check Packagist
---------------
Check `Packagist <https://packagist.org/>`_ for updates.

packagist
  The name used on Packagist, e.g. ``monolog/monolog``.

Check Local Pacman Database
---------------------------
This is used when you run ``nvchecker`` on an Arch Linux system and the program always keeps up with a package in your configured repositories for `Pacman`_.

pacman
  The package name to reference to.

strip-release
  Strip the release part.

Check Arch Linux official packages
----------------------------------
This enables you to track the updates of `Arch Linux official packages <https://www.archlinux.org/packages/>`_, without needing pacman and an up-to-date local Pacman database.

archpkg
  Name of the Arch Linux package.

strip-release
  Strip the release part.

Check Debian Linux official packages
------------------------------------
This enables you to track the updates of `Debian Linux official packages <https://packages.debian.org>`_, without needing apt and an up-to-date local APT database.

debianpkg
  Name of the Debian Linux source package.

suite
  Name of the Debian release (jessie, wheezy, etc.; defaults to sid)

strip-release
  Strip the release part.

Check Ubuntu Linux official packages
------------------------------------
This enables you to track the updates of `Ubuntu Linux official packages <https://packages.ubuntu.com/>`_, without needing apt and an up-to-date local APT database.

ubuntupkg
  Name of the Ubuntu Linux source package.

suite
  Name of the Ubuntu release (xenial, zesty, etc.; defaults to None, which means no limit on suite)

strip-release
  Strip the release part.

Check Anitya
------------
This enables you to track updates from `Anitya <https://release-monitoring.org/>`_ (release-monitoring.org).

anitya
  ``distro/package``, where ``distro`` can be a lot of things like "fedora", "arch linux", "gentoo", etc. ``package`` is the package name of the chosen distribution.

Check Android SDK
-----------------
This enables you to track updates of Android SDK packages listed in ``sdkmanager --list``.

android_sdk
  The package path prefix. This value is matched against the ``path`` attribute in all ``<remotePackage>`` nodes in an SDK manifest XML. The first match is used for version comparisons.

repo
  Should be one of ``addon`` or ``package``. Packages in ``addon2-1.xml`` use ``addon`` and packages in ``repository2-1.xml`` use ``package``.

Manually updating
-----------------
This enables you to manually specify the version (maybe because you want to approve each release before it gets to the script).

manual
  The version string.

Version Control System (VCS) (git, hg, svn, bzr)
------------------------------------------------
Check a VCS repo for new commits. The version returned is currently not related to the version of the software and will increase whenever the referred VCS branch changes. This is mainly for Arch Linux.

vcs
  The url of the remote VCS repo, using the same syntax as a VCS url in PKGBUILD (`Pacman`_'s build script). The first VCS url found in the source array of the PKGBUILD will be used if this is left blank. (Note: for a blank ``vcs`` setting to work correctly, the PKGBUILD has to be in a directory with the name of the software under the path where nvchecker is run. Also, all the commands, if any, needed when sourcing the PKGBUILD need to be installed.)

use_max_tag
  Set this to ``true`` to check for the max tag. Currently only supported for ``git``.
  This option returns the biggest tag sorted by ``pkg_resources.parse_version``.

ignored_tags
  Ignore certain tags while computing the max tag. Tags are separated by
  whitespace. This option must be used together with ``use_max_tag``. This can
  be useful to avoid some known badly versioned tags, so the newer tags won't
  be "overridden" by the old broken ones.

Other
-----
More to come. Send me a patch or pull request if you can't wait and have written one yourself :-)

Bugs
====
* Finish writing results even on Ctrl-C or other interruption.

Footnotes
=========

.. _Pacman: https://wiki.archlinux.org/index.php/Pacman
For detailed documentation, see `https://nvchecker.readthedocs.io/en/latest/ <https://nvchecker.readthedocs.io/en/latest/>`_.

docs/.gitignore | 1 (vendored, new file)
@@ -0,0 +1 @@
_build/

docs/Makefile | 26 (new file)
@@ -0,0 +1,26 @@
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS  ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR   = .
BUILDDIR    = _build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile man

man: $(BUILDDIR)/man/nvchecker.1

$(BUILDDIR)/man/nvchecker.1: usage.rst
	mkdir -p $(BUILDDIR)/man
	./myrst2man.py $< > $@

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%:
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

docs/api.rst | 26 (new file)
@@ -0,0 +1,26 @@
``nvchecker.api`` --- The source plugin API
===========================================

.. automodule:: nvchecker.api
   :members:
   :imported-members:
   :undoc-members:

.. py:data:: session
   :type: nvchecker.httpclient.base.BaseSession

   The object to send out HTTP requests, respecting various options in the configuration entry.

.. automodule:: nvchecker.httpclient.base
   :members: BaseSession, Response
   :undoc-members:

.. autodata:: nvchecker.api.proxy
.. autodata:: nvchecker.api.user_agent
.. autodata:: nvchecker.api.tries
.. autodata:: nvchecker.api.verify_cert

.. py:data:: nvchecker.api.entry_waiter
   :type: contextvars.ContextVar

   This :class:`ContextVar <contextvars.ContextVar>` contains an :class:`EntryWaiter <nvchecker.api.EntryWaiter>` instance for waiting on other entries.

docs/conf.py | 47 (new file)
@@ -0,0 +1,47 @@
import os
import sys

sys.path.insert(0, os.path.abspath(".."))
import nvchecker

master_doc = "index"

project = "nvchecker"
copyright = "lilydjwg, et al."

version = release = nvchecker.__version__

extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.doctest",
    "sphinx.ext.intersphinx",
    "sphinx.ext.viewcode",
]

primary_domain = "py"
default_role = "py:obj"

autodoc_member_order = "bysource"
autoclass_content = "both"
autodoc_inherit_docstrings = False

# Without this line sphinx includes a copy of object.__init__'s docstring
# on any class that doesn't define __init__.
# https://bitbucket.org/birkenfeld/sphinx/issue/1337/autoclass_content-both-uses-object__init__
autodoc_docstring_signature = False

intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}

html_theme = "sphinx_rtd_theme"
on_rtd = os.environ.get("READTHEDOCS", None) == "True"

# On RTD we can't import sphinx_rtd_theme, but it will be applied by
# default anyway. This block will use the same theme when building locally
# as on RTD.
if not on_rtd:
    import sphinx_rtd_theme
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

html_theme_options = {
    'collapse_navigation': False,
}

docs/index.rst | 22 (new file)
@@ -0,0 +1,22 @@
.. nvchecker documentation master file, created by
   sphinx-quickstart on Thu Sep 3 00:19:02 2020.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to nvchecker's documentation!
=====================================

.. toctree::
   :maxdepth: 2

   usage
   plugin
   api


Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`

docs/make.bat | 35 (new file)
@@ -0,0 +1,35 @@
@ECHO OFF

pushd %~dp0

REM Command file for Sphinx documentation

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build

if "%1" == "" goto help

%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)

%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

:end
popd

docs/myrst2man.py | 74 (new executable file)
@@ -0,0 +1,74 @@
#!/usr/bin/python3

import time
import locale
import os
import sys
try:
  locale.setlocale(locale.LC_ALL, '')
except:
  pass

sys.path.insert(0, '..')
import nvchecker

from docutils.core import publish_cmdline, default_description
from docutils import nodes
from docutils.writers import manpage
from docutils.parsers.rst import roles

def ref_role(
  role, rawtext, text, lineno, inliner,
  options={}, content=[],
):
  node = nodes.reference(rawtext, text.title(), **options)
  return [node], []

def doc_role(
  role, rawtext, text, lineno, inliner,
  options={}, content=[],
):
  node = nodes.reference(rawtext, text, **options)
  return [node], []

roles.register_local_role('ref', ref_role)
roles.register_local_role('doc', doc_role)

class MyTranslator(manpage.Translator):
  def visit_image(self, node):
    raise nodes.SkipNode

  def visit_topic(self, node):
    self.body.append('\n')
    raise nodes.SkipNode

  def visit_title(self, node):
    try:
      super().visit_title(node)
    except nodes.SkipNode:
      if self.section_level == 0:
        self._docinfo['title'] = 'nvchecker'
        self._docinfo['subtitle'] = 'New version checker for software releases'
        self._docinfo['title_upper'] = 'nvchecker'.upper()
        self._docinfo['manual_section'] = '1'
        # Make the generated man page reproducible. Based on the patch from
        # https://sourceforge.net/p/docutils/patches/132/#5333
        source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH')
        if source_date_epoch:
          self._docinfo['date'] = time.strftime('%Y-%m-%d', time.gmtime(int(source_date_epoch)))
        else:
          self._docinfo['date'] = time.strftime('%Y-%m-%d')
        self._docinfo['version'] = nvchecker.__version__
      raise

class MyWriter(manpage.Writer):
  def __init__(self):
    super().__init__()
    self.translator_class = MyTranslator

def main():
  description = ("Generates plain unix manual documents. " + default_description)
  publish_cmdline(writer=MyWriter(), description=description)

if __name__ == '__main__':
  main()

docs/plugin.rst | 96 (new file)
@@ -0,0 +1,96 @@
How to develop a source plugin for nvchecker
============================================

.. contents::
   :local:

Source plugins enable nvchecker to discover software version strings in
additional ways.

Where to put the plugins
------------------------

They are Python modules put in any directories named ``nvchecker_source`` in
``sys.path``. This is called namespace packages, introduced by `PEP 420
<https://www.python.org/dev/peps/pep-0420/>`_. For local use,
``~/.local/lib/pythonX.Y/site-packages/nvchecker_source`` is a good place, or
you can define the ``PYTHONPATH`` environment variable and put nvchecker source
plugins there inside a ``nvchecker_source`` directory.

Plugins are referenced by their names in the configuration file (``source = "xxx"``).
If multiple plugins have the same name, the first one in ``sys.path`` will be used.

How to write a simple plugin
----------------------------

For simple situations, you need to define an async function with the following signature::

  async def get_version(
    name: str, conf: Entry, *,
    cache: AsyncCache, keymanager: KeyManager,
    **kwargs,
  ) -> VersionResult:
    ...

Those types are imported from :mod:`nvchecker.api`.

``name`` is the table key in the configuration file, and ``conf`` is a dict of
the content of that table. You should not modify this dict.

``cache`` is an :class:`AsyncCache <nvchecker.api.AsyncCache>` object that
caches results for you. Every plugin has its own ``cache`` object so that cache
keys won't conflict.

``keymanager`` is a :class:`KeyManager <nvchecker.api.KeyManager>` object that
you can call :meth:`.get_key(name) <nvchecker.api.KeyManager.get_key>` on to get
the key (token) from the keyfile.

There may be additional keyword arguments in the future, so ``**kwargs`` should be used.

If you want to send an HTTP request, it's preferred to use
:meth:`cache.get_json <nvchecker.api.AsyncCache.get_json>` or the
:data:`nvchecker.api.session` object. It will use the auto-selected HTTP backend and
handle the ``proxy`` option automatically.

For details about these objects, see :mod:`the API documentation <nvchecker.api>`,
or take existing source plugins as examples.

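As an illustration, a minimal plugin along these lines could look as follows (a sketch, not an official example: the ``example`` source name, the URL, and the JSON shape are all hypothetical)::

  # nvchecker_source/example.py -- a hypothetical source plugin
  from nvchecker.api import (
    AsyncCache, Entry, KeyManager, VersionResult,
  )

  async def get_version(
    name: str, conf: Entry, *,
    cache: AsyncCache, keymanager: KeyManager,
    **kwargs,
  ) -> VersionResult:
    # The entry may override the project name; fall back to the entry name.
    project = conf.get('example', name)
    # Fetch (and cache) a JSON document describing the latest release.
    data = await cache.get_json(
      'https://example.com/api/%s/latest' % project)
    return data['version']
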
||||
How to write a more powerful plugin
|
||||
-----------------------------------
|
||||
|
||||
You may want more control in your source plugin, e.g. to do batch requests. To
|
||||
do this, you provide a class instead::
|
||||
|
||||
class Worker(BaseWorker):
|
||||
async def run(self) -> None:
|
||||
...
|
||||
|
||||
|
||||
You will have the following in the attributes::
|
||||
|
||||
token_q: Queue[bool],
|
||||
result_q: Queue[RawResult],
|
||||
tasks: List[Tuple[str, Entry]],
|
||||
keymanager: KeyManager,
|
||||
|
||||
You are expected to process :attr:`tasks <nvchecker.api.BaseWorker.tasks>` and
|
||||
put results in :attr:`result_q <nvchecker.api.BaseWorker.result_q>`. See
|
||||
``nvchecker_source/none.py`` for the simplest example, and
|
||||
``nvchecker_source/aur.py`` for a complete, batching example.
|
||||
|
||||
For details about these objects, see :mod:`the API documentation <nvchecker.api>`.

You can also receive a configuration section from the configuration file as
``__config__.source.SOURCE_NAME``, where ``SOURCE_NAME`` is what your plugin is
called. This can be used to specify a mirror site for your plugin to use, e.g.
the ``npm`` plugin accepts the following config::

  [__config__.source.npm]
  registry = "https://registry.npm.taobao.org"

When such a configuration exists for your plugin, you need to define a function
named ``configure`` to receive it::

  def configure(config):
    '''use the "config" dict in some way'''
    ...
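
For instance, a hypothetical mirror-aware plugin could stash the registry in a
module-level variable for ``get_version`` to use later (the default URL here is
only an assumption)::

  REGISTRY = 'https://registry.example.org'  # default, overridden below

  def configure(config):
    global REGISTRY
    REGISTRY = config.get('registry', REGISTRY)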

docs/requirements.txt (new file)
@@ -0,0 +1,6 @@
structlog
platformdirs
tornado>=6
sphinx>=3.2
# <5 has strange bottom margins for p, and no list indicators
sphinx-rtd-theme>=0.5

docs/usage.rst (new file, 1145 lines)
File diff suppressed because it is too large

keyfile.toml.enc (new file)
@@ -0,0 +1 @@
(encrypted binary data, not reproduced)

mypy.ini (new file)
@@ -0,0 +1,31 @@
[mypy]
warn_unused_configs = True
warn_redundant_casts = True
warn_unused_ignores = True
show_error_context = True
show_column_numbers = True
no_implicit_optional = True

[mypy-structlog]
ignore_missing_imports = True

[mypy-pyalpm]
ignore_missing_imports = True

[mypy-flaky]
ignore_missing_imports = True

[mypy-pytest_httpbin]
ignore_missing_imports = True

[mypy-lxml]
ignore_missing_imports = True

[mypy-tomllib]
ignore_missing_imports = True

[mypy-jq]
ignore_missing_imports = True

[mypy-tomli]
ignore_missing_imports = True

@@ -1,4 +1,4 @@
 # MIT licensed
-# Copyright (c) 2013-2018 lilydjwg <lilydjwg@gmail.com>, et al.
+# Copyright (c) 2013-2024 lilydjwg <lilydjwg@gmail.com>, et al.
 
-__version__ = '1.3'
+__version__ = '2.17dev'

nvchecker/__main__.py (new executable file)
@@ -0,0 +1,112 @@
#!/usr/bin/env python3
# MIT licensed
# Copyright (c) 2013-2024 lilydjwg <lilydjwg@gmail.com>, et al.

from __future__ import annotations

import sys
import argparse
import asyncio
from typing import Coroutine, Tuple
from pathlib import Path

import structlog

from . import core
from .util import ResultData, RawResult, KeyManager, EntryWaiter
from .ctxvars import proxy as ctx_proxy

logger = structlog.get_logger(logger_name=__name__)

def main() -> None:
  parser = argparse.ArgumentParser(description='New version checker for software')
  parser.add_argument('-k', '--keyfile',
                      metavar='FILE', type=str,
                      help='use specified keyfile (override the one in configuration file)')
  parser.add_argument('-t', '--tries', default=1, type=int, metavar='N',
                      help='try N times when network errors occur')
  parser.add_argument('--failures', action='store_true',
                      help='exit with code 3 if failures / errors happen during checking')
  parser.add_argument('-e', '--entry', type=str,
                      help='only execute on specified entry (useful for debugging)')
  core.add_common_arguments(parser)
  args = parser.parse_args()
  if core.process_common_arguments(args):
    return

  try:
    entries, options = core.load_file(
      args.file, use_keymanager=not bool(args.keyfile))

    if args.entry:
      if args.entry not in entries:
        sys.exit('Specified entry not found in config')
      entries = {args.entry: entries[args.entry]}

    if args.keyfile:
      keymanager = KeyManager(Path(args.keyfile))
    else:
      keymanager = options.keymanager
  except core.FileLoadError as e:
    sys.exit(str(e))

  if options.proxy is not None:
    ctx_proxy.set(options.proxy)

  task_sem = asyncio.Semaphore(options.max_concurrency)
  result_q: asyncio.Queue[RawResult] = asyncio.Queue()
  dispatcher = core.setup_httpclient(
    options.max_concurrency,
    options.httplib,
    options.http_timeout,
  )
  entry_waiter = EntryWaiter()
  try:
    futures = dispatcher.dispatch(
      entries, task_sem, result_q,
      keymanager, entry_waiter,
      args.tries,
      options.source_configs,
    )
  except ModuleNotFoundError as e:
    sys.exit(f'Error: {e}')

  if options.ver_files is not None:
    oldvers = core.read_verfile(options.ver_files[0])
  else:
    oldvers = {}
  result_coro = core.process_result(oldvers, result_q, entry_waiter, verbose=bool(args.entry))
  runner_coro = core.run_tasks(futures)

  if sys.version_info >= (3, 10):
    # Python 3.10 has deprecated asyncio.get_event_loop
    results, has_failures = asyncio.run(run(result_coro, runner_coro))
  else:
    # Python < 3.10 will create an eventloop when asyncio.Queue is initialized
    results, has_failures = asyncio.get_event_loop().run_until_complete(run(result_coro, runner_coro))

  if options.ver_files is not None:
    newverf = options.ver_files[1]
    if args.entry:
      # don't remove other entries when only one entry is specified on cmdline
      vers = core.read_verfile(newverf)
    else:
      vers = {}
    vers.update(results)
    core.write_verfile(newverf, vers)

  if args.failures and has_failures:
    sys.exit(3)

async def run(
  result_coro: Coroutine[None, None, Tuple[ResultData, bool]],
  runner_coro: Coroutine[None, None, None],
) -> Tuple[ResultData, bool]:
  result_fu = asyncio.create_task(result_coro)
  runner_fu = asyncio.create_task(runner_coro)
  await runner_fu
  result_fu.cancel()
  return await result_fu

if __name__ == '__main__':
  main()

nvchecker/api.py (new file)
@@ -0,0 +1,11 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.

from .httpclient import session, TemporaryError, HTTPError
from .util import (
  Entry, BaseWorker, RawResult, VersionResult, RichResult,
  AsyncCache, KeyManager, GetVersionError, EntryWaiter,
)
from .sortversion import sort_version_keys

from .ctxvars import tries, proxy, user_agent, httptoken, entry_waiter, verify_cert

@@ -1,43 +1,84 @@
 # vim: se sw=2:
 # MIT licensed
-# Copyright (c) 2013-2018 lilydjwg <lilydjwg@gmail.com>, et al.
+# Copyright (c) 2013-2020, 2024 lilydjwg <lilydjwg@gmail.com>, et al.
+
+from __future__ import annotations
 
 import os
 import sys
-import configparser
 import asyncio
+from asyncio import Queue
 import logging
 import argparse
+from typing import (
+  Tuple, NamedTuple, Optional, List, Union,
+  cast, Dict, Awaitable, Sequence, Any,
+  TYPE_CHECKING,
+)
+import types
+from pathlib import Path
+from importlib import import_module
+import re
+import contextvars
+import json
+import dataclasses
 
 import structlog
 
-from .lib import nicelogger
-from .get_version import get_version
-from .source import session
-from . import slogconf
+if TYPE_CHECKING:
+  import tomli as tomllib
+else:
+  try:
+    import tomllib
+  except ModuleNotFoundError:
+    import tomli as tomllib
+
+import platformdirs
+
+from .lib import nicelogger
+from . import slogconf
+from .util import (
+  Entry, Entries, KeyManager, RawResult, RichResult, ResultData,
+  FunctionWorker, GetVersionError,
+  FileLoadError, EntryWaiter,
+)
 from . import __version__
+from .sortversion import sort_version_keys
+from .ctxvars import tries as ctx_tries
+from .ctxvars import entry_waiter as ctx_entry_waiter
+from . import httpclient
 
 logger = structlog.get_logger(logger_name=__name__)
 
-def add_common_arguments(parser):
+def get_default_config() -> str:
+  confdir = platformdirs.user_config_dir(appname='nvchecker')
+  file = os.path.join(confdir, 'nvchecker.toml')
+  return file
+
+def add_common_arguments(parser: argparse.ArgumentParser) -> None:
   parser.add_argument('-l', '--logging',
                       choices=('debug', 'info', 'warning', 'error'), default='info',
                       help='logging level (default: info)')
   parser.add_argument('--logger', default='pretty',
                       choices=['pretty', 'json', 'both'],
                       help='select which logger to use')
-  parser.add_argument('--json-log-fd',
+  parser.add_argument('--json-log-fd', metavar='FD',
                       type=lambda fd: os.fdopen(int(fd), mode='w'),
                       help='specify fd to send json logs to. stdout by default')
   parser.add_argument('-V', '--version', action='store_true',
                       help='show version and exit')
-  parser.add_argument('file', metavar='FILE', nargs='?', type=open,
-                      help='software version source file')
+  default_config = get_default_config()
+  parser.add_argument('-c', '--file',
+                      metavar='FILE', type=str,
+                      default=default_config,
+                      help=f'software version configuration file [default: {default_config}]')
 
-def process_common_arguments(args):
+def process_common_arguments(args: argparse.Namespace) -> bool:
   '''return True if should stop'''
   processors = [
     slogconf.exc_info,
     slogconf.filter_exc,
     slogconf.filter_nones,
     slogconf.filter_taskname,
   ]
   logger_factory = None
 
@@ -68,13 +109,16 @@ def process_common_arguments(args):
 
   if args.version:
     progname = os.path.basename(sys.argv[0])
-    print('%s v%s' % (progname, __version__))
+    print(f'{progname} v{__version__}')
     return True
   return False
 
-def safe_overwrite(fname, data, *, method='write', mode='w', encoding=None):
+def safe_overwrite(file: Path, data: Union[bytes, str], *,
+                   method: str = 'write', mode: str = 'w', encoding: Optional[str] = None) -> None:
   # FIXME: directory has no read perm
-  # FIXME: symlinks and hard links
-  tmpname = fname + '.tmp'
+  # FIXME: hard links
+  resolved_path = file.resolve()
+  tmpname = str(resolved_path) + '.tmp'
   # if not using "with", write can fail without exception
   with open(tmpname, mode, encoding=encoding) as f:
     getattr(f, method)(data)
@@ -82,136 +126,336 @@ def safe_overwrite(fname, data, *, method='write', mode='w', encoding=None):
     f.flush()
     os.fsync(f.fileno())
   # if the above write failed (because disk is full etc), the old data should be kept
-  os.rename(tmpname, fname)
+  os.rename(tmpname, resolved_path)
 
-def read_verfile(file):
-  v = {}
+def read_verfile(file: Path) -> ResultData:
   try:
     with open(file) as f:
-      for l in f:
-        name, ver = l.rstrip().split(None, 1)
-        v[name] = ver
+      data = f.read()
   except FileNotFoundError:
-    pass
+    return {}
+
+  try:
+    v = json.loads(data)
+  except json.decoder.JSONDecodeError:
+    # old format
+    v = {}
+    for l in data.splitlines():
+      name, ver = l.rstrip().split(None, 1)
+      v[name] = ver
+
+  if v.get('version') is None:
+    v = {k: RichResult(version=a) for k, a in v.items()}
+  elif v['version'] == 2:
+    v = {k: RichResult(**a) for k, a in v['data'].items()}
+  else:
+    raise Exception('unknown verfile version', v['version'])
+
   return v
 
-def write_verfile(file, versions):
-  # sort using only alphanums, as done by the sort command, and needed by
-  # comm command
-  data = ['%s %s\n' % item
-          for item in sorted(versions.items(), key=lambda i: (''.join(filter(str.isalnum, i[0])), i[1]))]
-  safe_overwrite(file, data, method='writelines')
+def write_verfile(file: Path, versions: ResultData) -> None:
+  d = {
+    'version': 2,
+    # sort and indent to make it friendly to human and git
+    'data': dict(sorted(versions.items())),
+  }
+  data = json.dumps(
+    d,
+    indent = 2,
+    ensure_ascii = False,
+    default = json_encode,
+  ) + '\n'
+  safe_overwrite(file, data)
 
-class Source:
-  oldver = newver = None
+def json_encode(obj):
+  if isinstance(obj, RichResult):
+    d = {k: v for k, v in dataclasses.asdict(obj).items() if v is not None}
+    return d
+  raise TypeError(obj)
 
-  def __init__(self, file):
-    self.config = config = configparser.ConfigParser(
-      dict_type=dict, allow_no_value=True
-    )
-    self.name = file.name
-    config.read_file(file)
-    if '__config__' in config:
-      c = config['__config__']
+class Options(NamedTuple):
+  ver_files: Optional[Tuple[Path, Path]]
+  max_concurrency: int
+  proxy: Optional[str]
+  keymanager: KeyManager
+  source_configs: Dict[str, Dict[str, Any]]
+  httplib: Optional[str]
+  http_timeout: int
 
-      d = os.path.dirname(file.name)
-      if 'oldver' in c and 'newver' in c:
-        self.oldver = os.path.expandvars(os.path.expanduser(
-          os.path.join(d, c.get('oldver'))))
-        self.newver = os.path.expandvars(os.path.expanduser(
-          os.path.join(d, c.get('newver'))))
+def load_file(
+  file: str, *,
+  use_keymanager: bool,
+) -> Tuple[Entries, Options]:
+  try:
+    with open(file, 'rb') as f:
+      config = tomllib.load(f)
+  except (OSError, tomllib.TOMLDecodeError) as e:
+    raise FileLoadError('version configuration file', file, e)
+
+  ver_files: Optional[Tuple[Path, Path]] = None
+  keymanager = KeyManager(None)
+  source_configs = {}
+
+  if '__config__' in config:
+    c = config.pop('__config__')
+    d = Path(file).parent
+
+    if 'oldver' in c and 'newver' in c:
+      oldver_s = os.path.expandvars(
+        os.path.expanduser(c.get('oldver')))
+      oldver = d / oldver_s
+      newver_s = os.path.expandvars(
+        os.path.expanduser(c.get('newver')))
+      newver = d / newver_s
+      ver_files = oldver, newver
 
+    if use_keymanager:
       keyfile = c.get('keyfile')
       if keyfile:
-        keyfile = os.path.expandvars(os.path.expanduser(
-          os.path.join(d, c.get('keyfile'))))
+        keyfile_s = os.path.expandvars(
+          os.path.expanduser(c.get('keyfile')))
+        keyfile = d / keyfile_s
+        keymanager = KeyManager(keyfile)
 
-      self.max_concurrent = c.getint('max_concurrent', 20)
-      self.keymanager = KeyManager(keyfile)
-      session.nv_config = config["__config__"]
+    if 'source' in c:
+      source_configs = c['source']
 
-    else:
-      self.max_concurrent = 20
-      self.keymanager = KeyManager(None)
+    max_concurrency = c.get('max_concurrency', 20)
+    proxy = c.get('proxy')
+    httplib = c.get('httplib', None)
+    http_timeout = c.get('http_timeout', 20)
+  else:
+    max_concurrency = 20
+    proxy = None
+    httplib = None
+    http_timeout = 20
 
-  async def check(self):
-    if self.oldver:
-      self.oldvers = read_verfile(self.oldver)
-    else:
-      self.oldvers = {}
-    self.curvers = self.oldvers.copy()
+  return cast(Entries, config), Options(
+    ver_files, max_concurrency, proxy, keymanager,
+    source_configs, httplib, http_timeout,
+  )
 
-    token_q = asyncio.Queue(maxsize=self.max_concurrent)
+def setup_httpclient(
+  max_concurrency: int = 20,
+  httplib: Optional[str] = None,
+  http_timeout: int = 20,
+) -> Dispatcher:
+  httplib_ = httplib or httpclient.find_best_httplib()
+  httpclient.setup(
+    httplib_, max_concurrency, http_timeout)
+  return Dispatcher()
 
-    for _ in range(self.max_concurrent):
-      await token_q.put(True)
+class Dispatcher:
+  def dispatch(
+    self,
+    entries: Entries,
+    task_sem: asyncio.Semaphore,
+    result_q: Queue[RawResult],
+    keymanager: KeyManager,
+    entry_waiter: EntryWaiter,
+    tries: int,
+    source_configs: Dict[str, Dict[str, Any]],
+  ) -> List[asyncio.Future]:
+    mods: Dict[str, Tuple[types.ModuleType, List]] = {}
+    ctx_tries.set(tries)
+    ctx_entry_waiter.set(entry_waiter)
+    root_ctx = contextvars.copy_context()
 
-    async def worker(name, conf):
-      await token_q.get()
-      try:
-        ret = await get_version(name, conf, keyman=self.keymanager)
-        return name, ret
-      except Exception as e:
-        return name, e
-      finally:
-        await token_q.put(True)
-
-    config = self.config
-    futures = []
-    for name in config.sections():
-      if name == '__config__':
-        continue
-
-      conf = config[name]
-      conf['oldver'] = self.oldvers.get(name, None)
-      fu = asyncio.ensure_future(worker(name, conf))
-      futures.append(fu)
-
-    for fu in asyncio.as_completed(futures):
-      name, result = await fu
-      if isinstance(result, Exception):
-        logger.error('unexpected error happened',
-                     name=name, exc_info=result)
-        self.on_exception(name, result)
-      elif result is not None:
-        self.print_version_update(name, result)
+    for name, entry in entries.items():
+      source = entry.get('source', 'none')
+      if source not in mods:
+        mod = import_module('nvchecker_source.' + source)
+        tasks: List[Tuple[str, Entry]] = []
+        mods[source] = mod, tasks
+        config = source_configs.get(source)
+        if config and getattr(mod, 'configure'):
+          mod.configure(config)
-      else:
-        conf = config[name]
-        if not conf.getboolean('missing_ok', False):
-          logger.warn('no-result', name=name)
-          self.on_no_result(name)
+      tasks = mods[source][1]
+      tasks.append((name, entry))
 
-    if self.newver:
-      write_verfile(self.newver, self.curvers)
+    ret = []
+    for mod, tasks in mods.values():
+      if hasattr(mod, 'Worker'):
+        worker_cls = mod.Worker
+      else:
+        worker_cls = FunctionWorker
 
-  def print_version_update(self, name, version):
-    oldver = self.oldvers.get(name, None)
-    if not oldver or oldver != version:
-      logger.info('updated', name=name, version=version, old_version=oldver)
-      self.curvers[name] = version
-      self.on_update(name, version, oldver)
-    else:
-      logger.debug('up-to-date', name=name, version=version)
+      ctx = root_ctx.copy()
+      worker = ctx.run(
+        worker_cls,
+        task_sem, result_q, tasks, keymanager,
+      )
+      if worker_cls is FunctionWorker:
+        func = mod.get_version
+        ctx.run(worker.initialize, func)
 
-  def on_update(self, name, version, oldver):
-    pass
+      ret.append(ctx.run(worker._run_maynot_raise))
 
-  def on_no_result(self, name):
-    pass
+    return ret
 
-  def on_exception(self, name, exc):
-    pass
+def substitute_version(
+  version: str, conf: Entry,
+) -> str:
+  '''
+  Substitute the version string via defined rules in the configuration file.
+  See usage.rst#global-options for details.
+  '''
+  prefix = conf.get('prefix')
+  if prefix:
+    if version.startswith(prefix):
+      version = version[len(prefix):]
 
-  def __repr__(self):
-    return '<Source from %r>' % self.name
+  from_pattern = conf.get('from_pattern')
+  if from_pattern:
+    to_pattern = conf.get('to_pattern')
+    if to_pattern is None:
+      raise ValueError("from_pattern exists but to_pattern doesn't")
 
-class KeyManager:
-  def __init__(self, file):
-    self.config = config = configparser.ConfigParser(dict_type=dict)
-    if file is not None:
-      config.read([file])
-    else:
-      config.add_section('keys')
+    version = re.sub(from_pattern, to_pattern, version)
 
-  def get_key(self, name):
-    return self.config.get('keys', name, fallback=None)
+  return version
 
+def apply_list_options(
+  versions: List[Union[str, RichResult]],
+  conf: Entry,
+  name: str,
+) -> Optional[Union[str, RichResult]]:
+  pattern = conf.get('include_regex')
+  if versions and pattern:
+    re_pat = re.compile(pattern)
+    versions2 = [x for x in versions
+                 if re_pat.fullmatch(str(x))]
+    if not versions2:
+      logger.warning('include_regex matched no versions',
+                     name=name, versions=versions, regex=pattern)
+      return None
+    versions = versions2
+
+  pattern = conf.get('exclude_regex')
+  if pattern:
+    re_pat = re.compile(pattern)
+    versions = [x for x in versions
+                if not re_pat.fullmatch(str(x))]
+
+  ignored = set(conf.get('ignored', '').split())
+  if ignored:
+    versions = [x for x in versions
+                if str(x) not in ignored]
+
+  if not versions:
+    return None
+
+  sort_version_key = sort_version_keys[
+    conf.get("sort_version_key", "parse_version")]
+  versions.sort(key=lambda version: sort_version_key(str(version)))  # type: ignore
+
+  return versions[-1]
+
+def _process_result(r: RawResult) -> Union[RichResult, Exception]:
+  version = r.version
+  conf = r.conf
+  name = r.name
+
+  url = None
+  revision = None
+  gitref = None
+  if isinstance(version, GetVersionError):
+    kw = version.kwargs
+    kw['name'] = name
+    logger.error(version.msg, **kw)
+    return version
+  elif isinstance(version, Exception):
+    logger.error('unexpected error happened',
+                 name=r.name, exc_info=r.version)
+    return version
+  elif isinstance(version, list):
+    version_str = apply_list_options(version, conf, name)
+    if isinstance(version_str, RichResult):
+      url = version_str.url
+      gitref = version_str.gitref
+      revision = version_str.revision
+      version_str = version_str.version
+  elif isinstance(version, RichResult):
+    version_str = version.version
+    url = version.url
+    gitref = version.gitref
+    revision = version.revision
+  else:
+    version_str = version
+
+  if version_str:
+    version_str = version_str.replace('\n', ' ')
+
+    try:
+      version_str = substitute_version(version_str, conf)
+      return RichResult(
+        version = version_str,
+        url = url,
+        gitref = gitref,
+        revision = revision,
+      )
+    except (ValueError, re.error) as e:
+      logger.exception('error occurred in version substitutions', name=name)
+      return e
+
+  else:
+    return ValueError('no version returned')
+
+def check_version_update(
+  oldvers: ResultData,
+  name: str,
+  r: RichResult,
+  verbose: bool,
+) -> None:
+  if old_result := oldvers.get(name):
+    oldver = old_result.version
+  else:
+    oldver = None
+  if not oldver or oldver != r.version:
+    logger.info(
+      'updated',
+      name = name,
+      version = r.version,
+      revision = r.revision,
+      old_version = oldver,
+      url = r.url,
+    )
+  else:
+    # provide visible user feedback if it was the only entry
+    level = logging.INFO if verbose else logging.DEBUG
+    logger.log(level, 'up-to-date', name=name, version=r.version, url=r.url)
+
+async def process_result(
+  oldvers: ResultData,
+  result_q: Queue[RawResult],
+  entry_waiter: EntryWaiter,
+  verbose: bool = False,
+) -> Tuple[ResultData, bool]:
+  ret = {}
+  has_failures = False
+  try:
+    while True:
+      r = await result_q.get()
+      try:
+        r1 = _process_result(r)
+      except Exception as e:
+        logger.exception('error processing result', result=r)
+        r1 = e
+      if isinstance(r1, Exception):
+        entry_waiter.set_exception(r.name, r1)
+        # no versions are returned from "apply_list_options"?
+        logger.error('no-result', name=r.name, error=repr(r1))
+        has_failures = True
+        continue
+      check_version_update(oldvers, r.name, r1, verbose)
+      entry_waiter.set_result(r.name, r1.version)
+      ret[r.name] = r1
+  except asyncio.CancelledError:
+    return ret, has_failures
+
+async def run_tasks(
+  futures: Sequence[Awaitable[None]]
+) -> None:
+  for fu in asyncio.as_completed(futures):
+    await fu

nvchecker/ctxvars.py (new file)
@@ -0,0 +1,21 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.

from __future__ import annotations

from contextvars import ContextVar
from typing import Optional, TYPE_CHECKING

from . import __version__

DEFAULT_USER_AGENT = f'lilydjwg/nvchecker {__version__}'

if TYPE_CHECKING:
  from .util import EntryWaiter

tries = ContextVar('tries', default=1)
proxy: ContextVar[Optional[str]] = ContextVar('proxy', default=None)
user_agent = ContextVar('user_agent', default=DEFAULT_USER_AGENT)
httptoken = ContextVar('httptoken', default=None)
entry_waiter: ContextVar[EntryWaiter] = ContextVar('entry_waiter')
verify_cert = ContextVar('verify_cert', default=True)

@@ -1,72 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

import re
from importlib import import_module

import structlog

logger = structlog.get_logger(logger_name=__name__)

handler_precedence = (
  'github', 'aur', 'pypi', 'archpkg', 'debianpkg', 'ubuntupkg',
  'gems', 'pacman',
  'cmd', 'bitbucket', 'regex', 'manual', 'vcs',
  'cratesio', 'npm', 'hackage', 'cpan', 'gitlab', 'packagist',
  'anitya', 'android_sdk',
)

def substitute_version(version, name, conf):
  '''
  Substitute the version string via defined rules in the configuration file.
  See README.rst#global-options for details.
  '''
  prefix = conf.get('prefix')
  if prefix:
    if version.startswith(prefix):
      version = version[len(prefix):]
    return version

  from_pattern = conf.get('from_pattern')
  if from_pattern:
    to_pattern = conf.get('to_pattern')
    if not to_pattern:
      raise ValueError('%s: from_pattern exists but to_pattern doesn\'t', name)

    return re.sub(from_pattern, to_pattern, version)

  # No substitution rules found. Just return the original version string.
  return version

_cache = {}

async def get_version(name, conf, **kwargs):
  for key in handler_precedence:
    if key in conf:
      mod = import_module('.source.' + key, __package__)
      func = mod.get_version
      get_cacheable_conf = getattr(mod, 'get_cacheable_conf', lambda name, conf: conf)
      break
  else:
    logger.error('no idea to get version info.', name=name)
    return

  cacheable_conf = get_cacheable_conf(name, conf)
  cache_key = tuple(sorted(cacheable_conf.items()))
  if cache_key in _cache:
    version = _cache[cache_key]
    logger.debug('cache hit', name=name,
                 cache_key=cache_key, cached=version)
    return version

  version = await func(name, conf, **kwargs)
  if version:
    version = version.replace('\n', ' ')
    try:
      version = substitute_version(version, name, conf)
    except (ValueError, re.error):
      logger.exception('error occurred in version substitutions', name=name)

  if version is not None:
    _cache[cache_key] = version
  return version

nvchecker/httpclient/__init__.py (new file)
@@ -0,0 +1,55 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from typing import Optional

from .base import TemporaryError, HTTPError

class Proxy:
  _obj = None

  def set_obj(self, obj):
    super().__setattr__('_obj', obj)

  def __getattr__(self, name):
    return getattr(self._obj, name)

  def __setattr__(self, name, value):
    return setattr(self._obj, name, value)

session = Proxy()

def setup(
  which: Optional[str] = None,
  concurreny: int = 20,
  timeout: int = 20,
) -> None:
  if which is None:
    which = find_best_httplib()

  m = __import__(
    '%s_httpclient' % which, globals(), locals(), level=1)

  session.set_obj(m.session)
  session.setup(concurreny, timeout)

def find_best_httplib() -> str:
  try:
    import tornado, pycurl
    # connection reuse, http/2
    which = 'tornado'
  except ImportError:
    try:
      import aiohttp
      which = 'aiohttp'
      # connection reuse
    except ImportError:
      try:
        import httpx
        which = 'httpx'
      except ImportError:
        import tornado
        which = 'tornado'
        # fallback

  return which

nvchecker/httpclient/aiohttp_httpclient.py (new file)
@@ -0,0 +1,85 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import asyncio
from typing import Optional, Dict

import structlog
import aiohttp

from .base import BaseSession, TemporaryError, Response, HTTPError

__all__ = ['session']

logger = structlog.get_logger(logger_name=__name__)

class AiohttpSession(BaseSession):
  session = None

  def setup(
    self,
    concurreny: int = 20,
    timeout: int = 20,
  ) -> None:
    self._concurreny = concurreny
    self._timeout = timeout

  async def request_impl(
    self, url: str, *,
    method: str,
    proxy: Optional[str] = None,
    headers: Dict[str, str] = {},
    follow_redirects: bool = True,
    params = (),
    json = None,
    body = None,
    verify_cert: bool = True,
  ) -> Response:
    if self.session is None:
      # need to create in async context
      self.session = aiohttp.ClientSession(
        connector = aiohttp.TCPConnector(limit=self._concurreny),
        timeout = aiohttp.ClientTimeout(total=self._timeout),
        trust_env = True,
      )

    kwargs = {
      'headers': headers,
      'params': params,
      'allow_redirects': follow_redirects,
    }
    if not verify_cert:
      kwargs['ssl'] = False

    if proxy is not None:
      kwargs['proxy'] = proxy
    if body is not None:
      # Make sure all backends have the same default encoding for post data.
      if 'Content-Type' not in headers:
        headers = {**headers, 'Content-Type': 'application/x-www-form-urlencoded'}
        kwargs['headers'] = headers
      kwargs['data'] = body.encode()
    elif json is not None:
      kwargs['json'] = json

    try:
      logger.debug('send request', method=method, url=url, kwargs=kwargs)
      res = await self.session.request(
        method, url, **kwargs)
    except (
      asyncio.TimeoutError, aiohttp.ClientConnectorError,
    ) as e:
      raise TemporaryError(599, repr(e), e)

    err_cls: Optional[type] = None
    if res.status >= 500:
      err_cls = TemporaryError
    elif res.status >= 400:
      err_cls = HTTPError
    if err_cls is not None:
      raise err_cls(res.status, res.reason, res)

    body = await res.content.read()
    return Response(res.headers, body)

session = AiohttpSession()

nvchecker/httpclient/base.py (new file)
@@ -0,0 +1,124 @@
# MIT licensed
# Copyright (c) 2019-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import structlog
from typing import Optional, Dict, Mapping
import json as _json

from ..ctxvars import tries, proxy, user_agent, httptoken, verify_cert

logger = structlog.get_logger(logger_name=__name__)

class Response:
  '''The response of an HTTP request.

  .. py:attribute:: body
     :type: bytes

  .. py:attribute:: headers
     :type: Mapping[str, str]
  '''
  def __init__(
    self,
    headers: Mapping[str, str],
    body: bytes,
  ) -> None:
    self.headers = headers
    self.body = body

  def json(self):
    '''Convert response content to JSON.'''
    return _json.loads(self.body.decode('utf-8'))

class BaseSession:
  '''The base class for different HTTP backend.'''
  def setup(
    self,
    concurreny: int = 20,
    timeout: int = 20,
  ) -> None:
    pass

  async def head(self, *args, **kwargs):
    '''Shortcut for ``HEAD`` request.'''
    return await self.request(
      method='HEAD', *args, **kwargs)

  async def get(self, *args, **kwargs):
    '''Shortcut for ``GET`` request.'''
    return await self.request(
      method='GET', *args, **kwargs)

  async def post(self, *args, **kwargs):
    '''Shortcut for ``POST`` request.'''
    return await self.request(
      method='POST', *args, **kwargs)

  async def request(
    self, url: str, *,
    method: str,
    headers: Dict[str, str] = {},
    follow_redirects: bool = True,
    params = (),
    json = None,
    body = None,
  ) -> Response:
    t = tries.get()
    p = proxy.get()
    ua = user_agent.get()
    httpt = httptoken.get()
    verify = verify_cert.get()

    headers = headers.copy()
    headers.setdefault('User-Agent', ua)
    if httpt is not None:
      headers.setdefault('Authorization', httpt)

    for i in range(1, t+1):
      try:
        return await self.request_impl(
          url,
          method = method,
          headers = headers,
          params = params,
          follow_redirects = follow_redirects,
          json = json,
          body = body,
          proxy = p or None,
          verify_cert = verify,
        )
      except TemporaryError as e:
        if i == t:
          raise
        else:
          logger.warning('temporary error, retrying',
                         tries = i, exc_info = e)
          continue

    raise Exception('should not reach')

  async def request_impl(
    self, url: str, *,
    method: str,
    proxy: Optional[str] = None,
    headers: Dict[str, str] = {},
    follow_redirects: bool = True,
    params = (),
    json = None,
    body = None,
    verify_cert: bool = True,
  ) -> Response:
    ''':meta private:'''
    raise NotImplementedError

class BaseHTTPError(Exception):
  def __init__(self, code, message, response):
    self.code = code
    self.message = message
    self.response = response

class TemporaryError(BaseHTTPError):
  '''A temporary error (e.g. network error) happens.'''

class HTTPError(BaseHTTPError):
  '''An HTTP 4xx error happens'''

nvchecker/httpclient/httpx_httpclient.py (new file)
@@ -0,0 +1,78 @@
# MIT licensed
# Copyright (c) 2020-2022,2024 lilydjwg <lilydjwg@gmail.com>, et al.

from typing import Dict, Optional, Tuple

import httpx

from .base import BaseSession, TemporaryError, Response, HTTPError

__all__ = ['session']

class HttpxSession(BaseSession):
  def setup(
    self,
    concurreny: int = 20,
    timeout: int = 20,
  ) -> None:
    self.clients: Dict[Tuple[Optional[str], bool], httpx.AsyncClient] = {}
    self.timeout = timeout

  async def request_impl(
    self, url: str, *,
    method: str,
    proxy: Optional[str] = None,
    headers: Dict[str, str] = {},
    follow_redirects: bool = True,
    params = (),
    json = None,
    body = None,
    verify_cert: bool = True,
  ) -> Response:
    client = self.clients.get((proxy, verify_cert))
    if not client:
      client = httpx.AsyncClient(
        timeout = httpx.Timeout(self.timeout, pool=None),
        http2 = True,
        proxy = proxy,
        verify = verify_cert,
      )
      self.clients[(proxy, verify_cert)] = client

    try:
      if body is not None:
        # Make sure all backends have the same default encoding for post data.
        if 'Content-Type' not in headers:
          headers = {**headers, 'Content-Type': 'application/x-www-form-urlencoded'}
        body = body.encode()
      r = await client.request(
        method, url, json = json, content = body,
        headers = headers,
        follow_redirects = follow_redirects,
        # httpx checks for None but not ()
        params = params or None,
      )
      err_cls: Optional[type] = None
      if r.status_code >= 500:
        err_cls = TemporaryError
      elif r.status_code >= 400:
        err_cls = HTTPError
      if err_cls is not None:
        raise err_cls(
          r.status_code,
          r.reason_phrase,
          r,
        )

    except httpx.TransportError as e:
      raise TemporaryError(599, repr(e), e)

    body = await r.aread()
    return Response(r.headers, body)

  async def aclose(self):
    for client in self.clients.values():
      await client.aclose()
    del self.clients

session = HttpxSession()

nvchecker/httpclient/tornado_httpclient.py (new file)
@@ -0,0 +1,103 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import json as _json
from urllib.parse import urlencode
from typing import Optional, Dict, Any
import os

from tornado.httpclient import AsyncHTTPClient, HTTPRequest

try:
  import pycurl
except ImportError:
  pycurl = None  # type: ignore

from .base import BaseSession, TemporaryError, Response, HTTPError

__all__ = ['session']

HTTP2_AVAILABLE = None if pycurl else False
SSL_CERT_FILE = os.environ.get('SSL_CERT_FILE')

def setup_curl(curl):
  global HTTP2_AVAILABLE
  if HTTP2_AVAILABLE is None:
    try:
      curl.setopt(pycurl.HTTP_VERSION, 4)
      HTTP2_AVAILABLE = True
    except pycurl.error:
      HTTP2_AVAILABLE = False
  elif HTTP2_AVAILABLE:
    curl.setopt(pycurl.HTTP_VERSION, 4)

  if SSL_CERT_FILE:
    curl.setopt_string(pycurl.CAINFO, SSL_CERT_FILE)
  curl.setopt_string(pycurl.ACCEPT_ENCODING, "")

class TornadoSession(BaseSession):
  def setup(
    self,
    concurreny: int = 20,
    timeout: int = 20,
  ) -> None:
    impl: Optional[str]
    if pycurl:
      impl = "tornado.curl_httpclient.CurlAsyncHTTPClient"
    else:
      impl = None
    AsyncHTTPClient.configure(
      impl, max_clients = concurreny)
    self.timeout = timeout

  async def request_impl(
    self, url: str, *,
    method: str,
    proxy: Optional[str] = None,
    headers: Dict[str, str] = {},
    follow_redirects: bool = True,
    params = (),
    json = None,
    body = None,
    verify_cert: bool = True,
  ) -> Response:
    kwargs: Dict[str, Any] = {
      'method': method,
      'headers': headers,
      'request_timeout': self.timeout,
      'follow_redirects': follow_redirects,
      'validate_cert': verify_cert,
    }

    if body:
      # By default the content type is already 'application/x-www-form-urlencoded'
      kwargs['body'] = body
    elif json:
      kwargs['body'] = _json.dumps(json)
    kwargs['prepare_curl_callback'] = setup_curl

    if proxy:
      host, port = proxy.rsplit(':', 1)
      kwargs['proxy_host'] = host
      kwargs['proxy_port'] = int(port)

    if params:
      q = urlencode(params)
      url += '?' + q

    r = HTTPRequest(url, **kwargs)
    res = await AsyncHTTPClient().fetch(
      r, raise_error=False)
    err_cls: Optional[type] = None
    if res.code >= 500:
      err_cls = TemporaryError
    elif res.code >= 400:
      err_cls = HTTPError
    if err_cls is not None:
      raise err_cls(
        res.code, res.reason, res
      )

    return Response(res.headers, res.body)

session = TornadoSession()

@@ -1 +1,4 @@
-This directory belongs to modules from my [winterpy](https://github.com/lilydjwg/winterpy) and can be synced from there without care.
+This directory contains code from other places:
+
+* `nicelogger.py`: from my [winterpy](https://github.com/lilydjwg/winterpy)
+* `packaging_version.py`: from python-packaging 20.9, modified

@@ -11,10 +11,10 @@ import sys
 import time
 import logging
 
-class TornadoLogFormatter(logging.Formatter):
-  def __init__(self, color, *args, **kwargs):
-    super().__init__(self, *args, **kwargs)
-    self._color = color
+class Colors:
+  def __init__(self, color=None):
+    if color is None:
+      color = support_color()
     if color:
       import curses
       curses.setupterm()
@@ -23,19 +23,32 @@
-        curses.tigetstr("setf") or "", "ascii")
-      else:
+      fg_color = curses.tigetstr("setaf") or curses.tigetstr("setf") or b""
+
+      self.blue = str(curses.tparm(fg_color, 4), "ascii")
+      self.yellow = str(curses.tparm(fg_color, 3), "ascii")
+      self.green = str(curses.tparm(fg_color, 2), "ascii")
+      self.red = str(curses.tparm(fg_color, 1), "ascii")
+      self.bright_red = str(curses.tparm(fg_color, 9), "ascii")
+      self.normal = str(curses.tigetstr("sgr0"), "ascii")
+
+    else:
+      self.blue = self.yellow = self.green = self.red = self.bright_red = self.normal = ""
+
+
+class TornadoLogFormatter(logging.Formatter):
+  def __init__(self, color, *args, **kwargs):
+    super().__init__(*args, **kwargs)
+    self._color = color
+    if color:
+      colors = Colors(color=color)
     self._colors = {
-      logging.DEBUG: str(curses.tparm(fg_color, 4), # Blue
-        "ascii"),
-      logging.INFO: str(curses.tparm(fg_color, 2), # Green
-        "ascii"),
-      logging.WARNING: str(curses.tparm(fg_color, 3), # Yellow
-        "ascii"),
-      logging.ERROR: str(curses.tparm(fg_color, 1), # Red
-        "ascii"),
-      logging.CRITICAL: str(curses.tparm(fg_color, 9), # Bright Red
-        "ascii"),
+      logging.DEBUG: colors.blue,
+      logging.INFO: colors.green,
+      logging.WARNING: colors.yellow,
+      logging.ERROR: colors.red,
+      logging.CRITICAL: colors.bright_red,
     }
-    self._normal = str(curses.tigetstr("sgr0"), "ascii")
+    self._normal = colors.normal
 
   def format(self, record):
     try:
@@ -44,8 +57,7 @@ class TornadoLogFormatter(logging.Formatter):
       record.message = "Bad message (%r): %r" % (e, record.__dict__)
     record.asctime = time.strftime(
       "%m-%d %H:%M:%S", self.converter(record.created))
-    record.asctime += '.%03d' % ((record.created % 1) * 1000)
-    prefix = '[%(levelname)1.1s %(asctime)s %(module)s:%(lineno)d]' % \
+    prefix = '[%(levelname)1.1s %(asctime)s.%(msecs)03d %(module)s:%(lineno)d]' % \
         record.__dict__
     if self._color:
       prefix = (self._colors.get(record.levelno, self._normal) +
@@ -59,6 +71,7 @@ class TornadoLogFormatter(logging.Formatter):
       'filename', 'exc_info', 'exc_text', 'created', 'funcName',
       'processName', 'process', 'msecs', 'relativeCreated', 'thread',
       'threadName', 'name', 'levelno', 'msg', 'pathname', 'stack_info',
+      'taskName',
     })
 
     if record.exc_info:
@@ -68,6 +81,18 @@ class TornadoLogFormatter(logging.Formatter):
       formatted = formatted.rstrip() + "\n" + record.exc_text
     return formatted.replace("\n", "\n    ")
 
+def support_color(stream=sys.stderr):
+  if stream.isatty():
+    try:
+      import curses
+      curses.setupterm()
+      if curses.tigetnum("colors") > 0:
+        return True
+    except:
+      import traceback
+      traceback.print_exc()
+  return False
+
 def enable_pretty_logging(level=logging.DEBUG, handler=None, color=None):
   '''
   handler: specify a handler instead of default StreamHandler
@@ -79,17 +104,8 @@ def enable_pretty_logging(level=logging.DEBUG, handler=None, color=None):
     h = logging.StreamHandler()
   else:
     h = handler
-  if color is None:
-    color = False
-    if handler is None and sys.stderr.isatty():
-      try:
-        import curses
-        curses.setupterm()
-        if curses.tigetnum("colors") > 0:
-          color = True
-      except:
-        import traceback
-        traceback.print_exc()
+  if color is None and handler is None:
+    color = support_color()
   formatter = TornadoLogFormatter(color=color)
   h.setLevel(level)
   h.setFormatter(formatter)

@@ -1,102 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

'''
Call libnotify
'''

__all__ = ["set", "show", "update", "set_timeout", "set_urgency"]

from ctypes import *
from threading import Lock
import atexit

NOTIFY_URGENCY_LOW = 0
NOTIFY_URGENCY_NORMAL = 1
NOTIFY_URGENCY_CRITICAL = 2
UrgencyLevel = {NOTIFY_URGENCY_LOW, NOTIFY_URGENCY_NORMAL, NOTIFY_URGENCY_CRITICAL}

libnotify = None
gobj = None
libnotify_lock = Lock()
libnotify_inited = False

class obj: pass
notify_st = obj()

def set(summary=None, body=None, icon_str=None):
  with libnotify_lock:
    init()

    if summary is not None:
      notify_st.summary = summary.encode()
      notify_st.body = notify_st.icon_str = None
    if body is not None:
      notify_st.body = body.encode()
    if icon_str is not None:
      notify_st.icon_str = icon_str.encode()

    libnotify.notify_notification_update(
      notify_st.notify,
      notify_st.summary,
      notify_st.body,
      notify_st.icon_str,
    )

def show():
  libnotify.notify_notification_show(notify_st.notify, c_void_p())

def update(summary=None, body=None, icon_str=None):
  if not any((summary, body)):
    raise TypeError('at least one argument please')

  set(summary, body, icon_str)
  show()

def set_timeout(self, timeout):
  '''set `timeout' in milliseconds'''
  libnotify.notify_notification_set_timeout(notify_st.notify, int(timeout))

def set_urgency(self, urgency):
  if urgency not in UrgencyLevel:
    raise ValueError
  libnotify.notify_notification_set_urgency(notify_st.notify, urgency)

def init():
  global libnotify_inited, libnotify, gobj
  if libnotify_inited:
    return

  try:
    libnotify = CDLL('libnotify.so')
  except OSError:
    libnotify = CDLL('libnotify.so.4')
  gobj = CDLL('libgobject-2.0.so')

  libnotify.notify_init('pynotify')
  libnotify_inited = True

  libnotify.notify_notification_new.restype = c_void_p
  notify_st.notify = c_void_p(libnotify.notify_notification_new(
    c_void_p(), c_void_p(), c_void_p(),
  ))
  atexit.register(uninit)

def uninit():
  global libnotify_inited
  try:
    if libnotify_inited:
      gobj.g_object_unref(notify_st.notify)
      libnotify.notify_uninit()
      libnotify_inited = False
  except AttributeError:
    # libnotify.so has been unloaded
    pass

if __name__ == '__main__':
  from time import sleep
  notify = __import__('__main__')
  notify.set('This is a test', '测试一下。')
  notify.show()
  sleep(1)
  notify.update(body='再测试一下。')
629
nvchecker/lib/packaging_version.py
Normal file
629
nvchecker/lib/packaging_version.py
Normal file
|
@ -0,0 +1,629 @@
|
|||
# This file comes from python-packaging 20.9 and is modified
|
||||
|
||||
# This file is dual licensed under the terms of the Apache License, Version
|
||||
# 2.0, and the BSD License.
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import collections
|
||||
import itertools
|
||||
import re
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
class InfinityType(object):
|
||||
def __repr__(self):
|
||||
# type: () -> str
|
||||
return "Infinity"
|
||||
|
||||
def __hash__(self):
|
||||
# type: () -> int
|
||||
return hash(repr(self))
|
||||
|
||||
def __lt__(self, other):
|
||||
# type: (object) -> bool
|
||||
return False
|
||||
|
||||
def __le__(self, other):
|
||||
# type: (object) -> bool
|
||||
return False
|
||||
|
||||
def __eq__(self, other):
|
||||
# type: (object) -> bool
|
||||
return isinstance(other, self.__class__)
|
||||
|
||||
def __ne__(self, other):
|
||||
# type: (object) -> bool
|
||||
return not isinstance(other, self.__class__)
|
||||
|
||||
def __gt__(self, other):
|
||||
# type: (object) -> bool
|
||||
return True
|
||||
|
||||
def __ge__(self, other):
|
||||
# type: (object) -> bool
|
||||
return True
|
||||
|
||||
def __neg__(self):
|
||||
# type: (object) -> NegativeInfinityType
|
||||
return NegativeInfinity
|
||||
|
||||
|
||||
Infinity = InfinityType()
|
||||
|
||||
|
||||
class NegativeInfinityType(object):
|
||||
def __repr__(self):
|
||||
# type: () -> str
|
||||
return "-Infinity"
|
||||
|
||||
def __hash__(self):
|
||||
# type: () -> int
|
||||
return hash(repr(self))
|
||||
|
||||
def __lt__(self, other):
|
||||
# type: (object) -> bool
|
||||
return True
|
||||
|
||||
def __le__(self, other):
|
||||
# type: (object) -> bool
|
||||
return True
|
||||
|
||||
def __eq__(self, other):
|
||||
# type: (object) -> bool
|
||||
return isinstance(other, self.__class__)
|
||||
|
||||
def __ne__(self, other):
|
||||
# type: (object) -> bool
|
||||
return not isinstance(other, self.__class__)
|
||||
|
||||
def __gt__(self, other):
|
||||
# type: (object) -> bool
|
||||
return False
|
||||
|
||||
def __ge__(self, other):
|
||||
# type: (object) -> bool
|
||||
return False
|
||||
|
||||
def __neg__(self):
|
||||
# type: (object) -> InfinityType
|
||||
return Infinity
|
||||
|
||||
|
||||
NegativeInfinity = NegativeInfinityType()
|
||||
|
||||
if TYPE_CHECKING: # pragma: no cover
|
||||
from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union
|
||||
|
||||
InfiniteTypes = Union[InfinityType, NegativeInfinityType]
|
||||
PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
|
||||
SubLocalType = Union[InfiniteTypes, int, str]
|
||||
LocalType = Union[
|
||||
NegativeInfinityType,
|
||||
Tuple[
|
||||
Union[
|
||||
SubLocalType,
|
||||
Tuple[SubLocalType, str],
|
||||
Tuple[NegativeInfinityType, SubLocalType],
|
||||
],
|
||||
...,
|
||||
],
|
||||
]
|
||||
CmpKey = Tuple[
|
||||
int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
|
||||
]
|
||||
LegacyCmpKey = Tuple[int, Tuple[str, ...]]
|
||||
VersionComparisonMethod = Callable[
|
||||
[Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool
|
||||
]
|
||||
|
||||
__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]
|
||||
|
||||
|
||||
_Version = collections.namedtuple(
|
||||
"_Version", ["epoch", "release", "dev", "pre", "post", "local"]
|
||||
)
|
||||
|
||||
|
||||
def parse(version):
|
||||
# type: (str) -> Union[LegacyVersion, Version]
|
||||
"""
|
||||
Parse the given version string and return either a :class:`Version` object
|
||||
or a :class:`LegacyVersion` object depending on if the given version is
|
||||
a valid PEP 440 version or a legacy version.
|
||||
"""
|
||||
try:
|
||||
return Version(version)
|
||||
except InvalidVersion:
|
||||
return LegacyVersion(version)
|
||||
|
||||
|
||||
class InvalidVersion(ValueError):
|
||||
"""
|
||||
An invalid version was found, users should refer to PEP 440.
|
||||
"""
|
||||
|
||||
|
||||
class _BaseVersion(object):
|
||||
    _key = None  # type: Union[CmpKey, LegacyCmpKey]

    def __hash__(self):
        # type: () -> int
        return hash(self._key)

    # Please keep the duplicated `isinstance` check
    # in the six comparisons hereunder
    # unless you find a way to avoid adding overhead function calls.
    def __lt__(self, other):
        # type: (_BaseVersion) -> bool
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key < other._key

    def __le__(self, other):
        # type: (_BaseVersion) -> bool
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key <= other._key

    def __eq__(self, other):
        # type: (object) -> bool
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key == other._key

    def __ge__(self, other):
        # type: (_BaseVersion) -> bool
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key >= other._key

    def __gt__(self, other):
        # type: (_BaseVersion) -> bool
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key > other._key

    def __ne__(self, other):
        # type: (object) -> bool
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key != other._key


class LegacyVersion(_BaseVersion):
    def __init__(self, version):
        # type: (str) -> None
        self._version = str(version)
        self._key = _legacy_cmpkey(self._version)

    def __str__(self):
        # type: () -> str
        return self._version

    def __repr__(self):
        # type: () -> str
        return "<LegacyVersion({0})>".format(repr(str(self)))

    @property
    def public(self):
        # type: () -> str
        return self._version

    @property
    def base_version(self):
        # type: () -> str
        return self._version

    @property
    def epoch(self):
        # type: () -> int
        return -1

    @property
    def release(self):
        # type: () -> None
        return None

    @property
    def pre(self):
        # type: () -> None
        return None

    @property
    def post(self):
        # type: () -> None
        return None

    @property
    def dev(self):
        # type: () -> None
        return None

    @property
    def local(self):
        # type: () -> None
        return None

    @property
    def is_prerelease(self):
        # type: () -> bool
        return False

    @property
    def is_postrelease(self):
        # type: () -> bool
        return False

    @property
    def is_devrelease(self):
        # type: () -> bool
        return False


_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)

_legacy_version_replacement_map = {
    "pre": "c",
    "preview": "c",
    "-": "final-",
    "rc": "c",
    "dev": "@",
}


def _parse_version_parts(s):
    # type: (str) -> Iterator[str]
    for part in _legacy_version_component_re.split(s):
        part = _legacy_version_replacement_map.get(part, part)

        if not part or part == ".":
            continue

        if part[:1] in "0123456789":
            # pad for numeric comparison
            yield part.zfill(8)
        else:
            yield "*" + part

    # ensure that alpha/beta/candidate are before final
    yield "*final"

def _legacy_cmpkey(version):
    # type: (str) -> LegacyCmpKey

    # We hardcode an epoch of -1 here. A PEP 440 version can only have an
    # epoch greater than or equal to 0, so this effectively sorts every
    # LegacyVersion, which uses the de facto standard originally implemented
    # by setuptools, before all PEP 440 versions.
    epoch = -1

    # This scheme is taken from pkg_resources.parse_version of setuptools,
    # prior to its adoption of the packaging library.
    parts = []  # type: List[str]
    for part in _parse_version_parts(version.lower()):
        if part.startswith("*"):
            # remove "-" before a prerelease tag
            if part < "*final":
                while parts and parts[-1] == "*final-":
                    parts.pop()

            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1] == "00000000":
                parts.pop()

        parts.append(part)

    return epoch, tuple(parts)
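
# A worked illustration of the legacy key (inferred from the code above, not
# from any documented contract): numeric parts are zero-padded to width 8 and
# non-numeric parts are starred, so prereleases sort before "*final".
#
#     _legacy_cmpkey("1.0.dev1")  # (-1, ('00000001', '*@', '00000001', '*final'))
#     _legacy_cmpkey("1.0")       # (-1, ('00000001', '*final'))
#
# Hence LegacyVersion("1.0.dev1") < LegacyVersion("1.0"), and the -1 epoch
# places every LegacyVersion before any PEP 440 Version.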

# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""
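
# Example of the intended third-party reuse (a sketch; the anchored compile
# in Version._regex below is what this module itself does):
#
#     my_regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$",
#                           re.VERBOSE | re.IGNORECASE)
#     assert my_regex.match("1.0rc1+ubuntu.1") is not None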

class Version(_BaseVersion):

    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)

    def __init__(self, version):
        # type: (str) -> None

        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion("Invalid version: '{0}'".format(version))

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            post=_parse_letter_version(
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self):
        # type: () -> str
        return "<Version({0})>".format(repr(str(self)))

    def __str__(self):
        # type: () -> str
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append("{0}!".format(self.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        # Pre-release
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))

        # Post-release
        if self.post is not None:
            parts.append(".post{0}".format(self.post))

        # Development release
        if self.dev is not None:
            parts.append(".dev{0}".format(self.dev))

        # Local version segment
        if self.local is not None:
            parts.append("+{0}".format(self.local))

        return "".join(parts)

    @property
    def epoch(self):
        # type: () -> int
        _epoch = self._version.epoch  # type: int
        return _epoch

    @property
    def release(self):
        # type: () -> Tuple[int, ...]
        _release = self._version.release  # type: Tuple[int, ...]
        return _release

    @property
    def pre(self):
        # type: () -> Optional[Tuple[str, int]]
        _pre = self._version.pre  # type: Optional[Tuple[str, int]]
        return _pre

    @property
    def post(self):
        # type: () -> Optional[int]
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self):
        # type: () -> Optional[int]
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self):
        # type: () -> Optional[str]
        if self._version.local:
            return ".".join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self):
        # type: () -> str
        return str(self).split("+", 1)[0]

    @property
    def base_version(self):
        # type: () -> str
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append("{0}!".format(self.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        return "".join(parts)

    @property
    def is_prerelease(self):
        # type: () -> bool
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self):
        # type: () -> bool
        return self.post is not None

    @property
    def is_devrelease(self):
        # type: () -> bool
        return self.dev is not None

    @property
    def major(self):
        # type: () -> int
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self):
        # type: () -> int
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self):
        # type: () -> int
        return self.release[2] if len(self.release) >= 3 else 0


def _parse_letter_version(
    letter,  # type: str
    number,  # type: Union[str, bytes, SupportsInt]
):
    # type: (...) -> Optional[Tuple[str, int]]

    if letter:
        # We consider there to be an implicit 0 in a pre-release if there is
        # not a numeral associated with it.
        if number is None:
            number = 0

        # We normalize any letters to their lower case form
        letter = letter.lower()

        # We consider some words to be alternate spellings of other words and
        # in those cases we want to normalize the spellings to our preferred
        # spelling.
        if letter == "alpha":
            letter = "a"
        elif letter == "beta":
            letter = "b"
        elif letter in ["c", "pre", "preview"]:
            letter = "rc"
        elif letter in ["rev", "r"]:
            letter = "post"

        return letter, int(number)
    if not letter and number:
        # We assume if we are given a number, but we are not given a letter
        # then this is using the implicit post release syntax (e.g. 1.0-1)
        letter = "post"

        return letter, int(number)

    return None


_local_version_separators = re.compile(r"[\._-]")


def _parse_local_version(local):
    # type: (str) -> Optional[LocalType]
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is not None:
        return tuple(
            part.lower() if not part.isdigit() else int(part)
            for part in _local_version_separators.split(local)
        )
    return None


def _cmpkey(
    epoch,  # type: int
    release,  # type: Tuple[int, ...]
    pre,  # type: Optional[Tuple[str, int]]
    post,  # type: Optional[Tuple[str, int]]
    dev,  # type: Optional[Tuple[str, int]]
    local,  # type: Optional[Tuple[SubLocalType]]
):
    # type: (...) -> CmpKey

    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll reverse the list, drop all the now
    # leading zeros until we come to something non-zero, then re-reverse the
    # rest back into the correct order, and use that tuple as our sorting key.
    _release = tuple(
        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        _pre = NegativeInfinity  # type: PrePostDevType
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        _pre = Infinity
    else:
        _pre = pre

    # Versions without a post segment should sort before those with one.
    if post is None:
        _post = NegativeInfinity  # type: PrePostDevType
    else:
        _post = post

    # Versions without a development segment should sort after those with one.
    if dev is None:
        _dev = Infinity  # type: PrePostDevType
    else:
        _dev = dev

    if local is None:
        # Versions without a local segment should sort before those with one.
        _local = NegativeInfinity  # type: LocalType
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alphanumeric segments sort before numeric segments
        # - Alphanumeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        _local = tuple(
            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
        )

    return epoch, _release, _pre, _post, _dev, _local
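
# Resulting order, as a quick sketch (inferred from the key construction
# above; not an exhaustive statement of PEP 440 semantics):
#
#     Version("1.0.dev0") < Version("1.0a0") < Version("1.0") < Version("1.0.post0")
#     Version("1.0") == Version("1.0.0")   # trailing zeros are stripped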

@@ -1,45 +0,0 @@
#!/usr/bin/env python3
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

import argparse
import asyncio

import structlog

from .lib import notify
from . import core

logger = structlog.get_logger(logger_name=__name__)

notifications = []
args = None

class Source(core.Source):
  def on_update(self, name, version, oldver):
    if args.notify:
      msg = '%s updated to version %s' % (name, version)
      notifications.append(msg)
      notify.update('nvchecker', '\n'.join(notifications))

def main():
  global args

  parser = argparse.ArgumentParser(description='New version checker for software')
  parser.add_argument('-n', '--notify', action='store_true', default=False,
                      help='show desktop notifications when a new version is available')
  core.add_common_arguments(parser)
  args = parser.parse_args()
  if core.process_common_arguments(args):
    return

  if not args.file:
    return

  s = Source(args.file)

  ioloop = asyncio.get_event_loop()
  ioloop.run_until_complete(s.check())

if __name__ == '__main__':
  main()

@@ -1,14 +1,17 @@
# vim: se sw=2:
# MIT licensed
# Copyright (c) 2018 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2018-2020,2023-2024 lilydjwg <lilydjwg@gmail.com>, et al.

import logging
import os
import io
import traceback
import sys

import structlog

from .httpclient import TemporaryError

def _console_msg(event):
  evt = event['event']
  if evt == 'up-to-date':

@@ -23,8 +26,11 @@ def _console_msg(event):
  else:
    msg = evt

  if 'revision' in event and not event['revision']:
    del event['revision']

  if 'name' in event:
    msg = '%s: %s' % (event['name'], msg)
    msg = f"{event['name']}: {msg}"
    del event['name']

  event['msg'] = msg

@@ -36,6 +42,33 @@ def exc_info(logger, level, event):
  event['exc_info'] = True
  return event

def filter_nones(logger, level, event):
  if 'url' in event and event['url'] is None:
    del event['url']
  return event

def filter_taskname(logger, level, event):
  # added in Python 3.12, not useful to us, but appears as a normal KV.
  if 'taskName' in event:
    del event['taskName']
  return event

def filter_exc(logger, level, event):
  exc_info = event.get('exc_info')
  if not exc_info:
    return event

  if exc_info is True:
    exc = sys.exc_info()[1]
  else:
    exc = exc_info

  if isinstance(exc, TemporaryError):
    if exc.code == 599: # network issues
      del event['exc_info']
      event['error'] = exc
  return event

def stdlib_renderer(logger, level, event):
  # return event unchanged for further processing
  std_event = _console_msg(event.copy())

@@ -45,6 +78,8 @@ def stdlib_renderer(logger, level, event):
  logger = logging.getLogger()
  msg = std_event.pop('msg', std_event.pop('event'))
  exc_info = std_event.pop('exc_info', None)
  if 'error' in std_event:
    std_event['error'] = repr(std_event['error'])
  getattr(logger, level)(
    msg, exc_info = exc_info, extra=std_event,
  )

@@ -64,7 +99,7 @@ class _Logger(logging.Logger):

  _structlog_dir = os.path.dirname(structlog.__file__)

  def findCaller(self, stack_info=False):
  def findCaller(self, stack_info=False, stacklevel=1):
    """
    Find the stack frame of the caller so that we can note the source
    file name, line number and function name.

@@ -74,6 +109,12 @@ class _Logger(logging.Logger):
    #IronPython isn't run with -X:Frames.
    if f is not None:
      f = f.f_back
    orig_f = f
    while f and stacklevel > 1:
      f = f.f_back
      stacklevel -= 1
    if not f:
      f = orig_f
    rv = "(unknown file)", 0, "(unknown function)", None
    while hasattr(f, "f_code"):
      co = f.f_code
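
# Sketch of how the processors added above are meant to chain in structlog
# (assumed wiring; the actual configure() call lives elsewhere in nvchecker):
#
#     structlog.configure(processors=[
#       exc_info, filter_nones, filter_taskname, filter_exc,
#       stdlib_renderer,
#     ])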

@@ -1,25 +1,34 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2013-2021 lilydjwg <lilydjwg@gmail.com>, et al.

'''
Sort versions using pkg_resource.parse_version or pyalpm.vercmp
Sort versions using deprecated pkg_resource / packaging.parse_version or pyalpm.vercmp
'''

__all__ = ["sort_version_keys"]

from functools import cmp_to_key
from .lib.packaging_version import parse as parse_version

from pkg_resources import parse_version
try:
  import pyalpm
  from functools import cmp_to_key
  vercmp = cmp_to_key(pyalpm.vercmp)
  vercmp_available = True
except ImportError:
  def vercmp(k):
    raise NotImplementedError("Using vercmp but pyalpm can not be imported!")
  vercmp_available = False

sort_version_keys = {"parse_version": parse_version, "vercmp": vercmp}
try:
  from awesomeversion import AwesomeVersion
  awesomeversion_available = True
except ImportError:
  def AwesomeVersion(k): # type: ignore
    raise NotImplementedError("Using awesomeversion but it can not be imported!")
  awesomeversion_available = False

if __name__ == '__main__':
  assert(parse_version("v6.0") < parse_version("6.1"))
  assert(parse_version("v6.0") > parse_version("v6.1-stable"))
  assert(vercmp("v6.0") < vercmp("v6.1-stable"))
sort_version_keys = {
  "parse_version": parse_version,
  "vercmp": vercmp,
  "awesomeversion": AwesomeVersion,
}
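
# Usage sketch (keys as defined above; 'vercmp' additionally needs pyalpm,
# 'awesomeversion' needs the awesomeversion package):
#
#     key = sort_version_keys['parse_version']
#     sorted(['1.10', '1.9', '1.9.1'], key=key)  # ['1.9', '1.9.1', '1.10']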

@@ -1,28 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

try:
  import tornado, pycurl
  # connection reuse, http/2
  which = 'tornado'
except ImportError:
  try:
    import aiohttp
    which = 'aiohttp'
    # connection reuse
  except ImportError:
    import tornado
    which = 'tornado'
    # fallback

m = __import__('%s_httpclient' % which, globals(), locals(), level=1)
__all__ = m.__all__
for x in __all__:
  globals()[x] = getattr(m, x)

def conf_cacheable_with_name(key):
  def get_cacheable_conf(name, conf):
    conf = dict(conf)
    conf[key] = conf.get(key) or name
    return conf
  return get_cacheable_conf
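
# Behavior sketch for conf_cacheable_with_name (as defined above): the
# returned function fills in the entry name when the keyed option is unset.
#
#     get_cacheable_conf = conf_cacheable_with_name('pypi')
#     get_cacheable_conf('requests', {})                    # {'pypi': 'requests'}
#     get_cacheable_conf('requests', {'pypi': 'urllib3'})   # {'pypi': 'urllib3'}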

@@ -1,33 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

import atexit
import asyncio
import aiohttp
connector = aiohttp.TCPConnector(limit=20)

__all__ = ['session', 'HTTPError']

class HTTPError(Exception):
  def __init__(self, code, message, response):
    self.code = code
    self.message = message
    self.response = response

class BetterClientSession(aiohttp.ClientSession):
  async def _request(self, *args, **kwargs):
    if hasattr(self, "nv_config") and self.nv_config.get("proxy"):
      kwargs.setdefault("proxy", self.nv_config.get("proxy"))

    res = await super(BetterClientSession, self)._request(
      *args, **kwargs)
    if res.status >= 400:
      raise HTTPError(res.status, res.reason, res)
    return res

session = BetterClientSession(connector=connector)

@atexit.register
def cleanup():
  loop = asyncio.get_event_loop()
  loop.run_until_complete(session.close())

@@ -1,19 +0,0 @@
# MIT licensed
# Copyright (c) 2017 lilydjwg <lilydjwg@gmail.com>, et al.

import structlog

from . import session

logger = structlog.get_logger(logger_name=__name__)

URL = 'https://release-monitoring.org/api/project/{pkg}'

async def get_version(name, conf, **kwargs):
  pkg = conf.get('anitya')
  url = URL.format(pkg = pkg)

  async with session.get(url) as res:
    data = await res.json()

  return data['version']

@@ -1,30 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

import structlog

from . import session, conf_cacheable_with_name

logger = structlog.get_logger(logger_name=__name__)

URL = 'https://www.archlinux.org/packages/search/json/'

get_cacheable_conf = conf_cacheable_with_name('archpkg')

async def get_version(name, conf, **kwargs):
  pkg = conf.get('archpkg') or name
  strip_release = conf.getboolean('strip-release', False)
  async with session.get(URL, params={"name": pkg}) as res:
    data = await res.json()

  if not data['results']:
    logger.error('Arch package not found', name=name)
    return

  r = [r for r in data['results'] if r['repo'] != 'testing'][0]
  if strip_release:
    version = r['pkgver']
  else:
    version = r['pkgver'] + '-' + r['pkgrel']

  return version

@@ -1,31 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

import structlog
from datetime import datetime

from . import session, conf_cacheable_with_name

logger = structlog.get_logger(logger_name=__name__)

AUR_URL = 'https://aur.archlinux.org/rpc/?v=5&type=info&arg[]='

get_cacheable_conf = conf_cacheable_with_name('aur')

async def get_version(name, conf, **kwargs):
  aurname = conf.get('aur') or name
  use_last_modified = conf.getboolean('use_last_modified', False)
  strip_release = conf.getboolean('strip-release', False)
  async with session.get(AUR_URL, params={"v": 5, "type": "info", "arg[]": aurname}) as res:
    data = await res.json()

  if not data['results']:
    logger.error('AUR upstream not found', name=name)
    return

  version = data['results'][0]['Version']
  if use_last_modified:
    version += '-' + datetime.utcfromtimestamp(data['results'][0]['LastModified']).strftime('%Y%m%d%H%M%S')
  if strip_release and '-' in version:
    version = version.rsplit('-', 1)[0]
  return version

@@ -1,31 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

from . import session
from ..sortversion import sort_version_keys

# doc: https://confluence.atlassian.com/display/BITBUCKET/commits+or+commit+Resource
BITBUCKET_URL = 'https://bitbucket.org/api/2.0/repositories/%s/commits/%s'
BITBUCKET_MAX_TAG = 'https://bitbucket.org/api/1.0/repositories/%s/tags'

async def get_version(name, conf, **kwargs):
  repo = conf.get('bitbucket')
  br = conf.get('branch', '')
  use_max_tag = conf.getboolean('use_max_tag', False)
  ignored_tags = conf.get("ignored_tags", "").split()
  sort_version_key = sort_version_keys[conf.get("sort_version_key", "parse_version")]
  if use_max_tag:
    url = BITBUCKET_MAX_TAG % repo
  else:
    url = BITBUCKET_URL % (repo, br)

  async with session.get(url) as res:
    data = await res.json()

  if use_max_tag:
    data = [tag for tag in data if tag not in ignored_tags]
    data.sort(key=sort_version_key)
    version = data[-1]
  else:
    version = data['values'][0]['date'].split('T', 1)[0].replace('-', '')
  return version

@@ -1,30 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

import asyncio

import structlog

logger = structlog.get_logger(logger_name=__name__)

async def get_version(name, conf, **kwargs):
  cmd = conf['cmd']
  p = await asyncio.create_subprocess_shell(
    cmd,
    stdout=asyncio.subprocess.PIPE,
    stderr=asyncio.subprocess.PIPE,
  )

  output, error = await p.communicate()
  output = output.strip().decode('latin1')
  error = error.strip().decode(errors='replace')
  if p.returncode != 0:
    logger.error('command exited with error',
                 cmd=cmd, error=error,
                 name=name, returncode=p.returncode)
  elif not output:
    logger.error('command exited without output',
                 cmd=cmd, error=error,
                 name=name, returncode=p.returncode)
  else:
    return output

@@ -1,16 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

from .simple_json import simple_json

# Using metacpan
CPAN_URL = 'https://fastapi.metacpan.org/release/%s'

def _version_from_json(data):
  return str(data['version'])

get_version, get_cacheable_conf = simple_json(
  CPAN_URL,
  'cpan',
  _version_from_json,
)

@@ -1,15 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2018 lilydjwg <lilydjwg@gmail.com>, et al.

from . import session, conf_cacheable_with_name

API_URL = 'https://crates.io/api/v1/crates/%s'

get_cacheable_conf = conf_cacheable_with_name('cratesio')

async def get_version(name, conf, **kwargs):
  name = conf.get('cratesio') or name
  async with session.get(API_URL % name) as res:
    data = await res.json()
  version = [v['num'] for v in data['versions'] if not v['yanked']][0]
  return version

@@ -1,15 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

from .simple_json import simple_json

GEMS_URL = 'https://rubygems.org/api/v1/versions/%s.json'

def _version_from_json(data):
  return data[0]['number']

get_version, get_cacheable_conf = simple_json(
  GEMS_URL,
  'gems',
  _version_from_json,
)

@@ -1,138 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2018 lilydjwg <lilydjwg@gmail.com>, et al.

import os
import re
import time
from functools import partial

import structlog

from . import session, HTTPError
from ..sortversion import sort_version_keys

logger = structlog.get_logger(logger_name=__name__)

GITHUB_URL = 'https://api.github.com/repos/%s/commits'
GITHUB_LATEST_RELEASE = 'https://api.github.com/repos/%s/releases/latest'
GITHUB_MAX_TAG = 'https://api.github.com/repos/%s/tags'

async def get_version(name, conf, **kwargs):
  try:
    return await get_version_real(name, conf, **kwargs)
  except HTTPError as e:
    check_ratelimit(e, name)

async def get_version_real(name, conf, **kwargs):
  repo = conf.get('github')
  br = conf.get('branch')
  use_latest_release = conf.getboolean('use_latest_release', False)
  use_max_tag = conf.getboolean('use_max_tag', False)
  include_tags_pattern = conf.get("include_tags_pattern", "")
  ignored_tags = conf.get("ignored_tags", "").split()
  sort_version_key = sort_version_keys[conf.get("sort_version_key", "parse_version")]
  if use_latest_release:
    url = GITHUB_LATEST_RELEASE % repo
  elif use_max_tag:
    url = GITHUB_MAX_TAG % repo
  else:
    url = GITHUB_URL % repo
    if br:
      url += '?sha=' + br
  headers = {
    'Accept': 'application/vnd.github.quicksilver-preview+json',
    'User-Agent': 'lilydjwg/nvchecker',
  }
  if 'NVCHECKER_GITHUB_TOKEN' in os.environ:
    headers['Authorization'] = 'token %s' % os.environ['NVCHECKER_GITHUB_TOKEN']
  else:
    key = kwargs['keyman'].get_key('github')
    if key:
      headers['Authorization'] = 'token %s' % key

  kwargs = {}
  if conf.get('proxy'):
    kwargs["proxy"] = conf.get("proxy")

  if use_max_tag:
    return await max_tag(partial(
      session.get, headers=headers, **kwargs),
      url, name, ignored_tags, include_tags_pattern,
      sort_version_key,
    )

  async with session.get(url, headers=headers, **kwargs) as res:
    logger.debug('X-RateLimit-Remaining',
                 n=res.headers.get('X-RateLimit-Remaining'))
    data = await res.json()

  if use_latest_release:
    if 'tag_name' not in data:
      logger.error('No tag found in upstream repository.',
                   name=name)
      return
    version = data['tag_name']

  else:
    # YYYYMMDD.HHMMSS
    version = data[0]['commit']['committer']['date'] \
        .rstrip('Z').replace('-', '').replace(':', '').replace('T', '.')

  return version

async def max_tag(
  getter, url, name,
  ignored_tags, include_tags_pattern, sort_version_key,
):
  # paging is needed

  while True:
    async with getter(url) as res:
      logger.debug('X-RateLimit-Remaining',
                   n=res.headers.get('X-RateLimit-Remaining'))
      links = res.headers.get('Link')
      data = await res.json()

    data = [tag["name"] for tag in data if tag["name"] not in ignored_tags]
    if include_tags_pattern:
      data = [x for x in data
              if re.search(include_tags_pattern, x)]
    if data:
      data.sort(key=sort_version_key)
      return data[-1]
    else:
      next_page_url = get_next_page_url(links)
      if not next_page_url:
        break
      else:
        url = next_page_url

  logger.error('No tag found in upstream repository.',
               name=name,
               include_tags_pattern=include_tags_pattern)
  return

def get_next_page_url(links):
  links = links.split(', ')
  next_link = [x for x in links if x.endswith('rel="next"')]
  if not next_link:
    return

  return next_link[0].split('>', 1)[0][1:]

def check_ratelimit(exc, name):
  res = exc.response
  if not res:
    raise

  # default -1 is used to re-raise the exception
  n = int(res.headers.get('X-RateLimit-Remaining', -1))
  if n == 0:
    reset = int(res.headers.get('X-RateLimit-Reset'))
    logger.error('rate limited, resetting at %s. '
                 'Or get an API token to increase the allowance if not yet'
                 % time.ctime(reset),
                 name = name,
                 reset = reset)
  else:
    raise
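
# Sketch of the Link-header pagination handled by get_next_page_url above
# (header format per GitHub's REST API; the value is illustrative):
#
#     links = '<https://api.github.com/repositories/1/tags?page=2>; rel="next", ' \
#             '<https://api.github.com/repositories/1/tags?page=3>; rel="last"'
#     get_next_page_url(links)
#     # -> 'https://api.github.com/repositories/1/tags?page=2'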

@@ -1,74 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2018 lilydjwg <lilydjwg@gmail.com>, et al.

import os
import urllib.parse

import structlog

from . import session, HTTPError
from ..sortversion import sort_version_keys

logger = structlog.get_logger(logger_name=__name__)

GITLAB_URL = 'https://%s/api/v4/projects/%s/repository/commits?ref_name=%s'
GITLAB_MAX_TAG = 'https://%s/api/v4/projects/%s/repository/tags'

async def get_version(name, conf, **kwargs):
  try:
    return await get_version_real(name, conf, **kwargs)
  except HTTPError as e:
    check_ratelimit(e, name)

async def get_version_real(name, conf, **kwargs):
  repo = urllib.parse.quote_plus(conf.get('gitlab'))
  br = conf.get('branch', 'master')
  host = conf.get('host', "gitlab.com")
  use_max_tag = conf.getboolean('use_max_tag', False)
  ignored_tags = conf.get("ignored_tags", "").split()
  sort_version_key = sort_version_keys[conf.get("sort_version_key", "parse_version")]

  if use_max_tag:
    url = GITLAB_MAX_TAG % (host, repo)
  else:
    url = GITLAB_URL % (host, repo, br)

  # Load token from config
  token = conf.get('token')
  # Load token from environ
  if token is None:
    env_name = "NVCHECKER_GITLAB_TOKEN_" + host.upper().replace(".", "_").replace("/", "_")
    token = os.environ.get(env_name)
  # Load token from keyman
  if token is None and 'keyman' in kwargs:
    key_name = 'gitlab_' + host.lower().replace('.', '_').replace("/", "_")
    token = kwargs['keyman'].get_key(key_name)

  # Set private token if token exists.
  headers = {}
  if token:
    headers["PRIVATE-TOKEN"] = token

  async with session.get(url, headers=headers) as res:
    data = await res.json()
  if use_max_tag:
    data = [tag["name"] for tag in data if tag["name"] not in ignored_tags]
    data.sort(key=sort_version_key)
    version = data[-1]
  else:
    version = data[0]['created_at'].split('T', 1)[0].replace('-', '')
  return version

def check_ratelimit(exc, name):
  res = exc.response
  if not res:
    raise

  # default -1 is used to re-raise the exception
  n = int(res.headers.get('RateLimit-Remaining', -1))
  if n == 0:
    logger.error('rate limited, resetting at (unknown). '
                 'Or get an API token to increase the allowance if not yet',
                 name = name)
  else:
    raise

@@ -1,15 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

from .simple_json import simple_json

HACKAGE_URL = 'https://hackage.haskell.org/package/%s/preferred.json'

def _version_from_json(data):
  return data['normal-version'][0]

get_version, get_cacheable_conf = simple_json(
  HACKAGE_URL,
  'hackage',
  _version_from_json,
)

@@ -1,5 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

async def get_version(name, conf, **kwargs):
  return conf.get('manual').strip() or None

@@ -1,15 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

from .simple_json import simple_json

NPM_URL = 'https://registry.npmjs.org/%s'

def _version_from_json(data):
  return data['dist-tags']['latest']

get_version, get_cacheable_conf = simple_json(
  NPM_URL,
  'npm',
  _version_from_json,
)

@@ -1,18 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

from .simple_json import simple_json

PACKAGIST_URL = 'https://packagist.org/packages/%s.json'

def _version_from_json(data):
  data = {version: details for version, details in data["package"]['versions'].items() if version != "dev-master"}

  if len(data):
    return max(data, key=lambda version: data[version]["time"])

get_version, get_cacheable_conf = simple_json(
  PACKAGIST_URL,
  'packagist',
  _version_from_json,
)

@@ -1,18 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

from . import cmd, conf_cacheable_with_name

get_cacheable_conf = conf_cacheable_with_name('debianpkg')

async def get_version(name, conf, **kwargs):
  referree = conf.get('pacman') or name
  c = "LANG=C pacman -Si %s | grep -F Version | awk '{print $3}'" % referree
  conf['cmd'] = c
  strip_release = conf.getboolean('strip-release', False)

  version = await cmd.get_version(name, conf)

  if strip_release and '-' in version:
    version = version.rsplit('-', 1)[0]
  return version

@@ -1,15 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

from .simple_json import simple_json

PYPI_URL = 'https://pypi.org/pypi/%s/json'

def _version_from_json(data):
  return data['info']['version']

get_version, get_cacheable_conf = simple_json(
  PYPI_URL,
  'pypi',
  _version_from_json,
)

@@ -1,39 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

import re
import sre_constants

import structlog

from . import session
from ..sortversion import sort_version_keys

logger = structlog.get_logger(logger_name=__name__)

async def get_version(name, conf, **kwargs):
  try:
    regex = re.compile(conf['regex'])
  except sre_constants.error:
    logger.warn('bad regex, skipped.', name=name, exc_info=True)
    return

  encoding = conf.get('encoding', 'latin1')

  kwargs = {}
  headers = {}
  if conf.get('proxy'):
    kwargs["proxy"] = conf.get("proxy")
  if conf.get('user_agent'):
    headers['User-Agent'] = conf['user_agent']
  sort_version_key = sort_version_keys[conf.get("sort_version_key", "parse_version")]

  async with session.get(conf['url'], headers=headers, **kwargs) as res:
    body = (await res.read()).decode(encoding)
  try:
    version = max(regex.findall(body), key=sort_version_key)
  except ValueError:
    version = None
    if not conf.getboolean('missing_ok', False):
      logger.error('version string not found.', name=name)
  return version

@@ -1,22 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

from . import session, conf_cacheable_with_name

def simple_json(urlpat, confkey, version_from_json):

  async def get_version(name, conf, **kwargs):
    repo = conf.get(confkey) or name
    url = urlpat % repo
    kwargs = {}
    if conf.get('proxy'):
      kwargs["proxy"] = conf.get('proxy')

    async with session.get(url, **kwargs) as res:
      data = await res.json(content_type=None)
    version = version_from_json(data)
    return version

  get_cacheable_conf = conf_cacheable_with_name(confkey)

  return get_version, get_cacheable_conf
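
# Factory usage sketch (this mirrors how the pypi/npm/gems modules above are
# built; the URL and key here are just an example):
#
#     get_version, get_cacheable_conf = simple_json(
#       'https://pypi.org/pypi/%s/json',
#       'pypi',
#       lambda data: data['info']['version'],
#     )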

@@ -1,75 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

import json
from urllib.parse import urlencode

from tornado.httpclient import AsyncHTTPClient, HTTPRequest, HTTPResponse
from tornado.httpclient import HTTPError
from tornado.platform.asyncio import AsyncIOMainLoop, to_asyncio_future
AsyncIOMainLoop().install()

try:
  import pycurl
  AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient", max_clients=20)
except ImportError:
  pycurl = None

__all__ = ['session', 'HTTPError']

client = AsyncHTTPClient()
HTTP2_AVAILABLE = None if pycurl else False

def try_use_http2(curl):
  global HTTP2_AVAILABLE
  if HTTP2_AVAILABLE is None:
    try:
      curl.setopt(pycurl.HTTP_VERSION, 4)
      HTTP2_AVAILABLE = True
    except pycurl.error:
      HTTP2_AVAILABLE = False
  elif HTTP2_AVAILABLE:
    curl.setopt(pycurl.HTTP_VERSION, 4)

class Session:
  def get(self, url, **kwargs):
    kwargs['prepare_curl_callback'] = try_use_http2

    proxy = kwargs.get('proxy')
    if proxy:
      del kwargs['proxy']
    elif hasattr(self, 'nv_config') and self.nv_config.get('proxy'):
      proxy = self.nv_config.get('proxy')
    if proxy:
      host, port = proxy.rsplit(':', 1)
      kwargs['proxy_host'] = host
      kwargs['proxy_port'] = int(port)

    params = kwargs.get('params')
    if params:
      del kwargs['params']
      q = urlencode(params)
      url += '?' + q

    r = HTTPRequest(url, **kwargs)
    return ResponseManager(r)

class ResponseManager:
  def __init__(self, req):
    self.req = req

  async def __aenter__(self):
    return await to_asyncio_future(client.fetch(self.req))

  async def __aexit__(self, exc_type, exc, tb):
    pass

async def json_response(self, **kwargs):
  return json.loads(self.body.decode('utf-8'))

async def read(self):
  return self.body

HTTPResponse.json = json_response
HTTPResponse.read = read
session = Session()

@@ -1,66 +0,0 @@
# MIT licensed
# Copyright (c) 2013-2018 lilydjwg <lilydjwg@gmail.com>, et al.

import asyncio
import os.path as _path

from pkg_resources import parse_version
import structlog

from . import conf_cacheable_with_name

logger = structlog.get_logger(logger_name=__name__)
_self_path = _path.dirname(_path.abspath(__file__))
_cmd_prefix = ['/bin/bash', _path.join(_self_path, 'vcs.sh')]

PROT_VER = 1

get_cacheable_conf = conf_cacheable_with_name('vcs')

def _parse_oldver(oldver):
  if oldver is None:
    return PROT_VER, 0, ''
  try:
    prot_ver, count, ver = oldver.split('.', maxsplit=2)
    prot_ver = int(prot_ver)
    count = int(count)
  except:
    return PROT_VER, 0, ''
  if prot_ver != PROT_VER:
    return PROT_VER, 0, ver
  return PROT_VER, count, ver

async def get_version(name, conf, **kwargs):
  vcs = conf['vcs'] or ''
  use_max_tag = conf.getboolean('use_max_tag', False)
  ignored_tags = conf.get("ignored_tags", "").split()
  oldver = conf.get('oldver')
  cmd = _cmd_prefix + [name, vcs]
  if use_max_tag:
    cmd += ["get_tags"]
  p = await asyncio.create_subprocess_exec(
    *cmd,
    stdout=asyncio.subprocess.PIPE,
    stderr=asyncio.subprocess.PIPE,
  )

  output, error = await p.communicate()
  output = output.strip().decode('latin1')
  error = error.strip().decode('latin1')

  if p.returncode != 0:
    logger.error('command exited with error', output=output,
                 name=name, returncode=p.returncode, error=error)
    return
  else:
    if use_max_tag:
      data = [tag for tag in output.split("\n") if tag not in ignored_tags]
      data.sort(key=parse_version)
      version = data[-1]
      return version
    else:
      oldvers = _parse_oldver(oldver)
      if output == oldvers[2]:
        return oldver
      else:
        return "%d.%d.%s" % (oldvers[0], oldvers[1] + 1, output)
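
# Version-string format sketch for this vcs source (inferred from
# _parse_oldver above): "<protocol>.<count>.<raw output>". For example, a
# first check that sees commit abc123 records "1.1.abc123"; the next change
# in output bumps the count, yielding "1.2.<new commit>".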

@@ -1,115 +0,0 @@
#!/bin/bash

exec 3>&1
exec >&2

dir=$1
vcs=$2
get_tags=$3

parse_vcs_url() {
  local _url=$1
  local _out_var=$2
  # remove folder::
  [[ $_url =~ ^[^/:]*::(.*)$ ]] && _url=${BASH_REMATCH[1]}
  [[ $_url =~ ^(bzr|git|hg|svn)([+:])(.*) ]] || return 1
  local _proto=${BASH_REMATCH[1]}
  [[ ${BASH_REMATCH[2]} = + ]] && _url=${BASH_REMATCH[3]}
  local _real_url=${_url%\#*}
  local _frag=''
  [[ $_real_url = $_url ]] || _frag=${_url##*\#}
  eval "${_out_var}"'=("${_proto}" "${_real_url}" "${_frag}")'
}

get_vcs() {
  local _vcs=$1
  local _out_var=$2
  if [[ -z $_vcs ]]; then
    _vcs=$(. "${dir}"/PKGBUILD &> /dev/null
      for src in "${source[@]}"; do
        parse_vcs_url "$src" _ && {
          echo "$src"
          exit 0
        }
      done
      exit 1) || return 1
  fi
  parse_vcs_url "$_vcs" "$_out_var"
}

git_get_version() {
  local _url=$1
  local _frag=$2
  local _ref=''
  if [[ -z $_frag ]]; then
    _ref=HEAD
  elif [[ $_frag =~ ^commit=(.*)$ ]]; then
    echo "${BASH_REMATCH[1]}"
    return 0
  elif [[ $_frag =~ ^branch=(.*)$ ]]; then
    _ref=refs/heads/${BASH_REMATCH[1]}
  elif [[ $_frag =~ ^tag=(.*)$ ]]; then
    _ref=refs/tags/${BASH_REMATCH[1]}
  else
    return 1
  fi
  local _res=$(git ls-remote "$_url" "$_ref")
  [[ $_res =~ ^([a-fA-F0-9]*)[[:blank:]] ]] || return 1
  echo "${BASH_REMATCH[1]}"
}

hg_get_version() {
  local _url=$1
  local _frag=$2
  local _ref
  if [[ -z $_frag ]]; then
    _ref=default
  elif [[ $_frag =~ ^(revision|tag|branch)=(.*)$ ]]; then
    _ref=${BASH_REMATCH[2]}
  else
    return 1
  fi
  hg identify "${_url}#${_ref}"
}

svn_get_version() {
  local _url=$1
  local _frag=$2
  local _extra_arg=()
  if [[ -z $_frag ]]; then
    true
  elif [[ $_frag =~ ^(revision)=(.*)$ ]]; then
    _extra_arg=(-r "${BASH_REMATCH[2]}")
  else
    return 1
  fi
  # Get rid of locale
  env -i PATH="${PATH}" svn info "${_extra_arg[@]}" "${_url}" | \
    sed -n 's/^Revision:[[:blank:]]*\([0-9]*\)/\1/p'
}

bzr_get_version() {
  local _url=$1
  local _frag=$2
  local _extra_arg=()
  if [[ -z $_frag ]]; then
    true
  elif [[ $_frag =~ ^(revision)=(.*)$ ]]; then
    _extra_arg=(-r "${BASH_REMATCH[2]}")
  else
    return 1
  fi
  bzr revno -q "${_extra_arg[@]}" "${_url}"
}

git_get_tags() {
  local _url=$1
  git ls-remote "$_url" | grep -oP '(?<=refs/tags/)[^^]*$'
}

get_vcs "${vcs}" components || exit 1
if [[ "x$get_tags" == "xget_tags" ]]; then
  eval "${components[0]}_get_tags"' ${components[@]:1}' >&3
else
  eval "${components[0]}_get_version"' ${components[@]:1}' >&3
fi

@@ -1,17 +1,20 @@
# vim: se sw=2:
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2013-2024 lilydjwg <lilydjwg@gmail.com>, et al.

import sys
import os
import argparse
import shutil
import structlog
import json
import os.path

from . import core
from .util import RichResult

logger = structlog.get_logger(logger_name=__name__)

def take():
def take() -> None:
  parser = argparse.ArgumentParser(description='update version records of nvchecker')
  core.add_common_arguments(parser)
  parser.add_argument('--all', action='store_true',

@@ -19,54 +22,178 @@ def take():
  parser.add_argument('--ignore-nonexistent', action='store_true',
                      help='ignore nonexistent names')
  parser.add_argument('names', metavar='NAME', nargs='*',
                      help='software name to be updated')
                      help='software name to be updated. use NAME=VERSION to update '
                           'to a specific version instead of the new version.')
  args = parser.parse_args()
  if core.process_common_arguments(args):
    return

  s = core.Source(args.file)
  if not s.oldver or not s.newver:
  opt = core.load_file(args.file, use_keymanager=False)[1]
  if opt.ver_files is None:
    logger.critical(
      "doesn't have both 'oldver' and 'newver' set.", source=s,
      "doesn't have 'oldver' and 'newver' set.",
      source=args.file,
    )
    sys.exit(2)
  else:
    oldverf = opt.ver_files[0]
    newverf = opt.ver_files[1]

  oldvers = core.read_verfile(s.oldver)
  newvers = core.read_verfile(s.newver)
  oldvers = core.read_verfile(oldverf)
  newvers = core.read_verfile(newverf)

  if args.all:
    oldvers.update(newvers)
  else:
    name: str
    for name in args.names:
      try:
        oldvers[name] = newvers[name]
      except KeyError:
        if args.ignore_nonexistent:
          logger.warn('nonexistent in newver, ignored', name=name)
          continue
      if "=" in name:
        name, newver = name.split("=")
        oldvers[name] = RichResult(version=newver)
      else:
        try:
          oldvers[name] = newvers[name]
        except KeyError:
          if args.ignore_nonexistent:
            logger.warning('nonexistent in newver, ignored', name=name)
            continue

          logger.critical(
            "doesn't exist in 'newver' set.", name=name,
          )
          sys.exit(2)
        logger.critical(
          "doesn't exist in 'newver' set.", name=name,
        )
        sys.exit(2)

  try:
    os.rename(s.oldver, s.oldver + '~')
    if os.path.islink(oldverf):
      shutil.copy(oldverf, oldverf.with_name(oldverf.name + '~'))
    else:
      oldverf.rename(
        oldverf.with_name(oldverf.name + '~'),
      )
  except FileNotFoundError:
    pass
  core.write_verfile(s.oldver, oldvers)
    pass
  core.write_verfile(oldverf, oldvers)

def cmp():
def cmp() -> None:
  parser = argparse.ArgumentParser(description='compare version records of nvchecker')
  core.add_common_arguments(parser)
  parser.add_argument('-j', '--json', action='store_true',
                      help='Output JSON array of dictionaries with {name, newver, oldver, [delta]} '
                           '(or array of names if --quiet)')
  parser.add_argument('-q', '--quiet', action='store_true',
                      help="Quiet mode, output only the names.")
  parser.add_argument('-a', '--all', action='store_true',
                      help="Include unchanged versions.")
  parser.add_argument('-s', '--sort',
                      choices=('parse_version', 'vercmp', 'awesomeversion', 'none'),
                      default='parse_version',
                      help='Version compare method used to decide the arrow direction '
                           '(default: parse_version)')
  parser.add_argument('-n', '--newer', action='store_true',
                      help='Shows only the newer ones according to --sort.')
  parser.add_argument('--exit-status', action='store_true',
                      help="exit with status 4 if there are updates")
  args = parser.parse_args()
  if core.process_common_arguments(args):
    return

  s = core.Source(args.file)
  oldvers = core.read_verfile(s.oldver) if s.oldver else {}
  newvers = core.read_verfile(s.newver)
  for name, newver in sorted(newvers.items()):
  opt = core.load_file(args.file, use_keymanager=False)[1]
  if opt.ver_files is None:
    logger.critical(
      "doesn't have 'oldver' and 'newver' set.",
      source=args.file,
    )
    sys.exit(2)
  else:
    oldverf = opt.ver_files[0]
    newverf = opt.ver_files[1]

  oldvers = {k: v.version for k, v in core.read_verfile(oldverf).items()}
  newvers = {k: v.version for k, v in core.read_verfile(newverf).items()}

  differences = []

  for name, newver in sorted(newvers.items()):  # accumulate differences
    oldver = oldvers.get(name, None)
    if oldver != newver:
      print('%s %s -> %s' % (name, oldver, newver))

    diff = {
      'name': name,
      'oldver': oldver,
      'newver': newver
    }

    if oldver is not None and newver is not None:
      if oldver == newver:
        diff['delta'] = 'equal'

      elif args.sort == "none":
        diff['delta'] = 'new'  # assume it's a new version if we're not comparing

      else:
        from .sortversion import sort_version_keys
        version = sort_version_keys[args.sort]

        if version(oldver) > version(newver):  # type: ignore
          if args.newer:
            continue  # don't store this diff
          diff['delta'] = 'old'
        else:
          diff['delta'] = 'new'

    elif oldver is None:
      diff['delta'] = 'added'

    elif newver is None:
      if args.newer:
        continue  # don't store this diff
      diff['delta'] = 'gone'

    if args.all or diff['delta'] != 'equal':
      differences.append(diff)

  if args.json:
    if args.quiet:
      print(json.dumps([diff['name'] for diff in differences], separators=(',', ':')))
    else:
      print(json.dumps(differences, sort_keys=True, separators=(',', ':')))

  elif args.quiet:
    for diff in differences:
      print(diff['name'])

  else:
    from .lib.nicelogger import Colors, support_color
    c = Colors(support_color(sys.stdout))

    diffstyles = {
      'new': {
        'symbol': '->',
        'oldc': c.red
      },
      'old': {
        'symbol': f'{c.red}<-{c.normal}',
        'oldc': c.red
      },
      'added': {
        'symbol': '++',
        'oldc': c.red
      },
      'gone': {
        'symbol': f'{c.red}--{c.normal}',
        'oldc': c.green
      },
      'equal': {
        'symbol': '==',
        'oldc': c.green
      }
    }

    for diff in differences:
      style = diffstyles[diff.get('delta', 'equal')]  # type: ignore # mypy has issues with this line

      print(f'{diff["name"]} {style["oldc"]}{diff["oldver"]}{c.normal} {style["symbol"]} {c.green}{diff["newver"]}{c.normal}')

  if args.exit_status and any(
    diff.get('delta') != 'equal' for diff in differences
  ):
    sys.exit(4)
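
# CLI sketch for the two entry points above (the NAME=VERSION and
# --exit-status behaviors are defined in this file; the config file name and
# the -c flag from core.add_common_arguments are illustrative):
#
#     nvtake -c nvchecker.toml foo bar=1.2.3   # accept new versions; pin bar
#     nvcmp -c nvchecker.toml --json --exit-status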

nvchecker/util.py (new file, 330 lines)

@@ -0,0 +1,330 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.

from __future__ import annotations

import sys
import asyncio
from asyncio import Queue
from typing import (
  Dict, Optional, List, NamedTuple, Union,
  Any, Tuple, Callable, Coroutine, Hashable,
  TYPE_CHECKING,
)
from pathlib import Path
import contextvars
import abc
import netrc
from dataclasses import dataclass

if TYPE_CHECKING:
  import tomli as tomllib
else:
  try:
    import tomllib
  except ModuleNotFoundError:
    import tomli as tomllib

import structlog

from .httpclient import session
from .ctxvars import tries as ctx_tries
from .ctxvars import proxy as ctx_proxy
from .ctxvars import user_agent as ctx_ua
from .ctxvars import httptoken as ctx_httpt
from .ctxvars import verify_cert as ctx_verify_cert

logger = structlog.get_logger(logger_name=__name__)

Entry = Dict[str, Any]
Entry.__doc__ = '''The configuration `dict` for an entry.'''
Entries = Dict[str, Entry]

if sys.version_info[:2] >= (3, 11):
  from typing import LiteralString
else:
  LiteralString = str

if sys.version_info[:2] >= (3, 10):
  @dataclass(kw_only=True)
  class RichResult:
    version: str
    gitref: Optional[str] = None
    revision: Optional[str] = None
    url: Optional[str] = None

    def __str__(self):
      return self.version
else:
  @dataclass
  class RichResult:
    version: str
    gitref: Optional[str] = None
    revision: Optional[str] = None
    url: Optional[str] = None

    def __str__(self):
      return self.version

VersionResult = Union[None, str, RichResult, List[Union[str, RichResult]], Exception]
VersionResult.__doc__ = '''The result of a `get_version` check.

* `None` - No version found.
* `str` - A single version string is found.
* `RichResult` - A version string with additional information.
* `List[Union[str, RichResult]]` - Multiple version strings with or without additional information are found. :ref:`list options` will be applied.
* `Exception` - An error occurred.
'''

class FileLoadError(Exception):
  def __init__(self, kind, filename, exc):
    self.kind = kind
    self.filename = filename
    self.exc = exc

  def __str__(self):
    return f'failed to load {self.kind} {self.filename!r}: {self.exc}'

class KeyManager:
  '''Manages data in the keyfile.'''
  def __init__(
    self, file: Optional[Path],
  ) -> None:
    if file is not None:
      try:
        with file.open('rb') as f:
          keys = tomllib.load(f)['keys']
      except (OSError, tomllib.TOMLDecodeError) as e:
        raise FileLoadError('keyfile', str(file), e)
    else:
      keys = {}
    self.keys = keys
    try:
      netrc_file = netrc.netrc()
      netrc_hosts = netrc_file.hosts
    except (FileNotFoundError, netrc.NetrcParseError):
      netrc_hosts = {}
    self.netrc = netrc_hosts

  def get_key(self, name: str, legacy_name: Optional[str] = None) -> Optional[str]:
    '''Get the named key (token) in the keyfile.'''
    keyfile_token = self.keys.get(name) or self.keys.get(legacy_name)
    netrc_passwd = (e := self.netrc.get(name)) and e[2]
    return keyfile_token or netrc_passwd
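
# Keyfile sketch for KeyManager (TOML shape inferred from the loader above;
# the token value is a placeholder):
#
#     [keys]
#     github = "ghp_..."
#
# A matching machine entry in ~/.netrc works as a fallback, since get_key()
# falls back to the netrc password stored under the same name.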

class EntryWaiter:
  def __init__(self) -> None:
    self._waiting: Dict[str, asyncio.Future] = {}

  async def wait(self, name: str) -> str:
    '''Wait on the ``name`` entry and return its result (the version string)'''
    fu = self._waiting.get(name)
    if fu is None:
      fu = asyncio.Future()
      self._waiting[name] = fu
    return await fu

  def set_result(self, name: str, value: str) -> None:
    fu = self._waiting.get(name)
    if fu is not None:
      fu.set_result(value)

  def set_exception(self, name: str, e: Exception) -> None:
    fu = self._waiting.get(name)
    if fu is not None:
      fu.set_exception(e)

class RawResult(NamedTuple):
  '''The unprocessed result from a check.'''
  name: str
  version: VersionResult
  conf: Entry

RawResult.name.__doc__ = 'The name (table name) of the entry.'
RawResult.version.__doc__ = 'The result from the check.'
RawResult.conf.__doc__ = 'The entry configuration (table content) of the entry.'

ResultData = Dict[str, RichResult]

class BaseWorker:
  '''The base class for defining `Worker` classes for source plugins.

  .. py:attribute:: task_sem
     :type: asyncio.Semaphore

     This is the rate-limiting semaphore. Workers should acquire it while doing one unit of work.

  .. py:attribute:: result_q
     :type: Queue[RawResult]

     Results should be put into this queue.

  .. py:attribute:: tasks
     :type: List[Tuple[str, Entry]]

     A list of tasks for the `Worker` to complete. Every task consists of
     a tuple for the task name (table name in the configuration file) and the
     content of that table (as a `dict`).

  .. py:attribute:: keymanager
     :type: KeyManager

     The `KeyManager` for retrieving keys from the keyfile.
  '''
  def __init__(
    self,
    task_sem: asyncio.Semaphore,
    result_q: Queue[RawResult],
    tasks: List[Tuple[str, Entry]],
    keymanager: KeyManager,
  ) -> None:
    self.task_sem = task_sem
    self.result_q = result_q
    self.keymanager = keymanager
    self.tasks = tasks

  @abc.abstractmethod
  async def run(self) -> None:
    '''Run the `tasks`. Subclasses should implement this method.'''
    raise NotImplementedError

  async def _run_maynot_raise(self) -> None:
    try:
      await self.run()
    except Exception:
      # don't let an exception tear down the whole process
      logger.exception('exception raised by Worker.run')

class AsyncCache:
  '''A cache for use with async functions.'''
  cache: Dict[Hashable, Any]
  lock: asyncio.Lock

  def __init__(self) -> None:
    self.cache = {}
    self.lock = asyncio.Lock()

  async def _get_json(
    self, key: Tuple[str, str, Tuple[Tuple[str, str], ...]],
  ) -> Any:
    _, url, headers = key
    res = await session.get(url, headers=dict(headers))
    return res.json()

  async def get_json(
    self, url: str, *,
    headers: Dict[str, str] = {},
  ) -> Any:
    '''Get specified ``url`` and return the response content as JSON.

    The returned data will be cached for reuse.
    '''
    key = '_jsonurl', url, tuple(sorted(headers.items()))
    return await self.get(
      key, self._get_json)  # type: ignore

  async def get(
    self,
    key: Hashable,
    func: Callable[[Hashable], Coroutine[Any, Any, Any]],
  ) -> Any:
    '''Run async ``func`` and cache its return value by ``key``.

    The ``key`` should be hashable, and the function will be called with it as
    its sole argument. For multiple simultaneous calls with the same key, only
    one will actually be called, and others will wait and return the same
    (cached) value.
    '''
    async with self.lock:
      cached = self.cache.get(key)
      if cached is None:
        coro = func(key)
        fu = asyncio.create_task(coro)
        self.cache[key] = fu

    if asyncio.isfuture(cached):  # pending
      return await cached
    elif cached is not None:  # cached
      return cached
    else:  # not cached
      r = await fu
      self.cache[key] = r
      return r
if TYPE_CHECKING:
|
||||
from typing_extensions import Protocol
|
||||
class GetVersionFunc(Protocol):
|
||||
async def __call__(
|
||||
self,
|
||||
name: str, conf: Entry,
|
||||
*,
|
||||
cache: AsyncCache,
|
||||
keymanager: KeyManager,
|
||||
) -> VersionResult:
|
||||
...
|
||||
else:
|
||||
GetVersionFunc = Any
|
||||
|
||||
class FunctionWorker(BaseWorker):
|
||||
func: GetVersionFunc
|
||||
cache: AsyncCache
|
||||
|
||||
def initialize(self, func: GetVersionFunc) -> None:
|
||||
self.func = func
|
||||
self.cache = AsyncCache()
|
||||
|
||||
async def run(self) -> None:
|
||||
futures = []
|
||||
for name, entry in self.tasks:
|
||||
ctx = contextvars.copy_context()
|
||||
fu = ctx.run(self.run_one, name, entry)
|
||||
futures.append(fu)
|
||||
|
||||
for fu2 in asyncio.as_completed(futures):
|
||||
await fu2
|
||||
|
||||
async def run_one(
|
||||
self, name: str, entry: Entry,
|
||||
) -> None:
|
||||
assert self.func is not None
|
||||
|
||||
tries = entry.get('tries', None)
|
||||
if tries is not None:
|
||||
ctx_tries.set(tries)
|
||||
proxy = entry.get('proxy', None)
|
||||
if proxy is not None:
|
||||
ctx_proxy.set(proxy)
|
||||
ua = entry.get('user_agent', None)
|
||||
if ua is not None:
|
||||
ctx_ua.set(ua)
|
||||
httpt = entry.get('httptoken', None)
|
||||
if httpt is None:
|
||||
httpt = self.keymanager.get_key('httptoken_'+name)
|
||||
if httpt is not None:
|
||||
ctx_httpt.set(httpt)
|
||||
verify_cert = entry.get('verify_cert', None)
|
||||
if verify_cert is not None:
|
||||
ctx_verify_cert.set(verify_cert)
|
||||
|
||||
try:
|
||||
async with self.task_sem:
|
||||
version = await self.func(
|
||||
name, entry,
|
||||
cache = self.cache,
|
||||
keymanager = self.keymanager,
|
||||
)
|
||||
await self.result_q.put(RawResult(name, version, entry))
|
||||
except Exception as e:
|
||||
await self.result_q.put(RawResult(name, e, entry))
|
||||
|
||||
class GetVersionError(Exception):
|
||||
'''An error occurred while getting version information.
|
||||
|
||||
Raise this when a known bad situation happens.
|
||||
|
||||
:param msg: The error message.
|
||||
:param kwargs: Arbitrary additional context for the error.
|
||||
'''
|
||||
def __init__(self, msg: LiteralString, **kwargs: Any) -> None:
|
||||
self.msg = msg
|
||||
self.kwargs = kwargs
|
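A note on the single-flight guarantee documented in AsyncCache.get above: concurrent callers with the same key share one pending task instead of each running the function. A minimal sketch, assuming the AsyncCache class as defined above; the fetch() coroutine and its sleep are made up for illustration:

import asyncio

calls = 0

async def fetch(key):
  global calls
  calls += 1
  await asyncio.sleep(0.1)  # stand-in for network latency
  return f'value for {key}'

async def main():
  cache = AsyncCache()  # the class defined above
  results = await asyncio.gather(*(cache.get('k', fetch) for _ in range(3)))
  print(results)  # three identical values
  print(calls)    # 1 -- only one fetch actually ran

asyncio.run(main())

The lock only guards the cache bookkeeping; awaiting the pending task happens outside it, so slow lookups for different keys still overlap.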
nvchecker_source/alpm.py (new file, 44 lines)
@@ -0,0 +1,44 @@
# MIT licensed
# Copyright (c) 2020-2021 DDoSolitary <DDoSolitary@gmail.com>, et al.

from nvchecker.api import GetVersionError
from pyalpm import Handle


async def open_db(info):
  dbpath, repo = info
  handle = Handle('/', dbpath)
  db = handle.register_syncdb(repo, 0)
  return handle, db


async def get_version(name, conf, *, cache, **kwargs):
  pkgname = conf.get('alpm', name)
  dbpath = conf.get('dbpath', '/var/lib/pacman')
  strip_release = conf.get('strip_release', False)
  provided = conf.get('provided')

  repo = conf.get('repo')
  if repo is None:
    repos = conf.get('repos') or ['core', 'extra', 'multilib']
  else:
    repos = [repo]

  for repo in repos:
    db = (await cache.get((dbpath, repo), open_db))[1]
    pkg = db.get_pkg(pkgname)
    if pkg is not None:
      break

  if pkg is None:
    raise GetVersionError('package not found in the ALPM database')
  if provided is None:
    version = pkg.version
  else:
    provides = dict(x.split('=', 1) for x in pkg.provides if '=' in x)
    version = provides.get(provided)
    if version is None:
      raise GetVersionError('provides element not found')
  if strip_release:
    version = version.split('-', 1)[0]
  return version
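The provides handling above reduces pacman's name=version entries to a mapping; a standalone illustration with made-up values:

provides = ['libfoo.so=2-64', 'foo-core=1.2.3', 'bar']  # sample pkg.provides
mapping = dict(x.split('=', 1) for x in provides if '=' in x)
print(mapping.get('foo-core'))  # 1.2.3; plain 'bar' carries no version and is skipped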
nvchecker_source/alpmfiles.py (new file, 51 lines)
@@ -0,0 +1,51 @@
# MIT licensed
# Copyright (c) 2023 Pekka Ristola <pekkarr [at] protonmail [dot] com>, et al.

from asyncio import create_subprocess_exec
from asyncio.subprocess import PIPE
import re
from typing import Tuple, List

from nvchecker.api import GetVersionError

async def get_files(info: Tuple[str, str]) -> List[str]:
  dbpath, pkg = info
  # there's no pyalpm bindings for the file databases
  cmd = ['pacman', '-Flq', '--dbpath', dbpath, pkg]

  p = await create_subprocess_exec(*cmd, stdout = PIPE, stderr = PIPE)
  stdout, stderr = await p.communicate()

  if p.returncode == 0:
    return stdout.decode().splitlines()
  else:
    raise GetVersionError(
      'pacman failed to get file list',
      pkg = pkg,
      cmd = cmd,
      stdout = stdout.decode(errors='replace'),
      stderr = stderr.decode(errors='replace'),
      returncode = p.returncode,
    )

async def get_version(name, conf, *, cache, **kwargs):
  pkg = conf['pkgname']
  repo = conf.get('repo')
  if repo is not None:
    pkg = f'{repo}/{pkg}'
  dbpath = conf.get('dbpath', '/var/lib/pacman')
  regex = re.compile(conf['filename'])
  if regex.groups > 1:
    raise GetVersionError('multi-group regex')
  strip_dir = conf.get('strip_dir', False)

  files = await cache.get((dbpath, pkg), get_files)

  for f in files:
    fn = f.rsplit('/', 1)[-1] if strip_dir else f
    match = regex.fullmatch(fn)
    if match:
      groups = match.groups()
      return groups[0] if len(groups) > 0 else fn

  raise GetVersionError('no file matches specified regex')
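The matching rules above in brief: with strip_dir the directory part is dropped before the regex is applied, and if the pattern has one capture group, that group is the version. A sketch with a made-up file list:

import re

files = ['usr/lib/libfoo.so.1.2.3', 'usr/bin/foo']  # sample pacman -Flq output
regex = re.compile(r'libfoo\.so\.([\d.]+)')
for f in files:
  fn = f.rsplit('/', 1)[-1]  # what strip_dir = true does
  m = regex.fullmatch(fn)
  if m:
    print(m.group(1))  # 1.2.3 -- the capture group wins over the full name
    break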
nvchecker_source/android_sdk.py
@@ -1,51 +1,54 @@
 # MIT licensed
-# Copyright (c) 2017 Yen Chi Hsuan <yan12125 at gmail dot com>
+# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.
+# Copyright (c) 2017,2020 Chih-Hsuan Yen <yan12125 at gmail dot com>
 
-from asyncio.locks import Lock
 import os
 import re
 from xml.etree import ElementTree
 
-from . import session
+from nvchecker.api import session
 
 _ANDROID_REPO_MANIFESTS = {
   'addon': 'https://dl.google.com/android/repository/addon2-1.xml',
   'package': 'https://dl.google.com/android/repository/repository2-1.xml',
 }
 
-_repo_manifests_cache = {}
-_repo_manifests_locks = {}
-
-for repo in _ANDROID_REPO_MANIFESTS.keys():
-  _repo_manifests_locks[repo] = Lock()
-
 # See <channel> tags in Android SDK XML manifests
 _CHANNEL_MAP = {
   'stable': 'channel-0',
   'beta': 'channel-1',
   'dev': 'channel-2',
   'canary': 'channel-3',
 }
 
 async def _get_repo_manifest(repo):
-  async with _repo_manifests_locks[repo]:
-    if repo in _repo_manifests_cache:
-      return _repo_manifests_cache[repo]
-
-    repo_xml_url = _ANDROID_REPO_MANIFESTS[repo]
-    async with session.get(repo_xml_url) as res:
-      data = (await res.read()).decode('utf-8')
-    repo_manifest = ElementTree.fromstring(data)
-    _repo_manifests_cache[repo] = repo_manifest
-
-    return repo_manifest
+  repo_xml_url = _ANDROID_REPO_MANIFESTS[repo]
+
+  res = await session.get(repo_xml_url)
+  data = res.body.decode('utf-8')
+
+  repo_manifest = ElementTree.fromstring(data)
+  return repo_manifest
 
-async def get_version(name, conf, **kwargs):
+async def get_version(name, conf, *, cache, **kwargs):
   repo = conf['repo']
   pkg_path_prefix = conf['android_sdk']
   channels = [_CHANNEL_MAP[channel]
               for channel in conf.get('channel', 'stable').split(',')]
 
-  repo_manifest = await _get_repo_manifest(repo)
+  repo_manifest = await cache.get(repo, _get_repo_manifest)
 
+  versions = []
+
   for pkg in repo_manifest.findall('.//remotePackage'):
     if not pkg.attrib['path'].startswith(pkg_path_prefix):
       continue
     channelRef = pkg.find('./channelRef')
     if channelRef.attrib['ref'] not in channels:
       continue
     for archive in pkg.findall('./archives/archive'):
       host_os = archive.find('./host-os')
-      if host_os and host_os.text != 'linux':
+      if host_os is not None and host_os.text != conf.get('host_os', 'linux'):
         continue
       archive_url = archive.find('./complete/url').text
       # revision
@@ -61,4 +64,8 @@ async def get_version(name, conf, **kwargs):
       mobj = re.match(r'r\d+', rel_str)
       if mobj:
         rev_strs.append(rel_str)
-      return '.'.join(rev_strs)
+      versions.append('.'.join(rev_strs))
+      # A package suitable for the target host OS is found - skip remaining
+      break
 
   return versions
nvchecker_source/anitya.py (new file, 17 lines)
@@ -0,0 +1,17 @@
# MIT licensed
# Copyright (c) 2017-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from nvchecker.api import RichResult

URL = 'https://release-monitoring.org/api/project/{pkg}'

async def get_version(name, conf, *, cache, **kwargs):
  pkg = conf.get('anitya_id')
  if pkg is None:
    pkg = conf.get('anitya')
  url = URL.format(pkg = pkg)
  data = await cache.get_json(url)
  return RichResult(
    version = data['version'],
    url = f'https://release-monitoring.org/project/{data["id"]}/',
  )
nvchecker_source/apt.py (new file, 189 lines)
@@ -0,0 +1,189 @@
# MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.

from __future__ import annotations

import re
import asyncio
from typing import Dict, Tuple
import itertools
import functools
from collections import defaultdict

from nvchecker.api import (
  session, GetVersionError, VersionResult,
  RichResult, Entry, AsyncCache, KeyManager,
)

APT_RELEASE_URL = "%s/dists/%s/Release"
APT_PACKAGES_PATH = "%s/binary-%s/Packages%s"
APT_PACKAGES_URL = "%s/dists/%s/%s"
APT_PACKAGES_SUFFIX_PREFER = (".xz", ".gz", "")

DpkgVersion = Tuple[int, str, str]

def parse_version(s: str) -> DpkgVersion:
  try:
    epoch_str, rest = s.split(':', 1)
  except ValueError:
    epoch = 0
    rest = s
  else:
    epoch = int(epoch_str)

  try:
    ver, rev = rest.split('-', 1)
  except ValueError:
    ver = rest
    rev = ''

  return epoch, ver, rev

def _compare_part(a: str, b: str) -> int:
  sa = re.split(r'(\d+)', a)
  sb = re.split(r'(\d+)', b)
  for idx, (pa, pb) in enumerate(itertools.zip_longest(sa, sb)):
    if pa is None:
      return -1
    elif pb is None:
      return 1

    if idx % 2 == 1:
      ret = int(pa) - int(pb)
      if ret != 0:
        return ret
    else:
      if pa < pb:
        return -1
      elif pa > pb:
        return 1

  return 0

def compare_version_parsed(a: DpkgVersion, b: DpkgVersion) -> int:
  ret = a[0] - b[0]
  if ret != 0:
    return ret
  ret = _compare_part(a[1], b[1])
  if ret != 0:
    return ret
  return _compare_part(a[2], b[2])

def compare_version(a: str, b: str) -> int:
  va = parse_version(a)
  vb = parse_version(b)
  return compare_version_parsed(va, vb)

def _decompress_data(url: str, data: bytes) -> str:
  if url.endswith(".xz"):
    import lzma
    data = lzma.decompress(data)
  elif url.endswith(".gz"):
    import gzip
    data = gzip.decompress(data)

  return data.decode('utf-8')

async def get_url(url: str) -> str:
  res = await session.get(url)
  data = res.body
  loop = asyncio.get_running_loop()
  return await loop.run_in_executor(
    None, _decompress_data,
    url, data)

async def parse_packages(key: Tuple[AsyncCache, str]) -> Tuple[Dict[str, str], Dict[str, str], Dict[str, str]]:
  cache, url = key
  apt_packages = await cache.get(url, get_url) # type: ignore

  pkg_map = defaultdict(list)
  srcpkg_map = defaultdict(list)
  pkg_to_src_map = defaultdict(list)

  pkg = None
  srcpkg = None
  for line in apt_packages.split('\n'):
    if line.startswith("Package: "):
      pkg = line[9:]
    elif line.startswith("Source: "):
      srcpkg = line[8:]
    elif line.startswith("Version: "):
      version = line[9:]
      if pkg is not None:
        pkg_map[pkg].append(version)
        pkg_to_src_map["%s/%s" % (pkg, version)] = srcpkg if srcpkg is not None else pkg
      if srcpkg is not None:
        srcpkg_map[srcpkg].append(version)
      pkg = srcpkg = None

  pkg_map_max = {pkg: max(vs, key=functools.cmp_to_key(compare_version))
                 for pkg, vs in pkg_map.items()}
  srcpkg_map_max = {pkg: max(vs, key=functools.cmp_to_key(compare_version))
                    for pkg, vs in srcpkg_map.items()}
  pkg_to_src_map_max = {pkg: pkg_to_src_map["%s/%s" % (pkg, vs)]
                        for pkg, vs in pkg_map_max.items()}

  return pkg_map_max, srcpkg_map_max, pkg_to_src_map_max

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  srcpkg = conf.get('srcpkg')
  pkg = conf.get('pkg')
  mirror = conf['mirror']
  suite = conf['suite']
  repo = conf.get('repo', 'main')
  arch = conf.get('arch', 'amd64')
  strip_release = conf.get('strip_release', False)

  if srcpkg and pkg:
    raise GetVersionError('Setting both srcpkg and pkg is ambiguous')
  elif not srcpkg and not pkg:
    pkg = name

  apt_release = await cache.get(
    APT_RELEASE_URL % (mirror, suite), get_url) # type: ignore
  for suffix in APT_PACKAGES_SUFFIX_PREFER:
    packages_path = APT_PACKAGES_PATH % (repo, arch, suffix)
    if " " + packages_path in apt_release:
      break
  else:
    raise GetVersionError('Packages file not found in APT repository')

  pkg_map, srcpkg_map, pkg_to_src_map = await cache.get(
    (cache, APT_PACKAGES_URL % (mirror, suite, packages_path)), parse_packages) # type: ignore

  if pkg and pkg in pkg_map:
    version = pkg_map[pkg]
    changelog_name = pkg_to_src_map[pkg]
  elif srcpkg and srcpkg in srcpkg_map:
    version = srcpkg_map[srcpkg]
    changelog_name = srcpkg
  else:
    raise GetVersionError('package not found in APT repository')

  # Get Changelogs field from the Release file
  changelogs_url = None
  for line in apt_release.split('\n'):
    if line.startswith('Changelogs: '):
      changelogs_url = line[12:]
      break

  # Build the changelog URL (see https://wiki.debian.org/DebianRepository/Format#Changelogs for spec)
  changelog = None
  if changelogs_url is not None and changelogs_url != 'no':
    changelog_section = changelog_name[:4] if changelog_name.startswith('lib') else changelog_name[:1]
    changelog = changelogs_url.replace('@CHANGEPATH@', f'{repo}/{changelog_section}/{changelog_name}/{changelog_name}_{version}')

  if strip_release:
    version = version.split("-")[0]

  if changelog is not None:
    return RichResult(
      version = version,
      url = changelog,
    )
  else:
    return version
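A few checks showing the dpkg-style ordering implemented by parse_version and _compare_part above (epoch first, then upstream version, then revision, with digit runs compared numerically); inputs are made up and the functions are the ones defined in this file:

assert compare_version('1:1.0-1', '2.0-1') > 0   # epoch 1 beats implicit epoch 0
assert compare_version('1.10-1', '1.9-1') > 0    # 10 > 9 numerically, not lexically
assert compare_version('1.0-2', '1.0-10') < 0    # revisions are compared the same way
assert compare_version('1.0-1', '1.0-1') == 0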
nvchecker_source/archpkg.py (new file, 37 lines)
@@ -0,0 +1,37 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from nvchecker.api import session, RichResult, GetVersionError

URL = 'https://archlinux.org/packages/search/json/'

async def request(pkg):
  res = await session.get(URL, params={"name": pkg})
  return res.json()

async def get_version(name, conf, *, cache, **kwargs):
  pkg = conf.get('archpkg') or name
  strip_release = conf.get('strip_release', False)
  provided = conf.get('provided')

  data = await cache.get(pkg, request)

  if not data['results']:
    raise GetVersionError('Arch package not found')

  r = [r for r in data['results'] if r['repo'] != 'testing'][0]

  if provided:
    provides = dict(x.split('=', 1) for x in r['provides'] if '=' in x)
    version = provides.get(provided, None)
    if strip_release:
      version = version.split('-', 1)[0]
  elif strip_release:
    version = r['pkgver']
  else:
    version = r['pkgver'] + '-' + r['pkgrel']

  return RichResult(
    version = version,
    url = f'https://archlinux.org/packages/{r["repo"]}/{r["arch"]}/{r["pkgname"]}/',
  )
nvchecker_source/aur.py (new file, 109 lines)
@@ -0,0 +1,109 @@
# MIT licensed
# Copyright (c) 2013-2020,2024 lilydjwg <lilydjwg@gmail.com>, et al.

from datetime import datetime, timezone
import asyncio
from typing import Iterable, Dict, List, Tuple, Any, Optional

from nvchecker.api import (
  session, GetVersionError, VersionResult, RichResult,
  Entry, BaseWorker, RawResult,
)

AUR_URL = 'https://aur.archlinux.org/rpc/'

class AurResults:
  cache: Dict[str, Optional[Dict[str, Any]]]

  def __init__(self) -> None:
    self.cache = {}

  async def get_multiple(
    self,
    aurnames: Iterable[str],
  ) -> Dict[str, Optional[Dict[str, Any]]]:
    params = [('v', '5'), ('type', 'info')]
    params.extend(('arg[]', name) for name in aurnames
                  if name not in self.cache)
    res = await session.get(AUR_URL, params=params)
    data = res.json()
    new_results = {r['Name']: r for r in data['results']}

    cache = self.cache
    cache.update(new_results)
    cache.update(
      (name, None)
      for name in set(aurnames) - new_results.keys()
    )

    return {name: cache[name] for name in aurnames
            if name in cache}

class Worker(BaseWorker):
  # https://wiki.archlinux.org/index.php/Aurweb_RPC_interface#Limitations
  batch_size = 100

  async def run(self) -> None:
    tasks = self.tasks
    n_batch, left = divmod(len(tasks), self.batch_size)
    if left > 0:
      n_batch += 1

    aur_results = AurResults()

    ret = []
    for i in range(n_batch):
      s = i * self.batch_size
      batch = tasks[s : s+self.batch_size]
      fu = self._run_batch(batch, aur_results)
      ret.append(fu)

    await asyncio.gather(*ret)

  async def _run_batch(
    self,
    batch: List[Tuple[str, Entry]],
    aur_results: AurResults,
  ) -> None:
    task_by_name: Dict[str, Entry] = dict(self.tasks)

    async with self.task_sem:
      results = await _run_batch_impl(batch, aur_results)
      for name, version in results.items():
        r = RawResult(name, version, task_by_name[name])
        await self.result_q.put(r)

async def _run_batch_impl(
  batch: List[Tuple[str, Entry]],
  aur_results: AurResults,
) -> Dict[str, VersionResult]:
  aurnames = {conf.get('aur', name) for name, conf in batch}
  results = await aur_results.get_multiple(aurnames)

  ret: Dict[str, VersionResult] = {}

  for name, conf in batch:
    aurname = conf.get('aur', name)
    use_last_modified = conf.get('use_last_modified', False)
    strip_release = conf.get('strip_release', False)

    result = results.get(aurname)

    if result is None:
      ret[name] = GetVersionError('AUR upstream not found')
      continue

    version = result['Version']
    if use_last_modified:
      dt = datetime.fromtimestamp(result['LastModified'], timezone.utc)
      version += '-' + dt.strftime('%Y%m%d%H%M%S')
    if strip_release and '-' in version:
      version = version.rsplit('-', 1)[0]

    ret[name] = RichResult(
      version = version,
      url = f'https://aur.archlinux.org/packages/{name}',
    )

  return ret
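The batch split in Worker.run above is plain ceiling division over the task list; a standalone sketch with made-up numbers:

tasks = list(range(250))  # 250 hypothetical AUR entries
batch_size = 100          # the aurweb RPC limit referenced above
n_batch, left = divmod(len(tasks), batch_size)
if left > 0:
  n_batch += 1
batches = [tasks[i*batch_size:(i+1)*batch_size] for i in range(n_batch)]
print([len(b) for b in batches])  # [100, 100, 50]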
nvchecker_source/bitbucket.py (new file, 73 lines)
@@ -0,0 +1,73 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from typing import Any, List, Union
from urllib.parse import urlencode

from nvchecker.api import VersionResult, RichResult, Entry, AsyncCache

# doc: https://developer.atlassian.com/cloud/bitbucket/rest/api-group-commits/#api-repositories-workspace-repo-slug-commits-get
BITBUCKET_URL = 'https://bitbucket.org/api/2.0/repositories/%s/commits/%s'
# doc: https://developer.atlassian.com/cloud/bitbucket/rest/api-group-refs/#api-repositories-workspace-repo-slug-refs-tags-get
BITBUCKET_MAX_TAG = 'https://bitbucket.org/api/2.0/repositories/%s/refs/tags'

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache,
  **kwargs: Any,
) -> VersionResult:
  repo = conf['bitbucket']
  br = conf.get('branch', '')
  use_max_tag = conf.get('use_max_tag', False)
  use_sorted_tags = conf.get('use_sorted_tags', False)

  if use_sorted_tags or use_max_tag:
    parameters = {'fields': 'values.name,values.links.html.href,next'}

    if use_sorted_tags:
      parameters['sort'] = conf.get('sort', '-target.date')
      if 'query' in conf:
        parameters['q'] = conf['query']

  if use_sorted_tags:
    url = BITBUCKET_MAX_TAG % repo
    url += '?' + urlencode(parameters)

    return await _get_tags(url, max_page=1, cache=cache)

  elif use_max_tag:
    url = BITBUCKET_MAX_TAG % repo
    url += '?' + urlencode(parameters)

    max_page = conf.get('max_page', 3)
    return await _get_tags(url, max_page=max_page, cache=cache)

  else:
    url = BITBUCKET_URL % (repo, br)
    data = await cache.get_json(url)
    return RichResult(
      version = data['values'][0]['date'].split('T', 1)[0].replace('-', ''),
      url = data['values'][0]['links']['html']['href'],
    )

async def _get_tags(
  url: str, *,
  max_page: int,
  cache: AsyncCache,
) -> VersionResult:
  ret: List[Union[str, RichResult]] = []

  for _ in range(max_page):
    data = await cache.get_json(url)
    ret.extend([
      RichResult(
        version = tag['name'],
        url = tag['links']['html']['href'],
      ) for tag in data['values']
    ])
    if 'next' in data:
      url = data['next']
    else:
      break

  return ret
nvchecker_source/cmd.py (new file, 40 lines)
@@ -0,0 +1,40 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import asyncio

import structlog

from nvchecker.api import GetVersionError

logger = structlog.get_logger(logger_name=__name__)

async def run_cmd(cmd: str) -> str:
  logger.debug('running cmd', cmd=cmd)
  p = await asyncio.create_subprocess_shell(
    cmd,
    stdout=asyncio.subprocess.PIPE,
    stderr=asyncio.subprocess.PIPE,
  )

  output, error = await p.communicate()
  output_s = output.strip().decode('latin1')
  error_s = error.strip().decode(errors='replace')
  if p.returncode != 0:
    raise GetVersionError(
      'command exited with error',
      cmd=cmd, error=error_s,
      returncode=p.returncode)
  elif not output_s:
    raise GetVersionError(
      'command exited without output',
      cmd=cmd, error=error_s,
      returncode=p.returncode)
  else:
    return output_s

async def get_version(
  name, conf, *, cache, keymanager=None
):
  cmd = conf['cmd']
  return await cache.get(cmd, run_cmd)
nvchecker_source/combiner.py (new file, 21 lines)
@@ -0,0 +1,21 @@
# MIT licensed
# Copyright (c) 2021 lilydjwg <lilydjwg@gmail.com>, et al.

import asyncio
import string

from nvchecker.api import entry_waiter

class CombineFormat(string.Template):
  idpattern = '[0-9]+'

async def get_version(
  name, conf, *, cache, keymanager=None
):
  t = CombineFormat(conf['format'])
  from_ = conf['from']
  waiter = entry_waiter.get()
  entries = [waiter.wait(name) for name in from_]
  vers = await asyncio.gather(*entries)
  versdict = {str(i+1): v for i, v in enumerate(vers)}
  return t.substitute(versdict)
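How the combiner's template works: $1, $2, ... refer to the versions of the entries listed in "from", in order. A self-contained check with made-up values:

import string

class CombineFormat(string.Template):  # same two-line definition as above
  idpattern = '[0-9]+'

t = CombineFormat('$1-$2')
print(t.substitute({'1': '1.2.3', '2': '45'}))  # 1.2.3-45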
nvchecker_source/container.py (new file, 164 lines)
@@ -0,0 +1,164 @@
# MIT licensed
# Copyright (c) 2020 Chih-Hsuan Yen <yan12125 at gmail dot com>

from typing import Dict, List, NamedTuple, Optional, Tuple
from urllib.request import parse_http_list
from urllib.parse import urljoin
import json

from nvchecker.api import session, HTTPError

class AuthInfo(NamedTuple):
  service: Optional[str]
  realm: str

def parse_www_authenticate_header(header: str) -> Tuple[str, Dict[str, str]]:
  '''
  Parse WWW-Authenticate header used in OAuth2 authentication for container
  registries. This is NOT RFC-compliant!

  Simplified from http.parse_www_authenticate_header in Werkzeug (BSD license)
  '''
  auth_type, auth_info = header.split(None, 1)
  result = {}
  for item in parse_http_list(auth_info):
    name, value = item.split("=", 1)
    if value[:1] == value[-1:] == '"':
      value = value[1:-1]
    result[name] = value
  return auth_type, result

# Inspired by https://stackoverflow.com/a/51921869
# Reference: https://github.com/containers/image/blob/v5.6.0/docker/docker_client.go

class UnsupportedAuthenticationError(NotImplementedError):
  def __init__(self):
    super().__init__('Only Bearer authentication supported for now')

async def get_registry_auth_info(registry_host: str) -> AuthInfo:
  auth_service = auth_realm = None

  try:
    await session.get(f'https://{registry_host}/v2/')
    raise UnsupportedAuthenticationError # No authentication needed
  except HTTPError as e:
    if e.code != 401:
      raise

    auth_type, auth_info = parse_www_authenticate_header(e.response.headers['WWW-Authenticate'])
    if auth_type.lower() != 'bearer':
      raise UnsupportedAuthenticationError

    # Although 'service' is needed as per https://docs.docker.com/registry/spec/auth/token/,
    # ghcr.io (GitHub container registry) does not provide it
    auth_service = auth_info.get('service')
    auth_realm = auth_info['realm']

  return AuthInfo(auth_service, auth_realm)

async def get_container_tags(info: Tuple[str, str, AuthInfo]) -> List[str]:
  image_path, registry_host, auth_info = info
  token = await get_auth_token(auth_info, image_path)
  tags = []
  url = f'https://{registry_host}/v2/{image_path}/tags/list'

  while True:
    res = await session.get(url, headers={
      'Authorization': f'Bearer {token}',
      'Accept': 'application/json',
    })
    tags += res.json()['tags']
    link = res.headers.get('Link')
    if link is None:
      break
    else:
      url = urljoin(url, parse_next_link(link))

  return tags


async def get_auth_token(auth_info, image_path):
  auth_params = {
    'scope': f'repository:{image_path}:pull',
  }
  if auth_info.service:
    auth_params['service'] = auth_info.service
  res = await session.get(auth_info.realm, params=auth_params)
  token = res.json()['token']
  return token


def parse_next_link(value: str) -> str:
  ending = '>; rel="next"'
  if value.endswith(ending):
    return value[1:-len(ending)]
  else:
    raise ValueError(value)


async def get_container_tag_update_time(info: Tuple[str, str, str, AuthInfo]):
  '''
  Find the update time of a container tag.

  In fact, it's the creation time of the image ID referred by the tag. Tag itself does not have any update time.
  '''
  image_path, image_tag, registry_host, auth_info = info
  token = await get_auth_token(auth_info, image_path)

  # HTTP headers
  headers = {
    'Authorization': f'Bearer {token}',
    # Prefer Image Manifest Version 2, Schema 2: https://distribution.github.io/distribution/spec/manifest-v2-2/
    'Accept': ', '.join([
      'application/vnd.oci.image.manifest.v1+json',
      'application/vnd.oci.image.index.v1+json',
      'application/vnd.docker.distribution.manifest.v2+json',
      'application/vnd.docker.distribution.manifest.list.v2+json',
      'application/json',
    ]),
  }

  # Get tag manifest
  url = f'https://{registry_host}/v2/{image_path}/manifests/{image_tag}'
  res = await session.get(url, headers=headers)
  data = res.json()
  # Schema 1 returns the creation time in the response
  if data['schemaVersion'] == 1:
    return json.loads(data['history'][0]['v1Compatibility'])['created']

  # For schema 2, we have to fetch the config's blob
  # For multi-arch images, multiple manifests are bounded with the same tag. We should choose one and then request
  # the manifest's detail
  if data.get('manifests'):
    # It's quite hard to find the manifest matching with current CPU architecture and system.
    # For now we just choose the first and it should probably work for most cases
    image_digest = data['manifests'][0]['digest']
    url = f'https://{registry_host}/v2/{image_path}/manifests/{image_digest}'
    res = await session.get(url, headers=headers)
    data = res.json()

  digest = data['config']['digest']
  url = f'https://{registry_host}/v2/{image_path}/blobs/{digest}'
  res = await session.get(url, headers=headers)
  data = res.json()
  return data['created']


async def get_version(name, conf, *, cache, **kwargs):
  image_path = conf.get('container', name)
  image_tag = None
  # image tag is optional
  if ':' in image_path:
    image_path, image_tag = image_path.split(':', 1)
  registry_host = conf.get('registry', 'docker.io')
  if registry_host == 'docker.io':
    registry_host = 'registry-1.docker.io'

  auth_info = await cache.get(registry_host, get_registry_auth_info)

  # if a tag is given, return the tag's update time, otherwise return the image's tag list
  if image_tag:
    key = image_path, image_tag, registry_host, auth_info
    return await cache.get(key, get_container_tag_update_time)
  key = image_path, registry_host, auth_info
  return await cache.get(key, get_container_tags)
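An example header of the kind a registry's 401 response carries (values made up), run through the parser defined above:

hdr = 'Bearer realm="https://auth.docker.io/token",service="registry.docker.io"'
auth_type, info = parse_www_authenticate_header(hdr)  # the function defined above
print(auth_type)        # Bearer
print(info['realm'])    # https://auth.docker.io/token
print(info['service'])  # registry.docker.io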
nvchecker_source/cpan.py (new file, 15 lines)
@@ -0,0 +1,15 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from nvchecker.api import RichResult

# Using metacpan
CPAN_URL = 'https://fastapi.metacpan.org/release/%s'

async def get_version(name, conf, *, cache, **kwargs):
  key = conf.get('cpan', name)
  data = await cache.get_json(CPAN_URL % key)
  return RichResult(
    version = str(data['version']),
    url = f'https://metacpan.org/release/{data["author"]}/{data["name"]}',
  )
nvchecker_source/cran.py (new file, 29 lines)
@@ -0,0 +1,29 @@
# MIT licensed
# Copyright (c) 2022 Pekka Ristola <pekkarr [at] protonmail [dot] com>, et al.

from nvchecker.api import session, RichResult, GetVersionError

CRAN_URL = 'https://cran.r-project.org/package=%s/DESCRIPTION'
VERSION_FIELD = 'Version: '

async def request(pkg):
  url = CRAN_URL % pkg
  res = await session.get(url)
  return res.body.decode('utf-8', errors='ignore')

async def get_version(name, conf, *, cache, **kwargs):
  package = conf.get('cran', name)

  desc = await cache.get(package, request)

  for line in desc.splitlines():
    if line.startswith(VERSION_FIELD):
      version = line[len(VERSION_FIELD):]
      break
  else:
    raise GetVersionError('Invalid DESCRIPTION file')

  return RichResult(
    version = version,
    url = f'https://cran.r-project.org/web/packages/{package}/',
  )
nvchecker_source/cratesio.py (new file, 40 lines)
@@ -0,0 +1,40 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import re

import structlog

from nvchecker.api import RichResult

logger = structlog.get_logger(logger_name=__name__)


API_URL = 'https://crates.io/api/v1/crates/%s'
# https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string
VERSION_PATTERN = r'^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$'


async def get_version(name, conf, *, cache, **kwargs):
  name = conf.get('cratesio') or name
  use_pre_release = conf.get('use_pre_release', False)
  data = await cache.get_json(API_URL % name)
  results = []
  for v in data['versions']:
    if v['yanked']:
      continue
    version = v['num']
    match = re.fullmatch(VERSION_PATTERN, version)
    if match is None:
      logger.warning('ignoring invalid version', version=version)
      continue
    if not use_pre_release and match.group('prerelease'):
      continue
    results.append(
      RichResult(
        version=version,
        url=f'https://crates.io/crates/{name}/{version}',
      )
    )

  return results
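The semver regex above exposes a named "prerelease" group; versions where it matches are skipped unless use_pre_release is set. A quick check with sample version strings, using the VERSION_PATTERN constant defined in this file:

import re

print(re.fullmatch(VERSION_PATTERN, '1.2.3-beta.1').group('prerelease'))  # beta.1
print(re.fullmatch(VERSION_PATTERN, '1.2.3').group('prerelease'))         # None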
nvchecker_source/debianpkg.py
@@ -1,27 +1,20 @@
 # MIT licensed
+# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.
 # Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.
 
-import structlog
-
-from . import session, conf_cacheable_with_name
-
-logger = structlog.get_logger(logger_name=__name__)
+from nvchecker.api import RichResult, GetVersionError
 
 URL = 'https://sources.debian.org/api/src/%(pkgname)s/?suite=%(suite)s'
 
-get_cacheable_conf = conf_cacheable_with_name('debianpkg')
-
-async def get_version(name, conf, **kwargs):
+async def get_version(name, conf, *, cache, **kwargs):
   pkg = conf.get('debianpkg') or name
-  strip_release = conf.getboolean('strip-release', False)
+  strip_release = conf.get('strip_release', False)
   suite = conf.get('suite') or "sid"
   url = URL % {"pkgname": pkg, "suite": suite}
-  async with session.get(url) as res:
-    data = await res.json()
+  data = await cache.get_json(url)
 
   if not data.get('versions'):
-    logger.error('Debian package not found', name=name)
-    return
+    raise GetVersionError('Debian package not found')
 
   r = data['versions'][0]
   if strip_release:
@@ -29,4 +22,7 @@ async def get_version(name, conf, **kwargs):
   else:
     version = r['version']
 
-  return version
+  return RichResult(
+    version = version,
+    url = f'https://sources.debian.org/src/{data["package"]}/{r["version"]}/',
+  )
nvchecker_source/gems.py (new file, 16 lines)
@@ -0,0 +1,16 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from nvchecker.api import RichResult

GEMS_URL = 'https://rubygems.org/api/v1/versions/%s.json'

async def get_version(name, conf, *, cache, **kwargs):
  key = conf.get('gems', name)
  data = await cache.get_json(GEMS_URL % key)
  return [
    RichResult(
      version = item['number'],
      url = f'https://rubygems.org/gems/{key}/versions/{item["number"]}',
    ) for item in data
  ]
nvchecker_source/git.py (new file, 41 lines)
@@ -0,0 +1,41 @@
# MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.

from .cmd import run_cmd

from nvchecker.api import RichResult

async def get_version(
  name, conf, *, cache, keymanager=None
):
  git = conf['git']

  use_commit = conf.get('use_commit', False)
  if use_commit:
    ref = conf.get('branch')
    if ref is None:
      ref = 'HEAD'
      gitref = None
    else:
      ref = 'refs/heads/' + ref
      gitref = ref
    cmd = f"git ls-remote {git} {ref}"
    data = await cache.get(cmd, run_cmd)
    version = data.split(None, 1)[0]
    return RichResult(
      version = version,
      revision = version,
      gitref = gitref,
    )
  else:
    cmd = f"git ls-remote --tags --refs {git}"
    data = await cache.get(cmd, run_cmd)
    versions = []
    for line in data.splitlines():
      revision, version = line.split("\trefs/tags/", 1)
      versions.append(RichResult(
        version = version,
        revision = revision,
        gitref = f"refs/tags/{version}",
      ))
    return versions
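Each line of `git ls-remote --tags --refs` output is a SHA, a tab, and a ref; the split above peels both apart. A sketch with a made-up line:

line = '1a2b3c4d5e6f7a8b9c0d1e2f3a4b5c6d7e8f9a0b\trefs/tags/v1.2.3'
revision, version = line.split('\trefs/tags/', 1)
print(revision)  # 1a2b3c4d5e6f7a8b9c0d1e2f3a4b5c6d7e8f9a0b
print(version)   # v1.2.3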
nvchecker_source/gitea.py (new file, 57 lines)
@@ -0,0 +1,57 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from __future__ import annotations

import urllib.parse

GITEA_URL = 'https://%s/api/v1/repos/%s/commits'
GITEA_MAX_TAG = 'https://%s/api/v1/repos/%s/tags'

from nvchecker.api import (
  VersionResult, RichResult, Entry,
  AsyncCache, KeyManager,
)

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
) -> VersionResult:
  repo = urllib.parse.quote(conf['gitea'])
  br = conf.get('branch')
  host = conf.get('host', 'gitea.com')
  use_max_tag = conf.get('use_max_tag', False)

  if use_max_tag:
    url = GITEA_MAX_TAG % (host, repo)
  else:
    url = GITEA_URL % (host, repo)
    if br:
      url += '?sha=' + br

  # Load token from config
  token = conf.get('token')
  # Load token from keyman
  if token is None:
    token = keymanager.get_key(host.lower(), 'gitea_' + host.lower())

  # Set private token if token exists.
  headers = {}
  if token:
    headers["Authorization"] = f'token {token}'

  data = await cache.get_json(url, headers = headers)
  if use_max_tag:
    return [
      RichResult(
        version = tag['name'],
        revision = tag['id'],
        url = f'https://{host}/{conf["gitea"]}/releases/tag/{tag["name"]}',
      ) for tag in data
    ]
  else:
    return RichResult(
      version = data[0]['commit']['committer']['date'],
      revision = data[0]['sha'],
      url = data[0]['html_url'],
    )
nvchecker_source/github.py (new file, 287 lines)
@@ -0,0 +1,287 @@
# MIT licensed
# Copyright (c) 2013-2020, 2024 lilydjwg <lilydjwg@gmail.com>, et al.

import time
from urllib.parse import urlencode
from typing import List, Tuple, Union, Optional
import asyncio

import structlog

from nvchecker.api import (
  VersionResult, Entry, AsyncCache, KeyManager,
  HTTPError, session, RichResult, GetVersionError,
)

logger = structlog.get_logger(logger_name=__name__)
ALLOW_REQUEST = None
RATE_LIMITED_ERROR = False

GITHUB_URL = 'https://api.%s/repos/%s/commits'
GITHUB_LATEST_RELEASE = 'https://api.%s/repos/%s/releases/latest'
# https://developer.github.com/v3/git/refs/#get-all-references
GITHUB_MAX_TAG = 'https://api.%s/repos/%s/git/refs/tags'
GITHUB_MAX_RELEASE = 'https://api.%s/repos/%s/releases'
GITHUB_GRAPHQL_URL = 'https://api.%s/graphql'

async def get_version(name, conf, **kwargs):
  global RATE_LIMITED_ERROR, ALLOW_REQUEST

  if RATE_LIMITED_ERROR:
    raise RuntimeError('rate limited')

  if ALLOW_REQUEST is None:
    ALLOW_REQUEST = asyncio.Event()
    ALLOW_REQUEST.set()

  for _ in range(2): # retry once
    try:
      await ALLOW_REQUEST.wait()
      return await get_version_real(name, conf, **kwargs)
    except HTTPError as e:
      if e.code in [403, 429]:
        if n := check_ratelimit(e, name):
          ALLOW_REQUEST.clear()
          await asyncio.sleep(n+1)
          ALLOW_REQUEST.set()
          continue
        RATE_LIMITED_ERROR = True
      raise

QUERY_LATEST_TAG = '''
{{
  repository(name: "{name}", owner: "{owner}") {{
    refs(refPrefix: "refs/tags/", first: 1,
         query: "{query}",
         orderBy: {{field: TAG_COMMIT_DATE, direction: DESC}}) {{
      edges {{
        node {{
          name
          target {{
            oid
          }}
        }}
      }}
    }}
  }}
}}
'''

QUERY_LATEST_RELEASE_WITH_PRERELEASES = '''
{{
  repository(name: "{name}", owner: "{owner}") {{
    releases(first: 1, orderBy: {{field: CREATED_AT, direction: DESC}}) {{
      edges {{
        node {{
          name
          url
          tag {{
            name
          }}
          tagCommit {{
            oid
          }}
        }}
      }}
    }}
  }}
}}
'''

async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult:
  host, repo, query, token = key
  owner, reponame = repo.split('/')
  headers = {
    'Authorization': f'bearer {token}',
    'Content-Type': 'application/json',
  }
  q = QUERY_LATEST_TAG.format(
    owner = owner,
    name = reponame,
    query = query,
  )

  res = await session.post(
    GITHUB_GRAPHQL_URL % host,
    headers = headers,
    json = {'query': q},
  )
  j = res.json()

  refs = j['data']['repository']['refs']['edges']
  if not refs:
    raise GetVersionError('no tag found')

  version = refs[0]['node']['name']
  revision = refs[0]['node']['target']['oid']
  return RichResult(
    version = version,
    gitref = f"refs/tags/{version}",
    revision = revision,
    url = f'https://github.com/{repo}/releases/tag/{version}',
  )

async def get_latest_release_with_prereleases(key: Tuple[str, str, str, str]) -> RichResult:
  host, repo, token, use_release_name = key
  owner, reponame = repo.split('/')
  headers = {
    'Authorization': f'bearer {token}',
    'Content-Type': 'application/json',
  }
  q = QUERY_LATEST_RELEASE_WITH_PRERELEASES.format(
    owner = owner,
    name = reponame,
  )

  res = await session.post(
    GITHUB_GRAPHQL_URL % host,
    headers = headers,
    json = {'query': q},
  )
  j = res.json()

  refs = j['data']['repository']['releases']['edges']
  if not refs:
    raise GetVersionError('no release found')

  tag_name = refs[0]['node']['tag']['name']
  if use_release_name:
    version = refs[0]['node']['name']
  else:
    version = tag_name

  return RichResult(
    version = version,
    gitref = f"refs/tags/{tag_name}",
    revision = refs[0]['node']['tagCommit']['oid'],
    url = refs[0]['node']['url'],
  )

async def get_version_real(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  repo = conf['github']
  host = conf.get('host', "github.com")

  # Load token from config
  token = conf.get('token')
  # Load token from keyman
  if token is None:
    token = keymanager.get_key(host.lower(), 'github')

  use_latest_tag = conf.get('use_latest_tag', False)
  if use_latest_tag:
    if not token:
      raise GetVersionError('token not given but it is required')

    query = conf.get('query', '')
    return await cache.get((host, repo, query, token), get_latest_tag) # type: ignore

  use_latest_release = conf.get('use_latest_release', False)
  include_prereleases = conf.get('include_prereleases', False)
  use_release_name = conf.get('use_release_name', False)
  if use_latest_release and include_prereleases:
    if not token:
      raise GetVersionError('token not given but it is required')

    return await cache.get(
      (host, repo, token, use_release_name),
      get_latest_release_with_prereleases) # type: ignore

  br = conf.get('branch')
  path = conf.get('path')
  use_max_tag = conf.get('use_max_tag', False)
  use_max_release = conf.get('use_max_release', False)
  if use_latest_release:
    url = GITHUB_LATEST_RELEASE % (host, repo)
  elif use_max_tag:
    url = GITHUB_MAX_TAG % (host, repo)
  elif use_max_release:
    url = GITHUB_MAX_RELEASE % (host, repo)
  else:
    url = GITHUB_URL % (host, repo)
    parameters = {}
    if br:
      parameters['sha'] = br
    if path:
      parameters['path'] = path
    url += '?' + urlencode(parameters)
  headers = {
    'Accept': 'application/vnd.github.quicksilver-preview+json',
  }
  if token:
    headers['Authorization'] = f'token {token}'

  data = await cache.get_json(url, headers = headers)

  if use_max_tag:
    tags: List[Union[str, RichResult]] = [
      RichResult(
        version = ref['ref'].split('/', 2)[-1],
        gitref = ref['ref'],
        revision = ref['object']['sha'],
        url = f'https://github.com/{repo}/releases/tag/{ref["ref"].split("/", 2)[-1]}',
      ) for ref in data
    ]
    if not tags:
      raise GetVersionError('No tag found in upstream repository.')
    return tags

  if use_max_release:
    releases: List[Union[str, RichResult]] = [
      RichResult(
        version = ref['name'] if use_release_name else ref['tag_name'],
        gitref = f"refs/tags/{ref['tag_name']}",
        url = ref['html_url'],
      ) for ref in data if include_prereleases or not ref['prerelease']
    ]
    if not releases:
      raise GetVersionError('No release found in upstream repository.')
    return releases

  if use_latest_release:
    if 'tag_name' not in data:
      raise GetVersionError('No release found in upstream repository.')

    if use_release_name:
      version = data['name']
    else:
      version = data['tag_name']

    return RichResult(
      version = version,
      gitref = f"refs/tags/{data['tag_name']}",
      url = data['html_url'],
    )

  else:
    return RichResult(
      # YYYYMMDD.HHMMSS
      version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'),
      revision = data[0]['sha'],
      url = data[0]['html_url'],
    )

def check_ratelimit(exc: HTTPError, name: str) -> Optional[int]:
  res = exc.response
  if not res:
    raise exc

  if v := res.headers.get('retry-after'):
    n = int(v)
    logger.warning('retry-after', n=n)
    return n

  # default -1 is used to re-raise the exception
  n = int(res.headers.get('X-RateLimit-Remaining', -1))
  if n == 0:
    reset = int(res.headers.get('X-RateLimit-Reset'))
    logger.error(f'rate limited, resetting at {time.ctime(reset)}. '
                 'Or get an API token to increase the allowance if not yet',
                 name = name,
                 reset = reset)
    return None

  raise exc
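The ALLOW_REQUEST event above turns a Retry-After response into a process-wide pause: the rate-limited task closes the gate, sleeps the window out, and reopens it, while every other task blocks in wait(). A self-contained sketch of that gating, with made-up timings and task bodies:

import asyncio

async def demo():
  gate = asyncio.Event()
  gate.set()

  async def limited_request(i):
    await gate.wait()  # all tasks pause here while the gate is closed
    return i

  gate.clear()  # pretend one request just got a 429 with Retry-After: 1
  pending = [asyncio.create_task(limited_request(i)) for i in range(3)]
  await asyncio.sleep(1 + 1)  # sleep(n+1), as get_version does above
  gate.set()
  print(await asyncio.gather(*pending))  # [0, 1, 2]

asyncio.run(demo())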
nvchecker_source/gitlab.py (new file, 80 lines)
@@ -0,0 +1,80 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import urllib.parse

import structlog

from nvchecker.api import (
  VersionResult, RichResult, Entry,
  AsyncCache, KeyManager, TemporaryError,
)

GITLAB_URL = 'https://%s/api/v4/projects/%s/repository/commits'
GITLAB_MAX_TAG = 'https://%s/api/v4/projects/%s/repository/tags'

logger = structlog.get_logger(logger_name=__name__)

async def get_version(name, conf, **kwargs):
  try:
    return await get_version_real(name, conf, **kwargs)
  except TemporaryError as e:
    check_ratelimit(e, name)

async def get_version_real(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  repo = urllib.parse.quote_plus(conf['gitlab'])
  br = conf.get('branch')
  host = conf.get('host', "gitlab.com")
  use_max_tag = conf.get('use_max_tag', False)

  if use_max_tag:
    url = GITLAB_MAX_TAG % (host, repo)
  else:
    url = GITLAB_URL % (host, repo)
    if br:
      url += '?ref_name=%s' % br

  # Load token from config
  token = conf.get('token')
  # Load token from keyman
  if token is None:
    token = keymanager.get_key(host.lower(), 'gitlab_' + host.lower())

  # Set private token if token exists.
  headers = {}
  if token:
    headers["PRIVATE-TOKEN"] = token

  data = await cache.get_json(url, headers = headers)
  if use_max_tag:
    return [
      RichResult(
        version = tag['name'],
        revision = tag['commit']['id'],
        url = f'https://{host}/{conf["gitlab"]}/-/tags/{tag["name"]}',
      ) for tag in data
    ]
  else:
    return RichResult(
      version = data[0]['created_at'].split('T', 1)[0].replace('-', ''),
      revision = data[0]['id'],
      url = data[0]['web_url'],
    )

def check_ratelimit(exc, name):
  res = exc.response
  if not res:
    raise

  # default -1 is used to re-raise the exception
  n = int(res.headers.get('RateLimit-Remaining', -1))
  if n == 0:
    logger.error('gitlab rate limited. Wait some time '
                 'or get an API token to increase the allowance if not yet',
                 name = name)
  else:
    raise
nvchecker_source/go.py (new file, 40 lines)
@@ -0,0 +1,40 @@
# MIT licensed
# Copyright (c) 2024 bgme <i@bgme.me>.

from lxml import html

from nvchecker.api import (
  RichResult, Entry, AsyncCache, KeyManager,
  session, GetVersionError,
)

GO_PKG_URL = 'https://pkg.go.dev/{pkg}?tab=versions'
GO_PKG_VERSION_URL = 'https://pkg.go.dev/{pkg}@{version}'


async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> RichResult:
  key = tuple(sorted(conf.items()))
  return await cache.get(key, get_version_impl)


async def get_version_impl(info) -> RichResult:
  conf = dict(info)
  pkg_name = conf.get('go')

  url = GO_PKG_URL.format(pkg=pkg_name)
  res = await session.get(url)
  doc = html.fromstring(res.body.decode())

  elements = doc.xpath("//div[@class='Version-tag']/a/text()")
  try:
    version = elements[0] # type: ignore
    return RichResult(
      version = version, # type: ignore
      url = GO_PKG_VERSION_URL.format(pkg=pkg_name, version=version),
    )
  except IndexError:
    raise GetVersionError("parse error", pkg_name=pkg_name)
nvchecker_source/hackage.py (new file, 15 lines)
@@ -0,0 +1,15 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from nvchecker.api import RichResult

HACKAGE_URL = 'https://hackage.haskell.org/package/%s/preferred.json'

async def get_version(name, conf, *, cache, **kwargs):
  key = conf.get('hackage', name)
  data = await cache.get_json(HACKAGE_URL % key)
  version = data['normal-version'][0]
  return RichResult(
    version = version,
    url = f'https://hackage.haskell.org/package/{key}-{version}',
  )
nvchecker_source/htmlparser.py (new file, 41 lines)
@@ -0,0 +1,41 @@

# MIT licensed
# Copyright (c) 2020 Ypsilik <tt2laurent.maud@gmail.com>, et al.
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from lxml import html, etree

from nvchecker.api import session, GetVersionError

async def get_version(name, conf, *, cache, **kwargs):
  key = tuple(sorted(conf.items()))
  return await cache.get(key, get_version_impl)

async def get_version_impl(info):
  conf = dict(info)

  encoding = conf.get('encoding')
  parser = html.HTMLParser(encoding=encoding)
  data = conf.get('post_data')
  if data is None:
    res = await session.get(conf['url'])
  else:
    res = await session.post(conf['url'], body = data, headers = {
      'Content-Type': conf.get('post_data_type', 'application/x-www-form-urlencoded')
    })
  doc = html.fromstring(res.body, base_url=conf['url'], parser=parser)

  try:
    els = doc.xpath(conf.get('xpath'))
  except ValueError:
    if not conf.get('missing_ok', False):
      raise GetVersionError('version string not found.')
  except etree.XPathEvalError as e:
    raise GetVersionError('bad xpath', exc_info=e)

  version = [
    str(el)
    if isinstance(el, str)
    else str(el.text_content())
    for el in els
  ]
  return version
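For reference, a self-contained sketch of the extraction step above, using an invented page body and xpath; text-node matches come back as strings, while element matches go through text_content():

from lxml import html

doc = html.fromstring('<span class="release">v1.2.3</span>')
els = doc.xpath('//span[@class="release"]/text()')
print([str(el) if isinstance(el, str) else str(el.text_content())
       for el in els])  # -> ['v1.2.3']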
nvchecker_source/httpheader.py (new file, 42 lines)
@@ -0,0 +1,42 @@

# MIT licensed
# Copyright (c) 2021 lilydjwg <lilydjwg@gmail.com>, et al.

import re

from nvchecker.api import session, GetVersionError

async def get_version(name, conf, *, cache, **kwargs):
  key = tuple(sorted(conf.items()))
  return await cache.get(key, get_version_impl)

async def get_version_impl(info):
  conf = dict(info)
  url = conf['url']
  header = conf.get('header', 'Location')
  follow_redirects = conf.get('follow_redirects', False)
  method = conf.get('method', 'HEAD')

  try:
    regex = re.compile(conf['regex'])
  except re.error as e:
    raise GetVersionError('bad regex', exc_info=e)

  res = await session.request(
    url,
    method = method,
    follow_redirects = follow_redirects,
  )

  header_value = res.headers.get(header)
  if not header_value:
    raise GetVersionError(
      'header not found or is empty',
      header = header,
      value = header_value,
    )

  try:
    version = regex.findall(header_value)
  except ValueError:
    raise GetVersionError('version string not found.')
  return version
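The typical use of this source is a stable "latest" URL that redirects to a versioned file name; with follow_redirects left off, the version is pulled from the Location header of the first response. A standalone sketch with invented values:

import re

# Hypothetical Location header returned for a stable download URL.
location = 'https://example.org/dist/tool-2.8.1.tar.gz'
regex = re.compile(r'tool-([\d.]+)\.tar\.gz')
print(regex.findall(location))  # -> ['2.8.1']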
nvchecker_source/jq.py (new file, 42 lines)
@@ -0,0 +1,42 @@

# MIT licensed
# Copyright (c) 2024 Rocket Aaron <i@rocka.me>, et al.

import json
import jq

from nvchecker.api import session, GetVersionError

async def get_version(name, conf, *, cache, **kwargs):
  key = tuple(sorted(conf.items()))
  return await cache.get(key, get_version_impl)

async def get_version_impl(info):
  conf = dict(info)

  try:
    program = jq.compile(conf.get('filter', '.'))
  except ValueError as e:
    raise GetVersionError('bad jq filter', exc_info=e)

  data = conf.get('post_data')
  if data is None:
    res = await session.get(conf['url'])
  else:
    res = await session.post(conf['url'], body = data, headers = {
      'Content-Type': conf.get('post_data_type', 'application/json')
    })

  try:
    obj = json.loads(res.body)
  except json.decoder.JSONDecodeError as e:
    raise GetVersionError('bad json string', exc_info=e)

  try:
    version = program.input(obj).all()
    if version == [None] and not conf.get('missing_ok', False):
      raise GetVersionError('version string not found.')
    version = [str(v) for v in version]
  except ValueError as e:
    raise GetVersionError('failed to filter json', exc_info=e)

  return version
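A minimal standalone demonstration of the filtering step, using the same `jq` binding API the module calls above (compile, then input(...).all()) on an invented response object:

import jq

# Hypothetical API response; the filter extracts a nested field.
obj = {'release': {'tag': 'v1.4.0', 'draft': False}}
program = jq.compile('.release.tag')
print(program.input(obj).all())  # -> ['v1.4.0']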
nvchecker_source/launchpad.py (new file, 20 lines)
@@ -0,0 +1,20 @@

# MIT Licensed
# Copyright (c) 2024 Bert Peters <bertptrs@archlinux.org>, et al.
from __future__ import annotations

from nvchecker.api import AsyncCache, Entry, RichResult

PROJECT_INFO_URL = "https://api.launchpad.net/1.0/{launchpad}"

async def get_version(name: str, conf: Entry, *, cache: AsyncCache, **kwargs):
  launchpad = conf["launchpad"]

  project_data = await cache.get_json(PROJECT_INFO_URL.format(launchpad=launchpad))
  data = await cache.get_json(project_data['releases_collection_link'])

  return [
    RichResult(version=entry["version"], url=entry["web_link"])
    for entry in data["entries"]
  ]
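The Launchpad project document does not embed releases directly; it carries a `releases_collection_link` that is fetched in a second request. A sketch of the two payload shapes involved, both invented and trimmed to the fields used above:

# Hypothetical first response for https://api.launchpad.net/1.0/<project>:
project_data = {
  'releases_collection_link': 'https://api.launchpad.net/1.0/example/releases',
}
# Hypothetical second response, fetched from that link:
data = {'entries': [
  {'version': '1.10', 'web_link': 'https://launchpad.net/example/+release/1.10'},
]}
print([(e['version'], e['web_link']) for e in data['entries']])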
nvchecker_source/manual.py (new file, 5 lines)
@@ -0,0 +1,5 @@

# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

async def get_version(name, conf, **kwargs):
  return str(conf.get('manual')).strip() or None
nvchecker_source/mercurial.py (new file, 10 lines)
@@ -0,0 +1,10 @@

# MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.

async def get_version(name, conf, *, cache, **kwargs):
  url = conf['mercurial'] + '/json-tags'

  data = await cache.get_json(url)

  version = [tag['tag'] for tag in data['tags']]
  return version
nvchecker_source/none.py (new file, 16 lines)
@@ -0,0 +1,16 @@

# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.

from __future__ import annotations

from nvchecker.api import (
  BaseWorker, GetVersionError, RawResult,
)

class Worker(BaseWorker):
  async def run(self) -> None:
    exc = GetVersionError('no source specified')
    async with self.task_sem:
      for name, conf in self.tasks:
        await self.result_q.put(
          RawResult(name, exc, conf))
nvchecker_source/npm.py (new file, 38 lines)
@@ -0,0 +1,38 @@

# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import json
import re
from nvchecker.api import session, RichResult

NPM_URL = 'https://registry.npmjs.org/%s'

def configure(config):
  global NPM_URL
  url = config.get('registry')
  if url:
    NPM_URL = f'{url.rstrip("/")}/%s'

async def get_first_1k(url):
  headers = {
    "Accept": "application/vnd.npm.install-v1+json",
    "Range": "bytes=0-1023",
  }
  res = await session.get(url, headers=headers)
  return res.body

async def get_version(name, conf, *, cache, **kwargs):
  key = conf.get('npm', name)
  data = await cache.get(NPM_URL % key, get_first_1k)

  dist_tags = json.loads(re.search(b'"dist-tags":({.*?})', data).group(1))
  version = dist_tags['latest']

  # There is no standardised URL scheme, so we only return an URL for the default registry
  if NPM_URL.startswith('https://registry.npmjs.org/'):
    return RichResult(
      version = version,
      url = f'https://www.npmjs.com/package/{key}/v/{version}',
    )
  else:
    return version
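Because the Range header fetches only the first kilobyte of the registry document, the body is usually truncated JSON, so `dist-tags` is recovered with a regex instead of running json.loads over the whole body. A standalone sketch with an invented, cut-off response:

import json
import re

# Hypothetical registry response, truncated mid-document at 1 KiB.
body = b'{"name":"example","dist-tags":{"latest":"2.3.1"},"versions":{"0.1.0":'
dist_tags = json.loads(re.search(b'"dist-tags":({.*?})', body).group(1))
print(dist_tags['latest'])  # -> 2.3.1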
nvchecker_source/opam.py (new file, 71 lines)
@@ -0,0 +1,71 @@

# MIT licensed
# Copyright (c) 2024 Daniel Peukert <daniel@peukert.cc>, et al.

import asyncio
from io import BytesIO
import tarfile
from typing import List

from nvchecker.api import (
  session, VersionResult,
  Entry, AsyncCache,
  KeyManager, RichResult
)

OPAM_REPO_INDEX_URL = "%s/index.tar.gz"
OPAM_VERSION_PATH_PREFIX = "packages/%s/%s."
OPAM_VERSION_PATH_SUFFIX = "/opam"

OPAM_DEFAULT_REPO = 'https://opam.ocaml.org'
OPAM_DEFAULT_REPO_VERSION_URL = "%s/packages/%s/%s.%s"

def _decompress_and_list_files(data: bytes) -> List[str]:
  # Convert the bytes to a file object and get a list of files
  archive = tarfile.open(mode='r', fileobj=BytesIO(data))
  return archive.getnames()

async def get_files(url: str) -> List[str]:
  # Download the file and get its contents
  res = await session.get(url)
  data = res.body

  # Get the file list of the archive
  loop = asyncio.get_running_loop()
  return await loop.run_in_executor(None, _decompress_and_list_files, data)

async def get_package_versions(files: List[str], pkg: str) -> List[str]:
  # Prepare the filename prefix based on the package name
  prefix = OPAM_VERSION_PATH_PREFIX % (pkg, pkg)

  # Only keep opam files that are relevant to the package we're working with
  filtered_files = []

  for filename in files:
    if filename.startswith(prefix) and filename.endswith(OPAM_VERSION_PATH_SUFFIX):
      filtered_files.append(filename[len(prefix):-1*len(OPAM_VERSION_PATH_SUFFIX)])

  return filtered_files

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
):
  pkg = conf.get('pkg', name)
  repo = conf.get('repo', OPAM_DEFAULT_REPO).rstrip('/')

  # Get the list of files in the repo index (see https://opam.ocaml.org/doc/Manual.html#Repositories for repo structure)
  files = await cache.get(OPAM_REPO_INDEX_URL % repo, get_files)  # type: ignore

  # Parse the version strings from the file names
  raw_versions = await get_package_versions(files, pkg)

  # Convert the version strings into RichResults
  versions = []
  for version in raw_versions:
    versions.append(RichResult(
      version = version,
      # There is no standardised URL scheme, so we only return an URL for the default registry
      url = OPAM_DEFAULT_REPO_VERSION_URL % (repo, pkg, pkg, version) if repo == OPAM_DEFAULT_REPO else None,
    ))
  return versions
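The version extraction relies purely on the repository's path layout (packages/<pkg>/<pkg>.<version>/opam), stripping a fixed prefix and suffix from each matching archive member. A standalone sketch over an invented file list:

# Hypothetical index.tar.gz member names.
files = [
  'packages/lwt/lwt.5.7.0/opam',
  'packages/lwt/lwt.5.6.1/opam',
  'packages/dune/dune.3.14.0/opam',
]
pkg = 'lwt'
prefix = 'packages/%s/%s.' % (pkg, pkg)
suffix = '/opam'
print([f[len(prefix):-len(suffix)] for f in files
       if f.startswith(prefix) and f.endswith(suffix)])
# -> ['5.7.0', '5.6.1']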
nvchecker_source/openvsx.py (new file, 18 lines)
@@ -0,0 +1,18 @@

# MIT licensed
# Copyright (c) 2013-2021 Th3Whit3Wolf <the.white.wolf.is.1337@gmail.com>, et al.

from nvchecker.api import RichResult

API_URL = 'https://open-vsx.org/api/%s/%s'

async def get_version(name, conf, *, cache, **kwargs):
  name = conf.get('openvsx') or name
  splitName = name.split('.')
  publisher = splitName[0]
  extension = splitName[1]
  data = await cache.get_json(API_URL % (publisher, extension))
  version = data['version']
  return RichResult(
    version = version,
    url = f'https://open-vsx.org/extension/{publisher}/{extension}/{version}',
  )
nvchecker_source/packagist.py (new file, 23 lines)
@@ -0,0 +1,23 @@

# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from nvchecker.api import RichResult

PACKAGIST_URL = 'https://packagist.org/packages/%s.json'

async def get_version(name, conf, *, cache, **kwargs):
  key = conf.get('packagist', name)
  data = await cache.get_json(PACKAGIST_URL % key)

  versions = {
    version: details
    for version, details in data["package"]['versions'].items()
    if version != "dev-master"
  }

  if len(versions):
    version = max(versions, key=lambda version: versions[version]["time"])
    return RichResult(
      version = version,
      url = f'https://packagist.org/packages/{data["package"]["name"]}#{version}',
    )
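The source picks the release with the greatest `time` field rather than trusting any ordering in the response. A sketch with a hypothetical version map:

# Hypothetical packagist.org version entries, trimmed to the field used.
versions = {
  '1.0.0': {'time': '2023-01-01T00:00:00+00:00'},
  '1.1.0': {'time': '2024-03-05T00:00:00+00:00'},
}
# ISO-8601 timestamps in the same zone compare correctly as strings.
print(max(versions, key=lambda v: versions[v]['time']))  # -> 1.1.0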
nvchecker_source/pacman.py (new file, 16 lines)
@@ -0,0 +1,16 @@

# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from nvchecker_source import cmd

async def get_version(name, conf, **kwargs):
  referree = conf.get('pacman') or name
  c = "LANG=C pacman -Si %s | grep -F Version | awk '{print $3}' | head -n 1" % referree
  conf['cmd'] = c
  strip_release = conf.get('strip_release', False)

  version = await cmd.get_version(name, conf, **kwargs)

  if strip_release and '-' in version:
    version = version.rsplit('-', 1)[0]
  return version
nvchecker_source/pagure.py (new file, 32 lines)
@@ -0,0 +1,32 @@

# MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.

import urllib.parse

import structlog

from nvchecker.api import (
  VersionResult, RichResult, Entry, AsyncCache, KeyManager,
)

PAGURE_URL = 'https://%s/api/0/%s/git/tags?with_commits=true'

logger = structlog.get_logger(logger_name=__name__)

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  repo = conf['pagure']
  host = conf.get('host', "pagure.io")

  url = PAGURE_URL % (host, repo)

  data = await cache.get_json(url)
  return [
    RichResult(
      version = version,
      url = f'https://{host}/{repo}/tree/{version_hash}',
    ) for version, version_hash in data["tags"].items()
  ]
nvchecker_source/pypi.py (new file, 43 lines)
@@ -0,0 +1,43 @@

# MIT licensed
# Copyright (c) 2013-2021,2023-2024 lilydjwg <lilydjwg@gmail.com>, et al.

import structlog
from packaging.version import Version, InvalidVersion

from nvchecker.api import RichResult

logger = structlog.get_logger(logger_name=__name__)

async def get_version(name, conf, *, cache, **kwargs):
  ret = []

  package = conf.get('pypi') or name
  use_pre_release = conf.get('use_pre_release', False)

  url = 'https://pypi.org/pypi/{}/json'.format(package)

  data = await cache.get_json(url)

  for version in data['releases'].keys():
    # Skip versions that are marked as yanked.
    if (vers := data['releases'][version]) and vers[0]['yanked']:
      continue

    try:
      parsed_version = Version(version)
    except InvalidVersion:
      if data['releases'][version]:
        # emit a warning if there is something under the invalid version
        # sympy has an empty "0.5.13-hg" version
        logger.warning('ignoring invalid version', version=version)
      continue

    if not use_pre_release and parsed_version.is_prerelease:
      continue

    ret.append(RichResult(
      version = version,
      url = f'https://pypi.org/project/{package}/{version}/',
    ))

  return ret
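The filtering above leans on packaging.version.Version for both validity and pre-release detection; a quick standalone illustration (the "0.5.13-hg" case is the sympy oddity mentioned in the comment):

from packaging.version import Version, InvalidVersion

for v in ['1.2.0', '1.3.0rc1', '0.5.13-hg']:
  try:
    parsed = Version(v)
  except InvalidVersion:
    print(v, '-> invalid, skipped')
    continue
  print(v, '-> pre-release' if parsed.is_prerelease else '-> release')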
nvchecker_source/regex.py (new file, 39 lines)
@@ -0,0 +1,39 @@

# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import re

from nvchecker.api import session, GetVersionError

async def get_version(name, conf, *, cache, **kwargs):
  try:
    regex = re.compile(conf['regex'])
  except re.error as e:
    raise GetVersionError('bad regex', exc_info=e)
  if regex.groups > 1:
    raise GetVersionError('multi-group regex')

  key = (
    conf['url'],
    conf.get('encoding', 'latin1'),
    conf.get('post_data'),
    conf.get('post_data_type', 'application/x-www-form-urlencoded'),
  )
  body = await cache.get(key, get_url)

  versions = regex.findall(body)
  if not versions and not conf.get('missing_ok', False):
    raise GetVersionError('version string not found.')
  return versions

async def get_url(info):
  url, encoding, post_data, post_data_type = info

  if post_data is None:
    res = await session.get(url)
  else:
    res = await session.post(url, body = post_data, headers = {
      'Content-Type': post_data_type,
    })
  body = res.body.decode(encoding)
  return body
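The 'multi-group regex' rejection exists because re.findall returns tuples once a pattern has more than one capturing group, which would not be usable as version strings. A short demonstration:

import re

text = 'version 1.2.3'
single = re.compile(r'version ([\d.]+)')
double = re.compile(r'version (\d+)\.([\d.]+)')
print(single.findall(text))  # -> ['1.2.3'] (plain strings)
print(double.findall(text))  # -> [('1', '2.3')] (tuples, hence the check)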