Mirror of https://github.com/lilydjwg/nvchecker.git
synced 2025-03-10 06:14:02 +00:00
Compare commits
1007 commits
144 changed files with 8562 additions and 679 deletions
.github/workflows/codeql.yml (vendored, new file, 84 lines)
@@ -0,0 +1,84 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"

on:
  push:
    branches: [ "master" ]
  pull_request:
    branches: [ "master" ]
  schedule:
    - cron: '34 14 * * 4'

jobs:
  analyze:
    name: Analyze
    # Runner size impacts CodeQL analysis time. To learn more, please see:
    #   - https://gh.io/recommended-hardware-resources-for-running-codeql
    #   - https://gh.io/supported-runners-and-hardware-resources
    #   - https://gh.io/using-larger-runners
    # Consider using larger runners for possible analysis time improvements.
    runs-on: ${{ (matrix.language == 'swift' && 'macos-latest') || 'ubuntu-latest' }}
    timeout-minutes: ${{ (matrix.language == 'swift' && 120) || 360 }}
    permissions:
      # required for all workflows
      security-events: write

      # only required for workflows in private repositories
      actions: read
      contents: read

    strategy:
      fail-fast: false
      matrix:
        language: [ 'python' ]
        # CodeQL supports [ 'c-cpp', 'csharp', 'go', 'java-kotlin', 'javascript-typescript', 'python', 'ruby', 'swift' ]
        # Use only 'java-kotlin' to analyze code written in Java, Kotlin or both
        # Use only 'javascript-typescript' to analyze code written in JavaScript, TypeScript or both
        # Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support

    steps:
    - name: Checkout repository
      uses: actions/checkout@v4

    # Initializes the CodeQL tools for scanning.
    - name: Initialize CodeQL
      uses: github/codeql-action/init@v3
      with:
        languages: ${{ matrix.language }}
        # If you wish to specify custom queries, you can do so here or in a config file.
        # By default, queries listed here will override any specified in a config file.
        # Prefix the list here with "+" to use these queries and those in the config file.

        # For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
        # queries: security-extended,security-and-quality

    # Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
    # If this step fails, then you should remove it and run the build manually (see below)
    - name: Autobuild
      uses: github/codeql-action/autobuild@v3

    # ℹ️ Command-line programs to run using the OS shell.
    # 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun

    # If the Autobuild fails above, remove it and uncomment the following three lines.
    # modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.

    # - run: |
    #     echo "Run, Build Application using script"
    #     ./location_of_script_within_repo/buildscript.sh

    - name: Perform CodeQL Analysis
      uses: github/codeql-action/analyze@v3
      with:
        category: "/language:${{matrix.language}}"
.github/workflows/mypy.yaml (vendored, new file, 28 lines)
@@ -0,0 +1,28 @@
name: run mypy
on: [push, pull_request]
jobs:
  build:
    name: run mypy
    runs-on: ubuntu-latest
    steps:
    - name: Checkout Code
      uses: actions/checkout@v4
    - name: Setup Cache
      uses: actions/cache@v4
      env:
        cache-name: cache-pipmypy
      with:
        path: ~/.cache/pip
        key: ${{ runner.os }}-${{ env.cache-name }}-${{ hashFiles('setup.py') }}
        restore-keys: |
          ${{ runner.os }}-${{ env.cache-name }}-
          ${{ runner.os }}-cache-pip-
    - name: Install deps
      run: pip3 install -U tornado pytest pytest-asyncio pytest-httpbin pytest-rerunfailures structlog tomli platformdirs aiohttp httpx mypy awesomeversion
    - name: Run mypy for --install-types
      run: PATH=$HOME/.local/bin:$PATH mypy --namespace-packages --explicit-package-bases nvchecker nvchecker_source tests
      continue-on-error: true
    - name: Install types
      run: PATH=$HOME/.local/bin:$PATH yes | mypy --install-types
    - name: Run mypy
      run: PATH=$HOME/.local/bin:$PATH mypy --namespace-packages --explicit-package-bases nvchecker nvchecker_source tests
.github/workflows/tests.yaml (vendored, new file, 93 lines)
@@ -0,0 +1,93 @@
name: run tests
on: [push, pull_request]
jobs:
  tests:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        python-version:
          - "3.8"
          - "3.9"
          - "3.10"
          - "3.11"
          - "3.12"
          - "3.13"
          # pypy fails in some cases but we don't care much about that
          # with github actions we can't mark some jobs to not affect the overall
          # conclusion so we have to omit "allow-failure" tests.
          # See https://github.com/actions/toolkit/issues/399
          # - pypy-3.7
        deps:
          - tornado pycurl
          # timer runs when loop is closed, see https://github.com/lilydjwg/nvchecker/actions/runs/11650699759/job/32439742210
          # - aiohttp
          - tornado
          - httpx[http2]>=0.14.0
        exclude: []
    steps:
    - name: Checkout code
      uses: actions/checkout@v4
    - name: Setup Python ${{ matrix.python-version }}
      uses: actions/setup-python@v5
      with:
        python-version: ${{ matrix.python-version }}
    - name: Setup Cache
      uses: actions/cache@v4
      env:
        cache-name: cache-pip
      with:
        path: ~/.cache/pip
        key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.deps }}-${{ hashFiles('pyproject.toml', 'setup.cfg') }}
        restore-keys: |
          ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.deps }}-
          ${{ runner.os }}-${{ env.cache-name }}-

    - name: Install pycurl deps
      if: ${{ contains(matrix.deps, 'pycurl') }}
      run: |
        sudo apt update
        sudo apt install -y libcurl4-openssl-dev
    # werkzeug is pinned for httpbin compatibility https://github.com/postmanlabs/httpbin/issues/673
    - name: Install Python deps
      env:
        # use env to avoid `>` being redirection
        deps: ${{ matrix.deps }}
      run: pip install -U $deps pytest 'pytest-asyncio>=0.24' pytest-httpbin pytest-rerunfailures structlog tomli platformdirs lxml jq 'werkzeug<2.1' awesomeversion
    # don't use binary distribution because:
    # hardcoded cacert path doesn't work on Ubuntu (should have been resolved?)
    # limited compression support (only deflate & gzip)
    - name: Install pycurl
      if: ${{ contains(matrix.deps, 'pycurl') }}
      run: |
        pip uninstall -y pycurl
        pip install -U pycurl --no-binary :all:
    - name: Decrypt keys
      env:
        KEY: ${{ secrets.KEY }}
      run: if [[ -n $KEY ]]; then openssl enc -d -aes-256-ctr -pbkdf2 -k $KEY -in keyfile.toml.enc -out keyfile.toml; fi

    - name: Setup mitmproxy cache
      uses: actions/cache@v4
      env:
        cache-name: cache-mitm
      with:
        path: ~/.mitmproxy
        key: ${{ env.cache-name }}
        restore-keys: |
          ${{ env.cache-name }}-
    - name: Install mitmproxy
      run: |
        /usr/bin/python -m venv --system-site-packages ~/.mitmproxy/venv
        . ~/.mitmproxy/venv/bin/activate
        pip install -U mitmproxy
        # https://github.com/DevToys-app/DevToys/issues/1373#issuecomment-2599820594
        sudo sysctl -w kernel.apparmor_restrict_unprivileged_unconfined=0
        sudo sysctl -w kernel.apparmor_restrict_unprivileged_userns=0

    # - name: Setup upterm session
    #   uses: lhotari/action-upterm@v1
    - name: Run pytest
      env:
        mitmdump: /home/runner/.mitmproxy/venv/bin/mitmdump
      run: scripts/run_cached_tests
.gitignore (vendored, 14 lines changed)
@@ -1,2 +1,12 @@
nvchecker.egg-info/
versions/
*.egg-info/
__pycache__/
/build/
/dist/
.cache/
.eggs/
*.pyc
*.pyo
.travis.pub
.pytest_cache/
.tox/
keyfile.toml
.readthedocs.yaml (new file, 10 lines)
@@ -0,0 +1,10 @@
version: 2
build:
  os: ubuntu-22.04
  tools:
    python: "3.11"
sphinx:
  configuration: docs/conf.py
python:
  install:
    - requirements: docs/requirements.txt
.typos.toml (new file, 2 lines)
@@ -0,0 +1,2 @@
[default.extend-words]
mis = "mis"
LICENSE (new file, 21 lines)
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
README.rst (132 lines changed)
@@ -1,96 +1,60 @@
**nvchecker** (short for *new version checker*) is for checking if a new version of some software has been released.

nvchecker is now **in development**.
This is the version 2.0 branch. For the old version 1.x, please switch to the ``v1.x`` branch.

.. image:: https://github.com/lilydjwg/nvchecker/workflows/run%20tests/badge.svg?branch=master
   :alt: Test Status
   :target: https://github.com/lilydjwg/nvchecker/actions?query=workflow%3A%22run+tests%22
.. image:: https://badge.fury.io/py/nvchecker.svg
   :alt: PyPI version
   :target: https://badge.fury.io/py/nvchecker
.. image:: https://readthedocs.org/projects/nvchecker/badge/?version=latest
   :target: https://nvchecker.readthedocs.io/en/latest/?badge=latest
   :alt: Documentation Status

.. image:: https://repology.org/badge/vertical-allrepos/nvchecker.svg
   :alt: Packaging status
   :target: https://repology.org/metapackage/nvchecker/versions

.. contents::
   :local:

Dependency
==========
- Python 3
- Tornado
- All commands used in your configuration files
----------
- Python 3.8+
- Python library: structlog, platformdirs, tomli (on Python < 3.11)
- One of these Python library combinations (ordered by preference):

  * tornado + pycurl
  * aiohttp
  * httpx with http2 support (experimental; only latest version is supported)
  * tornado

- All commands used in your software version configuration files

Install and Run
---------------
To install::

  pip3 install nvchecker

To use the latest code, you can also clone this repository and run::

  pip install .

Running
=======
To see available options::

  ./nvchecker --help
  nvchecker --help

Run with one or more configuration files::
Run with one or more software version files::

  ./nvchecker config_file_1 config_file_2 ...
  nvchecker -c config_file

You normally will like to specify some "version files"; see below.
You normally will like to specify some "version record files"; see below.

Version Files
=============
Version files record which version of the software you know or is available. They are simple key-value pairs of ``(name, version)`` separated by ``:`` ::
Documentation
-------------

  fcitx: 4.2.7
  google-chrome: 27.0.1453.93-200836
  vim: 7.3.1024

Say you've got a version file called ``old_ver.txt`` which records all your watched software and their versions. To update it using ``nvchecker``::

  ./nvchecker --oldverfile=old_ver.txt --verfile=new_ver.txt config.ini

Compare the two files for updates (assuming they are sorted alphabetically; files generated by ``nvchecker`` are already sorted)::

  comm -13 old_ver.txt new_ver.txt
  # or say that in English:
  comm -13 old_ver.txt new_ver.txt | sed 's/:/ has updated to version/;s/$/./'
  # show both old and new versions
  join old_ver.txt new_ver.txt | awk '$2 != $3'

Configuration Files
===================
The configuration files are in ini format. *Section names* is the name of the software. Following fields are used to tell nvchecker how to determine the current version of that software.

See ``sample_config.ini`` for an example.

Search in a Webpage
-------------------
Search through a specific webpage for the version string. This type of version finding has these fields:

url
  The URL of the webpage to fetch.

encoding
  (*Optional*) The character encoding of the webpage, if ``latin1`` is not appropriate.

regex
  A regular expression used to find the version string.

  It can have zero or one capture group. The capture group or the whole match is the version string.

  When multiple version strings are found, the maximum of those is chosen.

proxy
  The HTTP proxy to use. The format is ``host:port``, e.g. ``localhost:8087``.

Find with a Command
-------------------
Use a shell command line to get the version. The output is stripped first, so trailing newlines do not bother.

cmd
  The command line to use. This will run with the system's standard shell (e.g. ``/bin/sh``).

Check AUR
---------
Check `Arch User Repository <https://aur.archlinux.org/>`_ for updates.

aur
  The package name in AUR. If empty, use the name of software (the *section name*).

Check GitHub
------------
Check `GitHub <https://github.com/>`_ for updates. The version returned is in date format ``%Y%m%d``, e.g. ``20130701``.

github
  The github repository, with author, e.g. ``lilydjwg/nvchecker``.

Other
-----
More to come. Send me a patch or pull request if you can't wait and have written one yourself :-)

Bugs
----
* Finish writing results even on Ctrl-C or other interruption.
For detailed documentation, see `https://nvchecker.readthedocs.io/en/latest/ <https://nvchecker.readthedocs.io/en/latest/>`_.
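The version-record comparison described above (the ``comm``/``join`` one-liners) can also be expressed in a few lines of Python. This is an illustrative sketch, not part of the repository; only the file names ``old_ver.txt``/``new_ver.txt`` and the ``name: version`` record format come from the README text above::

  # Report updated entries across two "name: version" record files.
  def read_verfile(path):
    with open(path) as f:
      return dict(line.rstrip('\n').split(': ', 1) for line in f if line.strip())

  old = read_verfile('old_ver.txt')
  new = read_verfile('new_ver.txt')
  for name, version in sorted(new.items()):
    if old.get(name) != version:
      print(f'{name} has updated to version {version}.')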
(deleted file)
@@ -1,207 +0,0 @@
[3to2]
aur

[android-apktool]
aur

[android-docs]
aur

[android-ndk]
aur

[android-sdk]
aur

; [aufs3-util-lily-git]

[cgdb]
aur

[coffee-script]
aur

[dcron]
aur

[dmg2img]
aur

[dnscrypt-proxy-git]
; my aur
github = jedisct1/dnscrypt-proxy

[elflibviewer]
aur

[evince-nodbus]
cmd = LANG=C pacman -Si evince | grep -F Version | awk '{print $3}'

[fcitx-lilydjwg-git]
github = fcitx/fcitx

[google-appengine-python]
aur

; [gvim-lily]

[hfsprogs]
aur

[kingsoft-office]
aur

[latencytop]
aur

[libsodium]
aur

; [linux-lily]

; [linux-lily-headers]

[msdl]
aur

[nautilus-dropbox]
aur

[nodejs-jake]
aur

[openresty-dev]
; my aur
url = http://openresty.org/
regex = ngx_openresty-([\d.]+)\.tar\.gz\|

[perl-data-random]
aur

[perl-goo-canvas]
aur

[perl-gtk2-imageview]
aur

[perl-gtk2-unique]
aur

[perl-mouse]
aur

[perl-net-dropbox-api]
aur

[perl-net-oauth]
aur

[perl-proc-processtable]
aur

[perl-yaml-tiny]
aur

[perl4-corelibs]
aur

[python-autopep8]
aur

[python-bitstring]
aur

[python-blist]
aur

[python-cffi]
; my aur
url = https://pypi.python.org/pypi/cffi
regex = cffi-([\d.]+)\.tar\.gz

[python-pycparser-git]
aur

[python-setproctitle]
aur

; [python-you-get-git]
; RSS'ed

[python2-netlib-git]
aur

[python3-pycurl]
aur

[reaver-wps-svn]
aur

[ruby-gettext]
aur

[ruby-levenshtein]
aur

[ruby-locale]
aur

[ruby-maruku]
aur

[ruby-sass]
aur

[ruby-yard]
aur

[shutter]
aur

[spideroak]
aur

[sqlite-manager]
aur

[ssed]
; my aur
url = http://sed.sourceforge.net/grabbag/ssed/
regex = The current version is ([\d.]+)\.
proxy = localhost:8087

[tp_smapi-dkms]
aur

[ttf-ume]
aur

[urlview]
aur

; [vim-lily]

[wdiff]
aur

[wireshark-gtk2]
cmd = LANG=C pacman -Si wireshark-gtk | grep -F Version | awk '{print $3}'

[xf86-input-wizardpen]
aur

[xkbset]
aur

[xmind]
aur

[zbar]
aur

; [zhcon]
; my aur
; last update is six years ago

[zint]
aur
(deleted file)
@@ -1,31 +0,0 @@
[fcitx]
url = https://code.google.com/p/fcitx/
regex = fcitx-([\d.]+)\.tar\.xz

[vim]
url = http://ftp.vim.org/pub/vim/patches/7.3/
regex = 7\.3\.\d+

; [badone]
; url = http://www.baidu.com/
; regex = baidu (\d+)

[google-chrome]
cmd = wget -qO- http://dl.google.com/linux/chrome/rpm/stable/x86_64/repodata/other.xml.gz | zgrep "google-chrome-stable" | awk -F\" '{print $10"-"$12}'

[you-get]
aur = python-you-get-git

[fbcat]
aur

[winterpy]
github = lilydjwg/winterpy

[nvchecker]
github = lilydjwg/nvchecker

[ssed]
url = http://sed.sourceforge.net/grabbag/ssed/
regex = The current version is ([\d.]+)\.
proxy = localhost:8087
docs/.gitignore (vendored, new file, 1 line)
@@ -0,0 +1 @@
_build/
docs/Makefile (new file, 26 lines)
@@ -0,0 +1,26 @@
# Minimal makefile for Sphinx documentation
#

# You can set these variables from the command line, and also
# from the environment for the first two.
SPHINXOPTS  ?=
SPHINXBUILD ?= sphinx-build
SOURCEDIR   = .
BUILDDIR    = _build

# Put it first so that "make" without argument is like "make help".
help:
	@$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)

.PHONY: help Makefile man

man: $(BUILDDIR)/man/nvchecker.1

$(BUILDDIR)/man/nvchecker.1: usage.rst
	mkdir -p $(BUILDDIR)/man
	./myrst2man.py $< > $@

# Catch-all target: route all unknown targets to Sphinx using the new
# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS).
%:
	@$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O)
docs/api.rst (new file, 26 lines)
@@ -0,0 +1,26 @@
``nvchecker.api`` --- The source plugin API
===========================================

.. automodule:: nvchecker.api
   :members:
   :imported-members:
   :undoc-members:

.. py:data:: session
   :type: nvchecker.httpclient.base.BaseSession

   The object to send out HTTP requests, respecting various options in the configuration entry.

.. automodule:: nvchecker.httpclient.base
   :members: BaseSession, Response
   :undoc-members:

.. autodata:: nvchecker.api.proxy
.. autodata:: nvchecker.api.user_agent
.. autodata:: nvchecker.api.tries
.. autodata:: nvchecker.api.verify_cert

.. py:data:: nvchecker.api.entry_waiter
   :type: contextvars.ContextVar

   This :class:`ContextVar <contextvars.ContextVar>` contains an :class:`EntryWaiter <nvchecker.api.EntryWaiter>` instance for waiting on other entries.
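As a concrete illustration of the ``session`` object documented above, a plugin coroutine might fetch a page like this. This is a hedged sketch: the ``fetch_page`` helper and the decoding choice are made up for the example, while ``session`` and the ``Response`` object are the ones described in this file::

  from nvchecker.api import session

  async def fetch_page(url: str) -> str:
    # session respects the proxy / user_agent / tries context options
    res = await session.get(url)
    return res.body.decode()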
docs/conf.py (new file, 47 lines)
@@ -0,0 +1,47 @@
import os
import sys

sys.path.insert(0, os.path.abspath(".."))
import nvchecker

master_doc = "index"

project = "nvchecker"
copyright = "lilydjwg, et al."

version = release = nvchecker.__version__

extensions = [
    "sphinx.ext.autodoc",
    "sphinx.ext.doctest",
    "sphinx.ext.intersphinx",
    "sphinx.ext.viewcode",
]

primary_domain = "py"
default_role = "py:obj"

autodoc_member_order = "bysource"
autoclass_content = "both"
autodoc_inherit_docstrings = False

# Without this line sphinx includes a copy of object.__init__'s docstring
# on any class that doesn't define __init__.
# https://bitbucket.org/birkenfeld/sphinx/issue/1337/autoclass_content-both-uses-object__init__
autodoc_docstring_signature = False

intersphinx_mapping = {"python": ("https://docs.python.org/3", None)}

html_theme = "sphinx_rtd_theme"
on_rtd = os.environ.get("READTHEDOCS", None) == "True"

# On RTD we can't import sphinx_rtd_theme, but it will be applied by
# default anyway. This block will use the same theme when building locally
# as on RTD.
if not on_rtd:
    import sphinx_rtd_theme
    html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]

html_theme_options = {
    'collapse_navigation': False,
}
docs/index.rst (new file, 22 lines)
@@ -0,0 +1,22 @@
.. nvchecker documentation master file, created by
   sphinx-quickstart on Thu Sep 3 00:19:02 2020.
   You can adapt this file completely to your liking, but it should at least
   contain the root `toctree` directive.

Welcome to nvchecker's documentation!
=====================================

.. toctree::
   :maxdepth: 2

   usage
   plugin
   api


Indices and tables
==================

* :ref:`genindex`
* :ref:`modindex`
* :ref:`search`
docs/make.bat (new file, 35 lines)
@@ -0,0 +1,35 @@
@ECHO OFF

pushd %~dp0

REM Command file for Sphinx documentation

if "%SPHINXBUILD%" == "" (
	set SPHINXBUILD=sphinx-build
)
set SOURCEDIR=.
set BUILDDIR=_build

if "%1" == "" goto help

%SPHINXBUILD% >NUL 2>NUL
if errorlevel 9009 (
	echo.
	echo.The 'sphinx-build' command was not found. Make sure you have Sphinx
	echo.installed, then set the SPHINXBUILD environment variable to point
	echo.to the full path of the 'sphinx-build' executable. Alternatively you
	echo.may add the Sphinx directory to PATH.
	echo.
	echo.If you don't have Sphinx installed, grab it from
	echo.http://sphinx-doc.org/
	exit /b 1
)

%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%
goto end

:help
%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O%

:end
popd
docs/myrst2man.py (new executable file, 74 lines)
@@ -0,0 +1,74 @@
#!/usr/bin/python3

import time
import locale
import os
import sys
try:
  locale.setlocale(locale.LC_ALL, '')
except:
  pass

sys.path.insert(0, '..')
import nvchecker

from docutils.core import publish_cmdline, default_description
from docutils import nodes
from docutils.writers import manpage
from docutils.parsers.rst import roles

def ref_role(
  role, rawtext, text, lineno, inliner,
  options={}, content=[],
):
  node = nodes.reference(rawtext, text.title(), **options)
  return [node], []

def doc_role(
  role, rawtext, text, lineno, inliner,
  options={}, content=[],
):
  node = nodes.reference(rawtext, text, **options)
  return [node], []

roles.register_local_role('ref', ref_role)
roles.register_local_role('doc', doc_role)

class MyTranslator(manpage.Translator):
  def visit_image(self, node):
    raise nodes.SkipNode

  def visit_topic(self, node):
    self.body.append('\n')
    raise nodes.SkipNode

  def visit_title(self, node):
    try:
      super().visit_title(node)
    except nodes.SkipNode:
      if self.section_level == 0:
        self._docinfo['title'] = 'nvchecker'
        self._docinfo['subtitle'] = 'New version checker for software releases'
        self._docinfo['title_upper'] = 'nvchecker'.upper()
        self._docinfo['manual_section'] = '1'
        # Make the generated man page reproducible. Based on the patch from
        # https://sourceforge.net/p/docutils/patches/132/#5333
        source_date_epoch = os.environ.get('SOURCE_DATE_EPOCH')
        if source_date_epoch:
          self._docinfo['date'] = time.strftime('%Y-%m-%d', time.gmtime(int(source_date_epoch)))
        else:
          self._docinfo['date'] = time.strftime('%Y-%m-%d')
        self._docinfo['version'] = nvchecker.__version__
      raise

class MyWriter(manpage.Writer):
  def __init__(self):
    super().__init__()
    self.translator_class = MyTranslator

def main():
  description = ("Generates plain unix manual documents. " + default_description)
  publish_cmdline(writer=MyWriter(), description=description)

if __name__ == '__main__':
  main()
docs/plugin.rst (new file, 96 lines)
@@ -0,0 +1,96 @@
How to develop a source plugin for nvchecker
============================================

.. contents::
   :local:

Source plugins enable nvchecker to discover software version strings in
additional ways.

Where to put the plugins
------------------------

They are Python modules put in any directories named ``nvchecker_source`` in
``sys.path``. This is called namespace packages introduced by
`PEP 420 <https://www.python.org/dev/peps/pep-0420/>`_. For local use,
``~/.local/lib/pythonX.Y/site-packages/nvchecker_source`` is a good place, or
you can define the ``PYTHONPATH`` environment variable and put nvchecker source
plugins there inside a ``nvchecker_source`` directory.

Plugins are referenced by their names in the configuration file (``source = "xxx"``).
If multiple plugins have the same name, the first one in ``sys.path`` will be used.

How to write a simple plugin
----------------------------

For simple situations, you need to define an async function with the following signature::

  async def get_version(
    name: str, conf: Entry, *,
    cache: AsyncCache, keymanager: KeyManager,
    **kwargs,
  ) -> VersionResult:
    ...

Those types are imported from :mod:`nvchecker.api`.

``name`` is the table keys in the configuration file, and ``conf`` is a dict of
the content of that table. You should not modify this dict.

``cache`` is an :class:`AsyncCache <nvchecker.api.AsyncCache>` object that
caches results for you. Every plugin has its own ``cache`` object so that cache
keys won't conflict.

``keymanager`` is a :class:`KeyManager <nvchecker.api.KeyManager>` object that
you can call :meth:`.get_key(name) <nvchecker.api.KeyManager.get_key>` to get
the key (token) from the keyfile.

There may be additional keyword arguments in the future so ``**kwargs`` should be used.

If you want to send an HTTP request, it's preferred to use
:meth:`cache.get_json <nvchecker.api.AsyncCache.get_json>` or the
:data:`nvchecker.api.session` object. It will use the auto-selected HTTP backend and
handle the ``proxy`` option automatically.

For details about these objects, see :mod:`the API documentation <nvchecker.api>`,
or take existing source plugins as examples.

How to write a more powerful plugin
-----------------------------------

You may want more control in your source plugin, e.g. to do batch requests. To
do this, you provide a class instead::

  class Worker(BaseWorker):
    async def run(self) -> None:
      ...

You will have the following in the attributes::

  token_q: Queue[bool],
  result_q: Queue[RawResult],
  tasks: List[Tuple[str, Entry]],
  keymanager: KeyManager,

You are expected to process :attr:`tasks <nvchecker.api.BaseWorker.tasks>` and
put results in :attr:`result_q <nvchecker.api.BaseWorker.result_q>`. See
``nvchecker_source/none.py`` for the simplest example, and
``nvchecker_source/aur.py`` for a complete, batching example.

For details about these objects, see :mod:`the API documentation <nvchecker.api>`.

You can also receive a configuration section from the configuration as
``__config__.source.SOURCE_NAME``, where ``SOURCE_NAME`` is what your plugin is
called. This can be used to specify a mirror site for your plugin to use, e.g.
the ``npm`` plugin accepts the following config::

  [__config__.source.npm]
  registry = "https://registry.npm.taobao.org"

When such a configuration exists for your plugin, you need to define a function
named ``configure`` to receive it::

  def configure(config):
    '''use the "config" dict in some way'''
    ...
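Putting the pieces above together, a complete minimal plugin can be a single small module. The sketch below is illustrative (the plugin name ``example`` and its ``example`` configuration key are hypothetical); the signature and the :mod:`nvchecker.api` names are exactly those documented above::

  # nvchecker_source/example.py -- a hypothetical minimal source plugin
  from nvchecker.api import Entry, AsyncCache, KeyManager, VersionResult

  async def get_version(
    name: str, conf: Entry, *,
    cache: AsyncCache, keymanager: KeyManager,
    **kwargs,
  ) -> VersionResult:
    # Return the version given directly in the configuration entry;
    # a real plugin would fetch and parse something here instead.
    return conf.get('example', name)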
docs/requirements.txt (new file, 6 lines)
@@ -0,0 +1,6 @@
structlog
platformdirs
tornado>=6
sphinx>=3.2
# <5 has strange bottom margins for p, and no list indicators
sphinx-rtd-theme>=0.5
docs/usage.rst (new file, 1145 lines)
(file diff suppressed because it is too large)
keyfile.toml.enc (new file, 1 line)
@@ -0,0 +1 @@
(encrypted binary content not shown)
mypy.ini (new file, 31 lines)
@@ -0,0 +1,31 @@
[mypy]
warn_unused_configs = True
warn_redundant_casts = True
warn_unused_ignores = True
show_error_context = True
show_column_numbers = True
no_implicit_optional = True

[mypy-structlog]
ignore_missing_imports = True

[mypy-pyalpm]
ignore_missing_imports = True

[mypy-flaky]
ignore_missing_imports = True

[mypy-pytest_httpbin]
ignore_missing_imports = True

[mypy-lxml]
ignore_missing_imports = True

[mypy-tomllib]
ignore_missing_imports = True

[mypy-jq]
ignore_missing_imports = True

[mypy-tomli]
ignore_missing_imports = True
nvchecker/__init__.py (new file, 4 lines)
@@ -0,0 +1,4 @@
# MIT licensed
# Copyright (c) 2013-2024 lilydjwg <lilydjwg@gmail.com>, et al.

__version__ = '2.17dev'
nvchecker/__main__.py (new executable file, 112 lines)
@@ -0,0 +1,112 @@
#!/usr/bin/env python3
# MIT licensed
# Copyright (c) 2013-2024 lilydjwg <lilydjwg@gmail.com>, et al.

from __future__ import annotations

import sys
import argparse
import asyncio
from typing import Coroutine, Tuple
from pathlib import Path

import structlog

from . import core
from .util import ResultData, RawResult, KeyManager, EntryWaiter
from .ctxvars import proxy as ctx_proxy

logger = structlog.get_logger(logger_name=__name__)

def main() -> None:
  parser = argparse.ArgumentParser(description='New version checker for software')
  parser.add_argument('-k', '--keyfile',
                      metavar='FILE', type=str,
                      help='use specified keyfile (override the one in configuration file)')
  parser.add_argument('-t', '--tries', default=1, type=int, metavar='N',
                      help='try N times when network errors occur')
  parser.add_argument('--failures', action='store_true',
                      help='exit with code 3 if failures / errors happen during checking')
  parser.add_argument('-e', '--entry', type=str,
                      help='only execute on specified entry (useful for debugging)')
  core.add_common_arguments(parser)
  args = parser.parse_args()
  if core.process_common_arguments(args):
    return

  try:
    entries, options = core.load_file(
      args.file, use_keymanager=not bool(args.keyfile))

    if args.entry:
      if args.entry not in entries:
        sys.exit('Specified entry not found in config')
      entries = {args.entry: entries[args.entry]}

    if args.keyfile:
      keymanager = KeyManager(Path(args.keyfile))
    else:
      keymanager = options.keymanager
  except core.FileLoadError as e:
    sys.exit(str(e))

  if options.proxy is not None:
    ctx_proxy.set(options.proxy)

  task_sem = asyncio.Semaphore(options.max_concurrency)
  result_q: asyncio.Queue[RawResult] = asyncio.Queue()
  dispatcher = core.setup_httpclient(
    options.max_concurrency,
    options.httplib,
    options.http_timeout,
  )
  entry_waiter = EntryWaiter()
  try:
    futures = dispatcher.dispatch(
      entries, task_sem, result_q,
      keymanager, entry_waiter,
      args.tries,
      options.source_configs,
    )
  except ModuleNotFoundError as e:
    sys.exit(f'Error: {e}')

  if options.ver_files is not None:
    oldvers = core.read_verfile(options.ver_files[0])
  else:
    oldvers = {}
  result_coro = core.process_result(oldvers, result_q, entry_waiter, verbose=bool(args.entry))
  runner_coro = core.run_tasks(futures)

  if sys.version_info >= (3, 10):
    # Python 3.10 has deprecated asyncio.get_event_loop
    results, has_failures = asyncio.run(run(result_coro, runner_coro))
  else:
    # Python < 3.10 will create an eventloop when asyncio.Queue is initialized
    results, has_failures = asyncio.get_event_loop().run_until_complete(run(result_coro, runner_coro))

  if options.ver_files is not None:
    newverf = options.ver_files[1]
    if args.entry:
      # don't remove other entries when only one entry is specified on cmdline
      vers = core.read_verfile(newverf)
    else:
      vers = {}
    vers.update(results)
    core.write_verfile(newverf, vers)

  if args.failures and has_failures:
    sys.exit(3)

async def run(
  result_coro: Coroutine[None, None, Tuple[ResultData, bool]],
  runner_coro: Coroutine[None, None, None],
) -> Tuple[ResultData, bool]:
  result_fu = asyncio.create_task(result_coro)
  runner_fu = asyncio.create_task(runner_coro)
  await runner_fu
  result_fu.cancel()
  return await result_fu

if __name__ == '__main__':
  main()
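The ``run()`` helper above relies on a small asyncio pattern worth spelling out: the result-collector task is cancelled once the runner finishes, and is expected to catch the cancellation and return whatever it has gathered so far (which is what ``core.process_result`` appears to do for the result queue). A self-contained sketch of that pattern, with all names here being illustrative rather than taken from nvchecker::

  import asyncio

  async def collector(q: asyncio.Queue) -> list:
    results = []
    try:
      while True:
        results.append(await q.get())
    except asyncio.CancelledError:
      # swallow the cancellation and hand back what was collected
      return results

  async def main() -> None:
    q: asyncio.Queue = asyncio.Queue()
    task = asyncio.create_task(collector(q))
    for i in range(3):
      await q.put(i)
    await asyncio.sleep(0)  # let the collector drain the queue
    task.cancel()
    print(await task)       # [0, 1, 2]

  asyncio.run(main())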
nvchecker/api.py (new file, 11 lines)
@@ -0,0 +1,11 @@
|
|||
# MIT licensed
|
||||
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.
|
||||
|
||||
from .httpclient import session, TemporaryError, HTTPError
|
||||
from .util import (
|
||||
Entry, BaseWorker, RawResult, VersionResult, RichResult,
|
||||
AsyncCache, KeyManager, GetVersionError, EntryWaiter,
|
||||
)
|
||||
from .sortversion import sort_version_keys
|
||||
|
||||
from .ctxvars import tries, proxy, user_agent, httptoken, entry_waiter, verify_cert
|
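Everything a source plugin needs is re-exported from this module. As a hedged sketch (the module name `example`, the URL, and the JSON shape are invented; the `get_version` signature follows the function-worker convention used by core.py below), a minimal function-style source could look like:

```python
# nvchecker_source/example.py -- a hypothetical source plugin
from nvchecker.api import session, GetVersionError, RichResult

async def get_version(name, conf, *, cache, **kwargs):
  url = conf.get('url', 'https://example.com/latest.json')
  res = await session.get(url)          # proxied to the configured backend
  data = res.json()
  if 'version' not in data:
    raise GetVersionError('no version in response')
  return RichResult(version=data['version'], url=url)
```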
nvchecker/core.py (new file, 461 lines)
@@ -0,0 +1,461 @@
# MIT licensed
# Copyright (c) 2013-2020, 2024 lilydjwg <lilydjwg@gmail.com>, et al.

from __future__ import annotations

import os
import sys
import asyncio
from asyncio import Queue
import logging
import argparse
from typing import (
  Tuple, NamedTuple, Optional, List, Union,
  cast, Dict, Awaitable, Sequence, Any,
  TYPE_CHECKING,
)
import types
from pathlib import Path
from importlib import import_module
import re
import contextvars
import json
import dataclasses

import structlog

if TYPE_CHECKING:
  import tomli as tomllib
else:
  try:
    import tomllib
  except ModuleNotFoundError:
    import tomli as tomllib

import platformdirs

from .lib import nicelogger
from . import slogconf
from .util import (
  Entry, Entries, KeyManager, RawResult, RichResult, ResultData,
  FunctionWorker, GetVersionError,
  FileLoadError, EntryWaiter,
)
from . import __version__
from .sortversion import sort_version_keys
from .ctxvars import tries as ctx_tries
from .ctxvars import entry_waiter as ctx_entry_waiter
from . import httpclient

logger = structlog.get_logger(logger_name=__name__)

def get_default_config() -> str:
  confdir = platformdirs.user_config_dir(appname='nvchecker')
  file = os.path.join(confdir, 'nvchecker.toml')
  return file

def add_common_arguments(parser: argparse.ArgumentParser) -> None:
  parser.add_argument('-l', '--logging',
                      choices=('debug', 'info', 'warning', 'error'), default='info',
                      help='logging level (default: info)')
  parser.add_argument('--logger', default='pretty',
                      choices=['pretty', 'json', 'both'],
                      help='select which logger to use')
  parser.add_argument('--json-log-fd', metavar='FD',
                      type=lambda fd: os.fdopen(int(fd), mode='w'),
                      help='specify fd to send json logs to. stdout by default')
  parser.add_argument('-V', '--version', action='store_true',
                      help='show version and exit')
  default_config = get_default_config()
  parser.add_argument('-c', '--file',
                      metavar='FILE', type=str,
                      default=default_config,
                      help=f'software version configuration file [default: {default_config}]')

def process_common_arguments(args: argparse.Namespace) -> bool:
  '''return True if should stop'''
  processors = [
    slogconf.exc_info,
    slogconf.filter_exc,
    slogconf.filter_nones,
    slogconf.filter_taskname,
  ]
  logger_factory = None

  if args.logger in ['pretty', 'both']:
    slogconf.fix_logging()
    nicelogger.enable_pretty_logging(
      getattr(logging, args.logging.upper()))
    processors.append(slogconf.stdlib_renderer)
    if args.logger == 'pretty':
      logger_factory=structlog.PrintLoggerFactory(
        file=open(os.devnull, 'w'),
      )
      processors.append(slogconf.null_renderer)
  if args.logger in ['json', 'both']:
    processors.extend([
      structlog.processors.format_exc_info,
      slogconf.json_renderer,
    ])

  if logger_factory is None:
    logfile = args.json_log_fd or sys.stdout
    logger_factory = structlog.PrintLoggerFactory(file=logfile)

  structlog.configure(
    processors = processors,
    logger_factory = logger_factory,
  )

  if args.version:
    progname = os.path.basename(sys.argv[0])
    print(f'{progname} v{__version__}')
    return True
  return False

def safe_overwrite(file: Path, data: Union[bytes, str], *,
                   method: str = 'write', mode: str = 'w', encoding: Optional[str] = None) -> None:
  # FIXME: directory has no read perm
  # FIXME: hard links
  resolved_path = file.resolve()
  tmpname = str(resolved_path) + '.tmp'
  # if not using "with", write can fail without exception
  with open(tmpname, mode, encoding=encoding) as f:
    getattr(f, method)(data)
    # see also: https://thunk.org/tytso/blog/2009/03/15/dont-fear-the-fsync/
    f.flush()
    os.fsync(f.fileno())
  # if the above write failed (because disk is full etc), the old data should be kept
  os.rename(tmpname, resolved_path)

def read_verfile(file: Path) -> ResultData:
  try:
    with open(file) as f:
      data = f.read()
  except FileNotFoundError:
    return {}

  try:
    v = json.loads(data)
  except json.decoder.JSONDecodeError:
    # old format
    v = {}
    for l in data.splitlines():
      name, ver = l.rstrip().split(None, 1)
      v[name] = ver

  if v.get('version') is None:
    v = {k: RichResult(version=a) for k, a in v.items()}
  elif v['version'] == 2:
    v = {k: RichResult(**a) for k, a in v['data'].items()}
  else:
    raise Exception('unknown verfile version', v['version'])

  return v

def write_verfile(file: Path, versions: ResultData) -> None:
  d = {
    'version': 2,
    # sort and indent to make it friendly to human and git
    'data': dict(sorted(versions.items())),
  }
  data = json.dumps(
    d,
    indent = 2,
    ensure_ascii = False,
    default = json_encode,
  ) + '\n'
  safe_overwrite(file, data)

def json_encode(obj):
  if isinstance(obj, RichResult):
    d = {k: v for k, v in dataclasses.asdict(obj).items() if v is not None}
    return d
  raise TypeError(obj)

class Options(NamedTuple):
  ver_files: Optional[Tuple[Path, Path]]
  max_concurrency: int
  proxy: Optional[str]
  keymanager: KeyManager
  source_configs: Dict[str, Dict[str, Any]]
  httplib: Optional[str]
  http_timeout: int

def load_file(
  file: str, *,
  use_keymanager: bool,
) -> Tuple[Entries, Options]:
  try:
    with open(file, 'rb') as f:
      config = tomllib.load(f)
  except (OSError, tomllib.TOMLDecodeError) as e:
    raise FileLoadError('version configuration file', file, e)

  ver_files: Optional[Tuple[Path, Path]] = None
  keymanager = KeyManager(None)
  source_configs = {}

  if '__config__' in config:
    c = config.pop('__config__')
    d = Path(file).parent

    if 'oldver' in c and 'newver' in c:
      oldver_s = os.path.expandvars(
        os.path.expanduser(c.get('oldver')))
      oldver = d / oldver_s
      newver_s = os.path.expandvars(
        os.path.expanduser(c.get('newver')))
      newver = d / newver_s
      ver_files = oldver, newver

    if use_keymanager:
      keyfile = c.get('keyfile')
      if keyfile:
        keyfile_s = os.path.expandvars(
          os.path.expanduser(c.get('keyfile')))
        keyfile = d / keyfile_s
        keymanager = KeyManager(keyfile)

    if 'source' in c:
      source_configs = c['source']

    max_concurrency = c.get('max_concurrency', 20)
    proxy = c.get('proxy')
    httplib = c.get('httplib', None)
    http_timeout = c.get('http_timeout', 20)
  else:
    max_concurrency = 20
    proxy = None
    httplib = None
    http_timeout = 20

  return cast(Entries, config), Options(
    ver_files, max_concurrency, proxy, keymanager,
    source_configs, httplib, http_timeout,
  )

def setup_httpclient(
  max_concurrency: int = 20,
  httplib: Optional[str] = None,
  http_timeout: int = 20,
) -> Dispatcher:
  httplib_ = httplib or httpclient.find_best_httplib()
  httpclient.setup(
    httplib_, max_concurrency, http_timeout)
  return Dispatcher()

class Dispatcher:
  def dispatch(
    self,
    entries: Entries,
    task_sem: asyncio.Semaphore,
    result_q: Queue[RawResult],
    keymanager: KeyManager,
    entry_waiter: EntryWaiter,
    tries: int,
    source_configs: Dict[str, Dict[str, Any]],
  ) -> List[asyncio.Future]:
    mods: Dict[str, Tuple[types.ModuleType, List]] = {}
    ctx_tries.set(tries)
    ctx_entry_waiter.set(entry_waiter)
    root_ctx = contextvars.copy_context()

    for name, entry in entries.items():
      source = entry.get('source', 'none')
      if source not in mods:
        mod = import_module('nvchecker_source.' + source)
        tasks: List[Tuple[str, Entry]] = []
        mods[source] = mod, tasks
        config = source_configs.get(source)
        if config and getattr(mod, 'configure'):
          mod.configure(config)
      else:
        tasks = mods[source][1]
      tasks.append((name, entry))

    ret = []
    for mod, tasks in mods.values():
      if hasattr(mod, 'Worker'):
        worker_cls = mod.Worker
      else:
        worker_cls = FunctionWorker

      ctx = root_ctx.copy()
      worker = ctx.run(
        worker_cls,
        task_sem, result_q, tasks, keymanager,
      )
      if worker_cls is FunctionWorker:
        func = mod.get_version
        ctx.run(worker.initialize, func)

      ret.append(ctx.run(worker._run_maynot_raise))

    return ret

def substitute_version(
  version: str, conf: Entry,
) -> str:
  '''
  Substitute the version string via defined rules in the configuration file.
  See usage.rst#global-options for details.
  '''
  prefix = conf.get('prefix')
  if prefix:
    if version.startswith(prefix):
      version = version[len(prefix):]

  from_pattern = conf.get('from_pattern')
  if from_pattern:
    to_pattern = conf.get('to_pattern')
    if to_pattern is None:
      raise ValueError("from_pattern exists but to_pattern doesn't")

    version = re.sub(from_pattern, to_pattern, version)

  return version

def apply_list_options(
  versions: List[Union[str, RichResult]],
  conf: Entry,
  name: str,
) -> Optional[Union[str, RichResult]]:
  pattern = conf.get('include_regex')
  if versions and pattern:
    re_pat = re.compile(pattern)
    versions2 = [x for x in versions
                 if re_pat.fullmatch(str(x))]
    if not versions2:
      logger.warning('include_regex matched no versions',
                     name=name, versions=versions, regex=pattern)
      return None
    versions = versions2

  pattern = conf.get('exclude_regex')
  if pattern:
    re_pat = re.compile(pattern)
    versions = [x for x in versions
                if not re_pat.fullmatch(str(x))]

  ignored = set(conf.get('ignored', '').split())
  if ignored:
    versions = [x for x in versions
                if str(x) not in ignored]

  if not versions:
    return None

  sort_version_key = sort_version_keys[
    conf.get("sort_version_key", "parse_version")]
  versions.sort(key=lambda version: sort_version_key(str(version)))  # type: ignore

  return versions[-1]

def _process_result(r: RawResult) -> Union[RichResult, Exception]:
  version = r.version
  conf = r.conf
  name = r.name

  url = None
  revision = None
  gitref = None
  if isinstance(version, GetVersionError):
    kw = version.kwargs
    kw['name'] = name
    logger.error(version.msg, **kw)
    return version
  elif isinstance(version, Exception):
    logger.error('unexpected error happened',
                 name=r.name, exc_info=r.version)
    return version
  elif isinstance(version, list):
    version_str = apply_list_options(version, conf, name)
    if isinstance(version_str, RichResult):
      url = version_str.url
      gitref = version_str.gitref
      revision = version_str.revision
      version_str = version_str.version
  elif isinstance(version, RichResult):
    version_str = version.version
    url = version.url
    gitref = version.gitref
    revision = version.revision
  else:
    version_str = version

  if version_str:
    version_str = version_str.replace('\n', ' ')

    try:
      version_str = substitute_version(version_str, conf)
      return RichResult(
        version = version_str,
        url = url,
        gitref = gitref,
        revision = revision,
      )
    except (ValueError, re.error) as e:
      logger.exception('error occurred in version substitutions', name=name)
      return e

  else:
    return ValueError('no version returned')

def check_version_update(
  oldvers: ResultData,
  name: str,
  r: RichResult,
  verbose: bool,
) -> None:
  if old_result := oldvers.get(name):
    oldver = old_result.version
  else:
    oldver = None
  if not oldver or oldver != r.version:
    logger.info(
      'updated',
      name = name,
      version = r.version,
      revision = r.revision,
      old_version = oldver,
      url = r.url,
    )
  else:
    # provide visible user feedback if it was the only entry
    level = logging.INFO if verbose else logging.DEBUG
    logger.log(level, 'up-to-date', name=name, version=r.version, url=r.url)

async def process_result(
  oldvers: ResultData,
  result_q: Queue[RawResult],
  entry_waiter: EntryWaiter,
  verbose: bool = False,
) -> Tuple[ResultData, bool]:
  ret = {}
  has_failures = False
  try:
    while True:
      r = await result_q.get()
      try:
        r1 = _process_result(r)
      except Exception as e:
        logger.exception('error processing result', result=r)
        r1 = e
      if isinstance(r1, Exception):
        entry_waiter.set_exception(r.name, r1)
        # no versions are returned from "apply_list_options"?
        logger.error('no-result', name=r.name, error=repr(r1))
        has_failures = True
        continue
      check_version_update(oldvers, r.name, r1, verbose)
      entry_waiter.set_result(r.name, r1.version)
      ret[r.name] = r1
  except asyncio.CancelledError:
    return ret, has_failures

async def run_tasks(
  futures: Sequence[Awaitable[None]]
) -> None:
  for fu in asyncio.as_completed(futures):
    await fu
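As a quick illustration of `substitute_version` above, with a made-up entry configuration (assuming `nvchecker.core` is importable):

```python
from nvchecker.core import substitute_version

# 'prefix' is stripped first, then from_pattern/to_pattern is applied
conf = {'prefix': 'v', 'from_pattern': r'(\d+)_(\d+)', 'to_pattern': r'\1.\2'}
print(substitute_version('v1_2', conf))  # -> 1.2
```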
nvchecker/ctxvars.py (new file, 21 lines)
@@ -0,0 +1,21 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.

from __future__ import annotations

from contextvars import ContextVar
from typing import Optional, TYPE_CHECKING

from . import __version__

DEFAULT_USER_AGENT = f'lilydjwg/nvchecker {__version__}'

if TYPE_CHECKING:
  from .util import EntryWaiter

tries = ContextVar('tries', default=1)
proxy: ContextVar[Optional[str]] = ContextVar('proxy', default=None)
user_agent = ContextVar('user_agent', default=DEFAULT_USER_AGENT)
httptoken = ContextVar('httptoken', default=None)
entry_waiter: ContextVar[EntryWaiter] = ContextVar('entry_waiter')
verify_cert = ContextVar('verify_cert', default=True)
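These variables are what allows `Dispatcher.dispatch` in core.py to run each worker in a copied context with its own `tries` and `entry_waiter`: values set via `ctx.run` stay private to that context. A standalone sketch of that isolation, using only the standard library:

```python
import contextvars

tries = contextvars.ContextVar('tries', default=1)

def report() -> int:
  return tries.get()

ctx = contextvars.copy_context()
ctx.run(tries.set, 5)     # set only inside the copied context
print(ctx.run(report))    # 5
print(tries.get())        # 1: the outer context is untouched
```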
(deleted file, 124 lines)
@@ -1,124 +0,0 @@
import re
import sre_constants
import logging
from functools import partial
import queue
import json
import urllib.parse

from pkg_resources import parse_version
from tornado.httpclient import AsyncHTTPClient
import tornado.process
from tornado.ioloop import IOLoop

logger = logging.getLogger(__name__)
handler_precedence = ('github', 'aur', 'cmd', 'regex')

try:
  import pycurl
  AsyncHTTPClient.configure("tornado.curl_httpclient.CurlAsyncHTTPClient")
except ImportError:
  pycurl = None

def get_version(name, conf, callback):
  g = globals()
  for key in handler_precedence:
    if key in conf:
      funcname = 'get_version_by_' + key
      g[funcname](name, conf, callback)
      break
  else:
    logger.error('%s: no idea to get version info.', name)
    callback(name, None)

def get_version_by_regex(name, conf, callback):
  try:
    r = re.compile(conf['regex'])
  except sre_constants.error:
    logger.warn('%s: bad regex, skipped.', name, exc_info=True)
    callback(name, None)
    return

  encoding = conf.get('encoding', 'latin1')
  httpclient = AsyncHTTPClient()

  kwargs = {}
  if conf.get('proxy'):
    if pycurl:
      host, port = urllib.parse.splitport(conf['proxy'])
      kwargs['proxy_host'] = host
      kwargs['proxy_port'] = int(port)
    else:
      logger.warn('%s: proxy set but not used because pycurl is unavailable.', name)

  httpclient.fetch(conf['url'], partial(
    _get_version_by_regex, name, r, encoding, callback
  ), **kwargs)

def _get_version_by_regex(name, regex, encoding, callback, res):
  body = res.body.decode(encoding)
  try:
    version = max(regex.findall(body), key=parse_version)
  except ValueError:
    logger.error('%s: version string not found.', name)
    callback(name, None)
  else:
    callback(name, version)

AUR_URL = 'https://aur.archlinux.org/rpc.php?type=info&arg='

def get_version_by_aur(name, conf, callback):
  aurname = conf.get('aur') or name
  url = AUR_URL + aurname
  AsyncHTTPClient().fetch(url, partial(_aur_done, name, callback))

def _aur_done(name, callback, res):
  data = json.loads(res.body.decode('utf-8'))
  version = data['results']['Version']
  callback(name, version)

GITHUB_URL = 'https://api.github.com/repos/%s/commits'

def get_version_by_github(name, conf, callback):
  repo = conf.get('github')
  url = GITHUB_URL % repo
  AsyncHTTPClient().fetch(url, user_agent='lilydjwg/nvchecker',
                          callback=partial(_github_done, name, callback))

def _github_done(name, callback, res):
  data = json.loads(res.body.decode('utf-8'))
  version = data[0]['commit']['committer']['date'].split('T', 1)[0].replace('-', '')
  callback(name, version)

cmd_q = queue.Queue()
cmd_q.running = False

def get_version_by_cmd(name, conf, callback):
  cmd = conf['cmd']
  cmd_q.put((name, cmd, callback))
  if not cmd_q.running:
    _run_command()

def _run_command():
  cmd_q.running = True
  try:
    name, cmd, callback = cmd_q.get_nowait()
  except queue.Empty:
    cmd_q.running = False
    return

  p = tornado.process.Subprocess(cmd, shell=True, io_loop=IOLoop.instance(),
                                 stdout=tornado.process.Subprocess.STREAM)
  p.set_exit_callback(partial(_command_done, name, callback, p))

def _command_done(name, callback, process, status):
  if status != 0:
    logger.error('%s: command exited with %d.', name, status)
    callback(name, None)
  else:
    process.stdout.read_until_close(partial(_got_version_from_cmd, callback, name))
  _run_command()

def _got_version_from_cmd(callback, name, output):
  output = output.strip().decode('latin1')
  callback(name, output)
nvchecker/httpclient/__init__.py (new file, 55 lines)
@@ -0,0 +1,55 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from typing import Optional

from .base import TemporaryError, HTTPError

class Proxy:
  _obj = None

  def set_obj(self, obj):
    super().__setattr__('_obj', obj)

  def __getattr__(self, name):
    return getattr(self._obj, name)

  def __setattr__(self, name, value):
    return setattr(self._obj, name, value)

session = Proxy()

def setup(
  which: Optional[str] = None,
  concurreny: int = 20,
  timeout: int = 20,
) -> None:
  if which is None:
    which = find_best_httplib()

  m = __import__(
    '%s_httpclient' % which, globals(), locals(), level=1)

  session.set_obj(m.session)
  session.setup(concurreny, timeout)

def find_best_httplib() -> str:
  try:
    import tornado, pycurl
    # connection reuse, http/2
    which = 'tornado'
  except ImportError:
    try:
      import aiohttp
      which = 'aiohttp'
      # connection reuse
    except ImportError:
      try:
        import httpx
        which = 'httpx'
      except ImportError:
        import tornado
        which = 'tornado'
        # fallback

  return which
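The `Proxy` object exists so that modules can do `from .httpclient import session` at import time and still get whichever backend `setup()` selects later; attribute access is forwarded to the real session once `set_obj` runs. A condensed standalone sketch of the same late-binding trick (the `Backend` class is invented for illustration, and `__setattr__` forwarding is omitted):

```python
class Proxy:
  _obj = None

  def set_obj(self, obj):
    super().__setattr__('_obj', obj)

  def __getattr__(self, name):
    # only reached for attributes not found on Proxy itself
    return getattr(self._obj, name)

session = Proxy()  # importable before any backend exists

class Backend:
  def get(self, url):
    return f'GET {url}'

session.set_obj(Backend())
print(session.get('https://example.com'))  # forwarded to Backend.get
```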
nvchecker/httpclient/aiohttp_httpclient.py (new file, 85 lines)
@@ -0,0 +1,85 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import asyncio
from typing import Optional, Dict

import structlog
import aiohttp

from .base import BaseSession, TemporaryError, Response, HTTPError

__all__ = ['session']

logger = structlog.get_logger(logger_name=__name__)

class AiohttpSession(BaseSession):
  session = None

  def setup(
    self,
    concurreny: int = 20,
    timeout: int = 20,
  ) -> None:
    self._concurreny = concurreny
    self._timeout = timeout

  async def request_impl(
    self, url: str, *,
    method: str,
    proxy: Optional[str] = None,
    headers: Dict[str, str] = {},
    follow_redirects: bool = True,
    params = (),
    json = None,
    body = None,
    verify_cert: bool = True,
  ) -> Response:
    if self.session is None:
      # need to create in async context
      self.session = aiohttp.ClientSession(
        connector = aiohttp.TCPConnector(limit=self._concurreny),
        timeout = aiohttp.ClientTimeout(total=self._timeout),
        trust_env = True,
      )

    kwargs = {
      'headers': headers,
      'params': params,
      'allow_redirects': follow_redirects,
    }
    if not verify_cert:
      kwargs['ssl'] = False

    if proxy is not None:
      kwargs['proxy'] = proxy
    if body is not None:
      # Make sure all backends have the same default encoding for post data.
      if 'Content-Type' not in headers:
        headers = {**headers, 'Content-Type': 'application/x-www-form-urlencoded'}
        kwargs['headers'] = headers
      kwargs['data'] = body.encode()
    elif json is not None:
      kwargs['json'] = json

    try:
      logger.debug('send request', method=method, url=url, kwargs=kwargs)
      res = await self.session.request(
        method, url, **kwargs)
    except (
      asyncio.TimeoutError, aiohttp.ClientConnectorError,
    ) as e:
      raise TemporaryError(599, repr(e), e)

    err_cls: Optional[type] = None
    if res.status >= 500:
      err_cls = TemporaryError
    elif res.status >= 400:
      err_cls = HTTPError
    if err_cls is not None:
      raise err_cls(res.status, res.reason, res)

    body = await res.content.read()
    return Response(res.headers, body)

session = AiohttpSession()
nvchecker/httpclient/base.py (new file, 124 lines)
@@ -0,0 +1,124 @@
# MIT licensed
# Copyright (c) 2019-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import structlog
from typing import Optional, Dict, Mapping
import json as _json

from ..ctxvars import tries, proxy, user_agent, httptoken, verify_cert

logger = structlog.get_logger(logger_name=__name__)

class Response:
  '''The response of an HTTP request.

  .. py:attribute:: body
     :type: bytes

  .. py:attribute:: headers
     :type: Mapping[str, str]
  '''
  def __init__(
    self,
    headers: Mapping[str, str],
    body: bytes,
  ) -> None:
    self.headers = headers
    self.body = body

  def json(self):
    '''Convert response content to JSON.'''
    return _json.loads(self.body.decode('utf-8'))

class BaseSession:
  '''The base class for the different HTTP backends.'''
  def setup(
    self,
    concurreny: int = 20,
    timeout: int = 20,
  ) -> None:
    pass

  async def head(self, *args, **kwargs):
    '''Shortcut for ``HEAD`` request.'''
    return await self.request(
      method='HEAD', *args, **kwargs)

  async def get(self, *args, **kwargs):
    '''Shortcut for ``GET`` request.'''
    return await self.request(
      method='GET', *args, **kwargs)

  async def post(self, *args, **kwargs):
    '''Shortcut for ``POST`` request.'''
    return await self.request(
      method='POST', *args, **kwargs)

  async def request(
    self, url: str, *,
    method: str,
    headers: Dict[str, str] = {},
    follow_redirects: bool = True,
    params = (),
    json = None,
    body = None,
  ) -> Response:
    t = tries.get()
    p = proxy.get()
    ua = user_agent.get()
    httpt = httptoken.get()
    verify = verify_cert.get()

    headers = headers.copy()
    headers.setdefault('User-Agent', ua)
    if httpt is not None:
      headers.setdefault('Authorization', httpt)

    for i in range(1, t+1):
      try:
        return await self.request_impl(
          url,
          method = method,
          headers = headers,
          params = params,
          follow_redirects = follow_redirects,
          json = json,
          body = body,
          proxy = p or None,
          verify_cert = verify,
        )
      except TemporaryError as e:
        if i == t:
          raise
        else:
          logger.warning('temporary error, retrying',
                         tries = i, exc_info = e)
          continue

    raise Exception('should not reach')

  async def request_impl(
    self, url: str, *,
    method: str,
    proxy: Optional[str] = None,
    headers: Dict[str, str] = {},
    follow_redirects: bool = True,
    params = (),
    json = None,
    body = None,
    verify_cert: bool = True,
  ) -> Response:
    ''':meta private:'''
    raise NotImplementedError

class BaseHTTPError(Exception):
  def __init__(self, code, message, response):
    self.code = code
    self.message = message
    self.response = response

class TemporaryError(BaseHTTPError):
  '''A temporary error (e.g. network error) happens.'''

class HTTPError(BaseHTTPError):
  '''An HTTP 4xx error happens.'''
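Note the retry semantics of `request` above: only `TemporaryError` (network failures and 5xx responses) is retried, up to the `tries` context value, and the error is re-raised on the final attempt; `HTTPError` (4xx) is never retried. A condensed synchronous sketch of that loop (the flaky `fetch` stands in for `request_impl`):

```python
class TemporaryError(Exception):
  pass

attempts = 0

def fetch() -> str:
  global attempts
  attempts += 1
  if attempts < 3:
    raise TemporaryError('flaky network')
  return 'ok'

def request(tries: int) -> str:
  for i in range(1, tries + 1):
    try:
      return fetch()
    except TemporaryError:
      if i == tries:
        raise  # last attempt: propagate to the caller
      # otherwise log and retry
  raise Exception('should not reach')

print(request(3))  # succeeds on the third attempt
```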
nvchecker/httpclient/httpx_httpclient.py (new file, 78 lines)
@@ -0,0 +1,78 @@
# MIT licensed
# Copyright (c) 2020-2022,2024 lilydjwg <lilydjwg@gmail.com>, et al.

from typing import Dict, Optional, Tuple

import httpx

from .base import BaseSession, TemporaryError, Response, HTTPError

__all__ = ['session']

class HttpxSession(BaseSession):
  def setup(
    self,
    concurreny: int = 20,
    timeout: int = 20,
  ) -> None:
    self.clients: Dict[Tuple[Optional[str], bool], httpx.AsyncClient] = {}
    self.timeout = timeout

  async def request_impl(
    self, url: str, *,
    method: str,
    proxy: Optional[str] = None,
    headers: Dict[str, str] = {},
    follow_redirects: bool = True,
    params = (),
    json = None,
    body = None,
    verify_cert: bool = True,
  ) -> Response:
    client = self.clients.get((proxy, verify_cert))
    if not client:
      client = httpx.AsyncClient(
        timeout = httpx.Timeout(self.timeout, pool=None),
        http2 = True,
        proxy = proxy,
        verify = verify_cert,
      )
      self.clients[(proxy, verify_cert)] = client

    try:
      if body is not None:
        # Make sure all backends have the same default encoding for post data.
        if 'Content-Type' not in headers:
          headers = {**headers, 'Content-Type': 'application/x-www-form-urlencoded'}
        body = body.encode()
      r = await client.request(
        method, url, json = json, content = body,
        headers = headers,
        follow_redirects = follow_redirects,
        # httpx checks for None but not ()
        params = params or None,
      )
      err_cls: Optional[type] = None
      if r.status_code >= 500:
        err_cls = TemporaryError
      elif r.status_code >= 400:
        err_cls = HTTPError
      if err_cls is not None:
        raise err_cls(
          r.status_code,
          r.reason_phrase,
          r,
        )

    except httpx.TransportError as e:
      raise TemporaryError(599, repr(e), e)

    body = await r.aread()
    return Response(r.headers, body)

  async def aclose(self):
    for client in self.clients.values():
      await client.aclose()
    del self.clients

session = HttpxSession()
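Since an `httpx.AsyncClient` fixes its proxy and TLS-verification settings at construction time, the session above memoizes one client per `(proxy, verify_cert)` pair rather than rebuilding a client per request. The same caching shape, sketched without httpx:

```python
from typing import Dict, Optional, Tuple

class Client:
  def __init__(self, proxy: Optional[str], verify: bool) -> None:
    self.proxy, self.verify = proxy, verify

clients: Dict[Tuple[Optional[str], bool], Client] = {}

def get_client(proxy: Optional[str], verify: bool) -> Client:
  key = (proxy, verify)
  if key not in clients:
    clients[key] = Client(proxy, verify)  # created once per configuration
  return clients[key]

assert get_client(None, True) is get_client(None, True)
```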
nvchecker/httpclient/tornado_httpclient.py (new file, 103 lines)
@@ -0,0 +1,103 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import json as _json
from urllib.parse import urlencode
from typing import Optional, Dict, Any
import os

from tornado.httpclient import AsyncHTTPClient, HTTPRequest

try:
  import pycurl
except ImportError:
  pycurl = None  # type: ignore

from .base import BaseSession, TemporaryError, Response, HTTPError

__all__ = ['session']

HTTP2_AVAILABLE = None if pycurl else False
SSL_CERT_FILE = os.environ.get('SSL_CERT_FILE')

def setup_curl(curl):
  global HTTP2_AVAILABLE
  if HTTP2_AVAILABLE is None:
    try:
      curl.setopt(pycurl.HTTP_VERSION, 4)
      HTTP2_AVAILABLE = True
    except pycurl.error:
      HTTP2_AVAILABLE = False
  elif HTTP2_AVAILABLE:
    curl.setopt(pycurl.HTTP_VERSION, 4)

  if SSL_CERT_FILE:
    curl.setopt_string(pycurl.CAINFO, SSL_CERT_FILE)
  curl.setopt_string(pycurl.ACCEPT_ENCODING, "")

class TornadoSession(BaseSession):
  def setup(
    self,
    concurreny: int = 20,
    timeout: int = 20,
  ) -> None:
    impl: Optional[str]
    if pycurl:
      impl = "tornado.curl_httpclient.CurlAsyncHTTPClient"
    else:
      impl = None
    AsyncHTTPClient.configure(
      impl, max_clients = concurreny)
    self.timeout = timeout

  async def request_impl(
    self, url: str, *,
    method: str,
    proxy: Optional[str] = None,
    headers: Dict[str, str] = {},
    follow_redirects: bool = True,
    params = (),
    json = None,
    body = None,
    verify_cert: bool = True,
  ) -> Response:
    kwargs: Dict[str, Any] = {
      'method': method,
      'headers': headers,
      'request_timeout': self.timeout,
      'follow_redirects': follow_redirects,
      'validate_cert': verify_cert,
    }

    if body:
      # By default the content type is already 'application/x-www-form-urlencoded'
      kwargs['body'] = body
    elif json:
      kwargs['body'] = _json.dumps(json)
    kwargs['prepare_curl_callback'] = setup_curl

    if proxy:
      host, port = proxy.rsplit(':', 1)
      kwargs['proxy_host'] = host
      kwargs['proxy_port'] = int(port)

    if params:
      q = urlencode(params)
      url += '?' + q

    r = HTTPRequest(url, **kwargs)
    res = await AsyncHTTPClient().fetch(
      r, raise_error=False)
    err_cls: Optional[type] = None
    if res.code >= 500:
      err_cls = TemporaryError
    elif res.code >= 400:
      err_cls = HTTPError
    if err_cls is not None:
      raise err_cls(
        res.code, res.reason, res
      )

    return Response(res.headers, res.body)

session = TornadoSession()
nvchecker/lib/README.md (new file, 4 lines)
@@ -0,0 +1,4 @@
This directory contains code from other places:

* `nicelogger.py`: from my [winterpy](https://github.com/lilydjwg/winterpy)
* `packaging_version.py`: from python-packaging 20.9, modified
nvchecker/lib/__init__.py (new file, empty)
nvchecker/lib/nicelogger.py (new file, 113 lines)
@@ -0,0 +1,113 @@
# MIT licensed
# Copyright (c) 2013-2017 lilydjwg <lilydjwg@gmail.com>, et al.

'''
A Tornado-inspired logging formatter, with displayed time with millisecond accuracy

FYI: pyftpdlib also has a Tornado-style logger.
'''

import sys
import time
import logging

class Colors:
  def __init__(self, color=None):
    if color is None:
      color = support_color()
    if color:
      import curses
      curses.setupterm()
      if sys.hexversion < 0x30203f0:
        fg_color = str(curses.tigetstr("setaf") or
                       curses.tigetstr("setf") or "", "ascii")
      else:
        fg_color = curses.tigetstr("setaf") or curses.tigetstr("setf") or b""

      self.blue = str(curses.tparm(fg_color, 4), "ascii")
      self.yellow = str(curses.tparm(fg_color, 3), "ascii")
      self.green = str(curses.tparm(fg_color, 2), "ascii")
      self.red = str(curses.tparm(fg_color, 1), "ascii")
      self.bright_red = str(curses.tparm(fg_color, 9), "ascii")
      self.normal = str(curses.tigetstr("sgr0"), "ascii")

    else:
      self.blue = self.yellow = self.green = self.red = self.bright_red = self.normal = ""


class TornadoLogFormatter(logging.Formatter):
  def __init__(self, color, *args, **kwargs):
    super().__init__(*args, **kwargs)
    self._color = color
    if color:
      colors = Colors(color=color)
      self._colors = {
        logging.DEBUG: colors.blue,
        logging.INFO: colors.green,
        logging.WARNING: colors.yellow,
        logging.ERROR: colors.red,
        logging.CRITICAL: colors.bright_red,
      }
      self._normal = colors.normal

  def format(self, record):
    try:
      record.message = record.getMessage()
    except Exception as e:
      record.message = "Bad message (%r): %r" % (e, record.__dict__)
    record.asctime = time.strftime(
      "%m-%d %H:%M:%S", self.converter(record.created))
    prefix = '[%(levelname)1.1s %(asctime)s.%(msecs)03d %(module)s:%(lineno)d]' % \
      record.__dict__
    if self._color:
      prefix = (self._colors.get(record.levelno, self._normal) +
                prefix + self._normal)
    formatted = prefix + " " + record.message

    formatted += ''.join(
      ' %s=%s' % (k, v) for k, v in record.__dict__.items()
      if k not in {
        'levelname', 'asctime', 'module', 'lineno', 'args', 'message',
        'filename', 'exc_info', 'exc_text', 'created', 'funcName',
        'processName', 'process', 'msecs', 'relativeCreated', 'thread',
        'threadName', 'name', 'levelno', 'msg', 'pathname', 'stack_info',
        'taskName',
      })

    if record.exc_info:
      if not record.exc_text:
        record.exc_text = self.formatException(record.exc_info)
    if record.exc_text:
      formatted = formatted.rstrip() + "\n" + record.exc_text
    return formatted.replace("\n", "\n ")

def support_color(stream=sys.stderr):
  if stream.isatty():
    try:
      import curses
      curses.setupterm()
      if curses.tigetnum("colors") > 0:
        return True
    except:
      import traceback
      traceback.print_exc()
  return False

def enable_pretty_logging(level=logging.DEBUG, handler=None, color=None):
  '''
  handler: specify a handler instead of default StreamHandler
  color: boolean, force color to be on / off. Default to be on only when
         ``handler`` isn't specified and the term supports color
  '''
  logger = logging.getLogger()
  if handler is None:
    h = logging.StreamHandler()
  else:
    h = handler
  if color is None and handler is None:
    color = support_color()
  formatter = TornadoLogFormatter(color=color)
  h.setLevel(level)
  h.setFormatter(formatter)
  logger.setLevel(level)
  logger.addHandler(h)
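Typical use of this module is a single call at startup; color support is auto-detected from the terminal. A short sketch (assuming the module is importable as `nvchecker.lib.nicelogger`):

```python
import logging
from nvchecker.lib.nicelogger import enable_pretty_logging

enable_pretty_logging(logging.DEBUG)
# extra fields are rendered as trailing key=value pairs by the formatter
logging.getLogger('demo').info('checked', extra={'pkg': 'nvchecker'})
```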
nvchecker/lib/packaging_version.py (new file, 629 lines)
@@ -0,0 +1,629 @@
# This file comes from python-packaging 20.9 and is modified

# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License.

from __future__ import annotations

import collections
import itertools
import re

from typing import TYPE_CHECKING

class InfinityType(object):
    def __repr__(self):
        # type: () -> str
        return "Infinity"

    def __hash__(self):
        # type: () -> int
        return hash(repr(self))

    def __lt__(self, other):
        # type: (object) -> bool
        return False

    def __le__(self, other):
        # type: (object) -> bool
        return False

    def __eq__(self, other):
        # type: (object) -> bool
        return isinstance(other, self.__class__)

    def __ne__(self, other):
        # type: (object) -> bool
        return not isinstance(other, self.__class__)

    def __gt__(self, other):
        # type: (object) -> bool
        return True

    def __ge__(self, other):
        # type: (object) -> bool
        return True

    def __neg__(self):
        # type: (object) -> NegativeInfinityType
        return NegativeInfinity


Infinity = InfinityType()


class NegativeInfinityType(object):
    def __repr__(self):
        # type: () -> str
        return "-Infinity"

    def __hash__(self):
        # type: () -> int
        return hash(repr(self))

    def __lt__(self, other):
        # type: (object) -> bool
        return True

    def __le__(self, other):
        # type: (object) -> bool
        return True

    def __eq__(self, other):
        # type: (object) -> bool
        return isinstance(other, self.__class__)

    def __ne__(self, other):
        # type: (object) -> bool
        return not isinstance(other, self.__class__)

    def __gt__(self, other):
        # type: (object) -> bool
        return False

    def __ge__(self, other):
        # type: (object) -> bool
        return False

    def __neg__(self):
        # type: (object) -> InfinityType
        return Infinity


NegativeInfinity = NegativeInfinityType()

if TYPE_CHECKING:  # pragma: no cover
    from typing import Callable, Iterator, List, Optional, SupportsInt, Tuple, Union

    InfiniteTypes = Union[InfinityType, NegativeInfinityType]
    PrePostDevType = Union[InfiniteTypes, Tuple[str, int]]
    SubLocalType = Union[InfiniteTypes, int, str]
    LocalType = Union[
        NegativeInfinityType,
        Tuple[
            Union[
                SubLocalType,
                Tuple[SubLocalType, str],
                Tuple[NegativeInfinityType, SubLocalType],
            ],
            ...,
        ],
    ]
    CmpKey = Tuple[
        int, Tuple[int, ...], PrePostDevType, PrePostDevType, PrePostDevType, LocalType
    ]
    LegacyCmpKey = Tuple[int, Tuple[str, ...]]
    VersionComparisonMethod = Callable[
        [Union[CmpKey, LegacyCmpKey], Union[CmpKey, LegacyCmpKey]], bool
    ]

__all__ = ["parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN"]


_Version = collections.namedtuple(
    "_Version", ["epoch", "release", "dev", "pre", "post", "local"]
)


def parse(version):
    # type: (str) -> Union[LegacyVersion, Version]
    """
    Parse the given version string and return either a :class:`Version` object
    or a :class:`LegacyVersion` object depending on if the given version is
    a valid PEP 440 version or a legacy version.
    """
    try:
        return Version(version)
    except InvalidVersion:
        return LegacyVersion(version)


class InvalidVersion(ValueError):
    """
    An invalid version was found, users should refer to PEP 440.
    """


class _BaseVersion(object):
    _key = None  # type: Union[CmpKey, LegacyCmpKey]

    def __hash__(self):
        # type: () -> int
        return hash(self._key)

    # Please keep the duplicated `isinstance` check
    # in the six comparisons hereunder
    # unless you find a way to avoid adding overhead function calls.
    def __lt__(self, other):
        # type: (_BaseVersion) -> bool
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key < other._key

    def __le__(self, other):
        # type: (_BaseVersion) -> bool
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key <= other._key

    def __eq__(self, other):
        # type: (object) -> bool
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key == other._key

    def __ge__(self, other):
        # type: (_BaseVersion) -> bool
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key >= other._key

    def __gt__(self, other):
        # type: (_BaseVersion) -> bool
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key > other._key

    def __ne__(self, other):
        # type: (object) -> bool
        if not isinstance(other, _BaseVersion):
            return NotImplemented

        return self._key != other._key


class LegacyVersion(_BaseVersion):
    def __init__(self, version):
        # type: (str) -> None
        self._version = str(version)
        self._key = _legacy_cmpkey(self._version)

    def __str__(self):
        # type: () -> str
        return self._version

    def __repr__(self):
        # type: () -> str
        return "<LegacyVersion({0})>".format(repr(str(self)))

    @property
    def public(self):
        # type: () -> str
        return self._version

    @property
    def base_version(self):
        # type: () -> str
        return self._version

    @property
    def epoch(self):
        # type: () -> int
        return -1

    @property
    def release(self):
        # type: () -> None
        return None

    @property
    def pre(self):
        # type: () -> None
        return None

    @property
    def post(self):
        # type: () -> None
        return None

    @property
    def dev(self):
        # type: () -> None
        return None

    @property
    def local(self):
        # type: () -> None
        return None

    @property
    def is_prerelease(self):
        # type: () -> bool
        return False

    @property
    def is_postrelease(self):
        # type: () -> bool
        return False

    @property
    def is_devrelease(self):
        # type: () -> bool
        return False


_legacy_version_component_re = re.compile(r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE)

_legacy_version_replacement_map = {
    "pre": "c",
    "preview": "c",
    "-": "final-",
    "rc": "c",
    "dev": "@",
}


def _parse_version_parts(s):
    # type: (str) -> Iterator[str]
    for part in _legacy_version_component_re.split(s):
        part = _legacy_version_replacement_map.get(part, part)

        if not part or part == ".":
            continue

        if part[:1] in "0123456789":
            # pad for numeric comparison
            yield part.zfill(8)
        else:
            yield "*" + part

    # ensure that alpha/beta/candidate are before final
    yield "*final"


def _legacy_cmpkey(version):
    # type: (str) -> LegacyCmpKey

    # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch
    # greater than or equal to 0. This will effectively put the LegacyVersion,
    # which uses the defacto standard originally implemented by setuptools,
    # as before all PEP 440 versions.
    epoch = -1

    # This scheme is taken from pkg_resources.parse_version setuptools prior to
    # it's adoption of the packaging library.
    parts = []  # type: List[str]
    for part in _parse_version_parts(version.lower()):
        if part.startswith("*"):
            # remove "-" before a prerelease tag
            if part < "*final":
                while parts and parts[-1] == "*final-":
                    parts.pop()

            # remove trailing zeros from each series of numeric parts
            while parts and parts[-1] == "00000000":
                parts.pop()

        parts.append(part)

    return epoch, tuple(parts)


# Deliberately not anchored to the start and end of the string, to make it
# easier for 3rd party code to reuse
VERSION_PATTERN = r"""
    v?
    (?:
        (?:(?P<epoch>[0-9]+)!)?                           # epoch
        (?P<release>[0-9]+(?:\.[0-9]+)*)                  # release segment
        (?P<pre>                                          # pre-release
            [-_\.]?
            (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview))
            [-_\.]?
            (?P<pre_n>[0-9]+)?
        )?
        (?P<post>                                         # post release
            (?:-(?P<post_n1>[0-9]+))
            |
            (?:
                [-_\.]?
                (?P<post_l>post|rev|r)
                [-_\.]?
                (?P<post_n2>[0-9]+)?
            )
        )?
        (?P<dev>                                          # dev release
            [-_\.]?
            (?P<dev_l>dev)
            [-_\.]?
            (?P<dev_n>[0-9]+)?
        )?
    )
    (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))?       # local version
"""


class Version(_BaseVersion):

    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)

    def __init__(self, version):
        # type: (str) -> None

        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion("Invalid version: '{0}'".format(version))

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            post=_parse_letter_version(
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self):
        # type: () -> str
        return "<Version({0})>".format(repr(str(self)))

    def __str__(self):
        # type: () -> str
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append("{0}!".format(self.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        # Pre-release
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))

        # Post-release
        if self.post is not None:
            parts.append(".post{0}".format(self.post))

        # Development release
        if self.dev is not None:
            parts.append(".dev{0}".format(self.dev))

        # Local version segment
        if self.local is not None:
            parts.append("+{0}".format(self.local))

        return "".join(parts)

    @property
    def epoch(self):
        # type: () -> int
        _epoch = self._version.epoch  # type: int
        return _epoch

    @property
    def release(self):
        # type: () -> Tuple[int, ...]
        _release = self._version.release  # type: Tuple[int, ...]
        return _release

    @property
    def pre(self):
        # type: () -> Optional[Tuple[str, int]]
        _pre = self._version.pre  # type: Optional[Tuple[str, int]]
        return _pre

    @property
    def post(self):
        # type: () -> Optional[Tuple[str, int]]
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self):
        # type: () -> Optional[Tuple[str, int]]
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self):
        # type: () -> Optional[str]
        if self._version.local:
            return ".".join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self):
        # type: () -> str
        return str(self).split("+", 1)[0]

    @property
    def base_version(self):
        # type: () -> str
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append("{0}!".format(self.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        return "".join(parts)

    @property
    def is_prerelease(self):
        # type: () -> bool
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self):
        # type: () -> bool
        return self.post is not None

    @property
    def is_devrelease(self):
        # type: () -> bool
        return self.dev is not None

    @property
    def major(self):
        # type: () -> int
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self):
        # type: () -> int
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self):
        # type: () -> int
        return self.release[2] if len(self.release) >= 3 else 0


def _parse_letter_version(
    letter,  # type: str
    number,  # type: Union[str, bytes, SupportsInt]
):
    # type: (...) -> Optional[Tuple[str, int]]

    if letter:
        # We consider there to be an implicit 0 in a pre-release if there is
        # not a numeral associated with it.
        if number is None:
            number = 0

        # We normalize any letters to their lower case form
        letter = letter.lower()

        # We consider some words to be alternate spellings of other words and
        # in those cases we want to normalize the spellings to our preferred
        # spelling.
        if letter == "alpha":
            letter = "a"
        elif letter == "beta":
            letter = "b"
        elif letter in ["c", "pre", "preview"]:
            letter = "rc"
        elif letter in ["rev", "r"]:
            letter = "post"

        return letter, int(number)
    if not letter and number:
        # We assume if we are given a number, but we are not given a letter
        # then this is using the implicit post release syntax (e.g. 1.0-1)
        letter = "post"

        return letter, int(number)

    return None


_local_version_separators = re.compile(r"[\._-]")


def _parse_local_version(local):
    # type: (str) -> Optional[LocalType]
    """
    Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
    """
    if local is not None:
        return tuple(
            part.lower() if not part.isdigit() else int(part)
            for part in _local_version_separators.split(local)
        )
    return None


def _cmpkey(
    epoch,  # type: int
    release,  # type: Tuple[int, ...]
    pre,  # type: Optional[Tuple[str, int]]
    post,  # type: Optional[Tuple[str, int]]
    dev,  # type: Optional[Tuple[str, int]]
    local,  # type: Optional[Tuple[SubLocalType]]
):
    # type: (...) -> CmpKey

    # When we compare a release version, we want to compare it with all of the
    # trailing zeros removed. So we'll use a reverse the list, drop all the now
    # leading zeros until we come to something non zero, then take the rest
    # re-reverse it back into the correct order and make it a tuple and use
    # that for our sorting key.
    _release = tuple(
        reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
    )

    # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
    # We'll do this by abusing the pre segment, but we _only_ want to do this
    # if there is not a pre or a post segment. If we have one of those then
    # the normal sorting rules will handle this case correctly.
    if pre is None and post is None and dev is not None:
        _pre = NegativeInfinity  # type: PrePostDevType
    # Versions without a pre-release (except as noted above) should sort after
    # those with one.
    elif pre is None:
        _pre = Infinity
    else:
        _pre = pre

    # Versions without a post segment should sort before those with one.
    if post is None:
        _post = NegativeInfinity  # type: PrePostDevType

    else:
        _post = post

    # Versions without a development segment should sort after those with one.
    if dev is None:
        _dev = Infinity  # type: PrePostDevType

    else:
        _dev = dev

    if local is None:
        # Versions without a local segment should sort before those with one.
        _local = NegativeInfinity  # type: LocalType
    else:
        # Versions with a local segment need that segment parsed to implement
        # the sorting rules in PEP440.
        # - Alpha numeric segments sort before numeric segments
        # - Alpha numeric segments sort lexicographically
        # - Numeric segments sort numerically
        # - Shorter versions sort before longer versions when the prefixes
        #   match exactly
        _local = tuple(
            (i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
        )

    return epoch, _release, _pre, _post, _dev, _local
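A few spot checks of the ordering rules implemented by `_cmpkey` and the `LegacyVersion` fallback in `parse` (assuming the vendored module is importable as `nvchecker.lib.packaging_version`):

```python
from nvchecker.lib.packaging_version import parse

# dev releases sort before pre-releases, which sort before the final release
assert parse('1.0.dev0') < parse('1.0a1') < parse('1.0') < parse('1.0.post1')
assert parse('1.0') == parse('1.0.0')  # trailing zeros are ignored
print(type(parse('not a PEP 440 version')).__name__)  # LegacyVersion
```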
(deleted file, 110 lines)
@@ -1,110 +0,0 @@
#!/usr/bin/env python3
# vim:fileencoding=utf-8

import os
import sys
import configparser
import logging
from functools import partial

from pkg_resources import parse_version
from tornado.ioloop import IOLoop
from tornado.options import parse_command_line, define, options

from nvchecker.get_version import get_version
from nvchecker import notify

logger = logging.getLogger(__name__)
notifications = []
g_counter = 0
g_oldver = {}
g_curver = {}

define("notify", type=bool,
       help="show desktop notifications when a new version is available")
define("oldverfile", type=str, metavar="FILE",
       help="a text file listing current version info in format 'name: version'")
define("verfile", type=str, metavar="FILE",
       help="write a new version file")

def task_inc():
  global g_counter
  g_counter += 1

def task_dec():
  global g_counter
  g_counter -= 1
  if g_counter == 0:
    IOLoop.instance().stop()
    write_verfile()

def load_config(*files):
  config = configparser.ConfigParser(
    dict_type=dict, allow_no_value=True
  )
  for file in files:
    with open(file) as f:
      config.read_file(f)

  return config

def load_oldverfile(file):
  v = {}
  with open(file) as f:
    for l in f:
      name, ver = [x.strip() for x in l.split(':', 1)]
      v[name] = ver
  return v

def write_verfile():
  if not options.verfile:
    return

  with open(options.verfile, 'w') as f:
    # sort using only alphanums, as done by the sort command, and needed by
    # comm command
    for item in sorted(g_curver.items(), key=lambda i: (''.join(filter(str.isalnum, i[0])), i[1])):
      print('%s: %s' % item, file=f)

def print_version_update(name, version):
  oldver = g_oldver.get(name, None)
  if not oldver or parse_version(oldver) < parse_version(version):
    logger.info('%s: updated version %s', name, version)
    _updated(name, version)
  else:
    logger.info('%s: current version %s', name, version)
  task_dec()

def _updated(name, version):
  g_curver[name] = version

  if options.notify:
    msg = '%s updated to version %s' % (name, version)
    notifications.append(msg)
    notify.update('nvchecker', '\n'.join(notifications))

def get_versions(config):
  task_inc()
  for name in config.sections():
    task_inc()
    get_version(name, config[name], print_version_update)
  task_dec()

def main():
  files = parse_command_line()
  if not files:
    return

  def run_test():
    config = load_config(*files)
    if options.oldverfile:
      g_oldver.update(load_oldverfile(options.oldverfile))
    g_curver.update(g_oldver)
    get_versions(config)

  ioloop = IOLoop.instance()
  ioloop.add_callback(run_test)
  ioloop.start()

if __name__ == '__main__':
  main()
@@ -1,98 +0,0 @@
'''
Call libnotify via ctypes
'''

__all__ = ["set", "show", "update", "set_timeout", "set_urgency"]

from ctypes import *
from threading import Lock
import atexit

NOTIFY_URGENCY_LOW = 0
NOTIFY_URGENCY_NORMAL = 1
NOTIFY_URGENCY_CRITICAL = 2
UrgencyLevel = {NOTIFY_URGENCY_LOW, NOTIFY_URGENCY_NORMAL, NOTIFY_URGENCY_CRITICAL}

libnotify = None
gobj = None
libnotify_lock = Lock()
libnotify_inited = False

class obj: pass
notify_st = obj()

def set(summary=None, body=None, icon_str=None):
  with libnotify_lock:
    init()

    if summary is not None:
      notify_st.summary = summary.encode()
      notify_st.body = notify_st.icon_str = None
    if body is not None:
      notify_st.body = body.encode()
    if icon_str is not None:
      notify_st.icon_str = icon_str.encode()

    libnotify.notify_notification_update(
      notify_st.notify,
      c_char_p(notify_st.summary),
      c_char_p(notify_st.body),
      c_char_p(notify_st.icon_str),
      c_void_p()
    )

def show():
  libnotify.notify_notification_show(notify_st.notify, c_void_p())

def update(summary=None, body=None, icon_str=None):
  if not any((summary, body)):
    raise TypeError('at least one argument please')

  set(summary, body, icon_str)
  show()

def set_timeout(self, timeout):
  '''set `timeout' in milliseconds'''
  libnotify.notify_notification_set_timeout(notify_st.notify, int(timeout))

def set_urgency(self, urgency):
  if urgency not in UrgencyLevel:
    raise ValueError
  libnotify.notify_notification_set_urgency(notify_st.notify, urgency)

def init():
  global libnotify_inited, libnotify, gobj
  if libnotify_inited:
    return

  try:
    libnotify = CDLL('libnotify.so')
  except OSError:
    libnotify = CDLL('libnotify.so.4')
  gobj = CDLL('libgobject-2.0.so')

  libnotify.notify_init('pynotify')
  libnotify_inited = True
  notify_st.notify = libnotify.notify_notification_new(
    c_void_p(), c_void_p(), c_void_p(),
  )
  atexit.register(uninit)

def uninit():
  global libnotify_inited
  try:
    if libnotify_inited:
      gobj.g_object_unref(notify_st.notify)
      libnotify.notify_uninit()
      libnotify_inited = False
  except AttributeError:
    # libnotify.so has already been unloaded
    pass

if __name__ == '__main__':
  from time import sleep
  notify = __import__('__main__')
  notify.set('This is a test', '测试一下。')
  notify.show()
  sleep(1)
  notify.update(body='再测试一下。')
141 nvchecker/slogconf.py Normal file
@@ -0,0 +1,141 @@
# vim: se sw=2:
# MIT licensed
# Copyright (c) 2018-2020,2023-2024 lilydjwg <lilydjwg@gmail.com>, et al.

import logging
import os
import io
import traceback
import sys

import structlog

from .httpclient import TemporaryError

def _console_msg(event):
  evt = event['event']
  if evt == 'up-to-date':
    msg = 'up-to-date, version %s' % event['version']
    del event['version']
  elif evt == 'updated':
    if event.get('old_version'):
      msg = 'updated from %(old_version)s to %(version)s' % event
    else:
      msg = 'updated to %(version)s' % event
    del event['version'], event['old_version']
  else:
    msg = evt

  if 'revision' in event and not event['revision']:
    del event['revision']

  if 'name' in event:
    msg = f"{event['name']}: {msg}"
    del event['name']

  event['msg'] = msg

  return event

def exc_info(logger, level, event):
  if level == 'exception':
    event['exc_info'] = True
  return event

def filter_nones(logger, level, event):
  if 'url' in event and event['url'] is None:
    del event['url']
  return event

def filter_taskname(logger, level, event):
  # added in Python 3.12, not useful to us, but appears as a normal KV.
  if 'taskName' in event:
    del event['taskName']
  return event

def filter_exc(logger, level, event):
  exc_info = event.get('exc_info')
  if not exc_info:
    return event

  if exc_info is True:
    exc = sys.exc_info()[1]
  else:
    exc = exc_info

  if isinstance(exc, TemporaryError):
    if exc.code == 599: # network issues
      del event['exc_info']
      event['error'] = exc
  return event

def stdlib_renderer(logger, level, event):
  # return event unchanged for further processing
  std_event = _console_msg(event.copy())
  try:
    logger = logging.getLogger(std_event.pop('logger_name'))
  except KeyError:
    logger = logging.getLogger()
  msg = std_event.pop('msg', std_event.pop('event'))
  exc_info = std_event.pop('exc_info', None)
  if 'error' in std_event:
    std_event['error'] = repr(std_event['error'])
  getattr(logger, level)(
    msg, exc_info = exc_info, extra=std_event,
  )
  return event

_renderer = structlog.processors.JSONRenderer(ensure_ascii=False)
def json_renderer(logger, level, event):
  event['level'] = level
  return _renderer(logger, level, event)

def null_renderer(logger, level, event):
  return ''

class _Logger(logging.Logger):
  _my_srcfile = os.path.normcase(
    stdlib_renderer.__code__.co_filename)

  _structlog_dir = os.path.dirname(structlog.__file__)

  def findCaller(self, stack_info=False, stacklevel=1):
    """
    Find the stack frame of the caller so that we can note the source
    file name, line number and function name.
    """
    f = logging.currentframe()
    # On some versions of IronPython, currentframe() returns None if
    # IronPython isn't run with -X:Frames.
    if f is not None:
      f = f.f_back
    orig_f = f
    while f and stacklevel > 1:
      f = f.f_back
      stacklevel -= 1
    if not f:
      f = orig_f
    rv = "(unknown file)", 0, "(unknown function)", None
    while hasattr(f, "f_code"):
      co = f.f_code
      filename = os.path.normcase(co.co_filename)
      if filename in [logging._srcfile, self._my_srcfile] \
          or filename.startswith(self._structlog_dir):
        f = f.f_back
        continue
      sinfo = None
      if stack_info:
        sio = io.StringIO()
        sio.write('Stack (most recent call last):\n')
        traceback.print_stack(f, file=sio)
        sinfo = sio.getvalue()
        if sinfo[-1] == '\n':
          sinfo = sinfo[:-1]
        sio.close()
      rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
      break
    return rv

def fix_logging():
  logging.setLoggerClass(_Logger)
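These functions are structlog processors, meant to be chained. Below is a minimal sketch of wiring them up; the exact chain and ordering nvchecker uses live elsewhere in the codebase, so treat this composition as illustrative only.

import structlog
from nvchecker import slogconf

slogconf.fix_logging()  # install the _Logger class so caller info skips structlog frames
structlog.configure(processors=[
  slogconf.exc_info,         # level == 'exception' implies exc_info=True
  slogconf.filter_exc,       # downgrade network TemporaryError (code 599) to an 'error' KV
  slogconf.filter_nones,     # drop url=None
  slogconf.filter_taskname,  # drop Python 3.12's taskName
  slogconf.stdlib_renderer,  # mirror each event into the stdlib logger, then pass it on
  slogconf.json_renderer,    # final renderer: one JSON object per event
])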
34 nvchecker/sortversion.py Normal file
@@ -0,0 +1,34 @@
# MIT licensed
# Copyright (c) 2013-2021 lilydjwg <lilydjwg@gmail.com>, et al.

'''
Sort versions using the deprecated pkg_resources / packaging parse_version, or pyalpm.vercmp
'''

__all__ = ["sort_version_keys"]

from .lib.packaging_version import parse as parse_version

try:
  import pyalpm
  from functools import cmp_to_key
  vercmp = cmp_to_key(pyalpm.vercmp)
  vercmp_available = True
except ImportError:
  def vercmp(k):
    raise NotImplementedError("Using vercmp but pyalpm can not be imported!")
  vercmp_available = False

try:
  from awesomeversion import AwesomeVersion
  awesomeversion_available = True
except ImportError:
  def AwesomeVersion(k): # type: ignore
    raise NotImplementedError("Using awesomeversion but it can not be imported!")
  awesomeversion_available = False

sort_version_keys = {
  "parse_version": parse_version,
  "vercmp": vercmp,
  "awesomeversion": AwesomeVersion,
}
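A small usage sketch: each value in sort_version_keys is a key function suitable for sorted(), so callers pick a comparison scheme by name.

from nvchecker.sortversion import sort_version_keys

key = sort_version_keys['parse_version']
print(sorted(['1.10', '1.2', '1.9.1'], key=key))
# ['1.2', '1.9.1', '1.10'] -- numeric, not lexicographic, ordering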
199 nvchecker/tools.py Normal file
@@ -0,0 +1,199 @@
# vim: se sw=2:
# MIT licensed
# Copyright (c) 2013-2024 lilydjwg <lilydjwg@gmail.com>, et al.

import sys
import argparse
import shutil
import structlog
import json
import os.path

from . import core
from .util import RichResult

logger = structlog.get_logger(logger_name=__name__)

def take() -> None:
  parser = argparse.ArgumentParser(description='update version records of nvchecker')
  core.add_common_arguments(parser)
  parser.add_argument('--all', action='store_true',
                      help='take all updates')
  parser.add_argument('--ignore-nonexistent', action='store_true',
                      help='ignore nonexistent names')
  parser.add_argument('names', metavar='NAME', nargs='*',
                      help='software name to be updated. use NAME=VERSION to update '
                           'to a specific version instead of the new version.')
  args = parser.parse_args()
  if core.process_common_arguments(args):
    return

  opt = core.load_file(args.file, use_keymanager=False)[1]
  if opt.ver_files is None:
    logger.critical(
      "doesn't have 'oldver' and 'newver' set.",
      source=args.file,
    )
    sys.exit(2)
  else:
    oldverf = opt.ver_files[0]
    newverf = opt.ver_files[1]

  oldvers = core.read_verfile(oldverf)
  newvers = core.read_verfile(newverf)

  if args.all:
    oldvers.update(newvers)
  else:
    name: str
    for name in args.names:
      if "=" in name:
        name, newver = name.split("=")
        oldvers[name] = RichResult(version=newver)
      else:
        try:
          oldvers[name] = newvers[name]
        except KeyError:
          if args.ignore_nonexistent:
            logger.warning('nonexistent in newver, ignored', name=name)
            continue

          logger.critical(
            "doesn't exist in 'newver' set.", name=name,
          )
          sys.exit(2)

  try:
    if os.path.islink(oldverf):
      shutil.copy(oldverf, oldverf.with_name(oldverf.name + '~'))
    else:
      oldverf.rename(
        oldverf.with_name(oldverf.name + '~'),
      )
  except FileNotFoundError:
    pass
  core.write_verfile(oldverf, oldvers)

def cmp() -> None:
  parser = argparse.ArgumentParser(description='compare version records of nvchecker')
  core.add_common_arguments(parser)
  parser.add_argument('-j', '--json', action='store_true',
                      help='Output JSON array of dictionaries with {name, newver, oldver, [delta]} '
                           '(or array of names if --quiet)')
  parser.add_argument('-q', '--quiet', action='store_true',
                      help="Quiet mode, output only the names.")
  parser.add_argument('-a', '--all', action='store_true',
                      help="Include unchanged versions.")
  parser.add_argument('-s', '--sort',
                      choices=('parse_version', 'vercmp', 'awesomeversion', 'none'),
                      default='parse_version',
                      help='Version compare method used to decide the arrow direction '
                           '(default: parse_version)')
  parser.add_argument('-n', '--newer', action='store_true',
                      help='Show only the newer ones according to --sort.')
  parser.add_argument('--exit-status', action='store_true',
                      help="exit with status 4 if there are updates")
  args = parser.parse_args()
  if core.process_common_arguments(args):
    return

  opt = core.load_file(args.file, use_keymanager=False)[1]
  if opt.ver_files is None:
    logger.critical(
      "doesn't have 'oldver' and 'newver' set.",
      source=args.file,
    )
    sys.exit(2)
  else:
    oldverf = opt.ver_files[0]
    newverf = opt.ver_files[1]

  oldvers = {k: v.version for k, v in core.read_verfile(oldverf).items()}
  newvers = {k: v.version for k, v in core.read_verfile(newverf).items()}

  differences = []

  for name, newver in sorted(newvers.items()): # accumulate differences
    oldver = oldvers.get(name, None)

    diff = {
      'name': name,
      'oldver': oldver,
      'newver': newver
    }

    if oldver is not None and newver is not None:
      if oldver == newver:
        diff['delta'] = 'equal'

      elif args.sort == "none":
        diff['delta'] = 'new' # assume it's a new version if we're not comparing

      else:
        from .sortversion import sort_version_keys
        version = sort_version_keys[args.sort]

        if version(oldver) > version(newver): # type: ignore
          if args.newer:
            continue # don't store this diff
          diff['delta'] = 'old'
        else:
          diff['delta'] = 'new'

    elif oldver is None:
      diff['delta'] = 'added'

    elif newver is None:
      if args.newer:
        continue # don't store this diff
      diff['delta'] = 'gone'

    if args.all or diff['delta'] != 'equal':
      differences.append(diff)

  if args.json:
    if args.quiet:
      print(json.dumps([diff['name'] for diff in differences], separators=(',', ':')))
    else:
      print(json.dumps(differences, sort_keys=True, separators=(',', ':')))

  elif args.quiet:
    for diff in differences:
      print(diff['name'])

  else:
    from .lib.nicelogger import Colors, support_color
    c = Colors(support_color(sys.stdout))

    diffstyles = {
      'new': {
        'symbol': '->',
        'oldc': c.red
      },
      'old': {
        'symbol': f'{c.red}<-{c.normal}',
        'oldc': c.red
      },
      'added': {
        'symbol': '++',
        'oldc': c.red
      },
      'gone': {
        'symbol': f'{c.red}--{c.normal}',
        'oldc': c.green
      },
      'equal': {
        'symbol': '==',
        'oldc': c.green
      }
    }

    for diff in differences:
      style = diffstyles[diff.get('delta', 'equal')] # type: ignore # mypy has issues with this line
      print(f'{diff["name"]} {style["oldc"]}{diff["oldver"]}{c.normal} {style["symbol"]} {c.green}{diff["newver"]}{c.normal}')

  if args.exit_status and any(
    diff.get('delta') != 'equal' for diff in differences
  ):
    sys.exit(4)
330 nvchecker/util.py Normal file
@@ -0,0 +1,330 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.

from __future__ import annotations

import sys
import asyncio
from asyncio import Queue
from typing import (
  Dict, Optional, List, NamedTuple, Union,
  Any, Tuple, Callable, Coroutine, Hashable,
  TYPE_CHECKING,
)
from pathlib import Path
import contextvars
import abc
import netrc
from dataclasses import dataclass

if TYPE_CHECKING:
  import tomli as tomllib
else:
  try:
    import tomllib
  except ModuleNotFoundError:
    import tomli as tomllib

import structlog

from .httpclient import session
from .ctxvars import tries as ctx_tries
from .ctxvars import proxy as ctx_proxy
from .ctxvars import user_agent as ctx_ua
from .ctxvars import httptoken as ctx_httpt
from .ctxvars import verify_cert as ctx_verify_cert

logger = structlog.get_logger(logger_name=__name__)

Entry = Dict[str, Any]
Entry.__doc__ = '''The configuration `dict` for an entry.'''
Entries = Dict[str, Entry]

if sys.version_info[:2] >= (3, 11):
  from typing import LiteralString
else:
  LiteralString = str

if sys.version_info[:2] >= (3, 10):
  @dataclass(kw_only=True)
  class RichResult:
    version: str
    gitref: Optional[str] = None
    revision: Optional[str] = None
    url: Optional[str] = None

    def __str__(self):
      return self.version
else:
  @dataclass
  class RichResult:
    version: str
    gitref: Optional[str] = None
    revision: Optional[str] = None
    url: Optional[str] = None

    def __str__(self):
      return self.version

VersionResult = Union[None, str, RichResult, List[Union[str, RichResult]], Exception]
VersionResult.__doc__ = '''The result of a `get_version` check.

* `None` - No version found.
* `str` - A single version string is found.
* `RichResult` - A version string with additional information.
* `List[Union[str, RichResult]]` - Multiple version strings with or without additional information are found. :ref:`list options` will be applied.
* `Exception` - An error occurred.
'''

class FileLoadError(Exception):
  def __init__(self, kind, filename, exc):
    self.kind = kind
    self.filename = filename
    self.exc = exc

  def __str__(self):
    return f'failed to load {self.kind} {self.filename!r}: {self.exc}'

class KeyManager:
  '''Manages data in the keyfile.'''
  def __init__(
    self, file: Optional[Path],
  ) -> None:
    if file is not None:
      try:
        with file.open('rb') as f:
          keys = tomllib.load(f)['keys']
      except (OSError, tomllib.TOMLDecodeError) as e:
        raise FileLoadError('keyfile', str(file), e)
    else:
      keys = {}
    self.keys = keys
    try:
      netrc_file = netrc.netrc()
      netrc_hosts = netrc_file.hosts
    except (FileNotFoundError, netrc.NetrcParseError):
      netrc_hosts = {}
    self.netrc = netrc_hosts

  def get_key(self, name: str, legacy_name: Optional[str] = None) -> Optional[str]:
    '''Get the named key (token) in the keyfile.'''
    keyfile_token = self.keys.get(name) or self.keys.get(legacy_name)
    netrc_passwd = (e := self.netrc.get(name)) and e[2]
    return keyfile_token or netrc_passwd

class EntryWaiter:
  def __init__(self) -> None:
    self._waiting: Dict[str, asyncio.Future] = {}

  async def wait(self, name: str) -> str:
    '''Wait on the ``name`` entry and return its result (the version string)'''
    fu = self._waiting.get(name)
    if fu is None:
      fu = asyncio.Future()
      self._waiting[name] = fu
    return await fu

  def set_result(self, name: str, value: str) -> None:
    fu = self._waiting.get(name)
    if fu is not None:
      fu.set_result(value)

  def set_exception(self, name: str, e: Exception) -> None:
    fu = self._waiting.get(name)
    if fu is not None:
      fu.set_exception(e)

class RawResult(NamedTuple):
  '''The unprocessed result from a check.'''
  name: str
  version: VersionResult
  conf: Entry

RawResult.name.__doc__ = 'The name (table name) of the entry.'
RawResult.version.__doc__ = 'The result from the check.'
RawResult.conf.__doc__ = 'The entry configuration (table content) of the entry.'

ResultData = Dict[str, RichResult]

class BaseWorker:
  '''The base class for defining `Worker` classes for source plugins.

  .. py:attribute:: task_sem
     :type: asyncio.Semaphore

     This is the rate-limiting semaphore. Workers should acquire it while doing one unit of work.

  .. py:attribute:: result_q
     :type: Queue[RawResult]

     Results should be put into this queue.

  .. py:attribute:: tasks
     :type: List[Tuple[str, Entry]]

     A list of tasks for the `Worker` to complete. Every task consists of
     a tuple for the task name (table name in the configuration file) and the
     content of that table (as a `dict`).

  .. py:attribute:: keymanager
     :type: KeyManager

     The `KeyManager` for retrieving keys from the keyfile.
  '''
  def __init__(
    self,
    task_sem: asyncio.Semaphore,
    result_q: Queue[RawResult],
    tasks: List[Tuple[str, Entry]],
    keymanager: KeyManager,
  ) -> None:
    self.task_sem = task_sem
    self.result_q = result_q
    self.keymanager = keymanager
    self.tasks = tasks

  @abc.abstractmethod
  async def run(self) -> None:
    '''Run the `tasks`. Subclasses should implement this method.'''
    raise NotImplementedError

  async def _run_maynot_raise(self) -> None:
    try:
      await self.run()
    except Exception:
      # don't let an exception tear down the whole process
      logger.exception('exception raised by Worker.run')

class AsyncCache:
  '''A cache for use with async functions.'''
  cache: Dict[Hashable, Any]
  lock: asyncio.Lock

  def __init__(self) -> None:
    self.cache = {}
    self.lock = asyncio.Lock()

  async def _get_json(
    self, key: Tuple[str, str, Tuple[Tuple[str, str], ...]],
  ) -> Any:
    _, url, headers = key
    res = await session.get(url, headers=dict(headers))
    return res.json()

  async def get_json(
    self, url: str, *,
    headers: Dict[str, str] = {},
  ) -> Any:
    '''Get specified ``url`` and return the response content as JSON.

    The returned data will be cached for reuse.
    '''
    key = '_jsonurl', url, tuple(sorted(headers.items()))
    return await self.get(
      key, self._get_json) # type: ignore

  async def get(
    self,
    key: Hashable,
    func: Callable[[Hashable], Coroutine[Any, Any, Any]],
  ) -> Any:
    '''Run async ``func`` and cache its return value by ``key``.

    The ``key`` should be hashable, and the function will be called with it as
    its sole argument. For multiple simultaneous calls with the same key, only
    one will actually be called, and others will wait and return the same
    (cached) value.
    '''
    async with self.lock:
      cached = self.cache.get(key)
      if cached is None:
        coro = func(key)
        fu = asyncio.create_task(coro)
        self.cache[key] = fu

    if asyncio.isfuture(cached): # pending
      return await cached
    elif cached is not None: # cached
      return cached
    else: # not cached
      r = await fu
      self.cache[key] = r
      return r

if TYPE_CHECKING:
  from typing_extensions import Protocol
  class GetVersionFunc(Protocol):
    async def __call__(
      self,
      name: str, conf: Entry,
      *,
      cache: AsyncCache,
      keymanager: KeyManager,
    ) -> VersionResult:
      ...
else:
  GetVersionFunc = Any

class FunctionWorker(BaseWorker):
  func: GetVersionFunc
  cache: AsyncCache

  def initialize(self, func: GetVersionFunc) -> None:
    self.func = func
    self.cache = AsyncCache()

  async def run(self) -> None:
    futures = []
    for name, entry in self.tasks:
      ctx = contextvars.copy_context()
      fu = ctx.run(self.run_one, name, entry)
      futures.append(fu)

    for fu2 in asyncio.as_completed(futures):
      await fu2

  async def run_one(
    self, name: str, entry: Entry,
  ) -> None:
    assert self.func is not None

    tries = entry.get('tries', None)
    if tries is not None:
      ctx_tries.set(tries)
    proxy = entry.get('proxy', None)
    if proxy is not None:
      ctx_proxy.set(proxy)
    ua = entry.get('user_agent', None)
    if ua is not None:
      ctx_ua.set(ua)
    httpt = entry.get('httptoken', None)
    if httpt is None:
      httpt = self.keymanager.get_key('httptoken_'+name)
    if httpt is not None:
      ctx_httpt.set(httpt)
    verify_cert = entry.get('verify_cert', None)
    if verify_cert is not None:
      ctx_verify_cert.set(verify_cert)

    try:
      async with self.task_sem:
        version = await self.func(
          name, entry,
          cache = self.cache,
          keymanager = self.keymanager,
        )
      await self.result_q.put(RawResult(name, version, entry))
    except Exception as e:
      await self.result_q.put(RawResult(name, e, entry))

class GetVersionError(Exception):
  '''An error occurred while getting version information.

  Raise this when a known bad situation happens.

  :param msg: The error message.
  :param kwargs: Arbitrary additional context for the error.
  '''
  def __init__(self, msg: LiteralString, **kwargs: Any) -> None:
    self.msg = msg
    self.kwargs = kwargs
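The docstring of AsyncCache.get promises call coalescing: concurrent callers with the same key share one underlying coroutine. A minimal sketch demonstrating that behaviour (the counter is illustrative only):

import asyncio
from nvchecker.util import AsyncCache

calls = 0

async def slow(key):
  global calls
  calls += 1  # counts how many times the fetch actually runs
  await asyncio.sleep(0.1)
  return f'value for {key}'

async def main():
  cache = AsyncCache()
  # ten concurrent lookups of the same key...
  results = await asyncio.gather(*(cache.get('k', slow) for _ in range(10)))
  print(calls, results[0])  # ...but only 1 actual call

asyncio.run(main())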
44 nvchecker_source/alpm.py Normal file
@@ -0,0 +1,44 @@
# MIT licensed
# Copyright (c) 2020-2021 DDoSolitary <DDoSolitary@gmail.com>, et al.

from nvchecker.api import GetVersionError
from pyalpm import Handle


async def open_db(info):
  dbpath, repo = info
  handle = Handle('/', dbpath)
  db = handle.register_syncdb(repo, 0)
  return handle, db


async def get_version(name, conf, *, cache, **kwargs):
  pkgname = conf.get('alpm', name)
  dbpath = conf.get('dbpath', '/var/lib/pacman')
  strip_release = conf.get('strip_release', False)
  provided = conf.get('provided')

  repo = conf.get('repo')
  if repo is None:
    repos = conf.get('repos') or ['core', 'extra', 'multilib']
  else:
    repos = [repo]

  for repo in repos:
    db = (await cache.get((dbpath, repo), open_db))[1]
    pkg = db.get_pkg(pkgname)
    if pkg is not None:
      break

  if pkg is None:
    raise GetVersionError('package not found in the ALPM database')
  if provided is None:
    version = pkg.version
  else:
    provides = dict(x.split('=', 1) for x in pkg.provides if '=' in x)
    version = provides.get(provided)
    if version is None:
      raise GetVersionError('provides element not found')
  if strip_release:
    version = version.split('-', 1)[0]
  return version
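A hedged sketch of exercising this source directly; in normal use nvchecker builds the conf dict from a TOML table, and this call requires pyalpm plus a synced pacman sync database on disk.

import asyncio
from nvchecker.util import AsyncCache
from nvchecker_source.alpm import get_version

conf = {'alpm': 'linux', 'repo': 'core', 'strip_release': True}
version = asyncio.run(get_version('linux', conf, cache=AsyncCache()))
print(version)  # e.g. '6.10.3' -- whatever the local core database holds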
51 nvchecker_source/alpmfiles.py Normal file
@@ -0,0 +1,51 @@
# MIT licensed
# Copyright (c) 2023 Pekka Ristola <pekkarr [at] protonmail [dot] com>, et al.

from asyncio import create_subprocess_exec
from asyncio.subprocess import PIPE
import re
from typing import Tuple, List

from nvchecker.api import GetVersionError

async def get_files(info: Tuple[str, str]) -> List[str]:
  dbpath, pkg = info
  # there are no pyalpm bindings for the file databases
  cmd = ['pacman', '-Flq', '--dbpath', dbpath, pkg]

  p = await create_subprocess_exec(*cmd, stdout = PIPE, stderr = PIPE)
  stdout, stderr = await p.communicate()

  if p.returncode == 0:
    return stdout.decode().splitlines()
  else:
    raise GetVersionError(
      'pacman failed to get file list',
      pkg = pkg,
      cmd = cmd,
      stdout = stdout.decode(errors='replace'),
      stderr = stderr.decode(errors='replace'),
      returncode = p.returncode,
    )

async def get_version(name, conf, *, cache, **kwargs):
  pkg = conf['pkgname']
  repo = conf.get('repo')
  if repo is not None:
    pkg = f'{repo}/{pkg}'
  dbpath = conf.get('dbpath', '/var/lib/pacman')
  regex = re.compile(conf['filename'])
  if regex.groups > 1:
    raise GetVersionError('multi-group regex')
  strip_dir = conf.get('strip_dir', False)

  files = await cache.get((dbpath, pkg), get_files)

  for f in files:
    fn = f.rsplit('/', 1)[-1] if strip_dir else f
    match = regex.fullmatch(fn)
    if match:
      groups = match.groups()
      return groups[0] if len(groups) > 0 else fn

  raise GetVersionError('no file matches specified regex')
71 nvchecker_source/android_sdk.py Normal file
@@ -0,0 +1,71 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2017,2020 Chih-Hsuan Yen <yan12125 at gmail dot com>

import os
import re
from xml.etree import ElementTree

from nvchecker.api import session

_ANDROID_REPO_MANIFESTS = {
  'addon': 'https://dl.google.com/android/repository/addon2-1.xml',
  'package': 'https://dl.google.com/android/repository/repository2-1.xml',
}

# See <channel> tags in Android SDK XML manifests
_CHANNEL_MAP = {
  'stable': 'channel-0',
  'beta': 'channel-1',
  'dev': 'channel-2',
  'canary': 'channel-3',
}

async def _get_repo_manifest(repo):
  repo_xml_url = _ANDROID_REPO_MANIFESTS[repo]

  res = await session.get(repo_xml_url)
  data = res.body.decode('utf-8')

  repo_manifest = ElementTree.fromstring(data)
  return repo_manifest

async def get_version(name, conf, *, cache, **kwargs):
  repo = conf['repo']
  pkg_path_prefix = conf['android_sdk']
  channels = [_CHANNEL_MAP[channel]
              for channel in conf.get('channel', 'stable').split(',')]

  repo_manifest = await cache.get(repo, _get_repo_manifest)

  versions = []

  for pkg in repo_manifest.findall('.//remotePackage'):
    if not pkg.attrib['path'].startswith(pkg_path_prefix):
      continue
    channelRef = pkg.find('./channelRef')
    if channelRef.attrib['ref'] not in channels:
      continue
    for archive in pkg.findall('./archives/archive'):
      host_os = archive.find('./host-os')
      if host_os is not None and host_os.text != conf.get('host_os', 'linux'):
        continue
      archive_url = archive.find('./complete/url').text
      # revision
      rev = pkg.find('./revision')
      rev_strs = []
      for part in ('major', 'minor', 'micro'):
        part_node = rev.find('./' + part)
        if part_node is not None:
          rev_strs.append(part_node.text)
      # release number
      filename, ext = os.path.splitext(archive_url)
      rel_str = filename.rsplit('-')[-1]
      mobj = re.match(r'r\d+', rel_str)
      if mobj:
        rev_strs.append(rel_str)
      versions.append('.'.join(rev_strs))
      # A package suitable for the target host OS is found - skip the remaining archives
      break

  return versions
17 nvchecker_source/anitya.py Normal file
@@ -0,0 +1,17 @@
# MIT licensed
# Copyright (c) 2017-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from nvchecker.api import RichResult

URL = 'https://release-monitoring.org/api/project/{pkg}'

async def get_version(name, conf, *, cache, **kwargs):
  pkg = conf.get('anitya_id')
  if pkg is None:
    pkg = conf.get('anitya')
  url = URL.format(pkg = pkg)
  data = await cache.get_json(url)
  return RichResult(
    version = data['version'],
    url = f'https://release-monitoring.org/project/{data["id"]}/',
  )
189 nvchecker_source/apt.py Normal file
@@ -0,0 +1,189 @@
# MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.

from __future__ import annotations

import re
import asyncio
from typing import Dict, Tuple
import itertools
import functools
from collections import defaultdict

from nvchecker.api import (
  session, GetVersionError, VersionResult,
  RichResult, Entry, AsyncCache, KeyManager,
)

APT_RELEASE_URL = "%s/dists/%s/Release"
APT_PACKAGES_PATH = "%s/binary-%s/Packages%s"
APT_PACKAGES_URL = "%s/dists/%s/%s"
APT_PACKAGES_SUFFIX_PREFER = (".xz", ".gz", "")

DpkgVersion = Tuple[int, str, str]

def parse_version(s: str) -> DpkgVersion:
  try:
    epoch_str, rest = s.split(':', 1)
  except ValueError:
    epoch = 0
    rest = s
  else:
    epoch = int(epoch_str)

  try:
    ver, rev = rest.split('-', 1)
  except ValueError:
    ver = rest
    rev = ''

  return epoch, ver, rev

def _compare_part(a: str, b: str) -> int:
  sa = re.split(r'(\d+)', a)
  sb = re.split(r'(\d+)', b)
  for idx, (pa, pb) in enumerate(itertools.zip_longest(sa, sb)):
    if pa is None:
      return -1
    elif pb is None:
      return 1

    if idx % 2 == 1:
      ret = int(pa) - int(pb)
      if ret != 0:
        return ret
    else:
      if pa < pb:
        return -1
      elif pa > pb:
        return 1

  return 0

def compare_version_parsed(a: DpkgVersion, b: DpkgVersion) -> int:
  ret = a[0] - b[0]
  if ret != 0:
    return ret
  ret = _compare_part(a[1], b[1])
  if ret != 0:
    return ret
  return _compare_part(a[2], b[2])

def compare_version(a: str, b: str) -> int:
  va = parse_version(a)
  vb = parse_version(b)
  return compare_version_parsed(va, vb)

def _decompress_data(url: str, data: bytes) -> str:
  if url.endswith(".xz"):
    import lzma
    data = lzma.decompress(data)
  elif url.endswith(".gz"):
    import gzip
    data = gzip.decompress(data)

  return data.decode('utf-8')

async def get_url(url: str) -> str:
  res = await session.get(url)
  data = res.body
  loop = asyncio.get_running_loop()
  return await loop.run_in_executor(
    None, _decompress_data,
    url, data)

async def parse_packages(key: Tuple[AsyncCache, str]) -> Tuple[Dict[str, str], Dict[str, str], Dict[str, str]]:
  cache, url = key
  apt_packages = await cache.get(url, get_url) # type: ignore

  pkg_map = defaultdict(list)
  srcpkg_map = defaultdict(list)
  pkg_to_src_map = defaultdict(list)

  pkg = None
  srcpkg = None
  for line in apt_packages.split('\n'):
    if line.startswith("Package: "):
      pkg = line[9:]
    elif line.startswith("Source: "):
      srcpkg = line[8:]
    elif line.startswith("Version: "):
      version = line[9:]
      if pkg is not None:
        pkg_map[pkg].append(version)
        pkg_to_src_map["%s/%s" % (pkg, version)] = srcpkg if srcpkg is not None else pkg
      if srcpkg is not None:
        srcpkg_map[srcpkg].append(version)
      pkg = srcpkg = None

  pkg_map_max = {pkg: max(vs, key=functools.cmp_to_key(compare_version))
                 for pkg, vs in pkg_map.items()}
  srcpkg_map_max = {pkg: max(vs, key=functools.cmp_to_key(compare_version))
                    for pkg, vs in srcpkg_map.items()}
  pkg_to_src_map_max = {pkg: pkg_to_src_map["%s/%s" % (pkg, vs)]
                        for pkg, vs in pkg_map_max.items()}

  return pkg_map_max, srcpkg_map_max, pkg_to_src_map_max

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  srcpkg = conf.get('srcpkg')
  pkg = conf.get('pkg')
  mirror = conf['mirror']
  suite = conf['suite']
  repo = conf.get('repo', 'main')
  arch = conf.get('arch', 'amd64')
  strip_release = conf.get('strip_release', False)

  if srcpkg and pkg:
    raise GetVersionError('Setting both srcpkg and pkg is ambiguous')
  elif not srcpkg and not pkg:
    pkg = name

  apt_release = await cache.get(
    APT_RELEASE_URL % (mirror, suite), get_url) # type: ignore
  for suffix in APT_PACKAGES_SUFFIX_PREFER:
    packages_path = APT_PACKAGES_PATH % (repo, arch, suffix)
    if " " + packages_path in apt_release:
      break
  else:
    raise GetVersionError('Packages file not found in APT repository')

  pkg_map, srcpkg_map, pkg_to_src_map = await cache.get(
    (cache, APT_PACKAGES_URL % (mirror, suite, packages_path)), parse_packages) # type: ignore

  if pkg and pkg in pkg_map:
    version = pkg_map[pkg]
    changelog_name = pkg_to_src_map[pkg]
  elif srcpkg and srcpkg in srcpkg_map:
    version = srcpkg_map[srcpkg]
    changelog_name = srcpkg
  else:
    raise GetVersionError('package not found in APT repository')

  # Get the Changelogs field from the Release file
  changelogs_url = None
  for line in apt_release.split('\n'):
    if line.startswith('Changelogs: '):
      changelogs_url = line[12:]
      break

  # Build the changelog URL (see https://wiki.debian.org/DebianRepository/Format#Changelogs for the spec)
  changelog = None
  if changelogs_url is not None and changelogs_url != 'no':
    changelog_section = changelog_name[:4] if changelog_name.startswith('lib') else changelog_name[:1]
    changelog = changelogs_url.replace('@CHANGEPATH@', f'{repo}/{changelog_section}/{changelog_name}/{changelog_name}_{version}')

  if strip_release:
    version = version.split("-")[0]

  if changelog is not None:
    return RichResult(
      version = version,
      url = changelog,
    )
  else:
    return version
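A quick check of the dpkg-style ordering implemented above (note this is a simplified comparison; for instance, it does not special-case dpkg's '~' suffixes):

from nvchecker_source.apt import compare_version

assert compare_version('1:1.0-1', '2.0-1') > 0  # a nonzero epoch dominates
assert compare_version('1.10-1', '1.9-1') > 0   # digit runs compare as integers
assert compare_version('1.0-2', '1.0-1') > 0    # the Debian revision breaks ties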
37 nvchecker_source/archpkg.py Normal file
@@ -0,0 +1,37 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from nvchecker.api import session, RichResult, GetVersionError

URL = 'https://archlinux.org/packages/search/json/'

async def request(pkg):
  res = await session.get(URL, params={"name": pkg})
  return res.json()

async def get_version(name, conf, *, cache, **kwargs):
  pkg = conf.get('archpkg') or name
  strip_release = conf.get('strip_release', False)
  provided = conf.get('provided')

  data = await cache.get(pkg, request)

  if not data['results']:
    raise GetVersionError('Arch package not found')

  r = [r for r in data['results'] if r['repo'] != 'testing'][0]

  if provided:
    provides = dict(x.split('=', 1) for x in r['provides'] if '=' in x)
    version = provides.get(provided, None)
    if strip_release:
      version = version.split('-', 1)[0]
  elif strip_release:
    version = r['pkgver']
  else:
    version = r['pkgver'] + '-' + r['pkgrel']

  return RichResult(
    version = version,
    url = f'https://archlinux.org/packages/{r["repo"]}/{r["arch"]}/{r["pkgname"]}/',
  )
109 nvchecker_source/aur.py Normal file
@@ -0,0 +1,109 @@
# MIT licensed
# Copyright (c) 2013-2020,2024 lilydjwg <lilydjwg@gmail.com>, et al.

from datetime import datetime, timezone
import asyncio
from typing import Iterable, Dict, List, Tuple, Any, Optional

from nvchecker.api import (
  session, GetVersionError, VersionResult, RichResult,
  Entry, BaseWorker, RawResult,
)

AUR_URL = 'https://aur.archlinux.org/rpc/'

class AurResults:
  cache: Dict[str, Optional[Dict[str, Any]]]

  def __init__(self) -> None:
    self.cache = {}

  async def get_multiple(
    self,
    aurnames: Iterable[str],
  ) -> Dict[str, Optional[Dict[str, Any]]]:
    params = [('v', '5'), ('type', 'info')]
    params.extend(('arg[]', name) for name in aurnames
                  if name not in self.cache)
    res = await session.get(AUR_URL, params=params)
    data = res.json()
    new_results = {r['Name']: r for r in data['results']}

    cache = self.cache
    cache.update(new_results)
    cache.update(
      (name, None)
      for name in set(aurnames) - new_results.keys()
    )

    return {name: cache[name] for name in aurnames
            if name in cache}

class Worker(BaseWorker):
  # https://wiki.archlinux.org/index.php/Aurweb_RPC_interface#Limitations
  batch_size = 100

  async def run(self) -> None:
    tasks = self.tasks
    n_batch, left = divmod(len(tasks), self.batch_size)
    if left > 0:
      n_batch += 1

    aur_results = AurResults()

    ret = []
    for i in range(n_batch):
      s = i * self.batch_size
      batch = tasks[s : s+self.batch_size]
      fu = self._run_batch(batch, aur_results)
      ret.append(fu)

    await asyncio.gather(*ret)

  async def _run_batch(
    self,
    batch: List[Tuple[str, Entry]],
    aur_results: AurResults,
  ) -> None:
    task_by_name: Dict[str, Entry] = dict(self.tasks)

    async with self.task_sem:
      results = await _run_batch_impl(batch, aur_results)
      for name, version in results.items():
        r = RawResult(name, version, task_by_name[name])
        await self.result_q.put(r)

async def _run_batch_impl(
  batch: List[Tuple[str, Entry]],
  aur_results: AurResults,
) -> Dict[str, VersionResult]:
  aurnames = {conf.get('aur', name) for name, conf in batch}
  results = await aur_results.get_multiple(aurnames)

  ret: Dict[str, VersionResult] = {}

  for name, conf in batch:
    aurname = conf.get('aur', name)
    use_last_modified = conf.get('use_last_modified', False)
    strip_release = conf.get('strip_release', False)

    result = results.get(aurname)

    if result is None:
      ret[name] = GetVersionError('AUR upstream not found')
      continue

    version = result['Version']
    if use_last_modified:
      dt = datetime.fromtimestamp(result['LastModified'], timezone.utc)
      version += '-' + dt.strftime('%Y%m%d%H%M%S')
    if strip_release and '-' in version:
      version = version.rsplit('-', 1)[0]

    ret[name] = RichResult(
      version = version,
      url = f'https://aur.archlinux.org/packages/{name}',
    )

  return ret
73 nvchecker_source/bitbucket.py Normal file
@@ -0,0 +1,73 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from typing import Any, List, Union
from urllib.parse import urlencode

from nvchecker.api import VersionResult, RichResult, Entry, AsyncCache

# doc: https://developer.atlassian.com/cloud/bitbucket/rest/api-group-commits/#api-repositories-workspace-repo-slug-commits-get
BITBUCKET_URL = 'https://bitbucket.org/api/2.0/repositories/%s/commits/%s'
# doc: https://developer.atlassian.com/cloud/bitbucket/rest/api-group-refs/#api-repositories-workspace-repo-slug-refs-tags-get
BITBUCKET_MAX_TAG = 'https://bitbucket.org/api/2.0/repositories/%s/refs/tags'

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache,
  **kwargs: Any,
) -> VersionResult:
  repo = conf['bitbucket']
  br = conf.get('branch', '')
  use_max_tag = conf.get('use_max_tag', False)
  use_sorted_tags = conf.get('use_sorted_tags', False)

  if use_sorted_tags or use_max_tag:
    parameters = {'fields': 'values.name,values.links.html.href,next'}

    if use_sorted_tags:
      parameters['sort'] = conf.get('sort', '-target.date')
      if 'query' in conf:
        parameters['q'] = conf['query']

  if use_sorted_tags:
    url = BITBUCKET_MAX_TAG % repo
    url += '?' + urlencode(parameters)

    return await _get_tags(url, max_page=1, cache=cache)

  elif use_max_tag:
    url = BITBUCKET_MAX_TAG % repo
    url += '?' + urlencode(parameters)

    max_page = conf.get('max_page', 3)
    return await _get_tags(url, max_page=max_page, cache=cache)

  else:
    url = BITBUCKET_URL % (repo, br)
    data = await cache.get_json(url)
    return RichResult(
      version = data['values'][0]['date'].split('T', 1)[0].replace('-', ''),
      url = data['values'][0]['links']['html']['href'],
    )

async def _get_tags(
  url: str, *,
  max_page: int,
  cache: AsyncCache,
) -> VersionResult:
  ret: List[Union[str, RichResult]] = []

  for _ in range(max_page):
    data = await cache.get_json(url)
    ret.extend([
      RichResult(
        version = tag['name'],
        url = tag['links']['html']['href'],
      ) for tag in data['values']
    ])
    if 'next' in data:
      url = data['next']
    else:
      break

  return ret
40 nvchecker_source/cmd.py Normal file
@@ -0,0 +1,40 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import asyncio

import structlog

from nvchecker.api import GetVersionError

logger = structlog.get_logger(logger_name=__name__)

async def run_cmd(cmd: str) -> str:
  logger.debug('running cmd', cmd=cmd)
  p = await asyncio.create_subprocess_shell(
    cmd,
    stdout=asyncio.subprocess.PIPE,
    stderr=asyncio.subprocess.PIPE,
  )

  output, error = await p.communicate()
  output_s = output.strip().decode('latin1')
  error_s = error.strip().decode(errors='replace')
  if p.returncode != 0:
    raise GetVersionError(
      'command exited with error',
      cmd=cmd, error=error_s,
      returncode=p.returncode)
  elif not output_s:
    raise GetVersionError(
      'command exited without output',
      cmd=cmd, error=error_s,
      returncode=p.returncode)
  else:
    return output_s

async def get_version(
  name, conf, *, cache, keymanager=None
):
  cmd = conf['cmd']
  return await cache.get(cmd, run_cmd)
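A minimal sketch of this source's contract: the configured shell command must print the version on stdout, and a nonzero exit status or empty output raises GetVersionError.

import asyncio
from nvchecker_source.cmd import run_cmd

print(asyncio.run(run_cmd('echo 1.2.3')))  # 1.2.3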
21 nvchecker_source/combiner.py Normal file
@@ -0,0 +1,21 @@
# MIT licensed
# Copyright (c) 2021 lilydjwg <lilydjwg@gmail.com>, et al.

import asyncio
import string

from nvchecker.api import entry_waiter

class CombineFormat(string.Template):
  idpattern = '[0-9]+'

async def get_version(
  name, conf, *, cache, keymanager=None
):
  t = CombineFormat(conf['format'])
  from_ = conf['from']
  waiter = entry_waiter.get()
  entries = [waiter.wait(name) for name in from_]
  vers = await asyncio.gather(*entries)
  versdict = {str(i+1): v for i, v in enumerate(vers)}
  return t.substitute(versdict)
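Because idpattern is overridden to digits, the template placeholders are positional: $1, $2, ... refer to the versions of the entries listed in 'from', in order. A tiny sketch of the substitution step in isolation:

from string import Template

class CombineFormat(Template):
  idpattern = '[0-9]+'

t = CombineFormat('$1-$2')
print(t.substitute({'1': '8.2', '2': '3'}))  # 8.2-3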
164 nvchecker_source/container.py Normal file
@@ -0,0 +1,164 @@
# MIT licensed
# Copyright (c) 2020 Chih-Hsuan Yen <yan12125 at gmail dot com>

from typing import Dict, List, NamedTuple, Optional, Tuple
from urllib.request import parse_http_list
from urllib.parse import urljoin
import json

from nvchecker.api import session, HTTPError

class AuthInfo(NamedTuple):
  service: Optional[str]
  realm: str

def parse_www_authenticate_header(header: str) -> Tuple[str, Dict[str, str]]:
  '''
  Parse the WWW-Authenticate header used in OAuth2 authentication for container
  registries. This is NOT RFC-compliant!

  Simplified from http.parse_www_authenticate_header in Werkzeug (BSD license)
  '''
  auth_type, auth_info = header.split(None, 1)
  result = {}
  for item in parse_http_list(auth_info):
    name, value = item.split("=", 1)
    if value[:1] == value[-1:] == '"':
      value = value[1:-1]
    result[name] = value
  return auth_type, result

# Inspired by https://stackoverflow.com/a/51921869
# Reference: https://github.com/containers/image/blob/v5.6.0/docker/docker_client.go

class UnsupportedAuthenticationError(NotImplementedError):
  def __init__(self):
    super().__init__('Only Bearer authentication supported for now')

async def get_registry_auth_info(registry_host: str) -> AuthInfo:
  auth_service = auth_realm = None

  try:
    await session.get(f'https://{registry_host}/v2/')
    raise UnsupportedAuthenticationError # No authentication needed
  except HTTPError as e:
    if e.code != 401:
      raise

    auth_type, auth_info = parse_www_authenticate_header(e.response.headers['WWW-Authenticate'])
    if auth_type.lower() != 'bearer':
      raise UnsupportedAuthenticationError

    # Although 'service' is needed as per https://docs.docker.com/registry/spec/auth/token/,
    # ghcr.io (GitHub container registry) does not provide it
    auth_service = auth_info.get('service')
    auth_realm = auth_info['realm']

  return AuthInfo(auth_service, auth_realm)

async def get_container_tags(info: Tuple[str, str, AuthInfo]) -> List[str]:
  image_path, registry_host, auth_info = info
  token = await get_auth_token(auth_info, image_path)
  tags = []
  url = f'https://{registry_host}/v2/{image_path}/tags/list'

  while True:
    res = await session.get(url, headers={
      'Authorization': f'Bearer {token}',
      'Accept': 'application/json',
    })
    tags += res.json()['tags']
    link = res.headers.get('Link')
    if link is None:
      break
    else:
      url = urljoin(url, parse_next_link(link))

  return tags


async def get_auth_token(auth_info, image_path):
  auth_params = {
    'scope': f'repository:{image_path}:pull',
  }
  if auth_info.service:
    auth_params['service'] = auth_info.service
  res = await session.get(auth_info.realm, params=auth_params)
  token = res.json()['token']
  return token


def parse_next_link(value: str) -> str:
  ending = '>; rel="next"'
  if value.endswith(ending):
    return value[1:-len(ending)]
  else:
    raise ValueError(value)


async def get_container_tag_update_time(info: Tuple[str, str, str, AuthInfo]):
  '''
  Find the update time of a container tag.

  In fact, it's the creation time of the image ID referred to by the tag. The tag itself does not have any update time.
  '''
  image_path, image_tag, registry_host, auth_info = info
  token = await get_auth_token(auth_info, image_path)

  # HTTP headers
  headers = {
    'Authorization': f'Bearer {token}',
    # Prefer Image Manifest Version 2, Schema 2: https://distribution.github.io/distribution/spec/manifest-v2-2/
    'Accept': ', '.join([
      'application/vnd.oci.image.manifest.v1+json',
      'application/vnd.oci.image.index.v1+json',
      'application/vnd.docker.distribution.manifest.v2+json',
      'application/vnd.docker.distribution.manifest.list.v2+json',
      'application/json',
    ]),
  }

  # Get the tag manifest
  url = f'https://{registry_host}/v2/{image_path}/manifests/{image_tag}'
  res = await session.get(url, headers=headers)
  data = res.json()
  # Schema 1 returns the creation time in the response
  if data['schemaVersion'] == 1:
    return json.loads(data['history'][0]['v1Compatibility'])['created']

  # For schema 2, we have to fetch the config's blob.
  # For multi-arch images, multiple manifests are bound to the same tag. We should choose one and then request
  # that manifest's detail.
  if data.get('manifests'):
    # It's quite hard to find the manifest matching the current CPU architecture and system.
    # For now we just choose the first one; it should probably work for most cases.
    image_digest = data['manifests'][0]['digest']
    url = f'https://{registry_host}/v2/{image_path}/manifests/{image_digest}'
    res = await session.get(url, headers=headers)
    data = res.json()

  digest = data['config']['digest']
  url = f'https://{registry_host}/v2/{image_path}/blobs/{digest}'
  res = await session.get(url, headers=headers)
  data = res.json()
  return data['created']


async def get_version(name, conf, *, cache, **kwargs):
  image_path = conf.get('container', name)
  image_tag = None
  # the image tag is optional
  if ':' in image_path:
    image_path, image_tag = image_path.split(':', 1)
  registry_host = conf.get('registry', 'docker.io')
  if registry_host == 'docker.io':
    registry_host = 'registry-1.docker.io'

  auth_info = await cache.get(registry_host, get_registry_auth_info)

  # if a tag is given, return the tag's update time, otherwise return the image's tag list
  if image_tag:
    key = image_path, image_tag, registry_host, auth_info
    return await cache.get(key, get_container_tag_update_time)
  key = image_path, registry_host, auth_info
  return await cache.get(key, get_container_tags)
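A quick sketch of the WWW-Authenticate parsing above, using a Docker-Hub-style header value:

from nvchecker_source.container import parse_www_authenticate_header

auth_type, info = parse_www_authenticate_header(
  'Bearer realm="https://auth.docker.io/token",service="registry.docker.io"'
)
print(auth_type)        # Bearer
print(info['realm'])    # https://auth.docker.io/token
print(info['service'])  # registry.docker.io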
15 nvchecker_source/cpan.py Normal file
@@ -0,0 +1,15 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from nvchecker.api import RichResult

# Using metacpan
CPAN_URL = 'https://fastapi.metacpan.org/release/%s'

async def get_version(name, conf, *, cache, **kwargs):
  key = conf.get('cpan', name)
  data = await cache.get_json(CPAN_URL % key)
  return RichResult(
    version = str(data['version']),
    url = f'https://metacpan.org/release/{data["author"]}/{data["name"]}',
  )
nvchecker_source/cran.py (new file, 29 lines)
@@ -0,0 +1,29 @@
# MIT licensed
# Copyright (c) 2022 Pekka Ristola <pekkarr [at] protonmail [dot] com>, et al.

from nvchecker.api import session, RichResult, GetVersionError

CRAN_URL = 'https://cran.r-project.org/package=%s/DESCRIPTION'
VERSION_FIELD = 'Version: '

async def request(pkg):
  url = CRAN_URL % pkg
  res = await session.get(url)
  return res.body.decode('utf-8', errors='ignore')

async def get_version(name, conf, *, cache, **kwargs):
  package = conf.get('cran', name)

  desc = await cache.get(package, request)

  for line in desc.splitlines():
    if line.startswith(VERSION_FIELD):
      version = line[len(VERSION_FIELD):]
      break
  else:
    raise GetVersionError('Invalid DESCRIPTION file')

  return RichResult(
    version = version,
    url = f'https://cran.r-project.org/web/packages/{package}/',
  )
nvchecker_source/cratesio.py (new file, 40 lines)
@@ -0,0 +1,40 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import re

import structlog

from nvchecker.api import RichResult

logger = structlog.get_logger(logger_name=__name__)


API_URL = 'https://crates.io/api/v1/crates/%s'
# https://semver.org/#is-there-a-suggested-regular-expression-regex-to-check-a-semver-string
VERSION_PATTERN = r'^(?P<major>0|[1-9]\d*)\.(?P<minor>0|[1-9]\d*)\.(?P<patch>0|[1-9]\d*)(?:-(?P<prerelease>(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*)(?:\.(?:0|[1-9]\d*|\d*[a-zA-Z-][0-9a-zA-Z-]*))*))?(?:\+(?P<buildmetadata>[0-9a-zA-Z-]+(?:\.[0-9a-zA-Z-]+)*))?$'


async def get_version(name, conf, *, cache, **kwargs):
  name = conf.get('cratesio') or name
  use_pre_release = conf.get('use_pre_release', False)
  data = await cache.get_json(API_URL % name)
  results = []
  for v in data['versions']:
    if v['yanked']:
      continue
    version = v['num']
    match = re.fullmatch(VERSION_PATTERN, version)
    if match is None:
      logger.warning('ignoring invalid version', version=version)
      continue
    if not use_pre_release and match.group('prerelease'):
      continue
    results.append(
      RichResult(
        version=version,
        url=f'https://crates.io/crates/{name}/{version}',
      )
    )

  return results
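A hypothetical entry for this source, assuming the sample_config.toml conventions below; yanked and non-semver versions are skipped regardless of the options:

[tokio]
source = "cratesio"
cratesio = "tokio"
use_pre_release = false  # default; set true to include semver pre-releases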
nvchecker_source/debianpkg.py (new file, 28 lines)
@@ -0,0 +1,28 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.

from nvchecker.api import RichResult, GetVersionError

URL = 'https://sources.debian.org/api/src/%(pkgname)s/?suite=%(suite)s'

async def get_version(name, conf, *, cache, **kwargs):
  pkg = conf.get('debianpkg') or name
  strip_release = conf.get('strip_release', False)
  suite = conf.get('suite') or "sid"
  url = URL % {"pkgname": pkg, "suite": suite}
  data = await cache.get_json(url)

  if not data.get('versions'):
    raise GetVersionError('Debian package not found')

  r = data['versions'][0]
  if strip_release:
    version = r['version'].split("-")[0]
  else:
    version = r['version']

  return RichResult(
    version = version,
    url = f'https://sources.debian.org/src/{data["package"]}/{r["version"]}/',
  )
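An illustrative entry for this source; per the code above, 'suite' defaults to "sid" and 'strip_release' drops the Debian revision suffix:

[fcitx5]
source = "debianpkg"
debianpkg = "fcitx5"     # hypothetical package
suite = "sid"            # optional
strip_release = true     # optional; keep only the upstream part of the version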
nvchecker_source/gems.py (new file, 16 lines)
@@ -0,0 +1,16 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from nvchecker.api import RichResult

GEMS_URL = 'https://rubygems.org/api/v1/versions/%s.json'

async def get_version(name, conf, *, cache, **kwargs):
  key = conf.get('gems', name)
  data = await cache.get_json(GEMS_URL % key)
  return [
    RichResult(
      version = item['number'],
      url = f'https://rubygems.org/gems/{key}/versions/{item["number"]}',
    ) for item in data
  ]
nvchecker_source/git.py (new file, 41 lines)
@@ -0,0 +1,41 @@
# MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.

from .cmd import run_cmd

from nvchecker.api import RichResult

async def get_version(
  name, conf, *, cache, keymanager=None
):
  git = conf['git']

  use_commit = conf.get('use_commit', False)
  if use_commit:
    ref = conf.get('branch')
    if ref is None:
      ref = 'HEAD'
      gitref = None
    else:
      ref = 'refs/heads/' + ref
      gitref = ref
    cmd = f"git ls-remote {git} {ref}"
    data = await cache.get(cmd, run_cmd)
    version = data.split(None, 1)[0]
    return RichResult(
      version = version,
      revision = version,
      gitref = gitref,
    )
  else:
    cmd = f"git ls-remote --tags --refs {git}"
    data = await cache.get(cmd, run_cmd)
    versions = []
    for line in data.splitlines():
      revision, version = line.split("\trefs/tags/", 1)
      versions.append(RichResult(
        version = version,
        revision = revision,
        gitref = f"refs/tags/{version}",
      ))
    return versions
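A sketch of the two modes this source supports, with hypothetical entry names; 'git', 'use_commit', and 'branch' are the keys read above:

[nvchecker-tags]
source = "git"
git = "https://github.com/lilydjwg/nvchecker"  # lists refs/tags by default

[nvchecker-head]
source = "git"
git = "https://github.com/lilydjwg/nvchecker"
use_commit = true   # returns the commit of 'branch' (or HEAD if unset)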
nvchecker_source/gitea.py (new file, 57 lines)
@@ -0,0 +1,57 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from __future__ import annotations

import urllib.parse

GITEA_URL = 'https://%s/api/v1/repos/%s/commits'
GITEA_MAX_TAG = 'https://%s/api/v1/repos/%s/tags'

from nvchecker.api import (
  VersionResult, RichResult, Entry,
  AsyncCache, KeyManager,
)

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
) -> VersionResult:
  repo = urllib.parse.quote(conf['gitea'])
  br = conf.get('branch')
  host = conf.get('host', 'gitea.com')
  use_max_tag = conf.get('use_max_tag', False)

  if use_max_tag:
    url = GITEA_MAX_TAG % (host, repo)
  else:
    url = GITEA_URL % (host, repo)
    if br:
      url += '?sha=' + br

  # Load token from config
  token = conf.get('token')
  # Load token from keyman
  if token is None:
    token = keymanager.get_key(host.lower(), 'gitea_' + host.lower())

  # Set private token if token exists.
  headers = {}
  if token:
    headers["Authorization"] = f'token {token}'

  data = await cache.get_json(url, headers = headers)
  if use_max_tag:
    return [
      RichResult(
        version = tag['name'],
        revision = tag['id'],
        url = f'https://{host}/{conf["gitea"]}/releases/tag/{tag["name"]}',
      ) for tag in data
    ]
  else:
    return RichResult(
      version = data[0]['commit']['committer']['date'],
      revision = data[0]['sha'],
      url = data[0]['html_url'],
    )
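An illustrative entry; 'host' defaults to gitea.com, and a token may come from the config or from the keyfile under the key name gitea_<host>, as loaded above:

[some-project]
source = "gitea"
gitea = "owner/repo"     # hypothetical repository
use_max_tag = true       # otherwise the latest commit date is used as the version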
nvchecker_source/github.py (new file, 287 lines)
@@ -0,0 +1,287 @@
# MIT licensed
# Copyright (c) 2013-2020, 2024 lilydjwg <lilydjwg@gmail.com>, et al.

import time
from urllib.parse import urlencode
from typing import List, Tuple, Union, Optional
import asyncio

import structlog

from nvchecker.api import (
  VersionResult, Entry, AsyncCache, KeyManager,
  HTTPError, session, RichResult, GetVersionError,
)

logger = structlog.get_logger(logger_name=__name__)
ALLOW_REQUEST = None
RATE_LIMITED_ERROR = False

GITHUB_URL = 'https://api.%s/repos/%s/commits'
GITHUB_LATEST_RELEASE = 'https://api.%s/repos/%s/releases/latest'
# https://developer.github.com/v3/git/refs/#get-all-references
GITHUB_MAX_TAG = 'https://api.%s/repos/%s/git/refs/tags'
GITHUB_MAX_RELEASE = 'https://api.%s/repos/%s/releases'
GITHUB_GRAPHQL_URL = 'https://api.%s/graphql'

async def get_version(name, conf, **kwargs):
  global RATE_LIMITED_ERROR, ALLOW_REQUEST

  if RATE_LIMITED_ERROR:
    raise RuntimeError('rate limited')

  if ALLOW_REQUEST is None:
    ALLOW_REQUEST = asyncio.Event()
    ALLOW_REQUEST.set()

  for _ in range(2): # retry once
    try:
      await ALLOW_REQUEST.wait()
      return await get_version_real(name, conf, **kwargs)
    except HTTPError as e:
      if e.code in [403, 429]:
        if n := check_ratelimit(e, name):
          ALLOW_REQUEST.clear()
          await asyncio.sleep(n+1)
          ALLOW_REQUEST.set()
          continue
        RATE_LIMITED_ERROR = True
      raise

QUERY_LATEST_TAG = '''
{{
  repository(name: "{name}", owner: "{owner}") {{
    refs(refPrefix: "refs/tags/", first: 1,
         query: "{query}",
         orderBy: {{field: TAG_COMMIT_DATE, direction: DESC}}) {{
      edges {{
        node {{
          name
          target {{
            oid
          }}
        }}
      }}
    }}
  }}
}}
'''

QUERY_LATEST_RELEASE_WITH_PRERELEASES = '''
{{
  repository(name: "{name}", owner: "{owner}") {{
    releases(first: 1, orderBy: {{field: CREATED_AT, direction: DESC}}) {{
      edges {{
        node {{
          name
          url
          tag {{
            name
          }}
          tagCommit {{
            oid
          }}
        }}
      }}
    }}
  }}
}}
'''

async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult:
  host, repo, query, token = key
  owner, reponame = repo.split('/')
  headers = {
    'Authorization': f'bearer {token}',
    'Content-Type': 'application/json',
  }
  q = QUERY_LATEST_TAG.format(
    owner = owner,
    name = reponame,
    query = query,
  )

  res = await session.post(
    GITHUB_GRAPHQL_URL % host,
    headers = headers,
    json = {'query': q},
  )
  j = res.json()

  refs = j['data']['repository']['refs']['edges']
  if not refs:
    raise GetVersionError('no tag found')

  version = refs[0]['node']['name']
  revision = refs[0]['node']['target']['oid']
  return RichResult(
    version = version,
    gitref = f"refs/tags/{version}",
    revision = revision,
    url = f'https://github.com/{repo}/releases/tag/{version}',
  )

async def get_latest_release_with_prereleases(key: Tuple[str, str, str, str]) -> RichResult:
  host, repo, token, use_release_name = key
  owner, reponame = repo.split('/')
  headers = {
    'Authorization': f'bearer {token}',
    'Content-Type': 'application/json',
  }
  q = QUERY_LATEST_RELEASE_WITH_PRERELEASES.format(
    owner = owner,
    name = reponame,
  )

  res = await session.post(
    GITHUB_GRAPHQL_URL % host,
    headers = headers,
    json = {'query': q},
  )
  j = res.json()

  refs = j['data']['repository']['releases']['edges']
  if not refs:
    raise GetVersionError('no release found')

  tag_name = refs[0]['node']['tag']['name']
  if use_release_name:
    version = refs[0]['node']['name']
  else:
    version = tag_name

  return RichResult(
    version = version,
    gitref = f"refs/tags/{tag_name}",
    revision = refs[0]['node']['tagCommit']['oid'],
    url = refs[0]['node']['url'],
  )

async def get_version_real(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  repo = conf['github']
  host = conf.get('host', "github.com")

  # Load token from config
  token = conf.get('token')
  # Load token from keyman
  if token is None:
    token = keymanager.get_key(host.lower(), 'github')

  use_latest_tag = conf.get('use_latest_tag', False)
  if use_latest_tag:
    if not token:
      raise GetVersionError('token not given but it is required')

    query = conf.get('query', '')
    return await cache.get((host, repo, query, token), get_latest_tag) # type: ignore

  use_latest_release = conf.get('use_latest_release', False)
  include_prereleases = conf.get('include_prereleases', False)
  use_release_name = conf.get('use_release_name', False)
  if use_latest_release and include_prereleases:
    if not token:
      raise GetVersionError('token not given but it is required')

    return await cache.get(
      (host, repo, token, use_release_name),
      get_latest_release_with_prereleases) # type: ignore

  br = conf.get('branch')
  path = conf.get('path')
  use_max_tag = conf.get('use_max_tag', False)
  use_max_release = conf.get('use_max_release', False)
  if use_latest_release:
    url = GITHUB_LATEST_RELEASE % (host, repo)
  elif use_max_tag:
    url = GITHUB_MAX_TAG % (host, repo)
  elif use_max_release:
    url = GITHUB_MAX_RELEASE % (host, repo)
  else:
    url = GITHUB_URL % (host, repo)
    parameters = {}
    if br:
      parameters['sha'] = br
    if path:
      parameters['path'] = path
    url += '?' + urlencode(parameters)
  headers = {
    'Accept': 'application/vnd.github.quicksilver-preview+json',
  }
  if token:
    headers['Authorization'] = f'token {token}'

  data = await cache.get_json(url, headers = headers)

  if use_max_tag:
    tags: List[Union[str, RichResult]] = [
      RichResult(
        version = ref['ref'].split('/', 2)[-1],
        gitref = ref['ref'],
        revision = ref['object']['sha'],
        url = f'https://github.com/{repo}/releases/tag/{ref["ref"].split("/", 2)[-1]}',
      ) for ref in data
    ]
    if not tags:
      raise GetVersionError('No tag found in upstream repository.')
    return tags

  if use_max_release:
    releases: List[Union[str, RichResult]] = [
      RichResult(
        version = ref['name'] if use_release_name else ref['tag_name'],
        gitref = f"refs/tags/{ref['tag_name']}",
        url = ref['html_url'],
      ) for ref in data if include_prereleases or not ref['prerelease']
    ]
    if not releases:
      raise GetVersionError('No release found in upstream repository.')
    return releases

  if use_latest_release:
    if 'tag_name' not in data:
      raise GetVersionError('No release found in upstream repository.')

    if use_release_name:
      version = data['name']
    else:
      version = data['tag_name']

    return RichResult(
      version = version,
      gitref = f"refs/tags/{data['tag_name']}",
      url = data['html_url'],
    )

  else:
    return RichResult(
      # YYYYMMDD.HHMMSS
      version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'),
      revision = data[0]['sha'],
      url = data[0]['html_url'],
    )

def check_ratelimit(exc: HTTPError, name: str) -> Optional[int]:
  res = exc.response
  if not res:
    raise exc

  if v := res.headers.get('retry-after'):
    n = int(v)
    logger.warning('retry-after', n=n)
    return n

  # default -1 is used to re-raise the exception
  n = int(res.headers.get('X-RateLimit-Remaining', -1))
  if n == 0:
    reset = int(res.headers.get('X-RateLimit-Reset'))
    logger.error(f'rate limited, resetting at {time.ctime(reset)}. '
                 'Or get an API token to increase the allowance if not yet',
                 name = name,
                 reset = reset)
    return None

  raise exc
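A sketch of common configurations for this source; all option names below are the ones read in get_version_real above, and the repository choices are illustrative:

[nvchecker-release]
source = "github"
github = "lilydjwg/nvchecker"
use_latest_release = true

[nvchecker-tag]
source = "github"
github = "lilydjwg/nvchecker"
use_max_tag = true
token = "xxx"            # optional here, but required for use_latest_tag (GraphQL)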
nvchecker_source/gitlab.py (new file, 80 lines)
@@ -0,0 +1,80 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import urllib.parse

import structlog

from nvchecker.api import (
  VersionResult, RichResult, Entry,
  AsyncCache, KeyManager, TemporaryError,
)

GITLAB_URL = 'https://%s/api/v4/projects/%s/repository/commits'
GITLAB_MAX_TAG = 'https://%s/api/v4/projects/%s/repository/tags'

logger = structlog.get_logger(logger_name=__name__)

async def get_version(name, conf, **kwargs):
  try:
    return await get_version_real(name, conf, **kwargs)
  except TemporaryError as e:
    check_ratelimit(e, name)

async def get_version_real(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  repo = urllib.parse.quote_plus(conf['gitlab'])
  br = conf.get('branch')
  host = conf.get('host', "gitlab.com")
  use_max_tag = conf.get('use_max_tag', False)

  if use_max_tag:
    url = GITLAB_MAX_TAG % (host, repo)
  else:
    url = GITLAB_URL % (host, repo)
    if br:
      url += '?ref_name=%s' % br

  # Load token from config
  token = conf.get('token')
  # Load token from keyman
  if token is None:
    token = keymanager.get_key(host.lower(), 'gitlab_' + host.lower())

  # Set private token if token exists.
  headers = {}
  if token:
    headers["PRIVATE-TOKEN"] = token

  data = await cache.get_json(url, headers = headers)
  if use_max_tag:
    return [
      RichResult(
        version = tag['name'],
        revision = tag['commit']['id'],
        url = f'https://{host}/{conf["gitlab"]}/-/tags/{tag["name"]}',
      ) for tag in data
    ]
  else:
    return RichResult(
      version = data[0]['created_at'].split('T', 1)[0].replace('-', ''),
      revision = data[0]['id'],
      url = data[0]['web_url'],
    )

def check_ratelimit(exc, name):
  res = exc.response
  if not res:
    raise

  # default -1 is used to re-raise the exception
  n = int(res.headers.get('RateLimit-Remaining', -1))
  if n == 0:
    logger.error('gitlab rate limited. Wait some time '
                 'or get an API token to increase the allowance if not yet',
                 name = name)
  else:
    raise
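An illustrative entry; 'host' allows self-hosted instances, and the keyfile token key is gitlab_<host> per the code above:

[some-project]
source = "gitlab"
gitlab = "group/project"   # hypothetical project path
host = "gitlab.com"        # optional
use_max_tag = true         # otherwise the latest commit date (YYYYMMDD) is the version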
nvchecker_source/go.py (new file, 40 lines)
@@ -0,0 +1,40 @@
# MIT licensed
# Copyright (c) 2024 bgme <i@bgme.me>.

from lxml import html

from nvchecker.api import (
  RichResult, Entry, AsyncCache, KeyManager,
  session, GetVersionError,
)

GO_PKG_URL = 'https://pkg.go.dev/{pkg}?tab=versions'
GO_PKG_VERSION_URL = 'https://pkg.go.dev/{pkg}@{version}'


async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> RichResult:
  key = tuple(sorted(conf.items()))
  return await cache.get(key, get_version_impl)


async def get_version_impl(info) -> RichResult:
  conf = dict(info)
  pkg_name = conf.get('go')

  url = GO_PKG_URL.format(pkg=pkg_name)
  res = await session.get(url)
  doc = html.fromstring(res.body.decode())

  elements = doc.xpath("//div[@class='Version-tag']/a/text()")
  try:
    version = elements[0] # type: ignore
    return RichResult(
      version = version, # type: ignore
      url = GO_PKG_VERSION_URL.format(pkg=pkg_name, version=version),
    )
  except IndexError:
    raise GetVersionError("parse error", pkg_name=pkg_name)
nvchecker_source/hackage.py (new file, 15 lines)
@@ -0,0 +1,15 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from nvchecker.api import RichResult

HACKAGE_URL = 'https://hackage.haskell.org/package/%s/preferred.json'

async def get_version(name, conf, *, cache, **kwargs):
  key = conf.get('hackage', name)
  data = await cache.get_json(HACKAGE_URL % key)
  version = data['normal-version'][0]
  return RichResult(
    version = version,
    url = f'https://hackage.haskell.org/package/{key}-{version}',
  )
nvchecker_source/htmlparser.py (new file, 41 lines)
@@ -0,0 +1,41 @@
# MIT licensed
# Copyright (c) 2020 Ypsilik <tt2laurent.maud@gmail.com>, et al.
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from lxml import html, etree

from nvchecker.api import session, GetVersionError

async def get_version(name, conf, *, cache, **kwargs):
  key = tuple(sorted(conf.items()))
  return await cache.get(key, get_version_impl)

async def get_version_impl(info):
  conf = dict(info)

  encoding = conf.get('encoding')
  parser = html.HTMLParser(encoding=encoding)
  data = conf.get('post_data')
  if data is None:
    res = await session.get(conf['url'])
  else:
    res = await session.post(conf['url'], body = data, headers = {
      'Content-Type': conf.get('post_data_type', 'application/x-www-form-urlencoded')
    })
  doc = html.fromstring(res.body, base_url=conf['url'], parser=parser)

  try:
    els = doc.xpath(conf.get('xpath'))
  except ValueError:
    if not conf.get('missing_ok', False):
      raise GetVersionError('version string not found.')
  except etree.XPathEvalError as e:
    raise GetVersionError('bad xpath', exc_info=e)

  version = [
    str(el)
    if isinstance(el, str)
    else str(el.text_content())
    for el in els
  ]
  return version
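A minimal sketch for this source; 'url' and 'xpath' are required, the other keys mirror the conf.get() calls above, and all values here are illustrative:

[some-software]
source = "htmlparser"
url = "https://example.com/downloads"
xpath = "//a[@class='version']/text()"
encoding = "utf-8"        # optional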
nvchecker_source/httpheader.py (new file, 42 lines)
@@ -0,0 +1,42 @@
# MIT licensed
# Copyright (c) 2021 lilydjwg <lilydjwg@gmail.com>, et al.

import re

from nvchecker.api import session, GetVersionError

async def get_version(name, conf, *, cache, **kwargs):
  key = tuple(sorted(conf.items()))
  return await cache.get(key, get_version_impl)

async def get_version_impl(info):
  conf = dict(info)
  url = conf['url']
  header = conf.get('header', 'Location')
  follow_redirects = conf.get('follow_redirects', False)
  method = conf.get('method', 'HEAD')

  try:
    regex = re.compile(conf['regex'])
  except re.error as e:
    raise GetVersionError('bad regex', exc_info=e)

  res = await session.request(
    url,
    method = method,
    follow_redirects = follow_redirects,
  )

  header_value = res.headers.get(header)
  if not header_value:
    raise GetVersionError(
      'header not found or is empty',
      header = header,
      value = header_value,
    )

  try:
    version = regex.findall(header_value)
  except ValueError:
    raise GetVersionError('version string not found.')
  return version
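An illustrative entry for this source, e.g. extracting a version from a redirect target; the URL and regex are hypothetical:

[some-download]
source = "httpheader"
url = "https://example.com/download/latest"
regex = 'v([\d.]+)\.tar\.gz'
header = "Location"        # default
follow_redirects = false   # default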
nvchecker_source/jq.py (new file, 42 lines)
@@ -0,0 +1,42 @@
# MIT licensed
# Copyright (c) 2024 Rocket Aaron <i@rocka.me>, et al.

import json
import jq

from nvchecker.api import session, GetVersionError

async def get_version(name, conf, *, cache, **kwargs):
  key = tuple(sorted(conf.items()))
  return await cache.get(key, get_version_impl)

async def get_version_impl(info):
  conf = dict(info)

  try:
    program = jq.compile(conf.get('filter', '.'))
  except ValueError as e:
    raise GetVersionError('bad jq filter', exc_info=e)

  data = conf.get('post_data')
  if data is None:
    res = await session.get(conf['url'])
  else:
    res = await session.post(conf['url'], body = data, headers = {
      'Content-Type': conf.get('post_data_type', 'application/json')
    })

  try:
    obj = json.loads(res.body)
  except json.decoder.JSONDecodeError as e:
    raise GetVersionError('bad json string', exc_info=e)

  try:
    version = program.input(obj).all()
    if version == [None] and not conf.get('missing_ok', False):
      raise GetVersionError('version string not found.')
    version = [str(v) for v in version]
  except ValueError as e:
    raise GetVersionError('failed to filter json', exc_info=e)

  return version
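A hypothetical entry for this source, filtering a JSON endpoint with a jq expression; the URL and filter are illustrative:

[some-api]
source = "jq"
url = "https://example.com/api/release.json"
filter = ".tag_name"       # defaults to "." if unset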
nvchecker_source/launchpad.py (new file, 20 lines)
@@ -0,0 +1,20 @@
# MIT Licensed
# Copyright (c) 2024 Bert Peters <bertptrs@archlinux.org>, et al.
from __future__ import annotations
from nvchecker.api import AsyncCache, Entry, RichResult

PROJECT_INFO_URL = "https://api.launchpad.net/1.0/{launchpad}"

async def get_version(name: str, conf: Entry, *, cache: AsyncCache, **kwargs):
  launchpad = conf["launchpad"]

  project_data = await cache.get_json(PROJECT_INFO_URL.format(launchpad=launchpad))
  data = await cache.get_json(project_data['releases_collection_link'])

  return [
    RichResult(version=entry["version"], url=entry["web_link"])
    for entry in data["entries"]
  ]
nvchecker_source/manual.py (new file, 5 lines)
@@ -0,0 +1,5 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

async def get_version(name, conf, **kwargs):
  return str(conf.get('manual')).strip() or None
nvchecker_source/mercurial.py (new file, 10 lines)
@@ -0,0 +1,10 @@
# MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.

async def get_version(name, conf, *, cache, **kwargs):
  url = conf['mercurial'] + '/json-tags'

  data = await cache.get_json(url)

  version = [tag['tag'] for tag in data['tags']]
  return version
nvchecker_source/none.py (new file, 16 lines)
@@ -0,0 +1,16 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.

from __future__ import annotations

from nvchecker.api import (
  BaseWorker, GetVersionError, RawResult,
)

class Worker(BaseWorker):
  async def run(self) -> None:
    exc = GetVersionError('no source specified')
    async with self.task_sem:
      for name, conf in self.tasks:
        await self.result_q.put(
          RawResult(name, exc, conf))
nvchecker_source/npm.py (new file, 38 lines)
@@ -0,0 +1,38 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import json
import re
from nvchecker.api import session, RichResult

NPM_URL = 'https://registry.npmjs.org/%s'

def configure(config):
  global NPM_URL
  url = config.get('registry')
  if url:
    NPM_URL = f'{url.rstrip("/")}/%s'

async def get_first_1k(url):
  headers = {
    "Accept": "application/vnd.npm.install-v1+json",
    "Range": "bytes=0-1023",
  }
  res = await session.get(url, headers=headers)
  return res.body

async def get_version(name, conf, *, cache, **kwargs):
  key = conf.get('npm', name)
  data = await cache.get(NPM_URL % key, get_first_1k)

  dist_tags = json.loads(re.search(b'"dist-tags":({.*?})', data).group(1))
  version = dist_tags['latest']

  # There is no standardised URL scheme, so we only return a URL for the default registry
  if NPM_URL.startswith('https://registry.npmjs.org/'):
    return RichResult(
      version = version,
      url = f'https://www.npmjs.com/package/{key}/v/{version}',
    )
  else:
    return version
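An illustrative entry; note that only the first kilobyte of the registry document is fetched above, which is enough to read dist-tags. The registry override comes from the source-level configuration passed to configure(); its exact table path is not shown in this diff, so treat that part as an assumption:

[left-pad]
source = "npm"
npm = "left-pad"           # hypothetical package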
nvchecker_source/opam.py (new file, 71 lines)
@@ -0,0 +1,71 @@
# MIT licensed
# Copyright (c) 2024 Daniel Peukert <daniel@peukert.cc>, et al.

import asyncio
from io import BytesIO
import tarfile
from typing import List

from nvchecker.api import (
  session, VersionResult,
  Entry, AsyncCache,
  KeyManager, RichResult
)

OPAM_REPO_INDEX_URL = "%s/index.tar.gz"
OPAM_VERSION_PATH_PREFIX = "packages/%s/%s."
OPAM_VERSION_PATH_SUFFIX = "/opam"

OPAM_DEFAULT_REPO = 'https://opam.ocaml.org'
OPAM_DEFAULT_REPO_VERSION_URL = "%s/packages/%s/%s.%s"

def _decompress_and_list_files(data: bytes) -> List[str]:
  # Convert the bytes to a file object and get a list of files
  archive = tarfile.open(mode='r', fileobj=BytesIO(data))
  return archive.getnames()

async def get_files(url: str) -> List[str]:
  # Download the file and get its contents
  res = await session.get(url)
  data = res.body

  # Get the file list of the archive
  loop = asyncio.get_running_loop()
  return await loop.run_in_executor(None, _decompress_and_list_files, data)

async def get_package_versions(files: List[str], pkg: str) -> List[str]:
  # Prepare the filename prefix based on the package name
  prefix = OPAM_VERSION_PATH_PREFIX % (pkg, pkg)

  # Only keep opam files that are relevant to the package we're working with
  filtered_files = []

  for filename in files:
    if filename.startswith(prefix) and filename.endswith(OPAM_VERSION_PATH_SUFFIX):
      filtered_files.append(filename[len(prefix):-1*len(OPAM_VERSION_PATH_SUFFIX)])

  return filtered_files

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
):
  pkg = conf.get('pkg', name)
  repo = conf.get('repo', OPAM_DEFAULT_REPO).rstrip('/')

  # Get the list of files in the repo index (see https://opam.ocaml.org/doc/Manual.html#Repositories for repo structure)
  files = await cache.get(OPAM_REPO_INDEX_URL % repo, get_files) # type: ignore

  # Parse the version strings from the file names
  raw_versions = await get_package_versions(files, pkg)

  # Convert the version strings into RichResults
  versions = []
  for version in raw_versions:
    versions.append(RichResult(
      version = version,
      # There is no standardised URL scheme, so we only return a URL for the default repository
      url = OPAM_DEFAULT_REPO_VERSION_URL % (repo, pkg, pkg, version) if repo == OPAM_DEFAULT_REPO else None,
    ))
  return versions
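A sketch of an entry for this source; 'repo' may point at any opam repository, and release URLs are only generated for the default one, per the code above. The package name is illustrative:

[ocaml-lwt]
source = "opam"
pkg = "lwt"                          # hypothetical package
repo = "https://opam.ocaml.org"      # optional; the default shown here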
nvchecker_source/openvsx.py (new file, 18 lines)
@@ -0,0 +1,18 @@
# MIT licensed
# Copyright (c) 2013-2021 Th3Whit3Wolf <the.white.wolf.is.1337@gmail.com>, et al.

from nvchecker.api import RichResult

API_URL = 'https://open-vsx.org/api/%s/%s'

async def get_version(name, conf, *, cache, **kwargs):
  name = conf.get('openvsx') or name
  splitName = name.split('.')
  publisher = splitName[0]
  extension = splitName[1]
  data = await cache.get_json(API_URL % (publisher, extension))
  version = data['version']
  return RichResult(
    version = version,
    url = f'https://open-vsx.org/extension/{publisher}/{extension}/{version}',
  )
nvchecker_source/packagist.py (new file, 23 lines)
@@ -0,0 +1,23 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from nvchecker.api import RichResult

PACKAGIST_URL = 'https://packagist.org/packages/%s.json'

async def get_version(name, conf, *, cache, **kwargs):
  key = conf.get('packagist', name)
  data = await cache.get_json(PACKAGIST_URL % key)

  versions = {
    version: details
    for version, details in data["package"]['versions'].items()
    if version != "dev-master"
  }

  if len(versions):
    version = max(versions, key=lambda version: versions[version]["time"])
    return RichResult(
      version = version,
      url = f'https://packagist.org/packages/{data["package"]["name"]}#{version}',
    )
nvchecker_source/pacman.py (new file, 16 lines)
@@ -0,0 +1,16 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

from nvchecker_source import cmd

async def get_version(name, conf, **kwargs):
  referree = conf.get('pacman') or name
  c = "LANG=C pacman -Si %s | grep -F Version | awk '{print $3}' | head -n 1" % referree
  conf['cmd'] = c
  strip_release = conf.get('strip_release', False)

  version = await cmd.get_version(name, conf, **kwargs)

  if strip_release and '-' in version:
    version = version.rsplit('-', 1)[0]
  return version
nvchecker_source/pagure.py (new file, 32 lines)
@@ -0,0 +1,32 @@
# MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.

import urllib.parse

import structlog

from nvchecker.api import (
  VersionResult, RichResult, Entry, AsyncCache, KeyManager,
)

PAGURE_URL = 'https://%s/api/0/%s/git/tags?with_commits=true'

logger = structlog.get_logger(logger_name=__name__)

async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  repo = conf['pagure']
  host = conf.get('host', "pagure.io")

  url = PAGURE_URL % (host, repo)

  data = await cache.get_json(url)
  return [
    RichResult(
      version = version,
      url = f'https://{host}/{repo}/tree/{version_hash}',
    ) for version, version_hash in data["tags"].items()
  ]
nvchecker_source/pypi.py (new file, 43 lines)
@@ -0,0 +1,43 @@
# MIT licensed
# Copyright (c) 2013-2021,2023-2024 lilydjwg <lilydjwg@gmail.com>, et al.

import structlog
from packaging.version import Version, InvalidVersion

from nvchecker.api import RichResult

logger = structlog.get_logger(logger_name=__name__)

async def get_version(name, conf, *, cache, **kwargs):
  ret = []

  package = conf.get('pypi') or name
  use_pre_release = conf.get('use_pre_release', False)

  url = 'https://pypi.org/pypi/{}/json'.format(package)

  data = await cache.get_json(url)

  for version in data['releases'].keys():
    # Skip versions that are marked as yanked.
    if (vers := data['releases'][version]) and vers[0]['yanked']:
      continue

    try:
      parsed_version = Version(version)
    except InvalidVersion:
      if data['releases'][version]:
        # emit a warning if there is something under the invalid version
        # sympy has an empty "0.5.13-hg" version
        logger.warning('ignoring invalid version', version=version)
      continue

    if not use_pre_release and parsed_version.is_prerelease:
      continue

    ret.append(RichResult(
      version = version,
      url = f'https://pypi.org/project/{package}/{version}/',
    ))

  return ret
nvchecker_source/regex.py (new file, 39 lines)
@@ -0,0 +1,39 @@
# MIT licensed
# Copyright (c) 2013-2020 lilydjwg <lilydjwg@gmail.com>, et al.

import re

from nvchecker.api import session, GetVersionError

async def get_version(name, conf, *, cache, **kwargs):
  try:
    regex = re.compile(conf['regex'])
  except re.error as e:
    raise GetVersionError('bad regex', exc_info=e)
  if regex.groups > 1:
    raise GetVersionError('multi-group regex')

  key = (
    conf['url'],
    conf.get('encoding', 'latin1'),
    conf.get('post_data'),
    conf.get('post_data_type', 'application/x-www-form-urlencoded'),
  )
  body = await cache.get(key, get_url)

  versions = regex.findall(body)
  if not versions and not conf.get('missing_ok', False):
    raise GetVersionError('version string not found.')
  return versions

async def get_url(info):
  url, encoding, post_data, post_data_type = info

  if post_data is None:
    res = await session.get(url)
  else:
    res = await session.post(url, body = post_data, headers = {
      'Content-Type': post_data_type,
    })
  body = res.body.decode(encoding)
  return body
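An illustrative entry for this source (the sample_config.toml below contains a real one for ssed); the URL and pattern here are hypothetical:

[some-software]
source = "regex"
url = "https://example.com/download.html"
regex = "release-([\\d.]+)\\.tar\\.gz"
encoding = "latin1"        # default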
nvchecker_source/repology.py (new file, 33 lines)
@@ -0,0 +1,33 @@
# MIT licensed
# Copyright (c) 2019 lilydjwg <lilydjwg@gmail.com>, et al.

from nvchecker.api import RichResult, GetVersionError

API_URL = 'https://repology.org/api/v1/project/{}'

async def get_version(name, conf, *, cache, **kwargs):
  project = conf.get('repology') or name
  repo = conf.get('repo')
  subrepo = conf.get('subrepo')
  if not repo:
    raise GetVersionError('repo field is required for repology source')

  url = API_URL.format(project)
  data = await cache.get_json(url)

  pkgs = [pkg for pkg in data if pkg['repo'] == repo]
  if not pkgs:
    raise GetVersionError('package is not found', repo=repo)

  if subrepo:
    pkgs = [pkg for pkg in pkgs if pkg.get('subrepo') == subrepo]
    if not pkgs:
      raise GetVersionError('package is not found in subrepo',
                            repo=repo, subrepo=subrepo)

  return [
    RichResult(
      version = pkg['version'],
      url = f'https://repology.org/project/{project}/packages',
    ) for pkg in pkgs
  ]
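A hypothetical entry; 'repo' is mandatory here and names the repository as known to repology.org (the value below is an assumed example):

[fcitx]
source = "repology"
repology = "fcitx"
repo = "arch"              # a repository name as used by Repology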
nvchecker_source/rpmrepo.py (new file, 84 lines)
@@ -0,0 +1,84 @@
# MIT licensed
# Copyright (c) 2024 Jakub Ružička <jru@debian.org>, et al.

import asyncio
import gzip
import pathlib
import urllib
from typing import Set

import lxml.etree
from nvchecker.api import session, AsyncCache, Entry, KeyManager, VersionResult


# XML namespaces used in repodata (the namespace URLs are dead links)
NS = {
  'common': 'http://linux.duke.edu/metadata/common',
  'repo': 'http://linux.duke.edu/metadata/repo',
  'rpm': 'http://linux.duke.edu/metadata/rpm'
}


async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  repo = conf['repo']
  arch = conf.get('arch', 'binary')
  pkg = conf.get('pkg')
  if not pkg:
    pkg = conf.get('rpmrepo', name)

  repo_url = urllib.parse.urlparse(repo)
  repo_path = pathlib.PurePosixPath(repo_url.path)

  # get the url of repomd.xml
  repomd_path = repo_path / 'repodata' / 'repomd.xml'
  repomd_url = repo_url._replace(path=str(repomd_path)).geturl()
  # download repomd.xml (use cache)
  repomd_body = await cache.get(repomd_url, get_file) # type: ignore
  # parse repomd.xml
  repomd_xml = lxml.etree.fromstring(repomd_body)

  # get the url of *primary.xml.gz
  primary_element = repomd_xml.find('repo:data[@type="primary"]/repo:location', namespaces=NS)
  primary_path = repo_path / primary_element.get('href') # type: ignore
  primary_url = repo_url._replace(path=str(primary_path)).geturl()
  # download and decompress *primary.xml.gz (use cache)
  primary_body = await cache.get(primary_url, get_file_gz) # type: ignore
  # parse *primary.xml metadata
  metadata = lxml.etree.fromstring(primary_body)

  # use a set to eliminate duplication
  versions_set: Set[str] = set()
  # iterate over package metadata
  for el in metadata.findall(f'common:package[common:name="{pkg}"]', namespaces=NS):
    pkg_arch = el.findtext('common:arch', namespaces=NS)

    # filter by arch
    if arch == 'binary':
      if pkg_arch == 'src':
        continue
    elif arch != 'any':
      if pkg_arch != arch:
        continue

    version_info = el.find('common:version', namespaces=NS)
    version = version_info.get('ver') # type: ignore
    versions_set.add(version) # type: ignore

  versions = list(versions_set)
  return versions # type: ignore


async def get_file(url: str) -> bytes:
  res = await session.get(url)
  return res.body


async def get_file_gz(url: str) -> bytes:
  res = await session.get(url)
  loop = asyncio.get_running_loop()
  return await loop.run_in_executor(
    None, gzip.decompress, res.body)
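An illustrative entry for this source; 'repo' is the base URL of a repository containing repodata/repomd.xml, and the package and URL below are assumptions:

[some-rpm]
source = "rpmrepo"
pkg = "some-package"
repo = "https://example.com/fedora/releases/40/Everything/x86_64/os"
arch = "binary"            # default: any non-src arch; or a concrete arch, or "any"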
nvchecker_source/snapcraft.py (new file, 33 lines)
@@ -0,0 +1,33 @@
# MIT licensed
# Copyright (c) 2025 Maxim Slipenko <maxim@slipenko.com>, et al.

from nvchecker.api import (
  GetVersionError
)
from nvchecker.httpclient.base import HTTPError

URL = "https://api.snapcraft.io/v2/snaps/info/%(snap)s"

async def get_version(
  name: str, conf, *,
  cache, keymanager,
  **kwargs,
):
  try:
    snap = conf.get("snap")
    channel = conf.get("channel")

    result = await cache.get_json(
      URL % { "snap": snap },
      headers={
        "Snap-Device-Series": "16",
      },
    )
  except HTTPError:
    raise GetVersionError(f"Failed to request snap info for {snap}")

  for c in result['channel-map']:
    if c['channel']['name'] == channel:
      return c['version']

  raise GetVersionError(f"Failed to find version for {snap}")
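A minimal sketch for this source; both keys are read via conf.get() above, and the values are illustrative:

[firefox-snap]
source = "snapcraft"
snap = "firefox"
channel = "latest/stable"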
nvchecker_source/sparkle.py (new file, 60 lines)
@@ -0,0 +1,60 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2020 Sunlei <guizaicn@gmail.com>

from xml.etree import ElementTree

from nvchecker.api import session, RichResult

XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'
SPARKLE_NAMESPACE = 'http://www.andymatuschak.org/xml-namespaces/sparkle'

async def get_version(name, conf, *, cache, **kwargs):
  sparkle = conf['sparkle']
  release_notes_language = conf.get('release_notes_language', 'en')
  return await cache.get((sparkle, release_notes_language), get_version_impl)


async def get_version_impl(info):
  sparkle, release_notes_language = info
  res = await session.get(sparkle)
  root = ElementTree.fromstring(res.body).find('./channel/item[1]')
  item = root.find('./enclosure')

  version_string = item.get(f'{{{SPARKLE_NAMESPACE}}}shortVersionString')
  build_number = item.get(f'{{{SPARKLE_NAMESPACE}}}version')

  if (version_string and version_string.isdigit()) and (
    build_number and not build_number.isdigit()
  ):
    version_string, build_number = build_number, version_string

  version = []

  if version_string:
    version.append(version_string)
  if build_number and (build_number not in version):
    version.append(build_number)

  version_str = '-'.join(version) if version else None

  release_notes_link = None
  for release_notes in root.findall(f'./{{{SPARKLE_NAMESPACE}}}releaseNotesLink'):
    language = release_notes.get(f'{{{XML_NAMESPACE}}}lang')

    # If the release notes have no language set, store them, but keep looking for our preferred language
    if language is None:
      release_notes_link = release_notes.text.strip()

    # If the release notes match our preferred language, store them and stop looking
    if language == release_notes_language:
      release_notes_link = release_notes.text.strip()
      break

  if release_notes_link is not None:
    return RichResult(
      version = version_str,
      url = release_notes_link,
    )
  else:
    return version_str
nvchecker_source/ubuntupkg.py (new file, 48 lines)
@@ -0,0 +1,48 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2017 Felix Yan <felixonmars@archlinux.org>, et al.

from nvchecker.api import RichResult, GetVersionError

URL = 'https://api.launchpad.net/1.0/ubuntu/+archive/primary?ws.op=getPublishedSources&source_name=%s&exact_match=true'

async def get_version(name, conf, *, cache, **kwargs):
  pkg = conf.get('ubuntupkg') or name
  strip_release = conf.get('strip_release', False)
  suite = conf.get('suite')
  url = URL % pkg

  if suite:
    suite = "https://api.launchpad.net/1.0/ubuntu/" + suite

  releases = []

  while not releases:
    data = await cache.get_json(url)

    if not data.get('entries'):
      raise GetVersionError('Ubuntu package not found')

    releases = [r for r in data["entries"] if r["status"] == "Published"]

    if suite:
      releases = [r for r in releases if r["distro_series_link"] == suite]

    if "next_collection_link" not in data:
      break

    url = data["next_collection_link"]

  if not releases:
    raise GetVersionError('Ubuntu package not found')

  if strip_release:
    version = releases[0]['source_package_version'].split("-")[0]
  else:
    version = releases[0]['source_package_version']

  return RichResult(
    version = version,
    url = f'https://packages.ubuntu.com/{releases[0]["distro_series_link"].rsplit("/", 1)[-1]}/{pkg}',
  )
nvchecker_source/vsmarketplace.py (new file, 57 lines)
@@ -0,0 +1,57 @@
# MIT licensed
# Copyright (c) 2013-2021 Th3Whit3Wolf <the.white.wolf.is.1337@gmail.com>, et al.

from nvchecker.api import (
  VersionResult, Entry, AsyncCache, KeyManager,
  TemporaryError, session, RichResult, GetVersionError,
)

API_URL = 'https://marketplace.visualstudio.com/_apis/public/gallery/extensionquery'

HEADERS = {
  'Accept': 'application/json;api-version=6.1-preview.1',
  'Content-Type': 'application/json'
}

async def get_version(name: str, conf: Entry, *, cache: AsyncCache, **kwargs):
  name = conf.get('vsmarketplace') or name

  q = {
    'filters': [
      {
        'criteria': [
          {
            'filterType': 8,
            'value': 'Microsoft.VisualStudio.Code'
          },
          {
            'filterType': 7,
            'value': name
          },
          {
            'filterType': 12,
            'value': '4096'
          }
        ],
        'pageNumber': 1,
        'pageSize': 2,
        'sortBy': 0,
        'sortOrder': 0
      }
    ],
    'assetTypes': [],
    'flags': 946
  }

  res = await session.post(
    API_URL,
    headers = HEADERS,
    json = q,
  )
  j = res.json()

  version = j['results'][0]['extensions'][0]['versions'][0]['version']
  return RichResult(
    version = version,
    url = f'https://marketplace.visualstudio.com/items?itemName={name}',
  )
pyproject.toml (new file, 11 lines)
@@ -0,0 +1,11 @@
[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"

[tool.pytest.ini_options]
# addopts = -n auto
asyncio_mode = "strict"
asyncio_default_fixture_loop_scope = "session"

# build and upload
# rm -rf dist && python -m build --no-isolation && twine check dist/* && twine upload dist/*
sample_config.toml (new file, 32 lines)
@@ -0,0 +1,32 @@
[__config__]
oldver = "old_ver.json"
newver = "new_ver.json"

[google-chrome]
source = "cmd"
cmd = '''wget -qO- http://dl.google.com/linux/chrome/rpm/stable/x86_64/repodata/other.xml.gz | zgrep -A1 "google-chrome-stable" | awk -F\" '/version/ {print $4"-"$6}' '''

[fbcat]
source = "aur"

[winterpy]
source = "github"
github = "lilydjwg/winterpy"

[nvchecker]
source = "github"
github = "lilydjwg/nvchecker"

[ssed]
source = "regex"
regex = "The current version is ([\\d.]+)\\."
url = "https://sed.sourceforge.net/grabbag/ssed/"
proxy = "http://localhost:8087"

[PySide]
source = "pypi"
pypi = "nvchecker"

[test]
source = "manual"
manual = "0.1"
scripts/README.rst (new file, 1 line)
@@ -0,0 +1 @@
Additional scripts may help someone.
scripts/nvchecker-ini2toml (new executable file, 85 lines)
@@ -0,0 +1,85 @@
#!/usr/bin/python3
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.

import argparse

import configparser
import toml

_handler_precedence = (
  'github', 'aur', 'pypi', 'archpkg', 'debianpkg', 'ubuntupkg',
  'gems', 'pacman',
  'cmd', 'bitbucket', 'regex', 'manual', 'vcs',
  'cratesio', 'npm', 'hackage', 'cpan', 'gitlab', 'packagist',
  'repology', 'anitya', 'android_sdk', 'sparkle', 'gitea'
)

BOOL_KEYS = [
  'strip_release', 'use_last_modified',
  'use_latest_release', 'use_latest_tag',
  'use_max_release', 'use_max_tag', 'use_pre_release',
]

INT_KEYS = [
  'max_page',
]

def main():
  parser = argparse.ArgumentParser(description='convert 1.x ini file to 2.x toml file')
  parser.add_argument('ini', type=argparse.FileType(),
                      help='the old ini file')
  parser.add_argument('toml', type=argparse.FileType(mode='w'),
                      help='the new toml file')
  args = parser.parse_args()

  old = configparser.ConfigParser(
    dict_type=dict, allow_no_value=True, interpolation=None,
  )
  old.read_file(args.ini)

  if '__config__' in old:
    c = old['__config__']
    newconf = dict(c)
    x = newconf.pop('max_concurrent', None)
    if x is not None:
      newconf['max_concurrency'] = x
    confs = {'__config__': newconf}
  else:
    confs = {}

  for section in old.sections():
    if section == '__config__':
      continue

    conf = old[section]
    newconf = {}

    for key in _handler_precedence:
      if key not in conf:
        continue
      newconf['source'] = key
      if conf.get(key):
        newconf[key] = conf.get(key)
      break

    dconf = dict(conf)

    for k, v in dconf.items():
      if '-' in k:
        k = k.replace('-', '_')

      if k in BOOL_KEYS:
        newconf[k] = conf.getboolean(k)
      elif k in INT_KEYS:
        newconf[k] = conf.getint(k)
      elif v != '':
        newconf[k] = v

    confs[section] = newconf

  toml.dump(confs, args.toml)
  args.toml.flush()

if __name__ == '__main__':
  main()
scripts/nvchecker-notify (new executable file, 73 lines)
@@ -0,0 +1,73 @@
#!/usr/bin/env python3
# MIT licensed
# Copyright (c) 2020,2022 lilydjwg <lilydjwg@gmail.com>, et al.

'''
A simple wrapper to show desktop notifications while running nvchecker.
'''

import os
import subprocess
import json

import gi
try:
  gi.require_version('Notify', '0.8')
except ValueError:
  gi.require_version('Notify', '0.7')
from gi.repository import Notify

def get_args():
  import argparse
  parser = argparse.ArgumentParser(description='show desktop notifications while running nvchecker')
  parser.add_argument('-c', '--file',
                      metavar='FILE', type=str,
                      help='software version configuration file if not default')
  parser.add_argument('-k', '--keyfile',
                      metavar='FILE', type=str,
                      help='use specified keyfile (override the one in configuration file)')
  parser.add_argument('-t', '--tries', default=1, type=int, metavar='N',
                      help='try N times when network errors occur')
  parser.add_argument('--failures', action='store_true',
                      help='exit with code 3 if failures / errors happen during checking')

  return parser.parse_args()

def main():
  args = get_args()

  Notify.init('nvchecker')
  notif = Notify.Notification()
  updates = []

  rfd, wfd = os.pipe()
  cmd = [
    'nvchecker', '--logger', 'both', '--json-log-fd', str(wfd),
  ]
  if args.file:
    cmd.extend(['-c', args.file])
  if args.keyfile:
    cmd.extend(['-k', args.keyfile])
  if args.tries:
    cmd.extend(['-t', str(args.tries)])
  if args.failures:
    cmd.append('--failures')

  process = subprocess.Popen(cmd, pass_fds=(wfd,))
  os.close(wfd)

  output = os.fdopen(rfd)
  for l in output:
    j = json.loads(l)
    event = j['event']
    if event == 'updated':
      updates.append('%(name)s updated to version %(version)s' % j)
      notif.update('nvchecker', '\n'.join(updates))
      notif.show()

  ret = process.wait()
  if ret != 0:
    raise subprocess.CalledProcessError(ret, cmd)

if __name__ == '__main__':
  main()
scripts/nvtake.bash_completion (new file, 16 lines)
@@ -0,0 +1,16 @@
# MIT licensed
# Copyright (c) 2020 Felix Yan <felixonmars@archlinux.org>, et al.

_nvtake() {
  local cur _nvchecker_conf_mtime
  _init_completion || return

  _nvchecker_conf_mtime="$(stat -c %Y $HOME/.config/nvchecker/*)"
  if [ -z "$_nvtake_completion_cache" -o "$_nvchecker_conf_mtime" != "$_nvchecker_conf_mtime_cached" ]; then
    _nvtake_completion_cache="$(nvcmp -q)"
    _nvchecker_conf_mtime_cached="$_nvchecker_conf_mtime"
  fi

  COMPREPLY=( $(compgen -W "$_nvtake_completion_cache" -- "$cur") )
} &&
complete -F _nvtake nvtake
40
scripts/run_cached_tests
Executable file
40
scripts/run_cached_tests
Executable file
@@ -0,0 +1,40 @@
#!/bin/bash -e

mitmdump=${mitmdump:-mitmdump}

if [[ -f ~/.mitmproxy/nvdump ]]; then
  $mitmdump -S ~/.mitmproxy/nvdump -p 7890 --ignore-hosts '127\.0\.0\.1' --server-replay-reuse --server-replay-extra=forward -w newdump >mitmdump_output &
else
  $mitmdump -w ~/.mitmproxy/nvdump -p 7890 --ignore-hosts '127\.0\.0\.1' >mitmdump_output &
fi

mitm_pid=$!

on_exit () {
  kill -INT $mitm_pid

  if [[ -s newdump ]]; then
    cat newdump >> ~/.mitmproxy/nvdump
  fi

  cat mitmdump_output
}

trap on_exit EXIT

if [[ -f keyfile.toml ]]; then
  export KEYFILE=keyfile.toml
fi

for _ in {1..10}; do
  if [[ -s ~/.mitmproxy/mitmproxy-ca-cert.pem ]]; then
    break
  fi
  sleep 1
done

export SSL_CERT_FILE=$HOME/.mitmproxy/mitmproxy-ca-cert.pem
export GIT_SSL_CAINFO=$SSL_CERT_FILE
export http_proxy=http://localhost:7890 https_proxy=http://localhost:7890

pytest
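How the cache works: on the first run, when ~/.mitmproxy/nvdump does not exist, mitmdump records all HTTP(S) traffic generated by the test suite; on later runs it replays responses from that dump, forwarding only unseen requests and appending them (via newdump) so the cache grows over time. The tests are routed through the proxy by http_proxy/https_proxy, with the mitmproxy CA trusted via SSL_CERT_FILE and GIT_SSL_CAINFO. A typical invocation (assumed, from the repository root) is scripts/run_cached_tests, optionally with mitmdump=/path/to/mitmdump in the environment.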
79
setup.cfg
Normal file
@@ -0,0 +1,79 @@
# The complex upload command:
# rm -rf dist && python -m build --sdist && twine check dist/* && twine upload -s dist/*

[metadata]
name = nvchecker
version = attr: nvchecker.__version__
author = lilydjwg
author_email = lilydjwg@gmail.com
description = New version checker for software
license = MIT
keywords = new, version, build, check
url = https://github.com/lilydjwg/nvchecker
long_description = file: README.rst
long_description_content_type = text/x-rst
platforms = any

classifiers =
  Development Status :: 5 - Production/Stable
  Environment :: Console
  Intended Audience :: Developers
  Intended Audience :: System Administrators
  License :: OSI Approved :: MIT License
  Operating System :: OS Independent
  Programming Language :: Python
  Programming Language :: Python :: 3
  Programming Language :: Python :: 3 :: Only
  Programming Language :: Python :: 3.8
  Programming Language :: Python :: 3.9
  Programming Language :: Python :: 3.10
  Programming Language :: Python :: 3.11
  Programming Language :: Python :: 3.12
  Programming Language :: Python :: 3.13
  Topic :: Internet
  Topic :: Internet :: WWW/HTTP
  Topic :: Software Development
  Topic :: System :: Archiving :: Packaging
  Topic :: System :: Software Distribution
  Topic :: Utilities

[options]
zip_safe = True
python_requires = >=3.8

packages = find_namespace:
install_requires =
  tomli; python_version<"3.11"
  structlog
  platformdirs
  tornado>=6
  pycurl
scripts =
  scripts/nvchecker-ini2toml
  scripts/nvchecker-notify

[options.packages.find]
exclude = tests, build*, docs*

[options.extras_require]
vercmp =
  pyalpm
awesomeversion =
  awesomeversion
pypi =
  packaging
htmlparser =
  lxml
rpmrepo =
  lxml
jq =
  jq

[options.entry_points]
console_scripts =
  nvchecker = nvchecker.__main__:main
  nvtake = nvchecker.tools:take
  nvcmp = nvchecker.tools:cmp

[flake8]
ignore = E111, E302, E501
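Each extras group enables optional sources: for example, installing with pip install 'nvchecker[pypi,htmlparser]' (an illustrative selection) pulls in packaging and lxml so the pypi and htmlparser sources work. The console_scripts entries are what provide the nvchecker, nvtake and nvcmp commands used by the scripts above.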
23
setup.py
@@ -1,23 +0,0 @@
#!/usr/bin/env python3
# vim:fileencoding=utf-8

from setuptools import setup, find_packages

setup(
  name = 'nvchecker',
  version = '0.1',
  packages = find_packages(),
  install_requires = ['tornado'],
  entry_points = {
    'console_scripts': [
      'nvchecker = nvchecker.main:main',
    ],
  },

  author = 'lilydjwg',
  author_email = 'lilydjwg@gmail.com',
  description = 'New version checker for software',
  license = 'MIT',
  keywords = 'new version build check',
  url = 'https://github.com/lilydjwg/nvchecker',
)
3
tests/__init__.py
Normal file
@@ -0,0 +1,3 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.
111
tests/conftest.py
Normal file
@@ -0,0 +1,111 @@
# MIT licensed
# Copyright (c) 2020, 2024 lilydjwg <lilydjwg@gmail.com>, et al.

import asyncio
import structlog
import os
from pathlib import Path
from typing import TYPE_CHECKING, Dict

if TYPE_CHECKING:
  import tomli as tomllib
else:
  try:
    import tomllib
  except ModuleNotFoundError:
    import tomli as tomllib

import pytest
import pytest_asyncio

from nvchecker import core
from nvchecker import __main__ as main
from nvchecker.util import Entries, ResultData, RawResult

use_keyfile = False

async def run(
  entries: Entries, max_concurrency: int = 20,
) -> Dict[str, str]:
  task_sem = asyncio.Semaphore(max_concurrency)
  result_q: asyncio.Queue[RawResult] = asyncio.Queue()
  keyfile = os.environ.get('KEYFILE')
  if use_keyfile and keyfile:
    filepath = Path(keyfile)
    keymanager = core.KeyManager(filepath)
  else:
    keymanager = core.KeyManager(None)

  dispatcher = core.setup_httpclient()
  entry_waiter = core.EntryWaiter()
  futures = dispatcher.dispatch(
    entries, task_sem, result_q,
    keymanager, entry_waiter, 1, {},
  )

  oldvers: ResultData = {}
  result_coro = core.process_result(oldvers, result_q, entry_waiter)
  runner_coro = core.run_tasks(futures)

  results, _has_failures = await main.run(result_coro, runner_coro)
  return {k: r.version for k, r in results.items()}

@pytest_asyncio.fixture(scope="session")
async def get_version():
  async def __call__(name, config):
    entries = {name: config}
    newvers = await run(entries)
    return newvers.get(name)

  return __call__

@pytest_asyncio.fixture(scope="session")
async def run_str():
  async def __call__(str):
    entries = tomllib.loads(str)
    newvers = await run(entries)
    return newvers.popitem()[1]

  return __call__

@pytest_asyncio.fixture(scope="session")
async def run_str_multi():
  async def __call__(str):
    entries = tomllib.loads(str)
    newvers = await run(entries)
    return newvers

  return __call__

@pytest.fixture(scope="session", autouse=True)
def raise_on_logger_msg():
  def proc(logger, method_name, event_dict):
    if method_name in ('warning', 'error'):
      if 'exc_info' in event_dict:
        exc = event_dict['exc_info']
        if isinstance(exc, Exception):
          raise exc
        else: # exc_info=True
          raise
      if not event_dict['event'].startswith(('rate limited', 'no-result')):
        raise RuntimeError(event_dict['event'])
    return event_dict['event']

  structlog.configure([proc])

def pytest_configure(config):
  # register an additional marker
  config.addinivalue_line(
    'markers', 'needs_net: mark test to require Internet access',
  )

@pytest.fixture
def keyfile():
  global use_keyfile
  if 'KEYFILE' not in os.environ:
    pytest.skip('KEYFILE not set')
    return

  use_keyfile = True
  yield
  use_keyfile = False
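A minimal sketch of how the fixtures above are used by the test files that follow; the cmd source and the echoed value are illustrative:

import pytest

pytestmark = pytest.mark.asyncio

async def test_cmd_sketch(get_version):
  # runs one entry through the full dispatch/result pipeline in run()
  assert await get_version('example', {
    'source': 'cmd',
    'cmd': 'echo 0.1',
  }) == '0.1'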
113
tests/test_alpm.py
Normal file
@@ -0,0 +1,113 @@
# MIT licensed
# Copyright (c) 2020 DDoSolitary <DDoSolitary@gmail.com>, et al.

import pathlib
import shutil
import subprocess
import tempfile

import pytest

pytestmark = [
  pytest.mark.asyncio,
  pytest.mark.skipif(shutil.which('makepkg') is None, reason='requires makepkg command'),
  pytest.mark.skipif(shutil.which('repo-add') is None, reason='requires repo-add command')
]

global temp_dir, db_path


def setup_module(module):
  global temp_dir, db_path
  temp_dir = tempfile.TemporaryDirectory()
  temp_path = pathlib.Path(temp_dir.name)
  pkg_path = temp_path / 'test-pkg'
  pkg_path.mkdir()
  with (pkg_path / 'PKGBUILD').open('w') as f:
    f.write(
      'pkgname=test-pkg\n'
      'pkgver=1.2.3\n'
      'pkgrel=4\n'
      'arch=(any)\n'
      'provides=("test-provides=5.6-7" "test-provides-unversioned")\n'
      'options=(!debug)\n'
    )
  subprocess.check_call(['makepkg', '--nosign'], cwd=pkg_path)
  pkg_file = subprocess.check_output(['makepkg', '--packagelist'], cwd=pkg_path, text=True).strip()
  db_path = pkg_path / 'test-db'
  db_path.mkdir()
  repo_path = db_path / 'sync'
  repo_path.mkdir()
  subprocess.check_call([
    'repo-add',
    repo_path / 'test-repo.db.tar.gz',
    pkg_path / pkg_file
  ])


def teardown_module(module):
  temp_dir.cleanup()


async def test_alpm(get_version):
  assert await get_version('test-pkg', {
    'source': 'alpm',
    'dbpath': str(db_path),
    'repo': 'test-repo'
  }) == '1.2.3-4'


async def test_alpm_strip(get_version):
  assert await get_version('test-pkg', {
    'source': 'alpm',
    'dbpath': str(db_path),
    'repo': 'test-repo',
    'strip_release': True
  }) == '1.2.3'


async def test_alpm_provided(get_version):
  assert await get_version('test-pkg', {
    'source': 'alpm',
    'dbpath': str(db_path),
    'repo': 'test-repo',
    'provided': 'test-provides'
  }) == '5.6-7'


async def test_alpm_provided_strip(get_version):
  assert await get_version('test-pkg', {
    'source': 'alpm',
    'dbpath': str(db_path),
    'repo': 'test-repo',
    'provided': 'test-provides',
    'strip_release': True
  }) == '5.6'


async def test_alpm_missing_repo(get_version):
  with pytest.raises(RuntimeError):
    await get_version('test-pkg', {
      'source': 'alpm',
      'dbpath': str(db_path),
      'repo': 'wrong-repo'
    })


async def test_alpm_missing_pkg(get_version):
  with pytest.raises(RuntimeError):
    await get_version('wrong-pkg', {
      'source': 'alpm',
      'dbpath': str(db_path),
      'repo': 'test-repo'
    })


async def test_alpm_missing_provides(get_version):
  with pytest.raises(RuntimeError):
    await get_version('test-pkg', {
      'source': 'alpm',
      'dbpath': str(db_path),
      'repo': 'test-repo',
      'provided': 'wrong-provides'
    })
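Note how the fixture works: setup_module builds a throwaway pacman repository, with makepkg producing the test-pkg 1.2.3-4 package (the exact file name comes from makepkg --packagelist) and repo-add writing sync/test-repo.db.tar.gz under dbpath, which is the on-disk layout the alpm source reads.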
53
tests/test_alpmfiles.py
Normal file
@@ -0,0 +1,53 @@
# MIT licensed
# Copyright (c) 2023 Pekka Ristola <pekkarr [at] protonmail [dot] com>, et al.

import pathlib
import shutil
import subprocess
import tempfile

import pytest

pytestmark = [
  pytest.mark.asyncio,
  pytest.mark.skipif(shutil.which('pacman') is None, reason='requires pacman command'),
  pytest.mark.skipif(shutil.which('fakeroot') is None, reason='requires fakeroot command'),
]

global temp_dir, db_path


def setup_module(module):
  global temp_dir, db_path

  temp_dir = tempfile.TemporaryDirectory()
  temp_path = pathlib.Path(temp_dir.name)
  db_path = temp_path / 'test-db'

  db_path.mkdir(exist_ok=True)

  cmd = ['fakeroot', 'pacman', '-Fy', '--dbpath', db_path]
  subprocess.check_call(cmd)


def teardown_module(module):
  temp_dir.cleanup()


async def test_alpmfiles(get_version):
  assert await get_version('test', {
    'source': 'alpmfiles',
    'pkgname': 'libuv',
    'filename': 'usr/lib/libuv\\.so\\.([^.]+)',
    'dbpath': db_path,
  }) == '1'

async def test_alpmfiles_strip(get_version):
  assert await get_version('test', {
    'source': 'alpmfiles',
    'pkgname': 'glibc',
    'repo': 'core',
    'filename': 'libc\\.so\\.[^.]+',
    'strip_dir': True,
    'dbpath': db_path,
  }) == 'libc.so.6'
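Unlike test_alpm.py above, these tests sync real file databases: fakeroot pacman -Fy downloads the file lists of the repositories configured on the host (normally via /etc/pacman.conf) into the scratch dbpath, so they need network access in addition to the pacman and fakeroot commands.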
55
tests/test_android_sdk.py
Normal file
@@ -0,0 +1,55 @@
# MIT licensed
# Copyright (c) 2020 lilydjwg <lilydjwg@gmail.com>, et al.
# Copyright (c) 2017 Chih-Hsuan Yen <yan12125 at gmail dot com>

import pytest
pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]

@pytest.mark.flaky(reruns=10)
async def test_android_addon(get_version):
  assert await get_version("android-google-play-apk-expansion", {
    "source": "android_sdk",
    "android_sdk": "extras;google;market_apk_expansion",
    "repo": "addon",
  }) == "1.r03"

async def test_android_package(get_version):
  version = await get_version("android-sdk-cmake", {
    "source": "android_sdk",
    "android_sdk": "cmake;",
    "repo": "package",
  })
  assert version.startswith("3.")


async def test_android_package_channel(get_version):
  assert await get_version("android-sdk-cmake", {
    "source": "android_sdk",
    "android_sdk": "ndk;",
    "repo": "package",
    "channel": "beta,dev,canary",
  }) == "26.0.10636728"

async def test_android_list(get_version):
  assert await get_version("android-sdk-cmake-older", {
    "source": "android_sdk",
    "android_sdk": "cmake;",
    "repo": "package",
    "include_regex": r"3\.10.*",
  }) == "3.10.2"

async def test_android_package_os(get_version):
  assert await get_version("android-usb-driver", {
    "source": "android_sdk",
    "android_sdk": "extras;google;usb_driver",
    "repo": "addon",
    "host_os": "windows"
  }) == "13"

async def test_android_package_os_missing(get_version):
  assert await get_version("android-usb-driver", {
    "source": "android_sdk",
    "android_sdk": "extras;google;usb_driver",
    "repo": "addon",
    "host_os": "linux"
  }) is None
Some files were not shown because too many files have changed in this diff.