mirror of https://github.com/lilydjwg/nvchecker.git
synced 2025-03-10 06:14:02 +00:00

commit 84b94e4347: Merge 4d5c102f15 into c6ed37ada1

12 changed files with 407 additions and 173 deletions
.github/workflows/tests.yaml (vendored) | 2

@@ -34,7 +34,7 @@ jobs:
         cache-name: cache-pip
       with:
         path: ~/.cache/pip
-        key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.deps }}-${{ hashFiles('setup.py') }}
+        key: ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.deps }}-${{ hashFiles('pyproject.toml', 'setup.cfg') }}
         restore-keys: |
           ${{ runner.os }}-${{ env.cache-name }}-${{ matrix.deps }}-
           ${{ runner.os }}-${{ env.cache-name }}-
.gitignore (vendored) | 2

@@ -1,10 +1,12 @@
 *.egg-info/
 __pycache__/
+/build/
+/dist/
 .cache/
 .eggs/
 *.pyc
 *.pyo
 .travis.pub
 .pytest_cache/
 .tox/
 keyfile.toml
README.rst

@@ -42,7 +42,7 @@ To install::

 To use the latest code, you can also clone this repository and run::

-    python3 setup.py install
+    pip install .

 To see available options::

docs/usage.rst

@@ -204,8 +204,8 @@ httptoken
   A personal authorization token used to fetch the url with the ``Authorization`` header.
   The type of token depends on the authorization required.

-  - For Bearer token set : ``Bearer <Your_bearer_token>``
-  - For Basic token set : ``Basic <Your_base64_encoded_token>``
+  - For Bearer token set \: ``Bearer <Your_bearer_token>``
+  - For Basic token set \: ``Basic <Your_base64_encoded_token>``

   In the keyfile add ``httptoken_{name}`` token.

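For illustration, a keyfile entry for the ``httptoken_{name}`` mechanism above might look like this (a minimal sketch; the entry name ``example`` and the token value are placeholders, not part of the patch)::

  [keys]
  httptoken_example = "Bearer <Your_bearer_token>"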
@@ -365,8 +365,8 @@ Check GitHub
   source = "github"

 Check `GitHub <https://github.com/>`_ for updates. The version returned is in
-date format ``%Y%m%d.%H%M%S``, e.g. ``20130701.012212``, unless ``use_latest_release``
-or ``use_max_tag`` is used. See below.
+date format ``%Y%m%d.%H%M%S``, e.g. ``20130701.012212``, unless ``use_latest_release``,
+``use_max_tag``, or ``use_commit_name`` is used. See below.

 github
   The github repository, with author, e.g. ``lilydjwg/nvchecker``.
@@ -393,6 +393,12 @@ use_latest_tag

   This requires a token because it's using the v4 GraphQL API.

+use_commit_name
+  Set this to ``true`` to append a plus and the commit name to the version, e.g.
+  ``20130701.012212+e1457aadd30f53f4d50d6c4828d517355c09b8ae``.
+
+  If this isn't showing up, provide a token so it can use the v4 GraphQL API.
+
 query
   When ``use_latest_tag`` is ``true``, this sets a query for the tag. The exact
   matching method is not documented by GitHub.
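For illustration, a minimal entry exercising the new ``use_commit_name`` option might look like this (a sketch; the entry name and repository are examples only)::

  [example]
  source = "github"
  github = "lilydjwg/nvchecker"
  use_commit_name = true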
@@ -403,6 +409,12 @@ use_max_tag
   lightweight ones, and return the largest one sorted by the
   ``sort_version_key`` option. Will return the tag name instead of date.

+  This defaults ``list_count`` to 100.
+
+list_count
+  When supporting :ref:`list options` through the v4 GraphQL API, this sets a
+  maximum count of items in the list. By default, ``list_count`` is set to 1.
+
 token
   A personal authorization token used to call the API.

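For illustration, a sketch of an entry combining ``use_max_tag`` with the new ``list_count`` option (values are examples only)::

  [example]
  source = "github"
  github = "lilydjwg/nvchecker"
  use_max_tag = true
  list_count = 10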
@@ -415,6 +427,8 @@ To set an authorization token, you can set:
 - the token option

 This source supports :ref:`list options` when ``use_max_tag`` is set.
+Options of this source that support :ref:`list options` may be effected by
+``list_count``.

 Check Gitea
 ~~~~~~~~~~~
nvchecker/core.py

@@ -146,6 +146,14 @@ class BaseWorker:
     '''Run the `tasks`. Subclasses should implement this method.'''
     raise NotImplementedError

+def _normalize(x: Any) -> Any:
+  if isinstance(x, list):
+    return tuple(sorted(_normalize(y) for y in x))
+  elif isinstance(x, dict):
+    return tuple(sorted((_normalize(k), _normalize(v)) for k, v in x.items()))
+  else:
+    return x
+
 class AsyncCache:
   '''A cache for use with async functions.'''
   cache: Dict[Hashable, Any]
@@ -156,28 +164,32 @@ class AsyncCache:
     self.lock = asyncio.Lock()

   async def _get_json(
-    self, key: Tuple[str, str, Tuple[Tuple[str, str], ...]],
+    self, key: Tuple[str, str, Tuple[Tuple[str, str], ...], object], extra: Any,
   ) -> Any:
-    _, url, headers = key
-    res = await session.get(url, headers=dict(headers))
+    _, url, headers, json = key
+    json = extra # denormalizing json would be a pain, so we sneak it through
+    res = await (session.get(url=url, headers=dict(headers)) if json is None \
+      else session.post(url=url, headers=dict(headers), json=json))
     return res.json()

   async def get_json(
     self, url: str, *,
     headers: Dict[str, str] = {},
+    json: Optional[object] = None,
   ) -> Any:
     '''Get specified ``url`` and return the response content as JSON.

     The returned data will be cached for reuse.
     '''
-    key = '_jsonurl', url, tuple(sorted(headers.items()))
+    key = '_jsonurl', url, _normalize(headers), _normalize(json)
     return await self.get(
-      key , self._get_json) # type: ignore
+      key, self._get_json, extra=json) # type: ignore

   async def get(
     self,
     key: Hashable,
-    func: Callable[[Hashable], Coroutine[Any, Any, Any]],
+    func: Callable[[Hashable, Optional[Any]], Coroutine[Any, Any, Any]],
+    extra: Optional[Any] = None,
   ) -> Any:
     '''Run async ``func`` and cache its return value by ``key``.

@@ -189,7 +201,7 @@ class AsyncCache:
     async with self.lock:
       cached = self.cache.get(key)
       if cached is None:
-        coro = func(key)
+        coro = func(key, extra)
        fu = asyncio.create_task(coro)
        self.cache[key] = fu

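The point of the new ``_normalize`` helper above is that dicts and lists are unhashable and so cannot be part of a cache key; converting them into sorted tuples yields equal, hashable keys for equivalent requests. A standalone sketch of that behaviour (illustrative, not part of the patch)::

  def _normalize(x):
    if isinstance(x, list):
      return tuple(sorted(_normalize(y) for y in x))
    elif isinstance(x, dict):
      return tuple(sorted((_normalize(k), _normalize(v)) for k, v in x.items()))
    else:
      return x

  # Two equivalent JSON payloads normalize to the same hashable key,
  # regardless of dict insertion order.
  a = _normalize({'b': 1, 'a': [2, 1]})
  b = _normalize({'a': [1, 2], 'b': 1})
  assert a == b and hash(a) == hash(b)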
nvchecker_source/github.py

@@ -3,7 +3,7 @@

 import time
 from urllib.parse import urlencode
-from typing import Tuple
+from typing import Any, Dict, List, Optional, Tuple

 import structlog

@@ -14,6 +14,9 @@ from nvchecker.api import (

 logger = structlog.get_logger(logger_name=__name__)

+def add_commit_name(version: str, commit_name: Optional[str]) -> str:
+  return version if commit_name is None else version + '+' + commit_name
+
 GITHUB_URL = 'https://api.github.com/repos/%s/commits'
 GITHUB_LATEST_RELEASE = 'https://api.github.com/repos/%s/releases/latest'
 # https://developer.github.com/v3/git/refs/#get-all-references
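The ``add_commit_name`` helper introduced above simply glues the commit name onto the version with a ``+`` when requested; a quick sketch of its behaviour (values taken from the documentation example)::

  add_commit_name('20130701.012212', None)
  # -> '20130701.012212'
  add_commit_name('20130701.012212', 'e1457aadd30f53f4d50d6c4828d517355c09b8ae')
  # -> '20130701.012212+e1457aadd30f53f4d50d6c4828d517355c09b8ae'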
@@ -26,47 +29,136 @@ async def get_version(name, conf, **kwargs):
   except TemporaryError as e:
     check_ratelimit(e, name)

-QUERY_LATEST_TAG = '''
-{{
-  repository(name: "{name}", owner: "{owner}") {{
-    refs(refPrefix: "refs/tags/", first: 1,
-      query: "{query}",
-      orderBy: {{field: TAG_COMMIT_DATE, direction: DESC}}) {{
-      edges {{
-        node {{
-          name
-        }}
-      }}
-    }}
-  }}
-}}
+async def query_graphql(
+  *,
+  cache: AsyncCache,
+  token: Optional[str] = None,
+  headers: Optional[Dict[str, str]] = None,
+  query: str,
+  variables: Optional[Dict[str, object]] = None,
+  json: Optional[Dict[str, object]] = None,
+  url: Optional[str] = None,
+  **kwargs,
+) -> Any:
+  if not token:
+    raise GetVersionError('token not given but it is required')
+  if headers is None:
+    headers = {}
+  headers.setdefault('Authorization', f'bearer {token}')
+  headers.setdefault('Content-Type', 'application/json')
+
+  if json is None:
+    json = {}
+  json['query'] = query
+  if variables is not None:
+    json.setdefault('variables', {}).update(variables)
+
+  if url is None:
+    url = GITHUB_GRAPHQL_URL
+  return await cache.get_json(url = url, headers = headers, json = json)
+
+async def query_rest(
+  *,
+  cache: AsyncCache,
+  token: Optional[str] = None,
+  headers: Optional[Dict[str, str]] = None,
+  parameters: Optional[Dict[str, str]] = None,
+  url: str,
+) -> Any:
+  if headers is None:
+    headers = {}
+  if token:
+    headers.setdefault('Authorization', f'token {token}')
+  headers.setdefault('Accept', 'application/vnd.github.quicksilver-preview+json')
+
+  if parameters:
+    url += '?' + urlencode(parameters)
+
+  return await cache.get_json(url = url, headers = headers)
+
+QUERY_LATEST_TAGS = '''
+query latestTags(
+  $owner: String!, $name: String!,
+  $query: String, $orderByCommitDate: Boolean!, $count: Int = 1,
+  $includeCommitName: Boolean = false,
+) {
+  repository(owner: $owner, name: $name) {
+    ... @include(if: $orderByCommitDate) { latestRefs: refs(
+      refPrefix: "refs/tags/", query: $query,
+      first: $count, orderBy: {field: TAG_COMMIT_DATE, direction: DESC}
+    ) { ...tagData } }
+    ... @skip(if: $orderByCommitDate) { maxRefs: refs(
+      refPrefix: "refs/tags/", query: $query,
+      last: $count
+    ) { ...tagData } }
+  }
+}
+fragment tagData on RefConnection {
+  edges {
+    node {
+      name
+      ... @include(if: $includeCommitName) { target { ...commitOid } }
+    }
+  }
+}
+fragment commitOid on GitObject {
+  ... on Commit { commitOid: oid }
+  ... on Tag { tagTarget: target {
+    ... on Commit { commitOid: oid }
+  } }
+}
 '''

-async def get_latest_tag(key: Tuple[str, str, str]) -> str:
-  repo, query, token = key
-  owner, reponame = repo.split('/')
-  headers = {
-    'Authorization': f'bearer {token}',
-    'Content-Type': 'application/json',
+async def query_latest_tags(
+  *,
+  cache: AsyncCache,
+  token: Optional[str] = None,
+  owner: str,
+  name: str,
+  query: Optional[str],
+  order_by_commit_date: bool,
+  count: Optional[int] = None,
+  use_commit_name: bool,
+) -> List[str]:
+  j = await query_graphql(
+    cache = cache,
+    token = token,
+    query = QUERY_LATEST_TAGS,
+    variables = {
+      'owner': owner,
+      'name': name,
+      'query': query,
+      'orderByCommitDate': order_by_commit_date,
+      'count': count,
+      'includeCommitName': use_commit_name,
+    },
+  )
+  refsAlias = 'latestRefs' if order_by_commit_date else 'maxRefs'
+  refs = j['data']['repository'][refsAlias]['edges']
+  if not order_by_commit_date:
+    refs = reversed(refs)
+  tags = [
+    add_commit_name(
+      ref['node']['name'],
+      ref['node']['target']['commitOid'] if use_commit_name else None,
+    )
+    for ref in refs
+  ]
+  return tags
+
+QUERY_LATEST_RELEASE = '''
+query latestRelease(
+  $owner: String!, $name: String!,
+  $includeCommitName: Boolean = false,
+) {
+  repository(owner: $owner, name: $name) {
+    latestRelease {
+      tagName
+      ... @include(if: $includeCommitName) { tagCommit { oid } }
+    }
   }
-  q = QUERY_LATEST_TAG.format(
-    owner = owner,
-    name = reponame,
-    query = query,
-  )
-
-  res = await session.post(
-    GITHUB_GRAPHQL_URL,
-    headers = headers,
-    json = {'query': q},
-  )
-  j = res.json()
-
-  refs = j['data']['repository']['refs']['edges']
-  if not refs:
-    raise GetVersionError('no tag found')
-
-  return refs[0]['node']['name']
+}
+'''

 async def get_version_real(
   name: str, conf: Entry, *,
@@ -74,6 +166,7 @@ async def get_version_real(
   **kwargs,
 ) -> VersionResult:
   repo = conf['github']
+  use_commit_name = conf.get('use_commit_name', False)

   # Load token from config
   token = conf.get('token')
@@ -81,55 +174,102 @@ async def get_version_real(
   if token is None:
     token = keymanager.get_key('github')

-  use_latest_tag = conf.get('use_latest_tag', False)
-  if use_latest_tag:
-    if not token:
-      raise GetVersionError('token not given but it is required')
-
-    query = conf.get('query', '')
-    return await cache.get((repo, query, token), get_latest_tag) # type: ignore
-
-  br = conf.get('branch')
-  path = conf.get('path')
-  use_latest_release = conf.get('use_latest_release', False)
-  use_max_tag = conf.get('use_max_tag', False)
-  if use_latest_release:
-    url = GITHUB_LATEST_RELEASE % repo
-  elif use_max_tag:
-    url = GITHUB_MAX_TAG % repo
-  else:
-    url = GITHUB_URL % repo
-  parameters = {}
-  if br:
-    parameters['sha'] = br
-  if path:
-    parameters['path'] = path
-  url += '?' + urlencode(parameters)
-  headers = {
-    'Accept': 'application/vnd.github.quicksilver-preview+json',
-  }
-  if token:
-    headers['Authorization'] = f'token {token}'
-
-  data = await cache.get_json(url, headers = headers)
-
-  if use_max_tag:
-    tags = [ref['ref'].split('/', 2)[-1] for ref in data]
+  if conf.get('use_latest_tag', False):
+    owner, reponame = repo.split('/')
+    tags = await query_latest_tags(
+      cache = cache,
+      token = token,
+      owner = owner,
+      name = reponame,
+      query = conf.get('query'),
+      order_by_commit_date = True,
+      use_commit_name = use_commit_name,
+    )
     if not tags:
       raise GetVersionError('No tag found in upstream repository.')
+    return tags[0]
+  elif conf.get('use_latest_release', False):
+    tag = None
+    if token:
+      owner, reponame = repo.split('/')
+      j = await query_graphql(
+        cache = cache,
+        token = token,
+        query = QUERY_LATEST_RELEASE,
+        variables = {
+          'owner': owner,
+          'name': reponame,
+          'includeCommitName': use_commit_name,
+        },
+      )
+      release = j['data']['repository']['latestRelease']
+      if release is not None:
+        tag = add_commit_name(
+          release['tagName'],
+          release['tagCommit']['oid'] if use_commit_name else None,
+        )
+    else:
+      data = await query_rest(
+        cache = cache,
+        token = token,
+        url = GITHUB_LATEST_RELEASE % repo,
+      )
+      if 'tag_name' in data:
+        tag = data['tag_name']
+    if tag is None:
+      raise GetVersionError('No release found in upstream repository.')
+    return tag
+  elif conf.get('use_max_tag', False):
+    if token:
+      owner, reponame = repo.split('/')
+      tags = await query_latest_tags(
+        cache = cache,
+        token = token,
+        owner = owner,
+        name = reponame,
+        query = conf.get('query'),
+        order_by_commit_date = False,
+        count = conf.get('list_count', 100),
+        use_commit_name = use_commit_name,
+      )
+    else:
+      data = await query_rest(
+        cache = cache,
+        token = token,
+        url = GITHUB_MAX_TAG % repo,
+      )
+      tags = [
+        add_commit_name(
+          ref['ref'].split('/', 2)[-1],
+          ref['object']['sha'] if use_commit_name else None,
+        )
+        for ref in data
+      ]
+    if not tags:
+      raise GetVersionError('No tag found in upstream repository.')
     return tags

-  if use_latest_release:
-    if 'tag_name' not in data:
-      raise GetVersionError('No release found in upstream repository.')
-    version = data['tag_name']
-
-  else:
-    # YYYYMMDD.HHMMSS
-    version = data[0]['commit']['committer']['date'] \
-        .rstrip('Z').replace('-', '').replace(':', '').replace('T', '.')
-
-  return version
+  br = conf.get('branch')
+  path = conf.get('path')
+  parameters = {}
+  if br is not None:
+    parameters['sha'] = br
+  if path is not None:
+    parameters['path'] = path
+  data = await query_rest(
+    cache = cache,
+    token = token,
+    url = GITHUB_URL % repo,
+    parameters = parameters,
+  )
+  date = data[0]['commit']['committer']['date']
+  commit_name = data[0]['sha'] if use_commit_name else None
+  version = add_commit_name(
+    # YYYYMMDD.HHMMSS
+    date.rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'),
+    commit_name,
+  )
+  return version

 def check_ratelimit(exc, name):
   res = exc.response
pyproject.toml (new file) | 27

@@ -0,0 +1,27 @@
+[build-system]
+requires = ["setuptools"]
+build-backend = "setuptools.build_meta"
+
+[tool.pytest.ini_options]
+# addopts = -n auto
+asyncio_mode = "strict"
+
+[tool.tox]
+legacy_tox_ini = """
+[tox]
+isolated_build = True
+# you may find `tox --skip-missing-interpreters=true` helpful.
+envlist = py3{7,8,9,10}
+
+[testenv]
+usedevelop = false
+deps =
+  pytest
+  pytest-asyncio
+  pytest-httpbin
+  flaky
+extras =
+  htmlparser
+passenv = KEYFILE
+commands = pytest -r fEs {posargs}
+"""
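For context: ``legacy_tox_ini`` is tox's documented mechanism for embedding its INI configuration inside pyproject.toml; with this in place, running ``tox`` from the repository root runs the test suite across the configured interpreters.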
setup.cfg | 72

@@ -1,5 +1,71 @@
+# The complex upload command:
+# rm -rf dist && python -m build --sdist && twine check dist/* && twine upload -s dist/*
+
+[metadata]
+name = nvchecker
+version = attr: nvchecker.__version__
+author = lilydjwg
+author_email = lilydjwg@gmail.com
+description = New version checker for software
+license = MIT
+keywords = new, version, build, check
+url = https://github.com/lilydjwg/nvchecker
+long_description = file: README.rst
+long_description_content_type = text/x-rst
+platforms = any
+
+classifiers =
+  Development Status :: 5 - Production/Stable
+  Environment :: Console
+  Intended Audience :: Developers
+  Intended Audience :: System Administrators
+  License :: OSI Approved :: MIT License
+  Operating System :: OS Independent
+  Programming Language :: Python
+  Programming Language :: Python :: 3
+  Programming Language :: Python :: 3 :: Only
+  Programming Language :: Python :: 3.7
+  Programming Language :: Python :: 3.8
+  Programming Language :: Python :: 3.9
+  Programming Language :: Python :: 3.10
+  Topic :: Internet
+  Topic :: Internet :: WWW/HTTP
+  Topic :: Software Development
+  Topic :: System :: Archiving :: Packaging
+  Topic :: System :: Software Distribution
+  Topic :: Utilities
+
+[options]
+zip_safe = True
+
+packages = find_namespace:
+install_requires =
+  setuptools; python_version<"3.8"
+  tomli
+  structlog
+  appdirs
+  tornado>=6
+  pycurl
+scripts =
+  scripts/nvchecker-ini2toml
+  scripts/nvchecker-notify
+
+[options.packages.find]
+exclude = tests, build*, docs*
+
+[options.extras_require]
+vercmp =
+  pyalpm
+pypi =
+  packaging
+htmlparser =
+  lxml
+
+[options.entry_points]
+console_scripts =
+  nvchecker = nvchecker.__main__:main
+  nvtake = nvchecker.tools:take
+  nvcmp = nvchecker.tools:cmp
+
 [flake8]
 ignore = E111, E302, E501
-
-[tool:pytest]
-# addopts = -n auto
setup.py (deleted) | 69

@@ -1,69 +0,0 @@
-#!/usr/bin/env python3
-
-from setuptools import setup, find_namespace_packages
-import nvchecker
-
-# The complex upload command:
-# rm -rf dist && python setup.py sdist && twine check dist/* && twine upload -s dist/*
-
-setup(
-  name = 'nvchecker',
-  version = nvchecker.__version__,
-  author = 'lilydjwg',
-  author_email = 'lilydjwg@gmail.com',
-  description = 'New version checker for software',
-  license = 'MIT',
-  keywords = 'new version build check',
-  url = 'https://github.com/lilydjwg/nvchecker',
-  long_description = open('README.rst', encoding='utf-8').read(),
-  long_description_content_type = 'text/x-rst',
-  platforms = 'any',
-  zip_safe = True,
-
-  packages = find_namespace_packages(exclude=['tests', 'build*', 'docs*']),
-  install_requires = ['setuptools; python_version<"3.8"', 'tomli', 'structlog', 'appdirs', 'tornado>=6', 'pycurl'],
-  extras_require = {
-    'vercmp': ['pyalpm'],
-    'pypi': ['packaging'],
-    'htmlparser': ['lxml'],
-  },
-  tests_require = [
-    'pytest',
-    'pytest-asyncio',
-    'pytest-httpbin',
-    'flaky',
-  ],
-  entry_points = {
-    'console_scripts': [
-      'nvchecker = nvchecker.__main__:main',
-      'nvtake = nvchecker.tools:take',
-      'nvcmp = nvchecker.tools:cmp',
-    ],
-  },
-  scripts = [
-    'scripts/nvchecker-ini2toml',
-    'scripts/nvchecker-notify',
-  ],
-
-  classifiers = [
-    "Development Status :: 5 - Production/Stable",
-    "Environment :: Console",
-    "Intended Audience :: Developers",
-    "Intended Audience :: System Administrators",
-    "License :: OSI Approved :: MIT License",
-    "Operating System :: OS Independent",
-    "Programming Language :: Python",
-    "Programming Language :: Python :: 3",
-    "Programming Language :: Python :: 3 :: Only",
-    "Programming Language :: Python :: 3.7",
-    "Programming Language :: Python :: 3.8",
-    "Programming Language :: Python :: 3.9",
-    "Programming Language :: Python :: 3.10",
-    "Topic :: Internet",
-    "Topic :: Internet :: WWW/HTTP",
-    "Topic :: Software Development",
-    "Topic :: System :: Archiving :: Packaging",
-    "Topic :: System :: Software Distribution",
-    "Topic :: Utilities",
-  ],
-)
tests/conftest.py

@@ -8,6 +8,7 @@ from pathlib import Path

 import tomli
 import pytest
+import pytest_asyncio

 from nvchecker import core
 from nvchecker import __main__ as main

@@ -41,7 +42,7 @@ async def run(
   vers, _has_failures = await main.run(result_coro, runner_coro)
   return vers

-@pytest.fixture(scope="module")
+@pytest_asyncio.fixture(scope="module")
 async def get_version():
   async def __call__(name, config):
     entries = {name: config}

@@ -50,7 +51,7 @@ async def get_version():

   return __call__

-@pytest.fixture(scope="module")
+@pytest_asyncio.fixture(scope="module")
 async def run_str():
   async def __call__(str):
     entries = tomli.loads(str)

@@ -59,7 +60,7 @@ async def run_str():

   return __call__

-@pytest.fixture(scope="module")
+@pytest_asyncio.fixture(scope="module")
 async def run_str_multi():
   async def __call__(str):
     entries = tomli.loads(str)
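The fixture changes above follow from pytest-asyncio's "strict" mode (set via ``asyncio_mode`` in pyproject.toml earlier in this diff): under strict mode, async fixtures must be declared with ``pytest_asyncio.fixture`` rather than a plain ``pytest.fixture``. A minimal sketch of the pattern (hypothetical test, not part of the patch)::

  import pytest
  import pytest_asyncio

  @pytest_asyncio.fixture
  async def answer():
    return 42

  @pytest.mark.asyncio
  async def test_answer(answer):
    assert answer == 42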
tests/test_github.py

@@ -15,6 +15,13 @@ async def test_github(get_version):
     "github": "harry-sanabria/ReleaseTestRepo",
   }) == "20140122.012101"

+async def test_github_commit_name(get_version):
+  assert await get_version("example", {
+    "source": "github",
+    "github": "harry-sanabria/ReleaseTestRepo",
+    "use_commit_name": True,
+  }) == "20140122.012101+2b3cdf6134b07ae6ac77f11b586dc1ae6d1521db"
+
 async def test_github_default_not_master(get_version):
   assert await get_version("example", {
     "source": "github",

@@ -28,6 +35,14 @@ async def test_github_latest_release(get_version):
     "use_latest_release": True,
   }) == "release3"

+async def test_github_latest_release_commit_name(get_version):
+  assert await get_version("example", {
+    "source": "github",
+    "github": "harry-sanabria/ReleaseTestRepo",
+    "use_latest_release": True,
+    "use_commit_name": True,
+  }) == "release3+2b3cdf6134b07ae6ac77f11b586dc1ae6d1521db"
+
 async def test_github_max_tag(get_version):
   assert await get_version("example", {
     "source": "github",

@@ -35,6 +50,14 @@ async def test_github_max_tag(get_version):
     "use_max_tag": True,
   }) == "second_release"

+async def test_github_max_tag_commit_name(get_version):
+  assert await get_version("example", {
+    "source": "github",
+    "github": "harry-sanabria/ReleaseTestRepo",
+    "use_max_tag": True,
+    "use_commit_name": True,
+  }) == "second_release+0f01b10ee72809e7ec0d903db95bcb6eef18c925"
+
 async def test_github_max_tag_with_ignored(get_version):
   assert await get_version("example", {
     "source": "github",

@@ -74,3 +97,11 @@ async def test_github_latest_tag(get_version):
     "use_latest_tag": True,
   }) == "release3"

+async def test_github_latest_tag_commit_name(get_version):
+  assert await get_version("example", {
+    "source": "github",
+    "github": "harry-sanabria/ReleaseTestRepo",
+    "use_latest_tag": True,
+    "use_commit_name": True,
+  }) == "release3+2b3cdf6134b07ae6ac77f11b586dc1ae6d1521db"
tests/test_xpath.py

@@ -3,7 +3,17 @@

 import pytest

-pytestmark = [pytest.mark.asyncio, pytest.mark.needs_net]
+lxml_available = True
+try:
+  import lxml
+except ImportError:
+  lxml_available = False
+
+pytestmark = [
+  pytest.mark.asyncio,
+  pytest.mark.needs_net,
+  pytest.mark.skipif(not lxml_available, reason="needs lxml"),
+]

 async def test_xpath_ok(get_version):
   assert await get_version("unifiedremote", {