From 44f55d75bdf01f22b410bd6dfbe313545a47fdf0 Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 05:07:00 -0500 Subject: [PATCH 01/40] add new function --- nvchecker_source/github.py | 31 +++++++++++++++++++++++++++++++ 1 file changed, 31 insertions(+) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index 8585366..9647d7a 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -86,6 +86,31 @@ QUERY_LATEST_RELEASE_WITH_PRERELEASES = ''' }} }} ''' +async def get_commit_count(url: str, headers: dict) -> int: + """Get the total commit count using pagination.""" + params = {'per_page': '1'} + + response = await session.get( + url, + params=params, + headers=headers + ) + + if response.status_code != 200: + raise HTTPError(response.status_code, response) + + commit_count = 1 + if 'Link' in response.headers: + link_header = response.headers['Link'] + for link in link_header.split(', '): + if 'rel="last"' in link: + url = link[link.find("<") + 1:link.find(">")] + query_params = parse_qs(urlparse(url).query) + if 'page' in query_params: + commit_count = int(query_params['page'][0]) + break + + return commit_count async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult: host, repo, query, token = key @@ -241,6 +266,12 @@ async def get_version_real( ) else: + + # Only add commit info if configured + if conf.get('use_commit_info', False): + commit_count = await get_commit_count(url, headers) + version = f"{version}.r{commit_count}.g{data[0]['sha'][:9]}" + return RichResult( # YYYYMMDD.HHMMSS version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'), From bb8db94214c381ca693ac1448b840cfb4def3a2d Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 05:11:07 -0500 Subject: [PATCH 02/40] ii --- nvchecker_source/github.py | 3 --- 1 file changed, 3 deletions(-) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index 9647d7a..588cee8 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -96,9 +96,6 @@ async def get_commit_count(url: str, headers: dict) -> int: headers=headers ) - if response.status_code != 200: - raise HTTPError(response.status_code, response) - commit_count = 1 if 'Link' in response.headers: link_header = response.headers['Link'] From 498262c867000d4d78a3726821c867921daf5523 Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 05:15:16 -0500 Subject: [PATCH 03/40] ii --- nvchecker_source/github.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index 588cee8..8bbed31 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -2,7 +2,7 @@ # Copyright (c) 2013-2020, 2024 lilydjwg , et al. import time -from urllib.parse import urlencode +from urllib.parse import urlencode, parse_qs from typing import List, Tuple, Union, Optional import asyncio From d2408d8fd38e678df66045e83c8240728f3de881 Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 05:16:19 -0500 Subject: [PATCH 04/40] iii --- nvchecker_source/github.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index 8bbed31..bb8fc5a 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -2,7 +2,7 @@ # Copyright (c) 2013-2020, 2024 lilydjwg , et al. 
import time -from urllib.parse import urlencode, parse_qs +from urllib.parse import urlencode, parse_qs, urlparse from typing import List, Tuple, Union, Optional import asyncio From 8278d91806a58b83f7c169cc89968ff1a6af6599 Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 05:20:22 -0500 Subject: [PATCH 05/40] ii --- nvchecker_source/github.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index bb8fc5a..6dedb27 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -267,6 +267,7 @@ async def get_version_real( # Only add commit info if configured if conf.get('use_commit_info', False): commit_count = await get_commit_count(url, headers) + version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.') version = f"{version}.r{commit_count}.g{data[0]['sha'][:9]}" return RichResult( From db5e5b7e706ef382b9594678e3768926c9eb4283 Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 05:22:59 -0500 Subject: [PATCH 06/40] a --- nvchecker_source/github.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index 6dedb27..e5f2172 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -269,6 +269,7 @@ async def get_version_real( commit_count = await get_commit_count(url, headers) version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.') version = f"{version}.r{commit_count}.g{data[0]['sha'][:9]}" + print(f"{version}") return RichResult( # YYYYMMDD.HHMMSS From cc0737beef7e0803a1d61d8ef94d683a8fdcbff1 Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 05:26:27 -0500 Subject: [PATCH 07/40] aaa --- nvchecker_source/github.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index e5f2172..266f97b 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -268,12 +268,12 @@ async def get_version_real( if conf.get('use_commit_info', False): commit_count = await get_commit_count(url, headers) version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.') - version = f"{version}.r{commit_count}.g{data[0]['sha'][:9]}" print(f"{version}") return RichResult( # YYYYMMDD.HHMMSS version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'), + version = f"{version}.r{commit_count}.g{data[0]['sha'][:9]}", revision = data[0]['sha'], url = data[0]['html_url'], ) From 267c00fb5efd56b720ebf742f9439220c6effea2 Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 05:27:55 -0500 Subject: [PATCH 08/40] aaa --- nvchecker_source/github.py | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index 266f97b..a310e50 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -264,16 +264,16 @@ async def get_version_real( else: + version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.') + # Only add commit info if configured if conf.get('use_commit_info', False): commit_count = await get_commit_count(url, headers) - version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.') - print(f"{version}") + version = 
f"{version}.r{commit_count}.g{data[0]['sha'][:9]}" return RichResult( # YYYYMMDD.HHMMSS - version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'), - version = f"{version}.r{commit_count}.g{data[0]['sha'][:9]}", + version = version, revision = data[0]['sha'], url = data[0]['html_url'], ) From 3f1f5ce11800850019a13f7079365b97125aaca1 Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 05:35:01 -0500 Subject: [PATCH 09/40] i --- nvchecker_source/github.py | 435 ++++++++++++++++--------------------- 1 file changed, 192 insertions(+), 243 deletions(-) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index a310e50..cbeebab 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -19,283 +19,232 @@ RATE_LIMITED_ERROR = False GITHUB_URL = 'https://api.%s/repos/%s/commits' GITHUB_LATEST_RELEASE = 'https://api.%s/repos/%s/releases/latest' -# https://developer.github.com/v3/git/refs/#get-all-references GITHUB_MAX_TAG = 'https://api.%s/repos/%s/git/refs/tags' GITHUB_GRAPHQL_URL = 'https://api.%s/graphql' -async def get_version(name, conf, **kwargs): - global RATE_LIMITED_ERROR, ALLOW_REQUEST - - if RATE_LIMITED_ERROR: - raise RuntimeError('rate limited') - - if ALLOW_REQUEST is None: - ALLOW_REQUEST = asyncio.Event() - ALLOW_REQUEST.set() - - for _ in range(2): # retry once - try: - await ALLOW_REQUEST.wait() - return await get_version_real(name, conf, **kwargs) - except HTTPError as e: - if e.code in [403, 429]: - if n := check_ratelimit(e, name): - ALLOW_REQUEST.clear() - await asyncio.sleep(n+1) - ALLOW_REQUEST.set() - continue - RATE_LIMITED_ERROR = True - raise - -QUERY_LATEST_TAG = ''' -{{ - repository(name: "{name}", owner: "{owner}") {{ - refs(refPrefix: "refs/tags/", first: 1, - query: "{query}", - orderBy: {{field: TAG_COMMIT_DATE, direction: DESC}}) {{ - edges {{ - node {{ - name - target {{ - oid - }} - }} - }} - }} - }} -}} -''' - -QUERY_LATEST_RELEASE_WITH_PRERELEASES = ''' -{{ - repository(name: "{name}", owner: "{owner}") {{ - releases(first: 1, orderBy: {{field: CREATED_AT, direction: DESC}}) {{ - edges {{ - node {{ - name - url - tag {{ - name - }} - tagCommit {{ - oid - }} - }} - }} - }} - }} -}} -''' -async def get_commit_count(url: str, headers: dict) -> int: - """Get the total commit count using pagination.""" - params = {'per_page': '1'} +async def enhance_version_with_commit_info( + result: RichResult, + host: str, + repo: str, + headers: dict, + use_commit_info: bool +) -> RichResult: + """Add commit count and SHA to version if use_commit_info is True.""" + if not use_commit_info: + return result + + url = GITHUB_URL % (host, repo) + commit_count = await get_commit_count(url, headers) - response = await session.get( - url, - params=params, - headers=headers + # Create new version string with commit info + enhanced_version = f"{result.version}.r{commit_count}.g{result.revision[:9]}" + + return RichResult( + version=enhanced_version, + gitref=result.gitref, + revision=result.revision, + url=result.url ) - - commit_count = 1 - if 'Link' in response.headers: - link_header = response.headers['Link'] - for link in link_header.split(', '): - if 'rel="last"' in link: - url = link[link.find("<") + 1:link.find(">")] - query_params = parse_qs(urlparse(url).query) - if 'page' in query_params: - commit_count = int(query_params['page'][0]) - break - - return commit_count async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult: - host, repo, query, token = key - owner, 
reponame = repo.split('/') - headers = { - 'Authorization': f'bearer {token}', - 'Content-Type': 'application/json', - } - q = QUERY_LATEST_TAG.format( - owner = owner, - name = reponame, - query = query, - ) + host, repo, query, token = key + owner, reponame = repo.split('/') + headers = { + 'Authorization': f'bearer {token}', + 'Content-Type': 'application/json', + } + q = QUERY_LATEST_TAG.format( + owner=owner, + name=reponame, + query=query, + ) - res = await session.post( - GITHUB_GRAPHQL_URL % host, - headers = headers, - json = {'query': q}, - ) - j = res.json() + res = await session.post( + GITHUB_GRAPHQL_URL % host, + headers=headers, + json={'query': q}, + ) + j = res.json() - refs = j['data']['repository']['refs']['edges'] - if not refs: - raise GetVersionError('no tag found') + refs = j['data']['repository']['refs']['edges'] + if not refs: + raise GetVersionError('no tag found') - version = refs[0]['node']['name'] - revision = refs[0]['node']['target']['oid'] - return RichResult( - version = version, - gitref = f"refs/tags/{version}", - revision = revision, - url = f'https://github.com/{repo}/releases/tag/{version}', - ) + version = refs[0]['node']['name'] + revision = refs[0]['node']['target']['oid'] + return RichResult( + version=version, + gitref=f"refs/tags/{version}", + revision=revision, + url=f'https://github.com/{repo}/releases/tag/{version}', + ) async def get_latest_release_with_prereleases(key: Tuple[str, str, str, str]) -> RichResult: - host, repo, token, use_release_name = key - owner, reponame = repo.split('/') - headers = { - 'Authorization': f'bearer {token}', - 'Content-Type': 'application/json', - } - q = QUERY_LATEST_RELEASE_WITH_PRERELEASES.format( - owner = owner, - name = reponame, - ) + host, repo, token, use_release_name = key + owner, reponame = repo.split('/') + headers = { + 'Authorization': f'bearer {token}', + 'Content-Type': 'application/json', + } + q = QUERY_LATEST_RELEASE_WITH_PRERELEASES.format( + owner=owner, + name=reponame, + ) - res = await session.post( - GITHUB_GRAPHQL_URL % host, - headers = headers, - json = {'query': q}, - ) - j = res.json() + res = await session.post( + GITHUB_GRAPHQL_URL % host, + headers=headers, + json={'query': q}, + ) + j = res.json() - refs = j['data']['repository']['releases']['edges'] - if not refs: - raise GetVersionError('no release found') + refs = j['data']['repository']['releases']['edges'] + if not refs: + raise GetVersionError('no release found') - tag_name = refs[0]['node']['tag']['name'] - if use_release_name: - version = refs[0]['node']['name'] - else: - version = tag_name + tag_name = refs[0]['node']['tag']['name'] + if use_release_name: + version = refs[0]['node']['name'] + else: + version = tag_name - return RichResult( - version = version, - gitref = f"refs/tags/{tag_name}", - revision = refs[0]['node']['tagCommit']['oid'], - url = refs[0]['node']['url'], - ) + return RichResult( + version=version, + gitref=f"refs/tags/{tag_name}", + revision=refs[0]['node']['tagCommit']['oid'], + url=refs[0]['node']['url'], + ) async def get_version_real( - name: str, conf: Entry, *, - cache: AsyncCache, keymanager: KeyManager, - **kwargs, + name: str, conf: Entry, *, + cache: AsyncCache, keymanager: KeyManager, + **kwargs, ) -> VersionResult: - repo = conf['github'] - host = conf.get('host', "github.com") + repo = conf['github'] + host = conf.get('host', "github.com") + use_commit_info = conf.get('use_commit_info', False) - # Load token from config - token = conf.get('token') - # Load token from keyman - if 
token is None: - token = keymanager.get_key(host.lower(), 'github') + # Load token from config + token = conf.get('token') + # Load token from keyman + if token is None: + token = keymanager.get_key(host.lower(), 'github') - use_latest_tag = conf.get('use_latest_tag', False) - if use_latest_tag: - if not token: - raise GetVersionError('token not given but it is required') + headers = { + 'Accept': 'application/vnd.github.quicksilver-preview+json', + } + if token: + headers['Authorization'] = f'token {token}' - query = conf.get('query', '') - return await cache.get((host, repo, query, token), get_latest_tag) # type: ignore + use_latest_tag = conf.get('use_latest_tag', False) + if use_latest_tag: + if not token: + raise GetVersionError('token not given but it is required') - use_latest_release = conf.get('use_latest_release', False) - include_prereleases = conf.get('include_prereleases', False) - use_release_name = conf.get('use_release_name', False) - if use_latest_release and include_prereleases: - if not token: - raise GetVersionError('token not given but it is required') + query = conf.get('query', '') + result = await cache.get((host, repo, query, token), get_latest_tag) + return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) - return await cache.get( - (host, repo, token, use_release_name), - get_latest_release_with_prereleases) # type: ignore + use_latest_release = conf.get('use_latest_release', False) + include_prereleases = conf.get('include_prereleases', False) + use_release_name = conf.get('use_release_name', False) + if use_latest_release and include_prereleases: + if not token: + raise GetVersionError('token not given but it is required') - br = conf.get('branch') - path = conf.get('path') - use_max_tag = conf.get('use_max_tag', False) - if use_latest_release: - url = GITHUB_LATEST_RELEASE % (host, repo) - elif use_max_tag: - url = GITHUB_MAX_TAG % (host, repo) - else: - url = GITHUB_URL % (host, repo) - parameters = {} - if br: - parameters['sha'] = br - if path: - parameters['path'] = path - url += '?' + urlencode(parameters) - headers = { - 'Accept': 'application/vnd.github.quicksilver-preview+json', - } - if token: - headers['Authorization'] = f'token {token}' + result = await cache.get( + (host, repo, token, use_release_name), + get_latest_release_with_prereleases) + return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) - data = await cache.get_json(url, headers = headers) - - if use_max_tag: - tags: List[Union[str, RichResult]] = [ - RichResult( - version = ref['ref'].split('/', 2)[-1], - gitref = ref['ref'], - revision = ref['object']['sha'], - url = f'https://github.com/{repo}/releases/tag/{ref["ref"].split("/", 2)[-1]}', - ) for ref in data - ] - if not tags: - raise GetVersionError('No tag found in upstream repository.') - return tags - - if use_latest_release: - if 'tag_name' not in data: - raise GetVersionError('No release found in upstream repository.') - - if use_release_name: - version = data['name'] + br = conf.get('branch') + path = conf.get('path') + use_max_tag = conf.get('use_max_tag', False) + if use_latest_release: + url = GITHUB_LATEST_RELEASE % (host, repo) + elif use_max_tag: + url = GITHUB_MAX_TAG % (host, repo) else: - version = data['tag_name'] + url = GITHUB_URL % (host, repo) + parameters = {} + if br: + parameters['sha'] = br + if path: + parameters['path'] = path + url += '?' 
+ urlencode(parameters) - return RichResult( - version = version, - gitref = f"refs/tags/{data['tag_name']}", - url = data['html_url'], - ) + data = await cache.get_json(url, headers=headers) - else: + if use_max_tag: + tags: List[Union[str, RichResult]] = [ + RichResult( + version=ref['ref'].split('/', 2)[-1], + gitref=ref['ref'], + revision=ref['object']['sha'], + url=f'https://github.com/{repo}/releases/tag/{ref["ref"].split("/", 2)[-1]}', + ) for ref in data + ] + if not tags: + raise GetVersionError('No tag found in upstream repository.') + + # Enhance all tags with commit info if enabled + if use_commit_info: + enhanced_tags = [] + for tag in tags: + if isinstance(tag, RichResult): + enhanced_tag = await enhance_version_with_commit_info( + tag, host, repo, headers, use_commit_info + ) + enhanced_tags.append(enhanced_tag) + else: + enhanced_tags.append(tag) + return enhanced_tags + return tags - version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.') + if use_latest_release: + if 'tag_name' not in data: + raise GetVersionError('No release found in upstream repository.') - # Only add commit info if configured - if conf.get('use_commit_info', False): - commit_count = await get_commit_count(url, headers) - version = f"{version}.r{commit_count}.g{data[0]['sha'][:9]}" + if use_release_name: + version = data['name'] + else: + version = data['tag_name'] - return RichResult( - # YYYYMMDD.HHMMSS - version = version, - revision = data[0]['sha'], - url = data[0]['html_url'], - ) + result = RichResult( + version=version, + gitref=f"refs/tags/{data['tag_name']}", + url=data['html_url'], + ) + return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) + + else: + version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.') + + result = RichResult( + version=version, + revision=data[0]['sha'], + url=data[0]['html_url'], + ) + return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) def check_ratelimit(exc: HTTPError, name: str) -> Optional[int]: - res = exc.response - if not res: - raise exc + res = exc.response + if not res: + raise exc - if v := res.headers.get('retry-after'): - n = int(v) - logger.warning('retry-after', n=n) - return n + if v := res.headers.get('retry-after'): + n = int(v) + logger.warning('retry-after', n=n) + return n - # default -1 is used to re-raise the exception - n = int(res.headers.get('X-RateLimit-Remaining', -1)) - if n == 0: - reset = int(res.headers.get('X-RateLimit-Reset')) - logger.error(f'rate limited, resetting at {time.ctime(reset)}. ' - 'Or get an API token to increase the allowance if not yet', - name = name, - reset = reset) - return None + # default -1 is used to re-raise the exception + n = int(res.headers.get('X-RateLimit-Remaining', -1)) + if n == 0: + reset = int(res.headers.get('X-RateLimit-Reset')) + logger.error(f'rate limited, resetting at {time.ctime(reset)}. 
' + 'Or get an API token to increase the allowance if not yet', + name=name, + reset=reset) + return None - raise exc + raise exc \ No newline at end of file From d9ee49c6b1bcaaacaa687edf6f7caaf00f9359dd Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 05:41:10 -0500 Subject: [PATCH 10/40] ss --- nvchecker_source/github.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index cbeebab..e9c88f6 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -22,7 +22,7 @@ GITHUB_LATEST_RELEASE = 'https://api.%s/repos/%s/releases/latest' GITHUB_MAX_TAG = 'https://api.%s/repos/%s/git/refs/tags' GITHUB_GRAPHQL_URL = 'https://api.%s/graphql' -async def enhance_version_with_commit_info( +async def get_version( result: RichResult, host: str, repo: str, @@ -143,7 +143,7 @@ async def get_version_real( query = conf.get('query', '') result = await cache.get((host, repo, query, token), get_latest_tag) - return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) + return await get_version(result, host, repo, headers, use_commit_info) use_latest_release = conf.get('use_latest_release', False) include_prereleases = conf.get('include_prereleases', False) @@ -155,7 +155,7 @@ async def get_version_real( result = await cache.get( (host, repo, token, use_release_name), get_latest_release_with_prereleases) - return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) + return await get_version(result, host, repo, headers, use_commit_info) br = conf.get('branch') path = conf.get('path') @@ -192,7 +192,7 @@ async def get_version_real( enhanced_tags = [] for tag in tags: if isinstance(tag, RichResult): - enhanced_tag = await enhance_version_with_commit_info( + enhanced_tag = await get_version( tag, host, repo, headers, use_commit_info ) enhanced_tags.append(enhanced_tag) @@ -215,7 +215,7 @@ async def get_version_real( gitref=f"refs/tags/{data['tag_name']}", url=data['html_url'], ) - return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) + return await get_version(result, host, repo, headers, use_commit_info) else: version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.') @@ -225,7 +225,7 @@ async def get_version_real( revision=data[0]['sha'], url=data[0]['html_url'], ) - return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) + return await get_version(result, host, repo, headers, use_commit_info) def check_ratelimit(exc: HTTPError, name: str) -> Optional[int]: res = exc.response From 80f2e2f2c9a27c64d8de960dfbf2e0d120ac49f4 Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 05:44:26 -0500 Subject: [PATCH 11/40] aa --- nvchecker_source/github.py | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index e9c88f6..cbeebab 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -22,7 +22,7 @@ GITHUB_LATEST_RELEASE = 'https://api.%s/repos/%s/releases/latest' GITHUB_MAX_TAG = 'https://api.%s/repos/%s/git/refs/tags' GITHUB_GRAPHQL_URL = 'https://api.%s/graphql' -async def get_version( +async def enhance_version_with_commit_info( result: RichResult, host: str, repo: str, @@ -143,7 +143,7 @@ async def get_version_real( query = conf.get('query', '') result = await cache.get((host, repo, query, token), 
get_latest_tag) - return await get_version(result, host, repo, headers, use_commit_info) + return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) use_latest_release = conf.get('use_latest_release', False) include_prereleases = conf.get('include_prereleases', False) @@ -155,7 +155,7 @@ async def get_version_real( result = await cache.get( (host, repo, token, use_release_name), get_latest_release_with_prereleases) - return await get_version(result, host, repo, headers, use_commit_info) + return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) br = conf.get('branch') path = conf.get('path') @@ -192,7 +192,7 @@ async def get_version_real( enhanced_tags = [] for tag in tags: if isinstance(tag, RichResult): - enhanced_tag = await get_version( + enhanced_tag = await enhance_version_with_commit_info( tag, host, repo, headers, use_commit_info ) enhanced_tags.append(enhanced_tag) @@ -215,7 +215,7 @@ async def get_version_real( gitref=f"refs/tags/{data['tag_name']}", url=data['html_url'], ) - return await get_version(result, host, repo, headers, use_commit_info) + return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) else: version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.') @@ -225,7 +225,7 @@ async def get_version_real( revision=data[0]['sha'], url=data[0]['html_url'], ) - return await get_version(result, host, repo, headers, use_commit_info) + return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) def check_ratelimit(exc: HTTPError, name: str) -> Optional[int]: res = exc.response From 3f6059fa48334612314aa6471cd3734839be99dc Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 05:47:51 -0500 Subject: [PATCH 12/40] aa --- nvchecker_source/github.py | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index cbeebab..47bc185 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -22,6 +22,30 @@ GITHUB_LATEST_RELEASE = 'https://api.%s/repos/%s/releases/latest' GITHUB_MAX_TAG = 'https://api.%s/repos/%s/git/refs/tags' GITHUB_GRAPHQL_URL = 'https://api.%s/graphql' +async def get_version(name, conf, **kwargs): + global RATE_LIMITED_ERROR, ALLOW_REQUEST + + if RATE_LIMITED_ERROR: + raise RuntimeError('rate limited') + + if ALLOW_REQUEST is None: + ALLOW_REQUEST = asyncio.Event() + ALLOW_REQUEST.set() + + for _ in range(2): # retry once + try: + await ALLOW_REQUEST.wait() + return await get_version_real(name, conf, **kwargs) + except HTTPError as e: + if e.code in [403, 429]: + if n := check_ratelimit(e, name): + ALLOW_REQUEST.clear() + await asyncio.sleep(n+1) + ALLOW_REQUEST.set() + continue + RATE_LIMITED_ERROR = True + raise + async def enhance_version_with_commit_info( result: RichResult, host: str, From 6eb98fa1767e728a2e95a4e24b35e3483d813abb Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 05:50:22 -0500 Subject: [PATCH 13/40] aa --- nvchecker_source/github.py | 23 +++++++++++++++++++++++ 1 file changed, 23 insertions(+) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index 47bc185..7f97dcd 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -22,6 +22,29 @@ GITHUB_LATEST_RELEASE = 'https://api.%s/repos/%s/releases/latest' GITHUB_MAX_TAG = 'https://api.%s/repos/%s/git/refs/tags' GITHUB_GRAPHQL_URL = 'https://api.%s/graphql' 
+async def get_commit_count(url: str, headers: dict) -> int: + """Get the total commit count using pagination.""" + params = {'per_page': '1'} + + response = await session.get( + url, + params=params, + headers=headers + ) + + commit_count = 1 + if 'Link' in response.headers: + link_header = response.headers['Link'] + for link in link_header.split(', '): + if 'rel="last"' in link: + url = link[link.find("<") + 1:link.find(">")] + query_params = parse_qs(urlparse(url).query) + if 'page' in query_params: + commit_count = int(query_params['page'][0]) + break + + return commit_count + async def get_version(name, conf, **kwargs): global RATE_LIMITED_ERROR, ALLOW_REQUEST From eaff21c5d60d79ee7f0df30f48d6918ef36c6762 Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 05:55:31 -0500 Subject: [PATCH 14/40] aa --- nvchecker_source/github.py | 85 ++++++-------------------------------- 1 file changed, 13 insertions(+), 72 deletions(-) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index 7f97dcd..11efae7 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -93,75 +93,6 @@ async def enhance_version_with_commit_info( url=result.url ) -async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult: - host, repo, query, token = key - owner, reponame = repo.split('/') - headers = { - 'Authorization': f'bearer {token}', - 'Content-Type': 'application/json', - } - q = QUERY_LATEST_TAG.format( - owner=owner, - name=reponame, - query=query, - ) - - res = await session.post( - GITHUB_GRAPHQL_URL % host, - headers=headers, - json={'query': q}, - ) - j = res.json() - - refs = j['data']['repository']['refs']['edges'] - if not refs: - raise GetVersionError('no tag found') - - version = refs[0]['node']['name'] - revision = refs[0]['node']['target']['oid'] - return RichResult( - version=version, - gitref=f"refs/tags/{version}", - revision=revision, - url=f'https://github.com/{repo}/releases/tag/{version}', - ) - -async def get_latest_release_with_prereleases(key: Tuple[str, str, str, str]) -> RichResult: - host, repo, token, use_release_name = key - owner, reponame = repo.split('/') - headers = { - 'Authorization': f'bearer {token}', - 'Content-Type': 'application/json', - } - q = QUERY_LATEST_RELEASE_WITH_PRERELEASES.format( - owner=owner, - name=reponame, - ) - - res = await session.post( - GITHUB_GRAPHQL_URL % host, - headers=headers, - json={'query': q}, - ) - j = res.json() - - refs = j['data']['repository']['releases']['edges'] - if not refs: - raise GetVersionError('no release found') - - tag_name = refs[0]['node']['tag']['name'] - if use_release_name: - version = refs[0]['node']['name'] - else: - version = tag_name - - return RichResult( - version=version, - gitref=f"refs/tags/{tag_name}", - revision=refs[0]['node']['tagCommit']['oid'], - url=refs[0]['node']['url'], - ) - async def get_version_real( name: str, conf: Entry, *, cache: AsyncCache, keymanager: KeyManager, @@ -171,17 +102,22 @@ async def get_version_real( host = conf.get('host', "github.com") use_commit_info = conf.get('use_commit_info', False) - # Load token from config + # Load token from config or keymanager token = conf.get('token') - # Load token from keyman if token is None: token = keymanager.get_key(host.lower(), 'github') + # Set up headers with proper authentication headers = { 'Accept': 'application/vnd.github.quicksilver-preview+json', } + + # Now ensure we always add Authorization header if we have a token if token: - headers['Authorization'] = f'token {token}' + if 
token.startswith('github_pat_'): # Personal Access Token (Fine-grained) + headers['Authorization'] = f'Bearer {token}' + else: + headers['Authorization'] = f'token {token}' use_latest_tag = conf.get('use_latest_tag', False) if use_latest_tag: @@ -207,6 +143,11 @@ async def get_version_real( br = conf.get('branch') path = conf.get('path') use_max_tag = conf.get('use_max_tag', False) + + # Check for token requirement early for max_tag + if use_max_tag and not token: + raise GetVersionError('token not given but it is required for max_tag') + if use_latest_release: url = GITHUB_LATEST_RELEASE % (host, repo) elif use_max_tag: From 12d8e3daa768ed9b8f6c78f9d5de4983695387f9 Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 06:03:51 -0500 Subject: [PATCH 15/40] jjj --- nvchecker_source/github.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index 11efae7..d51c53e 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -1,6 +1,6 @@ # MIT licensed # Copyright (c) 2013-2020, 2024 lilydjwg , et al. - +# import time from urllib.parse import urlencode, parse_qs, urlparse from typing import List, Tuple, Union, Optional From e56157145a285e1e2cd8c5524295e17135117967 Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 06:09:04 -0500 Subject: [PATCH 16/40] ss --- nvchecker_source/github.py | 149 ++++++++++++++++++------------------- 1 file changed, 71 insertions(+), 78 deletions(-) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index d51c53e..578e5df 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -107,105 +107,89 @@ async def get_version_real( if token is None: token = keymanager.get_key(host.lower(), 'github') - # Set up headers with proper authentication headers = { 'Accept': 'application/vnd.github.quicksilver-preview+json', } - # Now ensure we always add Authorization header if we have a token if token: - if token.startswith('github_pat_'): # Personal Access Token (Fine-grained) + if token.startswith('github_pat_'): headers['Authorization'] = f'Bearer {token}' else: headers['Authorization'] = f'token {token}' use_latest_tag = conf.get('use_latest_tag', False) - if use_latest_tag: - if not token: - raise GetVersionError('token not given but it is required') - - query = conf.get('query', '') - result = await cache.get((host, repo, query, token), get_latest_tag) - return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) - use_latest_release = conf.get('use_latest_release', False) include_prereleases = conf.get('include_prereleases', False) - use_release_name = conf.get('use_release_name', False) - if use_latest_release and include_prereleases: - if not token: - raise GetVersionError('token not given but it is required') - - result = await cache.get( - (host, repo, token, use_release_name), - get_latest_release_with_prereleases) - return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) - - br = conf.get('branch') - path = conf.get('path') use_max_tag = conf.get('use_max_tag', False) - - # Check for token requirement early for max_tag - if use_max_tag and not token: - raise GetVersionError('token not given but it is required for max_tag') + use_release_name = conf.get('use_release_name', False) - if use_latest_release: - url = GITHUB_LATEST_RELEASE % (host, repo) - elif use_max_tag: - url = GITHUB_MAX_TAG % (host, repo) - else: + # Token requirement checks + if 
any([use_latest_tag, (use_latest_release and include_prereleases), use_max_tag]) and not token: + raise GetVersionError('token not given but it is required for this operation') + + try: + if use_latest_tag: + query = conf.get('query', '') + result = await cache.get((host, repo, query, token), get_latest_tag) + return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) + + if use_latest_release: + url = GITHUB_LATEST_RELEASE % (host, repo) + try: + data = await cache.get_json(url, headers=headers) + if 'tag_name' not in data: + raise GetVersionError('No release found in upstream repository.') + + version = data['name'] if use_release_name else data['tag_name'] + result = RichResult( + version=version, + gitref=f"refs/tags/{data['tag_name']}", + url=data['html_url'], + ) + return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) + except HTTPError as e: + if e.code == 404: + raise GetVersionError(f'No releases found for repository {repo}. The repository might not have any releases yet.') + raise + + if use_max_tag: + url = GITHUB_MAX_TAG % (host, repo) + try: + data = await cache.get_json(url, headers=headers) + tags: List[Union[str, RichResult]] = [ + RichResult( + version=ref['ref'].split('/', 2)[-1], + gitref=ref['ref'], + revision=ref['object']['sha'], + url=f'https://github.com/{repo}/releases/tag/{ref["ref"].split("/", 2)[-1]}', + ) for ref in data + ] + if not tags: + raise GetVersionError('No tags found in upstream repository.') + + if use_commit_info: + return [await enhance_version_with_commit_info( + tag, host, repo, headers, use_commit_info + ) for tag in tags if isinstance(tag, RichResult)] + return tags + except HTTPError as e: + if e.code == 404: + raise GetVersionError(f'No tags found for repository {repo}. The repository might not have any tags yet.') + raise + + # Default: use commits + br = conf.get('branch') + path = conf.get('path') url = GITHUB_URL % (host, repo) parameters = {} if br: parameters['sha'] = br if path: parameters['path'] = path - url += '?' + urlencode(parameters) + if parameters: + url += '?' 
+ urlencode(parameters) - data = await cache.get_json(url, headers=headers) - - if use_max_tag: - tags: List[Union[str, RichResult]] = [ - RichResult( - version=ref['ref'].split('/', 2)[-1], - gitref=ref['ref'], - revision=ref['object']['sha'], - url=f'https://github.com/{repo}/releases/tag/{ref["ref"].split("/", 2)[-1]}', - ) for ref in data - ] - if not tags: - raise GetVersionError('No tag found in upstream repository.') - - # Enhance all tags with commit info if enabled - if use_commit_info: - enhanced_tags = [] - for tag in tags: - if isinstance(tag, RichResult): - enhanced_tag = await enhance_version_with_commit_info( - tag, host, repo, headers, use_commit_info - ) - enhanced_tags.append(enhanced_tag) - else: - enhanced_tags.append(tag) - return enhanced_tags - return tags - - if use_latest_release: - if 'tag_name' not in data: - raise GetVersionError('No release found in upstream repository.') - - if use_release_name: - version = data['name'] - else: - version = data['tag_name'] - - result = RichResult( - version=version, - gitref=f"refs/tags/{data['tag_name']}", - url=data['html_url'], - ) - return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) - - else: + data = await cache.get_json(url, headers=headers) version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.') result = RichResult( @@ -215,6 +199,15 @@ async def get_version_real( ) return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) + except HTTPError as e: + if e.code == 404: + raise GetVersionError(f'Repository {repo} not found or access denied.') + elif e.code in [403, 429]: + if n := check_ratelimit(e, name): + raise GetVersionError(f'Rate limited. Try again in {n} seconds or use an API token.') + raise GetVersionError('Rate limit exceeded. Please use an API token to increase the allowance.') + raise + def check_ratelimit(exc: HTTPError, name: str) -> Optional[int]: res = exc.response if not res: From a7798cb8c00ec973144c9a7616a7e3cbbd743b6a Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 06:18:01 -0500 Subject: [PATCH 17/40] aa --- nvchecker_source/github.py | 38 ++++++++++++++++++++++++++++++++++---- 1 file changed, 34 insertions(+), 4 deletions(-) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index 578e5df..b27c9e8 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -8,6 +8,38 @@ import asyncio import structlog + +def get_github_token(conf: dict, host: str, keymanager: KeyManager) -> Optional[str]: + """ + Get GitHub token with the following priority: + 1. Token from config + 2. Token from keymanager + 3. 
Token from GITHUB_TOKEN environment variable + + Args: + conf: Configuration dictionary + host: GitHub host (e.g., "github.com") + keymanager: KeyManager instance for managing tokens + + Returns: + str or None: GitHub token if found, None otherwise + """ + # Check config first + token = conf.get('token') + if token is not None: + return token + + # Then check keymanager + try: + token = keymanager.get_key(host.lower(), 'github') + if token: + return token + except Exception: + pass + + # Finally check environment variable + return os.environ.get('GITHUB_TOKEN') + from nvchecker.api import ( VersionResult, Entry, AsyncCache, KeyManager, HTTPError, session, RichResult, GetVersionError, @@ -102,10 +134,8 @@ async def get_version_real( host = conf.get('host', "github.com") use_commit_info = conf.get('use_commit_info', False) - # Load token from config or keymanager - token = conf.get('token') - if token is None: - token = keymanager.get_key(host.lower(), 'github') + # Load token from config, keymanager or env GITHUB_TOKEN + token = get_github_token(conf, host, keymanager) headers = { 'Accept': 'application/vnd.github.quicksilver-preview+json', From eff7c1f968ebc6cb0f27a3d6ce38aad3e8f00f55 Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 06:22:01 -0500 Subject: [PATCH 18/40] s --- nvchecker_source/github.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index b27c9e8..37fcf73 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -5,6 +5,7 @@ import time from urllib.parse import urlencode, parse_qs, urlparse from typing import List, Tuple, Union, Optional import asyncio +from nvchecker.api import KeyManager import structlog From 79214329c94b88f83f5302a6612283c4ef99ce41 Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 06:22:53 -0500 Subject: [PATCH 19/40] ss --- nvchecker_source/github.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index 37fcf73..780287c 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -2,6 +2,7 @@ # Copyright (c) 2013-2020, 2024 lilydjwg , et al. # import time +import os from urllib.parse import urlencode, parse_qs, urlparse from typing import List, Tuple, Union, Optional import asyncio From 62c1e7d8b4fcc9caaf6e442a5f139c802e61c848 Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 06:35:19 -0500 Subject: [PATCH 20/40] iii --- nvchecker_source/github.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index 780287c..573f323 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -222,10 +222,9 @@ async def get_version_real( url += '?' 
+ urlencode(parameters) data = await cache.get_json(url, headers=headers) - version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.') result = RichResult( - version=version, + version=data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'), revision=data[0]['sha'], url=data[0]['html_url'], ) From 8c1448193b4c564b5afd28a5066b39f24e14fd02 Mon Sep 17 00:00:00 2001 From: involution Date: Tue, 19 Nov 2024 20:46:16 -0500 Subject: [PATCH 21/40] i --- graphqlquery.txt | 49 ++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 graphqlquery.txt diff --git a/graphqlquery.txt b/graphqlquery.txt new file mode 100644 index 0000000..448361f --- /dev/null +++ b/graphqlquery.txt @@ -0,0 +1,49 @@ +query() { + repository(owner: "GNOME", name: "gnome-shell") { + # Default branch commits + defaultBranchRef { + target { + ... on Commit { + history(first: 1) { + totalCount + edges { + node { + oid + committedDate + } + } + } + } + } + } + # All tags + refs(refPrefix: "refs/tags/", first: 1, orderBy: {field: TAG_COMMIT_DATE, direction: DESC}) { + edges { + node { + name + target { + ... on Commit { + oid + } + } + } + } + } + # All releases (filter pre-releases in your application logic) + releases(first: 100) { + totalCount + edges { + node { + name + tagName + isPrerelease + createdAt + } + } + pageInfo { + hasNextPage + endCursor + } + } + } +} \ No newline at end of file From ec556f69571b580c434af19775e8aaa5fde00d99 Mon Sep 17 00:00:00 2001 From: envolution Date: Tue, 19 Nov 2024 22:51:56 -0500 Subject: [PATCH 22/40] ii --- Structuregraphql.py | 55 +++++ graphqlquery.txt | 14 +- nvchecker_source/github-test.py | 274 ++++++++++++++++++++++ nvchecker_source/github.py | 403 ++++++++++++++++---------------- 4 files changed, 546 insertions(+), 200 deletions(-) create mode 100644 Structuregraphql.py create mode 100644 nvchecker_source/github-test.py diff --git a/Structuregraphql.py b/Structuregraphql.py new file mode 100644 index 0000000..0168f4f --- /dev/null +++ b/Structuregraphql.py @@ -0,0 +1,55 @@ +# Extract important variables from the GitHub GraphQL JSON response +data = j["data"] + +# Rate limit information +rate_limit = { + "max_requests": data["rateLimit"]["limit"], # Maximum allowed requests + "remaining_requests": data["rateLimit"]["remaining"], # Remaining requests in the current window + "reset_time": data["rateLimit"]["resetAt"], # Time when the rate limit resets +} + +# Repository information +repository = data["repository"] + +# Default branch commit history +default_branch_commit = { + "total_commits": repository["defaultBranchRef"]["target"]["history"]["totalCount"], # Total number of commits + "latest_commit_oid": repository["defaultBranchRef"]["target"]["history"]["edges"][0]["node"]["oid"], # Latest commit hash (OID) + "latest_commit_date": repository["defaultBranchRef"]["target"]["history"]["edges"][0]["node"]["committedDate"], # Latest commit date +} + +# Tags information +tags = [ + edge["node"]["name"] for edge in repository["refs"]["edges"] +] # List of tag names (if available) + +# Releases information +releases = [ + { + "name": release["node"]["name"], # Release name + "url": release["node"]["url"], # Release URL + "tag": release["node"]["tagName"], # Tag associated with the release + "is_prerelease": release["node"]["isPrerelease"], # Whether this is a pre-release + "is_latest": release["node"]["isLatest"], # Whether this is the latest 
release + "created_at": release["node"]["createdAt"], # Release creation date + } + for release in repository["releases"]["edges"] +] + +# Pagination info for releases +releases_pagination = { + "has_next_page": repository["releases"]["pageInfo"]["hasNextPage"], # Whether there are more releases + "end_cursor": repository["releases"]["pageInfo"]["endCursor"], # Cursor for the next page of releases +} + +# Organized result as a dictionary +result = { + "rate_limit": rate_limit, + "default_branch_commit": default_branch_commit, + "tags": tags, + "releases": releases, + "releases_pagination": releases_pagination, +} + +# Example of accessing the organized data +print(result) \ No newline at end of file diff --git a/graphqlquery.txt b/graphqlquery.txt index 448361f..e36bba1 100644 --- a/graphqlquery.txt +++ b/graphqlquery.txt @@ -1,5 +1,10 @@ -query() { - repository(owner: "GNOME", name: "gnome-shell") { +query { + rateLimit { + limit + remaining + resetAt + } + repository(owner: "drwetter", name: "testssl.sh") { # Default branch commits defaultBranchRef { target { @@ -24,19 +29,22 @@ query() { target { ... on Commit { oid + url } } } } } # All releases (filter pre-releases in your application logic) - releases(first: 100) { +releases(first: 100, orderBy: { field: CREATED_AT, direction: DESC }) { totalCount edges { node { name + url tagName isPrerelease + isLatest createdAt } } diff --git a/nvchecker_source/github-test.py b/nvchecker_source/github-test.py new file mode 100644 index 0000000..88b3317 --- /dev/null +++ b/nvchecker_source/github-test.py @@ -0,0 +1,274 @@ +import os # Added for environment variable access +import time +from urllib.parse import urlencode +from typing import List, Tuple, Union, Optional +import asyncio +import json # Added for JSON handling + +import structlog + +from nvchecker.api import ( + VersionResult, Entry, AsyncCache, KeyManager, + HTTPError, session, RichResult, GetVersionError, +) + +logger = structlog.get_logger(logger_name=__name__) +ALLOW_REQUEST = None +RATE_LIMITED_ERROR = False + +GITHUB_GRAPHQL_URL = 'https://api.%s/graphql' + +async def get_version(name, conf, **kwargs): + global RATE_LIMITED_ERROR, ALLOW_REQUEST + + if RATE_LIMITED_ERROR: + raise RuntimeError('rate limited') + + if ALLOW_REQUEST is None: + ALLOW_REQUEST = asyncio.Event() + ALLOW_REQUEST.set() + + for _ in range(2): # retry once + try: + await ALLOW_REQUEST.wait() + return await get_version_real(name, conf, **kwargs) + except HTTPError as e: + if e.code in [403, 429]: + if n := check_ratelimit(e, name): + ALLOW_REQUEST.clear() + await asyncio.sleep(n+1) + ALLOW_REQUEST.set() + continue + RATE_LIMITED_ERROR = True + raise + +QUERY_GITHUB = """ +query { + rateLimit { + limit + remaining + resetAt + } + repository(owner: "$owner", name: "$name") { + # Default branch commits + defaultBranchRef { + target { + ... on Commit { + history(first: 1) { + totalCount + edges { + node { + oid + committedDate + } + } + } + } + } + } + # All tags + refs(refPrefix: "refs/tags/", first: 1, orderBy: { + field: TAG_COMMIT_DATE, + direction: DESC}) + { + edges { + node { + name + target { + ...
on Commit { + oid + url + } + } + } + } + } + # All releases (filter pre-releases in your application logic) +releases(first: 100, orderBy: { field: CREATED_AT, direction: DESC }) { + totalCount + edges { + node { + name + url + tagName + isPrerelease + isLatest + createdAt + } + } + pageInfo { + hasNextPage + endCursor + } + } + } +} +""" + +async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult: + host, repo, query, token = key + owner, reponame = repo.split('/') + headers = { + 'Authorization': f'bearer {token}', + 'Content-Type': 'application/json', + } + + # Make GraphQL query + query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) + async with session.post( + GITHUB_GRAPHQL_URL % host, + headers=headers, + json={'query': query_vars} + ) as res: + j = await res.json() + if 'errors' in j: + raise GetVersionError(f"GitHub API error: {j['errors']}") + + refs = j['data']['repository']['refs']['edges'] + if not refs: + raise GetVersionError('no tag found') + + version = refs[0]['node']['name'] + revision = refs[0]['node']['target']['oid'] + + return RichResult( + version=version, + gitref=f"refs/tags/{version}", + revision=revision, + url=f'https://github.com/{repo}/releases/tag/{version}', + ) + +async def get_latest_release_with_prereleases(key: Tuple[str, str, str, str]) -> RichResult: + host, repo, token, use_release_name = key + owner, reponame = repo.split('/') + headers = { + 'Authorization': f'bearer {token}', + 'Content-Type': 'application/json', + } + + # Make GraphQL query + query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) + async with session.post( + GITHUB_GRAPHQL_URL % host, + headers=headers, + json={'query': query_vars} + ) as res: + j = await res.json() + if 'errors' in j: + raise GetVersionError(f"GitHub API error: {j['errors']}") + + releases = j['data']['repository']['releases']['edges'] + if not releases: + raise GetVersionError('no release found') + + latest_release = releases[0]['node'] + tag_name = latest_release['tagName'] + version = latest_release['name'] if use_release_name else tag_name + + return RichResult( + version=version, + gitref=f"refs/tags/{tag_name}", + revision=latest_release['target']['oid'], + url=latest_release['url'], + ) + +async def get_version_real( + name: str, conf: Entry, *, + cache: AsyncCache, keymanager: KeyManager, + **kwargs, +) -> VersionResult: + repo = conf['github'] + owner, reponame = repo.split('/') + host = conf.get('host', "github.com") + + # Load token from config + token = conf.get('token') + # Load token from keyman + if token is None: + token = keymanager.get_key(host.lower(), 'github') + # Load token from environment + if token is None: + token = os.environ.get('GITHUB_TOKEN') + + use_latest_tag = conf.get('use_latest_tag', False) + if use_latest_tag: + if not token: + raise GetVersionError('token not given but it is required') + + query = conf.get('query', '') + return await cache.get((host, repo, query, token), get_latest_tag) + + headers = { + 'Authorization': f'bearer {token}', + 'Content-Type': 'application/json', + } + + # Make GraphQL query + query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) + async with session.post( + GITHUB_GRAPHQL_URL % host, + headers=headers, + json={'query': query_vars} + ) as res: + j = await res.json() + if 'errors' in j: + raise GetVersionError(f"GitHub API error: {j['errors']}") + + use_max_tag = conf.get('use_max_tag', False) + if use_max_tag: + refs = j['data']['repository']['refs']['edges']
+ tags: List[Union[str, RichResult]] = [ + RichResult( + version=ref['node']['name'], + gitref=f"refs/tags/{ref['node']['name']}", + revision=ref['node']['target']['oid'], + url=f'https://github.com/{repo}/releases/tag/{ref["node"]["name"]}', + ) for ref in refs + ] + if not tags: + raise GetVersionError('No tag found in upstream repository.') + return tags + use_latest_release = conf.get('use_latest_release', False) + if use_latest_release: + releases = j['data']['repository']['releases']['edges'] + if not releases: + raise GetVersionError('No release found in upstream repository.') + + latest_release = releases[0]['node'] + use_release_name = conf.get('use_release_name', False) + version = latest_release['name'] if use_release_name else latest_release['tagName'] + + return RichResult( + version=version, + gitref=f"refs/tags/{latest_release['tagName']}", + url=latest_release['url'], + ) + else: + commit = j['data']['repository']['defaultBranchRef']['target']['history']['edges'][0]['node'] + return RichResult( + version=commit['committedDate'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'), + revision=commit['oid'], + url=f'https://github.com/{repo}/commit/{commit["oid"]}', + ) + +def check_ratelimit(exc: HTTPError, name: str) -> Optional[int]: + res = exc.response + if not res: + raise exc + + if v := res.headers.get('retry-after'): + n = int(v) + logger.warning('retry-after', n=n) + return n + + # default -1 is used to re-raise the exception + n = int(res.headers.get('X-RateLimit-Remaining', -1)) + if n == 0: + reset = int(res.headers.get('X-RateLimit-Reset')) + logger.error(f'rate limited, resetting at {time.ctime(reset)}. ' + 'Or get an API token to increase the allowance if not yet', + name = name, + reset = reset) + return None + + raise exc diff --git a/nvchecker_source/github.py b/nvchecker_source/github.py index 573f323..8585366 100644 --- a/nvchecker_source/github.py +++ b/nvchecker_source/github.py @@ -1,47 +1,13 @@ # MIT licensed # Copyright (c) 2013-2020, 2024 lilydjwg , et al. -# + import time -import os -from urllib.parse import urlencode, parse_qs, urlparse +from urllib.parse import urlencode from typing import List, Tuple, Union, Optional import asyncio -from nvchecker.api import KeyManager import structlog - -def get_github_token(conf: dict, host: str, keymanager: KeyManager) -> Optional[str]: - """ - Get GitHub token with the following priority: - 1. Token from config - 2. Token from keymanager - 3.
Token from GITHUB_TOKEN environment variable - - Args: - conf: Configuration dictionary - host: GitHub host (e.g., "github.com") - keymanager: KeyManager instance for managing tokens - - Returns: - str or None: GitHub token if found, None otherwise - """ - # Check config first - token = conf.get('token') - if token is not None: - return token - - # Then check keymanager - try: - token = keymanager.get_key(host.lower(), 'github') - if token: - return token - except Exception: - pass - - # Finally check environment variable - return os.environ.get('GITHUB_TOKEN') - from nvchecker.api import ( VersionResult, Entry, AsyncCache, KeyManager, HTTPError, session, RichResult, GetVersionError, @@ -53,32 +19,10 @@ RATE_LIMITED_ERROR = False GITHUB_URL = 'https://api.%s/repos/%s/commits' GITHUB_LATEST_RELEASE = 'https://api.%s/repos/%s/releases/latest' +# https://developer.github.com/v3/git/refs/#get-all-references GITHUB_MAX_TAG = 'https://api.%s/repos/%s/git/refs/tags' GITHUB_GRAPHQL_URL = 'https://api.%s/graphql' -async def get_commit_count(url: str, headers: dict) -> int: - """Get the total commit count using pagination.""" - params = {'per_page': '1'} - - response = await session.get( - url, - params=params, - headers=headers - ) - - commit_count = 1 - if 'Link' in response.headers: - link_header = response.headers['Link'] - for link in link_header.split(', '): - if 'rel="last"' in link: - url = link[link.find("<") + 1:link.find(">")] - query_params = parse_qs(urlparse(url).query) - if 'page' in query_params: - commit_count = int(query_params['page'][0]) - break - - return commit_count - async def get_version(name, conf, **kwargs): global RATE_LIMITED_ERROR, ALLOW_REQUEST @@ -103,160 +47,225 @@ async def get_version(name, conf, **kwargs): RATE_LIMITED_ERROR = True raise -async def enhance_version_with_commit_info( - result: RichResult, - host: str, - repo: str, - headers: dict, - use_commit_info: bool -) -> RichResult: - """Add commit count and SHA to version if use_commit_info is True.""" - if not use_commit_info: - return result - - url = GITHUB_URL % (host, repo) - commit_count = await get_commit_count(url, headers) - - # Create new version string with commit info - enhanced_version = f"{result.version}.r{commit_count}.g{result.revision[:9]}" - - return RichResult( - version=enhanced_version, - gitref=result.gitref, - revision=result.revision, - url=result.url - ) +QUERY_LATEST_TAG = ''' +{{ + repository(name: "{name}", owner: "{owner}") {{ + refs(refPrefix: "refs/tags/", first: 1, + query: "{query}", + orderBy: {{field: TAG_COMMIT_DATE, direction: DESC}}) {{ + edges {{ + node {{ + name + target {{ + oid + }} + }} + }} + }} + }} +}} +''' + +QUERY_LATEST_RELEASE_WITH_PRERELEASES = ''' +{{ + repository(name: "{name}", owner: "{owner}") {{ + releases(first: 1, orderBy: {{field: CREATED_AT, direction: DESC}}) {{ + edges {{ + node {{ + name + url + tag {{ + name + }} + tagCommit {{ + oid + }} + }} + }} + }} + }} +}} +''' + +async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult: + host, repo, query, token = key + owner, reponame = repo.split('/') + headers = { + 'Authorization': f'bearer {token}', + 'Content-Type': 'application/json', + } + q = QUERY_LATEST_TAG.format( + owner = owner, + name = reponame, + query = query, + ) + + res = await session.post( + GITHUB_GRAPHQL_URL % host, + headers = headers, + json = {'query': q}, + ) + j = res.json() + + refs = j['data']['repository']['refs']['edges'] + if not refs: + raise GetVersionError('no tag found') + + version = 
refs[0]['node']['name'] + revision = refs[0]['node']['target']['oid'] + return RichResult( + version = version, + gitref = f"refs/tags/{version}", + revision = revision, + url = f'https://github.com/{repo}/releases/tag/{version}', + ) + +async def get_latest_release_with_prereleases(key: Tuple[str, str, str, str]) -> RichResult: + host, repo, token, use_release_name = key + owner, reponame = repo.split('/') + headers = { + 'Authorization': f'bearer {token}', + 'Content-Type': 'application/json', + } + q = QUERY_LATEST_RELEASE_WITH_PRERELEASES.format( + owner = owner, + name = reponame, + ) + + res = await session.post( + GITHUB_GRAPHQL_URL % host, + headers = headers, + json = {'query': q}, + ) + j = res.json() + + refs = j['data']['repository']['releases']['edges'] + if not refs: + raise GetVersionError('no release found') + + tag_name = refs[0]['node']['tag']['name'] + if use_release_name: + version = refs[0]['node']['name'] + else: + version = tag_name + + return RichResult( + version = version, + gitref = f"refs/tags/{tag_name}", + revision = refs[0]['node']['tagCommit']['oid'], + url = refs[0]['node']['url'], + ) async def get_version_real( - name: str, conf: Entry, *, - cache: AsyncCache, keymanager: KeyManager, - **kwargs, + name: str, conf: Entry, *, + cache: AsyncCache, keymanager: KeyManager, + **kwargs, ) -> VersionResult: - repo = conf['github'] - host = conf.get('host', "github.com") - use_commit_info = conf.get('use_commit_info', False) + repo = conf['github'] + host = conf.get('host', "github.com") - # Load token from config, keymanager or env GITHUB_TOKEN - token = get_github_token(conf, host, keymanager) + # Load token from config + token = conf.get('token') + # Load token from keyman + if token is None: + token = keymanager.get_key(host.lower(), 'github') - headers = { - 'Accept': 'application/vnd.github.quicksilver-preview+json', - } - - if token: - if token.startswith('github_pat_'): - headers['Authorization'] = f'Bearer {token}' - else: - headers['Authorization'] = f'token {token}' + use_latest_tag = conf.get('use_latest_tag', False) + if use_latest_tag: + if not token: + raise GetVersionError('token not given but it is required') - use_latest_tag = conf.get('use_latest_tag', False) - use_latest_release = conf.get('use_latest_release', False) - include_prereleases = conf.get('include_prereleases', False) - use_max_tag = conf.get('use_max_tag', False) - use_release_name = conf.get('use_release_name', False) + query = conf.get('query', '') + return await cache.get((host, repo, query, token), get_latest_tag) # type: ignore - # Token requirement checks - if any([use_latest_tag, (use_latest_release and include_prereleases), use_max_tag]) and not token: - raise GetVersionError('token not given but it is required for this operation') + use_latest_release = conf.get('use_latest_release', False) + include_prereleases = conf.get('include_prereleases', False) + use_release_name = conf.get('use_release_name', False) + if use_latest_release and include_prereleases: + if not token: + raise GetVersionError('token not given but it is required') - try: - if use_latest_tag: - query = conf.get('query', '') - result = await cache.get((host, repo, query, token), get_latest_tag) - return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) + return await cache.get( + (host, repo, token, use_release_name), + get_latest_release_with_prereleases) # type: ignore - if use_latest_release: - url = GITHUB_LATEST_RELEASE % (host, repo) - try: - data = await 
cache.get_json(url, headers=headers) - if 'tag_name' not in data: - raise GetVersionError('No release found in upstream repository.') + br = conf.get('branch') + path = conf.get('path') + use_max_tag = conf.get('use_max_tag', False) + if use_latest_release: + url = GITHUB_LATEST_RELEASE % (host, repo) + elif use_max_tag: + url = GITHUB_MAX_TAG % (host, repo) + else: + url = GITHUB_URL % (host, repo) + parameters = {} + if br: + parameters['sha'] = br + if path: + parameters['path'] = path + url += '?' + urlencode(parameters) + headers = { + 'Accept': 'application/vnd.github.quicksilver-preview+json', + } + if token: + headers['Authorization'] = f'token {token}' - version = data['name'] if use_release_name else data['tag_name'] - result = RichResult( - version=version, - gitref=f"refs/tags/{data['tag_name']}", - url=data['html_url'], - ) - return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) - except HTTPError as e: - if e.code == 404: - raise GetVersionError(f'No releases found for repository {repo}. The repository might not have any releases yet.') - raise + data = await cache.get_json(url, headers = headers) - if use_max_tag: - url = GITHUB_MAX_TAG % (host, repo) - try: - data = await cache.get_json(url, headers=headers) - tags: List[Union[str, RichResult]] = [ - RichResult( - version=ref['ref'].split('/', 2)[-1], - gitref=ref['ref'], - revision=ref['object']['sha'], - url=f'https://github.com/{repo}/releases/tag/{ref["ref"].split("/", 2)[-1]}', - ) for ref in data - ] - if not tags: - raise GetVersionError('No tags found in upstream repository.') - - if use_commit_info: - return [await enhance_version_with_commit_info( - tag, host, repo, headers, use_commit_info - ) for tag in tags if isinstance(tag, RichResult)] - return tags - except HTTPError as e: - if e.code == 404: - raise GetVersionError(f'No tags found for repository {repo}. The repository might not have any tags yet.') - raise + if use_max_tag: + tags: List[Union[str, RichResult]] = [ + RichResult( + version = ref['ref'].split('/', 2)[-1], + gitref = ref['ref'], + revision = ref['object']['sha'], + url = f'https://github.com/{repo}/releases/tag/{ref["ref"].split("/", 2)[-1]}', + ) for ref in data + ] + if not tags: + raise GetVersionError('No tag found in upstream repository.') + return tags - # Default: use commits - br = conf.get('branch') - path = conf.get('path') - url = GITHUB_URL % (host, repo) - parameters = {} - if br: - parameters['sha'] = br - if path: - parameters['path'] = path - if parameters: - url += '?' + urlencode(parameters) + if use_latest_release: + if 'tag_name' not in data: + raise GetVersionError('No release found in upstream repository.') - data = await cache.get_json(url, headers=headers) - - result = RichResult( - version=data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'), - revision=data[0]['sha'], - url=data[0]['html_url'], - ) - return await enhance_version_with_commit_info(result, host, repo, headers, use_commit_info) + if use_release_name: + version = data['name'] + else: + version = data['tag_name'] - except HTTPError as e: - if e.code == 404: - raise GetVersionError(f'Repository {repo} not found or access denied.') - elif e.code in [403, 429]: - if n := check_ratelimit(e, name): - raise GetVersionError(f'Rate limited. Try again in {n} seconds or use an API token.') - raise GetVersionError('Rate limit exceeded. 
Please use an API token to increase the allowance.') - raise + return RichResult( + version = version, + gitref = f"refs/tags/{data['tag_name']}", + url = data['html_url'], + ) + + else: + return RichResult( + # YYYYMMDD.HHMMSS + version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'), + revision = data[0]['sha'], + url = data[0]['html_url'], + ) def check_ratelimit(exc: HTTPError, name: str) -> Optional[int]: - res = exc.response - if not res: - raise exc + res = exc.response + if not res: + raise exc - if v := res.headers.get('retry-after'): - n = int(v) - logger.warning('retry-after', n=n) - return n + if v := res.headers.get('retry-after'): + n = int(v) + logger.warning('retry-after', n=n) + return n - # default -1 is used to re-raise the exception - n = int(res.headers.get('X-RateLimit-Remaining', -1)) - if n == 0: - reset = int(res.headers.get('X-RateLimit-Reset')) - logger.error(f'rate limited, resetting at {time.ctime(reset)}. ' - 'Or get an API token to increase the allowance if not yet', - name=name, - reset=reset) - return None + # default -1 is used to re-raise the exception + n = int(res.headers.get('X-RateLimit-Remaining', -1)) + if n == 0: + reset = int(res.headers.get('X-RateLimit-Reset')) + logger.error(f'rate limited, resetting at {time.ctime(reset)}. ' + 'Or get an API token to increase the allowance if not yet', + name = name, + reset = reset) + return None - raise exc \ No newline at end of file + raise exc From 74fdf9110271d4c7b82f86114579cd9239ec74d1 Mon Sep 17 00:00:00 2001 From: envolution Date: Tue, 19 Nov 2024 23:01:23 -0500 Subject: [PATCH 23/40] aa --- nvchecker_source/github-test.py | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/nvchecker_source/github-test.py b/nvchecker_source/github-test.py index 88b3317..8b138f9 100644 --- a/nvchecker_source/github-test.py +++ b/nvchecker_source/github-test.py @@ -6,7 +6,7 @@ import asyncio import json # Added for JSON handling import structlog - +http_client = None from nvchecker.api import ( VersionResult, Entry, AsyncCache, KeyManager, HTTPError, session, RichResult, GetVersionError, @@ -21,6 +21,14 @@ GITHUB_GRAPHQL_URL = 'https://api.%s/graphql' async def get_version(name, conf, **kwargs): global RATE_LIMITED_ERROR, ALLOW_REQUEST + global http_client + + # Initialize the HTTP client if not already done + if http_client is None: + if asyncio.iscoroutine(session): + http_client = await session + http_client = session + if RATE_LIMITED_ERROR: raise RuntimeError('rate limited') @@ -115,7 +123,7 @@ async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult: # Make GraphQL query query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) - async with session.post( + async with http_client.post( GITHUB_GRAPHQL_URL % host, headers=headers, json={'query': query_vars} @@ -148,7 +156,7 @@ async def get_latest_release_with_prereleases(key: Tuple[str, str, str, str]) -> # Make GraphQL query query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) - async with session.post( + async with http_client.post( GITHUB_GRAPHQL_URL % host, headers=headers, json={'query': query_vars} @@ -205,7 +213,7 @@ async def get_version_real( # Make GraphQL query query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) - async with session.post( + async with http_client.post( GITHUB_GRAPHQL_URL % host, headers=headers, json={'query': query_vars} From fa4cfefc83a48aad02ae62ed0b22085173e994bc 
Mon Sep 17 00:00:00 2001 From: envolution Date: Tue, 19 Nov 2024 23:22:56 -0500 Subject: [PATCH 24/40] aa --- nvchecker_source/1.patch | 195 ++++++++++++++++++++++++++++++++ nvchecker_source/github-test.py | 120 +++++++++----------- 2 files changed, 251 insertions(+), 64 deletions(-) create mode 100644 nvchecker_source/1.patch diff --git a/nvchecker_source/1.patch b/nvchecker_source/1.patch new file mode 100644 index 0000000..465bf39 --- /dev/null +++ b/nvchecker_source/1.patch @@ -0,0 +1,195 @@ +--- github-test.py ++++ github-test.py +@@ -13,6 +13,7 @@ from nvchecker.api import ( + HTTPError, session, RichResult, GetVersionError, + ) + ++http_client = None + logger = structlog.get_logger(logger_name=__name__) + ALLOW_REQUEST = None + RATE_LIMITED_ERROR = False +@@ -20,8 +21,48 @@ RATE_LIMITED_ERROR = False + GITHUB_GRAPHQL_URL = 'https://api.%s/graphql' + ++async def execute_github_query(host: str, owner: str, reponame: str, token: str) -> dict: ++ """ ++ Execute GraphQL query against GitHub API and return the response data. ++ Centralizes error handling and query execution. ++ """ ++ global http_client ++ ++ # Initialize the HTTP client if not already done ++ if http_client is None: ++ if asyncio.iscoroutine(session): ++ http_client = await session ++ http_client = session ++ ++ headers = { ++ 'Authorization': f'bearer {token}', ++ 'Content-Type': 'application/json', ++ } ++ ++ query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) ++ ++ async with http_client.post( ++ GITHUB_GRAPHQL_URL % host, ++ headers=headers, ++ json={'query': query_vars} ++ ) as res: ++ j = await res.json() ++ if 'errors' in j: ++ raise GetVersionError(f"GitHub API error: {j['errors']}") ++ return j['data']['repository'] ++ ++def get_github_token(conf: Entry, host: str, keymanager: KeyManager) -> Optional[str]: ++ """Get GitHub token from config, keymanager, or environment.""" ++ token = conf.get('token') ++ if token is None: ++ token = keymanager.get_key(host.lower(), 'github') ++ if token is None: ++ token = os.environ.get('GITHUB_TOKEN') ++ return token ++ + async def get_version(name, conf, **kwargs): +- global RATE_LIMITED_ERROR, ALLOW_REQUEST ++ global RATE_LIMITED_ERROR, ALLOW_REQUEST ++ if RATE_LIMITED_ERROR: ++ raise RuntimeError('rate limited') + + if ALLOW_REQUEST is None: + ALLOW_REQUEST = asyncio.Event() +@@ -91,21 +132,11 @@ query { + + async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult: + host, repo, query, token = key + owner, reponame = repo.split('/') +- headers = { +- 'Authorization': f'bearer {token}', +- 'Content-Type': 'application/json', +- } + +- # Make GraphQL query +- query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) +- async with session.post( +- GITHUB_GRAPHQL_URL % host, +- headers=headers, +- json={'query': query_vars} +- ) as res: +- j = await res.json() +- if 'errors' in j: +- raise GetVersionError(f"GitHub API error: {j['errors']}") ++ if not token: ++ raise GetVersionError('token is required for latest tag query') ++ ++ repo_data = await execute_github_query(host, owner, reponame, token) + +- refs = j['data']['repository']['refs']['edges'] ++ refs = repo_data['refs']['edges'] + if not refs: + raise GetVersionError('no tag found') +@@ -120,21 +151,11 @@ async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult: + + async def get_latest_release_with_prereleases(key: Tuple[str, str, str, str]) -> RichResult: + host, repo, token, use_release_name = key + owner, reponame = repo.split('/') +- headers = { 
+- 'Authorization': f'bearer {token}', +- 'Content-Type': 'application/json', +- } + +- # Make GraphQL query +- query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) +- async with session.post( +- GITHUB_GRAPHQL_URL % host, +- headers=headers, +- json={'query': query_vars} +- ) as res: +- j = await res.json() +- if 'errors' in j: +- raise GetVersionError(f"GitHub API error: {j['errors']}") ++ if not token: ++ raise GetVersionError('token is required for latest release query') ++ ++ repo_data = await execute_github_query(host, owner, reponame, token) + +- releases = j['data']['repository']['releases']['edges'] ++ releases = repo_data['releases']['edges'] + if not releases: + raise GetVersionError('no release found') +@@ -199,30 +220,17 @@ async def get_version_real( + repo = conf['github'] + owner, reponame = repo.split('/') + host = conf.get('host', "github.com") ++ token = get_github_token(conf, host, keymanager) + +- # Load token from config +- token = conf.get('token') +- # Load token from keyman +- if token is None: +- token = keymanager.get_key(host.lower(), 'github') +- # Load token from environment +- if token is None: +- token = os.environ.get('GITHUB_TOKEN') +- + use_latest_tag = conf.get('use_latest_tag', False) + if use_latest_tag: + if not token: + raise GetVersionError('token not given but it is required') +- + query = conf.get('query', '') + return await cache.get((host, repo, query, token), get_latest_tag) + +- headers = { +- 'Authorization': f'bearer {token}', +- 'Content-Type': 'application/json', +- } ++ repo_data = await execute_github_query(host, owner, reponame, token) + +- # Make GraphQL query +- query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) +- async with session.post( +- GITHUB_GRAPHQL_URL % host, +- headers=headers, +- json={'query': query_vars} +- ) as res: +- j = await res.json() +- if 'errors' in j: +- raise GetVersionError(f"GitHub API error: {j['errors']}") +- +- use_max_tag = conf.ger('use_max_tag', False) ++ use_max_tag = conf.get('use_max_tag', False) + if use_max_tag: +- refs = j['data']['repository']['refs']['edges'] ++ refs = repo_data['refs']['edges'] + tags: List[Union[str, RichResult]] = [ + RichResult( + version=ref['node']['name'], +@@ -233,10 +241,10 @@ async def get_version_real( + if not tags: + raise GetVersionError('No tag found in upstream repository.') + return tags +- use_latest_release = conf.ger('use_latest_release', False) ++ use_latest_release = conf.get('use_latest_release', False) + if use_latest_release: +- releases = j['data']['repository']['releases']['edges'] +- ++ releases = repo_data['releases']['edges'] ++ + if not releases: + raise GetVersionError('No release found in upstream repository.') + + latest_release = releases[0]['node'] +- use_release_name = conf.ger('use_release_name', False) ++ use_release_name = conf.get('use_release_name', False) + version = latest_release['name'] if use_release_name else latest_release['tagName'] + + return RichResult( +@@ -245,7 +253,7 @@ async def get_version_real( + url=latest_release['url'], + ) + else: +- commit = j['data']['repository']['defaultBranchRef']['target']['history']['edges'][0]['node'] ++ commit = repo_data['defaultBranchRef']['target']['history']['edges'][0]['node'] + return RichResult( + version=commit['committedDate'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'), + revision=commit['oid'], diff --git a/nvchecker_source/github-test.py b/nvchecker_source/github-test.py index 8b138f9..320ad49 100644 --- 
a/nvchecker_source/github-test.py +++ b/nvchecker_source/github-test.py @@ -12,12 +12,52 @@ from nvchecker.api import ( HTTPError, session, RichResult, GetVersionError, ) +http_client = None logger = structlog.get_logger(logger_name=__name__) ALLOW_REQUEST = None RATE_LIMITED_ERROR = False GITHUB_GRAPHQL_URL = 'https://api.%s/graphql' +async def execute_github_query(host: str, owner: str, reponame: str, token: str) -> dict: + """ + Execute GraphQL query against GitHub API and return the response data. + Centralizes error handling and query execution. + """ + global http_client + + # Initialize the HTTP client if not already done + if http_client is None: + if asyncio.iscoroutine(session): + http_client = await session + http_client = session + + headers = { + 'Authorization': f'bearer {token}', + 'Content-Type': 'application/json', + } + + query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) + + async with http_client.post( + GITHUB_GRAPHQL_URL % host, + headers=headers, + json={'query': query_vars} + ) as res: + j = await res.json() + if 'errors' in j: + raise GetVersionError(f"GitHub API error: {j['errors']}") + return j['data']['repository'] + +def get_github_token(conf: Entry, host: str, keymanager: KeyManager) -> Optional[str]: + """Get GitHub token from config, keymanager, or environment.""" + token = conf.get('token') + if token is None: + token = keymanager.get_key(host.lower(), 'github') + if token is None: + token = os.environ.get('GITHUB_TOKEN') + return token + async def get_version(name, conf, **kwargs): global RATE_LIMITED_ERROR, ALLOW_REQUEST @@ -116,26 +156,11 @@ releases(first: 100, orderBy: { field: CREATED_AT, direction: DESC }) { async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult: host, repo, query, token = key owner, reponame = repo.split('/') - headers = { - 'Authorization': f'bearer {token}', - 'Content-Type': 'application/json', - } - - # Make GraphQL query - query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) - async with http_client.post( - GITHUB_GRAPHQL_URL % host, - headers=headers, - json={'query': query_vars} - ) as res: - j = await res.json() - if 'errors' in j: - raise GetVersionError(f"GitHub API error: {j['errors']}") - - refs = j['data']['repository']['refs']['edges'] - if not refs: - raise GetVersionError('no tag found') + if not token: + raise GetVersionError('token is required for latest tag query') + repo_data = await execute_github_query(host, owner, reponame, token) + refs = repo_data['refs']['edges'] version = refs[0]['node']['name'] revision = refs[0]['node']['target']['oid'] @@ -149,23 +174,12 @@ async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult: async def get_latest_release_with_prereleases(key: Tuple[str, str, str, str]) -> RichResult: host, repo, token, use_release_name = key owner, reponame = repo.split('/') - headers = { - 'Authorization': f'bearer {token}', - 'Content-Type': 'application/json', - } + if not token: + raise GetVersionError('token is required for latest release query') - # Make GraphQL query - query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) - async with http_client.post( - GITHUB_GRAPHQL_URL % host, - headers=headers, - json={'query': query_vars} - ) as res: - j = await res.json() - if 'errors' in j: - raise GetVersionError(f"GitHub API error: {j['errors']}") + repo_data = await execute_github_query(host, owner, reponame, token) - releases = j['data']['repository']['releases']['edges'] + releases = 
repo_data['releases']['edges'] if not releases: raise GetVersionError('no release found') @@ -188,15 +202,7 @@ async def get_version_real( repo = conf['github'] owner, reponame = repo.split('/') host = conf.get('host', "github.com") - - # Load token from config - token = conf.get('token') - # Load token from keyman - if token is None: - token = keymanager.get_key(host.lower(), 'github') - # Load token from environment - if token is None: - token = os.environ.get('GITHUB_TOKEN') + token = get_github_token(conf, host, keymanager) use_latest_tag = conf.get('use_latest_tag', False) if use_latest_tag: @@ -206,25 +212,11 @@ async def get_version_real( query = conf.get('query', '') return await cache.get((host, repo, query, token), get_latest_tag) - headers = { - 'Authorization': f'bearer {token}', - 'Content-Type': 'application/json', - } + repo_data = await execute_github_query(host, owner, reponame, token) - # Make GraphQL query - query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) - async with http_client.post( - GITHUB_GRAPHQL_URL % host, - headers=headers, - json={'query': query_vars} - ) as res: - j = await res.json() - if 'errors' in j: - raise GetVersionError(f"GitHub API error: {j['errors']}") - - use_max_tag = conf.ger('use_max_tag', False) + use_max_tag = conf.get('use_max_tag', False) if use_max_tag: - refs = j['data']['repository']['refs']['edges'] + refs = repo_data['refs']['edges'] tags: List[Union[str, RichResult]] = [ RichResult( version=ref['node']['name'], @@ -236,14 +228,14 @@ async def get_version_real( if not tags: raise GetVersionError('No tag found in upstream repository.') return tags - use_latest_release = conf.ger('use_latest_release', False) + use_latest_release = conf.get('use_latest_release', False) if use_latest_release: - releases = j['data']['repository']['releases']['edges'] + releases = repo_data['releases']['edges'] if not releases: raise GetVersionError('No release found in upstream repository.') latest_release = releases[0]['node'] - use_release_name = conf.ger('use_release_name', False) + use_release_name = conf.get('use_release_name', False) version = latest_release['name'] if use_release_name else latest_release['tagName'] return RichResult( @@ -252,7 +244,7 @@ async def get_version_real( url=latest_release['url'], ) else: - commit = j['data']['repository']['defaultBranchRef']['target']['history']['edges'][0]['node'] + commit = repo_data['defaultBranchRef']['target']['history']['edges'][0]['node'] return RichResult( version=commit['committedDate'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'), revision=commit['oid'], From 15da543c55d205eb03f00122ac82fef4fde850b2 Mon Sep 17 00:00:00 2001 From: envolution Date: Tue, 19 Nov 2024 23:28:56 -0500 Subject: [PATCH 25/40] aa --- nvchecker_source/github-test.py | 68 ++++++++++++++++----------------- 1 file changed, 32 insertions(+), 36 deletions(-) diff --git a/nvchecker_source/github-test.py b/nvchecker_source/github-test.py index 320ad49..c451aed 100644 --- a/nvchecker_source/github-test.py +++ b/nvchecker_source/github-test.py @@ -12,25 +12,29 @@ from nvchecker.api import ( HTTPError, session, RichResult, GetVersionError, ) -http_client = None logger = structlog.get_logger(logger_name=__name__) ALLOW_REQUEST = None RATE_LIMITED_ERROR = False +http_client = None GITHUB_GRAPHQL_URL = 'https://api.%s/graphql' +async def get_http_client(): + """Initialize and return the HTTP client.""" + global http_client + if http_client is None: + if asyncio.iscoroutine(session): + 
http_client = await session + else: + http_client = session + return http_client + async def execute_github_query(host: str, owner: str, reponame: str, token: str) -> dict: """ Execute GraphQL query against GitHub API and return the response data. Centralizes error handling and query execution. """ - global http_client - - # Initialize the HTTP client if not already done - if http_client is None: - if asyncio.iscoroutine(session): - http_client = await session - http_client = session + client = await get_http_client() headers = { 'Authorization': f'bearer {token}', @@ -39,7 +43,7 @@ async def execute_github_query(host: str, owner: str, reponame: str, token: str) query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) - async with http_client.post( + async with client.post( GITHUB_GRAPHQL_URL % host, headers=headers, json={'query': query_vars} @@ -59,36 +63,28 @@ def get_github_token(conf: Entry, host: str, keymanager: KeyManager) -> Optional return token async def get_version(name, conf, **kwargs): - global RATE_LIMITED_ERROR, ALLOW_REQUEST + global RATE_LIMITED_ERROR, ALLOW_REQUEST - global http_client - - # Initialize the HTTP client if not already done - if http_client is None: - if asyncio.iscoroutine(session): - http_client = await session - http_client = session + if RATE_LIMITED_ERROR: + raise RuntimeError('rate limited') - if RATE_LIMITED_ERROR: - raise RuntimeError('rate limited') + if ALLOW_REQUEST is None: + ALLOW_REQUEST = asyncio.Event() + ALLOW_REQUEST.set() - if ALLOW_REQUEST is None: - ALLOW_REQUEST = asyncio.Event() - ALLOW_REQUEST.set() - - for _ in range(2): # retry once - try: - await ALLOW_REQUEST.wait() - return await get_version_real(name, conf, **kwargs) - except HTTPError as e: - if e.code in [403, 429]: - if n := check_ratelimit(e, name): - ALLOW_REQUEST.clear() - await asyncio.sleep(n+1) - ALLOW_REQUEST.set() - continue - RATE_LIMITED_ERROR = True - raise + for _ in range(2): # retry once + try: + await ALLOW_REQUEST.wait() + return await get_version_real(name, conf, **kwargs) + except HTTPError as e: + if e.code in [403, 429]: + if n := check_ratelimit(e, name): + ALLOW_REQUEST.clear() + await asyncio.sleep(n+1) + ALLOW_REQUEST.set() + continue + RATE_LIMITED_ERROR = True + raise QUERY_GITHUB = """ query { From 200d7843c532c496e51dd97c3bf4130b3d4eb6c3 Mon Sep 17 00:00:00 2001 From: envolution Date: Tue, 19 Nov 2024 23:34:00 -0500 Subject: [PATCH 26/40] aa --- nvchecker_source/github-test.py | 48 ++++++++++++++++++++++----------- 1 file changed, 33 insertions(+), 15 deletions(-) diff --git a/nvchecker_source/github-test.py b/nvchecker_source/github-test.py index c451aed..fe56adf 100644 --- a/nvchecker_source/github-test.py +++ b/nvchecker_source/github-test.py @@ -21,13 +21,19 @@ GITHUB_GRAPHQL_URL = 'https://api.%s/graphql' async def get_http_client(): """Initialize and return the HTTP client.""" - global http_client - if http_client is None: + global _http_client + if _http_client is None: if asyncio.iscoroutine(session): - http_client = await session + # Properly await the session coroutine + client = await session + # Ensure the client supports async context management + if hasattr(client, '__aenter__'): + _http_client = client + else: + raise RuntimeError("HTTP client must support async context management") else: - http_client = session - return http_client + _http_client = session + return _http_client async def execute_github_query(host: str, owner: str, reponame: str, token: str) -> dict: """ @@ -35,7 +41,7 @@ async def 
execute_github_query(host: str, owner: str, reponame: str, token: str) Centralizes error handling and query execution. """ client = await get_http_client() - + headers = { 'Authorization': f'bearer {token}', 'Content-Type': 'application/json', @@ -43,15 +49,27 @@ async def execute_github_query(host: str, owner: str, reponame: str, token: str) query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) - async with client.post( - GITHUB_GRAPHQL_URL % host, - headers=headers, - json={'query': query_vars} - ) as res: - j = await res.json() - if 'errors' in j: - raise GetVersionError(f"GitHub API error: {j['errors']}") - return j['data']['repository'] + try: + # Create the request without using async with + response = await client.post( + GITHUB_GRAPHQL_URL % host, + headers=headers, + json={'query': query_vars} + ) + + # Handle the response manually + try: + data = await response.json() + if 'errors' in data: + raise GetVersionError(f"GitHub API error: {data['errors']}") + return data['data']['repository'] + finally: + # Ensure we clean up the response + if hasattr(response, 'close'): + await response.close() + except Exception as e: + logger.error("GitHub API request failed", error=str(e)) + raise def get_github_token(conf: Entry, host: str, keymanager: KeyManager) -> Optional[str]: """Get GitHub token from config, keymanager, or environment.""" From 19eb0e83e1dfaa8c3327bac3859294da132ce525 Mon Sep 17 00:00:00 2001 From: envolution Date: Tue, 19 Nov 2024 23:36:27 -0500 Subject: [PATCH 27/40] aa --- nvchecker_source/github-test.py | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/nvchecker_source/github-test.py b/nvchecker_source/github-test.py index fe56adf..17d204c 100644 --- a/nvchecker_source/github-test.py +++ b/nvchecker_source/github-test.py @@ -6,7 +6,6 @@ import asyncio import json # Added for JSON handling import structlog -http_client = None from nvchecker.api import ( VersionResult, Entry, AsyncCache, KeyManager, HTTPError, session, RichResult, GetVersionError, @@ -15,7 +14,7 @@ from nvchecker.api import ( logger = structlog.get_logger(logger_name=__name__) ALLOW_REQUEST = None RATE_LIMITED_ERROR = False -http_client = None +_http_client = None GITHUB_GRAPHQL_URL = 'https://api.%s/graphql' From 10fe17e712227605eea0c2cc4cea42d24f45a9ed Mon Sep 17 00:00:00 2001 From: envolution Date: Tue, 19 Nov 2024 23:45:35 -0500 Subject: [PATCH 28/40] aa --- nvchecker_source/github-test.py | 27 +++++++++++++-------------- 1 file changed, 13 insertions(+), 14 deletions(-) diff --git a/nvchecker_source/github-test.py b/nvchecker_source/github-test.py index 17d204c..1ea87a7 100644 --- a/nvchecker_source/github-test.py +++ b/nvchecker_source/github-test.py @@ -49,23 +49,22 @@ async def execute_github_query(host: str, owner: str, reponame: str, token: str) query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) try: - # Create the request without using async with - response = await client.post( - GITHUB_GRAPHQL_URL % host, - headers=headers, - json={'query': query_vars} - ) - - # Handle the response manually - try: - data = await response.json() + async with client.post( + GITHUB_GRAPHQL_URL % host, + headers=headers, + json={'query': query_vars} + ) as response: + # Check response status + response.raise_for_status() + + # Parse JSON response + data = await response.json() + + # Check for GraphQL errors if 'errors' in data: raise GetVersionError(f"GitHub API error: {data['errors']}") return data['data']['repository'] - finally: - # Ensure we 
clean up the response - if hasattr(response, 'close'): - await response.close() + except Exception as e: logger.error("GitHub API request failed", error=str(e)) raise From d3a60cc29a12ef4d13314673b69107f9ba9f743d Mon Sep 17 00:00:00 2001 From: envolution Date: Tue, 19 Nov 2024 23:53:04 -0500 Subject: [PATCH 29/40] aa --- nvchecker_source/github-test.py | 25 +++++++++++++------------ 1 file changed, 13 insertions(+), 12 deletions(-) diff --git a/nvchecker_source/github-test.py b/nvchecker_source/github-test.py index 1ea87a7..f998cf3 100644 --- a/nvchecker_source/github-test.py +++ b/nvchecker_source/github-test.py @@ -21,17 +21,16 @@ GITHUB_GRAPHQL_URL = 'https://api.%s/graphql' async def get_http_client(): """Initialize and return the HTTP client.""" global _http_client - if _http_client is None: - if asyncio.iscoroutine(session): - # Properly await the session coroutine - client = await session - # Ensure the client supports async context management - if hasattr(client, '__aenter__'): - _http_client = client - else: - raise RuntimeError("HTTP client must support async context management") - else: - _http_client = session + if _http_client is not None: + return _http_client + + # Get the client instance, awaiting if necessary + client = await session if asyncio.iscoroutine(session) else session + + if not hasattr(client, '__aenter__'): + raise RuntimeError("HTTP client must support async context management") + + _http_client = client return _http_client async def execute_github_query(host: str, owner: str, reponame: str, token: str) -> dict: @@ -49,7 +48,9 @@ async def execute_github_query(host: str, owner: str, reponame: str, token: str) query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) try: - async with client.post( + # Ensure we have a properly initialized client + http_client = await get_http_client() + async with http_client.post( GITHUB_GRAPHQL_URL % host, headers=headers, json={'query': query_vars} From af18ac688bf6d763937279d3b91fede41d25c06f Mon Sep 17 00:00:00 2001 From: envolution Date: Wed, 20 Nov 2024 00:02:26 -0500 Subject: [PATCH 30/40] aa --- nvchecker_source/github-test.py | 40 ++++++++++++++++++++------------- 1 file changed, 24 insertions(+), 16 deletions(-) diff --git a/nvchecker_source/github-test.py b/nvchecker_source/github-test.py index f998cf3..9360bfd 100644 --- a/nvchecker_source/github-test.py +++ b/nvchecker_source/github-test.py @@ -3,14 +3,14 @@ import time from urllib.parse import urlencode from typing import List, Tuple, Union, Optional import asyncio -import json # Added for JSON handling +import aiohttp import structlog from nvchecker.api import ( VersionResult, Entry, AsyncCache, KeyManager, HTTPError, session, RichResult, GetVersionError, ) - +DEFAULT_TIMEOUT = aiohttp.ClientTimeout(total=60) logger = structlog.get_logger(logger_name=__name__) ALLOW_REQUEST = None RATE_LIMITED_ERROR = False @@ -18,18 +18,18 @@ _http_client = None GITHUB_GRAPHQL_URL = 'https://api.%s/graphql' +async def create_http_client(): + """Create a new aiohttp client session with proper configuration.""" + return aiohttp.ClientSession(timeout=DEFAULT_TIMEOUT) + async def get_http_client(): """Initialize and return the HTTP client.""" global _http_client if _http_client is not None: return _http_client - - # Get the client instance, awaiting if necessary - client = await session if asyncio.iscoroutine(session) else session - - if not hasattr(client, '__aenter__'): - raise RuntimeError("HTTP client must support async context management") - + + # Create a 
new client session if none exists + client = await create_http_client() _http_client = client return _http_client @@ -46,21 +46,29 @@ async def execute_github_query(host: str, owner: str, reponame: str, token: str) } query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) + client = await get_http_client() try: - # Ensure we have a properly initialized client - http_client = await get_http_client() - async with http_client.post( - GITHUB_GRAPHQL_URL % host, - headers=headers, - json={'query': query_vars} + async with client.post( + GITHUB_GRAPHQL_URL % host, + headers=headers, + json={'query': query_vars} ) as response: # Check response status response.raise_for_status() # Parse JSON response data = await response.json() - + + # Handle rate limiting headers + remaining = response.headers.get('X-RateLimit-Remaining') + if remaining and int(remaining) == 0: + reset_time = int(response.headers.get('X-RateLimit-Reset', 0)) + logger.warning( + "GitHub API rate limit reached", + reset_time=time.ctime(reset_time) + ) + # Check for GraphQL errors if 'errors' in data: raise GetVersionError(f"GitHub API error: {data['errors']}") From 1c7158fff19c5511e8985fdd7230d03a534ec6e8 Mon Sep 17 00:00:00 2001 From: envolution Date: Wed, 20 Nov 2024 00:06:45 -0500 Subject: [PATCH 31/40] aa --- nvchecker_source/github-test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nvchecker_source/github-test.py b/nvchecker_source/github-test.py index 9360bfd..248dd59 100644 --- a/nvchecker_source/github-test.py +++ b/nvchecker_source/github-test.py @@ -44,7 +44,7 @@ async def execute_github_query(host: str, owner: str, reponame: str, token: str) 'Authorization': f'bearer {token}', 'Content-Type': 'application/json', } - + logger.warning(f"{owner} {reponame}") query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) client = await get_http_client() From b50433ede6850a27ad6cd101a075d52acdfcbc23 Mon Sep 17 00:00:00 2001 From: envolution Date: Wed, 20 Nov 2024 00:08:23 -0500 Subject: [PATCH 32/40] aa --- nvchecker_source/github-test.py | 1 + 1 file changed, 1 insertion(+) diff --git a/nvchecker_source/github-test.py b/nvchecker_source/github-test.py index 248dd59..0ce80e1 100644 --- a/nvchecker_source/github-test.py +++ b/nvchecker_source/github-test.py @@ -46,6 +46,7 @@ async def execute_github_query(host: str, owner: str, reponame: str, token: str) } logger.warning(f"{owner} {reponame}") query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) + logger.warning(f"{query_vars}") client = await get_http_client() try: From 729cde36259fc64f4dfe12d0cce0d6c78244ce52 Mon Sep 17 00:00:00 2001 From: envolution Date: Wed, 20 Nov 2024 00:09:23 -0500 Subject: [PATCH 33/40] aa --- nvchecker_source/github-test.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/nvchecker_source/github-test.py b/nvchecker_source/github-test.py index 0ce80e1..50cf9d4 100644 --- a/nvchecker_source/github-test.py +++ b/nvchecker_source/github-test.py @@ -119,7 +119,7 @@ query { remaining resetAt } - repository(owner: "$name", name: "$owner") { + repository(owner: "$owner", name: "$name") { # Default branch commits defaultBranchRef { target { From 0e8e68a5fb02fc9381d9492639f70f7cb9ec9b95 Mon Sep 17 00:00:00 2001 From: envolution Date: Wed, 20 Nov 2024 00:19:46 -0500 Subject: [PATCH 34/40] aa --- nvchecker_source/github-test.py | 13 +++++-------- 1 file changed, 5 insertions(+), 8 deletions(-) diff --git a/nvchecker_source/github-test.py 
b/nvchecker_source/github-test.py index 50cf9d4..af17ed4 100644 --- a/nvchecker_source/github-test.py +++ b/nvchecker_source/github-test.py @@ -137,16 +137,13 @@ query { } } # All tags - refs(refPrefix: "refs/tags/", first: 1, orderBy: { - field: TAG_COMMIT_DATE, - direction: DESC}) - { + refs(refPrefix: "refs/tags/", first: 1, orderBy: {field: TAG_COMMIT_DATE, direction: DESC}) { edges { node { - name + name target { - ... on Commit { - oid + oid + ... on Commit { url } } @@ -159,7 +156,7 @@ releases(first: 100, orderBy: { field: CREATED_AT, direction: DESC }) { edges { node { name - url + url tagName isPrerelease isLatest From 98e5aa47423dd877916b46ef0b3db5889b0a44ea Mon Sep 17 00:00:00 2001 From: envolution Date: Wed, 20 Nov 2024 00:24:06 -0500 Subject: [PATCH 35/40] aa --- nvchecker_source/github-test.py | 2 ++ 1 file changed, 2 insertions(+) diff --git a/nvchecker_source/github-test.py b/nvchecker_source/github-test.py index af17ed4..7d633bb 100644 --- a/nvchecker_source/github-test.py +++ b/nvchecker_source/github-test.py @@ -4,6 +4,7 @@ from urllib.parse import urlencode from typing import List, Tuple, Union, Optional import asyncio import aiohttp +import prettyprint import structlog from nvchecker.api import ( @@ -236,6 +237,7 @@ async def get_version_real( use_max_tag = conf.get('use_max_tag', False) if use_max_tag: refs = repo_data['refs']['edges'] + logger.warning(f"{refs}") tags: List[Union[str, RichResult]] = [ RichResult( version=ref['node']['name'], From 286127f66ce7f4d78f6cfdbd1f9976199c0c5a8b Mon Sep 17 00:00:00 2001 From: envolution Date: Wed, 20 Nov 2024 03:27:54 -0500 Subject: [PATCH 36/40] ii --- graphql.output.json | 307 ++++++++++++++++++ ...graphql.py => graphql.variable.examples.py | 0 graphqlquery.txt | 57 ---- nvchecker_source/github-test.py | 116 ++++--- 4 files changed, 382 insertions(+), 98 deletions(-) create mode 100644 graphql.output.json rename Structuregraphql.py => graphql.variable.examples.py (100%) delete mode 100644 graphqlquery.txt diff --git a/graphql.output.json b/graphql.output.json new file mode 100644 index 0000000..2f21f7b --- /dev/null +++ b/graphql.output.json @@ -0,0 +1,307 @@ +{ + "data": { + "rateLimit": { + "limit": 5000, + "remaining": 4988, + "resetAt": "2024-11-20T05:34:23Z" + }, + "repository": { + "defaultBranchRef": { + "target": { + "history": { + "totalCount": 4640, + "edges": [ + { + "node": { + "oid": "65c463fcbf3b75eb60c9d51d4cf04abbfe736b98", + "committedDate": "2024-11-19T19:49:27Z" + } + } + ] + } + } + }, + "refs": { + "edges": [ + { + "node": { + "name": "v3.0.9", + "target": { + "oid": "72c8f30c17e03b01711b902525250e48594350aa" + } + } + } + ] + }, + "releases": { + "totalCount": 26, + "edges": [ + { + "node": { + "name": "Version 3.0.9", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/v3.0.9", + "tagName": "v3.0.9", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2024-06-13T16:56:28Z" + } + }, + { + "node": { + "name": "Release version 3.2rc3", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/v3.2rc3", + "tagName": "v3.2rc3", + "isPrerelease": false, + "isLatest": true, + "createdAt": "2023-10-10T10:22:24Z" + } + }, + { + "node": { + "name": "Version 3.0.8", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/v3.0.8", + "tagName": "v3.0.8", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2022-09-28T19:05:06Z" + } + }, + { + "node": { + "name": "Version 3.0.7", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/v3.0.7", + "tagName": "v3.0.7", + 
"isPrerelease": false, + "isLatest": false, + "createdAt": "2022-02-19T13:42:25Z" + } + }, + { + "node": { + "name": "Version 3.0", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/v3.0", + "tagName": "v3.0", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2021-10-03T09:05:27Z" + } + }, + { + "node": { + "name": "Version 3.0.6", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/v3.0.6", + "tagName": "v3.0.6", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2021-10-03T09:00:16Z" + } + }, + { + "node": { + "name": "Version 3.0.5", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/3.0.5", + "tagName": "3.0.5", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2021-05-10T11:50:07Z" + } + }, + { + "node": { + "name": "Version 3.0.4", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/3.0.4", + "tagName": "3.0.4", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2020-11-20T16:59:15Z" + } + }, + { + "node": { + "name": "Version 3.0.3", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/3.0.3", + "tagName": "3.0.3", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2020-11-19T08:43:35Z" + } + }, + { + "node": { + "name": "Version 3.0.2", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/3.0.2", + "tagName": "3.0.2", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2020-05-08T09:56:50Z" + } + }, + { + "node": { + "name": "Version 3.0.1", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/3.0.1", + "tagName": "3.0.1", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2020-04-15T10:37:32Z" + } + }, + { + "node": { + "name": "Version 3.0-1", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/3.0-1", + "tagName": "3.0-1", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2020-04-15T07:34:31Z" + } + }, + { + "node": { + "name": "Version 3.0 rc6", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/3.0rc6", + "tagName": "3.0rc6", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2019-12-11T20:54:31Z" + } + }, + { + "node": { + "name": "Version 3.0 rc5", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/3.0rc5", + "tagName": "3.0rc5", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2019-04-25T07:21:23Z" + } + }, + { + "node": { + "name": "Version 2.9.5-8", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/v2.9.5-8", + "tagName": "v2.9.5-8", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2019-04-23T20:27:29Z" + } + }, + { + "node": { + "name": "Version 3.0 rc4", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/3.0rc4", + "tagName": "3.0rc4", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2019-02-18T19:33:08Z" + } + }, + { + "node": { + "name": "Version 3.0 rc3", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/3.0rc3", + "tagName": "3.0rc3", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2018-11-30T17:38:31Z" + } + }, + { + "node": { + "name": "Version 3.0 rc2", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/3.0rc2", + "tagName": "3.0rc2", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2018-10-09T10:36:15Z" + } + }, + { + "node": { + "name": "Version 2.9.5-7", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/v2.9.5-7", + "tagName": "v2.9.5-7", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2018-09-07T15:05:41Z" + } + }, + { + 
"node": { + "name": "Version 3.0 rc1", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/3.0rc1", + "tagName": "3.0rc1", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2018-09-06T06:19:17Z" + } + }, + { + "node": { + "name": "Version 2.9.5-5", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/v2.9.5-5", + "tagName": "v2.9.5-5", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2018-04-09T08:07:46Z" + } + }, + { + "node": { + "name": "Version 2.9.5-3", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/v2.9.5-3", + "tagName": "v2.9.5-3", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2018-03-24T18:45:21Z" + } + }, + { + "node": { + "name": "Version 2.9.5-2", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/v2.9.5-2", + "tagName": "v2.9.5-2", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2018-02-19T10:16:04Z" + } + }, + { + "node": { + "name": "Version 2.9.5-1", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/v2.9.5-1", + "tagName": "v2.9.5-1", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2017-09-20T15:24:37Z" + } + }, + { + "node": { + "name": "Version 2.8", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/v2.8", + "tagName": "v2.8", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2017-05-10T19:04:48Z" + } + }, + { + "node": { + "name": "Version 2.6", + "url": "https://github.com/drwetter/testssl.sh/releases/tag/v2.6", + "tagName": "v2.6", + "isPrerelease": false, + "isLatest": false, + "createdAt": "2015-09-25T18:56:09Z" + } + } + ], + "pageInfo": { + "hasNextPage": false, + "endCursor": "Y3Vyc29yOnYyOpK5MjAxNS0wOS0yNVQxNDo1NjowOS0wNDowMM4AHJKe" + } + } + } + } + } \ No newline at end of file diff --git a/Structuregraphql.py b/graphql.variable.examples.py similarity index 100% rename from Structuregraphql.py rename to graphql.variable.examples.py diff --git a/graphqlquery.txt b/graphqlquery.txt deleted file mode 100644 index e36bba1..0000000 --- a/graphqlquery.txt +++ /dev/null @@ -1,57 +0,0 @@ -query { - rateLimit { - limit - remaining - resetAt - } - repository(owner: "drwetter", name: "testssl.sh") { - # Default branch commits - defaultBranchRef { - target { - ... on Commit { - history(first: 1) { - totalCount - edges { - node { - oid - committedDate - } - } - } - } - } - } - # All tags - refs(refPrefix: "refs/tags/", first: 1, orderBy: {field: TAG_COMMIT_DATE, direction: DESC}) { - edges { - node { - name - target { - ... on Commit { - oid - url - } - } - } - } - } - # All releases (filter pre-releases in your application logic) -releases(first: 100, orderBy: { field: CREATED_AT, direction: DESC }) { - totalCount - edges { - node { - name - url - tagName - isPrerelease - isLatest - createdAt - } - } - pageInfo { - hasNextPage - endCursor - } - } - } -} \ No newline at end of file diff --git a/nvchecker_source/github-test.py b/nvchecker_source/github-test.py index 7d633bb..d6e721f 100644 --- a/nvchecker_source/github-test.py +++ b/nvchecker_source/github-test.py @@ -4,7 +4,6 @@ from urllib.parse import urlencode from typing import List, Tuple, Union, Optional import asyncio import aiohttp -import prettyprint import structlog from nvchecker.api import ( @@ -34,6 +33,17 @@ async def get_http_client(): _http_client = client return _http_client +def create_rich_result(conf, commits, sha, **kwargs) -> RichResult: + """ + Helper function to centralize the creation of a RichResult. 
+ Accepts any keyword arguments and passes them to RichResult. + """ + if conf.get('use_commit_number', False): + kwargs['version'] += f"+r{str(commits)}" + if conf.get('use_commit_hash', False): + kwargs['version'] += f"+g{sha[:9]}" + return RichResult(**kwargs) + async def execute_github_query(host: str, owner: str, reponame: str, token: str) -> dict: """ Execute GraphQL query against GitHub API and return the response data. @@ -45,9 +55,7 @@ async def execute_github_query(host: str, owner: str, reponame: str, token: str) 'Authorization': f'bearer {token}', 'Content-Type': 'application/json', } - logger.warning(f"{owner} {reponame}") query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) - logger.warning(f"{query_vars}") client = await get_http_client() try: @@ -74,6 +82,7 @@ async def execute_github_query(host: str, owner: str, reponame: str, token: str) # Check for GraphQL errors if 'errors' in data: raise GetVersionError(f"GitHub API error: {data['errors']}") + await client.close() return data['data']['repository'] except Exception as e: @@ -90,11 +99,18 @@ def get_github_token(conf: Entry, host: str, keymanager: KeyManager) -> Optional return token async def get_version(name, conf, **kwargs): - global RATE_LIMITED_ERROR, ALLOW_REQUEST + global RATE_LIMITED_ERROR if RATE_LIMITED_ERROR: raise RuntimeError('rate limited') + try: + return await _get_version_with_retry(name, conf, **kwargs) + finally: + await _cleanup_http_client() + +async def _get_version_with_retry(name, conf, **kwargs): + global ALLOW_REQUEST if ALLOW_REQUEST is None: ALLOW_REQUEST = asyncio.Event() ALLOW_REQUEST.set() @@ -113,6 +129,13 @@ async def get_version(name, conf, **kwargs): RATE_LIMITED_ERROR = True raise +async def _cleanup_http_client(): + """Clean up the global HTTP client if it exists.""" + global _http_client + if _http_client is not None: + await _http_client.close() + _http_client = None + QUERY_GITHUB = """ query { rateLimit { @@ -151,7 +174,7 @@ query { } } } - # All releases (filter pre-releases in your application logic) + # All releases (can't filter server-side) releases(first: 100, orderBy: { field: CREATED_AT, direction: DESC }) { totalCount edges { @@ -173,7 +196,7 @@ releases(first: 100, orderBy: { field: CREATED_AT, direction: DESC }) { } """ -async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult: +async def get_latest_tag(conf: Entry, key: Tuple[str, str, str, str]) -> RichResult: host, repo, query, token = key owner, reponame = repo.split('/') if not token: @@ -183,37 +206,18 @@ async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult: refs = repo_data['refs']['edges'] version = refs[0]['node']['name'] revision = refs[0]['node']['target']['oid'] - - return RichResult( + commits = repo_data["defaultBranchRef"]["target"]["history"]["totalCount"] + sha = repo_data["defaultBranchRef"]["target"]["history"]["edges"][0]["node"]["oid"] + return create_rich_result( + conf=conf, + commits=commits, + sha=sha, version=version, gitref=f"refs/tags/{version}", revision=revision, url=f'https://github.com/{repo}/releases/tag/{version}', ) -async def get_latest_release_with_prereleases(key: Tuple[str, str, str, str]) -> RichResult: - host, repo, token, use_release_name = key - owner, reponame = repo.split('/') - if not token: - raise GetVersionError('token is required for latest release query') - - repo_data = await execute_github_query(host, owner, reponame, token) - - releases = repo_data['releases']['edges'] - if not releases: - raise 
GetVersionError('no release found') - - latest_release = releases[0]['node'] - tag_name = latest_release['tagName'] - version = latest_release['name'] if use_release_name else tag_name - - return RichResult( - version=version, - gitref=f"refs/tags/{tag_name}", - revision=latest_release['target']['oid'], - url=latest_release['url'], - ) - async def get_version_real( name: str, conf: Entry, *, cache: AsyncCache, keymanager: KeyManager, @@ -230,16 +234,20 @@ async def get_version_real( raise GetVersionError('token not given but it is required') query = conf.get('query', '') - return await cache.get((host, repo, query, token), get_latest_tag) + return await cache.get((host, repo, query, token), get_latest_tag(conf)) repo_data = await execute_github_query(host, owner, reponame, token) + commits = repo_data["defaultBranchRef"]["target"]["history"]["totalCount"] + sha = repo_data["defaultBranchRef"]["target"]["history"]["edges"][0]["node"]["oid"] use_max_tag = conf.get('use_max_tag', False) if use_max_tag: refs = repo_data['refs']['edges'] - logger.warning(f"{refs}") tags: List[Union[str, RichResult]] = [ - RichResult( + create_rich_result( + conf=conf, + commits=commits, + sha=sha, version=ref['node']['name'], gitref=f"refs/tags/{ref['node']['name']}", revision=ref['node']['target']['oid'], @@ -250,23 +258,49 @@ async def get_version_real( raise GetVersionError('No tag found in upstream repository.') return tags use_latest_release = conf.get('use_latest_release', False) - if use_latest_release: - releases = repo_data['releases']['edges'] - if not releases: - raise GetVersionError('No release found in upstream repository.') + use_newest_release = conf.get('use_newest_release', False) + include_prereleases = conf.get('use_prereleases', False) + if use_latest_release or use_newest_release: + if use_latest_release: + releases = repo_data['releases']['edges'] + if not releases: + raise GetVersionError('No release found in upstream repository.') + if include_prereleases: + latest_release = next( + (release['node'] for release in releases if release['node']['isLatest'] or release['node']['isPrerelease]']), + None + ) + else: + latest_release = next( + (release['node'] for release in releases if release['node']['isLatest'] and not release['node']['isPrerelease']), + None + ) + + elif use_newest_release: + releases = repo_data['releases']['edges'] + latest_release = next( + (release['node'] for release in releases), + None + ) + - latest_release = releases[0]['node'] use_release_name = conf.get('use_release_name', False) version = latest_release['name'] if use_release_name else latest_release['tagName'] - return RichResult( + return create_rich_result( + conf=conf, + commits=commits, + sha=sha, version=version, gitref=f"refs/tags/{latest_release['tagName']}", url=latest_release['url'], ) else: commit = repo_data['defaultBranchRef']['target']['history']['edges'][0]['node'] - return RichResult( + return create_rich_result( + conf=conf, + commits=commits, + sha=sha, version=commit['committedDate'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'), revision=commit['oid'], url=f'https://github.com/{repo}/commit/{commit["oid"]}', From beaaa6bff95396748ff45d20413fa0f8a9e70c84 Mon Sep 17 00:00:00 2001 From: envolution Date: Wed, 20 Nov 2024 03:35:00 -0500 Subject: [PATCH 37/40] test file --- nvchecker_source/github_testfile.toml | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) create mode 100644 nvchecker_source/github_testfile.toml diff --git a/nvchecker_source/github_testfile.toml 
b/nvchecker_source/github_testfile.toml new file mode 100644 index 0000000..1bea215 --- /dev/null +++ b/nvchecker_source/github_testfile.toml @@ -0,0 +1,18 @@ +["testssl.sh-git"] +source="github-test" +github="drwetter/testssl.sh" +#use_max_tag=true +#use_latest_tag=true + +#uses latest release marked as Latest by github, this is not always the newest (evidenced by the repo in this file) +#use_latest_release=true +#include_prereleases=true + +#selects the most recent by ordered list descending +use_newest_release=true + +#use_release_name=true + +#i find these useful to match arch guidelines for VCS packages +use_commit_number=true +use_commit_hash=true From a0a5da6ebf5c0a21fe8d029406a1bd1c82378adf Mon Sep 17 00:00:00 2001 From: envolution Date: Wed, 20 Nov 2024 03:40:48 -0500 Subject: [PATCH 38/40] aa --- nvchecker_source/1.patch | 195 --------------------------------------- 1 file changed, 195 deletions(-) delete mode 100644 nvchecker_source/1.patch diff --git a/nvchecker_source/1.patch b/nvchecker_source/1.patch deleted file mode 100644 index 465bf39..0000000 --- a/nvchecker_source/1.patch +++ /dev/null @@ -1,195 +0,0 @@ ---- github-test.py -+++ github-test.py -@@ -13,6 +13,7 @@ from nvchecker.api import ( - HTTPError, session, RichResult, GetVersionError, - ) - -+http_client = None - logger = structlog.get_logger(logger_name=__name__) - ALLOW_REQUEST = None - RATE_LIMITED_ERROR = False -@@ -20,8 +21,48 @@ RATE_LIMITED_ERROR = False - GITHUB_GRAPHQL_URL = 'https://api.%s/graphql' - -+async def execute_github_query(host: str, owner: str, reponame: str, token: str) -> dict: -+ """ -+ Execute GraphQL query against GitHub API and return the response data. -+ Centralizes error handling and query execution. -+ """ -+ global http_client -+ -+ # Initialize the HTTP client if not already done -+ if http_client is None: -+ if asyncio.iscoroutine(session): -+ http_client = await session -+ http_client = session -+ -+ headers = { -+ 'Authorization': f'bearer {token}', -+ 'Content-Type': 'application/json', -+ } -+ -+ query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) -+ -+ async with http_client.post( -+ GITHUB_GRAPHQL_URL % host, -+ headers=headers, -+ json={'query': query_vars} -+ ) as res: -+ j = await res.json() -+ if 'errors' in j: -+ raise GetVersionError(f"GitHub API error: {j['errors']}") -+ return j['data']['repository'] -+ -+def get_github_token(conf: Entry, host: str, keymanager: KeyManager) -> Optional[str]: -+ """Get GitHub token from config, keymanager, or environment.""" -+ token = conf.get('token') -+ if token is None: -+ token = keymanager.get_key(host.lower(), 'github') -+ if token is None: -+ token = os.environ.get('GITHUB_TOKEN') -+ return token -+ - async def get_version(name, conf, **kwargs): -- global RATE_LIMITED_ERROR, ALLOW_REQUEST -+ global RATE_LIMITED_ERROR, ALLOW_REQUEST -+ if RATE_LIMITED_ERROR: -+ raise RuntimeError('rate limited') - - if ALLOW_REQUEST is None: - ALLOW_REQUEST = asyncio.Event() -@@ -91,21 +132,11 @@ query { - - async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult: - host, repo, query, token = key - owner, reponame = repo.split('/') -- headers = { -- 'Authorization': f'bearer {token}', -- 'Content-Type': 'application/json', -- } - -- # Make GraphQL query -- query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) -- async with session.post( -- GITHUB_GRAPHQL_URL % host, -- headers=headers, -- json={'query': query_vars} -- ) as res: -- j = await res.json() -- if 'errors' in j: -- raise 
GetVersionError(f"GitHub API error: {j['errors']}") -+ if not token: -+ raise GetVersionError('token is required for latest tag query') -+ -+ repo_data = await execute_github_query(host, owner, reponame, token) - -- refs = j['data']['repository']['refs']['edges'] -+ refs = repo_data['refs']['edges'] - if not refs: - raise GetVersionError('no tag found') -@@ -120,21 +151,11 @@ async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult: - - async def get_latest_release_with_prereleases(key: Tuple[str, str, str, str]) -> RichResult: - host, repo, token, use_release_name = key - owner, reponame = repo.split('/') -- headers = { -- 'Authorization': f'bearer {token}', -- 'Content-Type': 'application/json', -- } - -- # Make GraphQL query -- query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) -- async with session.post( -- GITHUB_GRAPHQL_URL % host, -- headers=headers, -- json={'query': query_vars} -- ) as res: -- j = await res.json() -- if 'errors' in j: -- raise GetVersionError(f"GitHub API error: {j['errors']}") -+ if not token: -+ raise GetVersionError('token is required for latest release query') -+ -+ repo_data = await execute_github_query(host, owner, reponame, token) - -- releases = j['data']['repository']['releases']['edges'] -+ releases = repo_data['releases']['edges'] - if not releases: - raise GetVersionError('no release found') -@@ -199,30 +220,17 @@ async def get_version_real( - repo = conf['github'] - owner, reponame = repo.split('/') - host = conf.get('host', "github.com") -+ token = get_github_token(conf, host, keymanager) - -- # Load token from config -- token = conf.get('token') -- # Load token from keyman -- if token is None: -- token = keymanager.get_key(host.lower(), 'github') -- # Load token from environment -- if token is None: -- token = os.environ.get('GITHUB_TOKEN') -- - use_latest_tag = conf.get('use_latest_tag', False) - if use_latest_tag: - if not token: - raise GetVersionError('token not given but it is required') -- - query = conf.get('query', '') - return await cache.get((host, repo, query, token), get_latest_tag) - -- headers = { -- 'Authorization': f'bearer {token}', -- 'Content-Type': 'application/json', -- } -+ repo_data = await execute_github_query(host, owner, reponame, token) - -- # Make GraphQL query -- query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) -- async with session.post( -- GITHUB_GRAPHQL_URL % host, -- headers=headers, -- json={'query': query_vars} -- ) as res: -- j = await res.json() -- if 'errors' in j: -- raise GetVersionError(f"GitHub API error: {j['errors']}") -- -- use_max_tag = conf.ger('use_max_tag', False) -+ use_max_tag = conf.get('use_max_tag', False) - if use_max_tag: -- refs = j['data']['repository']['refs']['edges'] -+ refs = repo_data['refs']['edges'] - tags: List[Union[str, RichResult]] = [ - RichResult( - version=ref['node']['name'], -@@ -233,10 +241,10 @@ async def get_version_real( - if not tags: - raise GetVersionError('No tag found in upstream repository.') - return tags -- use_latest_release = conf.ger('use_latest_release', False) -+ use_latest_release = conf.get('use_latest_release', False) - if use_latest_release: -- releases = j['data']['repository']['releases']['edges'] -- -+ releases = repo_data['releases']['edges'] -+ - if not releases: - raise GetVersionError('No release found in upstream repository.') - - latest_release = releases[0]['node'] -- use_release_name = conf.ger('use_release_name', False) -+ use_release_name = conf.get('use_release_name', 
False) - version = latest_release['name'] if use_release_name else latest_release['tagName'] - - return RichResult( -@@ -245,7 +253,7 @@ async def get_version_real( - url=latest_release['url'], - ) - else: -- commit = j['data']['repository']['defaultBranchRef']['target']['history']['edges'][0]['node'] -+ commit = repo_data['defaultBranchRef']['target']['history']['edges'][0]['node'] - return RichResult( - version=commit['committedDate'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'), - revision=commit['oid'], From 965c26b34e37fb13b968d53aea94f1bbcb247c39 Mon Sep 17 00:00:00 2001 From: envolution Date: Thu, 28 Nov 2024 12:18:05 -0500 Subject: [PATCH 39/40] restructure and rename to -graphql --- nvchecker_source/github-graphql.py | 179 ++++++++++++++++ nvchecker_source/github-test.py | 329 ----------------------------- 2 files changed, 179 insertions(+), 329 deletions(-) create mode 100644 nvchecker_source/github-graphql.py delete mode 100644 nvchecker_source/github-test.py diff --git a/nvchecker_source/github-graphql.py b/nvchecker_source/github-graphql.py new file mode 100644 index 0000000..8242a41 --- /dev/null +++ b/nvchecker_source/github-graphql.py @@ -0,0 +1,179 @@ +import os +import time +import aiohttp +from typing import List, Tuple, Union, Optional +from nvchecker.api import RichResult, Entry, KeyManager, GetVersionError, AsyncCache + +async def get_github_token(conf: Entry, host: str, keymanager: KeyManager) -> Optional[str]: + token = conf.get('token') + if token is None: + token = keymanager.get_key(host.lower(), 'github') + if token is None: + token = os.environ.get('GITHUB_TOKEN') + return token + +def create_rich_result(conf, commits, sha, **kwargs) -> RichResult: + if conf.get('use_commit_number', False): + kwargs['version'] += f"+r{str(commits)}" + if conf.get('use_commit_hash', False): + kwargs['version'] += f"+g{sha[:9]}" + return RichResult(**kwargs) + +async def get_version( + name: str, conf: Entry, *, + cache: AsyncCache, keymanager: KeyManager, + **kwargs, +) -> RichResult: + repo = conf['github'] + owner, reponame = repo.split('/') + host = conf.get('host', "github.com") + token = await get_github_token(conf, host, keymanager) + + if not token: + raise GetVersionError('token not given but it is required') + + GITHUB_GRAPHQL_URL = 'https://api.github.com/graphql' + query = """ + query { + repository(owner: "$owner", name: "$name") { + defaultBranchRef { + target { + ... 
on Commit { + history(first: 1) { + totalCount + edges { + node { + oid + committedDate + } + } + } + } + } + } + refs(refPrefix: "refs/tags/", first: 100, orderBy: {field: TAG_COMMIT_DATE, direction: DESC}) { + edges { + node { + name + target { + oid + } + } + } + } + releases(first: 100, orderBy: { field: CREATED_AT, direction: DESC }) { + edges { + node { + name + url + tagName + isPrerelease + isLatest + createdAt + } + } + } + } + } + """ + + query_vars = query.replace("$owner", owner).replace("$name", reponame) + + async with aiohttp.ClientSession() as session: + headers = { + 'Authorization': f'bearer {token}', + 'Content-Type': 'application/json', + } + + try: + async with session.post( + GITHUB_GRAPHQL_URL, + headers=headers, + json={'query': query_vars} + ) as response: + data = await response.json() + + if 'errors' in data: + raise GetVersionError(f"GitHub API error: {data['errors']}") + + repo_data = data['data']['repository'] + commits = repo_data["defaultBranchRef"]["target"]["history"]["totalCount"] + sha = repo_data["defaultBranchRef"]["target"]["history"]["edges"][0]["node"]["oid"] + + # Latest Tag Strategy + if conf.get('use_latest_tag', False): + refs = repo_data['refs']['edges'] + if not refs: + raise GetVersionError('No tag found in upstream repository.') + latest_tag = refs[0]['node'] + return create_rich_result( + conf=conf, + commits=commits, + sha=sha, + version=latest_tag['name'], + gitref=f"refs/tags/{latest_tag['name']}", + revision=latest_tag['target']['oid'], + url=f'https://github.com/{repo}/releases/tag/{latest_tag["name"]}' + ) + + # Maximum Tag Strategy - Return first tag + if conf.get('use_max_tag', False): + refs = repo_data['refs']['edges'] + if not refs: + raise GetVersionError('No tag found in upstream repository.') + first_tag = refs[0]['node'] + return create_rich_result( + conf=conf, + commits=commits, + sha=sha, + version=first_tag['name'], + gitref=f"refs/tags/{first_tag['name']}", + revision=first_tag['target']['oid'], + url=f'https://github.com/{repo}/releases/tag/{first_tag["name"]}' + ) + + # Release Strategies + if conf.get('use_latest_release', False) or conf.get('use_newest_release', False): + releases = repo_data['releases']['edges'] + if not releases: + raise GetVersionError('No release found in upstream repository.') + + include_prereleases = conf.get('use_prereleases', False) + + if conf.get('use_latest_release', False): + latest_release = next( + (release['node'] for release in releases + if release['node']['isLatest'] or (include_prereleases and release['node']['isPrerelease'])), + None + ) + else: + latest_release = releases[0]['node'] + + if not latest_release: + raise GetVersionError('No suitable release found') + + use_release_name = conf.get('use_release_name', False) + version = latest_release['name'] if use_release_name else latest_release['tagName'] + + return create_rich_result( + conf=conf, + commits=commits, + sha=sha, + version=version, + gitref=f"refs/tags/{latest_release['tagName']}", + url=latest_release['url'] + ) + + # Default: Use commit date + commit = repo_data['defaultBranchRef']['target']['history']['edges'][0]['node'] + return create_rich_result( + conf=conf, + commits=commits, + sha=sha, + version=commit['committedDate'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'), + revision=commit['oid'], + url=f'https://github.com/{repo}/commit/{commit["oid"]}' + ) + + except aiohttp.ClientError as e: + raise GetVersionError(f"GitHub API request failed: {e}") diff --git a/nvchecker_source/github-test.py 
b/nvchecker_source/github-test.py deleted file mode 100644 index d6e721f..0000000 --- a/nvchecker_source/github-test.py +++ /dev/null @@ -1,329 +0,0 @@ -import os # Added for environment variable access -import time -from urllib.parse import urlencode -from typing import List, Tuple, Union, Optional -import asyncio -import aiohttp - -import structlog -from nvchecker.api import ( - VersionResult, Entry, AsyncCache, KeyManager, - HTTPError, session, RichResult, GetVersionError, -) -DEFAULT_TIMEOUT = aiohttp.ClientTimeout(total=60) -logger = structlog.get_logger(logger_name=__name__) -ALLOW_REQUEST = None -RATE_LIMITED_ERROR = False -_http_client = None - -GITHUB_GRAPHQL_URL = 'https://api.%s/graphql' - -async def create_http_client(): - """Create a new aiohttp client session with proper configuration.""" - return aiohttp.ClientSession(timeout=DEFAULT_TIMEOUT) - -async def get_http_client(): - """Initialize and return the HTTP client.""" - global _http_client - if _http_client is not None: - return _http_client - - # Create a new client session if none exists - client = await create_http_client() - _http_client = client - return _http_client - -def create_rich_result(conf, commits, sha, **kwargs) -> RichResult: - """ - Helper function to centralize the creation of a RichResult. - Accepts any keyword arguments and passes them to RichResult. - """ - if conf.get('use_commit_number', False): - kwargs['version'] += f"+r{str(commits)}" - if conf.get('use_commit_hash', False): - kwargs['version'] += f"+g{sha[:9]}" - return RichResult(**kwargs) - -async def execute_github_query(host: str, owner: str, reponame: str, token: str) -> dict: - """ - Execute GraphQL query against GitHub API and return the response data. - Centralizes error handling and query execution. 
- """ - client = await get_http_client() - - headers = { - 'Authorization': f'bearer {token}', - 'Content-Type': 'application/json', - } - query_vars = QUERY_GITHUB.replace("$owner", owner).replace("$name", reponame) - client = await get_http_client() - - try: - async with client.post( - GITHUB_GRAPHQL_URL % host, - headers=headers, - json={'query': query_vars} - ) as response: - # Check response status - response.raise_for_status() - - # Parse JSON response - data = await response.json() - - # Handle rate limiting headers - remaining = response.headers.get('X-RateLimit-Remaining') - if remaining and int(remaining) == 0: - reset_time = int(response.headers.get('X-RateLimit-Reset', 0)) - logger.warning( - "GitHub API rate limit reached", - reset_time=time.ctime(reset_time) - ) - - # Check for GraphQL errors - if 'errors' in data: - raise GetVersionError(f"GitHub API error: {data['errors']}") - await client.close() - return data['data']['repository'] - - except Exception as e: - logger.error("GitHub API request failed", error=str(e)) - raise - -def get_github_token(conf: Entry, host: str, keymanager: KeyManager) -> Optional[str]: - """Get GitHub token from config, keymanager, or environment.""" - token = conf.get('token') - if token is None: - token = keymanager.get_key(host.lower(), 'github') - if token is None: - token = os.environ.get('GITHUB_TOKEN') - return token - -async def get_version(name, conf, **kwargs): - global RATE_LIMITED_ERROR - - if RATE_LIMITED_ERROR: - raise RuntimeError('rate limited') - - try: - return await _get_version_with_retry(name, conf, **kwargs) - finally: - await _cleanup_http_client() - -async def _get_version_with_retry(name, conf, **kwargs): - global ALLOW_REQUEST - if ALLOW_REQUEST is None: - ALLOW_REQUEST = asyncio.Event() - ALLOW_REQUEST.set() - - for _ in range(2): # retry once - try: - await ALLOW_REQUEST.wait() - return await get_version_real(name, conf, **kwargs) - except HTTPError as e: - if e.code in [403, 429]: - if n := check_ratelimit(e, name): - ALLOW_REQUEST.clear() - await asyncio.sleep(n+1) - ALLOW_REQUEST.set() - continue - RATE_LIMITED_ERROR = True - raise - -async def _cleanup_http_client(): - """Clean up the global HTTP client if it exists.""" - global _http_client - if _http_client is not None: - await _http_client.close() - _http_client = None - -QUERY_GITHUB = """ -query { - rateLimit { - limit - remaining - resetAt - } - repository(owner: "$owner", name: "$name") { - # Default branch commits - defaultBranchRef { - target { - ... on Commit { - history(first: 1) { - totalCount - edges { - node { - oid - committedDate - } - } - } - } - } - } - # All tags - refs(refPrefix: "refs/tags/", first: 1, orderBy: {field: TAG_COMMIT_DATE, direction: DESC}) { - edges { - node { - name - target { - oid - ... 
on Commit { - url - } - } - } - } - } - # All releases (can't filter server-side) -releases(first: 100, orderBy: { field: CREATED_AT, direction: DESC }) { - totalCount - edges { - node { - name - url - tagName - isPrerelease - isLatest - createdAt - } - } - pageInfo { - hasNextPage - endCursor - } - } - } -} -""" - -async def get_latest_tag(conf: Entry, key: Tuple[str, str, str, str]) -> RichResult: - host, repo, query, token = key - owner, reponame = repo.split('/') - if not token: - raise GetVersionError('token is required for latest tag query') - - repo_data = await execute_github_query(host, owner, reponame, token) - refs = repo_data['refs']['edges'] - version = refs[0]['node']['name'] - revision = refs[0]['node']['target']['oid'] - commits = repo_data["defaultBranchRef"]["target"]["history"]["totalCount"] - sha = repo_data["defaultBranchRef"]["target"]["history"]["edges"][0]["node"]["oid"] - return create_rich_result( - conf=conf, - commits=commits, - sha=sha, - version=version, - gitref=f"refs/tags/{version}", - revision=revision, - url=f'https://github.com/{repo}/releases/tag/{version}', - ) - -async def get_version_real( - name: str, conf: Entry, *, - cache: AsyncCache, keymanager: KeyManager, - **kwargs, -) -> VersionResult: - repo = conf['github'] - owner, reponame = repo.split('/') - host = conf.get('host', "github.com") - token = get_github_token(conf, host, keymanager) - - use_latest_tag = conf.get('use_latest_tag', False) - if use_latest_tag: - if not token: - raise GetVersionError('token not given but it is required') - - query = conf.get('query', '') - return await cache.get((host, repo, query, token), get_latest_tag(conf)) - - repo_data = await execute_github_query(host, owner, reponame, token) - commits = repo_data["defaultBranchRef"]["target"]["history"]["totalCount"] - sha = repo_data["defaultBranchRef"]["target"]["history"]["edges"][0]["node"]["oid"] - - use_max_tag = conf.get('use_max_tag', False) - if use_max_tag: - refs = repo_data['refs']['edges'] - tags: List[Union[str, RichResult]] = [ - create_rich_result( - conf=conf, - commits=commits, - sha=sha, - version=ref['node']['name'], - gitref=f"refs/tags/{ref['node']['name']}", - revision=ref['node']['target']['oid'], - url=f'https://github.com/{repo}/releases/tag/{ref["node"]["name"]}', - ) for ref in refs - ] - if not tags: - raise GetVersionError('No tag found in upstream repository.') - return tags - use_latest_release = conf.get('use_latest_release', False) - use_newest_release = conf.get('use_newest_release', False) - include_prereleases = conf.get('use_prereleases', False) - if use_latest_release or use_newest_release: - if use_latest_release: - releases = repo_data['releases']['edges'] - if not releases: - raise GetVersionError('No release found in upstream repository.') - if include_prereleases: - latest_release = next( - (release['node'] for release in releases if release['node']['isLatest'] or release['node']['isPrerelease]']), - None - ) - else: - latest_release = next( - (release['node'] for release in releases if release['node']['isLatest'] and not release['node']['isPrerelease']), - None - ) - - elif use_newest_release: - releases = repo_data['releases']['edges'] - latest_release = next( - (release['node'] for release in releases), - None - ) - - - use_release_name = conf.get('use_release_name', False) - version = latest_release['name'] if use_release_name else latest_release['tagName'] - - return create_rich_result( - conf=conf, - commits=commits, - sha=sha, - version=version, - 
gitref=f"refs/tags/{latest_release['tagName']}", - url=latest_release['url'], - ) - else: - commit = repo_data['defaultBranchRef']['target']['history']['edges'][0]['node'] - return create_rich_result( - conf=conf, - commits=commits, - sha=sha, - version=commit['committedDate'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'), - revision=commit['oid'], - url=f'https://github.com/{repo}/commit/{commit["oid"]}', - ) - -def check_ratelimit(exc: HTTPError, name: str) -> Optional[int]: - res = exc.response - if not res: - raise exc - - if v := res.headers.get('retry-after'): - n = int(v) - logger.warning('retry-after', n=n) - return n - - # default -1 is used to re-raise the exception - n = int(res.headers.get('X-RateLimit-Remaining', -1)) - if n == 0: - reset = int(res.headers.get('X-RateLimit-Reset')) - logger.error(f'rate limited, resetting at {time.ctime(reset)}. ' - 'Or get an API token to increase the allowance if not yet', - name = name, - reset = reset) - return None - - raise exc From 19530736a636df27fdb128d560c78df06b9b9c63 Mon Sep 17 00:00:00 2001 From: envolution Date: Thu, 28 Nov 2024 12:25:51 -0500 Subject: [PATCH 40/40] remove environ token, add MIT license comments --- nvchecker_source/github-graphql.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/nvchecker_source/github-graphql.py b/nvchecker_source/github-graphql.py index 8242a41..518278e 100644 --- a/nvchecker_source/github-graphql.py +++ b/nvchecker_source/github-graphql.py @@ -1,3 +1,6 @@ +# MIT licensed +# Copyright (c) 2013-2020, 2024 lilydjwg , et al. + import os import time import aiohttp @@ -8,8 +11,8 @@ async def get_github_token(conf: Entry, host: str, keymanager: KeyManager) -> Op token = conf.get('token') if token is None: token = keymanager.get_key(host.lower(), 'github') - if token is None: - token = os.environ.get('GITHUB_TOKEN') + #if token is None: + # token = os.environ.get('GITHUB_TOKEN') return token def create_rich_result(conf, commits, sha, **kwargs) -> RichResult: