implement second batch of URL results for sources

Daniel Peukert 2023-10-18 21:46:28 +02:00
parent 592b4a4f6b
commit 55b3f671c8
4 changed files with 66 additions and 30 deletions
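
For context, nvchecker source plugins can return a RichResult, which pairs the version string with the URL it was found at; this commit moves the Bitbucket, Gitea, GitHub and GitLab sources over to that form. A minimal sketch of the pattern follows (the signature is abbreviated and the version and URL values are made up):

from nvchecker.api import RichResult

async def get_version(name, conf, *, cache, **kwargs):
  # Pair the detected version with the page it came from; returning a
  # plain string (or a list mixing strings and RichResults) still works.
  return RichResult(
    version = '1.2.3',
    url = 'https://example.com/project/releases/tag/1.2.3',
  )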

nvchecker_source/bitbucket.py
@@ -22,7 +22,7 @@ async def get_version(
   use_sorted_tags = conf.get('use_sorted_tags', False)
   if use_sorted_tags or use_max_tag:
-    parameters = {'fields': 'values.name,next'}
+    parameters = {'fields': 'values.name,values.links.html.href,next'}
 
     if use_sorted_tags:
       parameters['sort'] = conf.get('sort', '-target.date')
@@ -33,22 +33,22 @@ async def get_version(
     url = BITBUCKET_MAX_TAG % repo
     url += '?' + urlencode(parameters)
-    version = await _get_tags(url, max_page=1, cache=cache)
+    return await _get_tags(url, max_page=1, cache=cache)
 
   elif use_max_tag:
     url = BITBUCKET_MAX_TAG % repo
     url += '?' + urlencode(parameters)
     max_page = conf.get('max_page', 3)
-    version = await _get_tags(url, max_page=max_page, cache=cache)
+    return await _get_tags(url, max_page=max_page, cache=cache)
 
   else:
     url = BITBUCKET_URL % (repo, br)
     data = await cache.get_json(url)
-    version = data['values'][0]['date'].split('T', 1)[0].replace('-', '')
-
-  return version
+    return RichResult(
+      version = data['values'][0]['date'].split('T', 1)[0].replace('-', ''),
+      url = data['values'][0]['links']['html']['href'],
+    )
 
 async def _get_tags(
   url: str, *,
@@ -59,7 +59,12 @@ async def _get_tags(
   for _ in range(max_page):
     data = await cache.get_json(url)
-    ret.extend(x['name'] for x in data['values'])
+    ret.extend([
+      RichResult(
+        version = tag['name'],
+        url = tag['links']['html']['href'],
+      ) for tag in data['values']
+    ])
 
     if 'next' in data:
       url = data['next']
     else:
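
The widened fields filter ('values.name,values.links.html.href,next') asks Bitbucket for each tag's HTML link alongside its name, which is what the new comprehension reads. A rough sketch of the payload shape being consumed, with made-up names and URLs:

# Hypothetical excerpt of a Bitbucket tags response after the new fields filter.
data = {
  'values': [
    {
      'name': 'v1.2.3',
      'links': {'html': {'href': 'https://bitbucket.org/owner/repo/src/v1.2.3'}},
    },
  ],
  'next': 'https://api.bitbucket.org/2.0/repositories/owner/repo/refs/tags?page=2',
}

# Same mapping as in the diff, with plain dicts standing in for RichResult.
results = [
  {'version': tag['name'], 'url': tag['links']['html']['href']}
  for tag in data['values']
]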

nvchecker_source/gitea.py
@@ -9,7 +9,8 @@ GITEA_URL = 'https://%s/api/v1/repos/%s/commits'
 GITEA_MAX_TAG = 'https://%s/api/v1/repos/%s/tags'
 
 from nvchecker.api import (
-  VersionResult, Entry, AsyncCache, KeyManager,
+  VersionResult, RichResult, Entry,
+  AsyncCache, KeyManager,
 )
 
 async def get_version(
@@ -42,7 +43,14 @@ async def get_version(
   data = await cache.get_json(url, headers = headers)
 
   if use_max_tag:
-    version = [tag["name"] for tag in data]
+    return [
+      RichResult(
+        version = tag['name'],
+        url = f'https://{host}/{conf["gitea"]}/releases/tag/{tag["name"]}',
+      ) for tag in data
+    ]
   else:
-    version = data[0]['commit']['committer']['date'].split('T', 1)[0].replace('-', '')
-  return version
+    return RichResult(
+      version = data[0]['commit']['committer']['date'].split('T', 1)[0].replace('-', ''),
+      url = data[0]['html_url'],
+    )
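
Here the tag URL is built from the instance host and the configured owner/repo path rather than read from the API response; a quick illustration with made-up values:

host = 'gitea.com'               # hypothetical instance
conf = {'gitea': 'owner/repo'}   # hypothetical entry configuration
tag = {'name': 'v1.2.3'}         # hypothetical tag entry from the API

url = f'https://{host}/{conf["gitea"]}/releases/tag/{tag["name"]}'
assert url == 'https://gitea.com/owner/repo/releases/tag/v1.2.3'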

nvchecker_source/github.py
@@ -3,13 +3,13 @@
 import time
 from urllib.parse import urlencode
-from typing import Tuple
+from typing import List, Tuple, Union
 
 import structlog
 
 from nvchecker.api import (
   VersionResult, Entry, AsyncCache, KeyManager,
-  TemporaryError, session, GetVersionError,
+  TemporaryError, session, RichResult, GetVersionError,
 )
 
 logger = structlog.get_logger(logger_name=__name__)
@@ -49,6 +49,7 @@ QUERY_LATEST_RELEASE_WITH_PRERELEASES = '''
       edges {{
         node {{
           name
+          url
         }}
       }}
     }}
@@ -56,7 +57,7 @@ QUERY_LATEST_RELEASE_WITH_PRERELEASES = '''
 }}
 '''
 
-async def get_latest_tag(key: Tuple[str, str, str]) -> str:
+async def get_latest_tag(key: Tuple[str, str, str]) -> RichResult:
   repo, query, token = key
   owner, reponame = repo.split('/')
   headers = {
@@ -80,9 +81,13 @@ async def get_latest_tag(key: Tuple[str, str, str]) -> str:
   if not refs:
     raise GetVersionError('no tag found')
 
-  return refs[0]['node']['name']
+  version = refs[0]['node']['name']
+  return RichResult(
+    version = version,
+    url = f'https://github.com/{repo}/releases/tag/{version}',
+  )
 
-async def get_latest_release_with_prereleases(key: Tuple[str, str]) -> str:
+async def get_latest_release_with_prereleases(key: Tuple[str, str]) -> RichResult:
   repo, token = key
   owner, reponame = repo.split('/')
   headers = {
@@ -105,7 +110,10 @@ async def get_latest_release_with_prereleases(key: Tuple[str, str]) -> str:
   if not refs:
     raise GetVersionError('no release found')
 
-  return refs[0]['node']['name']
+  return RichResult(
+    version = refs[0]['node']['name'],
+    url = refs[0]['node']['url'],
+  )
 
 async def get_version_real(
   name: str, conf: Entry, *,
@@ -160,7 +168,12 @@ async def get_version_real(
   data = await cache.get_json(url, headers = headers)
 
   if use_max_tag:
-    tags = [ref['ref'].split('/', 2)[-1] for ref in data]
+    tags: List[Union[str, RichResult]] = [
+      RichResult(
+        version = ref['ref'].split('/', 2)[-1],
+        url = f'https://github.com/{repo}/releases/tag/{ref["ref"].split("/", 2)[-1]}',
+      ) for ref in data
+    ]
     if not tags:
       raise GetVersionError('No tag found in upstream repository.')
     return tags
@@ -168,14 +181,17 @@ async def get_version_real(
   if use_latest_release:
     if 'tag_name' not in data:
       raise GetVersionError('No release found in upstream repository.')
-    version = data['tag_name']
+    return RichResult(
+      version = data['tag_name'],
+      url = data['html_url'],
+    )
   else:
+    return RichResult(
       # YYYYMMDD.HHMMSS
-    version = data[0]['commit']['committer']['date'] \
-        .rstrip('Z').replace('-', '').replace(':', '').replace('T', '.')
-
-  return version
+      version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'),
+      url = data[0]['html_url'],
+    )
 
 def check_ratelimit(exc, name):
   res = exc.response
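
In the use_max_tag branch each entry is a git ref of the form 'refs/tags/<name>'; splitting on '/' at most twice keeps tag names that themselves contain slashes intact, and the same value feeds the releases/tag URL. A small illustration with made-up refs:

refs = ['refs/tags/v1.2.3', 'refs/tags/release/2023.10']
names = [ref.split('/', 2)[-1] for ref in refs]
assert names == ['v1.2.3', 'release/2023.10']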

nvchecker_source/gitlab.py
@@ -6,8 +6,8 @@ import urllib.parse
 import structlog
 
 from nvchecker.api import (
-  VersionResult, Entry, AsyncCache, KeyManager,
-  TemporaryError,
+  VersionResult, RichResult, Entry,
+  AsyncCache, KeyManager, TemporaryError,
 )
 
 GITLAB_URL = 'https://%s/api/v4/projects/%s/repository/commits'
@@ -52,10 +52,17 @@ async def get_version_real(
   data = await cache.get_json(url, headers = headers)
 
   if use_max_tag:
-    version = [tag["name"] for tag in data]
+    return [
+      RichResult(
+        version = tag['name'],
+        url = f'https://{host}/{conf["gitlab"]}/-/tags/{tag["name"]}',
+      ) for tag in data
+    ]
   else:
-    version = data[0]['created_at'].split('T', 1)[0].replace('-', '')
-
-  return version
+    return RichResult(
+      version = data[0]['created_at'].split('T', 1)[0].replace('-', ''),
+      url = data[0]['web_url'],
+    )
 
 def check_ratelimit(exc, name):
   res = exc.response
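
In the commit branch the version stays the commit date reduced to YYYYMMDD, now returned together with the commit's web_url. The date handling, shown with a made-up timestamp:

created_at = '2023-10-18T21:46:28.000+02:00'  # hypothetical GitLab commit timestamp
version = created_at.split('T', 1)[0].replace('-', '')
assert version == '20231018'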