[0-9]+(?:\.[0-9]+)*) # release segment
(?P # pre-release
[-_\.]?
(?P(a|b|c|rc|alpha|beta|pre|preview))
[-_\.]?
(?P[0-9]+)?
)?
(?P # post release
(?:-(?P[0-9]+))
|
(?:
[-_\.]?
(?Ppost|rev|r)
[-_\.]?
(?P[0-9]+)?
)
)?
(?P # dev release
[-_\.]?
(?Pdev)
[-_\.]?
(?P[0-9]+)?
)?
)
(?:\+(?P[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? # local version
"""
class Version(_BaseVersion):
    """A parsed, comparable PEP 440 version string."""

    _regex = re.compile(r"^\s*" + VERSION_PATTERN + r"\s*$", re.VERBOSE | re.IGNORECASE)

    def __init__(self, version):
        # type: (str) -> None

        # Validate the version and parse it into pieces
        match = self._regex.search(version)
        if not match:
            raise InvalidVersion("Invalid version: '{0}'".format(version))

        # Store the parsed out pieces of the version
        self._version = _Version(
            epoch=int(match.group("epoch")) if match.group("epoch") else 0,
            release=tuple(int(i) for i in match.group("release").split(".")),
            pre=_parse_letter_version(match.group("pre_l"), match.group("pre_n")),
            post=_parse_letter_version(
                match.group("post_l"), match.group("post_n1") or match.group("post_n2")
            ),
            dev=_parse_letter_version(match.group("dev_l"), match.group("dev_n")),
            local=_parse_local_version(match.group("local")),
        )

        # Generate a key which will be used for sorting
        self._key = _cmpkey(
            self._version.epoch,
            self._version.release,
            self._version.pre,
            self._version.post,
            self._version.dev,
            self._version.local,
        )

    def __repr__(self):
        # type: () -> str
        # Fixed: the format string had lost its "<Version(...)>" text, so
        # repr() of any Version returned an empty string.
        return "<Version({0})>".format(repr(str(self)))

    def __str__(self):
        # type: () -> str
        """Render the canonical (normalized) version string."""
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append("{0}!".format(self.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        # Pre-release
        if self.pre is not None:
            parts.append("".join(str(x) for x in self.pre))

        # Post-release
        if self.post is not None:
            parts.append(".post{0}".format(self.post))

        # Development release
        if self.dev is not None:
            parts.append(".dev{0}".format(self.dev))

        # Local version segment
        if self.local is not None:
            parts.append("+{0}".format(self.local))

        return "".join(parts)

    @property
    def epoch(self):
        # type: () -> int
        _epoch = self._version.epoch  # type: int
        return _epoch

    @property
    def release(self):
        # type: () -> Tuple[int, ...]
        _release = self._version.release  # type: Tuple[int, ...]
        return _release

    @property
    def pre(self):
        # type: () -> Optional[Tuple[str, int]]
        _pre = self._version.pre  # type: Optional[Tuple[str, int]]
        return _pre

    @property
    def post(self):
        # Fixed type comment: only the numeric part is returned, not the pair.
        # type: () -> Optional[int]
        return self._version.post[1] if self._version.post else None

    @property
    def dev(self):
        # Fixed type comment: only the numeric part is returned, not the pair.
        # type: () -> Optional[int]
        return self._version.dev[1] if self._version.dev else None

    @property
    def local(self):
        # type: () -> Optional[str]
        if self._version.local:
            return ".".join(str(x) for x in self._version.local)
        else:
            return None

    @property
    def public(self):
        # type: () -> str
        # Everything before the "+" local segment.
        return str(self).split("+", 1)[0]

    @property
    def base_version(self):
        # type: () -> str
        # Epoch + release only; pre/post/dev/local are dropped.
        parts = []

        # Epoch
        if self.epoch != 0:
            parts.append("{0}!".format(self.epoch))

        # Release segment
        parts.append(".".join(str(x) for x in self.release))

        return "".join(parts)

    @property
    def is_prerelease(self):
        # type: () -> bool
        return self.dev is not None or self.pre is not None

    @property
    def is_postrelease(self):
        # type: () -> bool
        return self.post is not None

    @property
    def is_devrelease(self):
        # type: () -> bool
        return self.dev is not None

    @property
    def major(self):
        # type: () -> int
        return self.release[0] if len(self.release) >= 1 else 0

    @property
    def minor(self):
        # type: () -> int
        return self.release[1] if len(self.release) >= 2 else 0

    @property
    def micro(self):
        # type: () -> int
        return self.release[2] if len(self.release) >= 3 else 0
def _parse_letter_version(
letter, # type: str
number, # type: Union[str, bytes, SupportsInt]
):
# type: (...) -> Optional[Tuple[str, int]]
if letter:
# We consider there to be an implicit 0 in a pre-release if there is
# not a numeral associated with it.
if number is None:
number = 0
# We normalize any letters to their lower case form
letter = letter.lower()
# We consider some words to be alternate spellings of other words and
# in those cases we want to normalize the spellings to our preferred
# spelling.
if letter == "alpha":
letter = "a"
elif letter == "beta":
letter = "b"
elif letter in ["c", "pre", "preview"]:
letter = "rc"
elif letter in ["rev", "r"]:
letter = "post"
return letter, int(number)
if not letter and number:
# We assume if we are given a number, but we are not given a letter
# then this is using the implicit post release syntax (e.g. 1.0-1)
letter = "post"
return letter, int(number)
return None
_local_version_separators = re.compile(r"[\._-]")
def _parse_local_version(local):
# type: (str) -> Optional[LocalType]
"""
Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve").
"""
if local is not None:
return tuple(
part.lower() if not part.isdigit() else int(part)
for part in _local_version_separators.split(local)
)
return None
def _cmpkey(
epoch, # type: int
release, # type: Tuple[int, ...]
pre, # type: Optional[Tuple[str, int]]
post, # type: Optional[Tuple[str, int]]
dev, # type: Optional[Tuple[str, int]]
local, # type: Optional[Tuple[SubLocalType]]
):
# type: (...) -> CmpKey
# When we compare a release version, we want to compare it with all of the
# trailing zeros removed. So we'll use a reverse the list, drop all the now
# leading zeros until we come to something non zero, then take the rest
# re-reverse it back into the correct order and make it a tuple and use
# that for our sorting key.
_release = tuple(
reversed(list(itertools.dropwhile(lambda x: x == 0, reversed(release))))
)
# We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0.
# We'll do this by abusing the pre segment, but we _only_ want to do this
# if there is not a pre or a post segment. If we have one of those then
# the normal sorting rules will handle this case correctly.
if pre is None and post is None and dev is not None:
_pre = NegativeInfinity # type: PrePostDevType
# Versions without a pre-release (except as noted above) should sort after
# those with one.
elif pre is None:
_pre = Infinity
else:
_pre = pre
# Versions without a post segment should sort before those with one.
if post is None:
_post = NegativeInfinity # type: PrePostDevType
else:
_post = post
# Versions without a development segment should sort after those with one.
if dev is None:
_dev = Infinity # type: PrePostDevType
else:
_dev = dev
if local is None:
# Versions without a local segment should sort before those with one.
_local = NegativeInfinity # type: LocalType
else:
# Versions with a local segment need that segment parsed to implement
# the sorting rules in PEP440.
# - Alpha numeric segments sort before numeric segments
# - Alpha numeric segments sort lexicographically
# - Numeric segments sort numerically
# - Shorter versions sort before longer versions when the prefixes
# match exactly
_local = tuple(
(i, "") if isinstance(i, int) else (NegativeInfinity, i) for i in local
)
return epoch, _release, _pre, _post, _dev, _local
nvchecker-2.15.1/nvchecker/slogconf.py000066400000000000000000000070531462655643400177300ustar00rootroot00000000000000# vim: se sw=2:
# MIT licensed
# Copyright (c) 2018-2020,2023-2024 lilydjwg , et al.
import logging
import os
import io
import traceback
import sys
import structlog
from .httpclient import TemporaryError
def _console_msg(event):
evt = event['event']
if evt == 'up-to-date':
msg = 'up-to-date, version %s' % event['version']
del event['version']
elif evt == 'updated':
if event.get('old_version'):
msg = 'updated from %(old_version)s to %(version)s' % event
else:
msg = 'updated to %(version)s' % event
del event['version'], event['old_version']
else:
msg = evt
if 'name' in event:
msg = f"{event['name']}: {msg}"
del event['name']
event['msg'] = msg
return event
def exc_info(logger, level, event):
  """structlog processor: mark `.exception()` events for traceback capture."""
  if level == 'exception':
    event.update(exc_info=True)
  return event
def filter_nones(logger, level, event):
  """structlog processor: drop a ``url`` key whose value is None."""
  # The non-None default means a missing key is left untouched.
  if event.get('url', '') is None:
    del event['url']
  return event
def filter_taskname(logger, level, event):
  """structlog processor: drop the automatic asyncio task-name field.

  ``taskName`` is added by Python 3.12's logging, is not useful to us, and
  would otherwise show up as a normal key-value pair.
  """
  event.pop('taskName', None)
  return event
def filter_exc(logger, level, event):
  """structlog processor: degrade noisy network tracebacks to plain errors.

  A ``TemporaryError`` with code 599 (network trouble) is not worth a full
  traceback; replace ``exc_info`` with a short ``error`` field instead.
  """
  exc_info = event.get('exc_info')
  if not exc_info:
    return event

  # exc_info is either True ("use the current exception") or the exception.
  exc = sys.exc_info()[1] if exc_info is True else exc_info
  if isinstance(exc, TemporaryError) and exc.code == 599:
    del event['exc_info']
    event['error'] = exc
  return event
def stdlib_renderer(logger, level, event):
  """structlog processor: mirror the event into the stdlib logging tree.

  The original event is returned unchanged so later processors still see it;
  only a copy is consumed here.
  """
  std_event = _console_msg(event.copy())
  # getLogger(None) is the root logger, matching the old KeyError fallback.
  std_logger = logging.getLogger(std_event.pop('logger_name', None))
  # Note: the default expression runs unconditionally, so the 'event' key is
  # always removed here even when 'msg' is present.
  msg = std_event.pop('msg', std_event.pop('event'))
  exc = std_event.pop('exc_info', None)
  if 'error' in std_event:
    std_event['error'] = repr(std_event['error'])
  getattr(std_logger, level)(
    msg, exc_info = exc, extra = std_event,
  )
  return event
# Shared JSON renderer; non-ASCII characters are emitted as-is.
_renderer = structlog.processors.JSONRenderer(ensure_ascii=False)


def json_renderer(logger, level, event):
  """structlog processor: render the event as one JSON line, adding `level`."""
  event['level'] = level
  return _renderer(logger, level, event)
def null_renderer(logger, level, event):
  """structlog processor: emit nothing (stdlib logging already handled it)."""
  return ''
class _Logger(logging.Logger):
  """Logger subclass whose findCaller skips logging/structlog glue frames,
  so %(filename)s / %(lineno)d point at the real call site."""
  # normcase'd path of this module (taken from a function defined here)
  _my_srcfile = os.path.normcase(
    stdlib_renderer.__code__.co_filename)
  # all frames under the structlog package are skipped as well
  _structlog_dir = os.path.dirname(structlog.__file__)

  def findCaller(self, stack_info=False, stacklevel=1):
    """
    Find the stack frame of the caller so that we can note the source
    file name, line number and function name.
    """
    f = logging.currentframe()
    #On some versions of IronPython, currentframe() returns None if
    #IronPython isn't run with -X:Frames.
    if f is not None:
      f = f.f_back
    orig_f = f
    # honor stacklevel like the stdlib implementation does
    while f and stacklevel > 1:
      f = f.f_back
      stacklevel -= 1
    if not f:
      f = orig_f
    rv = "(unknown file)", 0, "(unknown function)", None
    while hasattr(f, "f_code"):
      co = f.f_code
      filename = os.path.normcase(co.co_filename)
      # skip frames from the stdlib logging module, this module, or structlog
      if filename in [logging._srcfile, self._my_srcfile] \
          or filename.startswith(self._structlog_dir):
        f = f.f_back
        continue
      sinfo = None
      if stack_info:
        # render the stack into a string, dropping the trailing newline
        sio = io.StringIO()
        sio.write('Stack (most recent call last):\n')
        traceback.print_stack(f, file=sio)
        sinfo = sio.getvalue()
        if sinfo[-1] == '\n':
          sinfo = sinfo[:-1]
        sio.close()
      rv = (co.co_filename, f.f_lineno, co.co_name, sinfo)
      break
    return rv
def fix_logging():
  """Install _Logger as the logger class so call sites are reported correctly."""
  logging.setLoggerClass(_Logger)
nvchecker-2.15.1/nvchecker/sortversion.py000066400000000000000000000016111462655643400205050ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2021 lilydjwg , et al.
'''
Sort versions using deprecated pkg_resource / packaging.parse_version or pyalpm.vercmp
'''
__all__ = ["sort_version_keys"]
from .lib.packaging_version import parse as parse_version
# pyalpm is optional: expose its vercmp as a sort key when importable, or a
# stub that raises when actually used, so the sort_version_keys lookup below
# always succeeds.
try:
  import pyalpm
  from functools import cmp_to_key
  vercmp = cmp_to_key(pyalpm.vercmp)
  vercmp_available = True
except ImportError:
  def vercmp(k):
    raise NotImplementedError("Using vercmp but pyalpm can not be imported!")
  vercmp_available = False

# awesomeversion is likewise optional, with the same stub pattern.
try:
  from awesomeversion import AwesomeVersion
  awesomeversion_available = True
except ImportError:
  def AwesomeVersion(k): # type: ignore
    raise NotImplementedError("Using awesomeversion but it can not be imported!")
  awesomeversion_available = False

# Maps the configured sort method name to the corresponding version key func.
sort_version_keys = {
  "parse_version": parse_version,
  "vercmp": vercmp,
  "awesomeversion": AwesomeVersion,
}
nvchecker-2.15.1/nvchecker/tools.py000066400000000000000000000135511462655643400172560ustar00rootroot00000000000000# vim: se sw=2:
# MIT licensed
# Copyright (c) 2013-2024 lilydjwg , et al.
import sys
import argparse
import shutil
import structlog
import json
import os.path
from . import core
from .util import RichResult
logger = structlog.get_logger(logger_name=__name__)
def take() -> None:
  '''Entry point for nvtake: copy selected records from newver into oldver.

  With ``--all`` every record is taken; otherwise only the names given on the
  command line, where ``NAME=VERSION`` forces a specific version. The previous
  oldver file is kept as a ``~`` backup.
  '''
  parser = argparse.ArgumentParser(description='update version records of nvchecker')
  core.add_common_arguments(parser)
  parser.add_argument('--all', action='store_true',
                      help='take all updates')
  parser.add_argument('--ignore-nonexistent', action='store_true',
                      help='ignore nonexistent names')
  parser.add_argument('names', metavar='NAME', nargs='*',
                      help='software name to be updated. use NAME=VERSION to update '
                      'to a specific version instead of the new version.')
  args = parser.parse_args()
  if core.process_common_arguments(args):
    return

  opt = core.load_file(args.file, use_keymanager=False)[1]
  if opt.ver_files is None:
    logger.critical(
      "doesn't have 'oldver' and 'newver' set.",
      source=args.file,
    )
    sys.exit(2)
  else:
    oldverf = opt.ver_files[0]
    newverf = opt.ver_files[1]

  oldvers = core.read_verfile(oldverf)
  newvers = core.read_verfile(newverf)

  if args.all:
    oldvers.update(newvers)
  else:
    name: str
    for name in args.names:
      if "=" in name:
        # Fixed: split only on the first '=' so a VERSION that itself
        # contains '=' no longer raises ValueError on unpacking.
        name, newver = name.split("=", 1)
        oldvers[name] = RichResult(version=newver)
      else:
        try:
          oldvers[name] = newvers[name]
        except KeyError:
          if args.ignore_nonexistent:
            logger.warning('nonexistent in newver, ignored', name=name)
            continue

          logger.critical(
            "doesn't exist in 'newver' set.", name=name,
          )
          sys.exit(2)

  try:
    # Back up the old file first; copy when it's a symlink so the link
    # target layout is preserved, rename otherwise.
    if os.path.islink(oldverf):
      shutil.copy(oldverf, oldverf.with_name(oldverf.name + '~'))
    else:
      oldverf.rename(
        oldverf.with_name(oldverf.name + '~'),
      )
  except FileNotFoundError:
    pass
  core.write_verfile(oldverf, oldvers)
def cmp() -> None:
  '''Entry point for nvcmp: report differences between oldver and newver records.'''
  parser = argparse.ArgumentParser(description='compare version records of nvchecker')
  core.add_common_arguments(parser)
  parser.add_argument('-j', '--json', action='store_true',
                      help='Output JSON array of dictionaries with {name, newver, oldver, [delta]} '
                      '(or array of names if --quiet)')
  parser.add_argument('-q', '--quiet', action='store_true',
                      help="Quiet mode, output only the names.")
  parser.add_argument('-a', '--all', action='store_true',
                      help="Include unchanged versions.")
  parser.add_argument('-s', '--sort',
                      choices=('parse_version', 'vercmp', 'awesomeversion', 'none'),
                      default='parse_version',
                      help='Version compare method to backwards the arrow '
                      '(default: parse_version)')
  parser.add_argument('-n', '--newer', action='store_true',
                      help='Shows only the newer ones according to --sort.')
  parser.add_argument('--exit-status', action='store_true',
                      help="exit with status 4 if there are updates")
  args = parser.parse_args()
  if core.process_common_arguments(args):
    return

  opt = core.load_file(args.file, use_keymanager=False)[1]
  if opt.ver_files is None:
    logger.critical(
      "doesn't have 'oldver' and 'newver' set.",
      source=args.file,
    )
    sys.exit(2)
  else:
    oldverf = opt.ver_files[0]
    newverf = opt.ver_files[1]

  # Only the plain version strings are compared here.
  oldvers = {k: v.version for k, v in core.read_verfile(oldverf).items()}
  newvers = {k: v.version for k, v in core.read_verfile(newverf).items()}

  differences = []

  for name, newver in sorted(newvers.items()): # accumulate differences
    oldver = oldvers.get(name, None)

    diff = {
      'name': name,
      'oldver': oldver,
      'newver': newver
    }

    if oldver is not None and newver is not None:
      if oldver == newver:
        diff['delta'] = 'equal'

      elif args.sort == "none":
        diff['delta'] = 'new' # assume it's a new version if we're not comparing

      else:
        # imported lazily so missing optional comparison backends only
        # matter when a comparison is actually requested
        from .sortversion import sort_version_keys
        version = sort_version_keys[args.sort]

        if version(oldver) > version(newver): # type: ignore
          if args.newer:
            continue # don't store this diff
          diff['delta'] = 'old'
        else:
          diff['delta'] = 'new'

    elif oldver is None:
      diff['delta'] = 'added'

    elif newver is None:
      if args.newer:
        continue # don't store this diff
      diff['delta'] = 'gone'

    # without --all, unchanged entries are dropped from the report
    if args.all or diff['delta'] != 'equal':
      differences.append(diff)

  if args.json:
    if args.quiet:
      print(json.dumps([diff['name'] for diff in differences], separators=(',', ':')))
    else:
      print(json.dumps(differences, sort_keys=True, separators=(',', ':')))

  elif args.quiet:
    for diff in differences:
      print(diff['name'])

  else:
    from .lib.nicelogger import Colors, support_color
    c = Colors(support_color(sys.stdout))

    # arrow symbol and old-version color for each kind of change
    diffstyles = {
      'new': {
        'symbol': '->',
        'oldc': c.red
      },
      'old': {
        'symbol': f'{c.red}<-{c.normal}',
        'oldc': c.red
      },
      'added': {
        'symbol': '++',
        'oldc': c.red
      },
      'gone': {
        'symbol': f'{c.red}--{c.normal}',
        'oldc': c.green
      },
      'equal': {
        'symbol': '==',
        'oldc': c.green
      }
    }

    for diff in differences:
      style = diffstyles[diff.get('delta', 'equal')] # type: ignore # mypy has issues with this line
      print(f'{diff["name"]} {style["oldc"]}{diff["oldver"]}{c.normal} {style["symbol"]} {c.green}{diff["newver"]}{c.normal}')

  if args.exit_status and any(
    diff.get('delta') != 'equal' for diff in differences
  ):
    sys.exit(4)
nvchecker-2.15.1/nvchecker/util.py000066400000000000000000000216621462655643400170750ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
from __future__ import annotations
import sys
import asyncio
from asyncio import Queue
from typing import (
Dict, Optional, List, NamedTuple, Union,
Any, Tuple, Callable, Coroutine, Hashable,
TYPE_CHECKING,
)
from pathlib import Path
import contextvars
import abc
import netrc
from dataclasses import dataclass
if TYPE_CHECKING:
import tomli as tomllib
else:
try:
import tomllib
except ModuleNotFoundError:
import tomli as tomllib
import structlog
from .httpclient import session
from .ctxvars import tries as ctx_tries
from .ctxvars import proxy as ctx_proxy
from .ctxvars import user_agent as ctx_ua
from .ctxvars import httptoken as ctx_httpt
from .ctxvars import verify_cert as ctx_verify_cert
logger = structlog.get_logger(logger_name=__name__)
Entry = Dict[str, Any]
Entry.__doc__ = '''The configuration `dict` for an entry.'''
Entries = Dict[str, Entry]
if sys.version_info[:2] >= (3, 11):
  from typing import LiteralString
else:
  # typing.LiteralString is 3.11+; plain str is an adequate runtime fallback.
  LiteralString = str

if sys.version_info[:2] >= (3, 10):
  # kw_only keeps construction sites explicit; only available on 3.10+.
  @dataclass(kw_only=True)
  class RichResult:
    """A version string plus optional extra context about the release."""
    version: str
    gitref: Optional[str] = None
    revision: Optional[str] = None
    url: Optional[str] = None

    def __str__(self):
      return self.version
else:
  # Same shape for older Pythons, just without keyword-only enforcement.
  @dataclass
  class RichResult:
    """A version string plus optional extra context about the release."""
    version: str
    gitref: Optional[str] = None
    revision: Optional[str] = None
    url: Optional[str] = None

    def __str__(self):
      return self.version
VersionResult = Union[None, str, RichResult, List[Union[str, RichResult]], Exception]
VersionResult.__doc__ = '''The result of a `get_version` check.
* `None` - No version found.
* `str` - A single version string is found.
* `RichResult` - A version string with additional information.
* `List[Union[str, RichResult]]` - Multiple version strings with or without additional information are found. :ref:`list options` will be applied.
* `Exception` - An error occurred.
'''
class FileLoadError(Exception):
  """Raised when a file (such as the keyfile) cannot be read or parsed.

  Carries the kind of file, its name, and the underlying exception.
  """
  def __init__(self, kind, filename, exc):
    self.kind = kind
    self.filename = filename
    self.exc = exc

  def __str__(self):
    return 'failed to load {} {!r}: {}'.format(self.kind, self.filename, self.exc)
class KeyManager:
  '''Looks up authentication tokens from the keyfile and from ~/.netrc.'''
  def __init__(
    self, file: Optional[Path],
  ) -> None:
    keys = {}
    if file is not None:
      try:
        with file.open('rb') as f:
          keys = tomllib.load(f)['keys']
      except (OSError, tomllib.TOMLDecodeError) as e:
        raise FileLoadError('keyfile', str(file), e)
    self.keys = keys

    # ~/.netrc is optional; a missing or unparsable file just means no hosts.
    try:
      netrc_hosts = netrc.netrc().hosts
    except (FileNotFoundError, netrc.NetrcParseError):
      netrc_hosts = {}
    self.netrc = netrc_hosts

  def get_key(self, name: str, legacy_name: Optional[str] = None) -> Optional[str]:
    '''Return the token for ``name``, preferring the keyfile over ~/.netrc.'''
    token = self.keys.get(name) or self.keys.get(legacy_name)
    if token:
      return token
    # netrc entries are (login, account, password); the password is the token
    entry = self.netrc.get(name)
    return entry and entry[2]
class EntryWaiter:
  '''Lets one entry block until another entry's check has produced a result.'''
  def __init__(self) -> None:
    # one shared future per entry name, created lazily by wait()
    self._waiting: Dict[str, asyncio.Future] = {}

  async def wait(self, name: str) -> str:
    '''Wait on the ``name`` entry and return its result (the version string)'''
    if name not in self._waiting:
      self._waiting[name] = asyncio.Future()
    return await self._waiting[name]

  def set_result(self, name: str, value: str) -> None:
    # resolve the future only if somebody is actually waiting on it
    if (fu := self._waiting.get(name)) is not None:
      fu.set_result(value)

  def set_exception(self, name: str, e: Exception) -> None:
    if (fu := self._waiting.get(name)) is not None:
      fu.set_exception(e)
class RawResult(NamedTuple):
  '''The unprocessed result from a check.'''
  # table name of the entry
  name: str
  # whatever the check produced (may be an Exception)
  version: VersionResult
  # the entry's configuration table
  conf: Entry

# Field docstrings are set explicitly so they show up in generated docs.
RawResult.name.__doc__ = 'The name (table name) of the entry.'
RawResult.version.__doc__ = 'The result from the check.'
RawResult.conf.__doc__ = 'The entry configuration (table content) of the entry.'
ResultData = Dict[str, RichResult]
class BaseWorker:
  '''The base class for defining `Worker` classes for source plugins.

  .. py:attribute:: task_sem
     :type: asyncio.Semaphore

     This is the rate-limiting semaphore. Workers should acquire it while
     doing one unit of work.

  .. py:attribute:: result_q
     :type: Queue[RawResult]

     Results should be put into this queue.

  .. py:attribute:: tasks
     :type: List[Tuple[str, Entry]]

     A list of tasks for the `Worker` to complete. Every task consists of
     a tuple for the task name (table name in the configuration file) and the
     content of that table (as a `dict`).

  .. py:attribute:: keymanager
     :type: KeyManager

     The `KeyManager` for retrieving keys from the keyfile.
  '''
  def __init__(
    self,
    task_sem: asyncio.Semaphore,
    result_q: Queue[RawResult],
    tasks: List[Tuple[str, Entry]],
    keymanager: KeyManager,
  ) -> None:
    self.task_sem = task_sem
    self.result_q = result_q
    self.keymanager = keymanager
    self.tasks = tasks

  @abc.abstractmethod
  async def run(self) -> None:
    '''Run the `tasks`. Subclasses should implement this method.'''
    raise NotImplementedError

  async def _run_maynot_raise(self) -> None:
    # Scheduler-facing wrapper: log and swallow any exception so one buggy
    # worker cannot tear down the whole process.
    try:
      await self.run()
    except Exception:
      # don't let an exception tear down the whole process
      logger.exception('exception raised by Worker.run')
class AsyncCache:
  '''A cache for use with async functions.'''
  # maps key -> either a pending asyncio Task or the final cached value
  cache: Dict[Hashable, Any]
  lock: asyncio.Lock

  def __init__(self) -> None:
    self.cache = {}
    self.lock = asyncio.Lock()

  async def _get_json(
    self, key: Tuple[str, str, Tuple[Tuple[str, str], ...]],
  ) -> Any:
    # key is ('_jsonurl', url, sorted header pairs); the tag is ignored here
    _, url, headers = key
    res = await session.get(url, headers=dict(headers))
    return res.json()

  async def get_json(
    self, url: str, *,
    headers: Dict[str, str] = {},
  ) -> Any:
    '''Get specified ``url`` and return the response content as JSON.

    The returned data will be cached for reuse.
    '''
    # headers are flattened into a sorted tuple so the key is hashable and
    # insensitive to dict ordering
    key = '_jsonurl', url, tuple(sorted(headers.items()))
    return await self.get(
      key , self._get_json) # type: ignore

  async def get(
    self,
    key: Hashable,
    func: Callable[[Hashable], Coroutine[Any, Any, Any]],
  ) -> Any:
    '''Run async ``func`` and cache its return value by ``key``.

    The ``key`` should be hashable, and the function will be called with it as
    its sole argument. For multiple simultaneous calls with the same key, only
    one will actually be called, and others will wait and return the same
    (cached) value.
    '''
    # The lock only guards the dict lookup/insert; awaiting happens outside
    # it so lookups for other keys can proceed concurrently.
    async with self.lock:
      cached = self.cache.get(key)
      if cached is None:
        # first caller: store the pending task so concurrent callers share it
        coro = func(key)
        fu = asyncio.create_task(coro)
        self.cache[key] = fu

    if asyncio.isfuture(cached): # pending
      return await cached
    elif cached is not None: # cached
      return cached
    else: # not cached
      r = await fu
      # replace the finished task with the plain value for cheaper hits later
      self.cache[key] = r
      return r
if TYPE_CHECKING:
  from typing_extensions import Protocol

  class GetVersionFunc(Protocol):
    # The call signature every source plugin's get_version must satisfy.
    async def __call__(
      self,
      name: str, conf: Entry,
      *,
      cache: AsyncCache,
      keymanager: KeyManager,
    ) -> VersionResult:
      ...
else:
  # At runtime the Protocol is unnecessary (and typing_extensions may be
  # absent), so fall back to Any.
  GetVersionFunc = Any
class FunctionWorker(BaseWorker):
  """Worker that runs a plugin's `get_version` function for each entry."""
  # set by initialize() before run() is called
  func: GetVersionFunc
  cache: AsyncCache

  def initialize(self, func: GetVersionFunc) -> None:
    """Late initialization: record the plugin function and create the cache."""
    self.func = func
    self.cache = AsyncCache()

  async def run(self) -> None:
    futures = []
    for name, entry in self.tasks:
      # each entry gets its own copied context so per-entry settings
      # (proxy, tries, etc.) don't leak between entries
      ctx = contextvars.copy_context()
      fu = ctx.run(self.run_one, name, entry)
      futures.append(fu)

    for fu2 in asyncio.as_completed(futures):
      await fu2

  async def run_one(
    self, name: str, entry: Entry,
  ) -> None:
    """Check a single entry and push a RawResult (or the exception) to result_q."""
    assert self.func is not None

    # propagate per-entry overrides into the context variables
    tries = entry.get('tries', None)
    if tries is not None:
      ctx_tries.set(tries)
    proxy = entry.get('proxy', None)
    if proxy is not None:
      ctx_proxy.set(proxy)
    ua = entry.get('user_agent', None)
    if ua is not None:
      ctx_ua.set(ua)
    httpt = entry.get('httptoken', None)
    if httpt is None:
      # fall back to the keyfile entry named "httptoken_<entry name>"
      httpt = self.keymanager.get_key('httptoken_'+name)
    if httpt is not None:
      ctx_httpt.set(httpt)
    verify_cert = entry.get('verify_cert', None)
    if verify_cert is not None:
      ctx_verify_cert.set(verify_cert)

    try:
      async with self.task_sem:
        version = await self.func(
          name, entry,
          cache = self.cache,
          keymanager = self.keymanager,
        )
      await self.result_q.put(RawResult(name, version, entry))
    except Exception as e:
      # errors are reported as results, not raised, so one failing entry
      # can't abort the whole run
      await self.result_q.put(RawResult(name, e, entry))
class GetVersionError(Exception):
  '''An error occurred while getting version information.

  Raise this when a known bad situation happens.

  :param msg: The error message.
  :param kwargs: Arbitrary additional context for the error.
  '''
  def __init__(self, msg: LiteralString, **kwargs: Any) -> None:
    self.msg, self.kwargs = msg, kwargs
nvchecker-2.15.1/nvchecker_source/000077500000000000000000000000001462655643400171175ustar00rootroot00000000000000nvchecker-2.15.1/nvchecker_source/alpm.py000066400000000000000000000022701462655643400204230ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020-2021 DDoSolitary , et al.
from nvchecker.api import GetVersionError
from pyalpm import Handle
async def open_db(info):
  """Open the pyalpm sync database ``repo`` under ``dbpath``.

  Returns (handle, db); the handle is returned too so it stays alive for
  as long as the db object is cached.
  """
  dbpath, repo = info
  handle = Handle('/', dbpath)
  db = handle.register_syncdb(repo, 0)
  return handle, db
async def get_version(name, conf, *, cache, **kwargs):
  """Look a package up in local ALPM sync databases and return its version."""
  pkgname = conf.get('alpm', name)
  dbpath = conf.get('dbpath', '/var/lib/pacman')
  strip_release = conf.get('strip_release', False)
  provided = conf.get('provided')

  repo = conf.get('repo')
  if repo is not None:
    repos = [repo]
  else:
    repos = conf.get('repos') or ['core', 'extra', 'multilib']

  # search the configured repos in order, stopping at the first hit
  pkg = None
  for repo in repos:
    db = (await cache.get((dbpath, repo), open_db))[1]
    pkg = db.get_pkg(pkgname)
    if pkg is not None:
      break

  if pkg is None:
    raise GetVersionError('package not found in the ALPM database')

  if provided is None:
    version = pkg.version
  else:
    # provides entries look like "name=version"; index them by name
    provides = dict(entry.split('=', 1) for entry in pkg.provides if '=' in entry)
    version = provides.get(provided)
    if version is None:
      raise GetVersionError('provides element not found')

  # optionally drop the pkgrel suffix ("1.2-3" -> "1.2")
  return version.split('-', 1)[0] if strip_release else version
nvchecker-2.15.1/nvchecker_source/alpmfiles.py000066400000000000000000000027711462655643400214540ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2023 Pekka Ristola , et al.
from asyncio import create_subprocess_exec
from asyncio.subprocess import PIPE
import re
from typing import Tuple, List
from nvchecker.api import GetVersionError
async def get_files(info: Tuple[str, str]) -> List[str]:
  """Return the file list of ``pkg`` by running ``pacman -Flq`` on ``dbpath``.

  pyalpm has no bindings for the files databases, so shell out to pacman.
  """
  dbpath, pkg = info
  cmd = ['pacman', '-Flq', '--dbpath', dbpath, pkg]
  proc = await create_subprocess_exec(*cmd, stdout = PIPE, stderr = PIPE)
  out, err = await proc.communicate()
  if proc.returncode != 0:
    raise GetVersionError(
      'pacman failed to get file list',
      pkg = pkg,
      cmd = cmd,
      stdout = out.decode(errors='replace'),
      stderr = err.decode(errors='replace'),
      returncode = proc.returncode,
    )
  return out.decode().splitlines()
async def get_version(name, conf, *, cache, **kwargs):
  """Return the regex capture (or full filename) matching a package's file list."""
  pkg = conf['pkgname']
  repo = conf.get('repo')
  if repo is not None:
    pkg = f'{repo}/{pkg}'
  dbpath = conf.get('dbpath', '/var/lib/pacman')

  regex = re.compile(conf['filename'])
  if regex.groups > 1:
    raise GetVersionError('multi-group regex')
  strip_dir = conf.get('strip_dir', False)

  for path in await cache.get((dbpath, pkg), get_files):
    # optionally match against the basename only
    candidate = path.rsplit('/', 1)[-1] if strip_dir else path
    m = regex.fullmatch(candidate)
    if m is None:
      continue
    captured = m.groups()
    return captured[0] if captured else candidate
  raise GetVersionError('no file matches specified regex')
nvchecker-2.15.1/nvchecker_source/android_sdk.py000066400000000000000000000042041462655643400217520ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017,2020 Chih-Hsuan Yen
import os
import re
from xml.etree import ElementTree
from nvchecker.api import session
_ANDROID_REPO_MANIFESTS = {
'addon': 'https://dl.google.com/android/repository/addon2-1.xml',
'package': 'https://dl.google.com/android/repository/repository2-1.xml',
}
# See tags in Android SDK XML manifests
_CHANNEL_MAP = {
'stable': 'channel-0',
'beta': 'channel-1',
'dev': 'channel-2',
'canary': 'channel-3',
}
async def _get_repo_manifest(repo):
  """Fetch and parse the XML manifest for ``repo`` ('addon' or 'package')."""
  url = _ANDROID_REPO_MANIFESTS[repo]
  res = await session.get(url)
  return ElementTree.fromstring(res.body.decode('utf-8'))
async def get_version(name, conf, *, cache, **kwargs):
  """Collect version strings for matching packages in an Android SDK repo.

  Returns a list of "major.minor.micro[.rN]" strings, one per matching
  package, to be filtered by the generic list options.
  """
  repo = conf['repo']
  pkg_path_prefix = conf['android_sdk']
  channels = [_CHANNEL_MAP[channel]
              for channel in conf.get('channel', 'stable').split(',')]

  repo_manifest = await cache.get(repo, _get_repo_manifest)

  versions = []

  for pkg in repo_manifest.findall('.//remotePackage'):
    if not pkg.attrib['path'].startswith(pkg_path_prefix):
      continue
    channelRef = pkg.find('./channelRef')
    # NOTE(review): assumes every remotePackage has a channelRef child; a
    # missing element would raise AttributeError here — confirm the schema.
    if channelRef.attrib['ref'] not in channels:
      continue
    for archive in pkg.findall('./archives/archive'):
      # skip archives built for a different host OS
      host_os = archive.find('./host-os')
      if host_os is not None and host_os.text != conf.get('host_os', 'linux'):
        continue
      archive_url = archive.find('./complete/url').text
      # revision
      rev = pkg.find('./revision')
      rev_strs = []
      for part in ('major', 'minor', 'micro'):
        part_node = rev.find('./' + part)
        if part_node is not None:
          rev_strs.append(part_node.text)
      # release number: the trailing "rN" of the archive filename, if any
      filename, ext = os.path.splitext(archive_url)
      rel_str = filename.rsplit('-')[-1]
      mobj = re.match(r'r\d+', rel_str)
      if mobj:
        rev_strs.append(rel_str)
      versions.append('.'.join(rev_strs))
      # A package suitable for the target host OS is found - skip remaining
      break

  return versions
nvchecker-2.15.1/nvchecker_source/anitya.py000066400000000000000000000006761462655643400207670ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2017-2020 lilydjwg , et al.
from nvchecker.api import RichResult
URL = 'https://release-monitoring.org/api/project/{pkg}'

async def get_version(name, conf, *, cache, **kwargs):
  """Query release-monitoring.org (Anitya) for a project's latest version."""
  pkg = conf.get('anitya')
  data = await cache.get_json(URL.format(pkg = pkg))
  project_url = f'https://release-monitoring.org/project/{data["id"]}/'
  return RichResult(
    version = data['version'],
    url = project_url,
  )
nvchecker-2.15.1/nvchecker_source/apt.py000066400000000000000000000123451462655643400202620ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.
from __future__ import annotations
import re
import asyncio
from typing import Dict, Tuple
import itertools
import functools
from collections import defaultdict
from nvchecker.api import (
session, GetVersionError, VersionResult,
RichResult, Entry, AsyncCache, KeyManager,
)
APT_RELEASE_URL = "%s/dists/%s/Release"
APT_PACKAGES_PATH = "%s/binary-%s/Packages%s"
APT_PACKAGES_URL = "%s/dists/%s/%s"
APT_PACKAGES_SUFFIX_PREFER = (".xz", ".gz", "")
DpkgVersion = Tuple[int, str, str]
def parse_version(s: str) -> DpkgVersion:
try:
epoch_str, rest = s.split(':', 1)
except ValueError:
epoch = 0
rest = s
else:
epoch = int(epoch_str)
try:
ver, rev = rest.split('-', 1)
except ValueError:
ver = rest
rev = ''
return epoch, ver, rev
def _compare_part(a: str, b: str) -> int:
sa = re.split(r'(\d+)', a)
sb = re.split(r'(\d+)', b)
for idx, (pa, pb) in enumerate(itertools.zip_longest(sa, sb)):
if pa is None:
return -1
elif pb is None:
return 1
if idx % 2 == 1:
ret = int(pa) - int(pb)
if ret != 0:
return ret
else:
if pa < pb:
return -1
elif pa > pb:
return 1
return 0
def compare_version_parsed(a: DpkgVersion, b: DpkgVersion) -> int:
ret = a[0] - b[0]
if ret != 0:
return ret
ret = _compare_part(a[1], b[1])
if ret != 0:
return ret
return _compare_part(a[2], b[2])
def compare_version(a: str, b: str) -> int:
va = parse_version(a)
vb = parse_version(b)
return compare_version_parsed(va, vb)
def _decompress_data(url: str, data: bytes) -> str:
if url.endswith(".xz"):
import lzma
data = lzma.decompress(data)
elif url.endswith(".gz"):
import gzip
data = gzip.decompress(data)
return data.decode('utf-8')
async def get_url(url: str) -> str:
  """Fetch ``url`` and return its body as text.

  Decompression/decoding runs in the default executor so potentially large
  payloads don't block the event loop.
  """
  res = await session.get(url)
  data = res.body
  loop = asyncio.get_running_loop()
  return await loop.run_in_executor(
    None, _decompress_data,
    url, data)
async def parse_packages(key: Tuple[AsyncCache, str]) -> Tuple[Dict[str, str], Dict[str, str], Dict[str, str]]:
  '''Download and parse an APT Packages file.

  Returns three mappings:
    * binary package name -> newest version
    * source package name -> newest version
    * binary package name -> its source package name (for the newest version)
  '''
  cache, url = key
  apt_packages = await cache.get(url, get_url) # type: ignore

  pkg_map = defaultdict(list)
  srcpkg_map = defaultdict(list)
  # "pkg/version" -> source package name. A plain dict (not defaultdict):
  # every key is assigned exactly once below, so a missing key in the final
  # lookup is a bug and should raise instead of yielding an empty list.
  pkg_to_src_map: Dict[str, str] = {}

  pkg = None
  srcpkg = None
  for line in apt_packages.split('\n'):
    if line.startswith("Package: "):
      pkg = line[9:]
    elif line.startswith("Source: "):
      srcpkg = line[8:]
    elif line.startswith("Version: "):
      version = line[9:]
      if pkg is not None:
        pkg_map[pkg].append(version)
        # without an explicit Source field the package is its own source
        pkg_to_src_map["%s/%s" % (pkg, version)] = srcpkg if srcpkg is not None else pkg
      if srcpkg is not None:
        srcpkg_map[srcpkg].append(version)
      # the Version field ends the stanza for our purposes; reset state
      pkg = srcpkg = None

  newest = functools.cmp_to_key(compare_version)
  pkg_map_max = {name: max(versions, key=newest)
                 for name, versions in pkg_map.items()}
  srcpkg_map_max = {name: max(versions, key=newest)
                    for name, versions in srcpkg_map.items()}
  pkg_to_src_map_max = {name: pkg_to_src_map["%s/%s" % (name, version)]
                        for name, version in pkg_map_max.items()}
  return pkg_map_max, srcpkg_map_max, pkg_to_src_map_max
async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  '''Check a package in an APT repository.

  conf keys: mirror and suite are required; exactly one of pkg / srcpkg
  selects the package (defaults to pkg = entry name); repo, arch,
  strip_release are optional.
  '''
  srcpkg = conf.get('srcpkg')
  pkg = conf.get('pkg')
  mirror = conf['mirror']
  suite = conf['suite']
  repo = conf.get('repo', 'main')
  arch = conf.get('arch', 'amd64')
  strip_release = conf.get('strip_release', False)

  if srcpkg and pkg:
    raise GetVersionError('Setting both srcpkg and pkg is ambiguous')
  elif not srcpkg and not pkg:
    pkg = name

  apt_release = await cache.get(
    APT_RELEASE_URL % (mirror, suite), get_url) # type: ignore
  # pick the best available compression variant listed in the Release file;
  # the leading space guards against matching a mere path suffix
  for suffix in APT_PACKAGES_SUFFIX_PREFER:
    packages_path = APT_PACKAGES_PATH % (repo, arch, suffix)
    if " " + packages_path in apt_release:
      break
  else:
    raise GetVersionError('Packages file not found in APT repository')

  # cache is part of the key so parse_packages can fetch through it
  pkg_map, srcpkg_map, pkg_to_src_map = await cache.get(
    (cache, APT_PACKAGES_URL % (mirror, suite, packages_path)), parse_packages) # type: ignore

  if pkg and pkg in pkg_map:
    version = pkg_map[pkg]
    changelog_name = pkg_to_src_map[pkg]
  elif srcpkg and srcpkg in srcpkg_map:
    version = srcpkg_map[srcpkg]
    changelog_name = srcpkg
  else:
    raise GetVersionError('package not found in APT repository')

  # Get Changelogs field from the Release file
  changelogs_url = None
  for line in apt_release.split('\n'):
    if line.startswith('Changelogs: '):
      changelogs_url = line[12:]
      break

  # Build the changelog URL (see https://wiki.debian.org/DebianRepository/Format#Changelogs for spec)
  changelog = None
  if changelogs_url is not None and changelogs_url != 'no':
    # lib* packages are sectioned by their first four letters, others by one
    changelog_section = changelog_name[:4] if changelog_name.startswith('lib') else changelog_name[:1]
    changelog = changelogs_url.replace('@CHANGEPATH@', f'{repo}/{changelog_section}/{changelog_name}/{changelog_name}_{version}')

  # strip after building the changelog URL, which needs the full version
  if strip_release:
    version = version.split("-")[0]

  if changelog is not None:
    return RichResult(
      version = version,
      url = changelog,
    )
  else:
    return version
nvchecker-2.15.1/nvchecker_source/archpkg.py000066400000000000000000000021011462655643400211020ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
from nvchecker.api import session, RichResult, GetVersionError
URL = 'https://www.archlinux.org/packages/search/json/'
async def request(pkg):
  '''Query the Arch Linux package-search API for one package name.'''
  response = await session.get(URL, params={"name": pkg})
  return response.json()
async def get_version(name, conf, *, cache, **kwargs):
  '''Check an official Arch Linux package.

  conf keys:
    archpkg: package name (defaults to the entry name)
    strip_release: drop the pkgrel (and, with "provided", the -N suffix)
    provided: report the version this package declares for that provides entry
  '''
  pkg = conf.get('archpkg') or name
  strip_release = conf.get('strip_release', False)
  provided = conf.get('provided')

  data = await cache.get(pkg, request)

  if not data['results']:
    raise GetVersionError('Arch package not found')

  # prefer the first non-testing repository entry
  r = [r for r in data['results'] if r['repo'] != 'testing'][0]

  if provided:
    provides = dict(x.split('=', 1) for x in r['provides'] if '=' in x)
    version = provides.get(provided, None)
    if version is None:
      # previously this fell through with version = None, crashing on
      # .split() below or silently returning a None version
      raise GetVersionError('provided package not found')
    if strip_release:
      version = version.split('-', 1)[0]
  elif strip_release:
    version = r['pkgver']
  else:
    version = r['pkgver'] + '-' + r['pkgrel']

  return RichResult(
    version = version,
    url = f'https://archlinux.org/packages/{r["repo"]}/{r["arch"]}/{r["pkgname"]}/',
  )
nvchecker-2.15.1/nvchecker_source/aur.py000066400000000000000000000056521462655643400202700ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020,2024 lilydjwg , et al.
from datetime import datetime, timezone
import asyncio
from typing import Iterable, Dict, List, Tuple, Any, Optional
from nvchecker.api import (
session, GetVersionError, VersionResult, RichResult,
Entry, BaseWorker, RawResult,
)
AUR_URL = 'https://aur.archlinux.org/rpc/'
class AurResults:
  '''Batched AUR RPC "info" queries with a cross-batch result cache.'''
  # package name -> info dict, or None for a confirmed-missing package
  cache: Dict[str, Optional[Dict[str, Any]]]

  def __init__(self) -> None:
    self.cache = {}

  async def get_multiple(
    self,
    aurnames: Iterable[str],
  ) -> Dict[str, Optional[Dict[str, Any]]]:
    '''Return info dicts for aurnames; None marks a missing package.'''
    # only ask the RPC about names we haven't resolved yet
    to_query = {name for name in aurnames if name not in self.cache}
    params = [('v', '5'), ('type', 'info')]
    params.extend(('arg[]', name) for name in to_query)
    res = await session.get(AUR_URL, params=params)
    data = res.json()
    new_results = {r['Name']: r for r in data['results']}

    cache = self.cache
    cache.update(new_results)
    # only names we actually queried can be concluded missing; previously
    # this pass also reset already-cached names (excluded from the request,
    # hence absent from new_results) back to None
    cache.update(
      (name, None)
      for name in to_query - new_results.keys()
    )

    return {name: cache[name] for name in aurnames
            if name in cache}
class Worker(BaseWorker):
  # https://wiki.archlinux.org/index.php/Aurweb_RPC_interface#Limitations
  batch_size = 100

  async def run(self) -> None:
    '''Split all tasks into RPC-sized batches and resolve them concurrently.'''
    tasks = self.tasks
    # ceil(len(tasks) / batch_size) batches
    n_batch, left = divmod(len(tasks), self.batch_size)
    if left > 0:
      n_batch += 1

    # shared across batches so each AUR name is fetched at most once
    aur_results = AurResults()

    ret = []
    for i in range(n_batch):
      s = i * self.batch_size
      batch = tasks[s : s+self.batch_size]
      fu = self._run_batch(batch, aur_results)
      ret.append(fu)
    await asyncio.gather(*ret)

  async def _run_batch(
    self,
    batch: List[Tuple[str, Entry]],
    aur_results: AurResults,
  ) -> None:
    '''Resolve one batch and push RawResults onto the result queue.'''
    task_by_name: Dict[str, Entry] = dict(self.tasks)
    # task_sem bounds how many batches hit the AUR at the same time
    async with self.task_sem:
      results = await _run_batch_impl(batch, aur_results)
      for name, version in results.items():
        r = RawResult(name, version, task_by_name[name])
        await self.result_q.put(r)
async def _run_batch_impl(
  batch: List[Tuple[str, Entry]],
  aur_results: AurResults,
) -> Dict[str, VersionResult]:
  '''Resolve a batch of entries against the AUR RPC.

  Returns a mapping from entry name to a RichResult, or to a
  GetVersionError for packages missing from the AUR.
  '''
  aurnames = {conf.get('aur', name) for name, conf in batch}
  results = await aur_results.get_multiple(aurnames)

  ret: Dict[str, VersionResult] = {}

  for name, conf in batch:
    aurname = conf.get('aur', name)
    use_last_modified = conf.get('use_last_modified', False)
    strip_release = conf.get('strip_release', False)

    result = results.get(aurname)

    if result is None:
      ret[name] = GetVersionError('AUR upstream not found')
      continue

    version = result['Version']
    if use_last_modified:
      # append the last-modified timestamp so rebuilds are detected too
      dt = datetime.fromtimestamp(result['LastModified'], timezone.utc)
      version += '-' + dt.strftime('%Y%m%d%H%M%S')
    if strip_release and '-' in version:
      version = version.rsplit('-', 1)[0]

    ret[name] = RichResult(
      version = version,
      # link to the AUR package actually queried: it may differ from the
      # entry name when the 'aur' option is set (previously used `name`)
      url = f'https://aur.archlinux.org/packages/{aurname}',
    )

  return ret
nvchecker-2.15.1/nvchecker_source/bitbucket.py000066400000000000000000000042261462655643400214510ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
from typing import Any, List, Union
from urllib.parse import urlencode
from nvchecker.api import VersionResult, RichResult, Entry, AsyncCache
# doc: https://developer.atlassian.com/cloud/bitbucket/rest/api-group-commits/#api-repositories-workspace-repo-slug-commits-get
BITBUCKET_URL = 'https://bitbucket.org/api/2.0/repositories/%s/commits/%s'
# doc: https://developer.atlassian.com/cloud/bitbucket/rest/api-group-refs/#api-repositories-workspace-repo-slug-refs-tags-get
BITBUCKET_MAX_TAG = 'https://bitbucket.org/api/2.0/repositories/%s/refs/tags'
async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache,
  **kwargs: Any,
) -> VersionResult:
  '''Check a Bitbucket repo by latest commit date, sorted tags, or max tag.'''
  repo = conf['bitbucket']
  br = conf.get('branch', '')
  use_max_tag = conf.get('use_max_tag', False)
  use_sorted_tags = conf.get('use_sorted_tags', False)

  if use_sorted_tags or use_max_tag:
    # only request the fields we use, plus the pagination cursor
    parameters = {'fields': 'values.name,values.links.html.href,next'}

    if use_sorted_tags:
      parameters['sort'] = conf.get('sort', '-target.date')
      if 'query' in conf:
        parameters['q'] = conf['query']

  if use_sorted_tags:
    url = BITBUCKET_MAX_TAG % repo
    url += '?' + urlencode(parameters)

    # server-side sorting: the first page already holds the newest tags
    return await _get_tags(url, max_page=1, cache=cache)

  elif use_max_tag:
    url = BITBUCKET_MAX_TAG % repo
    url += '?' + urlencode(parameters)

    max_page = conf.get('max_page', 3)
    return await _get_tags(url, max_page=max_page, cache=cache)

  else:
    url = BITBUCKET_URL % (repo, br)
    data = await cache.get_json(url)

    # latest commit date as YYYYMMDD
    return RichResult(
      version = data['values'][0]['date'].split('T', 1)[0].replace('-', ''),
      url = data['values'][0]['links']['html']['href'],
    )
async def _get_tags(
  url: str, *,
  max_page: int,
  cache: AsyncCache,
) -> VersionResult:
  '''Collect tag names (with their web links) across up to max_page pages.'''
  results: List[Union[str, RichResult]] = []

  for _ in range(max_page):
    data = await cache.get_json(url)
    for tag in data['values']:
      results.append(RichResult(
        version = tag['name'],
        url = tag['links']['html']['href'],
      ))
    # follow the pagination cursor until the API stops providing one
    if 'next' not in data:
      break
    url = data['next']

  return results
nvchecker-2.15.1/nvchecker_source/cmd.py000066400000000000000000000017511462655643400202400ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import asyncio
import structlog
from nvchecker.api import GetVersionError
logger = structlog.get_logger(logger_name=__name__)
async def run_cmd(cmd: str) -> str:
  '''Run a shell command and return its stripped stdout as the version.

  Raises GetVersionError if the command exits non-zero or prints nothing.
  '''
  logger.debug('running cmd', cmd=cmd)
  p = await asyncio.create_subprocess_shell(
    cmd,
    stdout=asyncio.subprocess.PIPE,
    stderr=asyncio.subprocess.PIPE,
  )

  output, error = await p.communicate()
  # latin1 maps every byte, so decoding never fails on arbitrary output;
  # stderr uses the default codec with replacement since it's only logged
  output_s = output.strip().decode('latin1')
  error_s = error.strip().decode(errors='replace')
  if p.returncode != 0:
    raise GetVersionError(
      'command exited with error',
      cmd=cmd, error=error_s,
      returncode=p.returncode)
  elif not output_s:
    raise GetVersionError(
      'command exited without output',
      cmd=cmd, error=error_s,
      returncode=p.returncode)
  else:
    return output_s
async def get_version(
  name, conf, *, cache, keymanager=None
):
  '''Use the output of a user-configured shell command as the version.'''
  command = conf['cmd']
  return await cache.get(command, run_cmd)
nvchecker-2.15.1/nvchecker_source/combiner.py000066400000000000000000000010331462655643400212640ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2021 lilydjwg , et al.
import asyncio
import string
from nvchecker.api import entry_waiter
class CombineFormat(string.Template):
  # restrict placeholders to $1, $2, ... — the Nth entry of the "from" list
  idpattern = '[0-9]+'
async def get_version(
  name, conf, *, cache, keymanager=None
):
  '''Combine other entries' resolved versions via a Template format string.'''
  template = CombineFormat(conf['format'])
  sources = conf['from']
  waiter = entry_waiter.get()
  # wait for every referenced entry to be resolved, preserving order
  versions = await asyncio.gather(*(waiter.wait(entry) for entry in sources))
  mapping = {str(idx + 1): ver for idx, ver in enumerate(versions)}
  return template.substitute(mapping)
nvchecker-2.15.1/nvchecker_source/container.py000066400000000000000000000130351462655643400214550ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Chih-Hsuan Yen
from typing import Dict, List, NamedTuple, Optional, Tuple
from urllib.request import parse_http_list
from urllib.parse import urljoin
import json
from nvchecker.api import session, HTTPError
class AuthInfo(NamedTuple):
  # OAuth2 'service' parameter; some registries (e.g. ghcr.io) omit it
  service: Optional[str]
  # token endpoint URL taken from the WWW-Authenticate header
  realm: str
def parse_www_authenticate_header(header: str) -> Tuple[str, Dict[str, str]]:
  '''
  Parse WWW-Authenticate header used in OAuth2 authentication for container
  registries. This is NOT RFC-compliant!

  Simplified from http.parse_www_authenticate_header in Werkzeug (BSD license)
  '''
  auth_type, auth_info = header.split(None, 1)
  params: Dict[str, str] = {}
  for item in parse_http_list(auth_info):
    key, value = item.split("=", 1)
    # strip a surrounding pair of double quotes, if present
    if value[:1] == value[-1:] == '"':
      value = value[1:-1]
    params[key] = value
  return auth_type, params
# Inspired by https://stackoverflow.com/a/51921869
# Reference: https://github.com/containers/image/blob/v5.6.0/docker/docker_client.go
class UnsupportedAuthenticationError(NotImplementedError):
  '''Raised when the registry wants anything other than Bearer auth.'''
  def __init__(self):
    super().__init__('Only Bearer authentication supported for now')
async def get_registry_auth_info(registry_host: str) -> AuthInfo:
  '''Probe /v2/ and extract Bearer-auth parameters from the expected 401.'''
  auth_service = auth_realm = None

  try:
    await session.get(f'https://{registry_host}/v2/')
    raise UnsupportedAuthenticationError # No authentication needed
  except HTTPError as e:
    if e.code != 401:
      raise
    # the 401 response advertises the token endpoint via WWW-Authenticate
    auth_type, auth_info = parse_www_authenticate_header(e.response.headers['WWW-Authenticate'])
    if auth_type.lower() != 'bearer':
      raise UnsupportedAuthenticationError

    # Although 'service' is needed as per https://docs.docker.com/registry/spec/auth/token/,
    # ghcr.io (GitHub container registry) does not provide it
    auth_service = auth_info.get('service')
    auth_realm = auth_info['realm']

  return AuthInfo(auth_service, auth_realm)
async def get_container_tags(info: Tuple[str, str, AuthInfo]) -> List[str]:
  '''List all tags of an image, following pagination via Link headers.'''
  image_path, registry_host, auth_info = info
  token = await get_auth_token(auth_info, image_path)

  tags: List[str] = []
  url = f'https://{registry_host}/v2/{image_path}/tags/list'
  while True:
    res = await session.get(url, headers={
      'Authorization': f'Bearer {token}',
      'Accept': 'application/json',
    })
    tags.extend(res.json()['tags'])
    link = res.headers.get('Link')
    if link is None:
      break
    # the Link header carries a relative next-page URL
    url = urljoin(url, parse_next_link(link))

  return tags
async def get_auth_token(auth_info, image_path):
  '''Fetch a Bearer token scoped to pulling the given image.'''
  params = {
    'scope': f'repository:{image_path}:pull',
  }
  # 'service' may be absent for some registries (e.g. ghcr.io)
  if auth_info.service:
    params['service'] = auth_info.service
  res = await session.get(auth_info.realm, params=params)
  return res.json()['token']
def parse_next_link(value: str) -> str:
  '''Extract the URL from a '<url>; rel="next"' pagination Link header.'''
  suffix = '>; rel="next"'
  if not value.endswith(suffix):
    raise ValueError(value)
  # drop the leading '<' and the trailing '>; rel="next"'
  return value[1:-len(suffix)]
async def get_container_tag_update_time(info: Tuple[str, str, str, AuthInfo]):
  '''
  Find the update time of a container tag.

  In fact, it's the creation time of the image ID referred by the tag. Tag itself does not have any update time.
  '''
  image_path, image_tag, registry_host, auth_info = info
  token = await get_auth_token(auth_info, image_path)

  # HTTP headers
  headers = {
    'Authorization': f'Bearer {token}',
    # Prefer Image Manifest Version 2, Schema 2: https://distribution.github.io/distribution/spec/manifest-v2-2/
    'Accept': ', '.join([
      'application/vnd.oci.image.manifest.v1+json',
      'application/vnd.oci.image.index.v1+json',
      'application/vnd.docker.distribution.manifest.v2+json',
      'application/vnd.docker.distribution.manifest.list.v2+json',
      'application/json',
    ]),
  }

  # Get tag manifest
  url = f'https://{registry_host}/v2/{image_path}/manifests/{image_tag}'
  res = await session.get(url, headers=headers)
  data = res.json()
  # Schema 1 returns the creation time in the response
  if data['schemaVersion'] == 1:
    # v1Compatibility is an embedded JSON string, hence the extra parse
    return json.loads(data['history'][0]['v1Compatibility'])['created']

  # For schema 2, we have to fetch the config's blob
  # For multi-arch images, multiple manifests are bounded with the same tag. We should choose one and then request
  # the manifest's detail
  if data.get('manifests'):
    # It's quite hard to find the manifest matching with current CPU architecture and system.
    # For now we just choose the first and it should probably work for most cases
    image_digest = data['manifests'][0]['digest']
    url = f'https://{registry_host}/v2/{image_path}/manifests/{image_digest}'
    res = await session.get(url, headers=headers)
    data = res.json()

  # the config blob holds the image metadata, including its creation time
  digest = data['config']['digest']
  url = f'https://{registry_host}/v2/{image_path}/blobs/{digest}'
  res = await session.get(url, headers=headers)
  data = res.json()
  return data['created']
async def get_version(name, conf, *, cache, **kwargs):
  '''Return a tag's update time (if a tag is given) or the image's tag list.'''
  image_path = conf.get('container', name)
  image_tag = None

  # image tag is optional; it follows the first ':'
  if ':' in image_path:
    image_path, image_tag = image_path.split(':', 1)

  registry_host = conf.get('registry', 'docker.io')
  if registry_host == 'docker.io':
    # docker.io's API is actually served from registry-1.docker.io
    registry_host = 'registry-1.docker.io'

  auth_info = await cache.get(registry_host, get_registry_auth_info)

  if image_tag:
    return await cache.get(
      (image_path, image_tag, registry_host, auth_info),
      get_container_tag_update_time)

  return await cache.get(
    (image_path, registry_host, auth_info),
    get_container_tags)
nvchecker-2.15.1/nvchecker_source/cpan.py000066400000000000000000000007111462655643400204110ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
from nvchecker.api import RichResult
# Using metacpan
CPAN_URL = 'https://fastapi.metacpan.org/release/%s'
async def get_version(name, conf, *, cache, **kwargs):
  '''Look up a CPAN release via the metacpan API.'''
  release = conf.get('cpan', name)
  data = await cache.get_json(CPAN_URL % release)
  return RichResult(
    version = str(data['version']),
    url = f'https://metacpan.org/release/{data["author"]}/{data["name"]}',
  )
nvchecker-2.15.1/nvchecker_source/cran.py000066400000000000000000000014621462655643400204170ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2022 Pekka Ristola , et al.
from nvchecker.api import session, RichResult, GetVersionError
CRAN_URL = 'https://cran.r-project.org/package=%s/DESCRIPTION'
VERSION_FIELD = 'Version: '
async def request(pkg):
  '''Fetch the raw DESCRIPTION file of a CRAN package.'''
  res = await session.get(CRAN_URL % pkg)
  return res.body.decode('utf-8', errors='ignore')
async def get_version(name, conf, *, cache, **kwargs):
  '''Read the Version field from a CRAN package's DESCRIPTION file.'''
  package = conf.get('cran', name)
  desc = await cache.get(package, request)

  version = next(
    (line[len(VERSION_FIELD):] for line in desc.splitlines()
     if line.startswith(VERSION_FIELD)),
    None)
  if version is None:
    raise GetVersionError('Invalid DESCRIPTION file')

  return RichResult(
    version = version,
    url = f'https://cran.r-project.org/web/packages/{package}/',
  )
nvchecker-2.15.1/nvchecker_source/cratesio.py000066400000000000000000000007401462655643400213030ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
from nvchecker.api import RichResult
API_URL = 'https://crates.io/api/v1/crates/%s'
async def get_version(name, conf, *, cache, **kwargs):
  '''Return the newest non-yanked version of a crate on crates.io.'''
  crate = conf.get('cratesio') or name
  data = await cache.get_json(API_URL % crate)
  # versions are returned newest-first; skip yanked releases
  usable = [v['num'] for v in data['versions'] if not v['yanked']]
  version = usable[0]
  return RichResult(
    version = version,
    url = f'https://crates.io/crates/{crate}/{version}',
  )
nvchecker-2.15.1/nvchecker_source/debianpkg.py000066400000000000000000000015461462655643400214230ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017 Felix Yan , et al.
from nvchecker.api import RichResult, GetVersionError
URL = 'https://sources.debian.org/api/src/%(pkgname)s/?suite=%(suite)s'
async def get_version(name, conf, *, cache, **kwargs):
  '''Check a Debian source package via sources.debian.org.'''
  pkg = conf.get('debianpkg') or name
  strip_release = conf.get('strip_release', False)
  suite = conf.get('suite') or "sid"
  url = URL % {"pkgname": pkg, "suite": suite}
  data = await cache.get_json(url)

  if not data.get('versions'):
    raise GetVersionError('Debian package not found')

  newest = data['versions'][0]
  version = newest['version']
  if strip_release:
    version = version.split("-")[0]

  return RichResult(
    version = version,
    # link to the full (unstripped) version's source page
    url = f'https://sources.debian.org/src/{data["package"]}/{newest["version"]}/',
  )
nvchecker-2.15.1/nvchecker_source/gems.py000066400000000000000000000007251462655643400204300ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
from nvchecker.api import RichResult
GEMS_URL = 'https://rubygems.org/api/v1/versions/%s.json'
async def get_version(name, conf, *, cache, **kwargs):
  '''Return every published version of a RubyGems package.'''
  gem = conf.get('gems', name)
  data = await cache.get_json(GEMS_URL % gem)
  results = []
  for item in data:
    results.append(RichResult(
      version = item['number'],
      url = f'https://rubygems.org/gems/{gem}/versions/{item["number"]}',
    ))
  return results
nvchecker-2.15.1/nvchecker_source/git.py000066400000000000000000000020061462655643400202520ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.
from .cmd import run_cmd
from nvchecker.api import RichResult
async def get_version(
  name, conf, *, cache, keymanager=None
):
  '''Check a remote git repository via `git ls-remote`.

  With use_commit, returns the commit hash of a branch (or HEAD);
  otherwise returns one result per tag.
  '''
  git = conf['git']

  use_commit = conf.get('use_commit', False)
  if use_commit:
    ref = conf.get('branch')
    if ref is None:
      ref = 'HEAD'
      gitref = None
    else:
      ref = 'refs/heads/' + ref
      gitref = ref

    cmd = f"git ls-remote {git} {ref}"
    data = await cache.get(cmd, run_cmd)
    # first whitespace-separated field of the output is the commit hash
    version = data.split(None, 1)[0]
    return RichResult(
      version = version,
      revision = version,
      gitref = gitref,
    )
  else:
    cmd = f"git ls-remote --tags --refs {git}"
    data = await cache.get(cmd, run_cmd)
    versions = []
    for line in data.splitlines():
      # each line looks like '<sha>\trefs/tags/<tag>'
      revision, version = line.split("\trefs/tags/", 1)
      versions.append(RichResult(
        version = version,
        revision = revision,
        gitref = f"refs/tags/{version}",
      ))
    return versions
nvchecker-2.15.1/nvchecker_source/gitea.py000066400000000000000000000027241462655643400205670ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
from __future__ import annotations
import urllib.parse
GITEA_URL = 'https://%s/api/v1/repos/%s/commits'
GITEA_MAX_TAG = 'https://%s/api/v1/repos/%s/tags'
from nvchecker.api import (
VersionResult, RichResult, Entry,
AsyncCache, KeyManager,
)
async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
) -> VersionResult:
  '''Check a Gitea repository, by latest commit date or by its tags.'''
  repo = urllib.parse.quote(conf['gitea'])
  br = conf.get('branch')
  host = conf.get('host', 'gitea.com')
  use_max_tag = conf.get('use_max_tag', False)

  if use_max_tag:
    url = GITEA_MAX_TAG % (host, repo)
  else:
    url = GITEA_URL % (host, repo)
    # the sha parameter narrows the commits endpoint to one branch
    if br:
      url += '?sha=' + br

  # Load token from config
  token = conf.get('token')
  # Load token from keyman
  if token is None:
    token = keymanager.get_key(host.lower(), 'gitea_' + host.lower())

  # Set private token if token exists.
  headers = {}
  if token:
    headers["Authorization"] = f'token {token}'

  data = await cache.get_json(url, headers = headers)
  if use_max_tag:
    return [
      RichResult(
        version = tag['name'],
        revision = tag['id'],
        url = f'https://{host}/{conf["gitea"]}/releases/tag/{tag["name"]}',
      ) for tag in data
    ]
  else:
    # latest commit's date as YYYYMMDD
    return RichResult(
      version = data[0]['commit']['committer']['date'].split('T', 1)[0].replace('-', ''),
      revision = data[0]['sha'],
      url = data[0]['html_url'],
    )
nvchecker-2.15.1/nvchecker_source/github.py000066400000000000000000000153421462655643400207600ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020, 2024 lilydjwg , et al.
import time
from urllib.parse import urlencode
from typing import List, Tuple, Union, Optional
import asyncio
import structlog
from nvchecker.api import (
VersionResult, Entry, AsyncCache, KeyManager,
HTTPError, session, RichResult, GetVersionError,
)
logger = structlog.get_logger(logger_name=__name__)
ALLOW_REQUEST = None
RATE_LIMITED_ERROR = False
GITHUB_URL = 'https://api.%s/repos/%s/commits'
GITHUB_LATEST_RELEASE = 'https://api.%s/repos/%s/releases/latest'
# https://developer.github.com/v3/git/refs/#get-all-references
GITHUB_MAX_TAG = 'https://api.%s/repos/%s/git/refs/tags'
GITHUB_GRAPHQL_URL = 'https://api.%s/graphql'
async def get_version(name, conf, **kwargs):
  '''Wrapper around get_version_real with global rate-limit coordination.'''
  global RATE_LIMITED_ERROR, ALLOW_REQUEST

  # after a hard rate limit, fail every subsequent entry fast
  if RATE_LIMITED_ERROR:
    raise RuntimeError('rate limited')

  if ALLOW_REQUEST is None:
    # created lazily so the Event binds to the running event loop
    ALLOW_REQUEST = asyncio.Event()
    ALLOW_REQUEST.set()

  for _ in range(2): # retry once
    try:
      await ALLOW_REQUEST.wait()
      return await get_version_real(name, conf, **kwargs)
    except HTTPError as e:
      if e.code in [403, 429]:
        if n := check_ratelimit(e, name):
          # pause all GitHub requests until the advertised wait has passed
          ALLOW_REQUEST.clear()
          await asyncio.sleep(n+1)
          ALLOW_REQUEST.set()
          continue
        RATE_LIMITED_ERROR = True
      raise
QUERY_LATEST_TAG = '''
{{
repository(name: "{name}", owner: "{owner}") {{
refs(refPrefix: "refs/tags/", first: 1,
query: "{query}",
orderBy: {{field: TAG_COMMIT_DATE, direction: DESC}}) {{
edges {{
node {{
name
target {{
oid
}}
}}
}}
}}
}}
}}
'''
QUERY_LATEST_RELEASE_WITH_PRERELEASES = '''
{{
repository(name: "{name}", owner: "{owner}") {{
releases(first: 1, orderBy: {{field: CREATED_AT, direction: DESC}}) {{
edges {{
node {{
name
url
tag {{
name
}}
tagCommit {{
oid
}}
}}
}}
}}
}}
}}
'''
async def get_latest_tag(key: Tuple[str, str, str, str]) -> RichResult:
  '''Query the GraphQL API for the newest tag matching `query`.'''
  host, repo, query, token = key
  owner, reponame = repo.split('/')
  q = QUERY_LATEST_TAG.format(
    owner = owner,
    name = reponame,
    query = query,
  )
  res = await session.post(
    GITHUB_GRAPHQL_URL % host,
    headers = {
      'Authorization': f'bearer {token}',
      'Content-Type': 'application/json',
    },
    json = {'query': q},
  )
  refs = res.json()['data']['repository']['refs']['edges']
  if not refs:
    raise GetVersionError('no tag found')

  node = refs[0]['node']
  tag_name = node['name']
  return RichResult(
    version = tag_name,
    gitref = f"refs/tags/{tag_name}",
    revision = node['target']['oid'],
    url = f'https://github.com/{repo}/releases/tag/{tag_name}',
  )
async def get_latest_release_with_prereleases(key: Tuple[str, str, str]) -> RichResult:
  '''Query the GraphQL API for the most recently created release (incl. prereleases).'''
  host, repo, token = key
  owner, reponame = repo.split('/')
  q = QUERY_LATEST_RELEASE_WITH_PRERELEASES.format(
    owner = owner,
    name = reponame,
  )
  res = await session.post(
    GITHUB_GRAPHQL_URL % host,
    headers = {
      'Authorization': f'bearer {token}',
      'Content-Type': 'application/json',
    },
    json = {'query': q},
  )
  releases = res.json()['data']['repository']['releases']['edges']
  if not releases:
    raise GetVersionError('no release found')

  node = releases[0]['node']
  return RichResult(
    version = node['name'],
    gitref = node['tag']['name'],
    revision = node['tagCommit']['oid'],
    url = node['url'],
  )
async def get_version_real(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  '''Check a GitHub repository by commits, releases or tags.'''
  repo = conf['github']
  host = conf.get('host', "github.com")

  # Load token from config
  token = conf.get('token')
  # Load token from keyman
  if token is None:
    token = keymanager.get_key(host.lower(), 'github')

  use_latest_tag = conf.get('use_latest_tag', False)
  if use_latest_tag:
    # the GraphQL API requires authentication
    if not token:
      raise GetVersionError('token not given but it is required')

    query = conf.get('query', '')
    return await cache.get((host, repo, query, token), get_latest_tag) # type: ignore

  use_latest_release = conf.get('use_latest_release', False)
  include_prereleases = conf.get('include_prereleases', False)
  if use_latest_release and include_prereleases:
    # the GraphQL API requires authentication
    if not token:
      raise GetVersionError('token not given but it is required')

    return await cache.get((host, repo, token), get_latest_release_with_prereleases) # type: ignore

  br = conf.get('branch')
  path = conf.get('path')
  use_max_tag = conf.get('use_max_tag', False)
  if use_latest_release:
    url = GITHUB_LATEST_RELEASE % (host, repo)
  elif use_max_tag:
    url = GITHUB_MAX_TAG % (host, repo)
  else:
    # commits endpoint, optionally narrowed to a branch and/or a path
    url = GITHUB_URL % (host, repo)
    parameters = {}
    if br:
      parameters['sha'] = br
    if path:
      parameters['path'] = path
    url += '?' + urlencode(parameters)
  headers = {
    'Accept': 'application/vnd.github.quicksilver-preview+json',
  }
  if token:
    headers['Authorization'] = f'token {token}'

  data = await cache.get_json(url, headers = headers)

  if use_max_tag:
    tags: List[Union[str, RichResult]] = [
      RichResult(
        version = ref['ref'].split('/', 2)[-1],
        gitref = ref['ref'],
        revision = ref['object']['sha'],
        url = f'https://github.com/{repo}/releases/tag/{ref["ref"].split("/", 2)[-1]}',
      ) for ref in data
    ]
    if not tags:
      raise GetVersionError('No tag found in upstream repository.')
    return tags

  if use_latest_release:
    if 'tag_name' not in data:
      raise GetVersionError('No release found in upstream repository.')

    return RichResult(
      version = data['tag_name'],
      gitref = f"refs/tags/{data['tag_name']}",
      url = data['html_url'],
    )

  else:
    return RichResult(
      # YYYYMMDD.HHMMSS
      version = data[0]['commit']['committer']['date'].rstrip('Z').replace('-', '').replace(':', '').replace('T', '.'),
      revision = data[0]['sha'],
      url = data[0]['html_url'],
    )
def check_ratelimit(exc: HTTPError, name: str) -> Optional[int]:
  '''Decide how to handle a 403/429 from GitHub.

  Returns seconds to wait before retrying, or None when the rate limit is
  exhausted (the caller then fails permanently); re-raises unrelated errors.
  '''
  res = exc.response
  if not res:
    raise exc

  if v := res.headers.get('retry-after'):
    n = int(v)
    logger.warning('retry-after', n=n)
    return n

  # default -1 is used to re-raise the exception
  n = int(res.headers.get('X-RateLimit-Remaining', -1))
  if n == 0:
    # the reset header may be absent; default to 0 rather than crash on int(None)
    reset = int(res.headers.get('X-RateLimit-Reset', 0))
    logger.error(f'rate limited, resetting at {time.ctime(reset)}. '
                 'Or get an API token to increase the allowance if not yet',
                 name = name,
                 reset = reset)
    return None

  raise exc
nvchecker-2.15.1/nvchecker_source/gitlab.py000066400000000000000000000041161462655643400207350ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import urllib.parse
import structlog
from nvchecker.api import (
VersionResult, RichResult, Entry,
AsyncCache, KeyManager, TemporaryError,
)
GITLAB_URL = 'https://%s/api/v4/projects/%s/repository/commits'
GITLAB_MAX_TAG = 'https://%s/api/v4/projects/%s/repository/tags'
logger = structlog.get_logger(logger_name=__name__)
async def get_version(name, conf, **kwargs):
  # NOTE(review): when check_ratelimit only logs (rate limit exhausted),
  # this falls through and implicitly returns None — confirm the core
  # handles a None version result as intended
  try:
    return await get_version_real(name, conf, **kwargs)
  except TemporaryError as e:
    check_ratelimit(e, name)
async def get_version_real(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  '''Check a GitLab project, by latest commit date or by its tags.'''
  repo = urllib.parse.quote_plus(conf['gitlab'])
  br = conf.get('branch')
  host = conf.get('host', "gitlab.com")
  use_max_tag = conf.get('use_max_tag', False)

  if use_max_tag:
    url = GITLAB_MAX_TAG % (host, repo)
  else:
    url = GITLAB_URL % (host, repo)
    # narrow the commits endpoint to one branch
    if br:
      url += '?ref_name=%s' % br

  # Load token from config
  token = conf.get('token')
  # Load token from keyman
  if token is None:
    token = keymanager.get_key(host.lower(), 'gitlab_' + host.lower())

  # Set private token if token exists.
  headers = {}
  if token:
    headers["PRIVATE-TOKEN"] = token

  data = await cache.get_json(url, headers = headers)
  if use_max_tag:
    return [
      RichResult(
        version = tag['name'],
        revision = tag['commit']['id'],
        url = f'https://{host}/{conf["gitlab"]}/-/tags/{tag["name"]}',
      ) for tag in data
    ]
  else:
    # latest commit's creation date as YYYYMMDD
    return RichResult(
      version = data[0]['created_at'].split('T', 1)[0].replace('-', ''),
      revision = data[0]['id'],
      url = data[0]['web_url'],
    )
def check_ratelimit(exc, name):
  '''Log and swallow the error if it is a rate limit; otherwise re-raise.

  Note: the bare `raise` statements re-raise the exception currently being
  handled in the caller's except block — this function is only ever called
  from there.
  '''
  res = exc.response
  if not res:
    raise

  # default -1 is used to re-raise the exception
  n = int(res.headers.get('RateLimit-Remaining', -1))
  if n == 0:
    logger.error('gitlab rate limited. Wait some time '
                 'or get an API token to increase the allowance if not yet',
                 name = name)
  else:
    raise
nvchecker-2.15.1/nvchecker_source/go.py000066400000000000000000000020021462655643400200700ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2024 bgme .
from lxml import html
from nvchecker.api import (
RichResult, Entry, AsyncCache, KeyManager,
session, GetVersionError,
)
GO_PKG_URL = 'https://pkg.go.dev/{pkg}?tab=versions'
GO_PKG_VERSION_URL = 'https://pkg.go.dev/{pkg}@{version}'
async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> RichResult:
  '''Cache lookups keyed on the full entry configuration.'''
  cache_key = tuple(sorted(conf.items()))
  return await cache.get(cache_key, get_version_impl)
async def get_version_impl(info) -> RichResult:
  '''Scrape pkg.go.dev's versions tab for the newest version of a Go package.'''
  conf = dict(info)
  pkg_name = conf.get('go')

  res = await session.get(GO_PKG_URL.format(pkg=pkg_name))
  doc = html.fromstring(res.body.decode())

  version_tags = doc.xpath("//div[@class='Version-tag']/a/text()")
  try:
    latest = version_tags[0]
  except IndexError:
    # page layout changed or the package doesn't exist
    raise GetVersionError("parse error", pkg_name=pkg_name)

  return RichResult(
    version = latest,
    url = GO_PKG_VERSION_URL.format(pkg=pkg_name, version=latest),
  )
nvchecker-2.15.1/nvchecker_source/hackage.py000066400000000000000000000007371462655643400210630ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
from nvchecker.api import RichResult
HACKAGE_URL = 'https://hackage.haskell.org/package/%s/preferred.json'
async def get_version(name, conf, *, cache, **kwargs):
  '''Return the latest preferred (normal) version of a Hackage package.'''
  pkg = conf.get('hackage', name)
  data = await cache.get_json(HACKAGE_URL % pkg)
  latest = data['normal-version'][0]
  return RichResult(
    version = latest,
    url = f'https://hackage.haskell.org/package/{pkg}-{latest}',
  )
nvchecker-2.15.1/nvchecker_source/htmlparser.py000066400000000000000000000022771462655643400216620ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Ypsilik , et al.
# Copyright (c) 2013-2020 lilydjwg , et al.
from lxml import html, etree
from nvchecker.api import session, GetVersionError
async def get_version(name, conf, *, cache, **kwargs):
  # cache on the full configuration: any key may affect the result
  cache_key = tuple(sorted(conf.items()))
  return await cache.get(cache_key, get_version_impl)
async def get_version_impl(info):
  '''Fetch a page (GET, or POST when ``post_data`` is set) and extract
  version strings with the configured XPath expression.

  ``info`` is the sorted item tuple of the entry configuration.

  Returns a list of version strings (possibly empty with ``missing_ok``).
  Raises GetVersionError when nothing matches (unless ``missing_ok``) or
  the xpath cannot be evaluated.
  '''
  conf = dict(info)

  encoding = conf.get('encoding')
  parser = html.HTMLParser(encoding=encoding)
  data = conf.get('post_data')
  if data is None:
    res = await session.get(conf['url'])
  else:
    res = await session.post(conf['url'], body = data, headers = {
      'Content-Type': conf.get('post_data_type', 'application/x-www-form-urlencoded')
    })
  doc = html.fromstring(res.body, base_url=conf['url'], parser=parser)

  try:
    els = doc.xpath(conf.get('xpath'))
  except ValueError:
    if not conf.get('missing_ok', False):
      raise GetVersionError('version string not found.')
    # Bug fix: this branch used to fall through with ``els`` unbound and
    # crash with NameError; with missing_ok a miss simply means "no versions".
    return []
  except etree.XPathEvalError as e:
    raise GetVersionError('bad xpath', exc_info=e)

  version = [
    str(el)
    if isinstance(el, str)
    else str(el.text_content())
    for el in els
  ]
  return version
nvchecker-2.15.1/nvchecker_source/httpheader.py000066400000000000000000000020271462655643400216220ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2021 lilydjwg , et al.
import re
from nvchecker.api import session, GetVersionError
async def get_version(name, conf, *, cache, **kwargs):
  '''Deduplicate identical entries through the cache and delegate to get_version_impl.'''
  return await cache.get(tuple(sorted(conf.items())), get_version_impl)
async def get_version_impl(info):
  '''Request ``url`` and extract version strings from a response header.

  The header (default ``Location``) is matched with the configured regex;
  all matches are returned.

  Raises GetVersionError for a bad regex or a missing/empty header.
  '''
  conf = dict(info)
  url = conf['url']
  header = conf.get('header', 'Location')
  follow_redirects = conf.get('follow_redirects', False)
  method = conf.get('method', 'HEAD')

  try:
    regex = re.compile(conf['regex'])
  except re.error as e:
    raise GetVersionError('bad regex', exc_info=e)

  res = await session.request(
    url,
    method = method,
    follow_redirects = follow_redirects,
  )
  header_value = res.headers.get(header)
  if not header_value:
    raise GetVersionError(
      'header not found or is empty',
      header = header,
      value = header_value,
    )

  # Pattern.findall never raises ValueError, so the old try/except here
  # was dead code; an empty match list is returned as-is.
  return regex.findall(header_value)
nvchecker-2.15.1/nvchecker_source/jq.py000066400000000000000000000022351462655643400201050ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2024 Rocket Aaron , et al.
import json
import jq
from nvchecker.api import session, GetVersionError
async def get_version(name, conf, *, cache, **kwargs):
  '''Deduplicate identical entries through the cache and delegate to get_version_impl.'''
  return await cache.get(tuple(sorted(conf.items())), get_version_impl)
async def get_version_impl(info):
  '''Fetch JSON (GET, or POST when ``post_data`` is set) and run the
  configured jq filter over it.

  ``info`` is the sorted item tuple of the entry configuration.

  Returns the filter results as a list of strings.

  Raises GetVersionError for a bad filter, invalid JSON, a filter runtime
  error, or (unless ``missing_ok``) a null-only result.
  '''
  conf = dict(info)
  try:
    # Default filter "." passes the whole document through.
    program = jq.compile(conf.get('filter', '.'))
  except ValueError as e:
    raise GetVersionError('bad jq filter', exc_info=e)

  data = conf.get('post_data')
  if data is None:
    res = await session.get(conf['url'])
  else:
    res = await session.post(conf['url'], body = data, headers = {
      'Content-Type': conf.get('post_data_type', 'application/json')
    })
  try:
    obj = json.loads(res.body)
  except json.decoder.JSONDecodeError as e:
    raise GetVersionError('bad json string', exc_info=e)

  try:
    version = program.input(obj).all()
    # A single null result means the filter matched nothing.
    if version == [None] and not conf.get('missing_ok', False):
      raise GetVersionError('version string not found.')
    version = [str(v) for v in version]
  except ValueError as e:
    raise GetVersionError('failed to filter json', exc_info=e)

  return version
nvchecker-2.15.1/nvchecker_source/manual.py000066400000000000000000000002561462655643400207510ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
async def get_version(name, conf, **kwargs):
  '''Return the manually configured version string, or None when it is empty.'''
  version = str(conf.get('manual')).strip()
  if version:
    return version
  return None
nvchecker-2.15.1/nvchecker_source/mercurial.py000066400000000000000000000004321462655643400214530ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.
async def get_version(name, conf, *, cache, **kwargs):
  '''Return all tag names of a Mercurial repository via its json-tags endpoint.'''
  data = await cache.get_json(conf['mercurial'] + '/json-tags')
  return [entry['tag'] for entry in data['tags']]
nvchecker-2.15.1/nvchecker_source/none.py000066400000000000000000000006531462655643400204340ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
from __future__ import annotations
from nvchecker.api import (
BaseWorker, GetVersionError, RawResult,
)
class Worker(BaseWorker):
  '''Worker for entries with no source: every task immediately fails
  with a GetVersionError.'''

  async def run(self) -> None:
    # One shared exception instance is enough; it is only put on the
    # result queue, never raised here.
    exc = GetVersionError('no source specified')
    async with self.task_sem:
      for name, conf in self.tasks:
        await self.result_q.put(
          RawResult(name, exc, conf))
nvchecker-2.15.1/nvchecker_source/npm.py000066400000000000000000000020301462655643400202560ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import json
import re
from nvchecker.api import session, RichResult
NPM_URL = 'https://registry.npmjs.org/%s'
def configure(config):
  '''Module-level configuration hook: switch to a custom npm registry.

  Rewrites the global ``NPM_URL`` template when a ``registry`` key is
  present in the source configuration.
  '''
  global NPM_URL
  url = config.get('registry')
  if url:
    NPM_URL = f'{url.rstrip("/")}/%s'
async def get_first_1k(url):
  '''Fetch only the first KiB of the abbreviated registry metadata.

  The caller extracts the "dist-tags" object, which is expected within
  the first 1024 bytes; the Range header avoids downloading the
  potentially huge full document (servers may ignore it).
  '''
  headers = {
    "Accept": "application/vnd.npm.install-v1+json",
    "Range": "bytes=0-1023",
  }
  res = await session.get(url, headers=headers)
  return res.body
async def get_version(name, conf, *, cache, **kwargs):
  '''Return the "latest" dist-tag of an npm package.'''
  key = conf.get('npm', name)
  data = await cache.get(NPM_URL % key, get_first_1k)
  # Only the first KiB is fetched, so the body may be truncated JSON:
  # extract the dist-tags object with a regex instead of json.loads on
  # the whole document.
  dist_tags = json.loads(re.search(b'"dist-tags":({.*?})', data).group(1))
  version = dist_tags['latest']
  # There is no standardised URL scheme, so we only return an URL for the default registry
  if NPM_URL.startswith('https://registry.npmjs.org/'):
    return RichResult(
      version = version,
      url = f'https://www.npmjs.com/package/{key}/v/{version}',
    )
  else:
    return version
nvchecker-2.15.1/nvchecker_source/openvsx.py000066400000000000000000000010771462655643400212000ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2021 Th3Whit3Wolf , et al.
from nvchecker.api import RichResult
API_URL = 'https://open-vsx.org/api/%s/%s'
async def get_version(name, conf, *, cache, **kwargs):
  '''Return the current version of an Open VSX extension.

  The identifier is ``publisher.extension``; the extension part may
  itself contain dots, so only the first dot separates the two.
  '''
  name = conf.get('openvsx') or name
  # Bug fix: the old code split on every dot and used parts [0] and [1],
  # which queried the wrong extension for names like "pub.ext.name".
  publisher, extension = name.split('.', 1)
  data = await cache.get_json(API_URL % (publisher, extension))
  version = data['version']
  return RichResult(
    version = version,
    url = f'https://open-vsx.org/extension/{publisher}/{extension}/{version}',
  )
nvchecker-2.15.1/nvchecker_source/packagist.py000066400000000000000000000012571462655643400214440ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
from nvchecker.api import RichResult
PACKAGIST_URL = 'https://packagist.org/packages/%s.json'
async def get_version(name, conf, *, cache, **kwargs):
  '''Return the most recently released version of a Packagist package.

  Versions come from the package metadata, excluding the rolling
  "dev-master" entry, and are compared by release time rather than by
  version number.

  NOTE(review): implicitly returns None when no versions remain —
  presumably treated as "no result" by the caller; confirm.
  '''
  key = conf.get('packagist', name)
  data = await cache.get_json(PACKAGIST_URL % key)

  versions = {
    version: details
    for version, details in data["package"]['versions'].items()
    if version != "dev-master"
  }

  if len(versions):
    version = max(versions, key=lambda version: versions[version]["time"])
    return RichResult(
      version = version,
      url = f'https://packagist.org/packages/{data["package"]["name"]}#{version}',
    )
nvchecker-2.15.1/nvchecker_source/pacman.py000066400000000000000000000007771462655643400207430ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
from nvchecker_source import cmd
async def get_version(name, conf, **kwargs):
  '''Query the local pacman sync databases for a package's version.

  Delegates to the ``cmd`` source with a pacman/grep/awk pipeline.
  With ``strip_release`` set, the trailing "-<pkgrel>" part is removed.

  NOTE: mutates ``conf`` in place by inserting the generated ``cmd`` key.
  '''
  referree = conf.get('pacman') or name
  c = "LANG=C pacman -Si %s | grep -F Version | awk '{print $3}' | head -n 1" % referree
  conf['cmd'] = c
  strip_release = conf.get('strip_release', False)

  version = await cmd.get_version(name, conf, **kwargs)

  if strip_release and '-' in version:
    version = version.rsplit('-', 1)[0]
  return version
nvchecker-2.15.1/nvchecker_source/pagure.py000066400000000000000000000013721462655643400207570ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.
import urllib.parse
import structlog
from nvchecker.api import (
VersionResult, RichResult, Entry, AsyncCache, KeyManager,
)
PAGURE_URL = 'https://%s/api/0/%s/git/tags?with_commits=true'
logger = structlog.get_logger(logger_name=__name__)
async def get_version(
  name: str, conf: Entry, *,
  cache: AsyncCache, keymanager: KeyManager,
  **kwargs,
) -> VersionResult:
  '''List all git tags of a Pagure project, each linked to its tree view.'''
  repo = conf['pagure']
  host = conf.get('host', "pagure.io")

  data = await cache.get_json(PAGURE_URL % (host, repo))

  results = []
  for tag, commit in data["tags"].items():
    results.append(RichResult(
      version = tag,
      url = f'https://{host}/{repo}/tree/{commit}',
    ))
  return results
nvchecker-2.15.1/nvchecker_source/pypi.py000066400000000000000000000020661462655643400204560ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2021,2023-2024 lilydjwg , et al.
import structlog
from packaging.version import Version, InvalidVersion
from nvchecker.api import RichResult
logger = structlog.get_logger(logger_name=__name__)
async def get_version(name, conf, *, cache, **kwargs):
  '''List all versions of a PyPI project, skipping invalid version
  strings and (unless ``use_pre_release``) pre-releases.'''
  package = conf.get('pypi') or name
  use_pre_release = conf.get('use_pre_release', False)

  url = 'https://pypi.org/pypi/{}/json'.format(package)
  data = await cache.get_json(url)

  results = []
  for version, files in data['releases'].items():
    try:
      parsed_version = Version(version)
    except InvalidVersion:
      # emit a warning if there is something under the invalid version
      # sympy has an empty "0.5.13-hg" version
      if files:
        logger.warning('ignoring invalid version', version=version)
      continue

    if parsed_version.is_prerelease and not use_pre_release:
      continue

    results.append(RichResult(
      version = version,
      url = f'https://pypi.org/project/{package}/{version}/',
    ))

  return results
nvchecker-2.15.1/nvchecker_source/regex.py000066400000000000000000000020371462655643400206050ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import re
from nvchecker.api import session, GetVersionError
async def get_version(name, conf, *, cache, **kwargs):
  '''Extract version strings from a web page with a regular expression.

  The regex must contain at most one capture group.  The fetched page is
  cached keyed by URL, encoding and POST parameters.

  Raises GetVersionError for a bad or multi-group regex, or (unless
  ``missing_ok``) when nothing matches.
  '''
  try:
    regex = re.compile(conf['regex'])
  except re.error as e:
    raise GetVersionError('bad regex', exc_info=e)
  if regex.groups > 1:
    raise GetVersionError('multi-group regex')

  # The full key identifies the request so different entries can share
  # the same downloaded body.
  key = (
    conf['url'],
    conf.get('encoding', 'latin1'),
    conf.get('post_data'),
    conf.get('post_data_type', 'application/x-www-form-urlencoded'),
  )
  body = await cache.get(key, get_url)

  versions = regex.findall(body)
  if not versions and not conf.get('missing_ok', False):
    raise GetVersionError('version string not found.')
  return versions
async def get_url(info):
  '''Fetch and decode the page for a (url, encoding, post_data, post_data_type) cache key.'''
  url, encoding, post_data, post_data_type = info

  if post_data is None:
    res = await session.get(url)
  else:
    headers = {'Content-Type': post_data_type}
    res = await session.post(url, body = post_data, headers = headers)

  return res.body.decode(encoding)
nvchecker-2.15.1/nvchecker_source/repology.py000066400000000000000000000017411462655643400213340ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2019 lilydjwg , et al.
from nvchecker.api import RichResult, GetVersionError
API_URL = 'https://repology.org/api/v1/project/{}'
async def get_version(name, conf, *, cache, **kwargs):
  '''List the versions of a project as packaged in one repology repo,
  optionally narrowed to a subrepo.'''
  project = conf.get('repology') or name
  repo = conf.get('repo')
  subrepo = conf.get('subrepo')
  if not repo:
    raise GetVersionError('repo field is required for repology source')

  data = await cache.get_json(API_URL.format(project))

  matched = [p for p in data if p['repo'] == repo]
  if not matched:
    raise GetVersionError('package is not found', repo=repo)

  if subrepo:
    matched = [p for p in matched if p.get('subrepo') == subrepo]
    if not matched:
      raise GetVersionError('package is not found in subrepo',
                            repo=repo, subrepo=subrepo)

  results = []
  for p in matched:
    results.append(RichResult(
      version = p['version'],
      url = f'https://repology.org/project/{project}/packages',
    ))
  return results
nvchecker-2.15.1/nvchecker_source/sparkle.py000066400000000000000000000037411462655643400211370ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2020 Sunlei
from xml.etree import ElementTree
from nvchecker.api import session, RichResult
XML_NAMESPACE = 'http://www.w3.org/XML/1998/namespace'
SPARKLE_NAMESPACE = 'http://www.andymatuschak.org/xml-namespaces/sparkle'
async def get_version(name, conf, *, cache, **kwargs):
  '''Check a Sparkle appcast feed; release notes are preferred in the
  configured language.'''
  feed_url = conf['sparkle']
  lang = conf.get('release_notes_language', 'en')
  return await cache.get((feed_url, lang), get_version_impl)
async def get_version_impl(info):
  '''Parse a Sparkle appcast feed: build a version string from the first
  item and attach a release-notes link when one is available.

  ``info`` is a (feed_url, release_notes_language) tuple.
  '''
  sparkle, release_notes_language = info
  res = await session.get(sparkle)
  # NOTE(review): despite the name, ``root`` is the first <item> of the
  # feed's channel, not the document root; an empty feed would make it
  # None and the .find() below fail — presumably feeds always have at
  # least one item; confirm.
  root = ElementTree.fromstring(res.body).find('./channel/item[1]')
  item = root.find('./enclosure')

  version_string = item.get(f'{{{SPARKLE_NAMESPACE}}}shortVersionString')
  build_number = item.get(f'{{{SPARKLE_NAMESPACE}}}version')

  # Some feeds swap the two attributes: if the "short version" is all
  # digits while the "version" is not, exchange them.
  if (version_string and version_string.isdigit()) and (
    build_number and not build_number.isdigit()
  ):
    version_string, build_number = build_number, version_string

  version = []

  if version_string:
    version.append(version_string)
  if build_number and (build_number not in version):
    version.append(build_number)

  # Joined as "shortVersion-build"; None when neither attribute was set.
  version_str = '-'.join(version) if version else None

  release_notes_link = None
  for release_notes in root.findall(f'./{{{SPARKLE_NAMESPACE}}}releaseNotesLink'):
    language = release_notes.get(f'{{{XML_NAMESPACE}}}lang')

    # If the release notes have no language set, store them, but keep looking for our preferred language
    if language is None:
      release_notes_link = release_notes.text.strip()

    # If the release notes match our preferred language, store them and stop looking
    if language == release_notes_language:
      release_notes_link = release_notes.text.strip()
      break

  if release_notes_link is not None:
    return RichResult(
      version = version_str,
      url = release_notes_link,
    )
  else:
    return version_str
nvchecker-2.15.1/nvchecker_source/ubuntupkg.py000066400000000000000000000025601462655643400215200ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017 Felix Yan , et al.
from nvchecker.api import RichResult, GetVersionError
URL = 'https://api.launchpad.net/1.0/ubuntu/+archive/primary?ws.op=getPublishedSources&source_name=%s&exact_match=true'
async def get_version(name, conf, *, cache, **kwargs):
  '''Return the latest published version of an Ubuntu source package via
  the Launchpad API, optionally restricted to one suite (series).

  Raises GetVersionError when no published sources are found.
  '''
  pkg = conf.get('ubuntupkg') or name
  strip_release = conf.get('strip_release', False)
  suite = conf.get('suite')
  url = URL % pkg

  if suite:
    # The API returns distro_series_link as a full Launchpad URL.
    suite = "https://api.launchpad.net/1.0/ubuntu/" + suite

  releases = []

  # Results are paginated: follow next_collection_link until a published
  # (and suite-matching) release shows up or pages run out.
  while not releases:
    data = await cache.get_json(url)

    if not data.get('entries'):
      raise GetVersionError('Ubuntu package not found')

    releases = [r for r in data["entries"] if r["status"] == "Published"]

    if suite:
      releases = [r for r in releases if r["distro_series_link"] == suite]

    if "next_collection_link" not in data:
      break

    url = data["next_collection_link"]

  if not releases:
    # (an unreachable ``return`` used to follow this raise; removed)
    raise GetVersionError('Ubuntu package not found')

  if strip_release:
    version = releases[0]['source_package_version'].split("-")[0]
  else:
    version = releases[0]['source_package_version']

  return RichResult(
    version = version,
    url = f'https://packages.ubuntu.com/{releases[0]["distro_series_link"].rsplit("/", 1)[-1]}/{pkg}',
  )
nvchecker-2.15.1/nvchecker_source/vsmarketplace.py000066400000000000000000000024601462655643400223340ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2021 Th3Whit3Wolf , et al.
from nvchecker.api import (
VersionResult, Entry, AsyncCache, KeyManager,
TemporaryError, session, RichResult, GetVersionError,
)
API_URL = 'https://marketplace.visualstudio.com/_apis/public/gallery/extensionquery'
HEADERS = {
'Accept': 'application/json;api-version=6.1-preview.1',
'Content-Type': 'application/json'
}
async def get_version(name: str, conf: Entry, *, cache: AsyncCache, **kwargs):
  '''Query the Visual Studio Marketplace gallery for the version of an
  extension (identifier "publisher.name").
  '''
  name = conf.get('vsmarketplace') or name

  # Gallery extensionquery payload.  The numeric filterType / flags
  # values follow the marketplace gallery API; presumably 8 selects the
  # target product, 7 the extension name, 12 excludes by flags — confirm
  # against the gallery API before changing them.
  q = {
    'filters': [
      {
        'criteria': [
          {
            'filterType': 8,
            'value': 'Microsoft.VisualStudio.Code'
          },
          {
            'filterType': 7,
            'value': name
          },
          {
            'filterType': 12,
            'value': '4096'
          }
        ],
        'pageNumber': 1,
        'pageSize': 2,
        'sortBy': 0,
        'sortOrder': 0
      }
    ],
    'assetTypes': [],
    'flags': 946
  }

  res = await session.post(
    API_URL,
    headers = HEADERS,
    json = q,
  )
  j = res.json()
  # The first listed version of the first matching extension is used.
  version = j['results'][0]['extensions'][0]['versions'][0]['version']
  return RichResult(
    version = version,
    url = f'https://marketplace.visualstudio.com/items?itemName={name}',
  )
nvchecker-2.15.1/pyproject.toml000066400000000000000000000004111462655643400164770ustar00rootroot00000000000000[build-system]
requires = ["setuptools"]
build-backend = "setuptools.build_meta"
[tool.pytest.ini_options]
# addopts = -n auto
asyncio_mode = "strict"
# build and upload
# rm -rf dist && python -m build --no-isolation && twine check dist/* && twine upload dist/*
nvchecker-2.15.1/sample_config.toml000066400000000000000000000015151462655643400172740ustar00rootroot00000000000000[__config__]
oldver = "old_ver.json"
newver = "new_ver.json"
[vim]
source = "regex"
regex = "7\\.3\\.\\d+"
url = "http://ftp.vim.org/pub/vim/patches/7.3/"
[google-chrome]
source = "cmd"
cmd = '''wget -qO- http://dl.google.com/linux/chrome/rpm/stable/x86_64/repodata/other.xml.gz | zgrep -A1 "google-chrome-stable" | awk -F\" '/version/ {print $4"-"$6}' '''
[fbcat]
source = "aur"
[winterpy]
source = "github"
github = "lilydjwg/winterpy"
[nvchecker]
source = "github"
github = "lilydjwg/nvchecker"
[ssed]
source = "regex"
regex = "The current version is ([\\d.]+)\\."
url = "http://sed.sourceforge.net/grabbag/ssed/"
proxy = "http://localhost:8087"
[PySide]
source = "pypi"
pypi = "PySide"
[test]
source = "manual"
manual = "0.1"
["Sparkle Test App"]
source = "sparkle"
sparkle = "https://sparkle-project.org/files/sparkletestcast.xml"
nvchecker-2.15.1/scripts/000077500000000000000000000000001462655643400152565ustar00rootroot00000000000000nvchecker-2.15.1/scripts/README.rst000066400000000000000000000000451462655643400167440ustar00rootroot00000000000000Additional scripts may help someone.
nvchecker-2.15.1/scripts/nvchecker-ini2toml000077500000000000000000000037151462655643400207150ustar00rootroot00000000000000#!/usr/bin/python3
# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
import argparse
import configparser
import toml
# Source keys recognised in 1.x ini files; for each section, the first of
# these keys that is present determines the "source" of the new entry.
_handler_precedence = (
  'github', 'aur', 'pypi', 'archpkg', 'debianpkg', 'ubuntupkg',
  'gems', 'pacman',
  'cmd', 'bitbucket', 'regex', 'manual', 'vcs',
  'cratesio', 'npm', 'hackage', 'cpan', 'gitlab', 'packagist',
  'repology', 'anitya', 'android_sdk', 'sparkle', 'gitea'
)

# Option keys whose ini string values must be converted to real booleans
# or integers in the toml output.
BOOL_KEYS = [
  'strip_release', 'use_last_modified',
  'use_latest_release', 'use_latest_tag',
  'use_max_tag', 'use_pre_release',
]

INT_KEYS = [
  'max_page',
]
def main():
  '''Convert a 1.x ini configuration file to the 2.x toml format.'''
  parser = argparse.ArgumentParser(description='convert 1.x ini file to 2.x toml file')
  parser.add_argument('ini', type=argparse.FileType(),
                      help='the old ini file')
  parser.add_argument('toml', type=argparse.FileType(mode='w'),
                      # bug fix: this help text used to say "the new ini file"
                      help='the new toml file')
  args = parser.parse_args()

  old = configparser.ConfigParser(
    dict_type=dict, allow_no_value=True, interpolation=None,
  )
  old.read_file(args.ini)

  # Global section: only rename max_concurrent -> max_concurrency.
  if '__config__' in old:
    c = old['__config__']
    newconf = dict(c)
    x = newconf.pop('max_concurrent', None)
    if x is not None:
      newconf['max_concurrency'] = x
    confs = {'__config__': newconf}
  else:
    confs = {}

  for section in old.sections():
    if section == '__config__':
      continue
    conf = old[section]
    newconf = {}

    # Determine the source: the first recognised handler key wins.
    for key in _handler_precedence:
      if key not in conf:
        continue
      newconf['source'] = key
      if conf.get(key):
        newconf[key] = conf.get(key)
      break

    dconf = dict(conf)

    for k, v in dconf.items():
      # 2.x uses underscores in option names.
      if '-' in k:
        k = k.replace('-', '_')

      if k in BOOL_KEYS:
        newconf[k] = conf.getboolean(k)
      elif k in INT_KEYS:
        newconf[k] = conf.getint(k)
      elif v != '':
        newconf[k] = v

    confs[section] = newconf

  toml.dump(confs, args.toml)
  args.toml.flush()
nvchecker-2.15.1/scripts/nvchecker-notify000077500000000000000000000037751462655643400204760ustar00rootroot00000000000000#!/usr/bin/env python3
# MIT licensed
# Copyright (c) 2020,2022 lilydjwg , et al.
'''
A simple wrapper to show desktop notifications while running nvchecker.
'''
import os
import subprocess
import json
import gi
try:
gi.require_version('Notify', '0.8')
except ValueError:
gi.require_version('Notify', '0.7')
from gi.repository import Notify
def get_args():
  '''Parse the command line; options mirror a subset of nvchecker's own.'''
  import argparse
  ap = argparse.ArgumentParser(description='show desktop notifications while running nvchecker')
  ap.add_argument('-c', '--file',
                  metavar='FILE', type=str,
                  help='software version configuration file if not default')
  ap.add_argument('-k', '--keyfile',
                  metavar='FILE', type=str,
                  help='use specified keyfile (override the one in configuration file)')
  ap.add_argument('-t', '--tries', default=1, type=int, metavar='N',
                  help='try N times when network errors occur')
  ap.add_argument('--failures', action='store_true',
                  help='exit with code 3 if failures / errors happen during checking')
  return ap.parse_args()
def main():
  '''Run nvchecker as a subprocess and show a desktop notification for
  every "updated" event in its JSON log stream.
  '''
  args = get_args()

  Notify.init('nvchecker')
  notif = Notify.Notification()
  updates = []

  # nvchecker writes structured JSON log lines to the fd passed via
  # --json-log-fd; read them back through a pipe.
  rfd, wfd = os.pipe()
  cmd = [
    'nvchecker', '--logger', 'both', '--json-log-fd', str(wfd),
  ]
  if args.file:
    cmd.extend(['-c', args.file])
  if args.keyfile:
    cmd.extend(['-k', args.keyfile])
  if args.tries:
    cmd.extend(['-t', str(args.tries)])
  if args.failures:
    cmd.append('--failures')

  process = subprocess.Popen(cmd, pass_fds=(wfd,))
  # Close our copy of the write end so the reader sees EOF when the
  # child exits.
  os.close(wfd)

  output = os.fdopen(rfd)
  for l in output:
    j = json.loads(l)
    event = j['event']
    if event == 'updated':
      # Accumulate updates so the notification shows the full list.
      updates.append('%(name)s updated to version %(version)s' % j)
      notif.update('nvchecker', '\n'.join(updates))
      notif.show()

  ret = process.wait()
  if ret != 0:
    raise subprocess.CalledProcessError(ret, cmd)
nvchecker-2.15.1/scripts/nvtake.bash_completion000066400000000000000000000010651462655643400216400ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.
# Bash completion for nvtake: complete package names from `nvcmp -q`,
# re-running nvcmp only when a nvchecker config file has changed.
_nvtake() {
  local cur _nvchecker_conf_mtime
  _init_completion || return

  _nvchecker_conf_mtime="$(stat -c %Y $HOME/.config/nvchecker/*)"

  # Cache the nvcmp output; invalidate when any config mtime changes.
  if [ -z "$_nvtake_completion_cache" -o "$_nvchecker_conf_mtime" != "$_nvchecker_conf_mtime_cached" ]; then
    _nvtake_completion_cache="$(nvcmp -q)"
    _nvchecker_conf_mtime_cached="$_nvchecker_conf_mtime"
  fi

  COMPREPLY=( $(compgen -W "$_nvtake_completion_cache" -- "$cur") )
} &&
complete -F _nvtake nvtake
nvchecker-2.15.1/setup.cfg000066400000000000000000000034671462655643400154220ustar00rootroot00000000000000# The complex upload command:
# rm -rf dist && python -m build --sdist && twine check dist/* && twine upload -s dist/*
[metadata]
name = nvchecker
version = attr: nvchecker.__version__
author = lilydjwg
author_email = lilydjwg@gmail.com
description = New version checker for software
license = MIT
keywords = new, version, build, check
url = https://github.com/lilydjwg/nvchecker
long_description = file: README.rst
long_description_content_type = text/x-rst
platforms = any
classifiers =
Development Status :: 5 - Production/Stable
Environment :: Console
Intended Audience :: Developers
Intended Audience :: System Administrators
License :: OSI Approved :: MIT License
Operating System :: OS Independent
Programming Language :: Python
Programming Language :: Python :: 3
Programming Language :: Python :: 3 :: Only
Programming Language :: Python :: 3.8
Programming Language :: Python :: 3.9
Programming Language :: Python :: 3.10
Programming Language :: Python :: 3.11
Programming Language :: Python :: 3.12
Topic :: Internet
Topic :: Internet :: WWW/HTTP
Topic :: Software Development
Topic :: System :: Archiving :: Packaging
Topic :: System :: Software Distribution
Topic :: Utilities
[options]
zip_safe = True
packages = find_namespace:
install_requires =
setuptools; python_version<"3.8"
tomli; python_version<"3.11"
structlog
platformdirs
tornado>=6
pycurl
scripts =
scripts/nvchecker-ini2toml
scripts/nvchecker-notify
[options.packages.find]
exclude = tests, build*, docs*
[options.extras_require]
vercmp =
pyalpm
awesomeversion =
awesomeversion
pypi =
packaging
htmlparser =
lxml
jq =
jq
[options.entry_points]
console_scripts =
nvchecker = nvchecker.__main__:main
nvtake = nvchecker.tools:take
nvcmp = nvchecker.tools:cmp
[flake8]
ignore = E111, E302, E501
nvchecker-2.15.1/tests/000077500000000000000000000000001462655643400147315ustar00rootroot00000000000000nvchecker-2.15.1/tests/__init__.py000066400000000000000000000001131462655643400170350ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
nvchecker-2.15.1/tests/conftest.py000066400000000000000000000053761462655643400171430ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020, 2024 lilydjwg , et al.
import asyncio
import structlog
import os
from pathlib import Path
from typing import TYPE_CHECKING, Dict
if TYPE_CHECKING:
import tomli as tomllib
else:
try:
import tomllib
except ModuleNotFoundError:
import tomli as tomllib
import pytest
import pytest_asyncio
from nvchecker import core
from nvchecker import __main__ as main
from nvchecker.util import Entries, ResultData, RawResult
use_keyfile = False
async def run(
  entries: Entries, max_concurrency: int = 20,
) -> Dict[str, str]:
  '''Run the given entries through nvchecker's core machinery and return
  a mapping of entry name -> new version string.

  The $KEYFILE key file is used only while the module-global
  ``use_keyfile`` flag is set (see the ``keyfile`` fixture).
  '''
  task_sem = asyncio.Semaphore(max_concurrency)
  result_q: asyncio.Queue[RawResult] = asyncio.Queue()
  keyfile = os.environ.get('KEYFILE')
  if use_keyfile and keyfile:
    filepath = Path(keyfile)
    keymanager = core.KeyManager(filepath)
  else:
    keymanager = core.KeyManager(None)

  dispatcher = core.setup_httpclient()
  entry_waiter = core.EntryWaiter()
  futures = dispatcher.dispatch(
    entries, task_sem, result_q,
    keymanager, entry_waiter, 1, {},
  )

  # Start with no known old versions so every result counts as new.
  oldvers: ResultData = {}
  result_coro = core.process_result(oldvers, result_q, entry_waiter)
  runner_coro = core.run_tasks(futures)
  results, _has_failures = await main.run(result_coro, runner_coro)
  return {k: r.version for k, r in results.items()}
@pytest_asyncio.fixture(scope="session")
async def get_version():
  '''Fixture: check a single entry and return its new version (or None).'''
  async def __call__(name, config):
    entries = {name: config}
    newvers = await run(entries)
    return newvers.get(name)

  return __call__
@pytest_asyncio.fixture(scope="session")
async def run_str():
  '''Fixture: run a toml snippet with one entry and return its version.'''
  async def __call__(str):
    entries = tomllib.loads(str)
    newvers = await run(entries)
    # One entry expected; popitem() yields its (name, version) pair.
    return newvers.popitem()[1]

  return __call__
@pytest_asyncio.fixture(scope="session")
async def run_str_multi():
  '''Fixture: run a toml snippet with several entries; return the full
  name -> version mapping.'''
  async def __call__(str):
    entries = tomllib.loads(str)
    newvers = await run(entries)
    return newvers

  return __call__
@pytest.fixture(scope="session", autouse=True)
def raise_on_logger_msg():
  '''Turn structlog warnings/errors into hard test failures for the
  whole session, except expected rate-limit / no-result messages.'''
  def proc(logger, method_name, event_dict):
    if method_name in ('warning', 'error'):
      if 'exc_info' in event_dict:
        exc = event_dict['exc_info']
        if isinstance(exc, Exception):
          raise exc
        else: # exc_info=True
          # NOTE(review): a bare ``raise`` re-raises the exception being
          # handled; presumably the log call happens inside an ``except``
          # block — confirm.
          raise
      if not event_dict['event'].startswith(('rate limited', 'no-result')):
        raise RuntimeError(event_dict['event'])
    return event_dict['event']

  structlog.configure([proc])
def pytest_configure(config):
  '''pytest hook: register the custom ``needs_net`` marker.'''
  # register an additional marker
  config.addinivalue_line(
    'markers', 'needs_net: mark test to require Internet access',
  )
@pytest.fixture
def keyfile():
  '''Enable use of the $KEYFILE key file for one test.

  Skips the test when KEYFILE is unset; otherwise toggles the
  module-global flag that ``run`` reads, restoring it afterwards.
  '''
  global use_keyfile
  if 'KEYFILE' not in os.environ:
    pytest.skip('KEYFILE not set')
    return

  use_keyfile = True
  yield
  use_keyfile = False
nvchecker-2.15.1/tests/test_alpm.py000066400000000000000000000053371462655643400173030ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 DDoSolitary , et al.
import pathlib
import shutil
import subprocess
import tempfile
import pytest
pytestmark = [
pytest.mark.asyncio(scope="session"),
pytest.mark.skipif(shutil.which('makepkg') is None, reason='requires makepkg command'),
pytest.mark.skipif(shutil.which('repo-add') is None, reason='requires repo-add command')
]
global temp_dir, db_path
def setup_module(module):
  '''Build a throwaway pacman package and repo database for the tests.

  Creates a "test-pkg" PKGBUILD (version 1.2.3-4, with versioned and
  unversioned provides), builds it with makepkg, and registers it in a
  "test-repo" database under the module-global ``db_path``.
  '''
  global temp_dir, db_path

  temp_dir = tempfile.TemporaryDirectory()
  temp_path = pathlib.Path(temp_dir.name)
  pkg_path = temp_path / 'test-pkg'
  pkg_path.mkdir()

  with (pkg_path / 'PKGBUILD').open('w') as f:
    f.write(
      'pkgname=test-pkg\n'
      'pkgver=1.2.3\n'
      'pkgrel=4\n'
      'arch=(any)\n'
      'provides=("test-provides=5.6-7" "test-provides-unversioned")\n'
      'options=(!debug)\n'
    )

  subprocess.check_call(['makepkg', '--nosign'], cwd=pkg_path)
  # makepkg --packagelist prints the path of the built package file.
  pkg_file = subprocess.check_output(['makepkg', '--packagelist'], cwd=pkg_path, text=True).strip()

  db_path = pkg_path / 'test-db'
  db_path.mkdir()
  repo_path = db_path / 'sync'
  repo_path.mkdir()
  subprocess.check_call([
    'repo-add',
    repo_path / 'test-repo.db.tar.gz',
    pkg_path / pkg_file
  ])
def teardown_module(module):
  '''Remove the temporary package/repository directory.'''
  temp_dir.cleanup()
async def test_alpm(get_version):
  '''Default output is the full pkgver-pkgrel string.'''
  assert await get_version('test-pkg', {
    'source': 'alpm',
    'dbpath': str(db_path),
    'repo': 'test-repo'
  }) == '1.2.3-4'
async def test_alpm_strip(get_version):
  '''strip_release drops the trailing -pkgrel part.'''
  assert await get_version('test-pkg', {
    'source': 'alpm',
    'dbpath': str(db_path),
    'repo': 'test-repo',
    'strip_release': True
  }) == '1.2.3'
async def test_alpm_provided(get_version):
  '''The version can come from a versioned "provides" entry.'''
  assert await get_version('test-pkg', {
    'source': 'alpm',
    'dbpath': str(db_path),
    'repo': 'test-repo',
    'provided': 'test-provides'
  }) == '5.6-7'
async def test_alpm_provided_strip(get_version):
  '''strip_release also applies to provided versions.'''
  assert await get_version('test-pkg', {
    'source': 'alpm',
    'dbpath': str(db_path),
    'repo': 'test-repo',
    'provided': 'test-provides',
    'strip_release': True
  }) == '5.6'
async def test_alpm_missing_repo(get_version):
  '''An unknown repo is reported as an error.'''
  with pytest.raises(RuntimeError):
    await get_version('test-pkg', {
      'source': 'alpm',
      'dbpath': str(db_path),
      'repo': 'wrong-repo'
    })
async def test_alpm_missing_pkg(get_version):
  '''An unknown package is reported as an error.'''
  with pytest.raises(RuntimeError):
    await get_version('wrong-pkg', {
      'source': 'alpm',
      'dbpath': str(db_path),
      'repo': 'test-repo'
    })
async def test_alpm_missing_provides(get_version):
  '''An unknown "provides" name is reported as an error.'''
  with pytest.raises(RuntimeError):
    await get_version('test-pkg', {
      'source': 'alpm',
      'dbpath': str(db_path),
      'repo': 'test-repo',
      'provided': 'wrong-provides'
    })
nvchecker-2.15.1/tests/test_alpmfiles.py000066400000000000000000000023461462655643400203230ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2023 Pekka Ristola , et al.
import pathlib
import shutil
import subprocess
import tempfile
import pytest
pytestmark = [
pytest.mark.asyncio(scope="session"),
pytest.mark.skipif(shutil.which('pacman') is None, reason='requires pacman command'),
pytest.mark.skipif(shutil.which('fakeroot') is None, reason='requires fakeroot command'),
]
global temp_dir, db_path
def setup_module(module):
  '''Download the pacman files databases into a temporary dbpath.'''
  global temp_dir, db_path

  temp_dir = tempfile.TemporaryDirectory()
  temp_path = pathlib.Path(temp_dir.name)
  db_path = temp_path / 'test-db'
  db_path.mkdir(exist_ok=True)
  # pacman -Fy needs root-ish privileges for the dbpath; fakeroot suffices.
  cmd = ['fakeroot', 'pacman', '-Fy', '--dbpath', db_path]
  subprocess.check_call(cmd)
def teardown_module(module):
  '''Remove the temporary files database.'''
  temp_dir.cleanup()
async def test_alpmfiles(get_version):
  '''The capture group of the filename regex becomes the version.'''
  assert await get_version('test', {
    'source': 'alpmfiles',
    'pkgname': 'libuv',
    'filename': 'usr/lib/libuv\\.so\\.([^.]+)',
    'dbpath': db_path,
  }) == '1'
async def test_alpmfiles_strip(get_version):
  '''strip_dir removes the directory part of the matched path.'''
  assert await get_version('test', {
    'source': 'alpmfiles',
    'pkgname': 'glibc',
    'repo': 'core',
    'filename': 'libc\\.so\\.[^.]+',
    'strip_dir': True,
    'dbpath': db_path,
  }) == 'libc.so.6'
nvchecker-2.15.1/tests/test_android_sdk.py000066400000000000000000000032631462655643400206270ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017 Chih-Hsuan Yen
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
async def test_android_addon(get_version):
  '''An addon-repo package resolves to its listed version.'''
  assert await get_version("android-google-play-apk-expansion", {
    "source": "android_sdk",
    "android_sdk": "extras;google;market_apk_expansion",
    "repo": "addon",
  }) == "1.r03"
async def test_android_package(get_version):
  '''A package-repo prefix query returns the stable version.

  NOTE(review): pins an exact upstream version; will need updating when
  Google publishes a newer stable cmake.
  '''
  assert await get_version("android-sdk-cmake", {
    "source": "android_sdk",
    "android_sdk": "cmake;",
    "repo": "package",
  }) == "3.22.1"
async def test_android_package_channel(get_version):
  '''Multiple channels can be selected with a comma-separated list.'''
  assert await get_version("android-sdk-cmake", {
    "source": "android_sdk",
    "android_sdk": "ndk;",
    "repo": "package",
    "channel": "beta,dev,canary",
  }) == "26.0.10636728"
async def test_android_list(get_version):
  '''include_regex restricts candidate versions.'''
  assert await get_version("android-sdk-cmake-older", {
    "source": "android_sdk",
    "android_sdk": "cmake;",
    "repo": "package",
    "include_regex": r"3\.10.*",
  }) == "3.10.2"
async def test_android_package_os(get_version):
  '''host_os selects packages available for that OS.'''
  assert await get_version("android-usb-driver", {
    "source": "android_sdk",
    "android_sdk": "extras;google;usb_driver",
    "repo": "addon",
    "host_os": "windows"
  }) == "13"
async def test_android_package_os_missing(get_version):
  '''No version is returned when the package does not exist for host_os.'''
  assert await get_version("android-usb-driver", {
    "source": "android_sdk",
    "android_sdk": "extras;google;usb_driver",
    "repo": "addon",
    "host_os": "linux"
  }) == None
nvchecker-2.15.1/tests/test_anitya.py000066400000000000000000000006461462655643400176350ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017 Felix Yan , et al.
import re
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
async def test_anitya(get_version):
  """Anitya (release-monitoring.org) reports a dotted numeric version."""
  conf = {
    "source": "anitya",
    "anitya": "fedora/shutter",
  }
  ver = await get_version("shutter", conf)
  assert re.match(r"[0-9.]+", ver)
nvchecker-2.15.1/tests/test_apt.py000066400000000000000000000025311462655643400171270ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020-2021 lilydjwg , et al.
# Copyright (c) 2017 Felix Yan , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
@pytest.mark.flaky(reruns=10)
async def test_apt(get_version):
  """Binary package version from the Debian sid archive, with revision."""
  conf = {
    "source": "apt",
    "mirror": "http://deb.debian.org/debian/",
    "suite": "sid",
  }
  assert await get_version("sigrok-firmware-fx2lafw", conf) == "0.1.7-3"

@pytest.mark.flaky(reruns=10)
async def test_apt_srcpkg(get_version):
  """Source-package lookup via the `srcpkg` option."""
  ver = await get_version("test", {
    "source": "apt",
    "srcpkg": "golang-github-dataence-porter2",
    "mirror": "http://deb.debian.org/debian/",
    "suite": "sid",
  })
  assert ver.startswith("0.0~git20150829.56e4718-")

@pytest.mark.flaky(reruns=10)
async def test_apt_strip_release(get_version):
  """strip_release drops the Debian revision suffix."""
  conf = {
    "source": "apt",
    "mirror": "http://deb.debian.org/debian/",
    "suite": "sid",
    "strip_release": 1,
  }
  assert await get_version("sigrok-firmware-fx2lafw", conf) == "0.1.7"

@pytest.mark.skip
@pytest.mark.flaky(reruns=10)
async def test_apt_deepin(get_version):
  """A Deepin mirror can also be queried (currently skipped)."""
  conf = {
    "source": "apt",
    "mirror": "https://community-packages.deepin.com/deepin",
    "suite": "apricot",
  }
  assert await get_version("sigrok-firmware-fx2lafw", conf) == "0.1.6-1"
nvchecker-2.15.1/tests/test_archpkg.py000066400000000000000000000016141462655643400177630ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
@pytest.mark.flaky
async def test_archpkg(get_version):
  """Arch official repos report pkgver-pkgrel."""
  assert await get_version("base", {"source": "archpkg"}) == "3-2"

@pytest.mark.flaky
async def test_archpkg_strip_release(get_version):
  """strip_release removes the pkgrel part."""
  conf = {
    "source": "archpkg",
    "strip_release": True,
  }
  assert await get_version("base", conf) == "3"

@pytest.mark.flaky
async def test_archpkg_provided(get_version):
  """`provided` reads the version from the package's provides list."""
  conf = {
    "source": "archpkg",
    "provided": "libdbus-1.so",
  }
  assert await get_version("dbus", conf) == "3-64"

@pytest.mark.flaky
async def test_archpkg_provided_strip(get_version):
  """provided + strip_release keeps only the sover number."""
  conf = {
    "source": "archpkg",
    "provided": "libjsoncpp.so",
    "strip_release": True,
  }
  assert await get_version("jsoncpp", conf) == "25"
nvchecker-2.15.1/tests/test_aur.py000066400000000000000000000015471462655643400171400ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import os
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"),
pytest.mark.needs_net,
pytest.mark.skipif(os.environ.get('TRAVIS') == 'true',
reason="fail too often")]
@pytest.mark.flaky(reruns=10)
async def test_aur(get_version):
  """The AUR source reports pkgver-pkgrel."""
  assert await get_version("ssed", {"source": "aur"}) == "3.62-2"

@pytest.mark.flaky(reruns=10)
async def test_aur_strip_release(get_version):
  """strip_release drops the pkgrel."""
  conf = {
    "source": "aur",
    "strip_release": 1,
  }
  assert await get_version("ssed", conf) == "3.62"

@pytest.mark.flaky(reruns=10)
async def test_aur_use_last_modified(get_version):
  """use_last_modified appends the last-modified timestamp."""
  conf = {
    "source": "aur",
    'use_last_modified': True,
  }
  assert await get_version("ssed", conf) == "3.62-2-20150725052412"
nvchecker-2.15.1/tests/test_bitbucket.py000066400000000000000000000027041462655643400203210ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
async def test_bitbucket(get_version):
  """Default mode: date of the latest commit."""
  conf = {
    "source": "bitbucket",
    "bitbucket": "prawee/git-tag",
  }
  assert await get_version("example", conf) == "20150303"

async def test_bitbucket_max_tag(get_version):
  """use_max_tag picks the highest tag."""
  conf = {
    "source": "bitbucket",
    "bitbucket": "prawee/git-tag",
    "use_max_tag": True,
  }
  assert await get_version("example", conf) == "1.7.0"

async def test_bitbucket_max_tag_with_ignored(get_version):
  """Tags listed in `ignored` are excluded from max-tag selection."""
  conf = {
    "source": "bitbucket",
    "bitbucket": "prawee/git-tag",
    "use_max_tag": True,
    "ignored": "1.6.0 1.7.0",
  }
  assert await get_version("example", conf) == "v1.5"

async def test_bitbucket_sorted_tags(get_version):
  """use_sorted_tags supports server-side queries and prefix stripping."""
  assert await get_version("example", {
    "source": "bitbucket",
    "bitbucket": "prawee/git-tag",
    "use_sorted_tags": True,
  }) == "1.7.0"
  # a Bitbucket query expression narrows the tag list
  assert await get_version("example", {
    "source": "bitbucket",
    "bitbucket": "prawee/git-tag",
    "use_sorted_tags": True,
    "query": 'name~"v"',
  }) == "v1.5"
  # query plus prefix stripping on a larger repository
  assert await get_version("example", {
    "source": "bitbucket",
    "bitbucket": "berkeleylab/gasnet",
    "use_sorted_tags": True,
    "query": 'name~"CVS/BERKELEY_UPC" AND name!~"rc"',
    "prefix": "CVS/BERKELEY_UPC_",
  }) == "2_18_0"
nvchecker-2.15.1/tests/test_cache.py000066400000000000000000000005731462655643400174120ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
import pytest
pytestmark = pytest.mark.asyncio(scope="session")
async def test_cache(run_str_multi):
  """Two entries running the identical command share one cached result."""
  conf = r'''
[cache-1]
source = "cmd"
cmd = "bash -c 'echo $RANDOM'"
[cache-2]
source = "cmd"
cmd = "bash -c 'echo $RANDOM'"
'''
  results = await run_str_multi(conf)
  assert results['cache-1'] == results['cache-2']
nvchecker-2.15.1/tests/test_cmd.py000066400000000000000000000012731462655643400171100ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import time
import pytest
pytestmark = pytest.mark.asyncio(scope="session")
async def test_cmd(get_version):
  """The cmd source returns the command's output."""
  conf = {
    "source": "cmd",
    "cmd": "echo Meow",
  }
  assert await get_version("example", conf) == "Meow"

async def test_cmd_complex(get_version):
  """Shell pipelines work in the cmd string."""
  conf = {
    "source": "cmd",
    "cmd": "echo Meow | sed 's/meow/woof/i'",
  }
  assert await get_version("example", conf) == "woof"

async def test_cmd_with_percent(run_str):
  """Percent signs in the command survive TOML parsing and execution."""
  test_conf = '''\
[example]
source = "cmd"
cmd = "date +%Y-%m-%d"'''
  result = await run_str(test_conf)
  assert result == time.strftime('%Y-%m-%d')
nvchecker-2.15.1/tests/test_combiner.py000066400000000000000000000006651462655643400201470ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2021 lilydjwg , et al.
import pytest
pytestmark = pytest.mark.asyncio(scope="session")
async def test_combiner(run_str_multi):
  """The combiner source interpolates other entries' results via $N."""
  conf = r'''
[entry-1]
source = "cmd"
cmd = "echo 1"
[entry-2]
source = "cmd"
cmd = "echo 2"
[entry-3]
source = "combiner"
from = ["entry-1", "entry-2", "entry-2"]
format = "$1-$2-$3"
'''
  results = await run_str_multi(conf)
  assert results['entry-3'] == '1-2-2'
nvchecker-2.15.1/tests/test_container.py000066400000000000000000000034241462655643400203270ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Chih-Hsuan Yen
import pytest
import datetime
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
async def test_container(get_version):
  """Without a tag, tags are listed and filtered by include_regex."""
  conf = {
    "source": "container",
    "container": "library/hello-world",
    "include_regex": "linux",
  }
  assert await get_version("hello-world", conf) == "linux"

async def test_container_with_tag(get_version):
  """With a tag, an update timestamp is returned for that tag."""
  update_time = await get_version("bitnami/mongodb:5.0", {
    "source": "container",
    "container": "bitnami/mongodb:5.0",
  })
  # The timestamp drifts as the image is rebuilt, so only assert a
  # lower bound instead of an exact date.
  assert datetime.date.fromisoformat(update_time.split('T')[0]) > datetime.date(2023, 12, 1)

async def test_container_with_tag_and_multi_arch(get_version):
  """Multi-arch images also report an update timestamp for the tag."""
  update_time = await get_version("hello-world:linux", {
    "source": "container",
    "container": "library/hello-world:linux",
  })
  # Lower bound only; the image is rebuilt occasionally.
  assert datetime.date.fromisoformat(update_time.split('T')[0]) > datetime.date(2023, 1, 1)

async def test_container_with_tag_and_registry(get_version):
  """A non-default registry (quay.io) can be queried."""
  update_time = await get_version("hello-world-nginx:v1.0", {
    "source": "container",
    "registry": "quay.io",
    "container": "redhattraining/hello-world-nginx:v1.0",
  })
  # This tag is not expected to be rebuilt, so the date is exact.
  assert datetime.date.fromisoformat(update_time.split('T')[0]) == datetime.date(2019, 6, 26)

async def test_container_paging(get_version):
  """Tag listing follows pagination to find the latest tag."""
  conf = {
    "source": "container",
    "registry": "quay.io",
    "container": "redhattraining/hello-world-nginx",
  }
  assert await get_version("prometheus-operator", conf) == "v1.0"
nvchecker-2.15.1/tests/test_cpan.py000066400000000000000000000004721462655643400172660ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
async def test_cpan(get_version):
  """CPAN reports the latest release of the named distribution."""
  conf = {"source": "cpan"}
  assert await get_version("POE-Component-Server-HTTPServer", conf) == "0.9.2"
nvchecker-2.15.1/tests/test_cran.py000066400000000000000000000004561462655643400172720ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2022 Pekka Ristola , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
async def test_cran(get_version):
  """CRAN reports the latest release of the named R package."""
  conf = {"source": "cran"}
  assert await get_version("xml2", conf) == "1.3.6"
nvchecker-2.15.1/tests/test_cratesio.py000066400000000000000000000004521462655643400201540ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
async def test_cratesio(get_version):
  """crates.io reports the latest release of the named crate."""
  conf = {"source": "cratesio"}
  assert await get_version("example", conf) == "1.1.0"
nvchecker-2.15.1/tests/test_debianpkg.py000066400000000000000000000015111462655643400202640ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2017 Felix Yan , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
@pytest.mark.flaky(reruns=10)
async def test_debianpkg(get_version):
  """Default suite (sid) version, including the Debian revision."""
  conf = {"source": "debianpkg"}
  assert await get_version("sigrok-firmware-fx2lafw", conf) == "0.1.7-3"

@pytest.mark.flaky(reruns=10)
async def test_debianpkg_strip_release(get_version):
  """strip_release drops the Debian revision."""
  conf = {
    "source": "debianpkg",
    "strip_release": 1,
  }
  assert await get_version("sigrok-firmware-fx2lafw", conf) == "0.1.7"

@pytest.mark.flaky(reruns=10)
async def test_debianpkg_suite(get_version):
  """A specific suite (buster) can be queried."""
  conf = {
    "source": "debianpkg",
    "suite": "buster",
  }
  assert await get_version("sigrok-firmware-fx2lafw", conf) == "0.1.6-1"
nvchecker-2.15.1/tests/test_gems.py000066400000000000000000000004421462655643400172750ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
async def test_gems(get_version):
  """RubyGems reports the latest release of the named gem."""
  conf = {"source": "gems"}
  assert await get_version("example", conf) == "1.0.2"
nvchecker-2.15.1/tests/test_git.py000066400000000000000000000016131462655643400171260ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
async def test_git(get_version):
  """Default mode reports the newest tag of the remote repository."""
  conf = {
    "source": "git",
    "git": "https://gitlab.com/gitlab-org/gitlab-test.git",
  }
  assert await get_version("example", conf) == "v1.1.1"

async def test_git_commit(get_version):
  """use_commit reports the tip commit hash of the default branch."""
  conf = {
    "source": "git",
    "git": "https://gitlab.com/gitlab-org/gitlab-test.git",
    "use_commit": True,
  }
  assert await get_version("example", conf) == "ddd0f15ae83993f5cb66a927a28673882e99100b"

async def test_git_commit_branch(get_version):
  """use_commit with `branch` reports that branch's tip commit."""
  conf = {
    "source": "git",
    "git": "https://gitlab.com/gitlab-org/gitlab-test.git",
    "use_commit": True,
    "branch": "with-executables",
  }
  assert await get_version("example", conf) == "6b8dc4a827797aa025ff6b8f425e583858a10d4f"
nvchecker-2.15.1/tests/test_gitea.py000066400000000000000000000012351462655643400174340ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"),
pytest.mark.needs_net]
@pytest.mark.flaky(reruns=10)
async def test_gitea(get_version):
  """Default mode yields an 8-digit, all-numeric date string."""
  conf = {
    "source": "gitea",
    "gitea": "gitea/tea",
  }
  ver = await get_version("example", conf)
  assert len(ver) == 8
  assert ver.isdigit()

@pytest.mark.flaky(reruns=10)
async def test_gitea_max_tag_with_include(get_version):
  """use_max_tag with include_regex picks the highest matching tag."""
  conf = {
    "source": "gitea",
    "gitea": "gitea/tea",
    "use_max_tag": True,
    "include_regex": r'v0\.3.*',
  }
  assert await get_version("example", conf) == "v0.3.1"
nvchecker-2.15.1/tests/test_github.py000066400000000000000000000051321462655643400176250ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import re
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"),
pytest.mark.needs_net,
pytest.mark.usefixtures('keyfile')]
async def test_github(get_version):
  """Default mode pins a known commit-date version string."""
  conf = {
    "source": "github",
    "github": "harry-sanabria/ReleaseTestRepo",
  }
  assert await get_version("example", conf) == "20140122.012101"

async def test_github_default_not_master(get_version):
  """Repos whose default branch is not `master` still resolve."""
  conf = {
    "source": "github",
    "github": "MariaDB/server",
  }
  assert await get_version("example", conf) is not None

async def test_github_latest_release(get_version):
  """use_latest_release reports the latest release tag."""
  conf = {
    "source": "github",
    "github": "dpeukert/ReleaseTestRepo",
    "use_latest_release": True,
  }
  assert await get_version("example", conf) == "v0.0.0"

async def test_github_latest_release_include_prereleases(get_version):
  """include_prereleases lets a pre-release win over the stable release."""
  conf = {
    "source": "github",
    "github": "dpeukert/ReleaseTestRepo",
    "use_latest_release": True,
    "include_prereleases": True,
  }
  assert await get_version("example", conf) == "v0.0.1-pre"

async def test_github_max_tag(get_version):
  """use_max_tag picks the largest tag."""
  conf = {
    "source": "github",
    "github": "harry-sanabria/ReleaseTestRepo",
    "use_max_tag": True,
  }
  assert await get_version("example", conf) == "second_release"

async def test_github_max_tag_with_ignored(get_version):
  """Tags in `ignored` are excluded before picking the max."""
  conf = {
    "source": "github",
    "github": "harry-sanabria/ReleaseTestRepo",
    "use_max_tag": True,
    "ignored": "second_release release3",
  }
  assert await get_version("example", conf) == "first_release"

async def test_github_with_path(get_version):
  """`path` restricts the lookup to commits touching that path."""
  conf = {
    "source": "github",
    "github": "petronny/ReleaseTestRepo",
    "path": "test_directory",
  }
  assert await get_version("example", conf) == "20140122.012101"

async def test_github_with_path_and_branch(get_version):
  """`path` and `branch` can be combined."""
  conf = {
    "source": "github",
    "github": "petronny/ReleaseTestRepo",
    "branch": "test",
    "path": "test_directory/test_directory",
  }
  assert await get_version("example", conf) == "20190128.113201"

async def test_github_max_tag_with_include(get_version):
  """include_regex filters the tag set before max selection."""
  ver = await get_version("example", {
    "source": "github",
    "github": "EFForg/https-everywhere",
    "use_max_tag": True,
    "include_regex": r"chrome-\d.*",
  })
  assert re.match(r'chrome-[\d.]+', ver)

async def test_github_latest_tag(get_version):
  """use_latest_tag reports the most recent tag."""
  conf = {
    "source": "github",
    "github": "harry-sanabria/ReleaseTestRepo",
    "use_latest_tag": True,
  }
  assert await get_version("example", conf) == "release3"
nvchecker-2.15.1/tests/test_gitlab.py000066400000000000000000000024751462655643400176140ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
async def test_gitlab(get_version):
  """Default mode yields an 8-digit, all-numeric date string."""
  ver = await get_version("example", {
    "source": "gitlab",
    "gitlab": "gitlab-org/gitlab-test",
  })
  assert len(ver) == 8
  assert ver.isdigit()

async def test_gitlab_blm(get_version):
  """A repo with a custom main branch resolves the same way."""
  ver = await get_version("example", {
    "source": "gitlab",
    "gitlab": "asus-linux/asusctl",
  })
  assert len(ver) == 8
  assert ver.isdigit()

async def test_gitlab_max_tag(get_version):
  """use_max_tag picks the highest tag."""
  conf = {
    "source": "gitlab",
    "gitlab": "gitlab-org/gitlab-test",
    "use_max_tag": True,
  }
  assert await get_version("example", conf) == "v1.1.1"

async def test_gitlab_max_tag_with_include(get_version):
  """include_regex narrows the candidate tags."""
  conf = {
    "source": "gitlab",
    "gitlab": "gitlab-org/gitlab-test",
    "use_max_tag": True,
    "include_regex": r'v1\.0.*',
  }
  assert await get_version("example", conf) == "v1.0.0"

async def test_gitlab_max_tag_with_ignored(get_version):
  """ignored tags are skipped when taking the max."""
  conf = {
    "source": "gitlab",
    "gitlab": "gitlab-org/gitlab-test",
    "use_max_tag": True,
    "ignored": "v1.1.0 v1.1.1",
  }
  assert await get_version("example", conf) == "v1.0.0"
nvchecker-2.15.1/tests/test_go.py000066400000000000000000000014531462655643400167520ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2024 bgme .
import pytest
from nvchecker.api import HTTPError
try:
import lxml
lxml_available = True
except ImportError:
lxml_available = False
pytestmark = [
pytest.mark.asyncio(scope="session"),
pytest.mark.needs_net,
pytest.mark.skipif(not lxml_available, reason="needs lxml")
]
async def test_go(get_version):
  """The go source resolves module versions; unknown modules raise HTTPError."""
  single = {
    "source": "go",
    "go": "github.com/caddyserver/replace-response",
  }
  assert await get_version("one version", single) == "v0.0.0-20231221003037-a85d4ddc11d6"
  multiple = {
    "source": "go",
    "go": "github.com/corazawaf/coraza-caddy",
  }
  assert await get_version("multiple version", multiple) == "v1.2.2"
  missing = {
    "source": "go",
    "go": "github.com/asdas/sadfasdf",
  }
  with pytest.raises(HTTPError):
    await get_version("not found", missing)
nvchecker-2.15.1/tests/test_hackage.py000066400000000000000000000005131462655643400177240ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
@pytest.mark.flaky(reruns=10)
async def test_hackage(get_version):
  """Hackage reports the latest release of the named package."""
  conf = {"source": "hackage"}
  assert await get_version("sessions", conf) == "2008.7.18"
nvchecker-2.15.1/tests/test_htmlparser.py000066400000000000000000000015351462655643400205270ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2021 ypsilik , et al.
import pytest
lxml_available = True
try:
import lxml
except ImportError:
lxml_available = False
pytestmark = [
pytest.mark.asyncio(scope="session"),
pytest.mark.needs_net,
pytest.mark.skipif(not lxml_available, reason="needs lxml"),
]
async def test_xpath_ok(get_version):
  """An XPath text() expression yields the version string directly."""
  conf = {
    "source": "htmlparser",
    "url": "https://aur.archlinux.org/",
    "xpath": '//div[@id="footer"]/p[1]/a/text()',
  }
  version = await get_version("aur", conf)
  assert version.startswith('v')
  assert '.' in version

async def test_xpath_element(get_version):
  """An XPath matching an element also yields a usable version."""
  conf = {
    "source": "htmlparser",
    "url": "https://aur.archlinux.org/",
    "xpath": '//div[@id="footer"]/p[1]/a',
  }
  version = await get_version("aur", conf)
  assert version.startswith('v')
  assert '.' in version
nvchecker-2.15.1/tests/test_httpheader.py000066400000000000000000000016651462655643400205020ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2021,2024 lilydjwg , et al.
import pytest
httpbin_available = True
try:
import pytest_httpbin
assert pytest_httpbin # for pyflakes
except ImportError:
httpbin_available = False
pytestmark = pytest.mark.asyncio(scope="session")
@pytest.mark.needs_net
async def test_redirection(get_version):
  """A redirecting download URL still produces a version via `regex`."""
  conf = {
    "source": "httpheader",
    "url": "https://www.unifiedremote.com/download/linux-x64-deb",
    "regex": r'urserver-([\d.]+).deb',
  }
  assert await get_version("unifiedremote", conf) is not None

@pytest.mark.skipif(not httpbin_available, reason="needs pytest_httpbin")
async def test_get_version_withtoken(get_version, httpbin):
  """httptoken authenticates; `header` selects which response header to read."""
  conf = {
    "source": "httpheader",
    "url": httpbin.url + "/basic-auth/username/superpassword",
    "httptoken": "Basic dXNlcm5hbWU6c3VwZXJwYXNzd29yZA==",
    "header": "server",
    "regex": r'([0-9.]+)*',
  }
  assert await get_version("unifiedremote", conf) is not None
nvchecker-2.15.1/tests/test_jq.py000066400000000000000000000014751462655643400167630ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2024 Rocket Aaron , et al.
import pytest
jq_available = True
try:
import jq
except ImportError:
jq_available = False
pytestmark = [
pytest.mark.asyncio(scope="session"),
pytest.mark.needs_net,
pytest.mark.skipif(not jq_available, reason="needs jq"),
]
async def test_jq(get_version):
  """Without a filter, the whole JSON document is returned."""
  raw = await get_version("aur", {
    "source": "jq",
    "url": "https://aur.archlinux.org/rpc/v5/info?arg[]=nvchecker-git"
  })
  raw = raw.strip()
  assert raw.startswith("{")
  assert raw.endswith("}")

async def test_jq_filter(get_version):
  """A jq filter expression extracts a field from the JSON response."""
  conf = {
    "source": "jq",
    "url": "https://aur.archlinux.org/rpc/v5/info?arg[]=nvchecker-git",
    "filter": '.results[0].PackageBase',
  }
  assert await get_version("aur", conf) == "nvchecker-git"
nvchecker-2.15.1/tests/test_manual.py000066400000000000000000000004461462655643400176230ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import pytest
pytestmark = pytest.mark.asyncio(scope="session")
async def test_manual(get_version):
  """The manual source echoes the configured string verbatim."""
  conf = {
    "source": "manual",
    "manual": "Meow",
  }
  assert await get_version("example", conf) == "Meow"
nvchecker-2.15.1/tests/test_mercurial.py000066400000000000000000000005651462655643400203330ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.
import pytest
pytestmark = [
pytest.mark.asyncio(scope="session"),
pytest.mark.needs_net,
]
async def test_mercurial(get_version):
  """The mercurial source reads tags from the repo's json-tags endpoint."""
  conf = {
    "source": "mercurial",
    "mercurial": "https://repo.mercurial-scm.org/hg-website/json-tags",
  }
  assert await get_version("example", conf) == "v1.0"
nvchecker-2.15.1/tests/test_npm.py000066400000000000000000000004401462655643400171320ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
async def test_npm(get_version):
  """npm reports the latest release of the named package."""
  conf = {"source": "npm"}
  assert await get_version("example", conf) == "0.0.0"
nvchecker-2.15.1/tests/test_openvsx.py000066400000000000000000000005161462655643400200460ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2021 Th3Whit3Wolf , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
async def test_openvsx(get_version):
  """Open VSX reports the latest version of the named extension."""
  conf = {"source": "openvsx"}
  assert await get_version("usernamehw.indent-one-space", conf) == "0.3.0"
nvchecker-2.15.1/tests/test_packagist.py000066400000000000000000000005061462655643400203110ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
async def test_packagist(get_version):
  """Packagist reports the latest release of the named Composer package."""
  conf = {"source": "packagist"}
  assert await get_version("butterfly/example-web-application", conf) == "1.2.0"
nvchecker-2.15.1/tests/test_pacman.py000066400000000000000000000013651462655643400176060ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import pathlib
import shutil
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"),
pytest.mark.skipif(shutil.which("pacman") is None,
reason="requires pacman command"),
pytest.mark.skipif(not pathlib.Path("/var/lib/pacman/sync/core.db").exists(),
reason="requires synced pacman databases")]
async def test_pacman(get_version):
  """The local pacman sync database reports pkgver-pkgrel."""
  assert await get_version("base", {"source": "pacman"}) == "3-2"

async def test_pacman_strip_release(get_version):
  """strip_release drops the pkgrel."""
  conf = {
    "source": "pacman",
    "strip_release": 1,
  }
  assert await get_version("base", conf) == "3"
nvchecker-2.15.1/tests/test_pagure.py000066400000000000000000000015051462655643400176260ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 Felix Yan , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
async def test_pagure(get_version):
  """Pagure reports the newest tag of the project."""
  conf = {
    "source": "pagure",
    "pagure": "nvchecker-test",
  }
  assert await get_version("example", conf) == "0.2"

async def test_pagure_with_ignored(get_version):
  """Ignoring the newest version falls back to the next one."""
  conf = {
    "source": "pagure",
    "pagure": "nvchecker-test",
    "ignored": "0.2",
  }
  assert await get_version("example", conf) == "0.1"

async def test_pagure_with_alternative_host(get_version):
  """A different Pagure instance can be selected with `host`."""
  conf = {
    "source": "pagure",
    "pagure": "rpms/glibc",
    "host": "src.fedoraproject.org",
    "include_regex": r"F-\d+-start",
  }
  assert await get_version("example", conf) == "F-13-start"
nvchecker-2.15.1/tests/test_pypi.py000066400000000000000000000016441462655643400173300ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
async def test_pypi(get_version):
  """The entry name doubles as the PyPI project name."""
  assert await get_version("example", {"source": "pypi"}) == "0.1.0"

async def test_pypi_release(get_version):
  """An explicit `pypi` key names the project directly."""
  conf = {
    "source": "pypi",
    "pypi": "example-test-package",
  }
  assert await get_version("example-test-package", conf) == "1.0.0"

async def test_pypi_pre_release(get_version):
  """use_pre_release allows alpha/beta releases to be reported."""
  conf = {
    "source": "pypi",
    "use_pre_release": 1,
  }
  assert await get_version("example-test-package", conf) == "1.0.1a1"

async def test_pypi_list(get_version):
  """include_regex picks the newest version matching the pattern."""
  conf = {
    "source": "pypi",
    "include_regex": "^1\\..*",
  }
  assert await get_version("urllib3", conf) == "1.26.18"

async def test_pypi_invalid_version(get_version):
  """A project with non-standard version strings must not raise."""
  await get_version("sympy", {
    "source": "pypi",
  })
nvchecker-2.15.1/tests/test_regex.py000066400000000000000000000102061462655643400174530ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020,2024 lilydjwg , et al.
import base64
import pytest
httpbin_available = True
try:
import pytest_httpbin
assert pytest_httpbin # for pyflakes
except ImportError:
httpbin_available = False
pytestmark = [
pytest.mark.asyncio(scope="session"),
pytest.mark.skipif(not httpbin_available, reason="needs pytest_httpbin"),
]
def base64_encode(s):
  """Encode a text string into its Base64 representation (ASCII str)."""
  raw = s.encode('utf-8')
  return base64.b64encode(raw).decode('ascii')
async def test_regex_httpbin_default_user_agent(get_version, httpbin):
  """Requests identify themselves as nvchecker by default."""
  ua = await get_version("example", {
    "source": "regex",
    "url": httpbin.url + "/get",
    "regex": r'"User-Agent":\s*"([^"]+)"',
  })
  assert ua.startswith("lilydjwg/nvchecker")

async def test_regex_httpbin_user_agent(get_version, httpbin):
  """user_agent overrides the default User-Agent header."""
  conf = {
    "source": "regex",
    "url": httpbin.url + "/get",
    "regex": r'"User-Agent":\s*"(\w+)"',
    "user_agent": "Meow",
  }
  assert await get_version("example", conf) == "Meow"

async def test_regex(get_version, httpbin):
  """The first capture group of `regex` becomes the version."""
  conf = {
    "source": "regex",
    "url": httpbin.url + "/base64/" + base64_encode("version 1.12 released"),
    "regex": r'version ([0-9.]+)',
  }
  assert await get_version("example", conf) == "1.12"

async def test_missing_ok(get_version, httpbin):
  """With missing_ok, a non-matching regex yields None instead of an error."""
  conf = {
    "source": "regex",
    "url": httpbin.url + "/base64/" + base64_encode("something not there"),
    "regex": "foobar",
    "missing_ok": True,
  }
  assert await get_version("example", conf) is None

async def test_missing(get_version, httpbin):
  """Without missing_ok, a non-matching regex raises RuntimeError."""
  conf = {
    "source": "regex",
    "url": httpbin.url + "/base64/" + base64_encode("something not there"),
    "regex": "foobar",
  }
  with pytest.raises(RuntimeError):
    await get_version("example", conf)

async def test_multi_group(get_version, httpbin):
  """A regex with more than one capture group is rejected."""
  conf = {
    "source": "regex",
    "url": httpbin.url + "/base64/" + base64_encode("1.2"),
    "regex": r"(\d+)\.(\d+)",
  }
  with pytest.raises(RuntimeError):
    await get_version("example", conf)

async def test_regex_with_tokenBasic(get_version, httpbin):
  """httptoken supplies HTTP Basic credentials."""
  conf = {
    "source": "regex",
    "url": httpbin.url + "/basic-auth/username/superpassword",
    "httptoken": "Basic dXNlcm5hbWU6c3VwZXJwYXNzd29yZA==",
    "regex": r'"user":\s*"([a-w]+)"',
  }
  assert await get_version("example", conf) == "username"

async def test_regex_with_tokenBearer(get_version, httpbin):
  """httptoken supplies a Bearer token."""
  conf = {
    "source": "regex",
    "url": httpbin.url + "/bearer",
    "httptoken": "Bearer username:password",
    "regex": r'"token":\s*"([a-w]+):.*"',
  }
  assert await get_version("example", conf) == "username"

async def test_regex_no_verify_ssl(get_version, httpbin_secure):
  """verify_cert=False allows self-signed HTTPS endpoints."""
  conf = {
    "source": "regex",
    "url": httpbin_secure.url + "/base64/" + base64_encode("version 1.12 released"),
    "regex": r'version ([0-9.]+)',
    "verify_cert": False,
  }
  assert await get_version("example", conf) == "1.12"

async def test_regex_bad_ssl(get_version, httpbin_secure):
  """By default, an untrusted certificate must cause a failure."""
  conf = {
    "source": "regex",
    "url": httpbin_secure.url + "/base64/" + base64_encode("version 1.12 released"),
    "regex": r'version ([0-9.]+)',
  }
  # Any exception counts as "certificate not trusted"; success is a bug.
  with pytest.raises(Exception):
    await get_version("example", conf)

async def test_regex_post(get_version, httpbin):
  """post_data is sent as a form body; httpbin echoes the fields back."""
  conf = {
    "source": "regex",
    "url": httpbin.url + "/post",
    "regex": r'"ABCDEF":\s*"(\w+)"',
    "post_data": "ABCDEF=234&CDEFG=xyz"
  }
  assert await get_version("example", conf) == "234"

async def test_regex_post2(get_version, httpbin):
  """Same form POST, matching the other echoed field."""
  conf = {
    "source": "regex",
    "url": httpbin.url + "/post",
    "regex": r'"CDEFG":\s*"(\w+)"',
    "post_data": "ABCDEF=234&CDEFG=xyz"
  }
  assert await get_version("example", conf) == "xyz"

async def test_regex_post_json(get_version, httpbin):
  """post_data_type switches the request body to JSON."""
  conf = {
    "source": "regex",
    "url": httpbin.url + "/post",
    "regex": r'"ABCDEF":\s*(\w+)',
    "post_data": '{"ABCDEF":234,"CDEFG":"xyz"}',
    "post_data_type": "application/json"
  }
  assert await get_version("example", conf) == "234"

async def test_regex_post_json2(get_version, httpbin):
  """JSON POST, matching the string-valued field."""
  conf = {
    "source": "regex",
    "url": httpbin.url + "/post",
    "regex": r'"CDEFG":\s*"(\w+)"',
    "post_data": '{"ABCDEF":234,"CDEFG":"xyz"}',
    "post_data_type": "application/json"
  }
  assert await get_version("example", conf) == "xyz"
nvchecker-2.15.1/tests/test_repology.py000066400000000000000000000021131462655643400201770ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2019-2020 lilydjwg , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"),
pytest.mark.needs_net]
@pytest.mark.flaky(reruns=10)
async def test_repology(get_version):
  """Repology reports the version packaged in the given repo."""
  conf = {
    "source": "repology",
    "repo": "aur",
  }
  assert await get_version("ssed", conf) == "3.62"

@pytest.mark.flaky(reruns=10)
async def test_repology_subrepo(get_version):
  """A subrepo within the repo can be selected."""
  conf = {
    "source": "repology",
    "repo": "fedora_32",
    "subrepo": "release"
  }
  assert await get_version("asciiquarium", conf) == "1.1"

async def test_repology_bad_subrepo(get_version):
  """An unknown subrepo raises a descriptive RuntimeError."""
  try:
    assert await get_version("asciiquarium", {
      "source": "repology",
      "repo": "fedora_32",
      "subrepo": "badsubrepo"
    }) is None
  except RuntimeError as e:
    assert "package is not found in subrepo" in str(e)

async def test_repology_no_repo(get_version):
  """Omitting the mandatory repo field raises RuntimeError."""
  try:
    assert await get_version("ssed", {
      "source": "repology",
    }) is None
  except RuntimeError as e:
    assert "repo field is required" in str(e)
nvchecker-2.15.1/tests/test_simplerun.py000066400000000000000000000006531462655643400203640ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2022 lilydjwg , et al.
import sys
import tempfile
import subprocess
def test_simple_run():
  """Smoke test: nvchecker as a whole can process a minimal config file."""
  config_text = '''\
[t]
source = "cmd"
cmd = "echo 1"
'''
  with tempfile.NamedTemporaryFile(mode='w') as conf_file:
    conf_file.write(config_text)
    conf_file.flush()
    cmdline = [
      sys.executable, '-m', 'nvchecker',
      '-c', conf_file.name,
    ]
    subprocess.check_call(cmdline)
nvchecker-2.15.1/tests/test_sortversion.py000066400000000000000000000012531462655643400207400ustar00rootroot00000000000000import pytest
from nvchecker.sortversion import (
parse_version,
vercmp, vercmp_available,
AwesomeVersion, awesomeversion_available,
)
def test_parse_version():
  """parse_version ignores a 'v' prefix and sorts pre-releases lower."""
  assert parse_version("v6.0") < parse_version("6.1")
  assert parse_version("v6.0") > parse_version("v6.1-stable")

@pytest.mark.skipif(not vercmp_available, reason="needs pyalpm")
def test_vercmp():
  """pyalpm's vercmp orders 6.1-stable above v6.0 (pacman semantics)."""
  assert vercmp("v6.0") < vercmp("v6.1-stable")

@pytest.mark.skipif(not awesomeversion_available, reason="needs awesomeversion")
def test_awesomeversion():
  """AwesomeVersion handles 'v' prefixes and beta suffixes."""
  assert AwesomeVersion("v6.0") < AwesomeVersion("6.1")
  assert AwesomeVersion("v6.0") > AwesomeVersion("v6.0b0")
nvchecker-2.15.1/tests/test_sparkle.py000066400000000000000000000010661462655643400200060ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020 lilydjwg , et al.
# Copyright (c) 2020 Sunlei
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
async def test_sparkle(get_version):
  """The version is read from a Sparkle appcast XML feed."""
  appcast = (
    'https://raw.githubusercontent.com/sparkle-project/Sparkle/'
    'f453625573fc9a251760b65c74df59023b1471c1/Tests/Resources/'
    'testlocalizedreleasenotesappcast.xml'
  )
  conf = {
    'source': 'sparkle',
    'sparkle': appcast,
  }
  assert await get_version('example', conf) == '6.0'
nvchecker-2.15.1/tests/test_substitute.py000066400000000000000000000027721462655643400205650ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2020 lilydjwg , et al.
import pytest
pytestmark = pytest.mark.asyncio(scope="session")
async def test_substitute_prefix(get_version):
assert await get_version("example", {
"source": "manual",
"manual": "v1.0",
"prefix": "v",
}) == "1.0"
async def test_substitute_prefix_missing_ok(get_version):
assert await get_version("example", {
"source": "manual",
"manual": "1.0",
"prefix": "v",
}) == "1.0"
async def test_substitute_regex(get_version):
assert await get_version("example", {
"source": "manual",
"manual": "r15c",
"from_pattern": r"r(\d+)([a-z])",
"to_pattern": r"r\1.\2",
}) == "r15.c"
async def test_substitute_regex_missing_ok(get_version):
assert await get_version("example", {
"source": "manual",
"manual": "r15",
"from_pattern": r"r(\d+)([a-z])",
"to_pattern": r"r\1.\2",
}) == "r15"
async def test_substitute_regex_empty_to_pattern(get_version):
assert await get_version("example", {
"source": "manual",
"manual": "15-debian",
"from_pattern": r"-\w+$",
"to_pattern": r"",
}) == "15"
async def test_substitute_prefix_has_higher_priority(get_version):
    # The prefix is stripped first, then the regex substitution runs on
    # the remainder — both transformations apply in that order.
    conf = {
        "source": "manual",
        "manual": "Version 1.2 Beta 3",
        "prefix": "Version ",
        "from_pattern": r" Beta ",
        "to_pattern": r"b",
    }
    result = await get_version("example", conf)
    assert result == "1.2b3"
nvchecker-2.15.1/tests/test_ubuntupkg.py000066400000000000000000000020221462655643400203620ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2020,2024 lilydjwg , et al.
# Copyright (c) 2017 Felix Yan , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
@pytest.mark.flaky
async def test_ubuntupkg(get_version):
    # Only the upstream part is stable; the Debian revision after the
    # dash may change, so match the prefix rather than the full string.
    version = await get_version("sigrok-firmware-fx2lafw", {
        "source": "ubuntupkg",
    })
    assert version.startswith("0.1.7-")
@pytest.mark.flaky
async def test_ubuntupkg_strip_release(get_version):
    # strip_release drops the packaging revision, leaving only the
    # upstream version.
    conf = {
        "source": "ubuntupkg",
        "strip_release": True,
    }
    result = await get_version("sigrok-firmware-fx2lafw", conf)
    assert result == "0.1.7"
@pytest.mark.flaky
async def test_ubuntupkg_suite(get_version):
    # Pinning the suite to a frozen release (xenial) yields a fixed
    # package version.
    conf = {
        "source": "ubuntupkg",
        "suite": "xenial",
    }
    result = await get_version("sigrok-firmware-fx2lafw", conf)
    assert result == "0.1.2-1"
@pytest.mark.flaky
async def test_ubuntupkg_suite_with_paging(get_version):
    # ffmpeg has many publication entries, so finding the xenial match
    # requires the source to follow paginated API results.
    conf = {
        "source": "ubuntupkg",
        "suite": "xenial",
    }
    result = await get_version("ffmpeg", conf)
    assert result == "7:2.8.17-0ubuntu0.1"
nvchecker-2.15.1/tests/test_vsmarketplace.py000066400000000000000000000005321462655643400212030ustar00rootroot00000000000000# MIT licensed
# Copyright (c) 2013-2021 Th3Whit3Wolf , et al.
import pytest
pytestmark = [pytest.mark.asyncio(scope="session"), pytest.mark.needs_net]
async def test_vsmarketplace(get_version):
    # Extension is unmaintained at 1.0.0, so the expected value is stable.
    conf = {"source": "vsmarketplace"}
    result = await get_version("usernamehw.indent-one-space", conf)
    assert result == "1.0.0"
nvchecker-2.15.1/tox.ini000066400000000000000000000004321462655643400151010ustar00rootroot00000000000000[tox]
isolated_build = True
# you may find `tox --skip-missing-interpreters=true` helpful.
envlist = py3{8,9,10,11,12}
[testenv]
usedevelop = false
deps =
pytest
pytest-asyncio
pytest-httpbin
flaky
extras =
htmlparser
passenv = KEYFILE
commands = pytest -r fEs {posargs}