Skip to content

Commit

Permalink
Formatting
Browse files Browse the repository at this point in the history
  • Loading branch information
dvershinin committed Sep 27, 2023
1 parent b924c8d commit c8607d1
Show file tree
Hide file tree
Showing 15 changed files with 127 additions and 87 deletions.
10 changes: 7 additions & 3 deletions lastversion/BitBucketRepoSession.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,8 +8,10 @@ class BitBucketRepoSession(ProjectHolder):
KNOWN_REPO_URLS = {
'mmonit.com': {
'repo': 'tildeslash/monit',
# get URL from the official website because it is a "prepared" source
'release_url_format': "https://mmonit.com/{name}/dist/{name}-{version}.tar.gz"
# get URL from the official website because it is a "prepared"
# source that has the `./configure` script available
'release_url_format': "https://mmonit.com/{name}/dist/{name}-"
"{version}.tar.gz"
}
}

Expand All @@ -26,7 +28,9 @@ def __init__(self, repo, hostname):

def get_latest(self, pre_ok=False, major=None):
"""Get the latest release."""
response = self.get(f"https://api.bitbucket.org/2.0/repositories/{self.repo}/downloads")
response = self.get(
f"https://api.bitbucket.org/2.0/repositories/{self.repo}/downloads"
)
data = response.json()
release = data['values'][0]
version = self.sanitize_version(release['name'], pre_ok, major)
Expand Down
14 changes: 9 additions & 5 deletions lastversion/FeedRepoSession.py
Original file line number Diff line number Diff line change
Expand Up @@ -66,9 +66,9 @@ def __init__(self, repo, hostname):
def get_latest(self, pre_ok=False, major=None):
"""Get the latest release."""
ret = None
# To leverage `cachecontrol`, we fetch the feed using requests as usual,
# then feed the feed to feedparser as a raw string
# e.g. https://hg.nginx.org/nginx/atom-tags
# To leverage `cachecontrol`, we fetch the feed using requests as
# usual, then feed the feed to feedparser as a raw string e.g.
# https://hg.nginx.org/nginx/atom-tags
# https://pythonhosted.org/feedparser/common-atom-elements.html
r = self.get(self.feed_url)
feed = feedparser.parse(r.text)
Expand All @@ -83,7 +83,11 @@ def get_latest(self, pre_ok=False, major=None):
tag['version'] = version
if 'published_parsed' in tag:
# converting from struct
tag['tag_date'] = datetime.datetime(*tag['published_parsed'][:6])
tag['tag_date'] = datetime.datetime(
*tag['published_parsed'][:6]
)
elif 'updated_parsed' in tag:
tag['tag_date'] = datetime.datetime(*tag['updated_parsed'][:6])
tag['tag_date'] = datetime.datetime(
*tag['updated_parsed'][:6]
)
return ret
15 changes: 7 additions & 8 deletions lastversion/GitHubRepoSession.py
Original file line number Diff line number Diff line change
Expand Up @@ -198,7 +198,7 @@ def get(self, url, **kwargs):
f'API requests were denied after retrying {self.rate_limited_count} times'
)
remaining = int(r.headers['X-RateLimit-Remaining'])
# 1 sec to account for skewed clock between GitHub and client
# One sec to account for skewed clock between GitHub and client
wait_for = float(r.headers['X-RateLimit-Reset']) - time.time() + 1.0
wait_for = math.ceil(wait_for)
if not remaining:
Expand Down Expand Up @@ -405,13 +405,12 @@ def get_formal_release_for_tag(self, tag):
def find_in_tags(self, ret, pre_ok, major):
"""
Find a more recent release in the /tags API endpoint.
Finding in /tags requires paging through ALL of them because the API does not list them
in order of recency, thus this is very slow.
We need to check all tags commit dates simply because the most recent wins
We don't check tags which:
* marked pre-release in releases endpoints
* has a beta-like, non-version tag name
Finding in `/tags` requires paging through ALL of them because the API
does not list them in order of recency, thus this is very slow.
We need to check all tags' commit dates because the most recent wins.
We don't check tags which are:
* marked pre-release in releases endpoints
* have a beta-like, non-version tag name
# in: current release to be returned, output: newer release to be returned
"""
Expand Down
36 changes: 22 additions & 14 deletions lastversion/GiteaRepoSession.py
Original file line number Diff line number Diff line change
Expand Up @@ -45,8 +45,8 @@ class GiteaRepoSession(ProjectHolder):
2) likely has been accessed by someone in CDN and thus faster
3) provides more or less unique filenames once the stuff is downloaded
See https://fedoraproject.org/wiki/Packaging:SourceURL#Git_Tags
We use variation of this: it does not need a parsed version (thus works for --pre better)
and it is not broken on fancy release tags like v1.2.3-stable
We use a variation of this: it does not need a parsed version (works for
--pre better) and it is not broken on fancy release tags like v1.2.3-stable
https://github.com/OWNER/PROJECT/archive/%{git_tag}/%{git_tag}-%{version}.tar.gz
"""
RELEASE_URL_FORMAT = "https://{hostname}/{repo}/archive/{tag}.{ext}"
Expand All @@ -67,7 +67,7 @@ def find_repo_by_name_only(self, repo):
log.info("Found %s in repo short name cache", repo)
if not cache[repo]['repo']:
raise BadProjectError(
'No project found on GitHub for search query: {}'.format(repo)
f'No project found on GitHub for search query: {repo}'
)
# return cache[repo]['repo']
except TypeError:
Expand All @@ -80,11 +80,13 @@ def find_repo_by_name_only(self, repo):
}
)
if r.status_code == 404:
# when not found, skip using this holder in the factory by not setting self.repo
# when not found, skip using this holder in the factory by not
# setting self.repo
return None
if r.status_code != 200:
raise BadProjectError(
f'Error while identifying full repository on GitHub for search query: {repo}'
f'Error while identifying full repository on GitHub for '
f'search query: {repo}'
)
data = r.json()
full_name = ''
Expand Down Expand Up @@ -133,7 +135,10 @@ def __init__(self, repo, hostname):
else:
repo = self.find_repo_by_name_only(repo)
if repo:
log.info('Using repo %s obtained from search API', self.repo)
log.info(
'Using repo %s obtained from search API',
self.repo
)
else:
return
self.set_repo(repo)
Expand All @@ -147,18 +152,21 @@ def get(self, url, **kwargs):
log.info('Got HTTP status code %s from %s', r.status_code, url)
if r.status_code == 401:
if self.api_token:
raise ApiCredentialsError('API request was denied despite using an API token. '
'Missing scopes?')
raise ApiCredentialsError('Denied API access. Please set GITHUB_API_TOKEN env var '
'as per https://github.com/dvershinin/lastversion#tips')
raise ApiCredentialsError(
'API request was denied despite using an API token. '
'Missing scopes?')
raise ApiCredentialsError(
'Denied API access. Please set GITHUB_API_TOKEN env var as '
'per https://github.com/dvershinin/lastversion#tips')
if r.status_code == 403 and 'X-RateLimit-Reset' in r.headers \
and 'X-RateLimit-Remaining' in r.headers:
if self.rate_limited_count > 2:
raise ApiCredentialsError(
f'API requests were denied after retrying {self.rate_limited_count} times'
f'API requests were denied after retrying '
f'{self.rate_limited_count} times'
)
remaining = int(r.headers['X-RateLimit-Remaining'])
# 1 sec to account for skewed clock between GitHub and client
# One sec to account for skewed clock between GitHub and client
wait_for = float(r.headers['X-RateLimit-Reset']) - time.time() + 1.0
wait_for = math.ceil(wait_for)
if not remaining:
Expand Down Expand Up @@ -261,7 +269,6 @@ def get_latest(self, pre_ok=False, major=None):
Strive to fetch formal API release if it exists, because it has useful information
like assets.
"""

if self.having_asset:
# only formal releases which we enumerated above already, have assets,
# so there is no point looking in the tags/graphql below
Expand Down Expand Up @@ -315,7 +322,8 @@ def try_get_official(self, repo):
official_repo = f"{repo}/{repo}"
log.info('Checking existence of %s', official_repo)
r = self.get(f'https://{self.hostname}/{official_repo}/releases.atom')
# API requests are varied by cookie, we don't want serializer for cache fail because of that
# API requests are varied by cookie, we don't want serializer for
# cache fail because of that
self.cookies.clear()
if r.status_code == 200:
self.feed_contents[official_repo] = r.text
Expand Down
4 changes: 3 additions & 1 deletion lastversion/HelmChartRepoSession.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,7 +24,9 @@ def get_latest(self, pre_ok=False, major=None):
url = self.url
host = urlparse(url).hostname
if host in ['github.com']:
url = url.replace('github.com', 'raw.githubusercontent.com').replace('/blob/', '/')
url = url.replace(
'github.com', 'raw.githubusercontent.com'
).replace('/blob/', '/')
r = self.get(url)
chart_data = yaml.safe_load(r.text)
return {
Expand Down
12 changes: 9 additions & 3 deletions lastversion/HolderFactory.py
Original file line number Diff line number Diff line change
Expand Up @@ -49,14 +49,20 @@ def guess_from_homepage(repo, hostname):
"""
# repo auto-discovery failed for detected/default provider
# now we simply try website provider based on the hostname/RSS feeds in HTML or GitHub links
# now we simply try website provider based on the hostname/RSS feeds
# in HTML or GitHub links
holder = FeedRepoSession(repo, hostname)
if not holder.is_valid():
# re-use soup from the feed holder object
log.info('Have not found any RSS feed for the website %s', hostname)
log.info(
'Have not found any RSS feed for the website %s',
hostname
)
github_link = holder.home_soup.select_one("a[href*='github.com']")
if github_link:
hostname, repo = GitHubRepoSession.get_host_repo_for_link(github_link['href'])
hostname, repo = GitHubRepoSession.get_host_repo_for_link(
github_link['href']
)
holder = GitHubRepoSession(repo, hostname)
return holder

Expand Down
12 changes: 9 additions & 3 deletions lastversion/ProjectHolder.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,6 +4,7 @@
import os
import platform
import re

import requests
from appdirs import user_cache_dir
from cachecontrol import CacheControlAdapter
Expand Down Expand Up @@ -160,12 +161,17 @@ def set_having_asset(self, having_asset):
log.info('Only considering releases with asset "%s"', having_asset)
return self

@staticmethod
def is_link(repo):
"""Check if repo is a link."""
return repo.startswith(('https://', 'http://'))

@classmethod
def get_host_repo_for_link(cls, repo):
"""Return hostname and repo from a link."""
hostname = None
# return repo modified to result of extraction
if repo.startswith(('https://', 'http://')):
if cls.is_link(repo):
# parse hostname for passing to whatever holder selected
url_parts = repo.split('/')
hostname = url_parts[2]
Expand All @@ -176,7 +182,7 @@ def get_host_repo_for_link(cls, repo):
@classmethod
def is_official_for_repo(cls, repo):
"""Check if repo is a known repo for this type of project holder."""
if repo.startswith(('https://', 'http://')):
if cls.is_link(repo):
for url in cls.KNOWN_REPO_URLS:
if repo.startswith((url, f"https://{url}", f"http://{url}")):
log.info('%s Starts with %s', repo, url)
Expand All @@ -190,7 +196,7 @@ def is_official_for_repo(cls, repo):
@classmethod
def get_matching_hostname(cls, repo):
"""Find matching hostname between repo and holder's default hostname."""
if not repo.startswith(('http://', 'https://')):
if not cls.is_link(repo):
return None
if not cls.DEFAULT_HOSTNAME and not cls.SUBDOMAIN_INDICATOR:
return None
Expand Down
59 changes: 35 additions & 24 deletions lastversion/Version.py
Original file line number Diff line number Diff line change
@@ -1,6 +1,7 @@
"""Version class for lastversion"""
import re
from datetime import datetime

from packaging.version import Version as PackagingVersion, InvalidVersion


Expand All @@ -10,25 +11,35 @@ class Version(PackagingVersion):
scheme defined in PEP 440. A `Version` instance is comparison-aware and
can be compared and sorted using the standard Python interfaces.
This class is descendant from Version found in `packaging.version`,
and implements some additional, "AI"-like normalization during instantiation.
This class is a descendant of `Version` found in `packaging.version`,
and implements some additional normalization during instantiation.
Args:
version (str): The string representation of a version which will be
parsed and normalized before use.
Raises:
InvalidVersion: If the ``version`` does not conform to PEP 440 in
any way then this exception will be raised.
InvalidVersion: If the `version` does not conform to PEP 440 in
any way, then this exception will be raised.
"""

# Precompile the regular expressions
rc_pattern = re.compile(r'^rc(\d+)\.')
post_pattern = re.compile(r'^p(\d+)$')

part_to_pypi_dict = {
'devel': 'dev0',
'test': 'dev0',
'dev': 'dev0',
'alpha': 'a0',
'beta': 'b0'
}

def fix_letter_post_release(self, match):
self.fixed_letter_post_release = True
return match.group(1) + '.post' + str(ord(match.group(2)))

def is_semver(self):
"""
Check if this a semantic version or a shorthand of semantic version
"""
"""Check if this a (shorthand) semantic version"""
return self.base_version.count('.') >= 1

@staticmethod
Expand All @@ -39,30 +50,30 @@ def part_to_pypi(part):
Helps devel releases to be correctly identified
See https://www.python.org/dev/peps/pep-0440/#developmental-releases
"""
if part in ['devel', 'test', 'dev']:
part = 'dev0'
elif part in ['alpha']:
# "4.3.0-alpha"
part = 'a0'
elif part in ['beta']:
# "4.3.0-beta"
part = 'b0'
# if part starts with rc<num>., discard non-relevant info while preserving RC level
elif re.search('^rc(\\d+)\\.', part):
# Lookup in the dictionary
if part in Version.part_to_pypi_dict:
return Version.part_to_pypi_dict[part]

# Check for rc patterns
rc_match = Version.rc_pattern.search(part)
if rc_match:
# rc2.windows.1 => rc2.post1
sub_parts = part.split('.')
part = sub_parts[0]
for sub in sub_parts[1:]:
if sub.isdigit():
# use first numeric as post-release to RC
part += ".post" + sub
else:
# help post (patch) releases to be correctly identified (e.g. Magento 2.3.4-p2)
# p12 => post12
part = re.sub('^p(\\d+)$', 'post\\1', part, 1)
return part

# Check for the post-patterns
post_match = Version.post_pattern.sub(r'post\1', part)
if post_match != part:
return post_match

# If the part contains only alphabets, set it to None
if part.isalpha():
# it's meaningless to us if it has only letters
part = None
return None

return part

@staticmethod
Expand Down
4 changes: 3 additions & 1 deletion lastversion/WikipediaRepoSession.py
Original file line number Diff line number Diff line change
Expand Up @@ -107,7 +107,9 @@ def get_latest(self, pre_ok=False, major=None):
# remove alphas from beginning
tag_name = remove_words(tag_name).split('/', maxsplit=1)[0]
# Remove unicode stuff (for Python 2)
tag['title'] = release_data.text.encode("ascii", "ignore").decode()
tag['title'] = release_data.text.encode(
"ascii", "ignore"
).decode()
log.info('Pre-parsed title: %s', tag['title'])
break
if not tag_name:
Expand Down
8 changes: 5 additions & 3 deletions lastversion/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
from .__about__ import (
__version__,
)
# We intentionally import for export here, so it is ok to silence DeepSource test
# Intentionally import for export here, so it is ok to silence DeepSource test
# skipcq: PY-W2000
from .lastversion import __self__
# skipcq: PY-W2000
Expand All @@ -24,9 +24,11 @@
from .lastversion import main

# https://realpython.com/python-logging-source-code/#library-vs-application-logging-what-is-nullhandler
# when used as a library, we default to opt-in approach, whereas library user has to enable logging
# When used as a library, we default to opt-in approach, whereas library user
# has to enable logging
# from lastversion
logging.getLogger(__name__).addHandler(logging.NullHandler())
# patch up https://github.com/ionrock/cachecontrol/issues/230
logging.getLogger('cachecontrol.controller').addHandler(logging.NullHandler())
logging.getLogger('pip._vendor.cachecontrol.controller').addHandler(logging.NullHandler())
logging.getLogger('pip._vendor.cachecontrol.controller').addHandler(
logging.NullHandler())
1 change: 1 addition & 0 deletions lastversion/argparse_version.py
Original file line number Diff line number Diff line change
@@ -1,3 +1,4 @@
"""Provides a custom argparse action to show program's version and exit."""
import sys as _sys
from argparse import SUPPRESS, Action

Expand Down
Loading

0 comments on commit c8607d1

Please sign in to comment.