Skip to content

Commit

Permalink
Fixes for Python 3+
Browse files Browse the repository at this point in the history
  • Loading branch information
dvershinin committed Sep 24, 2023
1 parent ffab446 commit 68ab6e0
Show file tree
Hide file tree
Showing 19 changed files with 136 additions and 120 deletions.
8 changes: 4 additions & 4 deletions README-ZH-CN.md
Original file line number Diff line number Diff line change
Expand Up @@ -460,7 +460,7 @@ export GITLAB_PA_TOKEN=xxxxxxxxxxxxxxx
from lastversion import lastversion
latest_version = lastversion.has_update(repo="mautic/mautic", current_version='1.2.3')
if latest_version:
print('Newer Mautic version is available: {}'.format(str(latest_version)))
print(f'Newer Mautic version is available: {latest_version}')
else:
print('No update is available')
```
Expand All @@ -475,7 +475,7 @@ else:
from lastversion import lastversion
latest_version = lastversion.has_update(repo="requests", at='pip', current_version='1.2.3')
if latest_version:
print('Newer Requests library is available: {}'.format(str(latest_version)))
print(f'Newer Requests library is available: {latest_version}')
else:
print('No update is available')
```
Expand All @@ -493,7 +493,7 @@ from packaging import version

latest_mautic_version = lastversion.latest("mautic/mautic", output_format='version', pre_ok=True)

print('Latest Mautic version: {}'.format(str(latest_mautic_version)))
print(f'Latest Mautic version: {latest_mautic_version}')

if latest_mautic_version >= version.parse('1.8.1'):
print('It is newer')
Expand Down Expand Up @@ -525,4 +525,4 @@ if latest_mautic_version >= version.parse('1.8.1'):
* `pre_ok`,布尔值,表示预发布版本是否可以作为最新版本。
* `at`,该项目所在的平台,取值仅可能为 `github`、`gitlab`、`bitbucket`、`pip`、`hg`、`sf`、`website-feed`、`local`

[![DeepSource](https://static.deepsource.io/deepsource-badge-light.svg)](https://deepsource.io/gh/dvershinin/lastversion/?ref=repository-badge)
[![DeepSource](https://static.deepsource.io/deepsource-badge-light.svg)](https://deepsource.io/gh/dvershinin/lastversion/?ref=repository-badge)
6 changes: 3 additions & 3 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -639,7 +639,7 @@ You can use `lastversion.has_update(...)` to find whether an update for existing
from lastversion import has_update
latest_version = has_update(repo="mautic/mautic", current_version='1.2.3')
if latest_version:
print('Newer Mautic version is available: {}'.format(str(latest_version)))
print(f'Newer Mautic version is available: {latest_version}')
else:
print('No update is available')
```
Expand All @@ -656,7 +656,7 @@ library:
from lastversion import has_update
latest_version = has_update(repo="requests", at='pip', current_version='1.2.3')
if latest_version:
print('Newer Requests library is available: {}'.format(str(latest_version)))
print(f'Newer Requests library is available: {latest_version}')
else:
print('No update is available')
```
Expand All @@ -675,7 +675,7 @@ from packaging import version

latest_mautic_version = latest("mautic/mautic", output_format='version', pre_ok=True)

print('Latest Mautic version: {}'.format(str(latest_mautic_version)))
print(f'Latest Mautic version: {latest_mautic_version}')

if latest_mautic_version >= version.parse('1.8.1'):
print('It is newer')
Expand Down
3 changes: 1 addition & 2 deletions lastversion/BitBucketRepoSession.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,8 +26,7 @@ def __init__(self, repo, hostname):

def get_latest(self, pre_ok=False, major=None):
"""Get the latest release."""
response = self.get("https://api.bitbucket.org/2.0/repositories/{}/downloads".format(
self.repo))
response = self.get(f"https://api.bitbucket.org/2.0/repositories/{self.repo}/downloads")
data = response.json()
release = data['values'][0]
version = self.sanitize_version(release['name'], pre_ok, major)
Expand Down
4 changes: 2 additions & 2 deletions lastversion/FeedRepoSession.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,7 @@ def find_feed(self, site):
if href:
possible_feeds.append(href)
parsed_url = urlparse(site)
base = "{}://{}".format(parsed_url.scheme, parsed_url.hostname)
base = f"{parsed_url.scheme}://{parsed_url.hostname}"
a_tags = html.findAll("a")
for a in a_tags:
href = a.get("href", None)
Expand Down Expand Up @@ -66,7 +66,7 @@ def __init__(self, repo, hostname):
def get_latest(self, pre_ok=False, major=None):
"""Get the latest release."""
ret = None
# To leverage cachecontrol, we fetch the feed using requests as usual,
# To leverage `cachecontrol`, we fetch the feed using requests as usual,
# then feed the feed to feedparser as a raw string
# e.g. https://hg.nginx.org/nginx/atom-tags
# https://pythonhosted.org/feedparser/common-atom-elements.html
Expand Down
70 changes: 34 additions & 36 deletions lastversion/GitHubRepoSession.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,6 +87,7 @@ class GitHubRepoSession(ProjectHolder):
SHORT_RELEASE_URL_FORMAT = "https://{hostname}/{repo}/archive/{tag}.{ext}"

def find_repo_by_name_only(self, repo):
# noinspection HttpUrlsUsage
if repo.startswith(('https://', 'http://')):
return None
cache = self.get_name_cache()
Expand All @@ -96,17 +97,17 @@ def find_repo_by_name_only(self, repo):
log.info("Found %s in repo short name cache", repo)
if not cache[repo]['repo']:
raise BadProjectError(
'No project found on GitHub for search query: {}'.format(repo)
f'No project found on GitHub for search query: {repo}'
)
return cache[repo]['repo']
except TypeError:
pass

log.info("Making query against GitHub API to search repo %s", repo)
r = self.get(
'{}/search/repositories'.format(self.api_base),
f'{self.api_base}/search/repositories',
params={
'q': "{} in:name".format(repo),
'q': f"{repo} in:name",
'sort': 'stars',
'per_page': 1
}
Expand All @@ -116,9 +117,7 @@ def find_repo_by_name_only(self, repo):
return None
if r.status_code != 200:
raise BadProjectError(
'Error while identifying full repository on GitHub for search query: {}. Status: {}'.format(
repo, r.status_code
)
f'Error while identifying full repository on GitHub for search query: {repo}. Status: {r.status_code}'
)
data = r.json()
full_name = ''
Expand All @@ -133,7 +132,7 @@ def find_repo_by_name_only(self, repo):

if not full_name:
raise BadProjectError(
'No project found on GitHub for search query: {}'.format(repo)
f'No project found on GitHub for search query: {repo}'
)
return full_name

Expand All @@ -150,7 +149,7 @@ def __init__(self, repo, hostname):
if token:
self.api_token = token
log.info('Using API token %s.', var_name)
self.headers.update({'Authorization': "token {}".format(self.api_token)})
self.headers.update({'Authorization': f"token {self.api_token}"})
break
if not self.api_token:
log.info('No API token found in environment variables %s.', self.TOKEN_ENV_VARS)
Expand All @@ -163,9 +162,9 @@ def __init__(self, repo, hostname):
})

if self.hostname != self.DEFAULT_HOSTNAME:
self.api_base = 'https://{}/api/v3'.format(self.hostname)
self.api_base = f'https://{self.hostname}/api/v3'
else:
self.api_base = 'https://api.{}'.format(self.DEFAULT_HOSTNAME)
self.api_base = f'https://api.{self.DEFAULT_HOSTNAME}'
if '/' not in repo:
official_repo = self.try_get_official(repo)
if official_repo:
Expand All @@ -180,7 +179,7 @@ def __init__(self, repo, hostname):
self.set_repo(repo)

def get_rate_limit_url(self):
return '{}/rate_limit'.format(self.api_base)
return f'{self.api_base}/rate_limit'

def get(self, url, **kwargs):
"""Send GET request and account for GitHub rate limits and such."""
Expand All @@ -196,8 +195,7 @@ def get(self, url, **kwargs):
and 'X-RateLimit-Remaining' in r.headers:
if self.rate_limited_count > 2:
raise ApiCredentialsError(
'API requests were denied after retrying {} times'.format(
self.rate_limited_count)
f'API requests were denied after retrying {self.rate_limited_count} times'
)
remaining = int(r.headers['X-RateLimit-Remaining'])
# 1 sec to account for skewed clock between GitHub and client
Expand All @@ -212,9 +210,9 @@ def get(self, url, **kwargs):
'be reinstated'
)
else:
w = 'Waiting {} seconds for API quota reinstatement.'.format(wait_for)
w = f'Waiting {wait_for} seconds for API quota reinstatement.'
if not self.api_token:
w = "{} {}".format(w, TOKEN_PRO_TIP)
w = f"{w} {TOKEN_PRO_TIP}"
log.warning(w)
time.sleep(wait_for)
self.rate_limited_count = self.rate_limited_count + 1
Expand All @@ -230,28 +228,28 @@ def get(self, url, **kwargs):
return r

def rate_limit(self):
url = '{}/rate_limit'.format(self.api_base)
url = f'{self.api_base}/rate_limit'
return self.get(url)

def repo_query(self, uri):
"""API query for a repository"""
url = '{}/repos/{}{}'.format(self.api_base, self.repo, uri)
url = f'{self.api_base}/repos/{self.repo}{uri}'
return self.get(url)

def repo_license(self, tag):
r = self.repo_query('/license?ref={}'.format(tag))
r = self.repo_query(f'/license?ref={tag}')
if r.status_code == 200:
# unfortunately, unlike /readme, API always returns *latest* license, ignoring tag
# we have to double-check whether the license file exists "at release tag"
license_data = r.json()
license_path = license_data['path']
license_r = self.repo_query('/contents/{}?ref={}'.format(license_path, tag))
license_r = self.repo_query(f'/contents/{license_path}?ref={tag}')
if license_r.status_code == 200:
return license_data
return None

def repo_readme(self, tag):
r = self.repo_query('/readme?ref={}'.format(tag))
r = self.repo_query(f'/readme?ref={tag}')
if r.status_code == 200:
return r.json()
return None
Expand Down Expand Up @@ -331,15 +329,15 @@ def find_in_tags_via_graphql(self, ret, pre_ok, major):
owner, name = self.repo.split('/')
query = query_fmt % (owner, name, cursor)
log.info('Running query %s', query)
r = self.post('{}/graphql'.format(self.api_base), json={'query': query})
r = self.post(f'{self.api_base}/graphql', json={'query': query})
log.info('Requested graphql with cursor "%s"', cursor)
if r.status_code != 200:
log.info("query returned non 200 response code %s", r.status_code)
return ret
j = r.json()
if 'errors' in j and j['errors'][0]['type'] == 'NOT_FOUND':
raise BadProjectError(
'No such project found on GitHub: {}'.format(self.repo)
f'No such project found on GitHub: {self.repo}'
)
if not j['data']['repository']['tags']['edges']:
log.info('No tags in GraphQL response: %s', r.text)
Expand Down Expand Up @@ -398,7 +396,7 @@ def get_formal_release_for_tag(self, tag):
self.ensure_formal_releases_fetched()
# no releases in /releases means no
if self.formal_releases_by_tag and tag not in self.formal_releases_by_tag:
r = self.repo_query('/releases/tags/{}'.format(tag))
r = self.repo_query(f'/releases/tags/{tag}')
if r.status_code == 200:
self.formal_releases_by_tag[tag] = r.json()

Expand All @@ -407,7 +405,7 @@ def get_formal_release_for_tag(self, tag):
def find_in_tags(self, ret, pre_ok, major):
"""
Find a more recent release in the /tags API endpoint.
Finding in /tags requires paging through ALL of them, because the API does not list them
Finding in /tags requires paging through ALL of them because the API does not list them
in order of recency, thus this is very slow.
We need to check all tags commit dates simply because the most recent wins
We don't check tags which:
Expand All @@ -430,7 +428,7 @@ def find_in_tags(self, ret, pre_ok, major):
version = self.sanitize_version(tag_name, pre_ok, major)
if not version:
continue
c = self.repo_query('/git/commits/{}'.format(t['commit']['sha']))
c = self.repo_query(f'/git/commits/{t["commit"]["sha"]}')
d = c.json()['committer']['date']
d = parser.parse(d)

Expand Down Expand Up @@ -458,7 +456,7 @@ def get_releases_feed_contents(self, rename_checked=False):
The `releases.atom` feed includes non-formal releases which are just tags, so we are good.
Based on testing, edited old releases don't jump forward in the list and stay behind (good).
The only downside is they don't bear pre-release mark (unlike API), and have limited data.
We work around these by checking pre-release flag and get full release data via API.
We work around these by checking the pre-release flag and get full release data via API.
"""
if self.repo in self.feed_contents:
return self.feed_contents[self.repo]
Expand All @@ -468,11 +466,11 @@ def get_releases_feed_contents(self, rename_checked=False):
# authorization header may cause a false positive 200 response with an empty feed!
'Authorization': ''
}
r = self.get('https://{}/{}/releases.atom'.format(self.hostname, self.repo), headers=headers)
r = self.get(f'https://{self.hostname}/{self.repo}/releases.atom', headers=headers)
# API requests are varied by cookie, we don't want serializer for cache fail because of that
self.cookies.clear()
if r.status_code == 404 and not rename_checked:
# #44: in some network locations, GitHub returns 404 (as opposed to 301 redirect) for the renamed
# #44: in some network locations, GitHub returns 404 (as opposed to a 301 redirect) for the renamed
# repositories /releases.atom. When we get a 404, we lazily load repo info via API, and hopefully
# get redirect there as well as the new repo full name
r = self.repo_query('')
Expand Down Expand Up @@ -587,7 +585,7 @@ def get_latest(self, pre_ok=False, major=None):
log.info("Selected version as current selection: %s.", version)

# we are good with release from feeds only without looking at the API
# simply because feeds list stuff in order of recency
# simply because feeds list stuff in order of recency,
# however, still use /tags unless releases.atom has data within a year
if ret and ret['tag_date'].replace(tzinfo=None) > (datetime.utcnow() - timedelta(days=365)):
return self.enrich_release_info(ret)
Expand All @@ -598,7 +596,7 @@ def get_latest(self, pre_ok=False, major=None):
# this may be required in cases
# releases.atom has limited/no tags (#63), and all those are beta / invalid / non-versions
# likewise, we want an older branch (major), which is not there in releases.atom
# due to limited nature of data inside it
# due to the limited nature of data inside it

self.ensure_formal_releases_fetched()
for tag_name in self.formal_releases_by_tag:
Expand All @@ -610,13 +608,13 @@ def get_latest(self, pre_ok=False, major=None):
ret = self.set_matching_formal_release(ret, release, version, pre_ok)

if self.having_asset:
# only formal releases which we enumerated above already, have assets
# only formal releases which we enumerated above already, have assets,
# so there is no point looking in the tags/graphql below
# return whatever we got
return self.enrich_release_info(ret)

# formal release may not exist at all, or be "late/old" in case
# actual release is only a simple tag so let's try /tags
# actual release is only a simple tag, so let's try /tags
if self.api_token:
# GraphQL requires auth
ret = self.find_in_tags_via_graphql(ret, pre_ok, major)
Expand All @@ -627,7 +625,7 @@ def get_latest(self, pre_ok=False, major=None):

def set_matching_formal_release(self, ret, formal_release, version, pre_ok,
data_type='release'):
"""Set current release selection to this formal release if matching conditions."""
"""Set the current release selection to this formal release if matching conditions."""
if not pre_ok and formal_release['prerelease']:
log.info(
"Found formal release for this tag which is unwanted "
Expand Down Expand Up @@ -658,14 +656,14 @@ def set_matching_formal_release(self, ret, formal_release, version, pre_ok,
return formal_release

def try_get_official(self, repo):
"""Check existence of repo/repo
"""Check the existence of repo/repo
Returns:
str: updated repo
"""
official_repo = "{repo}/{repo}".format(repo=repo)
official_repo = f"{repo}/{repo}"
log.info('Checking existence of %s', official_repo)
r = self.get('https://{}/{}/releases.atom'.format(self.hostname, official_repo))
r = self.get(f'https://{self.hostname}/{official_repo}/releases.atom')
# API requests are varied by cookie, we don't want serializer for cache fail because of that
self.cookies.clear()
if r.status_code == 200:
Expand Down
10 changes: 5 additions & 5 deletions lastversion/GitLabRepoSession.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,15 +26,15 @@ def __init__(self, repo, hostname):
self.hostname = self.DEFAULT_HOSTNAME
if self.pa_token:
log.info('Using Personal Access token.')
self.headers.update({'Private-Token': "{}".format(self.pa_token)})
self.api_base = 'https://{}/api/v4'.format(self.hostname)
self.headers.update({'Private-Token': self.pa_token})
self.api_base = f'https://{self.hostname}/api/v4'
self.set_repo(repo)
self.repo_id = self.repo.replace('/', '%2F')
# lazy loaded dict cache of /releases response keyed by tag, only first page
self.formal_releases_by_tag = None

def repo_query(self, uri):
url = '{}/projects/{}{}'.format(self.api_base, self.repo_id, uri)
url = f'{self.api_base}/projects/{self.repo_id}{uri}'
return self.get(url)

def ensure_formal_releases_fetched(self):
Expand All @@ -54,7 +54,7 @@ def get_formal_release_for_tag(self, tag):
self.ensure_formal_releases_fetched()
# no releases in /releases means no
if self.formal_releases_by_tag and tag not in self.formal_releases_by_tag:
r = self.repo_query('/releases/{}'.format(tag))
r = self.repo_query(f'/releases/{tag}')
if r.status_code == 200:
self.formal_releases_by_tag[tag] = r.json()

Expand All @@ -64,7 +64,7 @@ def get_latest(self, pre_ok=False, major=None):
"""Get the latest release."""
ret = None

# gitlab returns tags by updated in desc order, this is just what we want :)
# gitlab returns tags by updated in desc order; this is just what we want :)
r = self.repo_query('/repository/tags')
if r.status_code == 200:
for t in r.json():
Expand Down
Loading

0 comments on commit 68ab6e0

Please sign in to comment.