Compare commits
10 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
|
0995cbe960
|
|||
|
b498049703
|
|||
|
81b9413a2a
|
|||
|
962c214664
|
|||
|
8320df3bc5
|
|||
|
51c58c1899
|
|||
|
bb152b2112
|
|||
|
423ec61cad
|
|||
|
5c800652d8
|
|||
|
42d607ba5d
|
@@ -1,6 +1,3 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
if __name__ == "__main__":
|
||||
from pkgcrap.cli import main
|
||||
main()
|
||||
|
||||
@@ -1,13 +1,16 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from sys import argv
|
||||
from pkgcrap.list_update import main as update
|
||||
from pkgcrap.list_update import maintlist as maintlist
|
||||
from pkgcrap.outdated_check import main as outdated
|
||||
from pkgcrap.forge_scan import main as forgescan
|
||||
from pkgcrap.website import main as website
|
||||
|
||||
options = {
|
||||
'update': update,
|
||||
'outdated': outdated,
|
||||
'maintlist': maintlist,
|
||||
'forgescan': forgescan,
|
||||
'site': website,
|
||||
}
|
||||
|
||||
def main():
|
||||
|
||||
42
pkgcrap/forge_scan.py
Normal file
42
pkgcrap/forge_scan.py
Normal file
@@ -0,0 +1,42 @@
|
||||
from pkgcrap.util import conf_file_path
|
||||
import pkgcrap.parse as parse
|
||||
from urllib.parse import urlparse
|
||||
|
||||
def main(args):
    """Scan every configured repo and report which forges host the packages.

    For each package, resolve EGIT_REPO_URI (expanding the substitution
    variables the ebuilds use), extract the host, and tally hits per forge.
    Prints a per-forge count and percentage at the end.

    args: CLI argument list (unused, kept for the dispatch interface).
    """
    repos = parse.repos()
    repos.load()
    checked = 0
    failed = 0
    forges = {}
    for repo in repos.repos.values():
        repo.load()
        print('Scanning', repo.name)
        for cat in repo.categories.values():
            cat.load()
            for pkg in cat.packages.values():
                pkg.load()
                if len(pkg.ebuilds) == 0 or 'EGIT_REPO_URI' not in pkg.ebuilds[0].vars:
                    failed += 1
                    continue
                eb = pkg.ebuilds[0]
                # FIX: use a distinct name instead of rebinding the outer
                # loop variable `repo` (the repository object) with a string.
                uri = eb.vars['EGIT_REPO_URI'].replace('${PN}', pkg.name)
                # Expand the substitution variables ebuilds commonly embed.
                if 'HOMEPAGE' in eb.vars:
                    uri = uri.replace('${HOMEPAGE}', eb.vars['HOMEPAGE'])
                if 'EGO_PN' in eb.vars:
                    uri = uri.replace('${EGO_PN}', eb.vars['EGO_PN'])
                if 'MY_REPO_URI' in eb.vars:
                    uri = uri.replace('${MY_REPO_URI}', eb.vars['MY_REPO_URI'])
                forge = urlparse(uri).netloc
                # An empty netloc or an unexpanded ${...} means we could not
                # resolve the URI to a concrete host.
                if forge == '' or forge.startswith('${'):
                    failed += 1
                    continue
                forges[forge] = forges.get(forge, 0) + 1
                checked += 1
    print('Found git URI in '+str(checked)+' packages')
    print('Failed to find git URI in '+str(failed)+' packages')
    # Most popular forge first. The loop only runs when at least one forge
    # was counted, so `checked` is non-zero in the division below.
    for forge, c in sorted(forges.items(), key=lambda item: item[1], reverse=True):
        p = float(c)/checked*100
        print(forge+': '+str(c)+f' ({p:.2f}%)')
|
||||
@@ -1,16 +1,14 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from pkgcrap.util import conf_file_path
|
||||
import pkgcrap.parse as parse
|
||||
|
||||
def packages_scan(maintainer):
|
||||
def packages_scan(maintainer, verbose=False):
|
||||
repos = parse.repos()
|
||||
repos.load()
|
||||
packages = []
|
||||
for repo in repos.repos.values():
|
||||
repo.load()
|
||||
print('Scanning', repo.name)
|
||||
if verbose:
|
||||
print('Scanning', repo.name)
|
||||
for cat in repo.categories.values():
|
||||
cat.load()
|
||||
for pkg in cat.packages.values():
|
||||
@@ -22,11 +20,19 @@ def packages_scan(maintainer):
|
||||
pass
|
||||
return packages
|
||||
|
||||
def maintlist(args):
    """Print the full name of every package maintained by the given address.

    args: CLI argument list; args[0] must be the maintainer's email address.
    """
    if not args:
        print("Specify the maintainer's email address")
        return
    for pkg in packages_scan(args[0]):
        print(pkg.full_name)
|
||||
|
||||
def main(args):
    """Rebuild the cached maintained-package list for one maintainer.

    Scans all repos (verbosely) for packages maintained by args[0] and
    writes their full names, one per line, to the 'maintained.txt' config
    file.
    """
    if not args:
        print('Specify your maintainer email address')
        return
    names = [pkg.full_name for pkg in packages_scan(args[0], verbose=True)]
    with open(conf_file_path('maintained.txt'), 'w') as fp:
        fp.write("\n".join(names))
    print('Updated maintained package list')
||||
@@ -1,37 +1,42 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import urllib.request
|
||||
import pkgcrap.parse as parse
|
||||
from pkgcrap.util import maintained_packages_get
|
||||
import json
|
||||
from pkgcrap.util import blacklisted_tags_get, maintained_packages_get
|
||||
from functools import cmp_to_key
|
||||
from libversion import version_compare
|
||||
|
||||
def guess_repo(uri):
    """Guess a (kind, location) repo tuple from a URI's prefix.

    Known git forges yield ('git', clone_url) where clone_url is the first
    five path components of the URI, optionally with a '.git' suffix; PyPI
    mirror URIs yield ('pypi', project_name). Returns None when no known
    forge prefix matches.
    """
    # (kind, URL prefix, append '.git' suffix?)
    forges = [
        ('git', 'https://github.com', True),
        ('git', 'https://gitlab.com', True),
        ('git', 'https://git.sr.ht', False),
        ('pypi', 'mirror://pypi', False),
    ]
    for kind, prefix, dotgit in forges:
        if not uri.startswith(prefix):
            continue
        if kind == 'git':
            suffix = '.git' if dotgit else ''
            return (kind, '/'.join(uri.split('/')[:5])+suffix)
        if kind == 'pypi':
            # mirror://pypi/<initial>/<project>/... -> project name
            return (kind, uri.split('/')[4])
    return None
|
||||
|
||||
def repo_from_metadata(pkg):
|
||||
if len(pkg.metadata.remotes) == 0:
|
||||
return None
|
||||
remote = pkg.metadata.remotes[0]
|
||||
if remote[0] == 'github':
|
||||
return "https://github.com/"+remote[1]+".git"
|
||||
if remote[0] == 'gitlab':
|
||||
return "https://gitlab.com/"+remote[1]+".git"
|
||||
return None
|
||||
def repos_from_metadata(pkg):
    """Collect (kind, location) remote candidates from package metadata.

    Maps metadata remotes to ('git', clone_url) for github/gitlab and
    ('pypi', project_name) for pypi. Unknown remote types are skipped.
    Returns a (possibly empty) list, preserving metadata order.
    """
    remotes = []
    for remote in pkg.metadata.remotes:
        # Remote types are mutually exclusive per entry, so use elif.
        if remote[0] == 'github':
            remotes.append(('git', 'https://github.com/'+remote[1]+'.git'))
        elif remote[0] == 'gitlab':
            remotes.append(('git', 'https://gitlab.com/'+remote[1]+'.git'))
        elif remote[0] == 'pypi':
            remotes.append(('pypi', remote[1]))
    return remotes
|
||||
|
||||
def repo_from_egit_uri(pkg):
|
||||
def repo_from_repo_uri(pkg):
    """Derive a (kind, uri) tuple from the first ebuild's VCS variables.

    Checks EGIT_REPO_URI (git) then EHG_REPO_URI (hg), expanding ${PN}
    with the package name. Returns None when neither variable is set.
    """
    if 'EGIT_REPO_URI' in pkg.ebuilds[0].vars:
        return ('git', pkg.ebuilds[0].vars['EGIT_REPO_URI'].replace('${PN}', pkg.name))
    if 'EHG_REPO_URI' in pkg.ebuilds[0].vars:
        return ('hg', pkg.ebuilds[0].vars['EHG_REPO_URI'].replace('${PN}', pkg.name))
    # Explicit None fall-through (was implicit).
    return None
||||
|
||||
def repo_from_src_uri(pkg):
|
||||
for eb in pkg.ebuilds:
|
||||
@@ -43,31 +48,38 @@ def repo_from_homepage(pkg):
|
||||
if 'HOMEPAGE' in eb.vars:
|
||||
return guess_repo(eb.vars['HOMEPAGE'])
|
||||
|
||||
def repo_from_pkg(pkg):
|
||||
repo = repo_from_metadata(pkg)
|
||||
if repo is None:
|
||||
repo = repo_from_egit_uri(pkg)
|
||||
if repo is None:
|
||||
repo = repo_from_src_uri(pkg)
|
||||
if repo is None:
|
||||
repo = repo_from_homepage(pkg)
|
||||
return repo
|
||||
def repos_from_pkg(pkg):
|
||||
repos = []
|
||||
repos += repos_from_metadata(pkg)
|
||||
repos.append(repo_from_repo_uri(pkg))
|
||||
repos.append(repo_from_src_uri(pkg))
|
||||
repos.append(repo_from_homepage(pkg))
|
||||
return [repo for repo in repos if repo != None]
|
||||
|
||||
def repo_get_latest(repo, pkg):
|
||||
def repo_get_latest_git(uri, pkg, verbose):
|
||||
global blacklisted_tags
|
||||
try:
|
||||
r = urllib.request.urlopen(repo+'/info/refs?service=git-upload-pack')
|
||||
r = urllib.request.urlopen(uri+'/info/refs?service=git-upload-pack')
|
||||
except urllib.error.HTTPError:
|
||||
print(pkg.full_name+':', 'Invalid repo!', repo)
|
||||
if verbose:
|
||||
print(pkg.full_name+':', 'Invalid repo!', uri)
|
||||
return None
|
||||
tags = []
|
||||
for line in reversed(r.read().decode('utf-8').split("\n")):
|
||||
if line[4:44] in blacklisted_tags:
|
||||
continue
|
||||
line = line.split(' ')[-1].split("\t")[-1]
|
||||
if line[:10] == 'refs/tags/' and line[-3:] != '^{}':
|
||||
tag = line[10:]
|
||||
if tag in blacklisted_tags:
|
||||
continue
|
||||
if tag.startswith(pkg.name):
|
||||
tag = tag[len(pkg.name):]
|
||||
if tag.startswith('version'):
|
||||
tag = tag[7:]
|
||||
version_prefixes = ['version', 'release']
|
||||
for prefix in version_prefixes:
|
||||
if tag.startswith(prefix):
|
||||
tag = tag[len(prefix):]
|
||||
break
|
||||
if not tag[0].isdigit() and tag[1].isdigit():
|
||||
tag = tag[1:]
|
||||
tags.append(tag)
|
||||
@@ -78,26 +90,73 @@ def repo_get_latest(repo, pkg):
|
||||
return None
|
||||
return latest
|
||||
|
||||
def repo_get_latest_hg(uri, pkg, verbose):
    """Fetch the newest tag of a Mercurial repo by reading its .hgtags file.

    Returns the last non-blacklisted tag listed (hg appends newest last),
    or None when the repo is unreachable or no usable tag exists.
    """
    global blacklisted_tags  # populated by the caller before the scan
    # Forge-specific raw-file path prefixes; the default suits plain hgweb.
    forges = [
        ('https://hg.sr.ht/', '/raw/'),
    ]
    raw_path = '/raw-file/tip/'
    for forge in forges:
        if uri.startswith(forge[0]):
            raw_path = forge[1]
    try:
        r = urllib.request.urlopen(uri+raw_path+'.hgtags')
    except urllib.error.HTTPError:
        if verbose:
            print(pkg.full_name+':', 'Invalid repo!', uri)
        return None
    # .hgtags lines look like "<40-char node> <tag>".
    # FIX: initialize `latest` — previously it was unbound (UnboundLocalError)
    # when every line was blacklisted or the file was empty.
    latest = None
    for line in r.read().decode('utf-8').split("\n"):
        tag = line[41:]
        if line[:40] in blacklisted_tags or tag in blacklisted_tags:
            continue
        latest = tag
    return latest
|
||||
|
||||
def repo_get_latest_pypi(uri, pkg, verbose):
    """Return the newest release version of a PyPI project.

    uri is the PyPI project name. Returns None when the project does not
    exist (HTTP error from the JSON API).
    """
    try:
        response = urllib.request.urlopen('https://pypi.org/pypi/'+uri+'/json')
    except urllib.error.HTTPError:
        if verbose:
            print(pkg.full_name+':', 'Invalid PyPI package!', uri)
        return None
    releases = json.load(response)['releases']
    # Order with libversion's comparator so e.g. '1.10' sorts after '1.9'.
    ordered = sorted(releases.keys(), key=cmp_to_key(version_compare))
    return ordered[-1]
|
||||
|
||||
def repo_get_latest(repo, pkg, verbose):
    """Dispatch to the right latest-version fetcher for a repo tuple.

    repo is a (kind, location) tuple as produced by repos_from_pkg;
    unknown kinds yield None.
    """
    fetchers = {
        'git': repo_get_latest_git,
        'hg': repo_get_latest_hg,
        'pypi': repo_get_latest_pypi,
    }
    fetch = fetchers.get(repo[0])
    if fetch is None:
        return None
    return fetch(repo[1], pkg, verbose)
|
||||
|
||||
def outdated_check(pkg, verbose=False):
    """Report whether pkg lags behind its upstream repos.

    Tries each candidate repo in order until one yields a latest version,
    then compares it against the newest packaged version. Prints a line
    when the package is outdated; with verbose=True also reports skips,
    misses, and up-to-date packages.
    """
    # A lone live ebuild tracks upstream by definition — nothing to compare.
    if len(pkg.ebuilds) == 1 and pkg.ebuilds[0].live:
        if verbose:
            print(pkg.full_name+':', 'Only has live ebuild')
        return
    repos = repos_from_pkg(pkg)
    if len(repos) == 0:
        if verbose:
            print(pkg.full_name+':', 'Repo not found')
        return
    current = pkg.version_latest()
    latest = None
    for repo in repos:
        if verbose:
            print(pkg.full_name+':', 'Checking '+repo[0]+' repo ('+repo[1]+')')
        latest = repo_get_latest(repo, pkg, verbose)
        if latest is not None:
            break
    if latest is None:
        if verbose:
            print(pkg.full_name+':', 'Unable to find latest version!')
        return
    # version_compare returns -1 when the first argument is older.
    if version_compare(current, latest) == -1:
        print(pkg.full_name+':', 'Outdated package!', current, '->', latest)
    elif verbose:
        print(pkg.full_name+':', 'Up to date!', current, '->', latest)
|
||||
|
||||
def main(args):
|
||||
if 'all' in args:
|
||||
@@ -106,6 +165,8 @@ def main(args):
|
||||
else:
|
||||
packages = [parse.package.from_path('.')]
|
||||
verbose = True
|
||||
global blacklisted_tags
|
||||
blacklisted_tags = blacklisted_tags_get()
|
||||
for pkg in packages:
|
||||
pkg.load()
|
||||
outdated_check(pkg, verbose=verbose)
|
||||
|
||||
@@ -1,6 +1,3 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
from os import listdir
|
||||
from os.path import abspath, isfile, dirname
|
||||
from portage.versions import cpv_sort_key
|
||||
|
||||
@@ -1,12 +1,9 @@
|
||||
#!/usr/bin/env python3
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
import os
|
||||
import pkgcrap.parse as parse
|
||||
|
||||
def conf_dir_path():
|
||||
conf_home = '~/.config'
|
||||
if os.environ:
|
||||
if 'XDG_CONFIG_HOME' in os.environ:
|
||||
conf_home = os.environ['XDG_CONFIG_HOME']
|
||||
if conf_home[0] == '~':
|
||||
conf_home = os.environ['HOME']+'/'+conf_home[1:]
|
||||
@@ -34,3 +31,12 @@ def maintained_packages_get():
|
||||
cat.load()
|
||||
packages.append(cat.packages[pkg_info[1]])
|
||||
return packages
|
||||
|
||||
def blacklisted_tags_get():
    """Load the tag blacklist from the 'eviltags.txt' config file.

    Each non-comment line contributes everything before the first space
    as one blacklist entry. Lines starting with '#' are comments.
    NOTE(review): blank lines produce '' entries; callers appear to rely
    on '' matching the empty tail of .hgtags output — confirm before
    filtering them out.
    """
    tags = []
    with open(conf_file_path('eviltags.txt'), 'r') as fp:
        for line in fp.read().split('\n'):
            if line.startswith('#'):
                continue
            tags.append(line.split(' ', 1)[0])
    return tags
|
||||
|
||||
43
pkgcrap/website.py
Normal file
43
pkgcrap/website.py
Normal file
@@ -0,0 +1,43 @@
|
||||
import webbrowser
|
||||
import pkgcrap.parse as parse
|
||||
|
||||
def repo2site(repo):
    """Derive a browsable web URL from a git clone URL by dropping one trailing '.git'."""
    # str.removesuffix is a no-op when the suffix is absent (3.9+; the file
    # already requires 3.10+ for match/case).
    return repo.removesuffix('.git')
|
||||
|
||||
def stripslash(url):
    """Remove a single trailing slash from a URL, if present."""
    return url.removesuffix('/')
|
||||
|
||||
def main(args):
|
||||
pkg = parse.package.from_path('.')
|
||||
pkg.load()
|
||||
|
||||
sites = list()
|
||||
if len(pkg.ebuilds) > 0:
|
||||
if 'HOMEPAGE' in pkg.ebuilds[0].vars:
|
||||
sites.append(stripslash(pkg.ebuilds[0].vars['HOMEPAGE']))
|
||||
if 'EGIT_REPO_URI' in pkg.ebuilds[0].vars:
|
||||
sites.append(repo2site(pkg.ebuilds[0].vars['EGIT_REPO_URI']))
|
||||
for remote in pkg.metadata.remotes:
|
||||
if remote[0] == 'github':
|
||||
sites.append('https://github.com/'+remote[1])
|
||||
if remote[0] == 'gitlab':
|
||||
sites.append('https://gitlab.com/'+remote[1])
|
||||
if remote[0] == 'pypi':
|
||||
sites.append('https://pypi.org/project/'+remote[1]+'/')
|
||||
|
||||
sites = sorted(list(set(sites)))
|
||||
match len(sites):
|
||||
case 0:
|
||||
print('No sites found :-(')
|
||||
exit(1)
|
||||
case 1:
|
||||
print('Opening '+sites[0])
|
||||
webbrowser.open(sites[0])
|
||||
case _:
|
||||
print('Sites:')
|
||||
for i in range(len(sites)):
|
||||
print(str(i+1)+') '+sites[i])
|
||||
pick = input('Pick one: ')
|
||||
pick = sites[int(pick)-1]
|
||||
print('Opening '+pick)
|
||||
webbrowser.open(pick)
|
||||
Reference in New Issue
Block a user