add watch scripts for PyPI projects

John McQuah 2023-02-11 20:54:05 -05:00
parent 5859eecca0
commit c92af8eedf
15 changed files with 300 additions and 0 deletions

python3-agate-dbf/.watch Executable file

@@ -0,0 +1,20 @@
#!/usr/bin/env python3
# Print the latest upstream release; the first <h1> on the PyPI page reads "<name> <version>".
from subprocess import Popen, PIPE
from bs4 import BeautifulSoup as bs

def get_html(url):
    try:
        curlpipe = Popen(["/usr/bin/curl", "-s", "-L", url], stdout=PIPE)
    except OSError:
        print("- Request error: unable to retrieve {}".format(url))
        return None
    curltup = curlpipe.communicate()
    return curltup[0]

page = get_html("https://pypi.org/project/agate-dbf/")
if page is None:
    raise SystemExit(1)
soup = bs(page, 'lxml')
for h in soup.body.find_all('h1')[:1]:
    print(h.get_text().split()[-1])

python3-agate-excel/.watch Executable file

@@ -0,0 +1,20 @@
#!/usr/bin/env python3
# Print the latest upstream release; the first <h1> on the PyPI page reads "<name> <version>".
from subprocess import Popen, PIPE
from bs4 import BeautifulSoup as bs

def get_html(url):
    try:
        curlpipe = Popen(["/usr/bin/curl", "-s", "-L", url], stdout=PIPE)
    except OSError:
        print("- Request error: unable to retrieve {}".format(url))
        return None
    curltup = curlpipe.communicate()
    return curltup[0]

page = get_html("https://pypi.org/project/agate-excel/")
if page is None:
    raise SystemExit(1)
soup = bs(page, 'lxml')
for h in soup.body.find_all('h1')[:1]:
    print(h.get_text().split()[-1])

python3-agate-sql/.watch Executable file

@@ -0,0 +1,20 @@
#!/usr/bin/env python3
# Print the latest upstream release; the first <h1> on the PyPI page reads "<name> <version>".
from subprocess import Popen, PIPE
from bs4 import BeautifulSoup as bs

def get_html(url):
    try:
        curlpipe = Popen(["/usr/bin/curl", "-s", "-L", url], stdout=PIPE)
    except OSError:
        print("- Request error: unable to retrieve {}".format(url))
        return None
    curltup = curlpipe.communicate()
    return curltup[0]

page = get_html("https://pypi.org/project/agate-sql/")
if page is None:
    raise SystemExit(1)
soup = bs(page, 'lxml')
for h in soup.body.find_all('h1')[:1]:
    print(h.get_text().split()[-1])

python3-agate/.watch Executable file

@@ -0,0 +1,20 @@
#!/usr/bin/env python3
# Print the latest upstream release; the first <h1> on the PyPI page reads "<name> <version>".
from subprocess import Popen, PIPE
from bs4 import BeautifulSoup as bs

def get_html(url):
    try:
        curlpipe = Popen(["/usr/bin/curl", "-s", "-L", url], stdout=PIPE)
    except OSError:
        print("- Request error: unable to retrieve {}".format(url))
        return None
    curltup = curlpipe.communicate()
    return curltup[0]

page = get_html("https://pypi.org/project/agate/")
if page is None:
    raise SystemExit(1)
soup = bs(page, 'lxml')
for h in soup.body.find_all('h1')[:1]:
    print(h.get_text().split()[-1])

python3-csvkit/.watch Executable file

@@ -0,0 +1,20 @@
#!/usr/bin/env python3
# Print the latest upstream release; the first <h1> on the PyPI page reads "<name> <version>".
from subprocess import Popen, PIPE
from bs4 import BeautifulSoup as bs

def get_html(url):
    try:
        curlpipe = Popen(["/usr/bin/curl", "-s", "-L", url], stdout=PIPE)
    except OSError:
        print("- Request error: unable to retrieve {}".format(url))
        return None
    curltup = curlpipe.communicate()
    return curltup[0]

page = get_html("https://pypi.org/project/csvkit/")
if page is None:
    raise SystemExit(1)
soup = bs(page, 'lxml')
for h in soup.body.find_all('h1')[:1]:
    print(h.get_text().split()[-1])

python3-dbfread/.watch Executable file

@@ -0,0 +1,20 @@
#!/usr/bin/env python3
# Print the latest upstream release; the first <h1> on the PyPI page reads "<name> <version>".
from subprocess import Popen, PIPE
from bs4 import BeautifulSoup as bs

def get_html(url):
    try:
        curlpipe = Popen(["/usr/bin/curl", "-s", "-L", url], stdout=PIPE)
    except OSError:
        print("- Request error: unable to retrieve {}".format(url))
        return None
    curltup = curlpipe.communicate()
    return curltup[0]

page = get_html("https://pypi.org/project/dbfread/")
if page is None:
    raise SystemExit(1)
soup = bs(page, 'lxml')
for h in soup.body.find_all('h1')[:1]:
    print(h.get_text().split()[-1])

python3-et_xmlfile/.watch Executable file

@@ -0,0 +1,20 @@
#!/usr/bin/env python3
# Print the latest upstream release; the first <h1> on the PyPI page reads "<name> <version>".
from subprocess import Popen, PIPE
from bs4 import BeautifulSoup as bs

def get_html(url):
    try:
        curlpipe = Popen(["/usr/bin/curl", "-s", "-L", url], stdout=PIPE)
    except OSError:
        print("- Request error: unable to retrieve {}".format(url))
        return None
    curltup = curlpipe.communicate()
    return curltup[0]

page = get_html("https://pypi.org/project/et_xmlfile/")
if page is None:
    raise SystemExit(1)
soup = bs(page, 'lxml')
for h in soup.body.find_all('h1')[:1]:
    print(h.get_text().split()[-1])

python3-leather/.watch Executable file

@@ -0,0 +1,20 @@
#!/usr/bin/env python3
# Print the latest upstream release; the first <h1> on the PyPI page reads "<name> <version>".
from subprocess import Popen, PIPE
from bs4 import BeautifulSoup as bs

def get_html(url):
    try:
        curlpipe = Popen(["/usr/bin/curl", "-s", "-L", url], stdout=PIPE)
    except OSError:
        print("- Request error: unable to retrieve {}".format(url))
        return None
    curltup = curlpipe.communicate()
    return curltup[0]

page = get_html("https://pypi.org/project/leather/")
if page is None:
    raise SystemExit(1)
soup = bs(page, 'lxml')
for h in soup.body.find_all('h1')[:1]:
    print(h.get_text().split()[-1])

python3-olefile/.watch Executable file

@@ -0,0 +1,20 @@
#!/usr/bin/env python3
# Print the latest upstream release; the first <h1> on the PyPI page reads "<name> <version>".
from subprocess import Popen, PIPE
from bs4 import BeautifulSoup as bs

def get_html(url):
    try:
        curlpipe = Popen(["/usr/bin/curl", "-s", "-L", url], stdout=PIPE)
    except OSError:
        print("- Request error: unable to retrieve {}".format(url))
        return None
    curltup = curlpipe.communicate()
    return curltup[0]

page = get_html("https://pypi.org/project/olefile/")
if page is None:
    raise SystemExit(1)
soup = bs(page, 'lxml')
for h in soup.body.find_all('h1')[:1]:
    print(h.get_text().split()[-1])

python3-openpyxl/.watch Executable file

@@ -0,0 +1,20 @@
#!/usr/bin/env python3
# Print the latest upstream release; the first <h1> on the PyPI page reads "<name> <version>".
from subprocess import Popen, PIPE
from bs4 import BeautifulSoup as bs

def get_html(url):
    try:
        curlpipe = Popen(["/usr/bin/curl", "-s", "-L", url], stdout=PIPE)
    except OSError:
        print("- Request error: unable to retrieve {}".format(url))
        return None
    curltup = curlpipe.communicate()
    return curltup[0]

page = get_html("https://pypi.org/project/openpyxl/")
if page is None:
    raise SystemExit(1)
soup = bs(page, 'lxml')
for h in soup.body.find_all('h1')[:1]:
    print(h.get_text().split()[-1])

python3-pyicu/.watch Executable file

@@ -0,0 +1,20 @@
#!/usr/bin/env python3
# Print the latest upstream release; the first <h1> on the PyPI page reads "<name> <version>".
from subprocess import Popen, PIPE
from bs4 import BeautifulSoup as bs

def get_html(url):
    try:
        curlpipe = Popen(["/usr/bin/curl", "-s", "-L", url], stdout=PIPE)
    except OSError:
        print("- Request error: unable to retrieve {}".format(url))
        return None
    curltup = curlpipe.communicate()
    return curltup[0]

page = get_html("https://pypi.org/project/pyicu/")
if page is None:
    raise SystemExit(1)
soup = bs(page, 'lxml')
for h in soup.body.find_all('h1')[:1]:
    print(h.get_text().split()[-1])

python3-pytimeparse/.watch Executable file

@@ -0,0 +1,20 @@
#!/usr/bin/env python3
# Print the latest upstream release; the first <h1> on the PyPI page reads "<name> <version>".
from subprocess import Popen, PIPE
from bs4 import BeautifulSoup as bs

def get_html(url):
    try:
        curlpipe = Popen(["/usr/bin/curl", "-s", "-L", url], stdout=PIPE)
    except OSError:
        print("- Request error: unable to retrieve {}".format(url))
        return None
    curltup = curlpipe.communicate()
    return curltup[0]

page = get_html("https://pypi.org/project/pytimeparse/")
if page is None:
    raise SystemExit(1)
soup = bs(page, 'lxml')
for h in soup.body.find_all('h1')[:1]:
    print(h.get_text().split()[-1])

python3-slugify/.watch Executable file

@@ -0,0 +1,20 @@
#!/usr/bin/env python3
# Print the latest upstream release; the first <h1> on the PyPI page reads "<name> <version>".
from subprocess import Popen, PIPE
from bs4 import BeautifulSoup as bs

def get_html(url):
    try:
        curlpipe = Popen(["/usr/bin/curl", "-s", "-L", url], stdout=PIPE)
    except OSError:
        print("- Request error: unable to retrieve {}".format(url))
        return None
    curltup = curlpipe.communicate()
    return curltup[0]

page = get_html("https://pypi.org/project/slugify/")
if page is None:
    raise SystemExit(1)
soup = bs(page, 'lxml')
for h in soup.body.find_all('h1')[:1]:
    print(h.get_text().split()[-1])

python3-sqlalchemy/.watch Executable file

@@ -0,0 +1,20 @@
#!/usr/bin/env python3
# Print the latest upstream release; the first <h1> on the PyPI page reads "<name> <version>".
from subprocess import Popen, PIPE
from bs4 import BeautifulSoup as bs

def get_html(url):
    try:
        curlpipe = Popen(["/usr/bin/curl", "-s", "-L", url], stdout=PIPE)
    except OSError:
        print("- Request error: unable to retrieve {}".format(url))
        return None
    curltup = curlpipe.communicate()
    return curltup[0]

page = get_html("https://pypi.org/project/sqlalchemy/")
if page is None:
    raise SystemExit(1)
soup = bs(page, 'lxml')
for h in soup.body.find_all('h1')[:1]:
    print(h.get_text().split()[-1])

python3-xlrd/.watch Executable file

@@ -0,0 +1,20 @@
#!/usr/bin/env python3
# Print the latest upstream release; the first <h1> on the PyPI page reads "<name> <version>".
from subprocess import Popen, PIPE
from bs4 import BeautifulSoup as bs

def get_html(url):
    try:
        curlpipe = Popen(["/usr/bin/curl", "-s", "-L", url], stdout=PIPE)
    except OSError:
        print("- Request error: unable to retrieve {}".format(url))
        return None
    curltup = curlpipe.communicate()
    return curltup[0]

page = get_html("https://pypi.org/project/xlrd/")
if page is None:
    raise SystemExit(1)
soup = bs(page, 'lxml')
for h in soup.body.find_all('h1')[:1]:
    print(h.get_text().split()[-1])