Hey everybody!
Slackware doesn't have any kind of dependency resolution for SlackBuilds, the common way to install software.
And that's a good thing (kinda), because this way, you control every single package that is installed.
Still, this has a downside: if you want to install software that has a lot of dependencies, and these have their own...
you have to invest some time. Thus, I wrote this simple Python script to list the dependencies of a package and gain a
quick overview, since I haven't found (or searched for) a project solving this. It might still be buggy and the code looks
terrible, but it works.
UPDATE:
Now it is possible to view the dependencies as a tree or as a queue. Apart from that, caching was implemented, so processing tangled dependencies should be at least a bit faster now.
import requests
from bs4 import BeautifulSoup
import argparse
BASE_URL = "http://www.slackbuilds.org/result/?search=PACKAGE&sv="
class DummyPackage(object):
def __init__(self, name):
self.name = name
def __eq__(self, other):
return self.name == other.name
class Package(DummyPackage):
def __init__(self, name, cache, tree=False, queue=False):
DummyPackage.__init__(self, name)
self.dependencies = []
self.package_cache = cache # a list of Package objects
self.visited = False
self.fetch()
if queue:
print 'install packages in this order:'
self.traverse_df()
if tree:
print 'dependency tree:'
self.traverse()
def fetch(self):
url = BASE_URL.replace('PACKAGE', self.name)
url2 = ''
site = requests.get(url).content
soup = BeautifulSoup(site)
if not soup.find_all('table', id='results'):
for link in soup.find_all('a'):
if link.string.endswith('.info'):
url2 = 'http://www.slackbuilds.org' + link.get('href')
else:
for link in soup.find_all('a'):
if link.string == ' ' + self.name:
url = 'http://www.slackbuilds.org' + link.get('href')
site = requests.get(url).content
soup = BeautifulSoup(site)
for link in soup.find_all('a'):
if link.string.endswith('.info'):
url2 = 'http://www.slackbuilds.org' + link.get('href')
try:
info = requests.get(url2).content
except requests.exceptions.MissingSchema:
print 'package not found. do you even spell?!'
exit()
for line in info.split('\n'):
if line.startswith('REQUIRES="'):
deps = line[10:-1].split(' ')
deps = [dep for dep in deps if dep]
for dep in deps:
o = DummyPackage(dep)
if o not in self.package_cache:
self.package_cache.append(Package(dep, cache))
self.dependencies.append(self.package_cache[-1])
else:
i = self.package_cache.index(o)
o = self.package_cache[i]
self.dependencies.append(o)
def traverse(self, depth=0):
print ' '*depth + self.name
for dep in self.dependencies:
dep.traverse(depth+1)
def traverse_df(self):
for i in self.dependencies:
if not i.visited:
i.visited = True
i.traverse_df()
print self.name
if __name__ == '__main__':
ARG_PARSER = argparse.ArgumentParser(description='''Find the dependencies
of a Slackbuild package.''')
ARG_PARSER.add_argument('package', help='''the full name of the package
which you want to install''')
ARG_PARSER.add_argument('--slackware-version', default='14.1',
help='''which major release of Slackware are you
using? defaults to 14.1''')
ARG_PARSER.add_argument('-t', help='''show tree''', action='store_true')
ARG_PARSER.add_argument('-q', help='''show queue''', action='store_true')
ARGS = ARG_PARSER.parse_args()
BASE_URL += ARGS.slackware_version
if not ARGS.t and not ARGS.q:
print 'no display mode (either -q or -t) specified. do you even think?!'
exit()
print 'fetching dependencies, please wait...'
cache = []
Package(ARGS.package, cache, ARGS.t, ARGS.q)
The old version (for reference):
import requests
from bs4 import BeautifulSoup
import argparse
BASE_URL = "http://www.slackbuilds.org/result/?search=PACKAGE&sv="
def fetch_deps(package, depth=0):
deps = []
url = BASE_URL.replace('PACKAGE', package)
url2 = ''
site = requests.get(url).content
soup = BeautifulSoup(site)
if not soup.find_all('table', id='results'):
for link in soup.find_all('a'):
if link.string.endswith('.info'):
url2 = 'http://www.slackbuilds.org' + link.get('href')
else:
for link in soup.find_all('a'):
if link.string == ' ' + package:
url = 'http://www.slackbuilds.org' + link.get('href')
site = requests.get(url).content
soup = BeautifulSoup(site)
for link in soup.find_all('a'):
if link.string.endswith('.info'):
url2 = 'http://www.slackbuilds.org' + link.get('href')
info = requests.get(url2).content
for line in info.split('\n'):
if line.startswith('REQUIRES="'):
deps = line[10:-1].split(' ')
ret_str = depth*' ' + package + ':\n'
deps = [dep for dep in deps if dep]
for dep in deps:
ret_str += fetch_deps(dep, depth+1)
return ret_str
if __name__ == '__main__':
ARG_PARSER = argparse.ArgumentParser(description='Find the dependencies of a Slackbuild package.')
ARG_PARSER.add_argument('package', help='the full name of the package which you want to install.')
ARG_PARSER.add_argument('--slackware-version', default='14.1', help='which major release of Slackware are you using? Defaults to 14.1')
ARGS = ARG_PARSER.parse_args()
BASE_URL += ARGS.slackware_version
print 'fetching dependencies, please wait...'
print fetch_deps(ARGS.package)