--- /dev/null
+
+Rob Cakebread <cakebread at gmail d0t com>
+
+
--- /dev/null
+
+2008-07-01 21:30 cakebread
+
+ * .: Release 0.2.1 - Fixes for portage 2.2 new API
+
+2007-10-01 01:44 cakebread
+
+ * .: Release 0.2.0
+
+2007-05-17 21:44 cakebread
+
+ * .: Initial release 0.1.0
--- /dev/null
+
+Released under the terms of the GNU General Public License version 2
--- /dev/null
+
+
+g-pypi is Copyright 2006, 2007, 2008 Rob Cakebread
+Released under the terms of the GNU General Public License v2
+
--- /dev/null
+
+Q: It says the SRC_URI isn't available. Huh?
+A: That means someone entered their package info on PyPI without bothering to give a URL to the package. Annoying, huh?
--- /dev/null
+INSTALL
+=======
+
+g-pypi is available in my overlay directory:
+
+If you haven't emerged and configured app-portage/layman:
+ emerge layman
+ echo "source /usr/portage/local/layman/make.conf" >> /etc/make.conf
+
+layman --add pythonhead
+emerge g-pypi
+
--- /dev/null
+
+23 Jul 2008 -
+
+ Moved project from assembla.com to google hosting.
+
+17 May 2007 - 0.1.0
+
+ First version of g-pypi added to my public overlay on overlays.gentoo.org
--- /dev/null
+
+g-pypi
+======
+
+
+`http://code.google.com/p/g-pypi/ <http://code.google.com/p/g-pypi/>`_
+
+g-pypi creates Python package ebuilds for Gentoo Linux by querying The Cheese Shop.
+
+Although it's in the early stages of development, testing has generated over 1,200 ebuilds automatically.
+
+Note: g-pypi will only be as good at creating ebuilds as The Python Package Index information is. People can leave out a lot of information when submitting info to PyPI, making ebuild creation impossible.
+
+Basic Usage
+===========
+
+Output ebuild to stdout:
+
+::
+
+ g-pypi -p package_name
+
+
+Write ebuild to your overlay:
+
+::
+
+ g-pypi package_name
+
+Create live svn ebuild (if subversion URI is listed in PyPI):
+
+::
+
+ g-pypi -s package_name
+
+By default your first overlay listed in /etc/make.conf PORTDIR_OVERLAY
+is used. If you want to use a different one, edit ~/.g-pypi/g-pypirc
+
+By default your first KEYWORD listed in /etc/make.conf KEYWORDS
+is used. If you want to use a different one, edit ~/.g-pypi/g-pypirc or prefix your command with ACCEPT_KEYWORDS e.g.
+
+::
+
+ $ ACCEPT_KEYWORDS='~amd64' g-pypi foopkg
+
+
+You can override some variables if g-pypi can't figure out the
+PN, PV etc.
+
+
+::
+
+ -n or --PN=package-name
+
+ -v or --PV=version
+
+ --MY_P=my_p
+
+ --MY_PN=my_pn
+
+ --MY_PV=my_pv
+
+
+If you don't specify a portage category with '-c' the ebuild will be
+placed in dev-python
+
+Use '-V' for verbose output for debugging.
+
+
+INSTALL
+=======
+
+g-pypi is available in my overlay directory:
+
+If you haven't emerged and configured app-portage/layman:
+
+::
+
+ emerge layman
+ echo "source /usr/portage/local/layman/make.conf" >> /etc/make.conf
+ layman --add pythonhead
+ emerge g-pypi
+
+Subversion
+==========
+
+`subversion repository <http://g-pypi.googlecode.com/svn/trunk/#egg=g-pypi-dev>`_
+
+Hint: You can use g-pypi to create a live svn ebuild for g-pypi itself ;)
+
+::
+
+ g-pypi -s g-pypi
+
+
--- /dev/null
+
+
+Thanks to Phillip J. Eby for setuptools and answering my crazy questions
+that I know drive him mad.
+
+Thanks to everyone who put together The Cheeseshop.
+
+
+
+
+
+
--- /dev/null
+
+Misc improvements:
+
+Do a third pass on ebuild(s) created and add the version installed for all (R)DEPENDs if the version information is missing.
+
+e.g.
+
+ DEPEND=dev-python/setuptools
+
+would be re-written on the third pass as
+
+ DEPEND=dev-python/setuptools-0.6_rc8-r1
+
+I say third pass because first a skeleton ebuild is written with the basics, then the ebuild Manifest is created, it's unpacked and ${S} may be written on the second pass.
+
+See the issue tracker for tons of TODO:
+
+http://code.google.com/p/g-pypi/issues/list
--- /dev/null
+
+This is a summary of how g-pypi finds the information it needs to create
+an ebuild.
+
+If you give g-pypi a package name with no other arguments it will:
+
+1) Query PyPI for the package name and return the metadata
+ a) If PyPI doesn't have the SRC_URI it tries to use setuptools to find it.
+
+2) Use enamer.py to determine the package name and version if
+possible (PN, MY_P, PV, MY_PV etc.)
+
+3) Write initial ebuild
+
+4) Download the package and unarchive it and try to determine ${S}
+
+5) Write ebuild again adding ${S}
+
+
+
+
--- /dev/null
+
+"""
+
+
+g-pypi creates Gentoo ebuilds by querying The Python Package Index
+
+
+"""
+
+__docformat__ = 'restructuredtext'
+__version__ = '0.2.1'
--- /dev/null
+#!/usr/bin/env python
+# pylint: disable-msg=C0301,W0613,W0612,C0103,E0611,W0511
+
+
+"""
+
+cli.py
+======
+
+Command-line code for g-pypi
+
+
+"""
+
+import sys
+import optparse
+import inspect
+
+from pkg_resources import Requirement
+try:
+ #portage >=2.2
+ from portage import exception as portage_exception
+except ImportError:
+ #portage <2.2
+ from portage import portage_exception
+
+from yolk.pypi import CheeseShop
+from yolk.yolklib import get_highest_version
+from yolk.setuptools_support import get_download_uri
+from g_pypi.config import MyConfig
+from g_pypi.ebuild import Ebuild
+from g_pypi.portage_utils import ebuild_exists
+from g_pypi.__init__ import __version__ as VERSION
+
+
+__docformat__ = 'restructuredtext'
+__revision__ = '$Revision: 215 $'[11:-1].strip()
+
+
+
+
+class StdOut:
+
+ """
+ Filter stdout or stderr from specific modules
+ So far this is just used for pkg_resources
+ """
+
+ def __init__(self, stream, modulenames):
+ self.stdout = stream
+ #Modules to squelch
+ self.modulenames = modulenames
+
+ def __getattr__(self, attribute):
+ if not self.__dict__.has_key(attribute) or attribute == '__doc__':
+ return getattr(self.stdout, attribute)
+ return self.__dict__[attribute]
+
+ def write(self, inline):
+ """
+ Write a line to stdout if it isn't in a blacklist
+
+ Try to get the name of the calling module to see if we want
+ to filter it. If there is no calling module, use current
+ frame in case there's a traceback before there is any calling module
+ """
+ frame = inspect.currentframe().f_back
+ if frame:
+ mod = frame.f_globals.get('__name__')
+ else:
+ mod = sys._getframe(0).f_globals.get('__name__')
+ if not mod in self.modulenames:
+ self.stdout.write(inline)
+
+ def writelines(self, inline):
+ """Write multiple lines"""
+ for line in inline:
+ self.write(line)
+
+
+class GPyPI(object):
+
+ """
+ Main class for command-line interface
+ """
+
+ def __init__(self, package_name, version, options, logger):
+ """
+ @param package_name: case-insensitive package name
+ @type package_name: string
+
+ @param version: package version
+ @type version: string
+
+ @param options: command-line options
+ @type options: OptParser config object
+
+ @param logger: message logger
+ @type logger: logger object
+ """
+
+ self.package_name = package_name
+ self.version = version
+ self. options = options
+ self.logger = logger
+ self.tree = [(package_name, version)]
+ self.pypi = CheeseShop()
+ self.create_ebuilds()
+
+ def raise_error(self, msg):
+ """
+ Cleanup, print error message and raise GPyPiErro
+
+ @param msg: Error message
+ @type msg: string
+
+ """
+ #XXX: Call function to do 'ebuild pkg-ver.ebuild clean' etc.
+ #to clean up unpacked ebuilds
+
+ self.logger.error("Error: " + msg)
+ sys.exit(1)
+
+ def create_ebuilds(self):
+ """
+ Create ebuild for given package_name and any ebuilds for dependencies
+ if needed. If no version is given we use the highest available.
+ """
+ #Create first ebuild then turn off overwrite in case a dependency
+ #ebuild already exists
+ #self.logger.debug("Creating dep tree...")
+ while len(self.tree):
+ (project_name, version) = self.tree.pop(0)
+ #self.logger.debug(self.tree)
+ #self.logger.debug("%s %s" % (project_name, version))
+ self.package_name = project_name
+ self.version = version
+ requires = self.do_ebuild()
+ #print "REQUIRES", requires
+ if requires:
+ for req in requires:
+ if self.options.no_deps or ebuild_exists("dev-python/%s" % req.project_name.lower()):
+ if not self.options.no_deps:
+ self.logger.info("Skipping dependency (exists): %s" % req.project_name)
+ else:
+ self.add_dep(req.project_name)
+ self.options.overwrite = False
+
+ def add_dep(self, project_name):
+ '''Add dependency'''
+ pkgs = []
+ if len(self.tree):
+ for deps in self.tree:
+ pkgs.append(deps[0])
+
+ if project_name not in pkgs:
+ self.tree.append((project_name, None))
+ #self.logger.info("Dependency needed: %s" % project_name)
+
+ def url_from_pypi(self):
+ """
+ Query PyPI for package's download URL
+
+ @returns: source URL string
+ """
+
+ try:
+ return self.pypi.get_download_urls(self.package_name, self.version, pkg_type="source")[0]
+ except IndexError:
+ return None
+
+ def find_uri(self, method="setuptools"):
+ """
+ Returns download URI for package
+ If no package version was given it returns highest available
+ Setuptools should find anything xml-rpc can and more.
+
+ @param method: download method can be 'xml-rpc', 'setuptools', or 'all'
+ @type method: string
+
+ @returns download_url string
+ """
+ download_url = None
+
+ if method == "all" or method == "xml-rpc":
+ download_url = self.url_from_pypi()
+
+ if (method == "all" or method == "setuptools") and not download_url:
+ #Sometimes setuptools can find a package URI if PyPI doesn't have it
+ download_url = self.uri_from_setuptools()
+ return download_url
+
+ def get_uri(self, svn=False):
+ """
+ Attempt to find a package's download URI
+
+ @returns: download_url string
+
+ """
+ download_url = self.find_uri()
+
+ if not download_url:
+ self.raise_error("Can't find SRC_URI for '%s'." % self.package_name)
+
+ self.logger.debug("Package URI: %s " % download_url)
+ return download_url
+
+ def uri_from_setuptools(self):
+ """
+ Use setuptools to find a package's URI
+
+ """
+ try:
+ req = Requirement.parse(self.package_name)
+ except ValueError:
+ self.raise_error("The package seems to have a ridiculous name or version, can't proceed.")
+
+ if self.options.subversion:
+ src_uri = get_download_uri(self.package_name, "dev", "source")
+ else:
+ src_uri = get_download_uri(self.package_name, self.version, "source")
+ if not src_uri:
+ self.raise_error("The package has no source URI available.")
+ return src_uri
+
+ def verify_pkgver(self):
+ """
+ Query PyPI to make sure we have correct case for package name
+ """
+
+
+ def do_ebuild(self):
+ """
+ Get SRC_URI using PyPI and attempt to create ebuild
+
+ @returns: tuple with exit code and pkg_resources requirement
+
+ """
+ #Get proper case for project name:
+ (package_name, versions) = self.pypi.query_versions_pypi(self.package_name)
+ if package_name != self.package_name:
+ self.package_name = package_name
+
+
+ if self.version and (self.version not in versions):
+ self.logger.error("Can't find package for version:'%s'." % self.version)
+ return
+ else:
+ self.version = get_highest_version(versions)
+
+ download_url = self.get_uri()
+ try:
+ ebuild = Ebuild(self.package_name, self.version, download_url)
+ except portage_exception.InvalidVersionString:
+ self.logger.error("Can't determine PV, use -v to set it: %s-%s" % \
+ (self.package_name, self.version))
+ return
+ except portage_exception.InvalidPackageName:
+ self.logger.error("Can't determine PN, use -n to set it: %s-%s" % \
+ (self.package_name, self.version))
+ return
+
+ ebuild.set_metadata(self.query_metadata())
+
+ ebuild.get_ebuild()
+ if self.options.pretend:
+ print
+ ebuild.print_ebuild()
+ return
+ return ebuild.create_ebuild()
+
+ def query_metadata(self):
+ """
+ Get package metadata from PyPI
+
+ @returns: metadata text
+
+ """
+
+ if self.version:
+ return self.pypi.release_data(self.package_name, self.version)
+ else:
+ (pn, vers) = self.pypi.query_versions_pypi(self.package_name)
+ return self.pypi.release_data(self.package_name, get_highest_version(vers))
+
+def parse_pkg_ver(package_spec):
+ """
+ Return tuple with package_name and version from CLI args
+
+ @param package_spec: pkg_resources package spec
+ @type package_spec: string
+
+ @returns: tupe with pkg_name and version
+
+ """
+
+ arg_str = ("").join(package_spec)
+ if "==" not in arg_str:
+ #No version specified
+ package_name = arg_str
+ version = None
+ else:
+ (package_name, version) = arg_str.split("==")
+ package_name = package_name.strip()
+ version = version.strip()
+ return (package_name, version)
+
+def show_version():
+ """
+ Print g-pypi's version
+ """
+ print "g-pypi version %s (rev. %s)" % (VERSION, __revision__)
+
+def main():
+ """Parse command-line options and do it."""
+
+ usage = "usage: %prog [options] <package_name[==version]>"
+ opt_parser = optparse.OptionParser(usage=usage)
+
+ opt_parser.add_option("-p", "--pretend", action='store_true', dest=
+ "pretend", default=False, help=
+ "Print ebuild to stdout, don't write ebuild file, \
+ don't download SRC_URI.")
+
+ opt_parser.add_option("-o", "--overwrite", action='store_true', dest=
+ "overwrite", default=False, help=
+ "Overwrite existing ebuild.")
+
+ opt_parser.add_option("--no-deps", action='store_true', dest=
+ "no_deps", default=False, help=
+ "Don't create ebuilds for any needed dependencies.")
+
+ opt_parser.add_option("-c", "--portage-category", action='store', dest=
+ "category", default="dev-python", help=
+ "Specify category to use when creating ebuild. Default is dev-python")
+
+ opt_parser.add_option("-n", "--PN", action='store', dest=
+ "pn", default=False, help=
+ "Specify PN to use when naming ebuild.")
+
+ opt_parser.add_option("-v", "--PV", action='store', dest=
+ "pv", default=False, help=
+ "Specify PV to use when naming ebuild.")
+
+ opt_parser.add_option("--MY_PV", action='store', dest=
+ "my_pv", default=False, help=
+ "Specify MY_PV")
+
+ opt_parser.add_option("--MY_PN", action='store', dest=
+ "my_pn", default=False, help=
+ "Specify MY_PN")
+
+ opt_parser.add_option("--MY_P", action='store', dest=
+ "my_p", default=False, help=
+ "Specify MY_P")
+
+ opt_parser.add_option("--format", action='store', dest=
+ "format", default=None, help=
+ "Format when printing to stdout: ansi, html, bbcode, or none")
+ opt_parser.add_option("-s", "--subversion", action='store_true', dest=
+ "subversion", default=False, help=
+ "Create live subversion ebuild if repo is available.")
+
+ opt_parser.add_option("-V", "--verbose", action='store_true', dest=
+ "verbose", default=False, help=
+ "Show more output.")
+ opt_parser.add_option("-q", "--quiet", action='store_true', dest=
+ "quiet", default=False, help=
+ "Show less output.")
+
+ opt_parser.add_option("-d", "--debug", action='store_true', dest=
+ "debug", default=False, help=
+ "Show debug information.")
+
+
+ opt_parser.add_option("--version", action='store_true', dest=
+ "version", default=False, help=
+ "Show g-pypi version and exit.")
+
+ (options, package_spec) = opt_parser.parse_args()
+ if options.version:
+ show_version()
+ return
+
+ #Turn off all output from the pkg_resources module by default
+ sys.stdout = StdOut(sys.stdout, ['distutils.log'])
+ sys.stderr = StdOut(sys.stderr, ['distutils.log'])
+
+ config = MyConfig()
+ config.set_options(options)
+ config.set_logger()
+ logger = config.get_logger()
+
+ if not package_spec:
+ opt_parser.print_help()
+ logger.error("\nError: You need to specify a package name at least.")
+ return 1
+ (package_name, version) = parse_pkg_ver(package_spec)
+ gpypi = GPyPI(package_name, version, options, logger)
+
+if __name__ == "__main__":
+ sys.exit(main())
+
--- /dev/null
+#!/usr/bin/env python
+
+# pylint: disable-msg=R0201
+# method could be function but we need shared class data
+
+"""
+
+config.py
+=========
+
+Creates and reads config file using ConfigObj
+
+ config['keyword'] = get_keyword()
+ config['overlay'] = get_portdir_overlay()
+ config['format'] = "ansi"
+
+"""
+
+import os
+import logging
+
+from configobj import ConfigObj
+
+from g_pypi.portage_utils import get_keyword, get_portdir_overlay
+
+__docformat__ = 'restructuredtext'
+
+CONFIG_DIR = os.path.expanduser("~/.g-pypi")
+
+
+class MyConfig:
+
+ """
+ Holds options from config file
+ """
+
+ config = None
+ options = None
+ logger = None
+
+ def __init__(self):
+ self.set_config(self.get_config())
+
+ def get_config(self):
+ """Read config file, create if it doesn't exist"""
+ if not os.path.exists(self.get_rc_filename()):
+ self.create_config()
+ return ConfigObj(self.get_rc_filename())
+
+ def create_config(self):
+ """Create config file with defaults"""
+ if not os.path.exists(CONFIG_DIR):
+ os.mkdir(CONFIG_DIR)
+ self.create_config_obj()
+
+ def create_config_obj(self):
+ """Set defaults for ConigObj"""
+ config = ConfigObj()
+ config.filename = self.get_rc_filename()
+ config['keyword'] = get_keyword()
+ config['overlay'] = get_portdir_overlay()
+ config['format'] = "ansi"
+ config['background'] = "dark"
+ config.write()
+ self.set_config(config)
+ #logger isn't set yet
+ print "Your default keyword will be: %s " % \
+ config['keyword']
+ print "Your default overlay will be: %s " % \
+ config['overlay']
+ print "To change these edit: %s \n\n" % config.filename
+
+ def set_config(self, config):
+ """Set config"""
+ MyConfig.config = config
+
+ def set_options(self, options):
+ """Set options"""
+ MyConfig.options = options
+
+ def get_rc_filename(self):
+ """Return rc_file filename"""
+ return os.path.join(CONFIG_DIR, "g-pypirc")
+
+ def set_logger(self):
+ """Set logger"""
+ MyConfig.logger = logging.getLogger("g-pypi")
+ if MyConfig.options.verbose:
+ MyConfig.logger.setLevel(logging.INFO)
+ elif MyConfig.options.quiet:
+ MyConfig.logger.setLevel(logging.ERROR)
+ elif MyConfig.options.debug:
+ MyConfig.logger.setLevel(logging.DEBUG)
+ else:
+ MyConfig.logger.setLevel(logging.INFO)
+ MyConfig.logger.addHandler(logging.StreamHandler())
+
+ def get_logger(self):
+ """Return logging object"""
+ return MyConfig.logger
+
--- /dev/null
+#!/usr/bin/env python
+# pylint: disable-msg=C0103,C0301,E0611,W0511
+
+# Reasons for pylint disable-msg's
+#
+# E0611 - No name 'resource_string' in module 'pkg_resources'
+# No name 'BashLexer' in module 'pygments.lexers'
+# No name 'TerminalFormatter' in module 'pygments.formatters'
+# (False positives ^^^)
+# C0103 - Variable names too short (p, pn, pv etc.)
+# (These can be ignored individually with some in-line pylint-foo.)
+# C0301 - Line too long in some docstrings
+"""
+
+ebuild.py
+=========
+
+Creates an ebuild
+
+
+"""
+
+import re
+import sys
+import os
+import logging
+from time import localtime
+
+from Cheetah.Template import Template
+from pkg_resources import resource_string, WorkingSet, Environment, Requirement
+from pygments import highlight
+from pygments.lexers import BashLexer
+from pygments.formatters import TerminalFormatter, HtmlFormatter
+from pygments.formatters import BBCodeFormatter
+
+from g_pypi.portage_utils import make_overlay_dir, find_s_dir, unpack_ebuild
+from g_pypi.portage_utils import get_portdir, get_workdir, find_egg_info_dir
+from g_pypi.portage_utils import valid_cpn, get_installed_ver
+from g_pypi.config import MyConfig
+from g_pypi import enamer
+from g_pypi.__init__ import __version__ as VERSION
+
+
+__docformat__ = 'restructuredtext'
+__revision__ = '$Revision: 214 $'[11:-1].strip()
+
+#Cheetah template
+EBUILD_TEMPLATE = 'ebuild.tmpl'
+
+
+def get_version():
+ """
+ Get g-pypi's version and revision
+
+ @returns: string
+ """
+ return "%s (rev. %s)" % (VERSION, __revision__)
+
+
+class Ebuild:
+
+ """Contains ebuild"""
+
+ def __init__(self, up_pn, up_pv, download_url):
+ """Setup ebuild variables"""
+ self.pypi_pkg_name = up_pn
+ self.config = MyConfig.config
+ self.options = MyConfig.options
+ #self.logger = MyConfig.logger
+ self.logger = logging.getLogger("g-pypi")
+ self.metadata = None
+ self.unpacked_dir = None
+ self.ebuild_text = ""
+ self.ebuild_path = ""
+ self.warnings = []
+ self.setup = []
+ self.requires = []
+ self.has_tests = None
+
+ #Variables that will be passed to the Cheetah template
+ self.vars = {
+ 'need_python': '',
+ 'python_modname': '',
+ 'description': '',
+ 'homepage': '',
+ 'rdepend': [],
+ 'depend': [],
+ 'use': [],
+ 'slot': '0',
+ 's': '',
+ 'keywords': self.config['keyword'],
+ 'inherit': ['distutils'],
+ 'esvn_repo_uri': '',
+ }
+ keyword = os.getenv('ACCEPT_KEYWORDS')
+ if keyword:
+ self.vars['keywords'] = keyword
+ if self.options.subversion:
+ #Live svn version ebuild
+ self.options.pv = "9999"
+ self.vars['esvn_repo_uri'] = download_url
+ self.add_inherit("subversion")
+ ebuild_vars = enamer.get_vars(download_url, up_pn, up_pv, self.options.pn,
+ self.options.pv, self.options.my_pn, self.options.my_pv)
+ for key in ebuild_vars.keys():
+ if not self.vars.has_key(key):
+ self.vars[key] = ebuild_vars[key]
+ self.vars['p'] = '%s-%s' % (self.vars['pn'], self.vars['pv'])
+
+ def set_metadata(self, metadata):
+ """Set metadata"""
+ if metadata:
+ self.metadata = metadata
+ else:
+ self.logger.error("Package has no metadata.")
+ sys.exit(2)
+
+ def get_ebuild_vars(self, download_url):
+ """Determine variables from SRC_URI"""
+ if self.options.pn or self.options.pv:
+ ebuild_vars = enamer.get_vars(download_url, self.vars['pn'],
+ self.vars['pv'], self.options.pn, self.options.pv)
+ else:
+ ebuild_vars = enamer.get_vars(download_url, self.vars['pn'],
+ self.vars['pv'])
+ if self.options.my_p:
+ ebuild_vars['my_p'] = self.options.my_p
+
+ if self.options.my_pv:
+ ebuild_vars['my_pv'] = self.options.my_pv
+
+ if self.options.my_pn:
+ ebuild_vars['my_pn'] = self.options.my_pn
+
+ if ebuild_vars.has_key('my_p'):
+ self.vars['my_p'] = ebuild_vars['my_p']
+ self.vars['my_p_raw'] = ebuild_vars['my_p_raw']
+ else:
+ self.vars['my_p'] = ''
+ self.vars['my_p_raw'] = ebuild_vars['my_p_raw']
+ if ebuild_vars.has_key('my_pn'):
+ self.vars['my_pn'] = ebuild_vars['my_pn']
+ else:
+ self.vars['my_pn'] = ''
+ if ebuild_vars.has_key('my_pv'):
+ self.vars['my_pv'] = ebuild_vars['my_pv']
+ else:
+ self.vars['my_pv'] = ''
+ self.vars['src_uri'] = ebuild_vars['src_uri']
+
+
+ def add_metadata(self):
+ """
+ Extract DESCRIPTION, HOMEPAGE, LICENSE ebuild variables from metadata
+ """
+ #Various spellings for 'homepage'
+ homepages = ['Home-page', 'home_page', 'home-page']
+ for hpage in homepages:
+ if self.metadata.has_key(hpage):
+ self.vars['homepage'] = self.metadata[hpage]
+
+ #There doesn't seem to be any specification for case
+ if self.metadata.has_key('Summary'):
+ self.vars['description'] = self.metadata['Summary']
+ elif self.metadata.has_key('summary'):
+ self.vars['description'] = self.metadata['summary']
+ #Replace double quotes to keep bash syntax correct
+ if self.vars['description'] is None:
+ self.vars['description'] = ""
+ else:
+ self.vars['description'] = self.vars['description'].replace('"', "'")
+
+ my_license = ""
+ if self.metadata.has_key('classifiers'):
+ for data in self.metadata['classifiers']:
+ if data.startswith("License :: "):
+ my_license = get_portage_license(data)
+ if not my_license:
+ if self.metadata.has_key('License'):
+ my_license = self.metadata['License']
+ elif self.metadata.has_key('license'):
+ my_license = self.metadata['license']
+ my_license = "%s" % my_license
+ if not is_valid_license(my_license):
+ if "LGPL" in my_license:
+ my_license = "LGPL-2.1"
+ elif "GPL" in my_license:
+ my_license = "GPL-2"
+ else:
+ self.add_warning("Invalid LICENSE.")
+
+ self.vars['license'] = "%s" % my_license
+
+ def add_warning(self, warning):
+ """Add warning to be shown after ebuild is created"""
+ if warning not in self.warnings:
+ self.warnings.append(warning.lstrip())
+
+ def post_unpack(self):
+ """Check setup.py for:
+ * PYTHON_MODNAME != $PN
+ * setuptools install_requires or extra_requires
+ # regex: install_requires[ \t]*=[ \t]*\[.*\],
+
+ """
+ name_regex = re.compile('''.*name\s*=\s*[',"]([\w+,\-,\_]*)[',"].*''')
+ module_regex = \
+ re.compile('''.*packages\s*=\s*\[[',"]([\w+,\-,\_]*)[',"].*''')
+ if os.path.exists(self.unpacked_dir):
+ setup_file = os.path.join(self.unpacked_dir, "setup.py")
+ if not os.path.exists(setup_file):
+ self.add_warning("No setup.py found!")
+ self.setup = ""
+ return
+ self.setup = open(setup_file, "r").readlines()
+
+ setuptools_requires = module_name = package_name = None
+ for line in self.setup:
+ name_match = name_regex.match(line)
+ if name_match:
+ package_name = name_match.group(1)
+ elif "packages=" in line or "packages =" in line:
+ #XXX If we have more than one and only one is a top-level
+ #use it e.g. "module, not module.foo, module.bar"
+ mods = line.split(",")[0]
+ #if len(mods) > 1:
+ # self.add_warning(line)
+ module_match = module_regex.match(mods)
+ if module_match:
+ module_name = module_match.group(1)
+ elif ("setuptools" in line) and ("import" in line):
+ setuptools_requires = True
+ #It requires setuptools to install pkg
+ self.add_depend("dev-python/setuptools")
+
+ if setuptools_requires:
+ self.get_dependencies(setup_file)
+ else:
+ self.logger.warn("This package does not use setuptools so you will have to determine any dependencies if needed.")
+
+ if module_name and package_name:
+ # if module_name != package_name:
+ self.vars['python_modname'] = module_name
+
+ def get_unpacked_dist(self, setup_file):
+ """
+ Return pkg_resources Distribution object from unpacked package
+ """
+ os.chdir(self.unpacked_dir)
+ os.system("/usr/bin/python %s egg_info" % setup_file)
+ ws = WorkingSet([find_egg_info_dir(self.unpacked_dir)])
+ env = Environment()
+ return env.best_match(Requirement.parse(self.pypi_pkg_name), ws)
+
+ def get_dependencies(self, setup_file):
+ """
+ Generate DEPEND/RDEPEND strings
+
+ * Run setup.py egg_info so we can get the setuptools requirements
+ (dependencies)
+
+ * Add the unpacked directory to the WorkingEnvironment
+
+ * Get a Distribution object for package we are isntalling
+
+ * Get Requirement object containing dependencies
+
+ a) Determine if any of the requirements are installed
+
+ b) If requirements aren't installed, see if we have a matching ebuild
+ with adequate version available
+
+ * Build DEPEND string based on either a) or b)
+
+ """
+
+ #`dist` is a pkg_resources Distribution object
+ dist = self.get_unpacked_dist(setup_file)
+ if not dist:
+ #Should only happen if ebuild had 'install_requires' in it but
+ #for some reason couldn't extract egg_info
+ self.logger.warn("Couldn't acquire Distribution obj for %s" % \
+ self.unpacked_dir)
+ return
+
+ for req in dist.requires():
+ added_dep = False
+ pkg_name = req.project_name.lower()
+ if not len(req.specs):
+ self.add_setuptools_depend(req)
+ self.add_rdepend("dev-python/%s" % pkg_name)
+ added_dep = True
+ #No version of requirement was specified so we only add
+ #dev-python/pkg_name
+ else:
+ comparator, ver = req.specs[0]
+ self.add_setuptools_depend(req)
+ if len(req.specs) > 1:
+ comparator1, ver = req.specs[0]
+ comparator2, ver = req.specs[1]
+ if comparator1.startswith(">") and \
+ comparator2.startswith("<"):
+ comparator = "="
+ self.add_warning("Couldn't resolve requirements. You will need to make sure the RDEPEND for %s is correct." % req)
+ else:
+ #Some packages have more than one comparator, i.e. cherrypy
+ #for turbogears has >=2.2,<3.0 which would translate to
+ #portage's =dev-python/cherrypy-2.2*
+ self.logger.warn(" **** Requirement %s has multi-specs ****" % req)
+ self.add_rdepend("dev-python/%s" % pkg_name)
+ break
+ #Requirement.specs is a list of (comparator,version) tuples
+ if comparator == "==":
+ comparator = "="
+ if valid_cpn("%sdev-python/%s-%s" % (comparator, pkg_name, ver)):
+ self.add_rdepend("%sdev-python/%s-%s" % (comparator, pkg_name, ver))
+ else:
+ self.logger.info(\
+ "Invalid PV in dependency: (Requirement %s) %sdev-python/%s-%s" \
+ % (req, comparator, pkg_name, ver)
+ )
+ installed_pv = get_installed_ver("dev-python/%s" % pkg_name)
+ if installed_pv:
+ self.add_rdepend(">=dev-python/%s-%s" % \
+ (pkg_name, installed_pv))
+ else:
+ #If we have it installed, use >= installed version
+ #If package has invalid version and we don't have
+ #an ebuild in portage, just add PN to DEPEND, no
+ #version. This means the dep ebuild will have to
+ #be created by adding --MY_? options using the CLI
+ self.add_rdepend("dev-python/%s" % pkg_name)
+ added_dep = True
+ if not added_dep:
+ self.add_warning("Couldn't determine dependency: %s" % req)
+
+ def add_setuptools_depend(self, req):
+ """
+ Add dependency for setuptools requirement
+ After current ebuild is created, we check if portage has an
+ ebuild for the requirement, if not create it.
+ @param req: requirement needed by ebuild
+ @type req: pkg_resources `Requirement` object
+ """
+ self.logger.debug("Found dependency: %s " % req)
+ if req not in self.requires:
+ self.requires.append(req)
+
+ def get_src_test(self):
+ """Create src_test if tests detected"""
+ nose_test = '''\tPYTHONPATH=. "${python}" setup.py nosetests || die "tests failed"'''
+ regular_test = '''\tPYTHONPATH=. "${python}" setup.py test || die "tests failed"'''
+
+ for line in self.setup:
+ if "nose.collector" in line:
+ self.add_depend("test? ( dev-python/nose )")
+ self.add_use("test")
+ self.has_tests = True
+ return nose_test
+ #XXX Search for sub-directories
+ if os.path.exists(os.path.join(self.unpacked_dir,
+ "tests")) or os.path.exists(os.path.join(self.unpacked_dir,
+ "test")):
+ self.has_tests = True
+ return regular_test
+
+ def add_use(self, use_flag):
+ """Add DEPEND"""
+ self.vars['use'].append(use_flag)
+
+ def add_inherit(self, eclass):
+ """Add inherit eclass"""
+ if eclass not in self.vars['inherit']:
+ self.vars['inherit'].append(eclass)
+
+ def add_depend(self, depend):
+ """Add DEPEND ebuild variable"""
+ if depend not in self.vars['depend']:
+ self.vars['depend'].append(depend)
+
+ def add_rdepend(self, rdepend):
+ """Add RDEPEND ebuild variable"""
+ if rdepend not in self.vars['rdepend']:
+ self.vars['rdepend'].append(rdepend)
+
+ def get_ebuild(self):
+ """Generate ebuild from template"""
+ self.set_variables()
+ functions = {
+ 'src_unpack': "",
+ 'src_compile': "",
+ 'src_install': "",
+ 'src_test': ""
+ }
+ if not self.options.pretend and self.unpacked_dir: # and \
+ # not self.options.subversion:
+ self.post_unpack()
+ functions['src_test'] = self.get_src_test()
+ # *_f variables are formatted text ready for ebuild
+ self.vars['depend_f'] = format_depend(self.vars['depend'])
+ self.vars['rdepend_f'] = format_depend(self.vars['rdepend'])
+ self.vars['use_f'] = " ".join(self.vars['use'])
+ self.vars['inherit_f'] = " ".join(self.vars['inherit'])
+ template = resource_string(__name__, EBUILD_TEMPLATE)
+ self.ebuild_text = \
+ Template(template, searchList=[self.vars, functions]).respond()
+
+ def set_variables(self):
+ """
+ Ensure all variables needed for ebuild template are set and formatted
+
+ """
+ if self.vars['src_uri'].endswith('.zip') or \
+ self.vars['src_uri'].endswith('.ZIP'):
+ self.add_depend("app-arch/unzip")
+ if self.vars['python_modname'] == self.vars['pn']:
+ self.vars['python_modname'] = ""
+ self.vars['year'] = localtime()[0]
+ #Add homepage, license and description from metadata
+ self.add_metadata()
+ self.vars['warnings'] = self.warnings
+ self.vars['gpypi_version'] = get_version()
+
+ def print_ebuild(self):
+ """Print ebuild to stdout"""
+ #No command-line set, config file says no formatting
+ self.logger.info("%s/%s-%s" % \
+ (self.options.category, self.vars['pn'],
+ self.vars['pv']))
+ if self.options.format == "none" or \
+ (self.config['format'] == "none" and not self.options.format):
+ self.logger.info(self.ebuild_text)
+ return
+
+ background = self.config['background']
+ if self.options.format == "html":
+ formatter = HtmlFormatter(full=True)
+ elif self.config['format'] == "bbcode" or \
+ self.options.format == "bbcode":
+ formatter = BBCodeFormatter()
+ elif self.options.format == "ansi" or self.config['format'] == "ansi":
+ formatter = TerminalFormatter(bg=background)
+ else:
+ #Invalid formatter specified
+ self.logger.info(self.ebuild_text)
+ print "ERROR - No formatter"
+ print self.config['format'], self.options.format
+ return
+ self.logger.info(highlight(self.ebuild_text,
+ BashLexer(),
+ formatter,
+ ))
+ self.show_warnings()
+
+ def create_ebuild(self):
+ """Write ebuild and update it after unpacking and examining ${S}"""
+ #Need to write the ebuild first so we can unpack it and check for $S
+ if self.write_ebuild(overwrite=self.options.overwrite):
+ unpack_ebuild(self.ebuild_path)
+ self.update_with_s()
+ #Write ebuild again after unpacking and adding ${S}
+ self.get_ebuild()
+ #Run any tests if found
+ #if self.has_tests:
+ # run_tests(self.ebuild_path)
+ #We must overwrite initial skeleton ebuild
+ self.write_ebuild(overwrite=True)
+ self.print_ebuild()
+ self.logger.info("Your ebuild is here: " + self.ebuild_path)
+ #If ebuild already exists, we don't unpack and get dependencies
+ #because they must exist.
+ #We should add an option to force creating dependencies or should
+ #overwrite be used?
+ return self.requires
+
+ def write_ebuild(self, overwrite=False):
+ """Write ebuild file"""
+ ebuild_dir = make_overlay_dir(self.options.category, self.vars['pn'], \
+ self.config['overlay'])
+ if not ebuild_dir:
+ self.logger.error("Couldn't create overylay ebuild directory.")
+ sys.exit(2)
+ self.ebuild_path = os.path.join(ebuild_dir, "%s.ebuild" % \
+ self.vars['p'])
+ if os.path.exists(self.ebuild_path) and not overwrite:
+ #self.logger.error("Ebuild exists. Use -o to overwrite.")
+ self.logger.error("Ebuild exists, skipping: %s" % self.ebuild_path)
+ return
+ try:
+ out = open(self.ebuild_path, "w")
+ except IOError, err:
+ self.logger.error(err)
+ sys.exit(2)
+ out.write(self.ebuild_text)
+ out.close()
+ return True
+
+ def show_warnings(self):
+ """Print warnings for incorrect ebuild syntax"""
+ for warning in self.warnings:
+ self.logger.warn("** Warning: %s" % warning)
+
+ def update_with_s(self):
+ """Add ${S} to ebuild if needed"""
+ #if self.options.subversion:
+ # return
+ self.logger.debug("Trying to determine ${S}, unpacking...")
+ unpacked_dir = find_s_dir(self.vars['p'], self.options.category)
+ if unpacked_dir == "":
+ self.vars["s"] = "${WORKDIR}"
+ return
+
+ self.unpacked_dir = os.path.join(get_workdir(self.vars['p'],
+ self.options.category), unpacked_dir)
+ if unpacked_dir and unpacked_dir != self.vars['p']:
+ if unpacked_dir == self.vars['my_p_raw']:
+ unpacked_dir = '${MY_P}'
+ elif unpacked_dir == self.vars['my_pn']:
+ unpacked_dir = '${MY_PN}'
+ elif unpacked_dir == self.vars['pn']:
+ unpacked_dir = '${PN}'
+
+ self.vars["s"] = "${WORKDIR}/%s" % unpacked_dir
+
def get_portage_license(my_license):
    """
    Map a PyPI trove license classifier to a Portage license name.

    http://cheeseshop.python.org/pypi?%3Aaction=list_classifiers

    @param my_license: trove classifier,
        e.g. "License :: OSI Approved :: MIT License"
    @type my_license: string

    @returns: Portage license string, or "" if the license is unknown
    """
    #Only the last ":: "-separated part names the license itself
    my_license = my_license.split(":: ")[-1]
    known_licenses = {
        "Aladdin Free Public License (AFPL)": "Aladdin",
        "Academic Free License (AFL)": "AFL-3.0",
        "Apache Software License": "Apache-2.0",
        "Apple Public Source License": "Apple",
        "Artistic License": "Artistic-2",
        "BSD License": "BSD-2",
        "Common Public License": "CPL-1.0",
        "GNU Free Documentation License (FDL)": "FDL-3",
        "GNU General Public License (GPL)": "GPL-2",
        "GNU Library or Lesser General Public License (LGPL)": "LGPL-2.1",
        "IBM Public License": "IBM",
        "Intel Open Source License": "Intel",
        "MIT License": "MIT",
        "Mozilla Public License 1.0 (MPL)": "MPL",
        "Mozilla Public License 1.1 (MPL 1.1)": "MPL-1.1",
        "Nethack General Public License": "nethack",
        "Open Group Test Suite License": "OGTSL",
        "Python License (CNRI Python License)": "PYTHON",
        "Python Software Foundation License": "PSF-2.4",
        "Qt Public License (QPL)": "QPL",
        "Sleepycat License": "DB",
        "Sun Public License": "SPL",
        "University of Illinois/NCSA Open Source License": "ncsa-1.3",
        "W3C License": "WC3",
        "zlib/libpng License": "ZLIB",
        "Zope Public License": "ZPL",
        "Public Domain": "public-domain"
        }
    #dict.get instead of the deprecated dict.has_key
    return known_licenses.get(my_license, "")
+
def is_valid_license(my_license):
    """Return True if the license file exists in ${PORTDIR}/licenses."""
    license_path = os.path.join(get_portdir(), "licenses", my_license)
    return os.path.exists(license_path)
+
+
def format_depend(dep_list):
    """
    Return a formatted DEPEND/RDEPEND string ready for an ebuild.

    @param dep_list: list of portage-ready dependency strings
    @return: formatted DEPEND or RDEPEND string

    Layout::

        DEPEND="dev-python/foo-1.0
            >=dev-python/bar-0.2
            dev-python/zaba"

    The first dep is unindented; every following dep sits on its own
    line with a leading tab; there is no trailing linefeed.
    """
    if not dep_list:
        return ""
    #Joining with newline+tab reproduces exactly that layout
    return "\n\t".join(dep_list)
--- /dev/null
## Cheetah template used by g-pypi to render ebuilds.
## Lines starting with "##" are Cheetah comments and never reach the output;
## "\#" emits a literal "#"; "#if/#for/#end" are Cheetah directives; "$name"
## placeholders are filled from the ebuild vars/functions dicts.
# Copyright 1999-$year Gentoo Foundation
# Distributed under the terms of the GNU General Public License v2
# \$Header: \$
# Ebuild generated by g-pypi $gpypi_version

#if $warnings
#for warn in $warnings:
\# Warning: $warn
#end for
#end if
#if $need_python
NEED_PYTHON="$need_python"

#end if
#if $python_modname
PYTHON_MODNAME="$python_modname"

#end if
inherit $inherit_f

#if not $esvn_repo_uri
#if $my_pn
MY_PN="$my_pn"
#end if
#if $my_pv
MY_PV="$my_pv"
#end if
#if $my_p
MY_P="$my_p"
#end if
#end if

DESCRIPTION="$description"
HOMEPAGE="$homepage"
#if $esvn_repo_uri
ESVN_REPO_URI="$esvn_repo_uri"
#else
SRC_URI="$src_uri"
#end if
LICENSE="$license"
KEYWORDS="$keywords"
SLOT="$slot"
IUSE="$use_f"
#if $s
S="$s"
#end if
#if $depend
DEPEND="$depend_f"
#end if
#if $rdepend
RDEPEND="$rdepend_f"
#end if

#if $src_unpack
src_unpack() {
$src_unpack
}

#end if
#if $src_compile
src_compile() {
$src_compile
}

#end if
#if $src_install
src_install() {
$src_install
}

#end if
#if $src_test
src_test() {
$src_test
}

#end if
--- /dev/null
+#!/usr/bin/env python
+# pylint: disable-msg=C0103,E0611,W0511
+
+"""
+enamer.py
+=========
+
+Functions for extracting useful info from a pkg URI
+such as PN, PV, MY_P, SRC_URI
+
+* Examples of what it can detect/convert:
+ (See test_enamer.py for full capabilities)
+
+ http://www.foo.com/pkgfoo-1.0.tbz2
+ PN="pkgfoo"
+ PV="1.0"
+ Ebuild name: pkgfoo-1.0.ebuild
+ SRC_URI="http://www.foo.com/${P}.tbz2"
+
+ http://www.foo.com/PkgFoo-1.0.tbz2
+ PN="pkgfoo"
+ PV="1.0"
+ Ebuild name: pkgfoo-1.0.ebuild
+ MY_P="PkgFoo-${PV}"
+ SRC_URI="http://www.foo.com/${MY_P}.tbz2"
+
+ http://www.foo.com/pkgfoo_1.0.tbz2
+ PN="pkgfoo"
+ PV="1.0"
+ Ebuild name: pkgfoo-1.0.ebuild
+ MY_P="${PN}_${PV}"
+ SRC_URI="http://www.foo.com/${MY_P}.tbz2"
+
+ http://www.foo.com/PKGFOO_1.0.tbz2
+ PN="pkgfoo"
+ PV="1.0"
+ Ebuild name: pkgfoo-1.0.ebuild
+ MY_P="PKGFOO_${PV}"
+ SRC_URI="http://www.foo.com/${MY_P}.tbz2"
+
+ http://www.foo.com/pkg-foo-1.0_beta1.tbz2
+ PN="pkg-foo"
+ PV="1.0_beta1"
+ Ebuild name: pkg-foo-1.0_beta1.ebuild
+ SRC_URI="http://www.foo.com/${P}.tbz2"
+
+"""
+
+import urlparse
+import re
+
+from portage import pkgsplit
+
+try:
+ #portage >=2.2
+ from portage import dep as portage_dep
+except ImportError:
+ #portage <=2.1
+ from portage import portage_dep
+
+try:
+ #portage >=2.2
+ from portage import exception as portage_exception
+except ImportError:
+ #portage <=2.1
+ from portage import portage_exception
+
+__docformat__ = 'restructuredtext'
+
+
def get_filename(uri):
    """
    Return the URI's last path component minus its archive extension.
    e.g. http://somesite.com/foobar-1.0.tar.gz yields foobar-1.0

    @param uri: URI to package with no variables substitution
    @type uri: string

    @returns: string
    """
    uri_path = urlparse.urlparse(uri)[2]
    basename = uri_path.rsplit('/', 1)[-1]
    return strip_ext(basename)
+
def strip_ext(fname):
    """Strip a known archive extension from the end of a filename.

    Only the trailing extension is removed. The old implementation used
    str.replace, which removed EVERY occurrence of the extension and so
    mangled names like 'foo.tar.gz.tar.gz' down to 'foo'.

    @param fname: file name, possibly with an archive extension
    @type fname: string

    @returns: string without the trailing extension
    """
    valid_extensions = [".zip", ".tgz", ".tar.gz", ".tar.bz2", ".tbz2"]
    for ext in valid_extensions:
        if fname.endswith(ext):
            return fname[:-len(ext)]
    return fname
+
def is_valid_uri(uri):
    """
    Check if the URI's addressing scheme is one g-pypi can handle.

    @param uri: URI to package with no variable substitution
    @type uri: string

    @returns: True or False
    """
    #startswith accepts a tuple of candidate prefixes
    return uri.startswith(("http:", "ftp:", "mirror:", "svn:"))
+
def parse_sourceforge_uri(uri):
    """
    Rewrite a sourceforge download URI to mirror://sourceforge form.
    Also derives a homepage string usable when the package metadata
    has no Home_page.

    @param uri: URI to package with no variable substitution
    @type uri: string

    @returns: tuple (uri string, homepage string); both empty when the
        URI is not hosted on sourceforge
    """
    parsed = urlparse.urlparse(uri)
    host = parsed[1]
    upath = parsed[2]
    if upath.startswith("/sourceforge"):
        upath = upath[len("/sourceforge"):]
    if ("sourceforge" not in host) and (not host.endswith("sf.net")):
        return "", ""
    mirror_uri = 'mirror://sourceforge%s' % upath
    homepage = "http://sourceforge.net/projects/%s/" % upath.split("/")[1]
    return mirror_uri, homepage
+
def is_good_filename(uri):
    """Return pkgsplit results when the URI's filename cleanly yields PN/PV."""
    if not is_valid_uri(uri):
        return None
    parts = split_p(uri)
    if parts and parts[0].islower():
        return parts
+
def split_p(uri):
    """Split the URI's filename into (PN, PV, rev) via portage's pkgsplit."""
    return pkgsplit(get_filename(uri))
+
def get_components(uri):
    """Break the URI into (src_uri with ${P} substituted, pn, pv)."""
    filename = get_filename(uri)
    name_ver = split_p(uri)
    templated_uri = uri.replace(filename, "${P}")
    return templated_uri, name_ver[0].lower(), name_ver[1]
+
def get_myp(uri):
    """Return (uri with ${MY_P} substituted, raw MY_P string)."""
    raw_my_p = get_filename(uri)
    return uri.replace(raw_my_p, "${MY_P}"), raw_my_p
+
def guess_components(my_p):
    """Best-effort split of a raw MY_P into (pn, pv).

    Returns ("", "") when pkgsplit can't make sense of the name.
    """
    #Portage separates name and version with '-'; normalize '_' first.
    #versionator.eclass would be the inspiration for fancier schemes.
    normalized = my_p.replace("_", "-")
    parts = pkgsplit(normalized)
    if not parts:
        return "", ""
    return parts[0].lower(), parts[1]
+
+
+def bad_pv(up_pn, up_pv, pn="", pv="", my_pn="", my_pv=""):
+ """
+ Can't determine PV from upstream's version.
+ Do our best with some well-known versioning schemes:
+
+ 1.0a1 (1.0_alpha1)
+ 1.0-a1 (1.0_alpha1)
+ 1.0b1 (1.0_beta1)
+ 1.0-b1 (1.0_beta1)
+ 1.0-r1234 (1.0_pre1234)
+ 1.0dev-r1234 (1.0_pre1234)
+ 1.0.dev-r1234 (1.0_pre1234)
+
+ regex match.groups:
+ pkgfoo-1.0.dev-r1234
+ group 1 pv major (1.0)
+ group 2 entire suffix (.dev-r1234)
+ group 3 replace this with portage suffix (.dev-r)
+ group 4 suffix version (1234)
+
+ The order of the regex's is significant. For instance if you have
+ .dev-r123, dev-r123 and -r123 you should order your regex's in
+ that order.
+
+ The number of regex's could have been reduced, but we use four
+ number of match.groups every time to simplify the code
+
+ The _pre suffix is most-likely incorrect. There is no 'dev'
+ prefix used by portage, the 'earliest' there is is '_alpha'.
+ The chronological portage release versions are:
+ _alpha
+ _beta
+ _pre
+ _rc
+ release
+ _p
+ """
+ my_p = ""
+ suf_matches = {
+ '_pre': ['(.*)((\.dev-r)([0-9]+))$',
+ '(.*)((dev-r)([0-9]+))$',
+ '(.*)((-r)([0-9]+))$'],
+ '_alpha': ['(.*)((-a)([0-9]+))$', '(.*)((a)([0-9]+))$'],
+ '_beta': ['(.*)((-b)([0-9]+))$', '(.*)((b)([0-9]+))$'],
+ '_rc': ['(.*)((\.rc)([0-9]+))$', '(.*)((-rc)([0-9]+))$',
+ '(.*)((rc)([0-9]+))$', '(.*)((-c)([0-9]+))$',
+ '(.*)((\.c)([0-9]+))$', '(.*)((c)([0-9]+))$'],
+ }
+ sufs = suf_matches.keys()
+ rs_match = None
+ for this_suf in sufs:
+ if rs_match:
+ break
+ for regex in suf_matches[this_suf]:
+ rsuffix_regex = re.compile(regex)
+ rs_match = rsuffix_regex.match(up_pv)
+ if rs_match:
+ portage_suffix = this_suf
+ break
+ if rs_match:
+ #e.g. 1.0.dev-r1234
+ major_ver = rs_match.group(1) # 1.0
+ #whole_suffix = rs_match.group(2) #.dev-r1234
+ replace_me = rs_match.group(3) #.dev-r
+ rev = rs_match.group(4) #1234
+ if not up_pn.islower():
+ my_pn = up_pn
+ pn = up_pn.lower()
+ pv = major_ver + portage_suffix + rev
+ if my_pn:
+ my_p = "${MY_PN}-${MY_PV}"
+ else:
+ my_p = "${PN}-${MY_PV}"
+ my_pv = "${PV/%s/%s}" % (portage_suffix, replace_me)
+
+ #Single suffixes with no numeric component are simply removed.
+ else:
+ bad_suffixes = [".dev", "-dev", "dev", ".final", "-final", "final"]
+ for suffix in bad_suffixes:
+ if up_pv.endswith(suffix):
+ my_pv = "${PV}%s" % suffix
+ my_p = "${PN}-${MY_PV}"
+ pn = up_pn
+ pv = up_pv[:-(len(suffix))]
+ if not pn.islower():
+ if not my_pn:
+ my_pn = pn
+ pn = pn.lower()
+ break
+ return pn, pv, my_p, my_pn, my_pv
+
def sanitize_uri(uri):
    """
    Return URI without any un-needed extension.
    e.g. http://downloads.sourceforge.net/pythonreports/PythonReports-0.3.1.tar.gz?modtime=1182702645&big_mirror=0
    would have everything after '?' stripped

    The previous implementation documented this behavior but returned
    the URI unchanged.

    @param uri: URI to package with no variable substitution
    @type uri: string

    @returns: string
    """
    return uri.split("?", 1)[0]
+
def get_vars(uri, up_pn, up_pv, pn="", pv="", my_pn="", my_pv=""):
    """
    Determine P* and MY_* variables

    @param uri: download URI with no variable substitution
    @param up_pn: upstream (PyPI) package name
    @param up_pv: upstream (PyPI) package version
    @param pn, pv, my_pn, my_pv: command-line overrides, may be empty

    @returns: dict with keys pn, pv, p, my_p, my_pn, my_pv, my_p_raw,
        src_uri

    Don't modify this to accept new URI schemes without writing new
    test_enamer unit tests

    This function makes me weep and gives me nightmares.

    """
    my_p = my_p_raw = ""
    uri = sanitize_uri(uri)
    sf_uri, _sf_homepage = parse_sourceforge_uri(uri)
    if sf_uri:
        uri = sf_uri
        #XXX _sf_homepage can be used if package metadata doesn't have one

    #Make sure we have a valid PV

    #Test for PV with -r1234 suffix
    #Portage uses -r suffixes for its own ebuild revisions so
    #we have to convert it to _pre or _alpha etc.
    #The old bare 'except: pass' swallowed every error here, including
    #real bugs inside bad_pv; only the expected IndexError is caught now.
    try:
        tail = up_pv.split("-")[-1][0]
    except IndexError:
        #up_pv ends with "-"; there is no suffix character to inspect
        tail = ""
    if tail == "r":
        #we have a version with a -r[nnn] suffix
        pn, pv, my_p, my_pn, my_pv = \
                bad_pv(up_pn, up_pv, pn, pv, my_pn, my_pv)

    if not portage_dep.isvalidatom("=dev-python/%s-%s" % (up_pn, up_pv)):
        pn, pv, my_p, my_pn, my_pv = \
                bad_pv(up_pn, up_pv, pn, pv, my_pn, my_pv)

    #No PN or PV given on command-line, try upstream's name/version
    if not pn and not pv:
        #Try to determine pn and pv from uri
        parts = split_p(uri)
        if parts:
            # pylint: disable-msg=W0612
            # unused variable 'rev'
            # The 'rev' is never used because these are
            # new ebuilds being created.
            pn, pv, rev = parts
        else:
            pn = up_pn
            pv = up_pv
    #Try upstream's version if it couldn't be determined from uri or cli option
    elif pn and not pv:
        pv = up_pv
    elif not pn and pv:
        pn = up_pn.lower()

    if not pn.islower():
        #up_pn is lower but uri has upper-case
        if not my_pn:
            my_pn = pn
        pn = pn.lower()
    p = "%s-%s" % (pn, pv)

    #Check if we need to use MY_P based on src's uri
    if my_p:
        src_uri, my_p_raw = get_myp(uri)
    else:
        src_uri, my_p, my_p_raw = get_src_uri(uri)

    #Make sure we have a valid P
    if not portage_dep.isvalidatom("=dev-python/%s-%s" % (pn, pv)):
        if not portage_dep.isjustname("dev-python/%s-%s" % (pn, pv)):
            raise portage_exception.InvalidPackageName(pn)
        else:
            raise portage_exception.InvalidVersionString(pv)

    if not my_pn:
        my_pn = "-".join(my_p.split("-")[:-1])
        if (my_pn == pn) or (my_pn == "${PN}"):
            my_pn = ""

    if my_p:
        if my_p == "%s-%s" % (my_pn, "${PV}"):
            my_p = "${MY_PN}-${PV}"
        elif my_p == "%s-%s" % (my_pn, my_pv):
            my_p = "${MY_PN}-${MY_PV}"
        elif my_p == "%s-%s" % ("${PN}", my_pv):
            my_p = "${PN}-${MY_PV}"
        else:
            my_p = my_p.replace(pn, "${PN}")
            my_p = my_p.replace(pv, "${PV}")

    return {'pn': pn,
            'pv': pv,
            'p': p,
            'my_p': my_p,
            'my_pn': my_pn,
            'my_pv': my_pv,
            'my_p_raw': my_p_raw,
            'src_uri': src_uri,
            }
+
def get_src_uri(uri):
    """Return (src_uri, my_p, my_p_raw) for the ebuild."""
    my_p = my_p_raw = ''
    if is_good_filename(uri):
        #Filename already yields sane PN/PV; just template ${P}
        src_uri = get_components(uri)[0]
    else:
        src_uri, my_p = get_myp(uri)
        pn, pv = guess_components(my_p)
        if pn and pv:
            my_p_raw = my_p
            my_p = my_p.replace(pn, "${PN}").replace(pv, "${PV}")
    return src_uri, my_p, my_p_raw
+
--- /dev/null
+#!/usr/bin/env python
+# pylint: disable-msg=C0301,W0613,W0612,C0103,E0611,W0511
+
+"""
+
+portage_utils.py
+================
+
+Various functions dealing with portage
+
+"""
+
+import sys
+import os
+import commands
+import logging
+#import fnmatch
+
+import portage
+
+try:
+ #portage >= 2.2
+ from portage import dep as portage_dep
+except ImportError:
+ #portage <= 2.1
+ from portage import portage_dep
+
+sys.path.insert(0, "/usr/lib/gentoolkit/pym")
+import gentoolkit
+
+
+__docformat__ = 'restructuredtext'
+
#Portage's merged configuration environment (PORTDIR, PORTAGE_TMPDIR,
#PORTDIR_OVERLAY, ACCEPT_KEYWORDS, ...), snapshotted at import time
ENV = portage.config(clone=portage.settings).environ()
#Module-level logger used by the helper functions in this module
LOGGER = logging.getLogger(__name__)
LOGGER.setLevel(logging.DEBUG)
LOGGER.addHandler(logging.StreamHandler())
+
+
def get_installed_ver(cpn):
    """
    Return PV for installed version of package

    @param cpn: cat/pkg-ver
    @type cpn: string

    @returns: string version or None if pkg is not installed

    """
    try:
        #Return first version installed
        #XXX Log warning if more than one installed (SLOT)?
        pkg = gentoolkit.find_installed_packages(cpn, masked=True)[0]
        return pkg.get_version()
    except Exception:
        #Narrowed from a bare 'except:' so SystemExit/KeyboardInterrupt
        #are no longer swallowed; empty result or lookup failure means
        #the package isn't installed.
        return None
+
def valid_cpn(cpn):
    """
    Return True if cpn is a valid portage category/pn-pv atom

    @param cpn: cat/pkg-ver
    @type cpn: string

    @returns: bool
    """
    #bool() normalizes whatever truthy value isvalidatom returns,
    #replacing the manual if/else True/False
    return bool(portage_dep.isvalidatom(cpn))
+
+
def ebuild_exists(cat_pkg):
    """
    Check if an ebuild exists in the portage tree or an overlay

    @param cat_pkg: portage category/packagename
    @type cat_pkg: string

    @returns: True if at least one ebuild exists, False otherwise
    """
    #A non-empty result list is truthy; no need for len()/if/else
    return bool(gentoolkit.find_packages(cat_pkg))
+
+#def run_tests(ebuild_path):
+# """
+# Use portage to run tests
+
+# Some day I'll figure out how to get portage to do this directly. Some day.
+
+# @param ebuild_path: full path to ebuild
+# @type ebuild_path: string
+# @returns: None if succeed, raises OSError if fails to unpack
+
+# """
+# cmd = "/usr/bin/python /usr/bin/ebuild %s test" % ebuild_path
+# print cmd
+# (status, output) = commands.getstatusoutput(cmd)
+# print output
+# print status
+
def unpack_ebuild(ebuild_path):
    """
    Use portage to unpack an ebuild

    Some day I'll figure out how to get portage to do this directly. Some day.

    @param ebuild_path: full path to ebuild
    @type ebuild_path: string
    @returns: None if succeed, raises OSError if fails to unpack

    """
    (status, output) = commands.getstatusoutput("ebuild %s digest setup clean unpack" % ebuild_path)
    if status:
        #Portage's error message, sometimes.
        #Couldn't determine PN or PV so we misnamed ebuild
        if 'does not follow correct package syntax' in output:
            LOGGER.error(output)
            LOGGER.error("Misnamed ebuild: %s" % ebuild_path)
            LOGGER.error("Try using -n or -v to force PN or PV")
            os.unlink(ebuild_path)
        else:
            LOGGER.error(output)
            #Include portage's output so callers see why unpacking failed
            raise OSError(output)
+
def find_s_dir(p, cat):
    """
    Determine ${S} by inspecting what the unpack phase produced.

    @param p: portage ${P}
    @type p: string

    @param cat: valid portage category
    @type cat: string

    @returns: directory name string if detected, "" if sources went
        straight into WORKDIR, None if ${S} couldn't be determined
    """
    workdir = get_workdir(p, cat)
    unpacked_dirs = [entry for entry in os.listdir(workdir)
            if os.path.isdir(os.path.join(workdir, entry))]
    if len(unpacked_dirs) == 1:
        #Exactly one directory: that must be ${S}
        return unpacked_dirs[0]
    if not unpacked_dirs:
        #Unpacked in cwd
        return ""
    #XXX Need to search whole tree for setup.py
    LOGGER.error("Can't determine ${S}")
    LOGGER.error("Unpacked multiple directories: %s" % unpacked_dirs)
+
def get_workdir(p, cat):
    """
    Return portage WORKDIR for a package.

    @param p: portage ${P}
    @type p: string

    @param cat: valid portage category
    @type cat: string

    @return: string of portage_tmpdir/portage/cat/p/work
    """
    return "/".join((get_portage_tmpdir(), "portage", cat, p, "work"))
+
def get_portdir_overlay():
    """Return the first PORTDIR_OVERLAY entry from /etc/make.conf"""
    return ENV['PORTDIR_OVERLAY'].split(" ", 1)[0]
+
def get_portage_tmpdir():
    """Return portage's PORTAGE_TMPDIR setting (build scratch space)."""
    return ENV["PORTAGE_TMPDIR"]
+
def get_portdir():
    """Return portage's PORTDIR setting (location of the main tree)."""
    return ENV["PORTDIR"]
+
def get_keyword():
    """Return the first ACCEPT_KEYWORDS entry, forced to testing (~arch)."""
    #Use the first arch listed, in case there are several
    arch = ENV["ACCEPT_KEYWORDS"].split(' ')[0]
    #New ebuilds must be keyworded ~arch
    if arch.startswith('~'):
        return arch
    return "~%s" % arch
+
+def make_overlay_dir(category, pn, overlay):
+ """
+ Create directory(s) in overlay for ebuild
+
+ @param category: valid portage category
+ @type category: string
+
+ @param pn: portage ${PN}
+ @type pn: string
+
+ @param overlay: portage overlay directory
+ @type overlay: string
+
+ @return: string of full directory name
+
+ """
+
+ ebuild_dir = os.path.join(overlay, category, pn)
+ if not os.path.isdir(ebuild_dir):
+ try:
+ os.makedirs(ebuild_dir)
+ except OSError, err:
+ #XXX Use logger
+ LOGGER.error(err)
+ sys.exit(2)
+ return ebuild_dir
+
+
def find_egg_info_dir(root):
    """
    Return the directory containing the first *.egg-info directory
    found at or below the supplied root, or None if there is none.
    """
    for dirpath, subdirs, _files in os.walk(os.path.abspath(root)):
        matches = [d for d in subdirs if d.endswith(".egg-info")]
        if matches:
            #Parent of the egg-info directory, normalized
            return os.path.normpath(os.path.join(dirpath, matches[0], ".."))
+
+#Unused as of now. Could be used to find setup.py
+#def find_files(pattern, root):
+# """
+# Locate all files matching supplied filename pattern in and below
+# supplied root directory.
+# """
+# for path, dirs, files in os.walk(os.path.abspath(root)):
+# for filename in fnmatch.filter(dirs, pattern):
+# yield os.path.join(path, filename)
--- /dev/null
+#!/usr/bin/python
+
+from setuptools import setup
+
+from g_pypi.__init__ import __version__ as VERSION
+
+
#Package metadata; url updated to Google Code hosting (the project
#moved there from assembla -- see NEWS and README).
setup(name="g-pypi",
      license="GPL-2",
      version=VERSION,
      description="Tool for creating Gentoo ebuilds for Python packages by querying PyPI (The Cheese Shop)",
      long_description=open("README", "r").read(),
      maintainer="Rob Cakebread",
      author="Rob Cakebread",
      author_email="gentoodev@gmail.com",
      url="http://code.google.com/p/g-pypi/",
      keywords="gentoo ebuilds PyPI setuptools cheeseshop distutils eggs portage package management",
      classifiers=["Development Status :: 2 - Pre-Alpha",
                   "Intended Audience :: Developers",
                   "License :: OSI Approved :: GNU General Public License (GPL)",
                   "Programming Language :: Python",
                   "Topic :: Software Development :: Libraries :: Python Modules",
                   ],
      install_requires=["Pygments",
                        "setuptools",
                        "Cheetah",
                        "ConfigObj",
                        ],
      packages=['g_pypi'],
      package_dir={'g_pypi': 'g_pypi'},
      include_package_data=True,
      entry_points={'console_scripts': ['g-pypi = g_pypi.cli:main']},
      test_suite='nose.collector',
)
+
--- /dev/null
+#!/usr/bin/env python
+
+
+"""
+
+*** WARNING ***
+*** WARNING ***
+*** WARNING ***
+
+This will attempt to create an ebuild for every single release on PyPI
+which obviously will take a long time and require a decent amount of bandwidth
+
+*** WARNING ***
+*** WARNING ***
+*** WARNING ***
+
+"""
+
import pickle
import os

from yolk.pypi import CheeseShop


cheeseshop = CheeseShop()
PKG_INDEX = "pkg_index"

#Cache the full PyPI index on disk; pickles are binary files and the
#file handles must be closed (the original leaked both handles and
#used text mode).
if os.path.exists(PKG_INDEX):
    index_file = open(PKG_INDEX, 'rb')
    try:
        full_index = pickle.load(index_file)
    finally:
        index_file.close()
else:
    full_index = cheeseshop.search({"name": "foo"}, "or")
    index_file = open(PKG_INDEX, 'wb')
    try:
        pickle.dump(full_index, index_file)
    finally:
        index_file.close()

for pkg in full_index:
    #os.system echo keeps messages ordered with g-pypi's output in logs
    os.system('echo Testing %s' % pkg['name'].encode('utf-8'))
    os.system('g-pypi -qo %s' % pkg['name'])
    #os.system('echo %s' % ('-' * 79))
--- /dev/null
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
+"""
+
+Functional CLI testing using the PyPI RSS feed to try to create an ebuild
+for each package.
+
+"""
+
+__docformat__ = 'restructuredtext'
+
+import urllib
+import os
+import sys
+
+#TODO:
+"""
+Add switch for --pretend make default write ebuilds
+Make option to write ebuild to tempdir and then cleanup after test is done
+
+
+
+
+"""
#Python >=2.5 ships ElementTree in the stdlib. The original test
#compared for exactly (2, 5), so 2.6+ wrongly fell through to the
#third-party module branch.
if sys.version_info >= (2, 5):
    from xml.etree.cElementTree import iterparse
else:
    try:
        #Python <2.5 has elementtree as 3rd party module
        from cElementTree import iterparse
    except ImportError:
        print("You need to install cElementTree")
        sys.exit(2)

PYPI_URL = 'http://www.python.org/pypi?:action=rss'

#Packages we don't want to test. Mainly ones that require svn auth
SKIP = ['byCycleCore']
+
def get_pkg_ver(pv, add_quotes=True):
    """Split an RSS title like 'package_name 1.0' into (name, version).

    Multi-word package names are re-joined and optionally shell-quoted.

    @param pv: RSS title, e.g. "some package 1.0"
    @type pv: string
    @param add_quotes: wrap multi-word names in single quotes
    @type add_quotes: bool

    @returns: tuple (pkg_name, version); version is always a string
        (the original returned a one-element list in the multi-word
        branch because of 'parts[-1:]')
    """
    parts = pv.split()
    if len(parts) == 2:
        #Normal package_name 1.0
        pkg_name, ver = parts
    else:
        #Package name itself contains spaces
        ver = parts[-1]
        name = " ".join(parts[:-1])
        if add_quotes:
            pkg_name = "'%s'" % name
        else:
            pkg_name = name
    return pkg_name, ver
+
def cli_test(pypi_xml):
    """Run g-pypi against every package title found in the PyPI RSS feed."""
    for _event, element in iterparse(pypi_xml):
        if element.tag == "title":
            title = element.text
            if not title.startswith('Cheese Shop recent updates'):
                pkg_name, _ver = get_pkg_ver(title)
                if pkg_name not in SKIP:
                    #Using os.system for the echoes keeps the messages
                    #interleaved with g-pypi's output when redirecting
                    #everything to a log
                    os.system('echo Testing %s' % title)
                    os.system('g-pypi -V %s' % pkg_name)
                    os.system('echo %s' % ('-' * 79))
        #Free the element to keep iterparse memory usage flat
        element.clear()
+
+if __name__ == "__main__":
+ cli_test(urllib.urlopen(PYPI_URL))
--- /dev/null
+#!/usr/bin/env python
+
+
+"""
+
+*** WARNING ***
+*** WARNING ***
+*** WARNING ***
+
+This will attempt to create an ebuild for every single release on PyPI
+which obviously will take a long time and require a decent amount of bandwidth
+
+*** WARNING ***
+*** WARNING ***
+*** WARNING ***
+
+"""
+
import pickle
import os

from yolk.pypi import CheeseShop


cheeseshop = CheeseShop()
PKG_INDEX = "pkg_index"

#Cache the full PyPI index on disk; pickles are binary files and the
#handles must be closed (the original leaked both and used text mode).
if os.path.exists(PKG_INDEX):
    index_file = open(PKG_INDEX, 'rb')
    try:
        full_index = pickle.load(index_file)
    finally:
        index_file.close()
else:
    full_index = cheeseshop.search({"name": "foo"}, "or")
    index_file = open(PKG_INDEX, 'wb')
    try:
        pickle.dump(full_index, index_file)
    finally:
        index_file.close()

#The original kept a 'found' flag that was set True on the first
#iteration and then tested inside the same loop -- always True, dead
#logic, removed.
for pkg in full_index:
    if 'dev' in pkg['version']:
        os.system('echo Testing %s' % pkg['name'].encode('utf-8'))
        os.system('g-pypi -Vo %s' % pkg['name'])
        #os.system('echo %s' % ('-' * 79))
--- /dev/null
+
+
+from g_pypi.ebuild import *
+
+
def test_get_portage_license():
    """Classifier strings map to the expected portage license names"""
    cases = [
        ("License :: OSI Approved :: Zope Public License", "ZPL"),
        ("License :: OSI Approved :: GNU Library or Lesser General Public License (LGPL)",
         "LGPL-2.1"),
        ("License :: Public Domain", "public-domain"),
        ("", ""),
    ]
    for classifier, expected in cases:
        assert get_portage_license(classifier) == expected
+
+
def test_is_valid_license():
    """License strings are validated against ${PORTDIR}/licenses"""
    assert not is_valid_license("GPL")
    assert is_valid_license("GPL-2")
+
--- /dev/null
+#!/usr/bin/env python
+
+
+"""
+
+enamer
+======
+
+Unit tests via nose
+
+This module does all the trickery to determine MY_*, SRC_URI etc. variables
+
+"""
+
+import sys
+
+#import functions we're testing implicitly so we can get an idea of coverage
+from g_pypi.enamer import is_valid_uri, get_filename, get_vars, \
+ parse_sourceforge_uri
+
+
def test_parse_sourceforge_uri():
    """
    Convert sourceforge URIs to portage mirror:// URIs

    """
    expected = ("mirror://sourceforge/pythonreports/PythonReports-0.3.0.tar.gz",
                "http://sourceforge.net/projects/pythonreports/")
    sf_uris = [
        "http://internap.dl.sourceforge.net/sourceforge/pythonreports/PythonReports-0.3.0.tar.gz",
        "http://downloads.sourceforge.net/pythonreports/PythonReports-0.3.0.tar.gz",
        #Abbreviated sf.net domain
        "http://downloads.sf.net/pythonreports/PythonReports-0.3.0.tar.gz",
    ]
    for uri in sf_uris:
        assert parse_sourceforge_uri(uri) == expected

    #Non-sourceforge URIs come back as empty strings
    assert parse_sourceforge_uri("http://yahoo.com/pythonReports-0.3.0.tar.gz") \
        == ('', '')
+
def test_is_valid_uri():
    """URI schemes g-pypi accepts: http, ftp, mirror and svn

    """
    accepted = [
        'http://foo.com/foo-1.0.tbz2',
        'ftp://foo.com/foo-1.0.tbz2',
        'mirror://sourceforge/foo-1.0.tbz2',
        'http://foo.com/foo-1.0.tbz2#md5=2E3AF09',
        'svn://foo.com/trunk/foo',
        'http://www.themarkedmen.com/',
    ]
    for uri in accepted:
        assert is_valid_uri(uri) == True

    assert is_valid_uri('The Marked Men') == False
+
def test_get_filename():
    """Filename minus archive extension is extracted from src_uri"""
    cases = [
        ("http://www.foo.com/pkgfoo-1.0.tbz2", "pkgfoo-1.0"),
        ("http://www.foo.com/PKGFOO-1.0.tbz2", "PKGFOO-1.0"),
        ("http://www.foo.com/pkgfoo_1.0.tbz2", "pkgfoo_1.0"),
        ("http://www.foo.com/PKGFOO_1.0.tbz2", "PKGFOO_1.0"),
        ("http://www.foo.com/pkg-foo-1.0_beta1.tbz2", "pkg-foo-1.0_beta1"),
        ("http://www.foo.com/pkg_foo-1.0lawdy.tbz2", "pkg_foo-1.0lawdy"),
        ("http://internap.dl.sourceforge.net/sourceforge/abeni/abeni-0.0.22.tar.gz",
         "abeni-0.0.22"),
        ("http://internap.dl.sourceforge.net/sourceforge/dummy/StupidName_0.2.tar.gz",
         "StupidName_0.2"),
    ]
    for uri, expected in cases:
        assert get_filename(uri) == expected
+
+
+
#Shared narrative for the test_get_vars* family below; kept as a module
#attribute so it can serve as documentation for all of those tests.
test_get_vars_docs = \
    """

    Test ``get_vars`` with all types of URI's we can come up with.

    Note:
    -----

    up_pn and up_pv are upstream's package name and package version respectively
    and not actually used in an ebuild. These are the names returned
    from yolklib/PyPI.


    """
+
def test_get_vars1():
    """
    Absolute best-case scenario: sane package name and a perfect URI
    determine $P from up_pn and up_pv.

    """
    uri = "http://www.foo.com/pkgfoo-1.0.tbz2"
    params = ("pkgfoo", "1.0")
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0',
        'p': 'pkgfoo-1.0',
        'my_pn': '',
        'my_pv': '',
        'my_p': '',
        'my_p_raw': '',
        'src_uri': 'http://www.foo.com/${P}.tbz2',
    }
    _get_vars((uri,) + params, expected, get_vars(uri, *params))
+
+
def test_get_vars2():
    """
    (up_pn == pn) but the URI has the wrong case

    """
    uri = "http://www.foo.com/PkgFoo-1.0.tbz2"
    params = ("pkgfoo", "1.0")
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0',
        'p': 'pkgfoo-1.0',
        'my_pn': 'PkgFoo',
        'my_pv': '',
        'my_p': '${MY_PN}-${PV}',
        'my_p_raw': 'PkgFoo-1.0',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri,) + params, expected, get_vars(uri, *params))
+
def test_get_vars3():
    """
    (up_pn != pn) and the URI has the correct case

    """
    uri = "http://www.foo.com/pkgfoo-1.0.tbz2"
    params = ("PKGFoo", "1.0")
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0',
        'p': 'pkgfoo-1.0',
        'my_pn': '',
        'my_pv': '',
        'my_p': '',
        'my_p_raw': '',
        'src_uri': 'http://www.foo.com/${P}.tbz2',
    }
    _get_vars((uri,) + params, expected, get_vars(uri, *params))
+
+
def test_get_vars4():
    """
    up_pn is not lower case but matches the URI's pn

    """
    uri = "http://www.foo.com/PKGfoo-1.0.tbz2"
    params = ("PKGFoo", "1.0", "pkgfoo")
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0',
        'p': 'pkgfoo-1.0',
        'my_pn': 'PKGfoo',
        'my_pv': '',
        'my_p': '${MY_PN}-${PV}',
        'my_p_raw': 'PKGfoo-1.0',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri,) + params, expected, get_vars(uri, *params))
+
+
def test_get_vars5():
    """
    up_pn is not lower case and its casing does not match the URI;
    MY_PN must reflect the URI's casing.
    """
    uri = "http://www.foo.com/pkgFOO-1.0.tbz2"
    up_pn = "PKGFoo"
    up_pv = "1.0"
    pn = "pkgfoo"
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0',
        'p': 'pkgfoo-1.0',
        'my_pn': 'pkgFOO',
        'my_pv': '',
        'my_p': '${MY_PN}-${PV}',
        'my_p_raw': 'pkgFOO-1.0',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri, up_pn, up_pv, pn), expected,
              get_vars(uri, up_pn, up_pv, pn))
+
def test_get_vars6():
    """
    up_pn has uppercase; explicit PN, PV and MY_PN are supplied.

    Checks that a caller-provided MY_PN is honored when the URI uses
    the mixed-case name.
    """
    pn = "pkgfoo"
    up_pn = "PkgFoo"
    up_pv = "1.0"
    pv = "1.0"
    my_pn = up_pn
    my_pv = ""
    uri = "http://www.foo.com/PkgFoo-1.0.tbz2"
    # Include my_pv so a failure report shows *every* argument passed
    # to get_vars() below (it was previously omitted from the tuple).
    input_test = (uri, up_pn, up_pv, pn, pv, my_pn, my_pv)
    correct = {
        'pn': 'pkgfoo',
        'pv': '1.0',
        'p': 'pkgfoo-1.0',
        'my_pn': 'PkgFoo',
        'my_pv': '',
        'my_p': '${MY_PN}-${PV}',
        'my_p_raw': 'PkgFoo-1.0',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    results = get_vars(uri, up_pn, up_pv, pn, pv, my_pn, my_pv)
    _get_vars(input_test, correct, results)
+
def test_get_vars7():
    """
    up_pn has uppercase and no PN is given (empty string); MY_PN and
    MY_PV are passed explicitly.
    """
    uri = "http://www.foo.com/PkgFoo-1.0.tbz2"
    up_pn = "PkgFoo"
    up_pv = "1.0"
    pn = ""
    pv = "1.0"
    my_pn = "PkgFoo"
    my_pv = ""
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0',
        'p': 'pkgfoo-1.0',
        'my_pn': 'PkgFoo',
        'my_pv': '',
        'my_p': '${MY_PN}-${PV}',
        'my_p_raw': 'PkgFoo-1.0',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri, up_pn, up_pv, pn, pv, my_pn, my_pv), expected,
              get_vars(uri, up_pn, up_pv, pn, pv, my_pn, my_pv))
+
def test_get_vars8():
    """
    'dev' suffix on the upstream version is stripped from PV and
    re-created through MY_PV.
    """
    uri = "http://www.foo.com/pkgfoo-1.0dev.tbz2"
    up_pn = "pkgfoo"
    up_pv = "1.0dev"
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0',
        'p': 'pkgfoo-1.0',
        'my_pn': '',
        'my_pv': '${PV}dev',
        'my_p': '${PN}-${MY_PV}',
        'my_p_raw': 'pkgfoo-1.0dev',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri, up_pn, up_pv), expected, get_vars(uri, up_pn, up_pv))
+
+
def test_get_vars9():
    """
    An upstream '-r123' suffix is converted to '_pre123' (Gentoo
    reserves -rN for ebuild revisions).
    """
    uri = "http://www.foo.com/pkgfoo-1.0-r123.tbz2"
    up_pn = "pkgfoo"
    up_pv = "1.0-r123"
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0_pre123',
        'p': 'pkgfoo-1.0_pre123',
        'my_pn': '',
        'my_pv': '${PV/_pre/-r}',
        'my_p': '${PN}-${MY_PV}',
        'my_p_raw': 'pkgfoo-1.0-r123',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri, up_pn, up_pv), expected, get_vars(uri, up_pn, up_pv))
+
+
def test_get_vars10():
    """
    '.dev-r1234' suffix is normalized to '_pre1234' in PV, with MY_PV
    reconstructing the upstream form.
    """
    uri = "http://www.foo.com/pkgfoo-1.0.dev-r1234.tbz2"
    up_pn = "pkgfoo"
    up_pv = "1.0.dev-r1234"
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0_pre1234',
        'p': 'pkgfoo-1.0_pre1234',
        'my_pn': '',
        'my_pv': '${PV/_pre/.dev-r}',
        'my_p': '${PN}-${MY_PV}',
        'my_p_raw': 'pkgfoo-1.0.dev-r1234',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri, up_pn, up_pv), expected, get_vars(uri, up_pn, up_pv))
+
+
def test_get_vars11():
    """
    'dev-r1234' suffix (no dot) is normalized to '_pre1234' in PV.
    """
    uri = "http://www.foo.com/pkgfoo-1.0dev-r1234.tbz2"
    up_pn = "pkgfoo"
    up_pv = "1.0dev-r1234"
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0_pre1234',
        'p': 'pkgfoo-1.0_pre1234',
        'my_pn': '',
        'my_pv': '${PV/_pre/dev-r}',
        'my_p': '${PN}-${MY_PV}',
        'my_p_raw': 'pkgfoo-1.0dev-r1234',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri, up_pn, up_pv), expected, get_vars(uri, up_pn, up_pv))
+
+
def test_get_vars12():
    """
    'a4' suffix becomes '_alpha4' in PV.
    """
    uri = "http://www.foo.com/pkgfoo-1.0a4.tbz2"
    up_pn = "pkgfoo"
    up_pv = "1.0a4"
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0_alpha4',
        'p': 'pkgfoo-1.0_alpha4',
        'my_pn': '',
        'my_pv': '${PV/_alpha/a}',
        'my_p': '${PN}-${MY_PV}',
        'my_p_raw': 'pkgfoo-1.0a4',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri, up_pn, up_pv), expected, get_vars(uri, up_pn, up_pv))
+
+
def test_get_vars13():
    """
    'b1' suffix becomes '_beta1' in PV.
    """
    uri = "http://www.foo.com/pkgfoo-1.0b1.tbz2"
    up_pn = "pkgfoo"
    up_pv = "1.0b1"
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0_beta1',
        'p': 'pkgfoo-1.0_beta1',
        'my_pn': '',
        'my_pv': '${PV/_beta/b}',
        'my_p': '${PN}-${MY_PV}',
        'my_p_raw': 'pkgfoo-1.0b1',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri, up_pn, up_pv), expected, get_vars(uri, up_pn, up_pv))
+
+
def test_get_vars14():
    """
    '-b1' suffix becomes '_beta1' in PV.
    """
    uri = "http://www.foo.com/pkgfoo-1.0-b1.tbz2"
    up_pn = "pkgfoo"
    up_pv = "1.0-b1"
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0_beta1',
        'p': 'pkgfoo-1.0_beta1',
        'my_pn': '',
        'my_pv': '${PV/_beta/-b}',
        'my_p': '${PN}-${MY_PV}',
        'my_p_raw': 'pkgfoo-1.0-b1',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri, up_pn, up_pv), expected, get_vars(uri, up_pn, up_pv))
+
def test_get_vars15():
    """
    '-a4' suffix becomes '_alpha4' in PV.
    """
    uri = "http://www.foo.com/pkgfoo-1.0-a4.tbz2"
    up_pn = "pkgfoo"
    up_pv = "1.0-a4"
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0_alpha4',
        'p': 'pkgfoo-1.0_alpha4',
        'my_pn': '',
        'my_pv': '${PV/_alpha/-a}',
        'my_p': '${PN}-${MY_PV}',
        'my_p_raw': 'pkgfoo-1.0-a4',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri, up_pn, up_pv), expected, get_vars(uri, up_pn, up_pv))
+
def test_get_vars16():
    """
    '-rc3' suffix becomes '_rc3' in PV.
    """
    uri = "http://www.foo.com/pkgfoo-1.0-rc3.tbz2"
    up_pn = "pkgfoo"
    up_pv = "1.0-rc3"
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0_rc3',
        'p': 'pkgfoo-1.0_rc3',
        'my_pn': '',
        'my_pv': '${PV/_rc/-rc}',
        'my_p': '${PN}-${MY_PV}',
        'my_p_raw': 'pkgfoo-1.0-rc3',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri, up_pn, up_pv), expected, get_vars(uri, up_pn, up_pv))
+
def test_get_vars17():
    """
    'rc3' suffix becomes '_rc3' in PV.
    """
    uri = "http://www.foo.com/pkgfoo-1.0rc3.tbz2"
    up_pn = "pkgfoo"
    up_pv = "1.0rc3"
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0_rc3',
        'p': 'pkgfoo-1.0_rc3',
        'my_pn': '',
        'my_pv': '${PV/_rc/rc}',
        'my_p': '${PN}-${MY_PV}',
        'my_p_raw': 'pkgfoo-1.0rc3',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri, up_pn, up_pv), expected, get_vars(uri, up_pn, up_pv))
+
def test_get_vars18():
    """
    '.rc3' suffix becomes '_rc3' in PV.
    """
    uri = "http://www.foo.com/pkgfoo-1.0.rc3.tbz2"
    up_pn = "pkgfoo"
    up_pv = "1.0.rc3"
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0_rc3',
        'p': 'pkgfoo-1.0_rc3',
        'my_pn': '',
        'my_pv': '${PV/_rc/.rc}',
        'my_p': '${PN}-${MY_PV}',
        'my_p_raw': 'pkgfoo-1.0.rc3',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri, up_pn, up_pv), expected, get_vars(uri, up_pn, up_pv))
+
def test_get_vars19():
    """
    Mixed-case package name combined with a '.rc3' suffix: both MY_PN
    and MY_PV are needed, so MY_P uses both.
    """
    uri = "http://www.foo.com/PkgFoo-1.0.rc3.tbz2"
    up_pn = "PkgFoo"
    up_pv = "1.0.rc3"
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0_rc3',
        'p': 'pkgfoo-1.0_rc3',
        'my_pn': 'PkgFoo',
        'my_pv': '${PV/_rc/.rc}',
        'my_p': '${MY_PN}-${MY_PV}',
        'my_p_raw': 'PkgFoo-1.0.rc3',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri, up_pn, up_pv), expected, get_vars(uri, up_pn, up_pv))
+
def test_get_vars20():
    """
    '-c3' suffix becomes '_rc3' in PV (mixed-case name, so MY_PN too).
    """
    uri = "http://www.foo.com/PkgFoo-1.0-c3.tbz2"
    up_pn = "PkgFoo"
    up_pv = "1.0-c3"
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0_rc3',
        'p': 'pkgfoo-1.0_rc3',
        'my_pn': 'PkgFoo',
        'my_pv': '${PV/_rc/-c}',
        'my_p': '${MY_PN}-${MY_PV}',
        'my_p_raw': 'PkgFoo-1.0-c3',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri, up_pn, up_pv), expected, get_vars(uri, up_pn, up_pv))
+
def test_get_vars21():
    """
    Mixed-case package name with a '.c3' suffix, which becomes '_rc3'.
    """
    uri = "http://www.foo.com/PkgFoo-1.0.c3.tbz2"
    up_pn = "PkgFoo"
    up_pv = "1.0.c3"
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0_rc3',
        'p': 'pkgfoo-1.0_rc3',
        'my_pn': 'PkgFoo',
        'my_pv': '${PV/_rc/.c}',
        'my_p': '${MY_PN}-${MY_PV}',
        'my_p_raw': 'PkgFoo-1.0.c3',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri, up_pn, up_pv), expected, get_vars(uri, up_pn, up_pv))
+
def test_get_vars22():
    """
    Mixed-case package name with a 'c3' suffix, which becomes '_rc3'.
    """
    uri = "http://www.foo.com/PkgFoo-1.0c3.tbz2"
    up_pn = "PkgFoo"
    up_pv = "1.0c3"
    expected = {
        'pn': 'pkgfoo',
        'pv': '1.0_rc3',
        'p': 'pkgfoo-1.0_rc3',
        'my_pn': 'PkgFoo',
        'my_pv': '${PV/_rc/c}',
        'my_p': '${MY_PN}-${MY_PV}',
        'my_p_raw': 'PkgFoo-1.0c3',
        'src_uri': 'http://www.foo.com/${MY_P}.tbz2',
    }
    _get_vars((uri, up_pn, up_pv), expected, get_vars(uri, up_pn, up_pv))
+
+
+def _get_vars(input_test, correct, results):
+ try:
+ assert results == correct
+ except AssertionError:
+ print "=" * 79
+ #Docstring from the calling function:
+ print sys._getframe(1).f_code.co_name + ".__doc__"
+ print sys._getframe(1).f_code.co_name.__doc__
+ print "=" * 79
+ print
+ print input_test
+ print "<var> -> <result> -> <correct>"
+ print
+ for key in results.keys():
+ if results[key] != correct[key]:
+ print "*",
+ print key, "->", results[key], "->", correct[key]
+
+ raise AssertionError
--- /dev/null
+
+import os
+
+from g_pypi.portage_utils import find_files, find_egg_info_dir
+
+
def test_find_files():
    """Smoke test: print every match for 'passwd' found under /etc."""
    print(list(find_files("passwd", "/etc")))
+
def test_find_egg_info_dir():
    """Check that find_egg_info_dir locates the egg-info under 'lib'.

    NOTE(review): relies on a Mako 0.1.7 build tree existing under
    /var/tmp/portage -- machine-specific; confirm before relying on it.
    """
    unpacked_dir = "/var/tmp/portage/dev-python/mako-0.1.7/work/Mako-0.1.7"
    expected = os.path.join(unpacked_dir, "lib")
    assert find_egg_info_dir(unpacked_dir) == expected