Commits

Andrew Macgregor committed e47204f Merge

Merge in develop


Files changed (16)

 .coverage
 vptest/*
 vpdocs/*
+vp/*
+htmlcov/*
+docs/_build/*
+*.pyc
 
 syntax: regexp
 
 
 Patches and Suggestions
 ```````````````````````
-  - others
+  - Michael Heyvaert
+  - Josh Hansen
+  
 vtest: vptest
 	vptest/bin/nosetests -v ./tests/*.py
 
-coverage:
+coverage: vptest
 	vptest/bin/nosetests --with-coverage --cover-erase --cover-package=parcel --cover-html --cover-branches
+	vptest/bin/coverage html
 
 docs: $(DOCFILES) vpdocs
 	. vpdocs/bin/activate; cd docs; make html
 vpdocs:
 	virtualenv vpdocs
 	vpdocs/bin/pip install sphinx==1.1.3
+
+clean:
+	rm -rf vptest vpdocs
+
 .. autoclass:: Deployment
    :inherited-members:
 
+.. module:: parcel.revisions
 
-.. module:: parcel.distro
+Hg
+---
+
+Access your Mercurial revision control information with the :class:`Hg <Hg>` object.
+
+.. autoclass:: Hg
+   :inherited-members:
+
+Git
+----
+
+Access your Git revision control information with the :class:`Git <Git>` object.
+
+.. autoclass:: Git
+   :inherited-members:
+
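A minimal usage sketch (the repository path is illustrative), using only methods documented above::

    from parcel.revisions import Hg, Git

    hg = Hg("build/clone")              # an existing Mercurial working copy
    branch = hg.branch()                # e.g. 'default'
    tag = hg.describe()                 # e.g. '0.5.1-23-d63d252639de'

    git = Git("build/clone")            # an existing Git working copy
    last = git.log()[0]['summary']      # most recent commit message
    tag = git.describe(force_tag=True)  # raises GitException unless HEAD is exactly on a tag
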
 
 Distro
 ------
 
+.. module:: parcel.distro
+
 Code specific to different distributions can be found in the :class:`Distro <Distro>` object.
 
 .. autoclass:: Distro
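
A brief sketch of how the new use_sudo option behaves (package names are illustrative)::

    from parcel.distro import debian, Debian

    debian.update_packages()                # default: the command is run with fabric connected as root
    d = Debian(use_sudo=True)
    d.update_packages()                     # same command, but issued via fabric's sudo()
    d.build_deps(['libyaml-ruby', 'ruby'])  # -> sudo("apt-get install -qq libyaml-ruby ruby")
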
 Feature Support
 ---------------
 
-Parcel is under heavy development and initially supports: 
+Although Parcel is intended to be fairly minimal, it presently supports: 
 
 - Debian targets
 - CentOS targets
 - uWSGI deployment
-
+- Git and Mercurial source trees
 
 
 .. include:: contents.rst.inc

parcel/deploy/deploy.py

     Parcel will probably use an instance of this class.
     """
 
-    virtual = "vp"
+    default_virtual = "vp"
     build_dir = '.parcel'
     
     # these are the full text versions of the scripts
     preinst_lines = []
     postinst_lines = []
     
-    def __init__(self, app_name, build_deps=[], run_deps=[], path=".", base=None, arch=distro.Debian(), version=None):
+    def __init__(self, app_name, build_deps=None, run_deps=None, path=".", 
+		 base=None, arch=distro.Debian(), version=None, venv_dirname=default_virtual):
+        """Initialise the Deploy object. 
+        WARNING: This is not your usual contructor. Constructing this object makes an immediate fabric connection to the
+        remote host to fetch information and update. Be aware of this. This is probably bad.
+        """
 
         #: The architecture of the build host. This should be a :class:`Distro <Distro>` object. 
         self.arch = arch
         self.app_name = app_name
 
         #: A list of packages that must be installed to run the resulting package.
-        self.run_deps = run_deps
+        self.run_deps = run_deps or []
 
         #: A list of packages that need to be installed to build the software.
-        self.build_deps = build_deps
+        self.build_deps = build_deps or []
         self.pkg_name = app_name.lower()
 
         #: The directory that will be used as the base level directory.
         self.path = os.path.realpath(path)
 
         #: Location of files during build on build host. Default is user's home directory.
-        #: If path is relative, it's relative to the remote user's home directory. If the path is absolute,
-        #: it's used as is.
+        #: If path is relative, it's relative to the remote user's home directory. 
+        #: If the path is absolute, it's used as is.
         self.base_path = os.path.join(remotehome,self.build_dir)
 
         self.pkg_name = app_name.lower()
         # the build path
         self.build_path = os.path.join(self.root_path, self.app_path[1:])                # cut the first / off app_path
 
-        print "ROOT_PATH", self.root_path
-        print "BASE_PATH",self.base_path
-        print "APP PATH",self.app_path
-        print "BUILD PATH",self.build_path
+        # the name of the virtual environment dir
+        self.virtual = venv_dirname
 
         self._clean()
         
     def prepare_app(self, branch=None, requirements="requirements.txt"):
         """Creates the necessary directories on the build server, checks out the desired branch (None means current),
         creates a virtualenv and populates it with dependencies from requirements.txt. 
-        As a bonus it also fixes the shebangs (#!) of all scripts in the virtualenv to point the correct Python path
-        on the target system."""
+
+        :param requirements: The name of the requirements.txt file relative to the path setting used in the constructor.
+        """
         self._sync_app()
         self._add_venv(requirements)
             
 
     def add_prerm(self, lines):
         """Add lines to the prerm file"""
-        self.prerm_lines.extend(lines)
+        self.prerm_lines = self.prerm_lines + lines
         
     def add_postrm(self, lines):
         """Add lines to the postrm file"""        
-        self.postrm_lines.extend(lines)
+        self.postrm_lines = self.postrm_lines + lines
         
     def add_preinst(self, lines):
         """Add lines to the preinst file"""        
-        self.preinst_lines.extend(lines)
+        self.preinst_lines = self.preinst_lines + lines
         
     def add_postinst(self, lines):
         """Add lines to the postinst file"""
-        self.postinst_lines.extend(lines)
+        self.postinst_lines = self.postinst_lines + lines
 
     def build_package(self, templates=True):
         """Takes the whole app including the virtualenv, packages it using fpm and downloads it to the local host.
     def _add_venv(self,requirements="requirements.txt"):
         """Builds virtualenv on remote host and installs from requirements.txt.
         
-        :param requirements: The name of the requirements.txt file.
+        :param requirements: The name of the requirements.txt file relative to the path setting used in the constructor.
         """
         self.venv_path = os.path.join(self.build_path, self.virtual)
         run('virtualenv %s'%(self.venv_path))
-        if requirements and os.path.exists(requirements):
+        if requirements and os.path.exists(os.path.join(self.path, requirements)):
             run('PIP_DOWNLOAD_CACHE="%s" %s install -r %s'%(
                 self.arch.pip_download_cache,
 	            os.path.join(self.venv_path, 'bin/pip'),
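
A rough sketch of how the constructor changes above are exercised, mirroring the tests later in this changeset (the app name and base path are illustrative)::

    import os
    from parcel.deploy import Deployment
    from parcel.distro import centos

    # note: constructing a Deployment immediately opens a fabric connection to the remote host
    d = Deployment('testapp', base=os.path.expanduser('~/'), arch=centos, venv_dirname='venv')
    d.prepare_app()        # sync the source and build the virtualenv under <build_path>/venv
    d.build_package()      # package everything with fpm and download the result
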

parcel/distro.py

 import os.path
 
-from fabric.api import settings, run, cd, lcd, put, get, local, env, with_settings
+from fabric.api import settings, run, cd, lcd, put, get, local, env, sudo, with_settings
 from fabric.contrib.files import append
 from fabric.colors import green
 
 #
 
 class Distro(object):
+    """The base class for Distro classes. If use_sudo is true, then super user commands
+    will be run using fabric's sudo call. If sudo is false, super user access is gained by
+    getting fabric to connect as root user."""
+
     space = '.parcel-build-temp'
     pip_download_cache = '/tmp/pip-download-cache/'
 
-    def __init__(self):
-        pass
+    def __init__(self, use_sudo=False):
+        """Construct a Distro instance. If use_sudo is true, then super user commands
+        will be run using fabric's sudo call. If sudo is false, super user access is gained by
+        getting fabric to connect as root user."""
+        self.use_sudo = use_sudo
 
     def mkdir(self, remote):
+        """Make a directory on the remote"""
         return run('mkdir -p "%s" && cd "%s" && pwd'%(remote,remote))
 
+    def su(self, *args, **kwargs):
+        """Method to perform a remote task as a super user. Takes same arguments as fabric.api.run        or fabric.api.sudo. Can be overridden to provide your own super user execution hook."""
+        if self.use_sudo:
+            return sudo(*args, **kwargs)
+
+        with settings(user='root'):
+            return run(*args, **kwargs)
+
     def update_packages(self):
         """This method should update the packages on the remote box.
         """
         raise NotImplementedError
 	
     def push_files(self,pathlist,dst):
+        """Push all the files in pathlist into dst directory on remote."""
         for path in pathlist:
             put(path, os.path.join(dst,os.path.basename(path)))
     	
 
 
 class Debian(Distro):
-
-    def __init__(self):
+    def __init__(self, *args, **kwargs):
         self.defaults = debian_defaults
+        Distro.__init__(self,*args,**kwargs)
 
     def update_packages(self):
-        with settings(user='root'):
-            run("apt-get update -qq")
+        self.su("apt-get update -qq")
 
     def build_deps(self, deps):
-        with settings(user='root'):
-            run("apt-get install -qq %s"%(' '.join(deps)))
+        self.su("apt-get install -qq %s"%(' '.join(deps)))
 
     def version(self,package):
         """Look at the debian apt package system for a package with this name and return its version.
         """this method sets up a remote debian box for parcel package building.
         Installs fpm, easyinstall and some libraries.
         """
-        with settings(user='root'):
-            self.build_deps(['libyaml-ruby','libzlib-ruby','ruby','ruby-dev','checkinstall'])
+        self.build_deps(['libyaml-ruby','libzlib-ruby','ruby','ruby-dev','checkinstall'])
+            
+        base_dir, src_dir, build_dir = self._setup()
 
-            base_dir, src_dir, build_dir = self._setup()
-            
-            # get rubygems and copy it across
-            path = cache.get("http://production.cf.rubygems.org/rubygems/rubygems-1.8.24.tgz")
-            self.push_files([path],src_dir)
-            filename = os.path.basename(path)
-            
-            with cd(build_dir):
-                run("tar xvfz ../src/%s"%filename)
-                with cd("rubygems-1.8.24"):
-                    run("ruby setup.rb")
-            run("gem1.8 install fpm")
+        # get rubygems and copy it across
+        path = cache.get("http://production.cf.rubygems.org/rubygems/rubygems-1.8.24.tgz")
+        self.push_files([path],src_dir)
+        filename = os.path.basename(path)
+
+        with cd(build_dir):
+            run("tar xvfz ../src/%s"%filename)
+            with cd("rubygems-1.8.24"):
+                run("ruby setup.rb")
+        run("gem1.8 install fpm")
 
     def install_package(self, pkg):
         """Installs package on the host using apt-get install bypassing
         """this method sets up a remote ubuntu box for parcel package building.
         Installs fpm and also rubygems if not present.
         """
-        with settings(user='root'):
-            run("apt-get install rubygems -y")
-            run("gem install fpm")
+        self.build_deps(['rubygems', 'python-virtualenv', 'python-dev'])
+        self.su("gem install fpm")
 
 
 class Centos(Distro):
 
-    def __init__(self):
+    def __init__(self, *args, **kwargs):
         self.defaults = centos_defaults
+        Distro.__init__(self, *args, **kwargs)
 
     def update_packages(self):
-        with settings(user='root'):
-            run("yum update -y")
+        self.su("yum update -y")
 
     def build_deps(self, deps):
-        with settings(user='root'):
-            run("yum install -y %s"%(' '.join(deps)))
+        self.su("yum install -y %s"%(' '.join(deps)))
 
     def version(self,package):
         """Look at the debian apt package system for a package with this name and return its version.
         """this method sets up a remote centos box for parcel package building.
         Installs fpm and also rubygems if not present.
         """
-        with settings(user='root'):
-            run("yum install rubygems -y")
-            run("gem install fpm")
-            run("yum install rpm-build -y")
-            run("yum install rsync -y")            
+        self.su("yum install rubygems -y")
+        self.su("gem install fpm")
+        self.su("yum install rpm-build -y")
+        self.su("yum install rsync -y")            
 
     def build_package(self, deployment=None):
         """

parcel/revisions.py

 from fabric.api import settings, run, cd, lcd, put, get, local, env, with_settings
 from fabric.contrib.files import sed
 
+
 class Hg(object):
+    """An interface to Mercurial source repositories."""
     def __init__(self,path):
+        """Initialise the Hg object to a repository on disk."""
+        #: The base path of the Mercurial repository.
         self.path = path
 
-    @property
     def branch(self):
+        """A property that is the present checked out branch"""
         with lcd(self.path):
             return local('hg branch',capture=True).strip()
 
-    @property
     def log(self):
+        """A property that is the latest log entry. Returns a dictionary with the following keys:
+        changeset: The hash number of the lastest changeset.
+        date: The date and time of the latest changeset.
+        user: Who committed the change
+        summary: The commit message
+        tag: if this commit is tagged, this is the tag."""
         with lcd(self.path):
             return dict([
                 (a.strip(),b.strip()) 
                 for a,b in [
-                    line.split(':',1) for line in local('hg log | head -4', capture=True).splitlines()
+                    line.split(':',1) for line in local('hg log | head -6', capture=True).splitlines()
                 ]
             ])
 		
-    @property
     def logs(self):
+        """Returns all the log entries as a list of dictionaries. Each dictionary 
+        is of the format returned by log."""
         with lcd(self.path):
             logs = local('hg log', capture=True).split("\n\n")
             return [
                 ]) for chunk in logs
             ]
 	
-    @property
     def pull(self):
+        """Issue a hg pull on the repository"""
         with lcd(self.path):
             return local('hg pull',capture=True).splitlines()
     
-    @property
     def update(self):
+        """Issue a hg update on the repository"""
         with lcd(self.path):
             return dict([
                 (cat,int(num))
                 ]
             ])
 
+    def describe(self, template='{latesttag}-{latesttagdistance}-{node|short}'):
+        """Create a vesrion tag composed of the latest tag, the tag distance,
+        and the short hash. For example:
+
+        0.5.1-23-d63d252639de
+
+        composed of the tag 0.5.1, from which we are 23 commits forwards of, with
+        a latest changeset of hash d63d252639de
+        """
+        with lcd(self.path):
+            return local('hg log -r . --template %r'%template, capture=True).strip()
+
+    def clone(self, repo):
+        """hg clone a repo or path to the present repo location.
+        The Hg path and object this is called on should be clean. In other words
+        you should call clone() immediately after construction of the Hg object and
+        make sure that the Hg object is constructed on an empty path.
+        
+        eg.
+
+        hg = Hg("build/clone")
+        hg.clone("hg+ssh://bitbucket.org/project")
+        """
+        with lcd(self.path):
+            return local('hg clone "{0}" .'.format(repo), capture=True).strip()
+
+
+class GitException(Exception):
+    """This exception is raised when:
+    - force_tag is set to true during a describe, when the repository is not on a tag.
+    """
+    pass
+
+
 class Git(object):
-    def __init(self,path):
+    """An abstraction of Git Repositories."""
+
+    def __init__(self,path):
+        """Initialise a Git object based apon this path."""
         self.path = path
 
+    def branch(self):
+        """Return which branch the repository is checked out on."""
+        with lcd(self.path):
+            return local('git branch',capture=True).strip()[2:]
+
+    def checkout(self, target):
+        """Use git checkout to bring the repository to a particular point"""
+        with lcd(self.path):
+            return local('git checkout '+target, capture=True)
+
+    def log(self):
+        """Return all the git logs in a list"""
+        with lcd(self.path):
+            git_log = local('git log', capture=True)
+            return [{
+                    'changeset': changeset[0],
+                    'date': changeset[2][len("Date:"):].strip(),
+                    'author': changeset[1][len("Author:"):].strip(),
+                    'summary': changeset[4].strip()
+                    } for changeset in (x.split("\n") for x in ("\n" + git_log).split('\ncommit ')[1:])]
+
+    def pull(self):
+        """Execute a git pull in the repository"""
+        with lcd(self.path):
+            return local('git pull', capture=True)
+        
+    def describe(self, force_tag=False):
+        """Use git describe to create a version tag describing the repository at this point."""
+        with lcd(self.path):
+            if not force_tag:
+                return local('git describe', capture=True)
+
+            # force_tag = True
+            with settings(warn_only=True):
+                version = local('git describe --exact-match --tags HEAD', capture=True) # this fails if the present working tree is not exactly on a tag
+
+                if version.return_code == 0:
+                    return version
+
+                raise GitException("Repository is not checked-out exactly on a tag")
+
+    def clone(self, repo):
+        """git clone a repo or path to the present repo location.
+        The Git path and object this is called on should be clean. In other words
+        you should call clone() immediately after construction of the Git object and
+        make sure that the Git object is constructed on an empty path.
+        
+        eg.
+
+        git = Git("build/clone")
+        git.clone("git://github.org/project")
+        """
+        with lcd(self.path):
+            return local('git clone "{0}" .'.format(repo), capture=True).strip()
+
+
 def repo(path):
     content = os.listdir(path)
     if '.hg' in content:
     command = []
     command.append('rsync')
     command.append('-av')
+
+    custom_port = int(env.port) if env.port is not None and env.port != '22' else None
+    if env.key_filename is not None or custom_port is not None or env.disable_known_hosts:
+        ssh_arguments = []
+
+        # use specified port if it is a non-standard port
+        if custom_port is not None:
+            ssh_arguments.append("-p {0.port}".format(env))
+
+        if env.disable_known_hosts:
+            ssh_arguments.append("-o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no")
+
+        # use specified identity file to connect
+        keyfiles = []
+        if type(env.key_filename) in (str, unicode): 
+            keyfiles = [env.key_filename]
+        elif env.key_filename:
+            assert type(env.key_filename) in (list, tuple), "malformed env.key_filename: %r"%(env.key_filename)
+            keyfiles = env.key_filename
+        if keyfiles:
+            ssh_arguments.append("-l {0}".format(env.user))
+            ssh_arguments += ['-i "{0}"'.format(keyfile) for keyfile in keyfiles]
+
+        ssh_command = "'ssh {0}'".format(" ".join(ssh_arguments))
+        command.extend(['-e',ssh_command])
+
     command.extend("'%s'"%s for s in sources)
     command.append("'%s@%s:%s'"%(env.user,env.host,dest))
                 
         if os.path.isfile(rsync_ignore):
             command.append('--exclude-from=%s'%rsync_ignore)
     
-    
     if not color_files:   
         return local(" ".join(command))
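
The extra ssh arguments built above are driven entirely by fabric's env, as the test_tools changes below exercise. A rough sketch, assuming rsync lives in parcel.tools (as the patched module paths suggest); the key path and file names are illustrative::

    from fabric.api import settings
    from parcel.tools import rsync

    with settings(key_filename="/some/path/to/a/keyfile.pub", port=3232):
        rsync("dist/app.tar.gz", "app.tar.gz")
        # builds: rsync -av -e 'ssh -p 3232 -l <user> -i "/some/path/to/a/keyfile.pub"' ...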
         

test-requirements.txt

 nose
 unittest2
 coverage==3.5.3
+
+-e git://github.com/kennethreitz/envoy.git#egg=envoy

tests/repomanager.py

+"""A context manager to use with the test code for parcel.revisions"""
+
+import os
+import tempfile
+import uuid
+import envoy
+import random
+
+def random_data(size):
+    return "".join(
+        [ random.choice(r"\n\r abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789!@#$%^&*()_-+=[]{}\|`~,.<>/?;:'") 
+          for x in xrange(size) ])
+
+DEFAULT_REVISIONS = [
+            ('initial check-in', {
+                "test.dat": random_data(1024),
+                "test_dir/another_file.txt": "This is a test text file\nWith a few lines\nand no newline at the end..." }),
+            ('second commit', {
+                "new file with space.exe": random_data(8096),
+                "test_dir/another_file.txt": "New first line!\nThis is a test text file\nWith a few lines\nand a newline at the end!\n" })]
+
+class RepoTestManager(object):
+    """Mocked up repository for testing as a context manager"""
+    def __init__(self, path=None, bin=None, revisions=None, clone=None):
+        self.path = path or self.make_path()
+        self.bin = bin
+        self.revisions = revisions if revisions is not None else DEFAULT_REVISIONS
+        self.clone = clone
+
+    def __enter__(self):
+        """Initialise the path as git repository and
+        put the dummy hierarcy and changes in there"""
+        assert not os.path.exists(self.path)
+        
+        # create and initialise the repository
+        os.makedirs(self.path)
+
+        # if we are a clone, clone, else git init the empty dir
+        if self.clone:
+            r = self.run('{0.bin} clone "{0.clone}" .'.format(self))
+        else:
+            r = self.run('{0.bin} init'.format(self))
+        assert r.status_code==0, "could not {0.bin} init {0.path}: {1.std_out} {1.std_err}".format(self,r)
+
+        # commit the changesets
+        for comment, files in self.revisions:
+            self.commit_changes(comment, files)
+
+        return self
+
+    def __exit__(self, *args, **kwargs):
+        r = self.run('rm -rf "{0.path}"'.format(self))
+        assert r.status_code==0, "could not remove {0.bin} directory {0.path}: {1.std_out} {1.std_err}".format(self, r)
+
+    def run(self, command, cwd=None):
+        return envoy.run(command, cwd=cwd or self.path)
+
+    def make_path(self):
+        """if a path is not passed in, this method makes a temporary path (but doesn't create it)"""
+        return os.path.join("/",tempfile.gettempprefix(),str(uuid.uuid4()))
+
+    def write_file(self, relpath, contents):
+        """Create a file name relpath with contents in the repo"""
+        filepath = os.path.join(self.path, relpath)
+        dirname = os.path.dirname(filepath)
+
+        # make directory if it doesn't exist
+        if not os.path.exists(dirname):
+            os.makedirs(dirname)
+
+        # write the file
+        with open(filepath, 'wb') as fh:
+            fh.write(contents)
+
+    def write_files(self, files):
+        """Pass a hash of {filename:contents} to make them"""
+        for filename, contents in files.iteritems():
+            self.write_file(filename, contents)
+
+    def commit(self, message):
+        r = self.run('{0.bin} commit -m "{1}"'.format(self, message))
+        assert r.status_code==0, "could not {0.bin} commit '{1}': {2.std_out} {2.std_err}".format(self, message, r)
+
+    def commit_changes(self, message, files):
+        """Make changes and then commit with message"""
+        self.write_files(files) # write each file
+        self.add(files.keys())  # list of files to add
+        self.commit(message)    # commit them
+
+    def add(self,files):
+        """hg add each file in files. List of relpaths."""
+        command = '{0.bin} add '.format(self) + " ".join(
+                      ['"'+str(fpath)+'"' for fpath in files])
+        r = self.run(command)
+        assert r.status_code==0, "could not {0}: {1.std_out} {1.std_err}".format(command, r) 
+
+    def log(self):
+        r = self.run('{0.bin} log'.format(self))
+        assert r.status_code==0, "could not {0.bin} log: {1.std_out} {1.std_err}".format(self, r)
+        return r.std_out
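
A small sketch of how the test suites below drive this manager (file names and messages are illustrative); bin can be 'hg' or 'git'::

    with RepoTestManager(bin='hg') as repo:
        repo.commit_changes('extra commit', {'notes.txt': 'hello'})
        history = repo.log()
    # the temporary repository is removed again on exit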
+"""
+Context manager to build a temporary path
+"""
+
+import os
+import tempfile
+import uuid
+
+class TempDir(object):
+    def __init__(self, create=True):
+        self.path = os.path.join("/",tempfile.gettempprefix(),str(uuid.uuid4()))
+        self.create = create
+
+    def __enter__(self):
+        if self.create:
+            os.makedirs(self.path)
+
+            self.pwd = os.getcwd()
+            os.chdir(self.path)
+
+        return self
+
+    def __exit__(self, *args, **kwargs):
+        if self.create:
+            os.chdir(self.pwd)
+        os.system('rm -rf "{0.path}"'.format(self))
+        

tests/test_deploy.py

-import sys
 import os
 import unittest2 as unittest
-from functools import partial
 import shutil
-import mock
 from mock import patch
 
 from parcel.deploy import Deployment
-from parcel.distro import Debian, Centos, centos
+from parcel.distro import centos
 from parcel.versions import Version
 from parcel_mocks import (run, rsync, version_mock, update_packages,
                           build_deps, mock_put, mock_get, lcd, mock_local)
     def __init__(self, app_name=None):
         self.app_name = app_name
 
-
 class DeployTestSuite(unittest.TestCase):
 
     def setUp(self):
         self.app_name = "testapp"
         self.deploy = TestDeploy(self.app_name)
-        
+
     def tearDown(self):
-        test_build_dir = os.path.join(os.path.expanduser('~/'), '.parcel_test') 
+        test_build_dir = os.path.join(os.path.expanduser('~/'), '.parcel_test')
         shutil.rmtree(test_build_dir, True)
         for m in mocks_to_reset:
             m.reset_mock()
-        
+
     def test_write_prerm_template(self):
         prerm_template = "Test rm template {app_name} and {lines}"
 
 
     def setUp(self):
         pass
-    
+
     def tearDown(self):
-        test_build_dir = os.path.join(os.path.expanduser('~/'), '.parcel_test') 
+        test_build_dir = os.path.join(os.path.expanduser('~/'), '.parcel_test')
         shutil.rmtree(test_build_dir, True)
         for m in mocks_to_reset:
             m.reset_mock()
-            
+
         # remove the deb we may have built
-        deb = os.path.join(os.path.dirname(__file__),"data", "testapp_0.1.2_all.deb")
+        deb = os.path.join(os.path.dirname(__file__), "data", "testapp_0.1.2_all.deb")
         if os.path.exists(deb):
             os.unlink(deb)
 
         basepath = os.path.join(os.path.expanduser('~/'))
         d = Deployment('testapp', base=basepath)
         d.prepare_app()
-        
+
         # version
         self.assertEquals(d.version.version, Version('0.1.2').version)
 
         # now check with no basepath
         d = Deployment('testapp')
         self.assertTrue(basepath in d.build_path)
-        
+
         # now check with basepath without initial slash
         basepath = os.path.join(os.path.dirname(__file__))
         d = Deployment('testapp', base=basepath[1:])
         self.assertTrue(basepath in d.build_path)
-        
+
     @patch('parcel.deploy.deploy.run', mock_local())
     @patch.multiple('parcel.tools', run=mock_local(), rsync=rsync)
     @patch('parcel.distro.run', mock_local())
         # if we supply build_deps then this should be called
         build_deps.assert_called_once()
 
-
     @patch('parcel.deploy.deploy.run', mock_local())
     @patch.multiple('parcel.tools', run=mock_local(), rsync=rsync)
     @patch('parcel.distro.run', mock_local())
         ve_path = os.path.join(d.build_path, d.virtual)
         self.assertTrue(os.path.exists(ve_path))
 
+    @patch('parcel.deploy.deploy.run', mock_local())
+    @patch.multiple('parcel.tools', run=mock_local(), rsync=rsync)
+    @patch('parcel.distro.run', mock_local())
+    @patch.multiple('parcel.distro.Debian', version=version_mock, update_packages=update_packages, build_deps=build_deps)
+    def test_prepare_app_custom_venv(self):
+        basepath = os.path.join(os.path.expanduser('~/'))
+        d = Deployment('testapp', base=basepath, venv_dirname='venv')
+        d.prepare_app()
+
+        # check that sync_app worked
+        self.assertTrue(os.path.exists(d.build_path))
+
+        # check that virtualenv was built
+        ve_path = os.path.join(d.build_path, 'venv')
+        self.assertTrue(os.path.exists(ve_path))
 
     @patch.multiple('parcel.deploy.deploy', run=mock_local(), put=mock_put, cd=lcd, get=mock_get)
     @patch.multiple('parcel.tools', run=mock_local(), rsync=rsync, put=mock_put)
         self.assertTrue(os.path.exists(ve_path))
 
         # check we can add a file
-        test_file = os.path.join(os.path.dirname(__file__),"data", "tip.tar.gz")
+        test_file = os.path.join(os.path.dirname(__file__), "data", "tip.tar.gz")
         d.add_to_root_fs(test_file, 'tip.tar.gz')
         dest_file = os.path.join(d.root_path, "tip.tar.gz")
         self.assertTrue(os.path.exists(dest_file))
             self.assertEquals(data, f.read())
 
         # check we can compile python files
+        test_file = os.path.join(os.path.dirname(__file__), "data", "hello.py")
+        d.add_to_root_fs(test_file, 'hello.py')
         d.compile_python()
-        dest_file = os.path.join(d.build_path, 'data', 'hello.pyc')
+        dest_file = os.path.join(d.build_path, 'hello.pyc')
         self.assertTrue(os.path.exists(dest_file))
 
         # check we can clear .py and just leave .pyc files
         dest_file = os.path.join(d.build_path, 'data', 'hello.py')
         self.assertFalse(os.path.exists(dest_file))
 
-
     @patch.multiple('parcel.deploy.deploy', run=mock_local(), put=mock_put, cd=lcd, get=mock_get)
     @patch.multiple('parcel.tools', run=mock_local(), rsync=rsync, put=mock_put)
     @patch.multiple('parcel.distro', run=mock_local(), get=mock_get)
         d = Deployment('testapp', base=basepath)
         d.root_path = os.path.join(basepath, '.parcel')
         d.prepare_app()
-        
+
         # test build, will not actually call fpm
         d.build_package()
-        dest_file = os.path.join(os.path.dirname(__file__),"data", "testapp_0.1.2_all.deb")
+        dest_file = os.path.join(os.path.dirname(__file__), "data", "testapp_0.1.2_all.deb")
         self.assertTrue(os.path.exists(dest_file))
         os.unlink(dest_file)
 
         d = Deployment('testapp', base=basepath)
         d.root_path = os.path.join(basepath, '.parcel_test')
         d.build_package(templates=False)
-        dest_file = os.path.join(os.path.dirname(__file__),"data", "testapp_0.1.2_all.deb")
+        dest_file = os.path.join(os.path.dirname(__file__), "data", "testapp_0.1.2_all.deb")
         self.assertTrue(os.path.exists(dest_file))
         os.unlink(dest_file)
 
         d.preinst = " ".join(lines)
         d.postinst = " ".join(lines)
         d.build_package()
-        dest_file = os.path.join(os.path.dirname(__file__),"data", "testapp_0.1.2_all.deb")
+        dest_file = os.path.join(os.path.dirname(__file__), "data", "testapp_0.1.2_all.deb")
         self.assertTrue(os.path.exists(dest_file))
         os.unlink(dest_file)
 
-
     @patch('parcel.deploy.deploy.run', run)
     @patch('parcel.distro.run', mock_local())
     @patch.multiple('parcel.distro.Debian', version=version_mock, update_packages=update_packages)
     def test_add_venv_with_requirements(self):
         basepath = os.path.join(os.path.expanduser('~/'))
         d = Deployment('testapp', base=basepath)
-        req_file = os.path.join(os.path.dirname(__file__),"data", "requirements_test")
+        req_file = os.path.join(os.path.dirname(__file__), "data", "requirements_test")
 
         # call _add_venv directly so we can just mock that run out
         d._add_venv(requirements=req_file)
 
     def setUp(self):
         pass
-    
+
     def tearDown(self):
-        test_build_dir = os.path.join(os.path.expanduser('~/'), '.parcel_test') 
+        test_build_dir = os.path.join(os.path.expanduser('~/'), '.parcel_test')
         shutil.rmtree(test_build_dir, True)
         for m in mocks_to_reset:
             m.reset_mock()
-            
+
         # remove the deb we may have built
-        deb = os.path.join(os.path.dirname(__file__),"data", "testapp_0.1.1-1.noarch.rpm")
+        deb = os.path.join(os.path.dirname(__file__), "data", "testapp_0.1.1-1.noarch.rpm")
         if os.path.exists(deb):
             os.unlink(deb)
 
-
     # centos tests
     @patch.multiple('parcel.deploy.deploy', run=mock_local(), put=mock_put, cd=lcd, get=mock_get)
     @patch.multiple('parcel.tools', run=mock_local(), rsync=rsync, put=mock_put)
         d = Deployment('testapp', base=basepath, arch=centos)
         d.root_path = os.path.join(basepath, '.parcel')
         d.prepare_app()
-        
+
         # test build, will not actually call fpm
         d.build_package()
-        dest_file = os.path.join(os.path.dirname(__file__),"data", "testapp_0.1.1-1.noarch.rpm")
+        dest_file = os.path.join(os.path.dirname(__file__), "data", "testapp_0.1.1-1.noarch.rpm")
         print dest_file
         self.assertTrue(os.path.exists(dest_file))
         os.unlink(dest_file)
-

tests/test_distro.py

 
 from mock import patch, MagicMock
 
-from parcel.distro import debian, ubuntu, centos, Distro
+from parcel.distro import debian, ubuntu, centos, Distro, Debian
 from parcel.deploy import Deployment
 
 from parcel_mocks import (run, _AttributeString, version_run, with_settings,
         debian.update_packages()
         run.assert_called_once_with("apt-get update -qq")
 
+    @patch('parcel.distro.sudo', run)
+    def test_update_packages_with_sudo(self):
+        debian = Debian(use_sudo=True)
+        debian.update_packages()
+        run.assert_called_once_with("apt-get update -qq")
+
     @patch('parcel.distro.run', run)
     def test_cleanup(self):
         debian._cleanup()
             
             # the classes build space should be in the path
             self.assertTrue(debian.space in command[0][0])
-
+            
     @patch('parcel.distro.run', run)
     def test_check_fpm_not_present(self):
         def called(command):
 
     @patch.multiple('parcel.distro', with_settings=with_settings, run=run, put=put, cd=distro_cd)
     @patch('parcel.distro.cache.get', distro_cache)
-    @patch('parcel.distro.Debian.mkdir', distro_mkdir)    
+    @patch('parcel.distro.Debian.mkdir', distro_mkdir)
     def test_setup(self):
         distro_cache.return_value = '/a/test/path/file.gz'
         distro_mkdir.side_effect = ['.parcel-build-temp', '.parcel-build-temp/src', '.parcel-build-temp/build']
 
     @patch.multiple('parcel.distro', with_settings=with_settings, run=run, put=put, cd=distro_cd)
     @patch('parcel.distro.cache.get', distro_cache)
-    @patch('parcel.distro.Debian.mkdir', distro_mkdir)    
+    @patch('parcel.distro.Debian.mkdir', distro_mkdir)
     def test_setup_ubuntu(self):
         distro_cache.return_value = '/a/test/path/file.gz'
         distro_mkdir.side_effect = ['.parcel-build-temp', '.parcel-build-temp/src', '.parcel-build-temp/build']
         ubuntu.setup()
 
-        self.assertTrue(run.call_args_list[0][0][0] == 'apt-get install rubygems -y')
+        self.assertTrue(run.call_args_list[0][0][0] == 'apt-get install -qq rubygems python-virtualenv python-dev')
         self.assertTrue(run.call_args_list[1][0][0] == 'gem install fpm')
 
 

tests/test_revisions.py

+import unittest
+import random
+import os
+from fabric.api import hide
+
+from parcel.revisions import Hg, Git, repo, GitException
+
+from repomanager import RepoTestManager, DEFAULT_REVISIONS, random_data
+from tempdir import TempDir
+
+class MercurialRepo(RepoTestManager):
+    """Mocked up mercurial repository for testing as a context manager"""
+    def __init__(self, path=None, hg=None, revisions=None, clone=None):
+        return RepoTestManager.__init__(self, path, hg or "hg", revisions, clone)
+
+class MercurialTestSuite(unittest.TestCase):
+    """parcel.revisions.Hg test cases."""
+
+    def test_hg_instantiate(self):
+        with MercurialRepo() as repo:
+            hg = Hg(repo.path)
+
+    def test_hg_logs(self):
+        revisions = DEFAULT_REVISIONS + [
+            ('Third custom checkin', {
+                    'blah-blah.txt': random_data(random.randint(1024,32000))})]
+        set_keys = ('date', 'changeset', 'user', 'summary')
+        opt_keys = ('tag', )
+
+        with MercurialRepo(revisions=revisions) as repo:
+            logs = Hg(repo.path).logs()
+            self.assertEquals(len(logs), len(revisions))
+            for log in logs:
+                # compulsory keys
+                for key in set_keys:
+                    self.assertIn(key, log)
+
+                # optional keys
+                for key in set(log.keys()).difference(set(set_keys)):
+                    self.assertIn(key, opt_keys)
+
+    def test_hg_branch(self):
+        with MercurialRepo() as repo:
+            self.assertEquals(Hg(repo.path).branch(), 'default')
+            
+    def test_hg_log(self):
+        revisions = DEFAULT_REVISIONS + [
+            ('Third custom checkin', {
+                    'blah-blah.txt': random_data(random.randint(1024,32000))})]
+        set_keys = ('date', 'changeset', 'user', 'summary')
+        opt_keys = ('tag', )
+
+        with MercurialRepo(revisions=revisions) as repo:
+            log = Hg(repo.path).log()
+            # compulsory keys
+            for key in set_keys:
+                self.assertIn(key, log)
+
+            # optional keys
+            for key in set(log.keys()).difference(set(set_keys)):
+                self.assertIn(key, opt_keys)
+
+            self.assertEquals(log['summary'], 'Third custom checkin')
+
+    def test_hg_update(self):
+        with MercurialRepo() as repo:
+            self.assertEquals( Hg(repo.path).update(),
+                               {'updated': 0,
+                                'removed': 0,
+                                'merged': 0,
+                                'unresolved': 0})
+    
+    def test_hg_pull(self):
+        with MercurialRepo() as parent:
+            with MercurialRepo(clone=parent.path,revisions=[]) as repo:
+                hg = Hg(repo.path)
+                result = hg.pull()
+                
+                self.assertIn(parent.path, result[0])
+
+    def test_hg_describe(self):
+        with MercurialRepo() as repo:
+            description = Hg(repo.path).describe()
+
+            # eg null-2-d63d252639de
+            self.assertTrue(description.startswith('null-2-'))
+            self.assertEquals(len(description), 19)
+
+    def test_hg_clone(self):
+        revisions = DEFAULT_REVISIONS + [
+            ('Third custom checkin', {
+                    'check.dat': random_data(1024)})]
+        with MercurialRepo(revisions=revisions) as source:
+            with TempDir() as dest:
+                hg = Hg(dest.path)
+                hg.clone(source.path)
+
+                dirs = os.listdir(dest.path)
+                self.assertIn('check.dat', dirs)
+                
+
+class GitRepo(RepoTestManager):
+    """Mocked up mercurial repository for testing as a context manager"""
+    def __init__(self, path=None, git=None, revisions=None, clone=None):
+        return RepoTestManager.__init__(self, path, git or "git", revisions, clone)
+
+    def tag(self, tagname, message=None, annotated=True):
+        message = message if message is not None else "{0} committed".format(tagname)
+        r = self.run('{0.bin} tag -a {1} -m "{2}"'.format(self, tagname, message))
+        assert r.status_code==0
+
+    def checkout(self, tag):
+        r = self.run('{0.bin} checkout {1}'.format(self, tag))
+        assert r.status_code==0
+
+class GitTestSuite(unittest.TestCase):
+    """parcel.revisions.Git test cases."""
+
+    def test_git_instantiate(self):
+        with GitRepo() as repo:
+            Git(repo.path)
+
+    def test_git_branch(self):
+        with GitRepo() as repo:
+            self.assertEquals(Git(repo.path).branch(), 'master')
+
+    def test_git_log(self):
+        with GitRepo() as repo:
+            log = Git(repo.path).log()
+            self.assertEquals(len(log), len(DEFAULT_REVISIONS))
+            for cs in log:
+                # changeset hash
+                self.assertEquals(len(cs['changeset']), 40)
+                self.assertNotIn(False, [x in "0123456789abcdef" for x in cs['changeset']])
+                
+                self.assertIn('date', cs)
+                self.assertIn('summary', cs)
+                self.assertIn('author', cs)
+
+                self.assertEquals(log[-1]['summary'], DEFAULT_REVISIONS[0][0])
+                self.assertEquals(log[-2]['summary'], DEFAULT_REVISIONS[1][0])
+
+                self.assertIn('@',cs['author'])
+                self.assertEqual(cs['date'].count(':'), 2)
+
+    def test_git_pull(self):
+        with GitRepo() as repo:
+            with GitRepo(clone=repo.path) as inner:
+                result = Git(inner.path).pull()
+                self.assertIn('Already up-to-date.', result)
+
+                # make upstream mods
+                repo.commit_changes('checkin upstream',{'newfile.dat': random_data(1024) })
+
+                # pull
+                result = Git(inner.path).pull()
+                self.assertIn('1 file changed', result)
+                self.assertIn('1 insertion', result)
+                self.assertIn('newfile.dat', result)
+
+    def test_git_checkout(self):
+        with GitRepo() as repo:
+            git = Git(repo.path)
+            git.checkout("master")
+
+            # latest file is present
+            self.assertIn('new file with space.exe', os.listdir(repo.path))
+
+            # go back
+            tag = git.log()[1]['changeset']
+            git.checkout(tag)
+            
+            # the file is gone
+            self.assertNotIn('new file with space.exe', os.listdir(repo.path))
+           
+    def test_git_clone(self):
+        revisions = DEFAULT_REVISIONS + [
+            ('Third custom checkin', {
+                    'check.dat': random_data(1024)})]
+        with GitRepo(revisions=revisions) as source:
+            with TempDir() as dest:
+                git = Git(dest.path)
+                git.clone(source.path)
+
+                dirs = os.listdir(dest.path)
+                self.assertIn('check.dat', dirs)
+                
+    def test_git_describe(self):
+        with GitRepo() as repo:
+            repo.tag("v1.0.3")
+            result = Git(repo.path).describe()
+            self.assertEquals("v1.0.3", result)
+
+            repo.commit_changes('checkin upstream',{'newfile.dat': random_data(1024) })
+            result = Git(repo.path).describe()
+            self.assertTrue(result.startswith("v1.0.3-1-g"))
+
+            # not on a tag, should fail
+            with self.assertRaises(GitException):
+                with hide('warnings'):
+                    result = Git(repo.path).describe(force_tag=True)
+
+            # move to tag and should pass
+            repo.checkout("v1.0.3")
+            result = Git(repo.path).describe(force_tag=True)
+            self.assertEquals(result, "v1.0.3")
+
+class repoTestSuite(unittest.TestCase):
+    """parcel.revisions.repo test cases."""
+    def test_repo_hg(self):
+        with MercurialRepo() as hg:
+            r = repo(hg.path)
+            self.assertEquals(r.__class__, Hg)
+
+    def test_repo_git(self):
+        with GitRepo() as git:
+            r = repo(git.path)
+            self.assertEquals(r.__class__, Git)
+
+    def test_repo_recurse(self):
+        with MercurialRepo() as hg:
+            path = os.path.join(hg.path,"dir1","dir2","dir3")
+            os.makedirs(path)
+            r = repo(path)
+            self.assertEquals(r.__class__, Hg)
+            
+        

tests/test_tools.py

         rsync(test_file, 'test.tar.gz')
 
         # check rsync was called
-        self.assertTrue('rsync -av' in rsync_local.call_args[0][0])
+        self.assertIn('rsync -av', rsync_local.call_args[0][0])
 
         # with colors off
         rsync(test_file, 'test.tar.gz', color_files=False)
 
         # with rsync_ignore but a non-existent file
         rsync(test_file, 'test.tar.gz', rsync_ignore='rsync_ignore')
-        self.assertTrue('--exclude-from' not in rsync_local.call_args[0][0])
+        self.assertNotIn('--exclude-from', rsync_local.call_args[0][0])
 
         # call with a list
         rsync([test_file, 'another_test_file'], 'test_files/')
         command = "rsync -av '{0}' '{1}'".format(test_file, 'another_test_file')
-        self.assertTrue(command in rsync_local.call_args[0][0])
+        self.assertIn(command, rsync_local.call_args[0][0])
+
+        # call with single fabric env.key_filename
+        from fabric.state import env
+        from fabric.api import settings
+        with settings(key_filename="/some/path/to/a/keyfile.pub"):
+            rsync(test_file, 'test.tar.gz')
+        
+            # rsync command should specify ssh command
+            self.assertIn("rsync -av -e 'ssh -l {0.user} -i \"{0.key_filename}\"'".format(env), rsync_local.call_args[0][0])
+
+        # call with multiple fabric env.key_filename
+        with settings(key_filename=["keyfile1.pub","keyfile2.pub"]):
+            rsync(test_file, 'test.tar.gz')
+        
+            # rsync command should specify ssh command
+            self.assertIn("rsync -av -e 'ssh -l {0.user} -i \"{0.key_filename[0]}\" -i \"{0.key_filename[1]}\"'".format(env), rsync_local.call_args[0][0])
+
+        # call with nonstandard ssh port
+        with settings(port=2222):
+            rsync(test_file, 'test.tar.gz')
+
+            # rsync command should specify ssh command
+            self.assertIn("rsync -av -e 'ssh -p 2222'".format(env), rsync_local.call_args[0][0])
+
+        with settings(disable_known_hosts=True):
+            rsync(test_file, 'test.tar.gz')
+
+            # rsync command should specify ssh command
+            print rsync_local.call_args
+            self.assertIn("rsync -av -e 'ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no'",
+                          rsync_local.call_args[0][0])
+
+        # combine keyfile and port
+        with settings(key_filename="/some/path/to/a/keyfile.pub", port=3232):
+            rsync(test_file, 'test.tar.gz')
+        
+            # rsync command should specify ssh command
+            self.assertIn("rsync -av -e 'ssh -p 3232 -l {0.user} -i \"{0.key_filename}\"'".format(env), rsync_local.call_args[0][0])