Commits

mickael9  committed 6202394

Switched to setuptools rather than deprecated distribute
Improved User.refresh so that it doesn't need to login again
Bumped version to 0.2.3

  • Participants
  • Parent commits 0f2f388

Comments (0)

Files changed (10)

+syntax: glob
+*.pyc
+*.orig
+*.egg-info
+__pycache__
+build/*
+dist/*

File distribute_setup.py

-#!python
-"""Bootstrap distribute installation
-
-If you want to use setuptools in your package's setup.py, just include this
-file in the same directory with it, and add this to the top of your setup.py:
-
-    from distribute_setup import use_setuptools
-    use_setuptools()
-
-If you want to require a specific version of setuptools, set a download
-mirror, or use an alternate download directory, you can do so by supplying
-the appropriate options to ``use_setuptools()``.
-
-This file can also be run as a script to install or upgrade setuptools.
-"""
-import os
-import sys
-import time
-import fnmatch
-import tempfile
-import tarfile
-from distutils import log
-
-try:
-    from site import USER_SITE
-except ImportError:
-    USER_SITE = None
-
-try:
-    import subprocess
-
-    def _python_cmd(*args):
-        args = (sys.executable,) + args
-        return subprocess.call(args) == 0
-
-except ImportError:
-    # will be used for python 2.3
-    def _python_cmd(*args):
-        args = (sys.executable,) + args
-        # quoting arguments if windows
-        if sys.platform == 'win32':
-            def quote(arg):
-                if ' ' in arg:
-                    return '"%s"' % arg
-                return arg
-            args = [quote(arg) for arg in args]
-        return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
-
-DEFAULT_VERSION = "0.6.24"
-DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
-SETUPTOOLS_FAKED_VERSION = "0.6c11"
-
-SETUPTOOLS_PKG_INFO = """\
-Metadata-Version: 1.0
-Name: setuptools
-Version: %s
-Summary: xxxx
-Home-page: xxx
-Author: xxx
-Author-email: xxx
-License: xxx
-Description: xxx
-""" % SETUPTOOLS_FAKED_VERSION
-
-
-def _install(tarball):
-    # extracting the tarball
-    tmpdir = tempfile.mkdtemp()
-    log.warn('Extracting in %s', tmpdir)
-    old_wd = os.getcwd()
-    try:
-        os.chdir(tmpdir)
-        tar = tarfile.open(tarball)
-        _extractall(tar)
-        tar.close()
-
-        # going in the directory
-        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
-        os.chdir(subdir)
-        log.warn('Now working in %s', subdir)
-
-        # installing
-        log.warn('Installing Distribute')
-        if not _python_cmd('setup.py', 'install'):
-            log.warn('Something went wrong during the installation.')
-            log.warn('See the error message above.')
-    finally:
-        os.chdir(old_wd)
-
-
-def _build_egg(egg, tarball, to_dir):
-    # extracting the tarball
-    tmpdir = tempfile.mkdtemp()
-    log.warn('Extracting in %s', tmpdir)
-    old_wd = os.getcwd()
-    try:
-        os.chdir(tmpdir)
-        tar = tarfile.open(tarball)
-        _extractall(tar)
-        tar.close()
-
-        # going in the directory
-        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
-        os.chdir(subdir)
-        log.warn('Now working in %s', subdir)
-
-        # building an egg
-        log.warn('Building a Distribute egg in %s', to_dir)
-        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
-
-    finally:
-        os.chdir(old_wd)
-    # returning the result
-    log.warn(egg)
-    if not os.path.exists(egg):
-        raise IOError('Could not build the egg.')
-
-
-def _do_download(version, download_base, to_dir, download_delay):
-    egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
-                       % (version, sys.version_info[0], sys.version_info[1]))
-    if not os.path.exists(egg):
-        tarball = download_setuptools(version, download_base,
-                                      to_dir, download_delay)
-        _build_egg(egg, tarball, to_dir)
-    sys.path.insert(0, egg)
-    import setuptools
-    setuptools.bootstrap_install_from = egg
-
-
-def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
-                   to_dir=os.curdir, download_delay=15, no_fake=True):
-    # making sure we use the absolute path
-    to_dir = os.path.abspath(to_dir)
-    was_imported = 'pkg_resources' in sys.modules or \
-        'setuptools' in sys.modules
-    try:
-        try:
-            import pkg_resources
-            if not hasattr(pkg_resources, '_distribute'):
-                if not no_fake:
-                    _fake_setuptools()
-                raise ImportError
-        except ImportError:
-            return _do_download(version, download_base, to_dir, download_delay)
-        try:
-            pkg_resources.require("distribute>="+version)
-            return
-        except pkg_resources.VersionConflict:
-            e = sys.exc_info()[1]
-            if was_imported:
-                sys.stderr.write(
-                "The required version of distribute (>=%s) is not available,\n"
-                "and can't be installed while this script is running. Please\n"
-                "install a more recent version first, using\n"
-                "'easy_install -U distribute'."
-                "\n\n(Currently using %r)\n" % (version, e.args[0]))
-                sys.exit(2)
-            else:
-                del pkg_resources, sys.modules['pkg_resources']    # reload ok
-                return _do_download(version, download_base, to_dir,
-                                    download_delay)
-        except pkg_resources.DistributionNotFound:
-            return _do_download(version, download_base, to_dir,
-                                download_delay)
-    finally:
-        if not no_fake:
-            _create_fake_setuptools_pkg_info(to_dir)
-
-def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
-                        to_dir=os.curdir, delay=15):
-    """Download distribute from a specified location and return its filename
-
-    `version` should be a valid distribute version number that is available
-    as an egg for download under the `download_base` URL (which should end
-    with a '/'). `to_dir` is the directory where the egg will be downloaded.
-    `delay` is the number of seconds to pause before an actual download
-    attempt.
-    """
-    # making sure we use the absolute path
-    to_dir = os.path.abspath(to_dir)
-    try:
-        from urllib.request import urlopen
-    except ImportError:
-        from urllib2 import urlopen
-    tgz_name = "distribute-%s.tar.gz" % version
-    url = download_base + tgz_name
-    saveto = os.path.join(to_dir, tgz_name)
-    src = dst = None
-    if not os.path.exists(saveto):  # Avoid repeated downloads
-        try:
-            log.warn("Downloading %s", url)
-            src = urlopen(url)
-            # Read/write all in one block, so we don't create a corrupt file
-            # if the download is interrupted.
-            data = src.read()
-            dst = open(saveto, "wb")
-            dst.write(data)
-        finally:
-            if src:
-                src.close()
-            if dst:
-                dst.close()
-    return os.path.realpath(saveto)
-
-def _no_sandbox(function):
-    def __no_sandbox(*args, **kw):
-        try:
-            from setuptools.sandbox import DirectorySandbox
-            if not hasattr(DirectorySandbox, '_old'):
-                def violation(*args):
-                    pass
-                DirectorySandbox._old = DirectorySandbox._violation
-                DirectorySandbox._violation = violation
-                patched = True
-            else:
-                patched = False
-        except ImportError:
-            patched = False
-
-        try:
-            return function(*args, **kw)
-        finally:
-            if patched:
-                DirectorySandbox._violation = DirectorySandbox._old
-                del DirectorySandbox._old
-
-    return __no_sandbox
-
-def _patch_file(path, content):
-    """Will backup the file then patch it"""
-    existing_content = open(path).read()
-    if existing_content == content:
-        # already patched
-        log.warn('Already patched.')
-        return False
-    log.warn('Patching...')
-    _rename_path(path)
-    f = open(path, 'w')
-    try:
-        f.write(content)
-    finally:
-        f.close()
-    return True
-
-_patch_file = _no_sandbox(_patch_file)
-
-def _same_content(path, content):
-    return open(path).read() == content
-
-def _rename_path(path):
-    new_name = path + '.OLD.%s' % time.time()
-    log.warn('Renaming %s into %s', path, new_name)
-    os.rename(path, new_name)
-    return new_name
-
-def _remove_flat_installation(placeholder):
-    if not os.path.isdir(placeholder):
-        log.warn('Unkown installation at %s', placeholder)
-        return False
-    found = False
-    for file in os.listdir(placeholder):
-        if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
-            found = True
-            break
-    if not found:
-        log.warn('Could not locate setuptools*.egg-info')
-        return
-
-    log.warn('Removing elements out of the way...')
-    pkg_info = os.path.join(placeholder, file)
-    if os.path.isdir(pkg_info):
-        patched = _patch_egg_dir(pkg_info)
-    else:
-        patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)
-
-    if not patched:
-        log.warn('%s already patched.', pkg_info)
-        return False
-    # now let's move the files out of the way
-    for element in ('setuptools', 'pkg_resources.py', 'site.py'):
-        element = os.path.join(placeholder, element)
-        if os.path.exists(element):
-            _rename_path(element)
-        else:
-            log.warn('Could not find the %s element of the '
-                     'Setuptools distribution', element)
-    return True
-
-_remove_flat_installation = _no_sandbox(_remove_flat_installation)
-
-def _after_install(dist):
-    log.warn('After install bootstrap.')
-    placeholder = dist.get_command_obj('install').install_purelib
-    _create_fake_setuptools_pkg_info(placeholder)
-
-def _create_fake_setuptools_pkg_info(placeholder):
-    if not placeholder or not os.path.exists(placeholder):
-        log.warn('Could not find the install location')
-        return
-    pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
-    setuptools_file = 'setuptools-%s-py%s.egg-info' % \
-            (SETUPTOOLS_FAKED_VERSION, pyver)
-    pkg_info = os.path.join(placeholder, setuptools_file)
-    if os.path.exists(pkg_info):
-        log.warn('%s already exists', pkg_info)
-        return
-
-    log.warn('Creating %s', pkg_info)
-    f = open(pkg_info, 'w')
-    try:
-        f.write(SETUPTOOLS_PKG_INFO)
-    finally:
-        f.close()
-
-    pth_file = os.path.join(placeholder, 'setuptools.pth')
-    log.warn('Creating %s', pth_file)
-    f = open(pth_file, 'w')
-    try:
-        f.write(os.path.join(os.curdir, setuptools_file))
-    finally:
-        f.close()
-
-_create_fake_setuptools_pkg_info = _no_sandbox(_create_fake_setuptools_pkg_info)
-
-def _patch_egg_dir(path):
-    # let's check if it's already patched
-    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
-    if os.path.exists(pkg_info):
-        if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
-            log.warn('%s already patched.', pkg_info)
-            return False
-    _rename_path(path)
-    os.mkdir(path)
-    os.mkdir(os.path.join(path, 'EGG-INFO'))
-    pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
-    f = open(pkg_info, 'w')
-    try:
-        f.write(SETUPTOOLS_PKG_INFO)
-    finally:
-        f.close()
-    return True
-
-_patch_egg_dir = _no_sandbox(_patch_egg_dir)
-
-def _before_install():
-    log.warn('Before install bootstrap.')
-    _fake_setuptools()
-
-
-def _under_prefix(location):
-    if 'install' not in sys.argv:
-        return True
-    args = sys.argv[sys.argv.index('install')+1:]
-    for index, arg in enumerate(args):
-        for option in ('--root', '--prefix'):
-            if arg.startswith('%s=' % option):
-                top_dir = arg.split('root=')[-1]
-                return location.startswith(top_dir)
-            elif arg == option:
-                if len(args) > index:
-                    top_dir = args[index+1]
-                    return location.startswith(top_dir)
-        if arg == '--user' and USER_SITE is not None:
-            return location.startswith(USER_SITE)
-    return True
-
-
-def _fake_setuptools():
-    log.warn('Scanning installed packages')
-    try:
-        import pkg_resources
-    except ImportError:
-        # we're cool
-        log.warn('Setuptools or Distribute does not seem to be installed.')
-        return
-    ws = pkg_resources.working_set
-    try:
-        setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools',
-                                  replacement=False))
-    except TypeError:
-        # old distribute API
-        setuptools_dist = ws.find(pkg_resources.Requirement.parse('setuptools'))
-
-    if setuptools_dist is None:
-        log.warn('No setuptools distribution found')
-        return
-    # detecting if it was already faked
-    setuptools_location = setuptools_dist.location
-    log.warn('Setuptools installation detected at %s', setuptools_location)
-
-    # if --root or --preix was provided, and if
-    # setuptools is not located in them, we don't patch it
-    if not _under_prefix(setuptools_location):
-        log.warn('Not patching, --root or --prefix is installing Distribute'
-                 ' in another location')
-        return
-
-    # let's see if its an egg
-    if not setuptools_location.endswith('.egg'):
-        log.warn('Non-egg installation')
-        res = _remove_flat_installation(setuptools_location)
-        if not res:
-            return
-    else:
-        log.warn('Egg installation')
-        pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
-        if (os.path.exists(pkg_info) and
-            _same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
-            log.warn('Already patched.')
-            return
-        log.warn('Patching...')
-        # let's create a fake egg replacing setuptools one
-        res = _patch_egg_dir(setuptools_location)
-        if not res:
-            return
-    log.warn('Patched done.')
-    _relaunch()
-
-
-def _relaunch():
-    log.warn('Relaunching...')
-    # we have to relaunch the process
-    # pip marker to avoid a relaunch bug
-    if sys.argv[:3] == ['-c', 'install', '--single-version-externally-managed']:
-        sys.argv[0] = 'setup.py'
-    args = [sys.executable] + sys.argv
-    sys.exit(subprocess.call(args))
-
-
-def _extractall(self, path=".", members=None):
-    """Extract all members from the archive to the current working
-       directory and set owner, modification time and permissions on
-       directories afterwards. `path' specifies a different directory
-       to extract to. `members' is optional and must be a subset of the
-       list returned by getmembers().
-    """
-    import copy
-    import operator
-    from tarfile import ExtractError
-    directories = []
-
-    if members is None:
-        members = self
-
-    for tarinfo in members:
-        if tarinfo.isdir():
-            # Extract directories with a safe mode.
-            directories.append(tarinfo)
-            tarinfo = copy.copy(tarinfo)
-            tarinfo.mode = 448 # decimal for oct 0700
-        self.extract(tarinfo, path)
-
-    # Reverse sort directories.
-    if sys.version_info < (2, 4):
-        def sorter(dir1, dir2):
-            return cmp(dir1.name, dir2.name)
-        directories.sort(sorter)
-        directories.reverse()
-    else:
-        directories.sort(key=operator.attrgetter('name'), reverse=True)
-
-    # Set correct owner, mtime and filemode on directories.
-    for tarinfo in directories:
-        dirpath = os.path.join(path, tarinfo.name)
-        try:
-            self.chown(tarinfo, dirpath)
-            self.utime(tarinfo, dirpath)
-            self.chmod(tarinfo, dirpath)
-        except ExtractError:
-            e = sys.exc_info()[1]
-            if self.errorlevel > 1:
-                raise
-            else:
-                self._dbg(1, "tarfile: %s" % e)
-
-
-def main(argv, version=DEFAULT_VERSION):
-    """Install or upgrade setuptools and EasyInstall"""
-    tarball = download_setuptools()
-    _install(tarball)
-
-
-if __name__ == '__main__':
-    main(sys.argv[1:])
+#!python
+"""Bootstrap setuptools installation
+
+To use setuptools in your package's setup.py, include this
+file in the same directory and add this to the top of your setup.py::
+
+    from ez_setup import use_setuptools
+    use_setuptools()
+
+To require a specific version of setuptools, set a download
+mirror, or use an alternate download directory, simply supply
+the appropriate options to ``use_setuptools()``.
+
+This file can also be run as a script to install or upgrade setuptools.
+"""
+import os
+import shutil
+import sys
+import tempfile
+import tarfile
+import optparse
+import subprocess
+import platform
+import textwrap
+
+from distutils import log
+
+try:
+    from site import USER_SITE
+except ImportError:
+    USER_SITE = None
+
+DEFAULT_VERSION = "2.0.1"
+DEFAULT_URL = "https://pypi.python.org/packages/source/s/setuptools/"
+
+def _python_cmd(*args):
+    args = (sys.executable,) + args
+    return subprocess.call(args) == 0
+
+def _install(tarball, install_args=()):
+    # extracting the tarball
+    tmpdir = tempfile.mkdtemp()
+    log.warn('Extracting in %s', tmpdir)
+    old_wd = os.getcwd()
+    try:
+        os.chdir(tmpdir)
+        tar = tarfile.open(tarball)
+        _extractall(tar)
+        tar.close()
+
+        # going in the directory
+        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+        os.chdir(subdir)
+        log.warn('Now working in %s', subdir)
+
+        # installing
+        log.warn('Installing Setuptools')
+        if not _python_cmd('setup.py', 'install', *install_args):
+            log.warn('Something went wrong during the installation.')
+            log.warn('See the error message above.')
+            # exitcode will be 2
+            return 2
+    finally:
+        os.chdir(old_wd)
+        shutil.rmtree(tmpdir)
+
+
+def _build_egg(egg, tarball, to_dir):
+    # extracting the tarball
+    tmpdir = tempfile.mkdtemp()
+    log.warn('Extracting in %s', tmpdir)
+    old_wd = os.getcwd()
+    try:
+        os.chdir(tmpdir)
+        tar = tarfile.open(tarball)
+        _extractall(tar)
+        tar.close()
+
+        # going in the directory
+        subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
+        os.chdir(subdir)
+        log.warn('Now working in %s', subdir)
+
+        # building an egg
+        log.warn('Building a Setuptools egg in %s', to_dir)
+        _python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
+
+    finally:
+        os.chdir(old_wd)
+        shutil.rmtree(tmpdir)
+    # returning the result
+    log.warn(egg)
+    if not os.path.exists(egg):
+        raise IOError('Could not build the egg.')
+
+
+def _do_download(version, download_base, to_dir, download_delay):
+    egg = os.path.join(to_dir, 'setuptools-%s-py%d.%d.egg'
+                       % (version, sys.version_info[0], sys.version_info[1]))
+    if not os.path.exists(egg):
+        tarball = download_setuptools(version, download_base,
+                                      to_dir, download_delay)
+        _build_egg(egg, tarball, to_dir)
+    sys.path.insert(0, egg)
+
+    # Remove previously-imported pkg_resources if present (see
+    # https://bitbucket.org/pypa/setuptools/pull-request/7/ for details).
+    if 'pkg_resources' in sys.modules:
+        del sys.modules['pkg_resources']
+
+    import setuptools
+    setuptools.bootstrap_install_from = egg
+
+
+def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
+                   to_dir=os.curdir, download_delay=15):
+    to_dir = os.path.abspath(to_dir)
+    rep_modules = 'pkg_resources', 'setuptools'
+    imported = set(sys.modules).intersection(rep_modules)
+    try:
+        import pkg_resources
+    except ImportError:
+        return _do_download(version, download_base, to_dir, download_delay)
+    try:
+        pkg_resources.require("setuptools>=" + version)
+        return
+    except pkg_resources.DistributionNotFound:
+        return _do_download(version, download_base, to_dir, download_delay)
+    except pkg_resources.VersionConflict as VC_err:
+        if imported:
+            msg = textwrap.dedent("""
+                The required version of setuptools (>={version}) is not available,
+                and can't be installed while this script is running. Please
+                install a more recent version first, using
+                'easy_install -U setuptools'.
+
+                (Currently using {VC_err.args[0]!r})
+                """).format(VC_err=VC_err, version=version)
+            sys.stderr.write(msg)
+            sys.exit(2)
+
+        # otherwise, reload ok
+        del pkg_resources, sys.modules['pkg_resources']
+        return _do_download(version, download_base, to_dir, download_delay)
+
+def _clean_check(cmd, target):
+    """
+    Run the command to download target. If the command fails, clean up before
+    re-raising the error.
+    """
+    try:
+        subprocess.check_call(cmd)
+    except subprocess.CalledProcessError:
+        if os.access(target, os.F_OK):
+            os.unlink(target)
+        raise
+
+def download_file_powershell(url, target):
+    """
+    Download the file at url to target using Powershell (which will validate
+    trust). Raise an exception if the command cannot complete.
+    """
+    target = os.path.abspath(target)
+    cmd = [
+        'powershell',
+        '-Command',
+        "(new-object System.Net.WebClient).DownloadFile(%(url)r, %(target)r)" % vars(),
+    ]
+    _clean_check(cmd, target)
+
+def has_powershell():
+    if platform.system() != 'Windows':
+        return False
+    cmd = ['powershell', '-Command', 'echo test']
+    devnull = open(os.path.devnull, 'wb')
+    try:
+        try:
+            subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
+        except:
+            return False
+    finally:
+        devnull.close()
+    return True
+
+download_file_powershell.viable = has_powershell
+
+def download_file_curl(url, target):
+    cmd = ['curl', url, '--silent', '--output', target]
+    _clean_check(cmd, target)
+
+def has_curl():
+    cmd = ['curl', '--version']
+    devnull = open(os.path.devnull, 'wb')
+    try:
+        try:
+            subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
+        except:
+            return False
+    finally:
+        devnull.close()
+    return True
+
+download_file_curl.viable = has_curl
+
+def download_file_wget(url, target):
+    cmd = ['wget', url, '--quiet', '--output-document', target]
+    _clean_check(cmd, target)
+
+def has_wget():
+    cmd = ['wget', '--version']
+    devnull = open(os.path.devnull, 'wb')
+    try:
+        try:
+            subprocess.check_call(cmd, stdout=devnull, stderr=devnull)
+        except:
+            return False
+    finally:
+        devnull.close()
+    return True
+
+download_file_wget.viable = has_wget
+
+def download_file_insecure(url, target):
+    """
+    Use Python to download the file, even though it cannot authenticate the
+    connection.
+    """
+    try:
+        from urllib.request import urlopen
+    except ImportError:
+        from urllib2 import urlopen
+    src = dst = None
+    try:
+        src = urlopen(url)
+        # Read/write all in one block, so we don't create a corrupt file
+        # if the download is interrupted.
+        data = src.read()
+        dst = open(target, "wb")
+        dst.write(data)
+    finally:
+        if src:
+            src.close()
+        if dst:
+            dst.close()
+
+download_file_insecure.viable = lambda: True
+
+def get_best_downloader():
+    downloaders = [
+        download_file_powershell,
+        download_file_curl,
+        download_file_wget,
+        download_file_insecure,
+    ]
+
+    for dl in downloaders:
+        if dl.viable():
+            return dl
+
+def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
+                        to_dir=os.curdir, delay=15,
+                        downloader_factory=get_best_downloader):
+    """Download setuptools from a specified location and return its filename
+
+    `version` should be a valid setuptools version number that is available
+    as an egg for download under the `download_base` URL (which should end
+    with a '/'). `to_dir` is the directory where the egg will be downloaded.
+    `delay` is the number of seconds to pause before an actual download
+    attempt.
+
+    ``downloader_factory`` should be a function taking no arguments and
+    returning a function for downloading a URL to a target.
+    """
+    # making sure we use the absolute path
+    to_dir = os.path.abspath(to_dir)
+    tgz_name = "setuptools-%s.tar.gz" % version
+    url = download_base + tgz_name
+    saveto = os.path.join(to_dir, tgz_name)
+    if not os.path.exists(saveto):  # Avoid repeated downloads
+        log.warn("Downloading %s", url)
+        downloader = downloader_factory()
+        downloader(url, saveto)
+    return os.path.realpath(saveto)
+
+
+def _extractall(self, path=".", members=None):
+    """Extract all members from the archive to the current working
+       directory and set owner, modification time and permissions on
+       directories afterwards. `path' specifies a different directory
+       to extract to. `members' is optional and must be a subset of the
+       list returned by getmembers().
+    """
+    import copy
+    import operator
+    from tarfile import ExtractError
+    directories = []
+
+    if members is None:
+        members = self
+
+    for tarinfo in members:
+        if tarinfo.isdir():
+            # Extract directories with a safe mode.
+            directories.append(tarinfo)
+            tarinfo = copy.copy(tarinfo)
+            tarinfo.mode = 448  # decimal for oct 0700
+        self.extract(tarinfo, path)
+
+    # Reverse sort directories.
+    directories.sort(key=operator.attrgetter('name'), reverse=True)
+
+    # Set correct owner, mtime and filemode on directories.
+    for tarinfo in directories:
+        dirpath = os.path.join(path, tarinfo.name)
+        try:
+            self.chown(tarinfo, dirpath)
+            self.utime(tarinfo, dirpath)
+            self.chmod(tarinfo, dirpath)
+        except ExtractError as e:
+            if self.errorlevel > 1:
+                raise
+            else:
+                self._dbg(1, "tarfile: %s" % e)
+
+
+def _build_install_args(options):
+    """
+    Build the arguments to 'python setup.py install' on the setuptools package
+    """
+    return ['--user'] if options.user_install else []
+
+def _parse_args():
+    """
+    Parse the command line for options
+    """
+    parser = optparse.OptionParser()
+    parser.add_option(
+        '--user', dest='user_install', action='store_true', default=False,
+        help='install in user site package (requires Python 2.6 or later)')
+    parser.add_option(
+        '--download-base', dest='download_base', metavar="URL",
+        default=DEFAULT_URL,
+        help='alternative URL from where to download the setuptools package')
+    parser.add_option(
+        '--insecure', dest='downloader_factory', action='store_const',
+        const=lambda: download_file_insecure, default=get_best_downloader,
+        help='Use internal, non-validating downloader'
+    )
+    options, args = parser.parse_args()
+    # positional arguments are ignored
+    return options
+
+def main(version=DEFAULT_VERSION):
+    """Install or upgrade setuptools and EasyInstall"""
+    options = _parse_args()
+    tarball = download_setuptools(download_base=options.download_base,
+        downloader_factory=options.downloader_factory)
+    return _install(tarball, _build_install_args(options))
+
+if __name__ == '__main__':
+    sys.exit(main())

File gett.py

-#!/usr/bin/env python
-# coding: utf-8
-
-# Copyright 2011 - Mickaël THOMAS
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-
-import datetime
-import json
-import logging
-import pprint
-import threading
-
-from urllib.error import HTTPError
-from urllib.parse import urlencode
-from urllib.parse import urlparse
-from urllib.request import urlopen, Request
-
-from http.client import HTTPConnection
-
-logger = logging.getLogger(__name__)
-
-API_BASE = 'https://open.ge.tt/1/'
-API_KEY = 't05kormjprb2o6rm8f8wmts2thjjor'
-
-
-class APIError(Exception):
-    pass
-
-
-def _post_request(path, **kwargs):
-    qskeys = {}
-    url = API_BASE + path
-
-    for key, value in list(kwargs.items()):
-        if key.startswith('_'):
-            qskeys[key[1:]] = value
-            del kwargs[key]
-
-    if qskeys:
-        url += '?' + urlencode(qskeys)
-
-    input_data = json.dumps(kwargs).encode('utf-8')
-    request = Request(url, input_data)
-
-    return _request(request)
-
-
-def _get_request(path, **kwargs):
-    url = API_BASE + path
-
-    if kwargs:
-        url += '?' + urlencode(kwargs)
-
-    request = Request(url)
-    return _request(request)
-
-
-def _request(req):
-    logger.debug("%s request to %s", req.get_method(), req.full_url)
-    if req.data:
-        logger.debug("data: %s", req.data.decode('utf-8'))
-
-    try:
-        resp = urlopen(req)
-        logger.debug("got %d response", resp.status)
-        raw = resp.read()
-    except HTTPError as ex:
-        resp = ex
-        logger.debug("got %d error", resp.code)  # urllib sucks
-        raw = ex.read()
-
-    raw = raw.decode('utf-8')
-
-    try:
-        result = json.loads(raw)
-    except Exception:
-        print("Error: unable to decode JSON: %s" % raw)
-        raise APIError("Unable to decode JSON: %s", raw)
-
-    logger.debug("json:\n%s", pprint.pformat(result))
-
-    if 'error' in result:
-        raise APIError(result['error'])
-
-    return result
-
-
-class User(object):
-    def _load(self, result):
-        self.atoken = result['accesstoken']
-        self.rtoken = result['refreshtoken']
-        self.userid = result['user']['userid']
-        self.email = result['user']['email']
-        self.full_name = result['user']['fullname']
-        self.storage_used = result['user']['storage']['used']
-        self.storage_limit = result['user']['storage']['limit']
-
-    def refresh(self):
-        self.login_token(self.rtoken)
-
-    def login_auth(self, email, password):
-        logger.debug("Loging-in user %r", email)
-        result = _post_request('users/login',
-            apikey=API_KEY, email=email, password=password
-        )
-        self._load(result)
-
-    def login_token(self, rtoken):
-        logger.debug("Loging-in (refresh) user with token %r", rtoken)
-        result = _post_request('users/login', refreshtoken=rtoken)
-        self._load(result)
-
-    def list_shares(self, skip=None, limit=None):
-        logger.debug("Listing shares")
-        if skip is not None and limit is not None:
-            results = _get_request('shares', accesstoken=self.atoken,
-                skip=str(skip), limit=str(limit)
-            )
-        else:
-            results = _get_request('shares', accesstoken=self.atoken)
-
-        for share_result in results:
-            share = UserShare(self)
-            share._load(share_result)
-            yield share
-
-    def get_share(self, name):
-        logger.debug("Getting share %r", name)
-        result = _get_request('shares/' + name)
-
-        share = UserShare(self)
-        share._load(result)
-
-        return share
-
-    def create_share(self, title=None):
-        logger.debug("Creating share with title %r", title)
-        if title is not None:
-            result = _post_request('shares/create',
-                _accesstoken=self.atoken, title=title
-            )
-        else:
-            result = _post_request('shares/create', _accesstoken=self.atoken)
-
-        share = UserShare(self)
-        share._load(result)
-
-        return share
-
-
-class Share(object):
-    def __init__(self, name):
-        self.name = name
-        self.refresh()
-
-    def _load(self, result):
-        self.name = result['sharename']
-        self.title = result.get('title')
-        self.created = datetime.datetime.fromtimestamp(result['created'])
-        self.url = result.get('getturl')
-        self.files = {}
-
-        # FIXME: work around API omission
-        if not self.url:
-            self.url = 'http://ge.tt/%s' % self.name
-
-        for file_result in result['files']:
-            f = File(self)
-            f._load(file_result)
-
-            self.files[f.id] = f
-
-    def refresh(self):
-        logger.debug("Refreshing share %r", self.name)
-        result = _get_request('shares/%s' % self.name)
-        self._load(result)
-
-
-class UserShare(Share):
-    def __init__(self, user):
-        self.user = user
-
-    def update(self, **fields):
-        logger.debug("Updating user share %r", self.name)
-        result = _post_request('shares/%s/update' % self.name,
-            _accesstoken=self.user.atoken, **fields
-        )
-        self._load(result)
-
-    def destroy(self):
-        logger.debug("Destroying user share %r", self.name)
-        _post_request('shares/%s/destroy' % self.name,
-            _accesstoken=self.user.atoken
-        )
-
-    def create_file(self, filename, size=None):
-        logger.debug("Creating file %r in user share %r", filename, self.name)
-        result = _post_request('files/%s/create' % self.name,
-            _accesstoken=self.user.atoken, filename=filename
-        )
-        file = File(self)
-        file._load(result)
-        file.size = size
-        self.files[file.id] = file
-        return file
-
-
-class File(object):
-    def __init__(self, share):
-        self.share = share
-
-    def _load(self, result):
-        self.name = result['filename']
-        self.id = result['fileid']
-        self.size = result.get('size')
-        self.downloads = result['downloads']
-        self.readystate = result['readystate']
-        self.created = datetime.datetime.fromtimestamp(result['created'])
-        self.url = result.get('getturl')
-
-        # FIXME: work around API omission
-        if not self.url:
-            self.url = self.share.url + '/v/%s' % self.id
-
-        if 'upload' in result:
-            self.put_url = result['upload']['puturl']
-        else:
-            self.put_url = None
-
-    def destroy(self):
-        logger.debug("Destoying file %s/%s", self.share.name, self.id)
-        _post_request('files/%s/%s/destroy' % (self.share.name, self.id),
-            _accesstoken=self.share.user.atoken
-        )
-        del self.share.files[self.id]
-
-    def refresh(self):
-        logger.debug("Refreshing file %s/%s", self.share.name, self.id)
-        result = _get_request('files/%s/%s' % (self.share.name,  self.id))
-        self._load(result)
-
-
-class FileUpload(threading.Thread):
-    def __init__(self, file, fp):
-        super().__init__()
-
-        self.file = file
-        self.fp = fp
-        self.file_size = file.size
-        self.percent_done = 0
-        self.bytes_written = 0
-        self.ex = None
-
-    def run(self):
-        logger.debug("Runnning FileUpload thread for file %r", self.file)
-        try:
-            parsed = urlparse(self.file.put_url)
-            conn = HTTPConnection(parsed.netloc)
-            conn.connect()
-            conn.putrequest('PUT', parsed.path + (('?' + parsed.query) if parsed.query else ''))
-            conn.putheader('Content-Length', str(self.file_size))
-            conn.endheaders()
-
-            while self.bytes_written != self.file_size:
-                data = self.fp.read(4096)
-                conn.sock.sendall(data)
-                self.bytes_written += len(data)
-                self.percent_done = self.bytes_written * 100 / self.file_size
-
-            self.percent_done = 100  # needed when file_size is zero...
-
-            conn.getresponse()
-
-        except Exception as ex:
-            self.ex = ex

File gett/__init__.py

Empty file added.

File gett/gett.py

+#!/usr/bin/env python
+# coding: utf-8
+
+# Copyright 2011 - Mickaël THOMAS
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+import datetime
+import json
+import logging
+import pprint
+import threading
+
+from urllib.error import HTTPError
+from urllib.parse import urlencode
+from urllib.parse import urlparse
+from urllib.request import urlopen, Request
+
+from http.client import HTTPConnection
+
+logger = logging.getLogger(__name__)
+
+API_BASE = 'https://open.ge.tt/1/'
+API_KEY = 't05kormjprb2o6rm8f8wmts2thjjor'
+
+
+class APIError(Exception):
+    pass
+
+
def _post_request(path, **kwargs):
    """POST to the API endpoint *path*.

    Keyword arguments whose names start with '_' go into the query string
    (with the underscore stripped); all others are JSON-encoded into the
    request body. Returns the decoded JSON response.
    """
    url = API_BASE + path

    # Split kwargs into query-string parameters and JSON body parameters.
    qs_params = {key[1:]: val for key, val in kwargs.items() if key.startswith('_')}
    body_params = {key: val for key, val in kwargs.items() if not key.startswith('_')}

    if qs_params:
        url += '?' + urlencode(qs_params)

    payload = json.dumps(body_params).encode('utf-8')
    return _request(Request(url, payload))
+
+
def _get_request(path, **kwargs):
    """GET the API endpoint *path*; kwargs become the query string.

    Returns the decoded JSON response.
    """
    query = '?' + urlencode(kwargs) if kwargs else ''
    return _request(Request(API_BASE + path + query))
+
+
def _request(req):
    """Send *req*, decode the JSON response body and return it as a dict.

    Ge.tt returns JSON payloads for HTTP error statuses too, so an
    HTTPError is read and decoded exactly like a success response.

    Raises APIError when the body is not valid JSON or when the decoded
    result contains an 'error' key.
    """
    logger.debug("%s request to %s", req.get_method(), req.full_url)
    if req.data:
        logger.debug("data: %s", req.data.decode('utf-8'))

    try:
        resp = urlopen(req)
        logger.debug("got %d response", resp.status)
        raw = resp.read()
    except HTTPError as ex:
        # The HTTPError object doubles as the response; its body is JSON too.
        resp = ex
        logger.debug("got %d error", resp.code)  # urllib sucks
        raw = ex.read()

    raw = raw.decode('utf-8')

    try:
        result = json.loads(raw)
    except ValueError:
        # Was: print() to stdout plus APIError("...%s", raw), which passed
        # raw as a second exception argument and never interpolated the
        # message. Log via the logger and raise a properly formatted error.
        logger.error("Unable to decode JSON: %s", raw)
        raise APIError("Unable to decode JSON: %s" % raw)

    logger.debug("json:\n%s", pprint.pformat(result))

    if 'error' in result:
        raise APIError(result['error'])

    return result
+
+
class User(object):
    """An authenticated Ge.tt user session.

    Call login_auth() or login_token() first; the other methods rely on
    the access token obtained during login.
    """

    def _load(self, result):
        """Populate attributes from a users/login style API payload."""
        # Keep the raw payload so refresh() can merge partial updates into it.
        self.result = result
        self.atoken = result['accesstoken']
        self.rtoken = result['refreshtoken']
        self.userid = result['user']['userid']
        self.email = result['user']['email']
        self.full_name = result['user']['fullname']
        self.storage_used = result['user']['storage']['used']
        self.storage_limit = result['user']['storage']['limit']

    def refresh(self):
        """Re-fetch user info (storage counters etc.) without a new login."""
        logger.debug("Getting user info")
        result = _get_request('users/me', accesstoken=self.atoken)
        # users/me returns only the 'user' sub-object; merge it into the
        # stored login payload so _load() sees a complete structure.
        self.result['user'].update(result)
        self._load(self.result)

    def login_auth(self, email, password):
        """Log in with email/password credentials."""
        logger.debug("Logging-in user %r", email)
        result = _post_request('users/login',
            apikey=API_KEY, email=email, password=password
        )
        self._load(result)

    def login_token(self, rtoken):
        """Log in with a previously stored refresh token."""
        logger.debug("Logging-in user with token %r", rtoken)
        result = _post_request('users/login', refreshtoken=rtoken)
        self._load(result)

    def list_shares(self, skip=None, limit=None):
        """Yield this user's shares as UserShare objects.

        skip and limit are forwarded to the API independently when given.
        (Previously both had to be supplied for either to take effect.)
        """
        logger.debug("Listing shares")
        params = {'accesstoken': self.atoken}
        if skip is not None:
            params['skip'] = str(skip)
        if limit is not None:
            params['limit'] = str(limit)
        results = _get_request('shares', **params)

        for share_result in results:
            share = UserShare(self)
            share._load(share_result)
            yield share

    def get_share(self, name):
        """Return the share called *name* as a UserShare."""
        logger.debug("Getting share %r", name)
        result = _get_request('shares/' + name)

        share = UserShare(self)
        share._load(result)

        return share

    def create_share(self, title=None):
        """Create a new share (optionally titled) and return it."""
        logger.debug("Creating share with title %r", title)
        if title is not None:
            result = _post_request('shares/create',
                _accesstoken=self.atoken, title=title
            )
        else:
            result = _post_request('shares/create', _accesstoken=self.atoken)

        share = UserShare(self)
        share._load(result)

        return share
+
+
class Share(object):
    """Read-only view of a public Ge.tt share, fetched by name."""

    def __init__(self, name):
        self.name = name
        self.refresh()

    def _load(self, result):
        """Populate attributes from an API share dict."""
        self.name = result['sharename']
        self.title = result.get('title')
        self.created = datetime.datetime.fromtimestamp(result['created'])

        # FIXME: work around API omission
        self.url = result.get('getturl') or ('http://ge.tt/%s' % self.name)

        # Map of file id -> File for every file in the share.
        self.files = {}
        for entry in result['files']:
            item = File(self)
            item._load(entry)
            self.files[item.id] = item

    def refresh(self):
        """Re-fetch this share's metadata and file list from the server."""
        logger.debug("Refreshing share %r", self.name)
        self._load(_get_request('shares/%s' % self.name))
+
+
class UserShare(Share):
    """A share owned by an authenticated User, exposing write operations."""

    def __init__(self, user):
        self.user = user

    def update(self, **fields):
        """Change share metadata (e.g. title=...) and reload local state."""
        logger.debug("Updating user share %r", self.name)
        response = _post_request('shares/%s/update' % self.name,
                                 _accesstoken=self.user.atoken, **fields)
        self._load(response)

    def destroy(self):
        """Delete this share (and all its files) on the server."""
        logger.debug("Destroying user share %r", self.name)
        _post_request('shares/%s/destroy' % self.name,
                      _accesstoken=self.user.atoken)

    def create_file(self, filename, size=None):
        """Register *filename* in this share and return the new File.

        *size* is recorded locally only; the upload itself happens later
        through the File's put_url.
        """
        logger.debug("Creating file %r in user share %r", filename, self.name)
        response = _post_request('files/%s/create' % self.name,
                                 _accesstoken=self.user.atoken, filename=filename)
        entry = File(self)
        entry._load(response)
        entry.size = size
        self.files[entry.id] = entry
        return entry
+
+
class File(object):
    """A single file inside a Share."""

    def __init__(self, share):
        self.share = share

    def _load(self, result):
        """Populate attributes from an API file dict."""
        self.name = result['filename']
        self.id = result['fileid']
        self.size = result.get('size')
        self.downloads = result['downloads']
        self.readystate = result['readystate']
        self.created = datetime.datetime.fromtimestamp(result['created'])
        self.url = result.get('getturl')

        # FIXME: work around API omission
        if not self.url:
            self.url = self.share.url + '/v/%s' % self.id

        # put_url is only present on freshly created files awaiting upload.
        if 'upload' in result:
            self.put_url = result['upload']['puturl']
        else:
            self.put_url = None

    def destroy(self):
        """Delete this file on the server and drop it from the share."""
        # Fixed log-message typo ("Destoying" -> "Destroying").
        logger.debug("Destroying file %s/%s", self.share.name, self.id)
        _post_request('files/%s/%s/destroy' % (self.share.name, self.id),
            _accesstoken=self.share.user.atoken
        )
        del self.share.files[self.id]

    def refresh(self):
        """Re-fetch this file's metadata from the server."""
        logger.debug("Refreshing file %s/%s", self.share.name, self.id)
        result = _get_request('files/%s/%s' % (self.share.name, self.id))
        self._load(result)
+
+
class FileUpload(threading.Thread):
    """Background thread that PUTs *fp*'s content to *file*.put_url.

    Progress is exposed through bytes_written and percent_done; any
    exception raised during the upload is stored in self.ex instead of
    killing the process.
    """

    def __init__(self, file, fp):
        super().__init__()

        self.file = file
        self.fp = fp
        self.file_size = file.size
        self.percent_done = 0
        self.bytes_written = 0
        self.ex = None  # exception raised inside run(), if any

    def run(self):
        # Fixed log-message typo ("Runnning" -> "Running").
        logger.debug("Running FileUpload thread for file %r", self.file)
        try:
            parsed = urlparse(self.file.put_url)
            conn = HTTPConnection(parsed.netloc)
            conn.connect()
            conn.putrequest('PUT', parsed.path + (('?' + parsed.query) if parsed.query else ''))
            conn.putheader('Content-Length', str(self.file_size))
            conn.endheaders()

            while self.bytes_written != self.file_size:
                data = self.fp.read(4096)
                if not data:
                    # Source ended before file_size bytes were sent; the old
                    # code would spin forever here sending empty chunks.
                    raise IOError("unexpected end of input after %d of %d bytes"
                                  % (self.bytes_written, self.file_size))
                conn.sock.sendall(data)
                self.bytes_written += len(data)
                self.percent_done = self.bytes_written * 100 / self.file_size

            self.percent_done = 100  # needed when file_size is zero...

            conn.getresponse()

        except Exception as ex:
            self.ex = ex

File gett/gett.pyc

Binary file added.

File gett/uploader.py

+#!/usr/bin/env python
+# coding: utf-8
+
+# Copyright 2011 - Mickaël THOMAS
+
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+# GNU General Public License for more details.
+
+# You should have received a copy of the GNU General Public License
+# along with this program.  If not, see <http://www.gnu.org/licenses/>.
+
+import argparse
+import difflib
+import os
+import re
+import signal
+import sys
+import unicodedata
+
+from collections import defaultdict
+from itertools import chain
+
+from gett.gett import *
+
# Matches a ge.tt share reference ("http://ge.tt/<name>", "/<name>" or bare
# "<name>", optionally followed by "/v/<fileid>"): group(1) is the share
# name, group(2) the optional file id.
url_re = re.compile(r'^(?:http://ge\.tt/|/)?(\w+)(?:/(?:v/(\d+)/?)?)?$')
# Default difflib similarity threshold used by the --search fuzzy matching.
DEFAULT_SIMILARITY_RATIO = .95
+
+
def ascii_only(input_str):
    """Return *input_str* with diacritics stripped.

    Decomposes to NFKD and drops every combining mark, so e.g. 'café'
    becomes 'cafe'.
    """
    decomposed = unicodedata.normalize('NFKD', input_str)
    return ''.join(ch for ch in decomposed if not unicodedata.combining(ch))
+
+
def similar(a, b, ratio_floor):
    """Return True when the difflib similarity of *a* and *b* strictly
    exceeds *ratio_floor* (a float between 0 and 1)."""
    matcher = difflib.SequenceMatcher(a=a, b=b)
    return matcher.ratio() > ratio_floor
+
+
def humansize(nbytes, pad=False):
    """Format a byte count as a human-readable string ('1.50 KB').

    Returns '' for None. With pad=True the value is right-aligned in a
    fixed-width field so columns line up.
    """
    if nbytes is None:
        return ''

    # Pick the largest unit whose threshold the value reaches; 0..999
    # bytes fall through to the final (0, ' B') entry.
    for exp, unit in ((9, 'GB'), (6, 'MB'), (3, 'KB'), (0, ' B')):
        if nbytes >= 10 ** exp:
            break

    value = nbytes / 10 ** exp
    template = '%6.2f %-2s' if pad else '%.2f %s'
    return template % (value, unit)
+
+
def shorten(filename, maxsize):
    """Truncate *filename* to roughly *maxsize* characters, keeping the
    extension and padding the result with spaces for column alignment.

    A too-long stem is cut and suffixed with '..'.
    """
    stem, ext = os.path.splitext(filename)
    stem_budget = maxsize - len(ext)

    if len(stem) > stem_budget:
        stem = stem[:stem_budget - 2] + '..'

    return (stem + ext).ljust(stem_budget + len(ext))
+
+
def print_status(upload, index, count):
    """Redraw a one-line progress bar for *upload* on stderr.

    Uses a carriage return (no newline) so repeated calls overwrite the
    same line; *index*/*count* identify the file within the batch.
    """
    name = shorten(upload.file.name, 22)
    filled = int(40 * upload.percent_done / 100)
    bar = '[' + '#' * filled + '-' * (40 - filled) + ']'

    sys.stderr.write('\r%s (%3d/%d) %s %d %%'
                     % (name, index, count, bar, upload.percent_done))
+
+
def show_share(share):
    """Print a summary of *share* and one line per contained file."""
    print("--------------------------------------------------------------------------------")
    print("Share: %s (%d file(s)) [%s]" % (share.title or "Untitled", len(share.files), share.url))
    print("--------------------------------------------------------------------------------")

    if not share.files:
        print(" - No files")
    else:
        # Pad URLs to the widest one so the readystate column lines up.
        widest = max(len(entry.url) for entry in share.files.values())
        for entry in share.files.values():
            print(' - %s  %s  %s  %s' % (shorten(entry.name, 28), humansize(entry.size, True), entry.url.ljust(widest), entry.readystate))

    print()
+
+
def entry_point():
    """Console-script entry: run main(), logging APIError instead of
    letting it propagate as a traceback."""
    try:
        main()
    except APIError as err:
        logger.error("API error: %s", err)
+
+
def pattern(string):
    """argparse type: expand *string* into a list of open binary files.

    A plain file path is opened directly; a directory is expanded to its
    direct children; anything else is treated as a glob pattern (not
    recursive). Logs a warning and returns [] when nothing matches.
    """
    import glob

    handles = []

    if os.path.isfile(string):
        handles.append(open(string, 'rb'))
    else:
        glob_expr = string + '/*' if os.path.isdir(string) else string

        for candidate in glob.iglob(glob_expr):
            if os.path.isfile(candidate):
                handles.append(open(candidate, 'rb'))

    if not handles:
        logger.warning("%s: no match", string)

    return handles
+
+
def main():
    """Parse command-line arguments and execute the requested actions.

    Runs in order: authentication (stored token file or interactive
    credentials), --list, --delete, --search, file uploads, and finally a
    storage-usage summary.
    """
    # Restore default SIGINT handling so Ctrl+C aborts immediately.
    signal.signal(signal.SIGINT, signal.SIG_DFL)

    # Windows exposes USERPROFILE, POSIX exposes HOME.
    home = os.getenv('USERPROFILE') or os.getenv('HOME')

    parser = argparse.ArgumentParser(
            description="A command-line Ge.tt uploader and manager",
            epilog="Note that whenever http://ge.tt/<share_name>[/v/<fileid>] is expected, you can omit the http://ge.tt/ part.")
    parser.add_argument('-D', dest='debug', action='store_true',
        help="Debug API calls (warning: very verbose).")

    upload_group = parser.add_argument_group('Upload options')
    upload_group.add_argument('file', nargs='*', type=pattern,
        help="Name of a file or a directory to upload. Patterns are allowed. This is not recursive.")
    upload_group.add_argument('-t', dest='title',
        help="Title of the newly created share.")
    upload_group.add_argument('-s', dest='share',
        help="URL of the share to upload to (defaults to a newly created one).")
    upload_group.add_argument('-P', dest='parallel_upload', action='store_true',
        help="Upload files in parallel rather than sequentially. The progress bars are displayed in ascending file size order.")

    search_group = parser.add_argument_group('Search actions')
    search_group.add_argument('-S', '--search', nargs='+', dest='search',
        metavar='SEARCH_TERM', help="Search in share titles and file names.")
    search_group.add_argument('-R', '--similarity-ratio',
        default=DEFAULT_SIMILARITY_RATIO, type=float, dest='similarity_ratio',
        help="Similarity ratio (between 0 and 1) used for searching. 1 means strict, 0 means very loose.")

    other_group = parser.add_argument_group("Other actions")
    other_group.add_argument('--delete', nargs='+', dest='delete',
        metavar='URL', help="Delete a share or a file.")
    other_group.add_argument('-l', '--list', nargs='*', dest='list',
        metavar='SHARE_URL', help="List the files in the specified share. If no share is specified, list all your shares.")

    # NOTE(review): "Authentification" is a typo ("Authentication") in
    # user-visible help output; left unchanged here since it is runtime text.
    auth_group = parser.add_argument_group("Authentification")
    auth_group.add_argument('-L', dest='ignore_token', action='store_true',
        help="Log-in with a different account than the stored one (if any).")
    auth_group.add_argument('-e', dest='email', help="Email to login with.")
    auth_group.add_argument('-p', dest='password', help="Password to login with.")
    auth_group.add_argument('-k', dest='tokenfile',
        default=os.path.join(home, '.gett-token'),
        help="Ge.tt token file path (default: ~/.gett-token).")

    args = parser.parse_args()

    if args.debug:
        import logging
        logging.basicConfig(level=logging.DEBUG)

    user = User()
    logged = False

    # If not logging-in with a different account

    if not args.ignore_token and not args.email:
        try:
            # Try to log-in with the token

            token = open(args.tokenfile, 'r').read()
            user.login_token(token)

            logged = True
        except (APIError, IOError):
            # Missing/invalid token file: fall through to interactive login.
            pass

    if not logged:
        if not args.email:
            args.email = input("Please enter your Ge.tt email: ")

        if not args.password:
            import getpass
            args.password = getpass.getpass("Please enter your Ge.tt password: ")

        try:
            user.login_auth(args.email, args.password)
        except APIError as ex:
            logger.error("Unable to login: %s", ex)
            sys.exit(1)

        reply = input("Do you wish to store the session token? (y/n): ")

        if reply.lower() == 'y':
            # Save the refreshtoken to the user's home directory (by default)
            with open(args.tokenfile, 'w') as file:
                file.write(user.rtoken)

    # --list command

    if args.list is not None:
        for name in args.list:
            match = url_re.match(name)

            if not match or match.group(2):
                parser.error("argument --list: invalid format, please supply either share url or path")

            share = Share(match.group(1))
            show_share(share)

        # Bare -l with no share URL: list every share of the account.
        if not args.list:
            found = False

            nshares = 0
            nfiles = 0

            for share in user.list_shares():
                found = True
                nshares += 1
                nfiles += len(share.files)

                show_share(share)

            print("Displayed %d shares and %d files." % (nshares, nfiles))
            print()

            if not found:
                print("You have no shares!")
                print()

    # --delete command

    if args.delete:
        for item in args.delete:
            match = url_re.match(item)

            if not match:
                parser.error("argument --delete: invalid format, please supply either file/share url or path")

            share = user.get_share(match.group(1))

            # group(2) set means a file id was given: delete just that file.
            if match.group(2):
                try:
                    id = match.group(2)
                    file = share.files[id]
                    file.destroy()

                    print("Deleted file: %s [%s]" % (file.name, file.url))
                except KeyError:
                    print("No such file in the share")
            else:
                share.destroy()
                print("Deleted share: %s [%s]" % \
                    (share.title or "Untitled", share.url))
        print()

    # --search command

    if args.search:
        simplify = lambda s: ascii_only(s.lower())
        phrase = ' '.join(args.search)
        phrase_simple = simplify(phrase)
        print("Searching for `%s`...\n" % phrase)

        ratio = args.similarity_ratio
        if not 0 <= ratio <= 1:
            ratio = DEFAULT_SIMILARITY_RATIO
            # NOTE(review): "%2.f" looks like it was meant to be "%.2f";
            # as written it prints the ratio with zero decimal places.
            logger.warning("Bad similarity ratio value. Using default: %2.f" % \
                DEFAULT_SIMILARITY_RATIO)

        is_similar = lambda a, b: similar(a, b, ratio)

        # Substring match OR fuzzy match, over share titles and file names.
        found_in_shares = []
        found_in_files = defaultdict(lambda: [])
        for share in user.list_shares():
            title = simplify(share.title or '')
            if phrase_simple in title or is_similar(phrase_simple, title):
                found_in_shares.append(share)

            for file in share.files.values():
                fname = simplify(file.name or '')
                if phrase_simple in fname or is_similar(phrase_simple, fname):
                    found_in_files[share].append(file)

        # Column widths for aligned output; max() on an empty sequence
        # raises ValueError, which stands in for "no results".
        try:
            max_url = max(len(_.url) for _ in found_in_shares)
        except ValueError:
            max_url = 0
        try:
            max_url_shares = max(len(_.url) for _ in found_in_files.keys())
            max_url_files = max(
                max(len(_.url) for _ in f)
                for f in found_in_files.values()
            )
        except ValueError:
            max_url_shares = max_url_files = 0

        maximax = max(max_url, max_url_shares + 6, max_url_files + 3)

        if found_in_shares:
            print("Found %d share(s):" % len(found_in_shares))

            for share in found_in_shares:
                print(" - %s  %s%s" % (
                    share.url.ljust(max_url),
                    ' ' * (maximax - max_url),
                    (share.title or '')[:75 - max_url]
                ))
        else:
            print("Nothing found in share names.")

        print()

        if found_in_files:

            print("Found %d file(s):" % sum(map(len, found_in_files.values())))

            for share, files in found_in_files.items():
                print(" - Share %s  %s%s" % (
                    share.url.ljust(max_url_shares),
                    ' ' * (maximax - max_url_shares - 6),
                    (share.title or '')[:69 - max_url_shares]
                ))

                for file in files:
                    print("    - %s  %s%s" % (
                        file.url.ljust(max_url_files),
                        ' ' * (maximax - max_url_files - 6),
                        shorten(file.name, 72 - max_url_files)
                    ))
        else:
            print("Nothing found in file names.")

        print()

    # File uploads

    if args.file:
        if args.share:
            # Upload to existing share

            match = url_re.match(args.share)

            if match:
                share = user.get_share(match.group(1))
            else:
                parser.error("argument --list: invalid share name, please supply either URL or name")
        else:
            # Upload to a new share

            if args.title:
                share = user.create_share(args.title)
            else:
                share = user.create_share()

        uploads = []

        # Create the file URLs

        print("Creating file(s)...")

        for fp in chain.from_iterable(args.file):
            name = os.path.basename(fp.name)
            file = share.create_file(name, os.path.getsize(fp.name))

            upload = FileUpload(file, fp)

            # In parallel mode all threads start now; otherwise each one is
            # started just before its progress loop below.
            if args.parallel_upload:
                upload.start()

            uploads.append(upload)

        show_share(share)

        if args.parallel_upload:
            uploads.sort(key=lambda item: item.file_size)

        for i, upload in enumerate(uploads):
            if not args.parallel_upload:
                upload.start()

            # Poll the thread, redrawing its progress bar twice a second.
            while True:
                print_status(upload, i + 1, len(uploads))
                upload.join(0.5)

                if not upload.is_alive():
                    print_status(upload, i + 1, len(uploads))

                    if upload.ex:
                        sys.stderr.write(("\rError uploading %s: %s" % \
                            (upload.file.name, upload.ex)).ljust(80))

                    sys.stderr.write('\n')
                    break
        print()

    # Refresh so the storage counters reflect this session's uploads.
    user.refresh()

    # NOTE(review): assumes storage_limit > 0 — a zero limit from the API
    # would raise ZeroDivisionError here.
    print("Storage used: %s out of %s (%.1f%%)" % (
        humansize(user.storage_used),
        humansize(user.storage_limit),
        user.storage_used / user.storage_limit * 100,
    ))
+
+
# Allow running this module directly as a script (same path as the
# installed console script).
if __name__ == '__main__':
    entry_point()

File gett_uploader.py

-#!/usr/bin/env python
-# coding: utf-8
-
-# Copyright 2011 - Mickaël THOMAS
-
-# This program is free software: you can redistribute it and/or modify
-# it under the terms of the GNU General Public License as published by
-# the Free Software Foundation, either version 3 of the License, or
-# (at your option) any later version.
-
-# This program is distributed in the hope that it will be useful,
-# but WITHOUT ANY WARRANTY; without even the implied warranty of
-# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
-# GNU General Public License for more details.
-
-# You should have received a copy of the GNU General Public License
-# along with this program.  If not, see <http://www.gnu.org/licenses/>.
-
-import argparse
-import difflib
-import os
-import re
-import signal
-import sys
-import unicodedata
-
-from collections import defaultdict
-from itertools import chain
-
-from gett import *
-
-url_re = re.compile(r'^(?:http://ge\.tt/|/)?(\w+)(?:/(?:v/(\d+)/?)?)?$')
-DEFAULT_SIMILARITY_RATIO = .95
-
-
-def ascii_only(input_str):
-    nkfd_form = unicodedata.normalize('NFKD', input_str)
-    return ''.join([c for c in nkfd_form if not unicodedata.combining(c)])
-
-
-def similar(a, b, ratio_floor):
-    return difflib.SequenceMatcher(a=a, b=b).ratio() > ratio_floor
-
-
-def humansize(nbytes, pad=False):
-    if nbytes is None:
-        return ''
-
-    for (exp, unit) in ((9, 'GB'), (6, 'MB'), (3, 'KB'), (0, ' B')):
-        if nbytes >= 10 ** exp:
-            break
-
-    if pad:
-        return '%6.2f %-2s' % (nbytes / 10 ** exp, unit)
-    else:
-        return '%.2f %s' % (nbytes / 10 ** exp, unit)
-
-
-def shorten(filename, maxsize):
-    base, ext = os.path.splitext(filename)
-    max_base = maxsize - len(ext)
-
-    if len(base) > max_base:
-        base = base[:max_base - 2] + '..'
-
-    return (base + ext).ljust(max_base + len(ext))
-
-
-def print_status(upload, index, count):
-        name = shorten(upload.file.name, 22)
-        bar_size = int(40 * upload.percent_done / 100)
-        bar = '[' + (bar_size * '#') + ((40 - bar_size) * '-') + ']'
-
-        sys.stderr.write('\r%s (%3d/%d) %s %d %%' % \
-            (name, index, count, bar, upload.percent_done))
-
-
-def show_share(share):
-    print("--------------------------------------------------------------------------------")
-    print("Share: %s (%d file(s)) [%s]" % (share.title or "Untitled", len(share.files), share.url))
-    print("--------------------------------------------------------------------------------")
-
-    if share.files:
-        max_url = max(len(_.url) for _ in share.files.values())
-
-        for file in share.files.values():
-            print(' - %s  %s  %s  %s' % (shorten(file.name, 28), humansize(file.size, True), file.url.ljust(max_url), file.readystate))
-    else:
-        print(" - No files")
-
-    print()
-
-
-def entry_point():
-    try:
-        main()
-    except APIError as ex:
-        logger.error("API error: %s", ex)
-
-
-def pattern(string):
-    import glob
-
-    ret = []
-
-    if os.path.isfile(string):
-        ret.append(open(string, 'rb'))
-    else:
-        if os.path.isdir(string):
-            pattern = string + '/*'
-        else:
-            pattern = string
-
-        for item in glob.iglob(pattern):
-            if os.path.isfile(item):
-                ret.append(open(item, 'rb'))
-
-    if not ret:
-        logger.warning("%s: no match", string)
-
-    return ret
-
-
-def main():
-    signal.signal(signal.SIGINT, signal.SIG_DFL)
-
-    home = os.getenv('USERPROFILE') or os.getenv('HOME')
-
-    parser = argparse.ArgumentParser(
-            description="A command-line Ge.tt uploader and manager",
-            epilog="Note that whenever http://ge.tt/<share_name>[/v/<fileid>] is expected, you can omit the http://ge.tt/ part.")
-    parser.add_argument('-D', dest='debug', action='store_true',
-        help="Debug API calls (warning: very verbose).")
-
-    upload_group = parser.add_argument_group('Upload options')
-    upload_group.add_argument('file', nargs='*', type=pattern,
-        help="Name of a file or a directory to upload. Patterns are allowed. This is not recursive.")
-    upload_group.add_argument('-t', dest='title',
-        help="Title of the newly created share.")
-    upload_group.add_argument('-s', dest='share',
-        help="URL of the share to upload to (defaults to a newly created one).")
-    upload_group.add_argument('-P', dest='parallel_upload', action='store_true',
-        help="Upload files in parallel rather than sequentially. The progress bars are displayed in ascending file size order.")
-
-    search_group = parser.add_argument_group('Search actions')
-    search_group.add_argument('-S', '--search', nargs='+', dest='search',
-        metavar='SEARCH_TERM', help="Search in share titles and file names.")
-    search_group.add_argument('-R', '--similarity-ratio',
-        default=DEFAULT_SIMILARITY_RATIO, type=float, dest='similarity_ratio',
-        help="Similarity ratio (between 0 and 1) used for searching. 1 means strict, 0 means very loose.")
-
-    other_group = parser.add_argument_group("Other actions")
-    other_group.add_argument('--delete', nargs='+', dest='delete',
-        metavar='URL', help="Delete a share or a file.")
-    other_group.add_argument('-l', '--list', nargs='*', dest='list',
-        metavar='SHARE_URL', help="List the files in the specified share. If no share is specified, list all your shares.")
-
-    auth_group = parser.add_argument_group("Authentification")
-    auth_group.add_argument('-L', dest='ignore_token', action='store_true',
-        help="Log-in with a different account than the stored one (if any).")
-    auth_group.add_argument('-e', dest='email', help="Email to login with.")
-    auth_group.add_argument('-p', dest='password', help="Password to login with.")
-    auth_group.add_argument('-k', dest='tokenfile',
-        default=os.path.join(home, '.gett-token'),
-        help="Ge.tt token file path (default: ~/.gett-token).")
-
-    args = parser.parse_args()
-
-    if args.debug:
-        import logging
-        logging.basicConfig(level=logging.DEBUG)
-
-    user = User()
-    logged = False
-
-    # If not logging-in with a different account
-
-    if not args.ignore_token and not args.email:
-        try:
-            # Try to log-in with the token
-
-            token = open(args.tokenfile, 'r').read()
-            user.login_token(token)
-
-            logged = True
-        except (APIError, IOError):
-            pass
-
-    if not logged:
-        if not args.email:
-            args.email = input("Please enter your Ge.tt email: ")
-
-        if not args.password:
-            import getpass
-            args.password = getpass.getpass("Please enter your Ge.tt password: ")
-
-        try:
-            user.login_auth(args.email, args.password)
-        except APIError as ex:
-            logger.error("Unable to login: %s", ex)
-            sys.exit(1)
-
-        reply = input("Do you wish to store the session token? (y/n): ")
-
-        if reply.lower() == 'y':
-            # Save the refreshtoken to the user's home directory (by default)
-            with open(args.tokenfile, 'w') as file:
-                file.write(user.rtoken)
-
-    # --list command
-
-    if args.list is not None:
-        for name in args.list:
-            match = url_re.match(name)
-
-            if not match or match.group(2):
-                parser.error("argument --list: invalid format, please supply either share url or path")
-
-            share = Share(match.group(1))
-            show_share(share)
-
-        if not args.list:
-            found = False
-
-            nshares = 0
-            nfiles = 0
-
-            for share in user.list_shares():
-                found = True
-                nshares += 1
-                nfiles += len(share.files)
-
-                show_share(share)
-
-            print("Displayed %d shares and %d files." % (nshares, nfiles))
-            print()
-
-            if not found:
-                print("You have no shares!")
-                print()
-
-    # --delete command
-
-    if args.delete:
-        for item in args.delete:
-            match = url_re.match(item)
-
-            if not match:
-                parser.error("argument --delete: invalid format, please supply either file/share url or path")
-
-            share = user.get_share(match.group(1))
-
-            if match.group(2):
-                try:
-                    id = match.group(2)
-                    file = share.files[id]
-                    file.destroy()
-
-                    print("Deleted file: %s [%s]" % (file.name, file.url))
-                except KeyError:
-                    print("No such file in the share")
-            else:
-                share.destroy()
-                print("Deleted share: %s [%s]" % \
-                    (share.title or "Untitled", share.url))
-        print()
-
-    # --search command
-
-    if args.search:
-        simplify = lambda s: ascii_only(s.lower())
-        phrase = ' '.join(args.search)
-        phrase_simple = simplify(phrase)
-        print("Searching for `%s`...\n" % phrase)
-
-        ratio = args.similarity_ratio
-        if not 0 <= ratio <= 1:
-            ratio = DEFAULT_SIMILARITY_RATIO
-            logger.warning("Bad similarity ratio value. Using default: %2.f" % \
-                DEFAULT_SIMILARITY_RATIO)
-
-        is_similar = lambda a, b: similar(a, b, ratio)
-
-        found_in_shares = []
-        found_in_files = defaultdict(lambda: [])
-        for share in user.list_shares():
-            title = simplify(share.title or '')
-            if phrase_simple in title or is_similar(phrase_simple, title):
-                found_in_shares.append(share)
-
-            for file in share.files.values():
-                fname = simplify(file.name or '')
-                if phrase_simple in fname or is_similar(phrase_simple, fname):
-                    found_in_files[share].append(file)
-
-        try:
-            max_url = max(len(_.url) for _ in found_in_shares)
-        except ValueError:
-            max_url = 0
-        try:
-            max_url_shares = max(len(_.url) for _ in found_in_files.keys())
-            max_url_files = max(
-                max(len(_.url) for _ in f)
-                for f in found_in_files.values()
-            )
-        except ValueError:
-            max_url_shares = max_url_files = 0
-
-        maximax = max(max_url, max_url_shares + 6, max_url_files + 3)
-
-        if found_in_shares:
-            print("Found %d share(s):" % len(found_in_shares))
-
-            for share in found_in_shares:
-                print(" - %s  %s%s" % (
-                    share.url.ljust(max_url),
-                    ' ' * (maximax - max_url),
-                    (share.title or '')[:75 - max_url]
-                ))
-        else:
-            print("Nothing found in share names.")
-
-        print()
-
-        if found_in_files:
-
-            print("Found %d file(s):" % sum(map(len, found_in_files.values())))
-
-            for share, files in found_in_files.items():
-                print(" - Share %s  %s%s" % (
-                    share.url.ljust(max_url_shares),
-                    ' ' * (maximax - max_url_shares - 6),
-                    (share.title or '')[:69 - max_url_shares]
-                ))
-
-                for file in files:
-                    print("    - %s  %s%s" % (
-                        file.url.ljust(max_url_files),
-                        ' ' * (maximax - max_url_files - 6),
-                        shorten(file.name, 72 - max_url_files)