Commits

Borja Sotomayor  committed 4522249

Updated GO code to v10 API. Updated location of root certificate.

  • Participants
  • Parent commits 90ab147

Comments (0)

Files changed (3)

File tools/globus/transfer.py

     display_ls(destination_ep); print
 
     # submit a transfer
-    code, message, data = api.transfer_generate_id()
+    code, message, data = api.transfer_submission_id()
     transfer_id = data["value"]
     deadline = datetime.utcnow() + timedelta(minutes=10)
     t = transfer_api.SimpleTransfer(transfer_id, source_ep, destination_ep, deadline)
     print >>output,"=== Endpoint pre-activation ==="
     display_endpoint(endpoint_name)
     print
-    code, reason, result = api.endpoint_activate(endpoint_name, None,
-                                                 if_expires_in=600)
-    if result.code.startswith("AutoActivationFailed"):
-        print >>output,"Auto activation failed, ls and transfers will likely fail!"
-    print >>output,"result: %s (%s)" % (result.code, result.message)
+    code, reason, result = api.endpoint_autoactivate(endpoint_name, if_expires_in=600)
+    #if result.code.startswith("AutoActivationFailed"):
+    #    print >>output,"Auto activation failed, ls and transfers will likely fail!"
+    #print >>output,"result: %s (%s)" % (result.code, result.message)
     print >>output,"=== Endpoint post-activation ==="
     display_endpoint(endpoint_name)
     print
         # upper limit on request_time.
         kwargs["request_time"] = "%s," % min_request_time
 
-    code, reason, tasks = api.tasks(**kwargs)
+    code, reason, tasks = api.task_list(**kwargs)
     print >>output,"Tasks for %s:" % api.username
-    for task in tasks.get("DATA", []):
+    for task in tasks["DATA"]:
         print >>output,"Task %s:" % task["task_id"]
         _print_task(task)
 
     _print_task(data, 0)
 
     if show_subtasks:
-        code, reason, data = api.subtasks(task_id)
-        subtasks = data.get("DATA", [])
+        code, reason, data = api.subtask_list(task_id)
+        subtasks = data["DATA"]
         for t in subtasks:
             print >>output,"  subtask %s:" % t["task_id"]
             _print_task(t, 4)
     headers = "name, type, permissions, size, user, group, last_modified"
     headers_list = headers.split(", ")
     print >>output,headers
-    for f in data.get("DATA", []):
+    for f in data["DATA"]:
         print >>output,", ".join([unicode_(f[k]) for k in headers_list])
 
 def _print_endpoint(ep):
 
 
 def display_endpoints():
-    code, reason, endpoints = api.endpoints(limit=100)
+    code, reason, endpoints = api.endpoint_list(limit=100)
     print >>output, ("Found %d endpoints for user %s:" %
                      (endpoints["length"], api.username))
-    for ep in endpoints.get("DATA", []):
+    for ep in endpoints["DATA"]:
         _print_endpoint(ep)
 
 

File tools/globus/transfer_api.py

 
 ipython -- transfer_api.py USERNAME -k ~/.globus/userkey.pem \
            -c ~/.globus/usercert.pem \
-           -C ~/.globus/certificates/1c3f2ca8.0
+           -C ../gd-bundle_ca.cert
 
 OR
 
 
 See https://transfer.api.globusonline.org for API documentation.
 """
-
+import os.path
+import os
+import sys
+import platform
+import socket
+import json
+import urllib
+import time
+import ssl
+import struct
+import traceback
+from urlparse import urlparse
 from httplib import BadStatusLine
-import json
-import os.path
-import socket
-import sys
-import urllib
-from urlparse import urlparse
 
 from verified_https import VerifiedHTTPSConnection
 
-
-VERSION = "v0.9"
+VERSION = "v0.10"
 DEFAULT_BASE_URL = "https://transfer.api.globusonline.org/" + VERSION
+RETRY_WAIT_SECONDS=30
 
 __all__ = ["TransferAPIClient","TransferAPIError", "InterfaceError",
            "ClientError", "ServerError", "ExternalError",
            "ServiceUnavailable"]
 
+# client version
+__version__ = "0.10.5"
 
 class TransferAPIClient(object):
     """
     or None if the response was empty, or a convenience wrapper around
     the JSON data if the data itself is hard to use directly.
 
-    Endpoint names can be full canonical names of the form ausername#epname,
-    or simply epname, in which case the API looks at the logged in user's
-    endpoints.
+    Endpoint names can be full canonical names of the form
+    ausername#epname, or simply epname, in which case the API looks at
+    the logged in user's endpoints.
     """
 
     def __init__(self, username, server_ca_file,
                  cert_file=None, key_file=None, saml_cookie=None,
                  base_url=DEFAULT_BASE_URL,
                  timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
-                 httplib_debuglevel=0):
+                 httplib_debuglevel=0, max_attempts=1):
         """
         Initialize a client with the client credential and optional alternate
         base URL.
         @param base_url: optionally specify an alternate base url, if testing
                          out an unreleased or alternatively hosted version of
                          the API.
+        @param timeout: timeout to set on the underlying TCP socket.
+        @param max_attempts: Retry every API call on network
+                             errors and ServiceUnavailable up to this many
+                             times. Sleeps for 30 seconds between each attempt.
+                             Note that a socket timeout will be treated as
+                             a network error and retried. When max_attempts
+                             is exceeded, the exception from the last attempt
+                             will be raised. max_attempts=1 implies no
+                             retrying.
         """
         if not os.path.isfile(server_ca_file):
             raise ValueError("server_ca_file not found: %s" % server_ca_file)
                 if not os.path.isfile(key_file):
                     raise ValueError("key_file not found: %s" % key_file)
 
+        if max_attempts is not None:
+            max_attempts = int(max_attempts)
+            if max_attempts < 1:
+                raise ValueError(
+                    "max_attempts must be None or a positive integer")
+        self.max_attempts = max_attempts
+
         self.saml_cookie = saml_cookie
         self.cert_file = cert_file
         self.key_file = key_file
         self.timeout = timeout
 
         if saml_cookie:
-            if saml_cookie.find("un=%s|" % username) == -1:
+            unquoted = urllib.unquote(saml_cookie)
+            if unquoted.find("un=%s|" % username) == -1:
                 raise ValueError("saml cookie username does not match "
                                  "username argument")
             self.headers = {}
         self.print_response = False
         self.c = None
 
+        self.user_agent = "Python-httplib/%s (%s)" \
+                          % (platform.python_version(), platform.system())
+        self.client_info = "transfer_api.py/%s" % __version__
+
     def connect(self):
         """
         Create an HTTPS connection to the server. Run automatically by
         self.c = None
 
     def _request(self, method, path, body=None, content_type=None):
-        if self.c is None:
-            self.connect()
         if not path.startswith("/"):
             path = "/" + path
         url = self.base_url + path
             headers["Content-Type"] = content_type
 
         if self.print_request:
-            print "REQUEST:"
+            print
+            print ">>>REQUEST>>>:"
             print "%s %s" % (method, url)
             if self.saml_cookie:
                 # Should be enough to show the username and still hide the
                 # signature.
                 headers["Cookie"] = "saml=%s..." % self.saml_cookie[:31]
-            print "headers:"
             for h in headers.iteritems():
-                print "  %s: %s" % h
-            print "body:"
+                print "%s: %s" % h
+            print
             if body:
                 print body
 
         if self.saml_cookie:
             headers["Cookie"] = "saml=%s" % self.saml_cookie
 
+        headers["User-Agent"] = self.user_agent
+        headers["X-Transfer-API-Client"] = self.client_info
+
         def do_request():
+            if self.c is None:
+                self.connect()
             self.c.request(method, url, body=body, headers=headers)
-            return self.c.getresponse()
-        try:
-            r = do_request()
-        except BadStatusLine:
-            # This happens when the connection is closed by the server
-            # in between request, which is very likely when using
-            # interactively or in a client that waits for user input
-            # between requests.
-            self.c.close()
-            self.c = None
-            self.connect()
-            r = do_request()
+            r = self.c.getresponse()
+            response_body = r.read()
+            return r, response_body
 
-        response_body = r.read()
+        for attempt in xrange(self.max_attempts):
+            #print "attempt:", attempt
+            r = None
+            try:
+                try:
+                    r, response_body = do_request()
+                except BadStatusLine:
+                    # This happens when the connection is closed by the server
+                    # in between requests, which is very likely when using
+                    # interactively, in a client that waits for user input
+                    # between requests, or after a retry wait. This does not
+                    # count as an attempt - it just means the old connection
+                    # has gone stale and we need a new one.
+                    # TODO: find a more elegant way to re-use the connection
+                    #       on closely spaced requests. Can we tell that the
+                    #       connection is dead without making a request?
+                    self.close()
+                    r, response_body = do_request()
+            except ssl.SSLError:
+                # This probably has to do with failed authentication, so
+                # retrying is not useful.
+                traceback.print_exc()
+                self.close()
+                raise
+            except socket.error:
+                # Network error. If the last attempt failed, raise,
+                # otherwise do nothing and go on to next attempt.
+                traceback.print_exc()
+                self.close()
+                if attempt == self.max_attempts - 1:
+                    raise
+
+            # Check for ServiceUnavailable, which is treated just like
+            # network errors.
+            if r is not None and attempt < self.max_attempts - 1:
+                error_code = r.getheader("X-Transfer-API-Error", None)
+                if error_code is not None \
+                and error_code.startswith("ServiceUnavailable"):
+                    # Force sleep below and continue loop
+                    self.close()
+                    r = None
+
+            if r is not None:
+                break
+            else:
+                time.sleep(RETRY_WAIT_SECONDS)
 
         if self.print_response:
-            print "RESPONSE:"
-            print "status line:", r.status, r.reason
-            print "headers:"
+            print
+            print "<<<RESPONSE<<<:"
+            print r.status, r.reason
             for h in r.getheaders():
-                print "  %s: %s" % h
-            print "body:"
+                print "%s: %s" % h
+            print
             print response_body
 
         return r, response_body
         """
         return self._request_json("POST", path, body, "application/json")
 
+    def delete(self, path):
+        """
+        @return: (status_code, status_reason, data)
+        @raise TransferAPIError
+        """
+        return self._request_json("DELETE", path)
+
     # Convenience API methods:
     def tasksummary(self, **kw):
         """
         @return: (status_code, status_reason, data)
         @raise TransferAPIError
         """
-        return self.get("/tasksummary")
+        return self.get("/tasksummary" + encode_qs(kw))
 
-    def tasks(self, **kw):
+    def task_list(self, **kw):
         """
         @return: (status_code, status_reason, data)
         @raise TransferAPIError
         """
-        return self.get("/task")
+        return self.get("/task_list" + encode_qs(kw))
 
     def task(self, task_id, **kw):
         """
         @return: (status_code, status_reason, data)
         @raise TransferAPIError
         """
-        return self.get("/task(%s)" % task_id + encode_qs(kw))
+        return self.get("/task/%s" % task_id + encode_qs(kw))
 
-    def subtasks(self, parent_task_id, **kw):
+    def subtask_list(self, parent_task_id, **kw):
         """
         @return: (status_code, status_reason, data)
         @raise TransferAPIError
         """
-        return self.get("/task(%s)/subtask" % parent_task_id + encode_qs(kw))
+        return self.get("/task/%s/subtask_list"
+                        % parent_task_id + encode_qs(kw))
 
     def subtask(self, task_id, **kw):
         """
         @return: (status_code, status_reason, data)
         @raise TransferAPIError
         """
-        return self.get("/subtask(%s)" % task_id + encode_qs(kw))
+        return self.get("/subtask/%s" % task_id + encode_qs(kw))
 
-    def task_events(self, parent_task_id, **kw):
+    def task_event_list(self, parent_task_id, **kw):
         """
         @return: (status_code, status_reason, data)
         @raise TransferAPIError
         """
-        return self.get("/task(%s)/event" % parent_task_id + encode_qs(kw))
+        return self.get("/task/%s/event_list" % parent_task_id + encode_qs(kw))
 
-    def subtask_events(self, task_id, **kw):
+    def subtask_event_list(self, task_id, **kw):
         """
         @return: (status_code, status_reason, data)
         @raise TransferAPIError
         """
-        return self.get("/subtask(%s)/event" % task_id + encode_qs(kw))
+        return self.get("/subtask/%s/event_list" % task_id + encode_qs(kw))
 
-    def endpoints(self, **kw):
+    def endpoint_list(self, **kw):
         """
         @return: (status_code, status_reason, data)
         @raise TransferAPIError
         """
-        return self.get("/user(%s)/endpoint" % self.username + encode_qs(kw))
+        return self.get("/endpoint_list" + encode_qs(kw))
 
     def endpoint(self, endpoint_name, **kw):
         """
         @return: (status_code, status_reason, data)
         @raise TransferAPIError
         """
-        return self.get(_endpoint_path(endpoint_name))
+        return self.get(_endpoint_path(endpoint_name) + encode_qs(kw))
 
-    def endpoint_activation_requirements(self, endpoint_name):
+    def endpoint_activation_requirements(self, endpoint_name, **kw):
         """
         @return: (code, reason, data), where data is an
                  ActivationRequirements instance instead of a plain
         @raise TransferAPIError
         """
         code, reason, data = self.get(_endpoint_path(endpoint_name,
-                                                 "/activation_requirements"))
+                                                 "/activation_requirements")
+                                      + encode_qs(kw))
         if code == 200 and data:
-            data = ActivationRequirements(data)
+            data = ActivationRequirementList(data)
         return code, reason, data
 
     def endpoint_activate(self, endpoint_name, filled_requirements,
-                          if_expires_in="", timeout=5):
+                          if_expires_in="", timeout=30):
         """
         @param endpoint_name: partial or canonical name of endpoint to
                               activate.
         if filled_requirements:
             body = json.dumps(filled_requirements.json_data)
         else:
-            body = None
+            raise ValueError("Use autoactivate instead; using activate "
+                "with an empty request body to auto activate is "
+                "deprecated.")
         # Note: blank query parameters are ignored, so we can pass blank
         # values to use the default behavior.
         qs = encode_qs(dict(if_expires_in=str(if_expires_in),
         code, reason, data = self.post(
             _endpoint_path(endpoint_name, "/activate" + qs), body=body)
         if code == 200 and data:
-            data = ActivationResult(data)
+            data = ActivationRequirementList(data)
         return code, reason, data
 
-    def endpoint_ls(self, endpoint_name, path=""):
+    def endpoint_autoactivate(self, endpoint_name, if_expires_in="",
+                              timeout=30):
+        """
+        @param endpoint_name: partial or canonical name of endpoint to
+                              activate.
+        @param if_expires_in: don't re-activate endpoint if it doesn't expire
+                              for this many minutes. If not passed, always
+                              activate, even if already activated.
+        @param timeout: timeout in seconds to attempt contacting external
+                        servers to get the credential.
+        @return: (code, reason, data), where data is an ActivationRequirements
+                 instance.
+        @raise TransferAPIError
+        """
+        # Note: blank query parameters are ignored, so we can pass blank
+        # values to use the default behavior.
+        qs = encode_qs(dict(if_expires_in=str(if_expires_in),
+                            timeout=str(timeout)))
+        code, reason, data = self.post(
+            _endpoint_path(endpoint_name, "/autoactivate" + qs), body=None)
+        if code == 200 and data:
+            data = ActivationRequirementList(data)
+        return code, reason, data
+
+    def endpoint_deactivate(self, endpoint_name, **kw):
+        """
+        @param endpoint_name: partial or canonical name of endpoint to
+                              activate.
+        @return: (code, reason, data)
+        @raise TransferAPIError
+        """
+        # Note: blank query parameters are ignored, so we can pass blank
+        # values to use the default behavior.
+        code, reason, data = self.post(
+            _endpoint_path(endpoint_name, "/deactivate") + encode_qs(kw),
+            body=None)
+        return code, reason, data
+
+    def endpoint_ls(self, endpoint_name, path="", **kw):
         """
         @return: (status_code, status_reason, data)
         @raise TransferAPIError
         """
+        kw["path"] = path
         return self.get(_endpoint_path(endpoint_name, "/ls")
-                        + encode_qs(dict(path=path)))
+                        + encode_qs(kw))
 
     def endpoint_create(self, endpoint_name, hostname, description="",
                         scheme="gsiftp", port=2811, subject=None,
-                        myproxy_server=None):
+                        myproxy_server=None, public=False,
+                        is_globus_connect=False):
         """
         @return: (status_code, status_reason, data)
         @raise TransferAPIError
         """
-        code, reason, data = self.endpoint(endpoint_name)
-        if code != 404:
-            raise InterfaceError("Endpoint '%s' already exists"
-                                 % endpoint_name)
         data = {
                  "DATA_TYPE": "endpoint",
                  "myproxy_server": myproxy_server,
                  "description": description,
+                 "canonical_name": endpoint_name,
+                 "public": public,
+                 "is_globus_connect": is_globus_connect,
                  "DATA": [dict(DATA_TYPE="server",
                                hostname=hostname,
                                scheme=scheme,
                                port=port,
                                subject=subject)],
                }
-        return self.put(_endpoint_path(endpoint_name), json.dumps(data))
+        return self.post("/endpoint", json.dumps(data))
 
-    def endpoint_update(self, endpoint_data):
+    def endpoint_update(self, endpoint_name, endpoint_data):
         """
         Call endpoint to get the data, modify as needed, then pass the
         modified data to this method.
 
-        Note that rename is not supported; if you change the name, it will
-        create a new endpoint.
+        @return: (status_code, status_reason, data)
+        @raise TransferAPIError
+        """
+        return self.put(_endpoint_path(endpoint_name),
+                        json.dumps(endpoint_data))
+
+    def endpoint_rename(self, endpoint_name, new_endpoint_name):
+        _, _, endpoint_data = self.endpoint(endpoint_name)
+        endpoint_data["canonical_name"] = new_endpoint_name
+        del endpoint_data["name"]
+        return self.endpoint_update(endpoint_name, endpoint_data)
+
+    def endpoint_delete(self, endpoint_name):
+        """
+        Delete the specified endpoint. Existing transfers using the endpoint
+        will continue to work, but you will not be able to use the endpoint
+        in any new operations, and it will be gone from the endpoint_list.
 
         @return: (status_code, status_reason, data)
         @raise TransferAPIError
         """
-        return self.put(_endpoint_path(endpoint_data["canonical_name"]),
-                        json.dumps(endpoint_data))
+        return self.delete(_endpoint_path(endpoint_name))
 
-    def transfer_generate_id(self):
+    def transfer_submission_id(self):
         """
         @return: (status_code, status_reason, data)
         @raise: TransferAPIError
         """
-        return self.get("/transfer/generate_id")
+        return self.get("/transfer/submission_id")
 
     def transfer(self, transfer):
         """
 class Transfer(object):
     """
     Class for constructing a transfer request, which is a collection of items
-    containing the source and destination endpoint and path, along with flags.
-    Each item can have different source and destination endpoints.
+    containing the source and destination paths, along with flags.
+    A transfer can only involve one source and one destination endpoint, so
+    they are set in the constructor.
     """
+    def __init__(self, submission_id, source_endpoint, destination_endpoint,
+                 deadline=None, sync_level=None):
+        self.submission_id = submission_id
+        self.deadline = deadline
+        self.sync_level = sync_level
+        self.items = []
+        self.source_endpoint = source_endpoint
+        self.destination_endpoint = destination_endpoint
 
-    def __init__(self, transfer_id, deadline=None):
-        self.transfer_id = transfer_id
-        self.deadline = deadline
-        self.items = []
-
-    def add_item(self, source_endpoint, source_path,
-                 destination_endpoint, destination_path,
-                 recursive=False):
-        item = dict(source_endpoint=source_endpoint,
+    def add_item(self, source_path, destination_path, recursive=False,
+                 verify_size=None):
+        item = dict(source_endpoint=self.source_endpoint,
                     source_path=source_path,
-                    destination_endpoint=destination_endpoint,
+                    destination_endpoint=self.destination_endpoint,
                     destination_path=destination_path,
                     recursive=recursive,
+                    verify_size=verify_size,
                     DATA_TYPE="transfer_item")
         self.items.append(item)
 
     def as_data(self):
+        if self.deadline is None:
+            deadline = None
+        else:
+            deadline = str(self.deadline)
         return { "DATA_TYPE": "transfer",
                  "length": len(self.items),
-                 "transfer_id": self.transfer_id,
-                 "deadline": str(self.deadline),
+                 "submission_id": self.submission_id,
+                 "deadline": deadline,
+                 "sync_level": self.sync_level,
                  "DATA": self.items }
 
     def as_json(self):
         return json.dumps(self.as_data())
 
-
-class SimpleTransfer(Transfer):
-    """
-    A transfer involving a single source and destination endpoint for all
-    the items.
-    """
-    def __init__(self, transfer_id, source_endpoint, destination_endpoint,
-                 deadline=None):
-        super(SimpleTransfer, self).__init__(transfer_id, deadline)
-        self.source_endpoint = source_endpoint
-        self.destination_endpoint = destination_endpoint
-
-    def add_item(self, source_path, destination_path, recursive=False):
-        super(SimpleTransfer, self).add_item(
-                                self.source_endpoint, source_path,
-                                self.destination_endpoint, destination_path,
-                                recursive)
+# For backward compatibility; new code should just use Transfer.
+SimpleTransfer = Transfer
 
 
 class ActivationRequirementList(object):
             self.req_list.append(r)
             self.index_map[key] = len(self.req_list) - 1
 
-        for f in self.fields:
-            setattr(self, f, json_data.get(f))
+    def __getitem__(self, key):
+        return self.json_data[key]
 
-    def __getitem__(self, key):
+    def _get_requirement(self, key):
         """
         Keys should be "type.name"
         """
         @raise KeyError: if requirement not found.
         """
         key = type + "." + name
-        self[key]["value"] = value
+        self._get_requirement(key)["value"] = value
 
     def get_requirement_value(self, type, name):
         """
         @raise KeyError: if requirement not found.
         """
-        return self[key]["value"]
+        key = type + "." + name
+        return self._get_requirement(key)["value"]
 
     def is_required(self, type, name):
         """
         @raise KeyError: if requirement not found.
         """
-        return self[key]["required"]
+        return self._get_requirement(key)["required"]
 
     def is_private(self, type, name):
         """
         @raise KeyError: if requirement not found.
         """
-        return self[key]["private"]
+        return self._get_requirement(key)["private"]
 
     def get_requirements_list(self, type):
         """
             return reqs
         return None
 
+    def set_submit_type(self, type):
+        """
+        Removes requirements of other types; this is needed when submitting,
+        to indicate what type of activation is actually desired.
+        """
+        self.req_list = [req for req in self.req_list if req["type"] == type]
+
     def as_json(self):
         return json.dumps(self.json_data)
 
     def supported_types(self):
         return self.types
 
-class ActivationRequirements(ActivationRequirementList):
-    fields = "activated expire_time auto_activation_supported".split()
+    def __str__(self):
+        return str(self.json_data)
 
-class ActivationResult(ActivationRequirementList):
-    fields = "code message request_id resource expire_time subject".split()
-
+    def __repr__(self):
+        return str(self.json_data)
 
 def _get_host_port(url):
     o = urlparse(url)
         self.status_message = status_message
         self.code = error_code
         if error_data:
-            self.resource = error_data["resource"]
-            self.message = error_data["message"]
-            self.request_id = error_data["request_id"]
+            self.read_error_data(error_data)
         else:
             self.resource = None
-            self.message = status_message
+            self._message = status_message
             self.request_id = None
 
-        Exception.__init__(self, self.message)
+        Exception.__init__(self, status_message)
+
+    def read_error_data(self, error_data):
+        self.resource = error_data["resource"]
+        self._message = error_data["message"]
+        self.request_id = error_data["request_id"]
+
+    @property
+    def message(self):
+        return self._message
 
     @property
     def status(self):
     endpoint_name must be percent encoded, because it may contain
     '#' (used to separate username from endpoint name).
     """
-    p = "/endpoint(%s)" % urllib.quote(endpoint_name)
+    p = "/endpoint/%s" % urllib.quote(endpoint_name)
     if trailing_path:
         p += trailing_path
     return p
     else:
         return ""
 
+def process_args(args=None, parser=None):
+    from optparse import OptionParser
+
+    if not parser:
+        usage = "usage: %prog username -k KEY_FILE -c CERT_FILE " \
+              + "-C SERVER_CA_FILE"
+        parser = OptionParser(usage=usage)
+
+    parser.add_option("-C", "--server-ca-file", dest="server_ca_file",
+                      help="ca file for validating server",
+                      metavar="SERVER_CA_FILE")
+    parser.add_option("-c", "--cert", dest="cert_file",
+                      help="client cert file", metavar="CERT_FILE")
+    parser.add_option("-k", "--key", dest="key_file",
+                      help="client key file", metavar="KEY_FILE")
+    parser.add_option("-s", "--saml-cookie", dest="saml_cookie",
+                      help="alternate authentication method",
+                      metavar="COOKIE_DATA")
+    parser.add_option("-p", "--password-prompt", dest="password_prompt",
+                      action="store_true", default=False,
+                      help="prompt for GO password for authentication")
+    parser.add_option("-b", "--base-url", dest="base_url",
+                      help="alternate base URL", metavar="URL")
+    parser.add_option("-t", "--socket-timeout", dest="timeout", type="int",
+                      help="timeout in seconds for underlying TCP socket",
+                      metavar="TIMEOUT_SECONDS")
+    parser.add_option("-a", "--max-attempts", dest="max_attempts", type="int",
+                      help="retry up to this many times on connection errors",
+                      metavar="ATTEMPTS")
+    parser.set_defaults(base_url=DEFAULT_BASE_URL,
+                        max_attempts=1,
+                        timeout=socket._GLOBAL_DEFAULT_TIMEOUT)
+
+    options, args = parser.parse_args(args)
+    if len(args) < 1:
+        parser.error("username arguments is required")
+
+    if not options.server_ca_file:
+        parser.error("missing required option -C (--server-ca-file)")
+
+    if options.password_prompt:
+        if options.saml_cookie or options.key_file or options.cert_file:
+            parser.error("use only one authentication method: -p, -k/-c, or -s")
+        from get_go_cookie import get_go_auth
+        username = args[0]
+        success = False
+        for i in xrange(5):
+            try:
+                result = get_go_auth(ca_certs=options.server_ca_file,
+                                     username=username)
+                args[0] = result.username
+                options.saml_cookie = result.cookie
+                success = True
+                break
+            except ValueError as e:
+                sys.stderr.write("authentication to GO failed")
+                if i < 4:
+                     sys.stderr.write(", please try again")
+                sys.stderr.write("\n")
+                username = None
+        if not success:
+            sys.stderr.write("too many failed attempts, exiting\n")
+            sys.exit(2)
+    elif options.saml_cookie:
+        if options.key_file or options.cert_file:
+            parser.error("use only one authentication method: -p, -k/-c, or -s")
+    elif not options.key_file or not options.cert_file:
+        parser.error("specify one authentication method: -p, -k/-c, or -s")
+
+    return options, args
+
+
+def get_random_serial():
+    """
+    Return a random unsigned 64-bit integer for use as a certificate
+    serial number.
+
+    Under RFC 3820 there are many ways to generate the serial number. However
+    making the number unpredictable has security benefits, e.g. it can make
+    this style of attack more difficult:
+
+    http://www.win.tue.nl/hashclash/rogue-ca
+    """
+    # os.urandom(8) yields 8 cryptographically strong random bytes;
+    # "<Q" unpacks them as one little-endian unsigned 64-bit integer.
+    return struct.unpack("<Q", os.urandom(8))[0]
+
+def create_proxy_from_file(issuer_cred_file, public_key, lifetime=3600):
+    """
+    Create a proxy of the credential in issuer_cred_file, using the
+    specified public key and lifetime.
+
+    Convenience wrapper: reads the whole credential file into memory and
+    delegates all real work to create_proxy().
+
+    @param issuer_cred_file: file containing a credential, including the
+                             certificate, public key, and optionally chain
+                             certs.
+    @param public_key: the public key as a PEM string
+    @param lifetime: lifetime of the proxy in seconds (default 1 hour)
+    @return: the new proxy certificate plus issuer chain, as a PEM string
+             (see create_proxy)
+    """
+    with open(issuer_cred_file) as f:
+        issuer_cred = f.read()
+    return create_proxy(issuer_cred, public_key, lifetime)
+
+# PEM delimiters used to locate the RSA private key inside a credential
+# string that concatenates certificate, private key, and chain certs.
+_begin_private_key = "-----BEGIN RSA PRIVATE KEY-----"
+_end_private_key = "-----END RSA PRIVATE KEY-----"
+
+# The issuer is required to have this bit set if keyUsage is present;
+# see RFC 3820 section 3.1.
+REQUIRED_KEY_USAGE = ["Digital Signature"]
+def create_proxy(issuer_cred, public_key, lifetime=3600):
+    """
+    Create an RFC 3820 proxy certificate signed by the credential in
+    issuer_cred, binding the supplied public key.
+
+    @param issuer_cred: PEM string containing (in order) the issuer
+                        certificate, its RSA private key, and optionally
+                        chain certificates.
+    @param public_key: the proxy's public key as a PEM string
+    @param lifetime: validity period of the proxy in seconds
+    @return: the signed proxy certificate as PEM, concatenated with the
+             issuer certificate and chain.
+    @raise ValueError: if the issuer has a keyUsage extension missing a
+                       usage listed in REQUIRED_KEY_USAGE.
+    """
+    from M2Crypto import X509, RSA, EVP, ASN1, BIO
+
+    # Standard order is cert, private key, then the chain.
+    # Split on the PEM private-key markers: everything before is the
+    # issuer cert, everything after is the chain.
+    _begin_idx = issuer_cred.index(_begin_private_key)
+    _end_idx = issuer_cred.index(_end_private_key) + len(_end_private_key)
+    issuer_key = issuer_cred[_begin_idx:_end_idx]
+    issuer_cert = issuer_cred[:_begin_idx]
+    issuer_chain = issuer_cert + issuer_cred[_end_idx:]
+
+    proxy = X509.X509()
+    # The X.509 version field is zero-based, so 2 means an X.509v3 cert
+    # (v3 is needed for the extensions added below).
+    proxy.set_version(2)
+    serial = get_random_serial()
+    proxy.set_serial_number(serial)
+
+    # Validity window: [now, now + lifetime], expressed as ASN.1 UTCTime.
+    now = long(time.time())
+    not_before = ASN1.ASN1_UTCTIME()
+    not_before.set_time(now)
+    proxy.set_not_before(not_before)
+
+    not_after = ASN1.ASN1_UTCTIME()
+    not_after.set_time(now + lifetime)
+    proxy.set_not_after(not_after)
+
+    # Load the caller-supplied public key (PEM) into an EVP.PKey and
+    # install it as the proxy's public key.
+    pkey = EVP.PKey()
+    tmp_bio = BIO.MemoryBuffer(str(public_key))
+    rsa = RSA.load_pub_key_bio(tmp_bio)
+    pkey.assign_rsa(rsa)
+    del rsa
+    del tmp_bio
+    proxy.set_pubkey(pkey)
+
+    issuer = X509.load_cert_string(issuer_cert)
+
+    # If the issuer has keyUsage extension, make sure it contains all
+    # the values we require.
+    try:
+        keyUsageExt = issuer.get_ext("keyUsage")
+        if keyUsageExt:
+            values = keyUsageExt.get_value().split(", ")
+            for required in REQUIRED_KEY_USAGE:
+                if required not in values:
+                    raise ValueError(
+                      "issuer contains keyUsage without required usage '%s'"
+                      % required)
+    except LookupError:
+        # No keyUsage extension present; nothing to verify.
+        pass
+
+    # hack to get a copy of the X509 name that we can append to.
+    issuer_copy = X509.load_cert_string(issuer_cert)
+    proxy_subject = issuer_copy.get_subject()
+
+    # Proxy subject = issuer subject + an extra CN set to the (random)
+    # serial number, per the RFC 3820 naming convention.
+    proxy_subject.add_entry_by_txt(field="CN", type=ASN1.MBSTRING_ASC,
+                                   entry=str(serial),
+                                   len=-1, loc=-1, set=0)
+    proxy.set_subject(proxy_subject)
+    proxy.set_issuer(issuer.get_subject())
+
+    # create a full proxy: the critical proxyCertInfo extension with
+    # "Inherit all" policy language grants the proxy the issuer's rights.
+    pci_ext = X509.new_extension("proxyCertInfo",
+                                 "critical,language:Inherit all", 1)
+    proxy.add_ext(pci_ext)
+
+    # Clients may wish to add restrictions to the proxy that are not
+    # present in the issuer. To do this, keyUsage and extendedKeyUsage
+    # extensions can be added to the proxy; the effictive usage is
+    # defined as the intersection of the usage. See section 4.2 of the
+    # RFC. In the absense of application specific requirements, we
+    # choose not to add either extension, in which case the usage of the
+    # issuer(s) will be inherited as is. See the example below if you
+    # wish to customize this behavior.
+    #
+    #ku_ext = X509.new_extension("keyUsage",
+    #            "Digital Signature, Key Encipherment, Data Encipherment", 1)
+    #proxy.add_ext(ku_ext)
+
+    # Sign the proxy with the issuer's private key (SHA-1 digest).
+    issuer_rsa = RSA.load_key_string(issuer_key)
+    sign_pkey = EVP.PKey()
+    sign_pkey.assign_rsa(issuer_rsa)
+    proxy.sign(pkey=sign_pkey, md="sha1")
+    # Return the new proxy followed by the issuer cert + chain so the
+    # result is a complete, verifiable credential chain.
+    return proxy.as_pem() + issuer_chain
+
+
+if __name__ == '__main__':
+    # Script entry point: parse CLI options (args[0] is the GO username)
+    # and build the Transfer API client from the chosen authentication
+    # method (SAML cookie or cert/key pair).
+    # NOTE(review): this diff hunk ends here — presumably the script
+    # continues past this point in the full file; verify against the
+    # complete source.
+    options, args = process_args()
+    api = TransferAPIClient(args[0], server_ca_file=options.server_ca_file,
+                            cert_file=options.cert_file,
+                            key_file=options.key_file,
+                            saml_cookie=options.saml_cookie,
+                            base_url=options.base_url,
+                            timeout=options.timeout,
+                            max_attempts=options.max_attempts)

File universe_wsgi.ini.sample

 globus_userkey = .globus/userkey.pem
 globus_usercert = .globus/usercert.pem
 # you also need to specify the path to your root certificate:
-globus_rootcert = /etc/grid-security/certificates/d1b603c3.0
+globus_rootcert = /etc/grid-security/certificates/gd-bundle_ca.cert
 
 # When we want to transfer files back with globus online we symbolicly
 # link those datasets from galaxy's home dir into something readable
 
 [galaxy:tools]
 # tool specific configuration values:
-complete_genomics_root = /Users/steder/T2DTest
+complete_genomics_root = /Users/steder/T2DTest