Greg Von Kuster committed fbfa0c7

Add the framework for discovering and installing simple repository dependencies.


Files changed (9)

lib/galaxy/tool_shed/common_util.py

 import os, urllib2
 from galaxy import util
 from galaxy.util.odict import odict
-from galaxy.tool_shed.encoding_util import tool_shed_decode
+from galaxy.tool_shed import encoding_util
 
 REPOSITORY_OWNER = 'devteam'
 
                     print "The URL\n%s\nraised the exception:\n%s\n" % ( url, str( e ) )
                 if tool_shed_accessible:
                     if text:
-                        tool_dependencies_dict = tool_shed_decode( text )
+                        tool_dependencies_dict = encoding_util.tool_shed_decode( text )
                         for dependency_key, requirements_dict in tool_dependencies_dict.items():
                             tool_dependency_name = requirements_dict[ 'name' ]
                             tool_dependency_version = requirements_dict[ 'version' ]

lib/galaxy/tool_shed/encoding_util.py

 log = logging.getLogger( __name__ )
 
 encoding_sep = '__esep__'
+encoding_sep2 = '__esepii__'
 
 def tool_shed_decode( value ):
     # Extract and verify hash
     try:
         values = simplejson.loads( value )
     except Exception, e:
-        log.debug( "Decoding json value from tool shed threw exception: %s" % str( e ) )
+        log.debug( "Decoding json value from tool shed for value '%s' threw exception: %s" % ( str( value ), str( e ) ) )
     if values is not None:
         try:
             return json_fix( values )
         except Exception, e:
-            log.debug( "Fixing decoded json value from tool shed threw exception: %s" % str( e ) )
+            log.debug( "Fixing decoded json values '%s' from tool shed threw exception: %s" % ( str( values ), str( e ) ) )
             fixed_values = values
     if values is None:
         values = value
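
The new encoding_sep2 value exists so that a list of per-repository tuples can be flattened into a single string before tool_shed_encode() is applied (see get_required_repo_info_dicts in admin_toolshed.py below). A minimal sketch of the join/split round trip, using only the two separators and hypothetical repository values:

    # Illustrative only: the real code also wraps the joined string with
    # encoding_util.tool_shed_encode() before sending it and tool_shed_decode()
    # after receiving it; the repository values below are hypothetical.
    encoding_sep = '__esep__'
    encoding_sep2 = '__esepii__'

    required_repository_tups = [
        [ 'toolshed.g2.bx.psu.edu', 'package_a', 'devteam', 'abc123' ],
        [ 'toolshed.g2.bx.psu.edu', 'package_b', 'devteam', 'def456' ],
    ]

    # Encode: join the fields of each tuple, then join the encoded tuples.
    encoded_str = encoding_sep2.join( [ encoding_sep.join( tup ) for tup in required_repository_tups ] )

    # Decode: reverse the two joins to recover the original tuples.
    decoded_tups = [ s.split( encoding_sep ) for s in encoded_str.split( encoding_sep2 ) ]
    assert decoded_tups == required_repository_tups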

lib/galaxy/tool_shed/tool_dependencies/install_util.py

 import sys, os, subprocess, tempfile
 import common_util
 import fabric_util
-from galaxy.tool_shed.encoding_util import encoding_sep, tool_shed_encode, tool_shed_decode
+from galaxy.tool_shed import encoding_util
 from galaxy.model.orm import and_
 
 from galaxy import eggs
                 for action_elem in param_elem:
                     actions.append( action_elem.text.replace( '$INSTALL_DIR', install_dir ) )
                 if actions:
-                    params_str += 'actions=%s,' % tool_shed_encode( encoding_sep.join( actions ) )
+                    params_str += 'actions=%s,' % encoding_util.tool_shed_encode( encoding_util.encoding_sep.join( actions ) )
             else:
                 if param_elem.text:
-                    param_value = tool_shed_encode( param_elem.text )
+                    param_value = encoding_util.tool_shed_encode( param_elem.text )
                     params_str += '%s=%s,' % ( param_name, param_value )
     if package_name:
         params_str += 'package_name=%s' % package_name

lib/galaxy/util/shed_util_common.py

         valid_filenames.append( '%s.txt' % r )
     valid_filenames.append( '%s.txt' % repository_name )
     return valid_filenames
+def get_repo_info_tuple_contents( repo_info_tuple ):
+    # Take care in handling the repo_info_tuple, which evolves over time as new tool shed features are introduced.
+    if len( repo_info_tuple ) == 6:
+        description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
+        repository_dependencies = None
+    elif len( repo_info_tuple ) == 7:
+        description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
+    return description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies
 def get_repository_by_name_and_owner( trans, name, owner ):
     """Get a repository from the database via name and owner"""
     if trans.webapp.name == 'galaxy':
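
The get_repo_info_tuple_contents helper above centralizes the 6- versus 7-element tuple handling that was previously duplicated in admin_toolshed.py. A small usage sketch, restating the helper and feeding it hypothetical tuples of both shapes:

    def get_repo_info_tuple_contents( repo_info_tuple ):
        # Restated from shed_util_common above: normalize the two tuple shapes.
        if len( repo_info_tuple ) == 6:
            description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
            repository_dependencies = None
        elif len( repo_info_tuple ) == 7:
            description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
        return description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies

    # Hypothetical tuples: the older 6-element form and the newer 7-element form.
    old_style = ( 'a description', 'http://toolshed/repos/devteam/package_a', 'abc123', '5', 'devteam', {} )
    new_style = ( 'a description', 'http://toolshed/repos/devteam/package_a', 'abc123', '5', 'devteam', {}, {} )

    for repo_info_tuple in ( old_style, new_style ):
        contents = get_repo_info_tuple_contents( repo_info_tuple )
        repository_dependencies = contents[ 5 ]
        # None for the older form, {} for the newer form.
        print repository_dependencies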

lib/galaxy/webapps/community/controllers/repository.py

 from galaxy.util.json import from_json_string, to_json_string
 from galaxy.model.orm import and_
 import galaxy.util.shed_util_common as suc
-from galaxy.tool_shed.encoding_util import tool_shed_encode
+from galaxy.tool_shed import encoding_util
 import common
 
 from galaxy import eggs
         update_dict = dict( changeset_revision=changeset_revision, ctx_rev=ctx_rev )
         if changeset_revision == repository.tip( trans.app ):
             # If changeset_revision is the repository tip, there are no additional updates.
-            return tool_shed_encode( update_dict )
+            return encoding_util.tool_shed_encode( update_dict )
         else:
             repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, 
                                                                                      trans.security.encode_id( repository.id ),
                                                                                      changeset_revision )
             if repository_metadata:
                 # If changeset_revision is in the repository_metadata table for this repository, there are no additional updates.
-                return tool_shed_encode( update_dict )
+                return encoding_util.tool_shed_encode( update_dict )
             else:
                 # The changeset_revision column in the repository_metadata table has been updated with a new changeset_revision value since the
                 # repository was installed.  We need to find the changeset_revision to which we need to update.
                         update_to_changeset_hash = changeset_hash
                 update_dict[ 'changeset_revision' ] = str( latest_changeset_revision )
         update_dict[ 'ctx_rev' ] = str( update_to_ctx.rev() )
-        return tool_shed_encode( update_dict )
+        return encoding_util.tool_shed_encode( update_dict )
     @web.expose
     def get_ctx_rev( self, trans, **kwd ):
         """Given a repository and changeset_revision, return the correct ctx.rev() value."""
             return repository_metadata.metadata
         return None
     @web.json
+    def get_readme_files( self, trans, **kwd ):
+        """
+        This method is called when installing or re-installing a single repository into a Galaxy instance.  If the repository at the received
+        changeset_revision includes one or more readme files, return them in a dictionary.
+        """
+        repository_name = kwd[ 'name' ]
+        repository_owner = kwd[ 'owner' ]
+        changeset_revision = kwd[ 'changeset_revision' ]
+        repository = suc.get_repository_by_name_and_owner( trans, repository_name, repository_owner )
+        repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )        
+        return suc.build_readme_files_dict( repository_metadata )
+    @web.json
     def get_repository_dependencies( self, trans, **kwd ):
+        """Return an encoded dictionary of all repositories upon which the contents of the received repository depends."""
         params = util.Params( kwd )
         name = params.get( 'name', None )
         owner = params.get( 'owner', None )
         if repository_metadata:
             metadata = repository_metadata.metadata
             if metadata:
-                # Get a dictionary of all repositories upon which the contents of the received repository depends.
                 repository_dependencies = suc.get_repository_dependencies_for_changeset_revision( trans=trans,
                                                                                                   repository=repository,
                                                                                                   repository_metadata=repository_metadata,
                                                                                                   handled_key_rd_dicts=None,
                                                                                                   circular_repository_dependencies=None )
                 if repository_dependencies:
-                    return tool_shed_encode( repository_dependencies )
+                    return encoding_util.tool_shed_encode( repository_dependencies )
         return ''
     @web.json
     def get_repository_information( self, trans, repository_ids, changeset_revisions, **kwd ):
                                                         repository=repository,
                                                         metadata=None,
                                                         repository_metadata=repository_metadata )      
-            repo_info_dicts.append( tool_shed_encode( repo_info_dict ) )
+            repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
         return dict( includes_tools=includes_tools,
                      includes_repository_dependencies=includes_repository_dependencies,
                      includes_tool_dependencies=includes_tool_dependencies,
                      repo_info_dicts=repo_info_dicts )
     @web.json
-    def get_readme_files( self, trans, **kwd ):
-        """
-        This method is called when installing or re-installing a single repository into a Galaxy instance.  If the received changeset_revision 
-        includes one or more readme files, return them in a dictionary.
-        """
-        repository_name = kwd[ 'name' ]
-        repository_owner = kwd[ 'owner' ]
-        changeset_revision = kwd[ 'changeset_revision' ]
-        repository = suc.get_repository_by_name_and_owner( trans, repository_name, repository_owner )
-        repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, trans.security.encode_id( repository.id ), changeset_revision )        
-        return suc.build_readme_files_dict( repository_metadata )
+    def get_required_repo_info_dict( self, trans, encoded_str ):
+        """Retrive a list of dictionaries that each contain all of the information needed to install the list of repositories defined by encoded_str."""
+        encoded_required_repository_str = encoding_util.tool_shed_decode( encoded_str )
+        encoded_required_repository_tups = encoded_required_repository_str.split( encoding_util.encoding_sep2 )
+        decoded_required_repository_tups = []
+        for encoded_required_repository_tup in encoded_required_repository_tups:
+            decoded_required_repository_tups.append( encoded_required_repository_tup.split( encoding_util.encoding_sep ) )
+        encoded_repository_ids = []
+        changeset_revisions = []
+        for required_repository_tup in decoded_required_repository_tups:
+            tool_shed, name, owner, changeset_revision = required_repository_tup
+            repository = suc.get_repository_by_name_and_owner( trans, name, owner )
+            encoded_repository_ids.append( trans.security.encode_id( repository.id ) )
+            changeset_revisions.append( changeset_revision )
+        if encoded_repository_ids and changeset_revisions:
+            repo_info_dict = from_json_string( self.get_repository_information( trans, encoded_repository_ids, changeset_revisions ) )
+        else:
+            repo_info_dict = {}
+        return repo_info_dict
     @web.expose
     def get_tool_dependencies( self, trans, **kwd ):
-        """Handle a request from a local Galaxy instance."""
+        """Handle a request from the InstallManager of a local Galaxy instance."""
         params = util.Params( kwd )
         message = util.restore_text( params.get( 'message', ''  ) )
         status = params.get( 'status', 'done' )
         from_install_manager = kwd.get( 'from_install_manager', False )
         if from_install_manager:
             if tool_dependencies:
-                return tool_shed_encode( tool_dependencies )
-            return ''
-        # TODO: future handler where request comes from some Galaxy admin feature.
+                return encoding_util.tool_shed_encode( tool_dependencies )
+        return ''
     @web.expose
     def get_tool_versions( self, trans, **kwd ):
         """

lib/galaxy/webapps/community/controllers/workflow.py

 from galaxy.webapps.galaxy.controllers.workflow import attach_ordered_steps
 import common
 import galaxy.util.shed_util_common as suc
-from galaxy.tool_shed.encoding_util import tool_shed_encode, tool_shed_decode
+from galaxy.tool_shed import encoding_util
 
 class RepoInputDataModule( InputDataModule ):
 
         repository_metadata_id = kwd.get( 'repository_metadata_id', '' )
         workflow_name = kwd.get( 'workflow_name', '' )
         if workflow_name:
-            workflow_name = tool_shed_decode( workflow_name )
+            workflow_name = encoding_util.tool_shed_decode( workflow_name )
         message = kwd.get( 'message', '' )
         status = kwd.get( 'status', 'done' )
         repository_metadata = common.get_repository_metadata_by_id( trans, repository_metadata_id )
         repository_id = trans.security.encode_id( repository_metadata.repository_id )
         changeset_revision = repository_metadata.changeset_revision
         metadata = repository_metadata.metadata
-        workflow_name = tool_shed_decode( workflow_name )
+        workflow_name = encoding_util.tool_shed_decode( workflow_name )
         # metadata[ 'workflows' ] is a list of tuples where each contained tuple is
         # [ <relative path to the .ga file in the repository>, <exported workflow dict> ]
         for workflow_tup in metadata[ 'workflows' ]:
         repository_metadata_id = kwd.get( 'repository_metadata_id', '' )
         workflow_name = kwd.get( 'workflow_name', '' )
         if workflow_name:
-            workflow_name = tool_shed_decode( workflow_name )
+            workflow_name = encoding_util.tool_shed_decode( workflow_name )
         message = kwd.get( 'message', '' )
         status = kwd.get( 'status', 'done' )
         repository_metadata = get_repository_metadata_by_id( trans, repository_metadata_id )
                 return open( tmp_fname )
             galaxy_url = trans.get_cookie( name='toolshedgalaxyurl' )
             url = '%sworkflow/import_workflow?tool_shed_url=%s&repository_metadata_id=%s&workflow_name=%s' % \
-                ( galaxy_url, url_for( '/', qualified=True ), repository_metadata_id, tool_shed_encode( workflow_name ) )
+                ( galaxy_url, url_for( '/', qualified=True ), repository_metadata_id, encoding_util.tool_shed_encode( workflow_name ) )
             return trans.response.send_redirect( url )
         return trans.response.send_redirect( web.url_for( controller='workflow',
                                                           action='view_workflow',

lib/galaxy/webapps/galaxy/controllers/admin.py

 from galaxy.web.params import QuotaParamParser
 from galaxy.exceptions import *
 from galaxy.util.odict import *
-from galaxy.tool_shed.encoding_util import tool_shed_decode
+from galaxy.tool_shed import encoding_util
 import galaxy.datatypes.registry
 import logging, imp, subprocess, urllib2
 
                     text = response.read()
                     response.close()
                     if text:
-                        tool_dependencies_dict = tool_shed_decode( text )
+                        tool_dependencies_dict = encoding_util.tool_shed_decode( text )
                         for dependency_key, requirements_dict in tool_dependencies_dict.items():
                             tool_dependency_name = requirements_dict[ 'name' ]
                             tool_dependency_version = requirements_dict[ 'version' ]

lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py

 from galaxy.util.json import from_json_string, to_json_string
 import galaxy.util.shed_util as shed_util
 import galaxy.util.shed_util_common as suc
-from galaxy.tool_shed.encoding_util import tool_shed_encode, tool_shed_decode
+from galaxy.tool_shed import encoding_util
+from galaxy.webapps.community.util import container_util
 from galaxy import eggs, tools
 
 eggs.require( 'mercurial' )
         raw_text = response.read()
         response.close()
         if len( raw_text ) > 2:
-            text = json.from_json_string( tool_shed_decode( raw_text ) )
+            text = json.from_json_string( encoding_util.tool_shed_decode( raw_text ) )
             log.debug( text )
         else:
             text = ''
         return text
+    def get_required_repo_info_dicts( self, tool_shed_url, repo_info_dicts ):
+        """
+        Inspect the list of repo_info_dicts for repository dependencies and append a repo_info_dict for each of them to the list.  All
+        repository_dependencies entries in each of the received repo_info_dicts include all required repositories, so only one pass through
+        this method is required to retrieve all repository dependencies.
+        """
+        # Initialize here so that an empty repo_info_dicts list still returns a list.
+        all_repo_info_dicts = [ rid for rid in repo_info_dicts ]
+        if repo_info_dicts:
+            # We'll send tuples of ( tool_shed, repository_name, repository_owner, changeset_revision ) to the tool shed to discover repository ids.
+            required_repository_tups = []
+            for repo_info_dict in repo_info_dicts:
+                for repository_name, repo_info_tup in repo_info_dict.items():
+                    description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+                        suc.get_repo_info_tuple_contents( repo_info_tup )
+                    if repository_dependencies:
+                        for key, val in repository_dependencies.items():
+                            if key in [ 'root_key', 'description' ]:
+                                continue
+                            toolshed, name, owner, changeset_revision = container_util.get_components_from_key( key )
+                            components_list = [ toolshed, name, owner, changeset_revision ]
+                            if components_list not in required_repository_tups:
+                                required_repository_tups.append( components_list )
+                            for components_list in val:
+                                if components_list not in required_repository_tups:
+                                    required_repository_tups.append( components_list )
+                if required_repository_tups:
+                    # The value of required_repository_tups is a list of tuples, so we need to encode it.
+                    encoded_required_repository_tups = []
+                    for required_repository_tup in required_repository_tups:
+                        encoded_required_repository_tups.append( encoding_util.encoding_sep.join( required_repository_tup ) )
+                    encoded_required_repository_str = encoding_util.encoding_sep2.join( encoded_required_repository_tups )
+                    encoded_required_repository_str = encoding_util.tool_shed_encode( encoded_required_repository_str )
+                    url = suc.url_join( tool_shed_url, '/repository/get_required_repo_info_dict?encoded_str=%s' % encoded_required_repository_str )
+                    response = urllib2.urlopen( url )
+                    text = response.read()
+                    response.close()
+                    if text:
+                        required_repo_info_dict = from_json_string( text )                        
+                        required_repo_info_dicts = []
+                        encoded_dict_strings = required_repo_info_dict[ 'repo_info_dicts' ]
+                        for encoded_dict_str in encoded_dict_strings:
+                            decoded_dict = encoding_util.tool_shed_decode( encoded_dict_str )
+                            required_repo_info_dicts.append( decoded_dict )                        
+                        if required_repo_info_dicts:                            
+                            for required_repo_info_dict in required_repo_info_dicts:
+                                if required_repo_info_dict not in all_repo_info_dicts:
+                                    all_repo_info_dicts.append( required_repo_info_dict )
+        return all_repo_info_dicts
     def get_versions_of_tool( self, app, guid ):
         tool_version = shed_util.get_tool_version( app, guid )
         return tool_version.get_version_ids( app, reverse=True )
             tool_section = None
         for tup in zip( tool_shed_repositories, repo_info_dicts ):
             tool_shed_repository, repo_info_dict = tup
-            repo_info_dict = tool_shed_decode( repo_info_dict )
+            repo_info_dict = encoding_util.tool_shed_decode( repo_info_dict )
             # Clone each repository to the configured location.
             shed_util.update_tool_shed_repository_status( trans.app, tool_shed_repository, trans.model.ToolShedRepository.installation_status.CLONING )
             repo_info_tuple = repo_info_dict[ tool_shed_repository.name ]
             elif operation == "install":
                 reinstalling = util.string_as_bool( params.get( 'reinstalling', False ) )
                 encoded_kwd = kwd[ 'encoded_kwd' ]
-                decoded_kwd = tool_shed_decode( encoded_kwd )
+                decoded_kwd = encoding_util.tool_shed_decode( encoded_kwd )
                 tsr_ids = decoded_kwd[ 'tool_shed_repository_ids' ]
                 repositories_for_installation = []
                 for tsr_id in tsr_ids:
             includes_repository_dependencies = util.string_as_bool( repo_information_dict.get( 'includes_repository_dependencies', False ) )
             includes_tool_dependencies = util.string_as_bool( repo_information_dict.get( 'includes_tool_dependencies', False ) )
             encoded_repo_info_dicts = util.listify( repo_information_dict.get( 'repo_info_dicts', [] ) )
-        repo_info_dicts = [ tool_shed_decode( encoded_repo_info_dict ) for encoded_repo_info_dict in encoded_repo_info_dicts ]
+        repo_info_dicts = [ encoding_util.tool_shed_decode( encoded_repo_info_dict ) for encoded_repo_info_dict in encoded_repo_info_dicts ]
         if ( not includes_tools and not includes_repository_dependencies ) or \
             ( ( includes_tools or includes_repository_dependencies ) and kwd.get( 'select_tool_panel_section_button', False ) ):
             install_repository_dependencies = CheckboxField.is_checked( install_repository_dependencies )
             created_or_updated_tool_shed_repositories = []
             # Repositories will be filtered (e.g., if already installed, etc), so filter the associated repo_info_dicts accordingly.
             filtered_repo_info_dicts = []
+            # Discover all repository dependencies and retrieve information for installing them.
+            repo_info_dicts = self.get_required_repo_info_dicts( tool_shed_url, repo_info_dicts )
             for repo_info_dict in repo_info_dicts:
                 for name, repo_info_tuple in repo_info_dict.items():
-                    # Take care in handling the repo_info_tuple as it evolves over time as new features are introduced.
-                    if len( repo_info_tuple ) == 6:
-                        description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
-                        repository_dependencies = None
-                    elif len( repo_info_tuple ) == 7:
-                        description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
+                    description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+                        suc.get_repo_info_tuple_contents( repo_info_tuple )
                     clone_dir = os.path.join( tool_path, self.generate_tool_path( repository_clone_url, changeset_revision ) )
                     relative_install_dir = os.path.join( clone_dir, name )
                     # Make sure the repository was not already installed.
                                                                                                 owner=repository_owner,
                                                                                                 dist_to_shed=False )
                         created_or_updated_tool_shed_repositories.append( tool_shed_repository )
-                        filtered_repo_info_dicts.append( tool_shed_encode( repo_info_dict ) )
+                        filtered_repo_info_dicts.append( encoding_util.tool_shed_encode( repo_info_dict ) )
             if created_or_updated_tool_shed_repositories:
                 if includes_tools and ( new_tool_panel_section or tool_panel_section ):
                     if new_tool_panel_section:
                 tsrids_list = [ trans.security.encode_id( tsr.id ) for tsr in created_or_updated_tool_shed_repositories ]
                 new_kwd = dict( includes_tools=includes_tools,
                                 includes_repository_dependencies=includes_repository_dependencies,
+                                install_repository_dependencies=install_repository_dependencies,
                                 includes_tool_dependencies=includes_tool_dependencies,
                                 install_tool_dependencies=install_tool_dependencies,
                                 message=message,
                                 tool_panel_section_key=tool_panel_section_key,
                                 tool_shed_repository_ids=tsrids_list,
                                 tool_shed_url=tool_shed_url )
-                encoded_kwd = tool_shed_encode( new_kwd )
+                encoded_kwd = encoding_util.tool_shed_encode( new_kwd )
                 tsrids_str = ','.join( tsrids_list )
                 return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
                                                                   action='initiate_repository_installation',
                                                         repository_metadata=None,
                                                         metadata=metadata,
                                                         repository_dependencies=repository_dependencies )
-            repo_info_dict = tool_shed_encode( repo_info_dict )
+            repo_info_dict = encoding_util.tool_shed_encode( repo_info_dict )
         new_kwd = dict( includes_tool_dependencies=tool_shed_repository.includes_tool_dependencies,
                         includes_tools=tool_shed_repository.includes_tools,
                         install_tool_dependencies=install_tool_dependencies,
                         tool_panel_section_key=tool_panel_section_key,
                         tool_shed_repository_ids=[ repository_id ],
                         tool_shed_url=tool_shed_url )
-        encoded_kwd = tool_shed_encode( new_kwd )
+        encoded_kwd = encoding_util.tool_shed_encode( new_kwd )
         return trans.response.send_redirect( web.url_for( controller='admin_toolshed',
                                                           action='initiate_repository_installation',
                                                           shed_repository_ids=repository_id,
         # Handle case where the repository was previously installed using an older changeset_revision, but later the repository was updated
         # in the tool shed and now we're trying to install the latest changeset revision of the same repository instead of updating the one
         # that was previously installed.  We'll look in the database instead of on disk since the repository may be uninstalled.
-        if len( repo_info_tuple ) == 6:
-            description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, tool_dependencies = repo_info_tuple
-            repository_dependencies = None
-        elif len( repo_info_tuple ) == 7:
-            description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = repo_info_tuple
+        description, repository_clone_url, changeset_revision, ctx_rev, repository_owner, repository_dependencies, tool_dependencies = \
+            suc.get_repo_info_tuple_contents( repo_info_tuple )
         tool_shed = suc.get_tool_shed_from_clone_url( repository_clone_url )
         # Get all previous change set revisions from the tool shed for the repository back to, but excluding, the previous valid changeset
         # revision to see if it was previously installed using one of them.
                                     install_tool_dependencies_check_box=install_tool_dependencies_check_box,
                                     containers_dict=containers_dict,
                                     tool_panel_section_select_field=tool_panel_section_select_field,
-                                    encoded_repo_info_dict=tool_shed_encode( repo_info_dict ),
+                                    encoded_repo_info_dict=encoding_util.tool_shed_encode( repo_info_dict ),
                                     repo_info_dict=repo_info_dict,
                                     message=message,
                                     status=status )
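
The tail end of get_required_repo_info_dicts above is a simple de-duplicating merge: the original repo_info_dicts are kept and each required repo_info_dict returned by the tool shed is appended only if it is not already present. A self-contained sketch of that merge, with the network round trip replaced by already-decoded, hypothetical dictionaries:

    def merge_repo_info_dicts( repo_info_dicts, required_repo_info_dicts ):
        # Mirror of the merge at the end of get_required_repo_info_dicts: keep the
        # originals and append each required dict that is not already present.
        all_repo_info_dicts = [ rid for rid in repo_info_dicts ]
        for required_repo_info_dict in required_repo_info_dicts:
            if required_repo_info_dict not in all_repo_info_dicts:
                all_repo_info_dicts.append( required_repo_info_dict )
        return all_repo_info_dicts

    # Hypothetical decoded repo_info_dicts keyed by repository name.
    primary = [ { 'package_a': ( 'desc', 'http://shed/repos/devteam/package_a', 'abc123', '5', 'devteam', None, {} ) } ]
    required = [ { 'package_b': ( 'desc', 'http://shed/repos/devteam/package_b', 'def456', '2', 'devteam', None, {} ) },
                 { 'package_a': ( 'desc', 'http://shed/repos/devteam/package_a', 'abc123', '5', 'devteam', None, {} ) } ]
    # The duplicate package_a entry is skipped, leaving two dictionaries.
    print len( merge_repo_info_dicts( primary, required ) )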

lib/galaxy/webapps/galaxy/controllers/workflow.py

 from galaxy.util.odict import odict
 from galaxy.util.sanitize_html import sanitize_html
 from galaxy.util.topsort import topsort, topsort_levels, CycleError
-from galaxy.tool_shed.encoding_util import tool_shed_encode, tool_shed_decode
+from galaxy.tool_shed import encoding_util
 from galaxy.workflow.modules import *
 from galaxy import model
 from galaxy import util
         # from a Galaxy tool shed, in which case the value was encoded.
         workflow_name = kwd.get( 'workflow_name', '' )
         if workflow_name:
-            workflow_name = tool_shed_decode( workflow_name )
+            workflow_name = encoding_util.tool_shed_decode( workflow_name )
         # The following parameters will have a value only if the import originated
         # from a tool shed repository installed locally or from the API.
         installed_repository_file = kwd.get( 'installed_repository_file', '' )
         if tool_shed_url and not import_button:
             # Use urllib (send another request to the tool shed) to retrieve the workflow.
             workflow_url = '%s/workflow/import_workflow?repository_metadata_id=%s&workflow_name=%s&open_for_url=true' % \
-                ( tool_shed_url, repository_metadata_id, tool_shed_encode( workflow_name ) )
+                ( tool_shed_url, repository_metadata_id, encoding_util.tool_shed_encode( workflow_name ) )
             response = urllib2.urlopen( workflow_url )
             workflow_text = response.read()
             response.close()
                         # We've received the textual representation of a workflow from a Galaxy tool shed.
                         message = "Workflow <b>%s</b> imported successfully." % workflow.name
                         url = '%s/workflow/view_workflow?repository_metadata_id=%s&workflow_name=%s&message=%s' % \
-                            ( tool_shed_url, repository_metadata_id, tool_shed_encode( workflow_name ), message )
+                            ( tool_shed_url, repository_metadata_id, encoding_util.tool_shed_encode( workflow_name ), message )
                         return trans.response.send_redirect( url )
                     elif installed_repository_file:
                         # The workflow was read from a file included with an installed tool shed repository.