Commits

hillrunner2008 committed 18ac704 Merge

Merged galaxy/galaxy-central into default

Files changed (114)

config/plugins/visualizations/charts/static/app.js

         // append views
         this.$el.append(this.viewer_view.$el);
         this.$el.append(this.editor_view.$el);
-        this.$el.css('height', 'inherit');
         
         // pick start screen
         if (!this.storage.load()) {

config/plugins/visualizations/charts/static/charts/nvd3/config.js

 define([], function() {
 
 return {
-    title   : '',
-    library : 'nvd3.js',
-    element : 'svg',
+    title       : '',
+    library     : 'nvd3.js',
+    element     : 'svg',
     columns : {
         y : {
             title   : 'Values for y-axis'
         }
     },
-    settings  : {
+    settings : {
         separator_label  : {
             title       : 'X axis',
             type        : 'separator'

config/plugins/visualizations/charts/static/charts/nvd3_piechart/config.js

-define(['plugin/charts/nvd3/config'], function(nvd3_config) {
+define([], function() {
 
-return $.extend(true, {}, nvd3_config, {
-    title   : 'Pie chart',
-});
+return {
+    title       : 'Pie chart',
+    library     : 'nvd3.js',
+    element     : 'svg',
+    columns : {
+        label : {
+            title       : 'Labels',
+            any_type    : true
+        },
+        y : {
+            title       : 'Values'
+        }
+    }
+};
 
 });

config/plugins/visualizations/charts/static/charts/nvd3_piechart/nvd3_piechart.js

                 for (var key in group.values) {
                     var value = group.values[key];
                     pie_data.push ({
-                        key : value.x,
-                        y   : value.y
+                        y : value.y,
+                        x : value.label
                     });
                 }
+            }
+            
+            // add graph to screen
+            nv.addGraph(function() {
+                self.chart_3d = nv.models.pieChart()
+                    .donut(true)
+                    .showLegend(false);
                 
-                // add graph to screen
-                nv.addGraph(function() {
-                    self.chart_3d = nv.models.pieChart()
-                        .donut(true)
-                        .showLegend(false);
-                    
-                    self.options.canvas.datum(pie_data)
-                                       .call(self.chart_3d);
+                self.options.canvas.datum(pie_data)
+                                   .call(self.chart_3d);
 
-                    nv.utils.windowResize(self.chart_3d.update);
-                    
-                    // set chart state
-                    chart.state('ok', 'Pie chart drawn.');
-                    
-                    // unregister process
-                    chart.deferred.done(process_id);
-                });
-            }
+                nv.utils.windowResize(self.chart_3d.update);
+                
+                // set chart state
+                chart.state('ok', 'Chart has been drawn.');
+            
+                // unregister process
+                chart.deferred.done(process_id);
+            });
         });
     }
 });

config/plugins/visualizations/charts/static/library/deferred.js

     
     // unregister process
     done: function(id) {
-        // delete tag
-        delete this.process[id];
-        
-        // decrease process counter
-        this.counter--;
-        
-        // log
-        console.debug('Deferred:done() - Unregistering ' + id);
-        
-        // trigger change
-        this.trigger('refresh');
+        if (this.process[id]) {
+            // delete tag
+            delete this.process[id];
+            
+            // decrease process counter
+            this.counter--;
+            
+            // log
+            console.debug('Deferred:done() - Unregistering ' + id);
+            
+            // trigger change
+            this.trigger('refresh');
+        }
     },
     
     // ready

config/plugins/visualizations/charts/static/views/group.js

 // widget
 return Backbone.View.extend(
 {
-    // columns
-    columns: [],
-    
     // initialize
     initialize: function(app, options) {
         // link app
         
         // get dataset
         this.app.datasets.request({id : dataset_id}, function(dataset) {
-            // configure columns
-            self.columns = [];
-            var meta = dataset.metadata_column_types;
-            for (var key in meta) {
-                // check type
-                if(meta[key] == 'int' || meta[key] == 'float') {
-                    // add to selection
-                    self.columns.push({
-                        'label' : 'Column: ' + (parseInt(key) + 1) + ' [' + meta[key] + ']',
-                        'value' : key
-                    });
+            // update select fields
+            for (var id in list) {
+                
+                // determine whether a numeric column type is required
+                var any_type = chart_settings.columns[id].any_type;
+            
+                // configure columns
+                var columns = [];
+                var meta = dataset.metadata_column_types;
+                for (var key in meta) {
+                    // check type
+                    if (meta[key] == 'int' || meta[key] == 'float' || any_type) {
+                        // add to selection
+                        columns.push({
+                            'label' : 'Column: ' + (parseInt(key) + 1) + ' [' + meta[key] + ']',
+                            'value' : key
+                        });
+                    }
                 }
-            }
             
-            // update select fields
-            for (var key in list) {
-                list[key].update(self.columns);
-                list[key].show();
+                // list
+                list[id].update(columns);
+                list[id].show();
             }
             
             // loading

doc/source/lib/galaxy.webapps.galaxy.api.rst~

-api Package
-===========
-
-:mod:`datasets` Module
-----------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.datasets
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`folder_contents` Module
------------------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.folder_contents
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`folders` Module
----------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.folders
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`forms` Module
--------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.forms
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`genomes` Module
----------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.genomes
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`group_roles` Module
--------------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.group_roles
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`group_users` Module
--------------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.group_users
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`groups` Module
---------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.groups
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`histories` Module
------------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.histories
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`history_contents` Module
-------------------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.history_contents
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`libraries` Module
------------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.libraries
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`library_contents` Module
-------------------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.library_contents
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`permissions` Module
--------------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.permissions
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`quotas` Module
---------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.quotas
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`request_types` Module
----------------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.request_types
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`requests` Module
-----------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.requests
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`roles` Module
--------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.roles
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`samples` Module
----------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.samples
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`tools` Module
--------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.tools
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`users` Module
--------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.users
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`visualizations` Module
-----------------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.visualizations
-    :members:
-    :undoc-members:
-    :show-inheritance:
-
-:mod:`workflows` Module
------------------------
-
-.. automodule:: galaxy.webapps.galaxy.api.workflows
-    :members:
-    :undoc-members:
-    :show-inheritance:
-

eggs.ini

 ;
 ; This file is version controlled and should not be edited by hand!
 ; For more information, see:
-;   http://wiki.g2.bx.psu.edu/Admin/Config/Eggs
+;   https://wiki.galaxyproject.org/Admin/Config/Eggs
 ;
 
 [general]

job_conf.xml.sample_advanced

             <!-- Traditionally, the LWR client sends request to LWR
                  server to populate various system properties. This
                  extra step can be disabled and these calculated here
-                 on client by uncommenting job_directory and
+                 on client by uncommenting jobs_directory and
                  specifying any additional remote_property_ of
                  interest. When using message queues this is necessary,
                  not optional.

lib/galaxy/config.py

         self.message_box_visible = kwargs.get( 'message_box_visible', False )
         self.message_box_content = kwargs.get( 'message_box_content', None )
         self.message_box_class = kwargs.get( 'message_box_class', 'info' )
-        self.support_url = kwargs.get( 'support_url', 'http://wiki.g2.bx.psu.edu/Support' )
+        self.support_url = kwargs.get( 'support_url', 'https://wiki.galaxyproject.org/Support' )
         self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.galaxyproject.org/' )
         self.blog_url = kwargs.get( 'blog_url', None )
         self.screencasts_url = kwargs.get( 'screencasts_url', None )

lib/galaxy/exceptions/__init__.py

     status_code = 400
     err_code = error_codes.MALFORMED_ID
 
+class UnknownContentsType( MessageException ):
+    status_code = 400
+    err_code = error_codes.UNKNOWN_CONTENTS_TYPE
+
 class RequestParameterMissingException( MessageException ):
     status_code = 400
     err_code = error_codes.USER_REQUEST_MISSING_PARAMETER

lib/galaxy/exceptions/error_codes.json

     "message": "The id of the resource is malformed."
     },
    {
+    "name": "UNKNOWN_CONTENTS_TYPE",
+    "code": 400010,
+    "message": "The request contains unknown type of contents."
+    },
+   {
     "name": "USER_NO_API_KEY",
     "code": 403001,
     "message": "API authentication required for this request"

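Together with the UnknownContentsType exception added in lib/galaxy/exceptions/__init__.py above, this entry ties HTTP status 400 to error code 400010. A minimal sketch of raising it from a controller helper, assuming nothing beyond the two hunks (the helper name and the 'dataset' check are illustrative):

from galaxy import exceptions

def validate_contents_type( contents_type ):
    # Hypothetical helper: accept only the single contents type this sketch understands.
    if contents_type != 'dataset':
        raise exceptions.UnknownContentsType( 'Unknown contents type: %s' % contents_type )
    return contents_type
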
lib/galaxy/jobs/runners/pbs.py

     'Variable_List'         : '-v',
 }
 
-# From pbs' job.h
+# From pbs' pbs_job.h
 JOB_EXIT_STATUS = {
-    0:  "job exec successful",
+    0: "job exec successful",
     -1: "job exec failed, before files, no retry",
     -2: "job exec failed, after files, no retry",
     -3: "job execution failed, do retry",
     -6: "job aborted on MOM init, chkpt, ok migrate",
     -7: "job restart failed",
     -8: "exec() of user command failed",
-    -11: "job maximum walltime exceeded",  # Added by John, not from job.h.
+    -9: "could not create/open stdout stderr files",
+    -10: "job exceeded a memory limit",
+    -11: "job exceeded a walltime limit",
+    -12: "job exceeded a cpu time limit",
 }
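
For reference, a small sketch of how a job runner could turn one of these PBS exit codes into a readable message; the helper name and the fallback wording are illustrative, only the dictionary entries come from the hunk above.

JOB_EXIT_STATUS = {
    0: "job exec successful",
    -9: "could not create/open stdout stderr files",
    -10: "job exceeded a memory limit",
    -11: "job exceeded a walltime limit",
    -12: "job exceeded a cpu time limit",
}

def describe_pbs_exit_status( exit_status ):
    # Fall back to the raw code for statuses that are not in the map.
    return JOB_EXIT_STATUS.get( exit_status, "unknown PBS exit status %s" % exit_status )

print( describe_pbs_exit_status( -11 ) )  # job exceeded a walltime limit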
 
 

lib/galaxy/model/tool_shed_install/__init__.py

 from galaxy.util.bunch import Bunch
 from galaxy.util import asbool
 from tool_shed.util import common_util
+from urlparse import urljoin
 
 log = logging.getLogger( __name__ )
 
     def can_reinstall_or_activate( self ):
         return self.deleted
 
+    def get_sharable_url( self, app ):
+        tool_shed_url = common_util.get_tool_shed_url_from_tool_shed_registry( app, self.tool_shed )
+        if tool_shed_url:
+            return urljoin( tool_shed_url, 'view/%s/%s' % ( self.owner, self.name ) )
+        return tool_shed_url
+
     def get_shed_config_filename( self ):
         shed_config_filename = None
         if self.metadata:
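
As a quick illustration of the urljoin call in the new get_sharable_url method, here is what it produces for a made-up shed URL and repository (both values are assumptions, not data from this commit):

from urlparse import urljoin  # Python 2 stdlib, matching the import added above

tool_shed_url = 'https://toolshed.g2.bx.psu.edu/'   # hypothetical tool shed registry entry
owner, name = 'some_owner', 'some_repo'             # hypothetical repository coordinates
print( urljoin( tool_shed_url, 'view/%s/%s' % ( owner, name ) ) )
# prints: https://toolshed.g2.bx.psu.edu/view/some_owner/some_repo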

lib/galaxy/tools/errors.py

 -----------------------------------------------------------------------------
 job id: ${job_id}
 tool id: ${job_tool_id}
+tool version: ${tool_version}
 job pid or drm id: ${job_runner_external_id}
+job tool version: ${job_tool_version}
 -----------------------------------------------------------------------------
 job command line:
 ${job_command_line}
                               history_item_name=hda.get_display_name(),
                               history_view_link=history_view_link,
                               job_id=job.id,
+                              tool_version=job.tool_version,
                               job_tool_id=job.tool_id,
+                              job_tool_version=hda.tool_version,
                               job_runner_external_id=job.job_runner_external_id,
                               job_command_line=job.command_line,
                               job_stderr=util.unicodify( job.stderr ),

lib/galaxy/tools/parameters/basic.py

                 if option[2]:
                     # Found selected option.
                     value = option[1]
-            d[ 'value' ] = options
+            d[ 'value' ] = value
 
         return d
 

lib/galaxy/util/biostar.py

 def populate_tool_payload( payload=None, tool=None ):
     payload = populate_tag_payload( payload=payload, tool=tool )
     payload[ 'title' ] = 'Need help with "%s" tool' % ( tool.name )
-    payload[ 'content' ] = '<br /><hr /><p>Tool name: %s</br>Tool version: %s</br>Tool ID: %s</p>' % ( tool.name, tool.version, tool.id )
+    tool_url = None
+    if tool.tool_shed_repository:
+        tool_url = tool.tool_shed_repository.get_sharable_url( tool.app )
+        if tool_url:
+            tool_url = '</br>ToolShed URL: <a href="%s">%s</a>' % ( tool_url, tool_url )
+    if not tool_url:
+        tool_url = ''
+    payload[ 'content' ] = '<br /><hr /><p>Tool name: %s</br>Tool version: %s</br>Tool ID: %s%s</p></br>' % ( tool.name, tool.version, tool.id, tool_url )
     return payload
 
 def determine_cookie_domain( galaxy_hostname, biostar_hostname ):

lib/galaxy/webapps/demo_sequencer/config.py

         self.smtp_server = kwargs.get( 'smtp_server', None )
         self.log_actions = string_as_bool( kwargs.get( 'log_actions', 'False' ) )
         self.brand = kwargs.get( 'brand', None )
-        self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' )
+        self.wiki_url = kwargs.get( 'wiki_url', 'https://wiki.galaxyproject.org/' )
         self.blog_url = kwargs.get( 'blog_url', None )
         self.screencasts_url = kwargs.get( 'screencasts_url', None )
         self.log_events = False

lib/galaxy/webapps/galaxy/api/folder_contents.py

 """
-API operations on the contents of a folder.
+API operations on the contents of a library folder.
 """
-import logging, os, string, shutil, urllib, re, socket
-from cgi import escape, FieldStorage
-from galaxy import util, datatypes, jobs, web, util
-from galaxy.web.base.controller import *
-from galaxy.util.sanitize_html import sanitize_html
-from galaxy.model.orm import *
+from galaxy import web
+from galaxy import exceptions
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
+from sqlalchemy.orm.exc import MultipleResultsFound
+from sqlalchemy.orm.exc import NoResultFound
+from galaxy.web.base.controller import BaseAPIController, UsesLibraryMixin, UsesLibraryMixinItems
 
+import logging
 log = logging.getLogger( __name__ )
 
 class FolderContentsController( BaseAPIController, UsesLibraryMixin, UsesLibraryMixinItems ):
     Class controls retrieval, creation and updating of folder contents.
     """
 
-    def load_folder_contents( self, trans, folder ):
+    @expose_api_anonymous
+    def index( self, trans, folder_id, **kwd ):
+        """
+        GET /api/folders/{encoded_folder_id}/contents
+        Displays a collection (list) of a folder's contents (files and folders).
+        The encoded folder ID is prefixed with 'F' to distinguish folders from datasets, which have no prefix.
+        The full path is returned in the response as a separate object that provides the data for building the breadcrumb path.
+        """
+
+        if ( len( folder_id ) == 17 and folder_id.startswith( 'F' ) ):
+            try:
+                decoded_folder_id = trans.security.decode_id( folder_id[ 1: ] )
+            except TypeError:
+                raise exceptions.MalformedId( 'Malformed folder id ( %s ) specified, unable to decode.' % str( folder_id ) )
+        else:
+            raise exceptions.MalformedId( 'Malformed folder id ( %s ) specified, unable to decode.' % str( folder_id ) )
+
+        try:
+            folder = trans.sa_session.query( trans.app.model.LibraryFolder ).filter( trans.app.model.LibraryFolder.table.c.id == decoded_folder_id ).one()
+        except MultipleResultsFound:
+            raise exceptions.InconsistentDatabase( 'Multiple folders with same id found.' )
+        except NoResultFound:
+            raise exceptions.ObjectNotFound( 'Folder with the id provided ( %s ) was not found' % str( folder_id ) )
+        except Exception:
+            raise exceptions.InternalServerError( 'Error loading from the database.' )
+
+        current_user_roles = trans.get_current_user_roles()
+        can_add_library_item = trans.user_is_admin() or trans.app.security_agent.can_add_library_item( current_user_roles, folder )
+
+        if not ( trans.user_is_admin() or trans.app.security_agent.can_access_library_item( current_user_roles, folder, trans.user ) ):
+            if folder.parent_id == None:
+                try:
+                    library = trans.sa_session.query( trans.app.model.Library ).filter( trans.app.model.Library.table.c.root_folder_id == decoded_folder_id ).one()
+                except Exception:
+                    raise exceptions.InternalServerError( 'Error loading from the database.' )
+                if trans.app.security_agent.library_is_public( library, contents=False ):
+                    pass
+                else:
+                    if trans.user:
+                        log.warning( "SECURITY: User (id: %s) without proper access rights is trying to load folder with ID of %s" % ( trans.user.id, decoded_folder_id ) )
+                    else:
+                        log.warning( "SECURITY: Anonymous user without proper access rights is trying to load folder with ID of %s" % ( decoded_folder_id ) )
+                    raise exceptions.ObjectNotFound( 'Folder with the id provided ( %s ) was not found' % str( folder_id ) ) 
+            else:
+                if trans.user:
+                    log.warning( "SECURITY: User (id: %s) without proper access rights is trying to load folder with ID of %s" % ( trans.user.id, decoded_folder_id ) )
+                else:
+                    log.warning( "SECURITY: Anonymous user without proper access rights is trying to load folder with ID of %s" % ( decoded_folder_id ) )
+                raise exceptions.ObjectNotFound( 'Folder with the id provided ( %s ) was not found' % str( folder_id ) )
+        
+        def build_path( folder ):
+            """
+            Search the path upwards recursively and load the whole route of names and ids for breadcrumb building purposes.
+            """
+            path_to_root = []
+            # We have reached the root folder of the library.
+            if folder.parent_id is None:
+                path_to_root.append( ( 'F' + trans.security.encode_id( folder.id ), folder.name ) )
+            else:
+                # Add the current folder and traverse up one level.
+                path_to_root.append( ( 'F' + trans.security.encode_id( folder.id ), folder.name ) )
+                upper_folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( folder.parent_id )
+                path_to_root.extend( build_path( upper_folder ) )
+            return path_to_root
+            
+        # Return the reversed path so it starts with the library node.
+        full_path = build_path( folder )[::-1]
+
+        folder_contents = []
+        time_updated = ''
+        time_created = ''
+        # Go through every accessible item in the folder and include its meta-data.
+        for content_item in self._load_folder_contents( trans, folder ):
+            if trans.app.security_agent.can_access_library_item( current_user_roles, content_item, trans.user ):
+                return_item = {}
+                encoded_id = trans.security.encode_id( content_item.id )
+                time_updated = content_item.update_time.strftime( "%Y-%m-%d %I:%M %p" )
+                time_created = content_item.create_time.strftime( "%Y-%m-%d %I:%M %p" )
+
+                # For folder return also hierarchy values
+                if content_item.api_type == 'folder':
+                    encoded_id = 'F' + encoded_id
+                    return_item.update ( dict ( item_count = content_item.item_count ) )
+
+                if content_item.api_type == 'file':
+                    library_dataset_dict = content_item.to_dict()
+                    library_dataset_dict['data_type']
+                    library_dataset_dict['file_size']
+                    library_dataset_dict['date_uploaded']
+                    return_item.update ( dict ( data_type = library_dataset_dict['data_type'],
+                                                file_size = library_dataset_dict['file_size'],
+                                                date_uploaded = library_dataset_dict['date_uploaded'] ) )
+
+                # For every item return also the default meta-data
+                return_item.update( dict( id = encoded_id,
+                                   type = content_item.api_type,
+                                   name = content_item.name,
+                                   time_updated = time_updated,
+                                   time_created = time_created
+                                    ) )
+                folder_contents.append( return_item )
+
+        return { 'metadata' : { 'full_path' : full_path, 'can_add_library_item': can_add_library_item }, 'folder_contents' : folder_contents }
+
+    def _load_folder_contents( self, trans, folder ):
         """
         Loads all contents of the folder (folders and data sets) but only in the first level.
         """
         return content_items
 
     @web.expose_api
-    def index( self, trans, folder_id, **kwd ):
-        """
-        GET /api/folders/{encoded_folder_id}/contents
-        Displays a collection (list) of a folder's contents (files and folders).
-        Encoded folder ID is prepended with 'F' if it is a folder as opposed to a data set which does not have it.
-        Full path is provided as a separate object in response providing data for breadcrumb path building.
-        """
-        folder_container = []
-        current_user_roles = trans.get_current_user_roles()
-
-        if ( folder_id.startswith( 'F' ) ):
-            try:
-                decoded_folder_id = trans.security.decode_id( folder_id[1:] )
-            except TypeError:
-                trans.response.status = 400
-                return "Malformed folder id ( %s ) specified, unable to decode." % str( folder_id )
-
-        try:
-            folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( decoded_folder_id )
-        except:
-            folder = None
-            log.error( "FolderContentsController.index: Unable to retrieve folder with ID: %s" % folder_id )
-
-        # We didn't find the folder or user does not have an access to it.
-        if not folder:
-            trans.response.status = 400
-            return "Invalid folder id ( %s ) specified." % str( folder_id )
-        
-        if not ( trans.user_is_admin() or trans.app.security_agent.can_access_library_item( current_user_roles, folder, trans.user ) ):
-            log.warning( "SECURITY: User (id: %s) without proper access rights is trying to load folder with ID of %s" % ( trans.user.id, folder.id ) )
-            trans.response.status = 400
-            return "Invalid folder id ( %s ) specified." % str( folder_id )
-        
-        path_to_root = []
-        def build_path ( folder ):
-            """
-            Search the path upwards recursively and load the whole route of names and ids for breadcrumb purposes.
-            """
-            path_to_root = []
-            # We are almost in root
-            if folder.parent_id is None:
-                path_to_root.append( ( 'F' + trans.security.encode_id( folder.id ), folder.name ) )
-            else:
-            # We add the current folder and traverse up one folder.
-                path_to_root.append( ( 'F' + trans.security.encode_id( folder.id ), folder.name ) )
-                upper_folder = trans.sa_session.query( trans.app.model.LibraryFolder ).get( folder.parent_id )
-                path_to_root.extend( build_path( upper_folder ) )
-            return path_to_root
-            
-        # Return the reversed path so it starts with the library node.
-        full_path = build_path( folder )[::-1]
-        folder_container.append( dict( full_path = full_path ) )
-        
-        folder_contents = []
-        time_updated = ''
-        time_created = ''
-        # Go through every item in the folder and include its meta-data.
-        for content_item in self.load_folder_contents( trans, folder ):
-            return_item = {}
-            encoded_id = trans.security.encode_id( content_item.id )
-            time_updated = content_item.update_time.strftime( "%Y-%m-%d %I:%M %p" )
-            time_created = content_item.create_time.strftime( "%Y-%m-%d %I:%M %p" )
-            
-            # For folder return also hierarchy values
-            if content_item.api_type == 'folder':
-                encoded_id = 'F' + encoded_id
-                return_item.update ( dict ( item_count = content_item.item_count ) )
-
-            if content_item.api_type == 'file':
-                library_dataset_dict = content_item.to_dict()
-                library_dataset_dict['data_type']
-                library_dataset_dict['file_size']
-                library_dataset_dict['date_uploaded']
-                return_item.update ( dict ( data_type = library_dataset_dict['data_type'],
-                                            file_size = library_dataset_dict['file_size'],
-                                            date_uploaded = library_dataset_dict['date_uploaded'] ) )
-
-            # For every item return also the default meta-data
-            return_item.update( dict( id = encoded_id,
-                               type = content_item.api_type,
-                               name = content_item.name,
-                               time_updated = time_updated,
-                               time_created = time_created
-                                ) )
-            folder_contents.append( return_item )
-        # Put the data in the container
-        folder_container.append( dict( folder_contents = folder_contents ) )
-        return folder_container
-
-    @web.expose_api
     def show( self, trans, id, library_id, **kwd ):
         """
         GET /api/folders/{encoded_folder_id}/
         """
-        pass
+        raise exceptions.NotImplemented( 'Showing the library folder content is not implemented.' )
 
     @web.expose_api
     def create( self, trans, library_id, payload, **kwd ):
         Creates a new folder. This should be superseded by the
         LibraryController.
         """
-        pass
+        raise exceptions.NotImplemented( 'Creating the library folder content is not implemented.' )
 
     @web.expose_api
     def update( self, trans, id,  library_id, payload, **kwd ):
         """
         PUT /api/folders/{encoded_folder_id}/contents
         """
-        pass
-
-    # TODO: Move to library_common.
-    def __decode_library_content_id( self, trans, content_id ):
-        if ( len( content_id ) % 16 == 0 ):
-            return 'LibraryDataset', content_id
-        elif ( content_id.startswith( 'F' ) ):
-            return 'LibraryFolder', content_id[1:]
-        else:
-            raise HTTPBadRequest( 'Malformed library content id ( %s ) specified, unable to decode.' % str( content_id ) )
+        raise exceptions.NotImplemented( 'Updating the library folder content is not implemented.' )
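
To show how a client might consume the new index endpoint, here is a small usage sketch. The 'F'-prefixed folder id and the 'metadata'/'folder_contents' response keys come from the controller above; the base URL, the API key parameter, and the use of the requests library are assumptions for illustration only.

import requests  # illustrative HTTP client, not part of this changeset

GALAXY_URL = 'http://localhost:8080'      # assumed local Galaxy instance
API_KEY = 'changeme'                      # assumed user API key (the endpoint also allows anonymous access)
FOLDER_ID = 'Fabcdef0123456789'           # 'F' + 16-character encoded folder id

response = requests.get(
    '%s/api/folders/%s/contents' % ( GALAXY_URL, FOLDER_ID ),
    params={ 'key': API_KEY },
)
data = response.json()

# 'full_path' is a list of ( encoded_id, name ) pairs starting at the library root.
for encoded_id, name in data[ 'metadata' ][ 'full_path' ]:
    print( 'breadcrumb: %s (%s)' % ( name, encoded_id ) )

# Every item carries at least id, type, name and the two timestamps.
for item in data[ 'folder_contents' ]:
    print( '%s  %s  %s' % ( item[ 'type' ], item[ 'id' ], item[ 'name' ] ) )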

lib/galaxy/webapps/galaxy/api/folders.py

 """
-API operations on folders
+API operations on library folders
 """
-import logging, os, string, shutil, urllib, re, socket, traceback
+import os, string, shutil, urllib, re, socket, traceback
 from galaxy import datatypes, jobs, web, security
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
 from galaxy.web.base.controller import BaseAPIController,UsesLibraryMixin,UsesLibraryMixinItems
 from galaxy.util.sanitize_html import sanitize_html
 
 from cgi import escape, FieldStorage
 from paste.httpexceptions import HTTPBadRequest
 
+import logging
 log = logging.getLogger( __name__ )
 
 class FoldersController( BaseAPIController, UsesLibraryMixin, UsesLibraryMixinItems ):
         """
         GET /api/folders/
         This would normally display a list of folders. However, that would
-        be across multiple libraries, so it's not implemented yet.
+        be across multiple libraries, so it's not implemented.
         """
-        pass
+        raise exceptions.NotImplemented( 'Listing all accessible library folders is not implemented.' )
 
     @web.expose_api
     def show( self, trans, id, **kwd ):

lib/galaxy/webapps/galaxy/api/genomes.py

-from galaxy import config, tools, web, util
-from galaxy.web.base.controller import BaseController, BaseAPIController
-from galaxy.util.bunch import Bunch
+from galaxy import web, util
+from galaxy.web.base.controller import BaseAPIController
 from galaxy.web.framework.helpers import is_true
+from galaxy.webapps.galaxy.controllers.data_admin import build_param_dict as massage
 
 def get_id( base, format ):
     if format:
         #                    'download'  Download and index
         #                    'index'     Index only
         params = util.Params( payload )
-        from galaxy.web.controllers.data_admin import build_param_dict as massage
         paramdict = massage( params, trans )
         func = params.get( 'func', 'download' )
         if func == 'download':

lib/galaxy/webapps/galaxy/api/group_users.py

                                    email = user.email,
                                    url = url_for( 'group_user', group_id=group_id, id=user_id) ) # TODO Fix This
             if not item:
-                item = "user %s not in group %s" % (user.email,group.name)
+                item = "user %s not in group %s" % (user.email, group.name)
         except Exception, e:
             item = "Error in group_user API group %s user %s" % (group.name, user.email)
             log.error(item + ": %s" % str(e))
                              email = user.email,
                              url = url_for( 'group_user', group_id=group_id, id=user_id) )
         except Exception, e:
-            item = "Error in group_user API Adding user %s to group %s" % (user.email,group.name)
+            item = "Error in group_user API Adding user %s to group %s" % (user.email, group.name)
             log.error(item + ": %s" % str(e))
         return item
 
                                  email = user.email,
                                  url = url_for( 'group_user', group_id=group_id, id=user_id) )
             if not item:
-                item = "user %s not in group %s" % (user.email,group.name)
+                item = "user %s not in group %s" % (user.email, group.name)
         except Exception, e:
-            item = "Error in group_user API Removing user %s from group %s" % (user.email,group.name)
+            item = "Error in group_user API Removing user %s from group %s" % (user.email, group.name)
             log.error(item + ": %s" % str(e))
         return item

lib/galaxy/webapps/galaxy/api/histories.py

 pkg_resources.require( "Paste" )
 
 from galaxy import exceptions
-from galaxy import web
 from galaxy.web import _future_expose_api as expose_api
 from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
 from galaxy.web import _future_expose_api_raw as expose_api_raw

lib/galaxy/webapps/galaxy/api/history_contents.py

 
 from galaxy import exceptions
 from galaxy import util
-from galaxy import web
 
 from galaxy.web import _future_expose_api as expose_api
 from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
 from galaxy.web.base.controller import UsesTagsMixin
 
 from galaxy.web.base.controller import url_for
-from galaxy.util.sanitize_html import sanitize_html
 
 from galaxy.webapps.galaxy.api import histories
 
         hda_dict[ 'display_apps' ] = self.get_display_apps( trans, hda )
         return hda_dict
 
-    #TODO: allow anon users to copy hdas, ldas
-    @expose_api
+    @expose_api_anonymous
     def create( self, trans, history_id, payload, **kwd ):
         """
         create( self, trans, history_id, payload, **kwd )
         return validated_payload
 
     def __handle_unknown_contents_type( self, trans, contents_type ):
-        # TODO: raise a message exception instead of setting status and returning dict.
-        trans.response.status = 400
-        return { 'error': 'Unknown contents type %s' % type }
-
+        raise exceptions.UnknownContentsType( 'Unknown contents type: %s' % contents_type )
 
 class HDAManager( object ):
 

lib/galaxy/webapps/galaxy/api/lda_datasets.py

 import urllib
 import urllib2
 import zipfile
-from paste.httpexceptions import HTTPBadRequest
-from galaxy import util, web
-from galaxy.exceptions import ItemAccessibilityException, MessageException, ItemDeletionException, ObjectNotFound
+from galaxy import exceptions
+from galaxy.web import _future_expose_api as expose_api
+from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
+# from paste.httpexceptions import HTTPBadRequest
+from galaxy import util
+from galaxy import web
+# from galaxy.exceptions import ItemAccessibilityException, MessageException, ItemDeletionException, ObjectNotFound
 from galaxy.security import Action
 from galaxy.util.streamball import StreamBall
 from galaxy.web.base.controller import BaseAPIController, UsesVisualizationMixin
 
 class LibraryDatasetsController( BaseAPIController, UsesVisualizationMixin ):
 
-    @web.expose_api
+    @expose_api_anonymous
     def show( self, trans, id, **kwd ):
         """
         show( self, trans, id, **kwd )
         try:
             dataset = self.get_library_dataset( trans, id = id, check_ownership=False, check_accessible=True )
         except Exception, e:
-            trans.response.status = 500
-            return str( e )
-        try:
-            rval = dataset.to_dict()
-        except Exception, e:
-            rval = "Error in dataset API at listing contents: " + str( e )
-            log.error( rval + ": %s" % str(e), exc_info=True )
-            trans.response.status = 500
-            return "Error in dataset API at listing contents: " + str( e )
+            raise exceptions.ObjectNotFound( 'Requested dataset was not found.' )
+            # trans.response.status = 500
+            # return str( e )
+        # try:
+        rval = dataset.to_dict()
+        # except Exception, e:
+        #     rval = "Error in dataset API at listing contents: " + str( e )
+        #     log.error( rval + ": %s" % str(e), exc_info=True )
+        #     trans.response.status = 500
+        #     return "Error in dataset API at listing contents: " + str( e )
 
         rval['id'] = trans.security.encode_id(rval['id']);
         rval['ldda_id'] = trans.security.encode_id(rval['ldda_id']);
-        rval['folder_id'] = 'f' + trans.security.encode_id(rval['folder_id'])
+        rval['folder_id'] = 'F' + trans.security.encode_id(rval['folder_id'])
         return rval
 
     @web.expose

lib/galaxy/webapps/galaxy/api/libraries.py

 from galaxy.web import _future_expose_api as expose_api
 from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous
 from galaxy.model.orm import and_, not_, or_
-from galaxy.web.base.controller import BaseAPIController, url_for
+from galaxy.web.base.controller import BaseAPIController
 
 import logging
 log = logging.getLogger( __name__ )
             if trans.app.security_agent.library_is_public( library, contents=False ):
                 item[ 'public' ] = True
             current_user_roles = trans.get_current_user_roles()
-            # can_user_add = trans.app.security_agent.can_add_library_item( current_user_roles, library.root_folder )
             if not trans.user_is_admin():
                 item['can_user_add'] = trans.app.security_agent.can_add_library_item( current_user_roles, library )
                 item['can_user_modify'] = trans.app.security_agent.can_modify_library_item( current_user_roles, library )
         deleted = util.string_as_bool( deleted )
         try:
             decoded_library_id = trans.security.decode_id( library_id )
-        except Exception:
+        except TypeError:
             raise exceptions.MalformedId( 'Malformed library id ( %s ) specified, unable to decode.' % id )
         try:
             library = trans.sa_session.query( trans.app.model.Library ).get( decoded_library_id )
         library.root_folder = root_folder
         trans.sa_session.add_all( ( library, root_folder ) )
         trans.sa_session.flush()
-        return library.to_dict( view='element', value_mapper={ 'id' : trans.security.encode_id , 'root_folder_id' : trans.security.encode_id } )
+
+        item = library.to_dict( view='element', value_mapper={ 'id' : trans.security.encode_id , 'root_folder_id' : trans.security.encode_id } )
+        item['can_user_add'] = True
+        item['can_user_modify'] = True
+        item['can_user_manage'] = True
+        if trans.app.security_agent.library_is_public( library, contents=False ):
+            item[ 'public' ] = True
+        return item
 
     @expose_api
     def update( self, trans, id, **kwd ):

lib/galaxy/webapps/galaxy/api/library_contents.py

 log = logging.getLogger( __name__ )
 
 
-class LibraryContentsController( BaseAPIController, UsesLibraryMixin, UsesLibraryMixinItems,
-                                 UsesHistoryDatasetAssociationMixin ):
+class LibraryContentsController( BaseAPIController, UsesLibraryMixin, UsesLibraryMixinItems, UsesHistoryDatasetAssociationMixin ):
 
     @expose_api
     def index( self, trans, library_id, **kwd ):

lib/galaxy/webapps/galaxy/api/metrics.py

 import datetime
 
 from galaxy import exceptions
-from galaxy import web
 from galaxy.web import require_admin
 from galaxy.web import _future_expose_api as expose_api
 from galaxy.web import _future_expose_api_anonymous as expose_api_anonymous

lib/galaxy/webapps/galaxy/api/tools.py

     # -- Helper methods --
     #
 
-    def _run_tool( self, trans, tool_id, target_dataset_id, **kwargs ):
-        """
-        Run a tool. This method serves as a general purpose way to run tools asynchronously.
-        """
-
-        #
-        # Set target history (the history that tool will use for outputs) using
-        # target dataset. If user owns dataset, put new data in original
-        # dataset's history; if user does not own dataset (and hence is accessing
-        # dataset via sharing), put new data in user's current history.
-        #
-        target_dataset = self.get_dataset( trans, target_dataset_id, check_ownership=False, check_accessible=True )
-        if target_dataset.history.user == trans.user:
-            target_history = target_dataset.history
-        else:
-            target_history = trans.get_history( create=True )
-
-        # HACK: tools require unencoded parameters but kwargs are typically
-        # encoded, so try decoding all parameter values.
-        for key, value in kwargs.items():
-            try:
-                value = trans.security.decode_id( value )
-                kwargs[ key ] = value
-            except:
-                pass
-
-        #
-        # Execute tool.
-        #
-        tool = trans.app.toolbox.get_tool( tool_id )
-        if not tool:
-            return trans.app.model.Dataset.conversion_messages.NO_TOOL
-
-        # HACK: add run button so that tool.handle_input will run tool.
-        kwargs['runtool_btn'] = 'Execute'
-        params = util.Params( kwargs, sanitize=False )
-        template, vars = tool.handle_input( trans, params.__dict__, history=target_history )
-
-        # TODO: check for errors and ensure that output dataset is available.
-        output_datasets = vars[ 'out_data' ].values()
-        return self.add_track_async( trans, output_datasets[0].id )
-
     def _rerun_tool( self, trans, payload, **kwargs ):
         """
         Rerun a tool to produce a new output dataset that corresponds to a
                 else:
                     # Recursive search.
                     return_val = False
-                    for name, value in param_dict.items():
+                    for value in param_dict.values():
                         if isinstance( value, dict ):
                             return_val = set_value( value, group_name, group_index, param_name, param_value)
                             if return_val:
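
The loop above searches recursively through nested parameter dictionaries for the group that holds the value to change; a standalone sketch of the same traversal pattern (the function name and the sample data are illustrative):

def find_nested_value( param_dict, target_key ):
    # Depth-first search through nested dicts for a parameter name.
    if target_key in param_dict:
        return param_dict[ target_key ]
    for value in param_dict.values():
        if isinstance( value, dict ):
            found = find_nested_value( value, target_key )
            if found is not None:
                return found
    return None

params = { 'input': { 'options': { 'threshold': 5 } } }
print( find_nested_value( params, 'threshold' ) )  # 5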

lib/galaxy/webapps/galaxy/buildapp.py

     # ====================
 
     # Handle displaying tool help images and README file images contained in repositories installed from the tool shed.
-    webapp.add_route( '/admin_toolshed/static/images/:repository_id/:image_file',
+    webapp.add_route( '/admin_toolshed/static/images/:repository_id/{image_file:.+?}',
                       controller='admin_toolshed',
                       action='display_image_in_repository',
                       repository_id=None,

lib/galaxy/webapps/galaxy/controllers/admin_toolshed.py

 from tool_shed.util import data_manager_util
 from tool_shed.util import datatype_util
 from tool_shed.util import encoding_util
+from tool_shed.util import hg_util
 from tool_shed.util import metadata_util
 from tool_shed.util import readme_util
 from tool_shed.util import repository_dependency_util
             message += 'attribute value which is a directory relative to the Galaxy installation directory in order '
             message += 'to automatically install tools from a Galaxy Tool Shed (e.g., the file name <b>shed_tool_conf.xml</b> '
             message += 'whose <b>&lt;toolbox&gt;</b> tag is <b>&lt;toolbox tool_path="../shed_tools"&gt;</b>).<p/>See the '
-            message += '<a href="http://wiki.g2.bx.psu.edu/InstallingRepositoriesToGalaxy" target="_blank">Installation '
+            message += '<a href="https://wiki.galaxyproject.org/InstallingRepositoriesToGalaxy" target="_blank">Installation '
             message += 'of Galaxy Tool Shed repository tools into a local Galaxy instance</a> section of the Galaxy Tool '
             message += 'Shed wiki for all of the details.'
             return trans.show_error_message( message )
                         repo_files_dir = os.path.abspath( os.path.join( tool_path, relative_install_dir, name ) )
                     else:
                         repo_files_dir = os.path.abspath( os.path.join( relative_install_dir, name ) )
-                    repo = hg.repository( suc.get_configured_ui(), path=repo_files_dir )
+                    repo = hg.repository( hg_util.get_configured_ui(), path=repo_files_dir )
                     repository_clone_url = os.path.join( tool_shed_url, 'repos', owner, name )
                     repository_util.pull_repository( repo, repository_clone_url, latest_ctx_rev )
                     suc.update_repository( repo, latest_ctx_rev )

lib/galaxy/webapps/reports/config.py

         self.message_box_visible = kwargs.get( 'message_box_visible', False )
         self.message_box_content = kwargs.get( 'message_box_content', None )
         self.message_box_class = kwargs.get( 'message_box_class', 'info' )
-        self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' )
+        self.wiki_url = kwargs.get( 'wiki_url', 'https://wiki.galaxyproject.org/' )
         self.blog_url = kwargs.get( 'blog_url', None )
         self.screencasts_url = kwargs.get( 'screencasts_url', None )
         self.log_events = False

lib/galaxy/webapps/tool_shed/api/repositories.py

 import tool_shed.util.shed_util_common as suc
 from tool_shed.galaxy_install import repository_util
 from tool_shed.util import encoding_util
+from tool_shed.util import hg_util
 from tool_shed.util import import_util
 from tool_shed.util import metadata_util
 from tool_shed.util import repository_maintenance_util
                 log.debug( error_message )
                 return []
             repo_dir = repository.repo_path( trans.app )
-            repo = hg.repository( suc.get_configured_ui(), repo_dir )
+            repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
             ordered_installable_revisions = suc.get_ordered_metadata_changeset_revisions( repository, repo, downloadable=True )
             return ordered_installable_revisions
         else:
                 # The changeset_revision column in the repository_metadata table has been updated with a new
                 # value, so find the changeset_revision to which we need to update.
                 repo_dir = repository.repo_path( trans.app )
-                repo = hg.repository( suc.get_configured_ui(), repo_dir )
+                repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
                 new_changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
                 repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
                                                                                          encoded_repository_id,
     @web.expose_api
     def repository_ids_for_setting_metadata( self, trans, my_writable=False, **kwd ):
         """
-        GET /api/get_repository_ids_for_setting_metadata
+        GET /api/repository_ids_for_setting_metadata
 
         Displays a collection (list) of repository ids ordered for setting metadata.
 

lib/galaxy/webapps/tool_shed/api/repository_revisions.py

 from galaxy.model.orm import and_, not_, select
 from galaxy.web.base.controller import BaseAPIController, HTTPBadRequest
 from tool_shed.util import export_util
+from tool_shed.util import hg_util
 import tool_shed.util.shed_util_common as suc
 
 from galaxy import eggs
         Creates and saves a gzip-compressed tar archive of a repository and optionally all of its repository dependencies.
 
         The following parameters are included in the payload.
-        :param tool_shed_url (required): the base URL of the Tool Shed from which the Repository was installed
+        :param tool_shed_url (required): the base URL of the Tool Shed from which the Repository is to be exported
         :param name (required): the name of the Repository
         :param owner (required): the owner of the Repository
         :param changeset_revision (required): the changeset_revision of the RepositoryMetadata object associated with the Repository
                     # The changeset_revision column in the repository_metadata table has been updated with a new
                     # value, so find the changeset_revision to which we need to update.
                     repo_dir = repository_dependency.repo_path( trans.app )
-                    repo = hg.repository( suc.get_configured_ui(), repo_dir )
+                    repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
                     new_changeset_revision = suc.get_next_downloadable_changeset_revision( repository_dependency,
                                                                                            repo,
                                                                                            changeset_revision )

lib/galaxy/webapps/tool_shed/config.py

         self.message_box_visible = kwargs.get( 'message_box_visible', False )
         self.message_box_content = kwargs.get( 'message_box_content', None )
         self.message_box_class = kwargs.get( 'message_box_class', 'info' )
-        self.support_url = kwargs.get( 'support_url', 'http://wiki.g2.bx.psu.edu/Support' )
-        self.wiki_url = kwargs.get( 'wiki_url', 'http://wiki.g2.bx.psu.edu/FrontPage' )
+        self.support_url = kwargs.get( 'support_url', 'https://wiki.galaxyproject.org/Support' )
+        self.wiki_url = kwargs.get( 'wiki_url', 'https://wiki.galaxyproject.org/' )
         self.blog_url = kwargs.get( 'blog_url', None )
         self.biostar_url = kwargs.get( 'biostar_url', None )
         self.screencasts_url = kwargs.get( 'screencasts_url', None )

lib/galaxy/webapps/tool_shed/controllers/repository.py

 from tool_shed.util import container_util
 from tool_shed.util import encoding_util
 from tool_shed.util import export_util
+from tool_shed.util import hg_util
 from tool_shed.util import import_util
 from tool_shed.util import metadata_util
 from tool_shed.util import readme_util
         status = kwd.get( 'status', 'done' )
         commit_message = kwd.get( 'commit_message', 'Deleted selected files' )
         repository = suc.get_repository_in_tool_shed( trans, id )
-        repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+        repo = hg.repository( hg_util.get_configured_ui(), repository.repo_path( trans.app ) )
         # Update repository files for browsing.
         suc.update_repository( repo )
         changeset_revision = repository.tip( trans.app )
         changeset_revision = kwd.get( 'changeset_revision', None )
         repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
         repo_dir = repository.repo_path( trans.app )
-        repo = hg.repository( suc.get_configured_ui(), repo_dir )
+        repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
         # Default to the current changeset revision.
-        update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
+        update_to_ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision )
         latest_changeset_revision = changeset_revision
         from_update_manager = kwd.get( 'from_update_manager', False )
         if from_update_manager:
                 update_to_changeset_hash = None
                 for changeset in repo.changelog:
                     changeset_hash = str( repo.changectx( changeset ) )
-                    ctx = suc.get_changectx_for_changeset( repo, changeset_hash )
+                    ctx = hg_util.get_changectx_for_changeset( repo, changeset_hash )
                     if update_to_changeset_hash:
                         if changeset_hash == repository.tip( trans.app ):
-                            update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_hash )
+                            update_to_ctx = hg_util.get_changectx_for_changeset( repo, changeset_hash )
                             latest_changeset_revision = changeset_hash
                             break
                         else:
                                                                                                      changeset_hash )
                             if repository_metadata:
                                 # We found a RepositoryMetadata record.
-                                update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_hash )
+                                update_to_ctx = hg_util.get_changectx_for_changeset( repo, changeset_hash )
                                 latest_changeset_revision = changeset_hash
                                 break
                             else:
         else:
             containers_dict = None
             export_repository_dependencies_check_box = None
-        revision_label = suc.get_revision_label( trans, repository, changeset_revision, include_date=True )
+        revision_label = hg_util.get_revision_label( trans, repository, changeset_revision, include_date=True )
         return trans.fill_template( "/webapps/tool_shed/repository/export_repository.mako",
                                     changeset_revision=changeset_revision,
                                     containers_dict=containers_dict,
         has_repository_dependencies_only_if_compiling_contained_td = has_galaxy_utilities_dict[ 'has_repository_dependencies_only_if_compiling_contained_td' ]
         includes_workflows = has_galaxy_utilities_dict[ 'includes_workflows' ]
         repo_dir = repository.repo_path( trans.app )
-        repo = hg.repository( suc.get_configured_ui(), repo_dir )
+        repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
         # Default to the received changeset revision and ctx_rev.
-        update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
+        update_to_ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision )
         ctx_rev = str( update_to_ctx.rev() )
         latest_changeset_revision = changeset_revision
         update_dict = dict( changeset_revision=changeset_revision,
                     has_repository_dependencies = False
                     has_repository_dependencies_only_if_compiling_contained_td = False
                     changeset_hash = str( repo.changectx( changeset ) )
-                    ctx = suc.get_changectx_for_changeset( repo, changeset_hash )
+                    ctx = hg_util.get_changectx_for_changeset( repo, changeset_hash )
                     if update_to_changeset_hash:
                         update_to_repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans,
                                                                                                            trans.security.encode_id( repository.id ),
                             # We found a RepositoryMetadata record.
                             if changeset_hash == repository.tip( trans.app ):
                                 # The current ctx is the repository tip, so use it.
-                                update_to_ctx = suc.get_changectx_for_changeset( repo, changeset_hash )
+                                update_to_ctx = hg_util.get_changectx_for_changeset( repo, changeset_hash )
                                 latest_changeset_revision = changeset_hash
                             else:
-                                update_to_ctx = suc.get_changectx_for_changeset( repo, update_to_changeset_hash )
+                                update_to_ctx = hg_util.get_changectx_for_changeset( repo, update_to_changeset_hash )
                                 latest_changeset_revision = update_to_changeset_hash
                             break
                     elif not update_to_changeset_hash and changeset_hash == changeset_revision:
         changeset_revision = kwd[ 'changeset_revision' ]
         repository = suc.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner )
         repo_dir = repository.repo_path( trans.app )
-        repo = hg.repository( suc.get_configured_ui(), repo_dir )
-        ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
+        repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
+        ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision )
         if ctx:
             return str( ctx.rev() )
         return ''
     def get_file_from_changeset_revision( self, repo_files_dir, changeset_revision, file_name, dir ):
         """Return file_name from the received changeset_revision of the repository manifest."""
         stripped_file_name = suc.strip_path( file_name )
-        repo = hg.repository( suc.get_configured_ui(), repo_files_dir )
-        ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
+        repo = hg.repository( hg_util.get_configured_ui(), repo_files_dir )
+        ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision )
         named_tmp_file = suc.get_named_tmpfile_from_ctx( ctx, file_name, dir )
         return named_tmp_file
 
                                                            trans.model.Repository.table.c.user_id == user.id ) ):
             repository = repository_metadata.repository
             repo_dir = repository.repo_path( trans.app )
-            repo = hg.repository( suc.get_configured_ui(), repo_dir )
+            repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
             latest_downloadable_changeset_revision = suc.get_latest_downloadable_changeset_revision( trans, repository, repo )
             if repository_metadata.changeset_revision == latest_downloadable_changeset_revision:
                 # We'll display only the test run for the latest installable revision in the rss feed.
         if repository_name is not None and repository_owner is not None:
             repository = suc.get_repository_by_name_and_owner( trans.app, repository_name, repository_owner )
             if repository:
-                repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+                repo = hg.repository( hg_util.get_configured_ui(), repository.repo_path( trans.app ) )
                 return suc.get_latest_downloadable_changeset_revision( trans, repository, repo )
         return suc.INITIAL_CHANGELOG_HASH
 
         changeset_revision = kwd[ 'changeset_revision' ]
         repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
         repo_dir = repository.repo_path( trans.app )
-        repo = hg.repository( suc.get_configured_ui(), repo_dir )
+        repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
         tool_version_dicts = []
         for changeset in repo.changelog:
             current_changeset_revision = str( repo.changectx( changeset ) )
         repository_id = trans.security.encode_id( repository.id )
         repository_clone_url = common_util.generate_clone_url_for_repository_in_tool_shed( trans, repository )
         repo_dir = repository.repo_path( trans.app )
-        repo = hg.repository( suc.get_configured_ui(), repo_dir )
+        repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
         repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
         if not repository_metadata:
             # The received changeset_revision is no longer associated with metadata, so get the next changeset_revision in the repository
                                                                                repo,
                                                                                after_changeset_revision=changeset_revision )
             repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
-        ctx = suc.get_changectx_for_changeset( repo, changeset_revision )
+        ctx = hg_util.get_changectx_for_changeset( repo, changeset_revision )
         repo_info_dict = repository_util.create_repo_info_dict( trans=trans,
                                                                 repository_clone_url=repository_clone_url,
                                                                 changeset_revision=changeset_revision,
         repository = suc.get_repository_in_tool_shed( trans, id )
         repository_type = kwd.get( 'repository_type', str( repository.type ) )
         repo_dir = repository.repo_path( trans.app )
-        repo = hg.repository( suc.get_configured_ui(), repo_dir )
+        repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
         repo_name = kwd.get( 'repo_name', repository.name )
         changeset_revision = kwd.get( 'changeset_revision', repository.tip( trans.app ) )
         description = kwd.get( 'description', repository.description )
                                                                                             selected_value=changeset_revision,
                                                                                             add_id_to_name=False,
                                                                                             downloadable=False )
-        revision_label = suc.get_revision_label( trans, repository, repository.tip( trans.app ), include_date=False )
+        revision_label = hg_util.get_revision_label( trans, repository, repository.tip( trans.app ), include_date=False )
         repository_metadata = None
         metadata = None
         is_malicious = False
         if changeset_revision != suc.INITIAL_CHANGELOG_HASH:
             repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
             if repository_metadata:
-                revision_label = suc.get_revision_label( trans, repository, changeset_revision, include_date=False )
+                revision_label = hg_util.get_revision_label( trans, repository, changeset_revision, include_date=False )
                 metadata = repository_metadata.metadata
                 is_malicious = repository_metadata.malicious
             else:
                 if previous_changeset_revision != suc.INITIAL_CHANGELOG_HASH:
                     repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, previous_changeset_revision )
                     if repository_metadata:
-                        revision_label = suc.get_revision_label( trans, repository, previous_changeset_revision, include_date=False )
+                        revision_label = hg_util.get_revision_label( trans, repository, previous_changeset_revision, include_date=False )
                         metadata = repository_metadata.metadata
                         is_malicious = repository_metadata.malicious
                         changeset_revision = previous_changeset_revision
                 # There is no repository_metadata defined for the changeset_revision, so see if it was defined
                 # in a previous changeset in the changelog.
                 repo_dir = repository.repo_path( trans.app )
-                repo = hg.repository( suc.get_configured_ui(), repo_dir )
+                repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
                 previous_changeset_revision = \
                     suc.get_previous_metadata_changeset_revision( repository,
                                                                   repo,
         changeset_revision = kwd.get( 'changeset_revision', None )
         repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
         repo_dir = repository.repo_path( trans.app )
-        repo = hg.repository( suc.get_configured_ui(), repo_dir )
+        repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
         # Get the next installable changeset_revision beyond the received changeset_revision.
         changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
         if changeset_revision:
         status = kwd.get( 'status', 'done' )
         repository = suc.get_repository_in_tool_shed( trans, repository_id )
         repo_dir = repository.repo_path( trans.app )
-        repo = hg.repository( suc.get_configured_ui(), repo_dir )
+        repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
         changeset_revision = kwd.get( 'changeset_revision', repository.tip( trans.app ) )
         repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
         if repository_metadata:
                             invalid = True
                             break
                     if invalid:
-                        message += 'The repository dependency definitions for this repository are invalid and will be ignored.  '
-                        message += 'The complete dependency hierarchy could not be determined.  The cause of repository dependency '
-                        message += 'definition errors like this can usually be seen when viewing the repository directly from the '
-                        message += 'Tool Shed.  The exact cause cannot be determined when visiting the Tool Shed from Galaxy to '
-                        message += 'install the repository.'
+                        message = repository_dependency_util.generate_message_for_invalid_repository_dependencies( metadata,
+                                                                                                                   error_from_tuple=False )
                         status = 'error'
         else:
             repository_metadata_id = None
             metadata = None
             repository_dependencies = None
-        revision_label = suc.get_revision_label( trans, repository, changeset_revision, include_date=True )
+        revision_label = hg_util.get_revision_label( trans, repository, changeset_revision, include_date=True )
         changeset_revision_select_field = grids_util.build_changeset_revision_select_field( trans,
                                                                                             repository,
                                                                                             selected_value=changeset_revision,
         changeset_revision = kwd.get( 'changeset_revision', None )
         repository = suc.get_repository_by_name_and_owner( trans.app, name, owner )
         repo_dir = repository.repo_path( trans.app )
-        repo = hg.repository( suc.get_configured_ui(), repo_dir )
+        repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
         # Get the lower bound changeset revision.
         lower_bound_changeset_revision = suc.get_previous_metadata_changeset_revision( repository, repo, changeset_revision, downloadable=True )
         # Build the list of changeset revision hashes.
                                                               status='error' ) )
         repository = suc.get_repository_in_tool_shed( trans, id )
         changeset_revision = repository.tip( trans.app )
-        repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+        repo = hg.repository( hg_util.get_configured_ui(), repository.repo_path( trans.app ) )
         if repository.user == trans.user:
             return trans.response.send_redirect( web.url_for( controller='repository',
                                                               action='browse_repositories',
                                                                                               changeset_revision,
                                                                                               metadata_only=True )
         repository_type_select_field = rt_util.build_repository_type_select_field( trans, repository=repository )
-        revision_label = suc.get_revision_label( trans, repository, changeset_revision, include_date=True )
+        revision_label = hg_util.get_revision_label( trans, repository, changeset_revision, include_date=True )
         return trans.fill_template( '/webapps/tool_shed/repository/rate_repository.mako',
                                     repository=repository,
                                     metadata=metadata,
         commit_message = kwd.get( 'commit_message', 'Deleted selected files' )
         repository = suc.get_repository_in_tool_shed( trans, id )
         repo_dir = repository.repo_path( trans.app )
-        repo = hg.repository( suc.get_configured_ui(), repo_dir )
+        repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
         selected_files_to_delete = kwd.get( 'selected_files_to_delete', '' )
         if kwd.get( 'select_files_to_delete_button', False ):
             if selected_files_to_delete:
                 # Update the repository files for browsing.
                 suc.update_repository( repo )
                 # Get the new repository tip.
-                repo = hg.repository( suc.get_configured_ui(), repo_dir )
+                repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
                 if tip == repository.tip( trans.app ):
                     message += 'No changes to repository.  '
                     kwd[ 'message' ] = message
             if not repository_metadata:
                 # Get updates to the received changeset_revision if any exist.
                 repo_dir = repository.repo_path( trans.app )
-                repo = hg.repository( suc.get_configured_ui(), repo_dir )
+                repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
                 upper_bound_changeset_revision = suc.get_next_downloadable_changeset_revision( repository, repo, changeset_revision )
                 if upper_bound_changeset_revision:
                     changeset_revision = upper_bound_changeset_revision
                                                                                      trans.security.encode_id( repository.id ),
                                                                                      changeset_revision )
             repo_dir = repository.repo_path( trans.app )
-            repo = hg.repository( suc.get_configured_ui(), repo_dir )
+            repo = hg.repository( hg_util.get_configured_ui(), repo_dir )
             tool_shed_status_dict = {}
             # Handle repository deprecation.
             tool_shed_status_dict[ 'repository_deprecated' ] = str( repository.deprecated )
         message = kwd.get( 'message', ''  )
         status = kwd.get( 'status', 'done' )
         repository = suc.get_repository_in_tool_shed( trans, id )
-        repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+        repo = hg.repository( hg_util.get_configured_ui(), repository.repo_path( trans.app ) )
         changesets = []
         for changeset in repo.changelog:
             ctx = repo.changectx( changeset )
             change_dict = { 'ctx' : ctx,
                             'rev' : str( ctx.rev() ),
                             'date' : date,
-                            'display_date' : suc.get_readable_ctx_date( ctx ),
+                            'display_date' : hg_util.get_readable_ctx_date( ctx ),
                             'description' : ctx.description(),
                             'files' : ctx.files(),
                             'user' : ctx.user(),
         message = kwd.get( 'message', ''  )
         status = kwd.get( 'status', 'done' )
         repository = suc.get_repository_in_tool_shed( trans, id )
-        repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
-        ctx = suc.get_changectx_for_changeset( repo, ctx_str )
+        repo = hg.repository( hg_util.get_configured_ui(), repository.repo_path( trans.app ) )
+        ctx = hg_util.get_changectx_for_changeset( repo, ctx_str )
         if ctx is None:
             message = "Repository does not include changeset revision '%s'." % str( ctx_str )
             status = 'error'
         metadata = metadata_util.get_repository_metadata_by_repository_id_changeset_revision( trans, id, ctx_str, metadata_only=True )
         # For rendering the prev button.
         if ctx_parent:
-            ctx_parent_date = suc.get_readable_ctx_date( ctx_parent )
+            ctx_parent_date = hg_util.get_readable_ctx_date( ctx_parent )
             ctx_parent_rev = ctx_parent.rev()
             if ctx_parent_rev < 0:
                 prev = None
         else:
             prev = None
         if ctx_child:
-            ctx_child_date = suc.get_readable_ctx_date( ctx_child )
+            ctx_child_date = hg_util.get_readable_ctx_date( ctx_child )
             ctx_child_rev = ctx_child.rev()
             next = "<b>%s:%s</b> <i>(%s)</i>" % ( ctx_child_rev, ctx_child, ctx_child_date )
         else:
         message = kwd.get( 'message', ''  )
         status = kwd.get( 'status', 'done' )
         repository = suc.get_repository_in_tool_shed( trans, id )
-        repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+        repo = hg.repository( hg_util.get_configured_ui(), repository.repo_path( trans.app ) )
         avg_rating, num_ratings = self.get_ave_item_rating_data( trans.sa_session, repository, webapp_model=trans.model )
         changeset_revision = kwd.get( 'changeset_revision', repository.tip( trans.app ) )
         display_reviews = kwd.get( 'display_reviews', False )
                                                                                             selected_value=changeset_revision,
                                                                                             add_id_to_name=False,
                                                                                             downloadable=False )
-        revision_label = suc.get_revision_label( trans, repository, changeset_revision, include_date=False )
+        revision_label = hg_util.get_revision_label( trans, repository, changeset_revision, include_date=False )
         repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, id, changeset_revision )
         if repository_metadata:
             metadata = repository_metadata.metadata
         render_repository_actions_for = kwd.get( 'render_repository_actions_for', 'tool_shed' )
         repository = suc.get_repository_in_tool_shed( trans, repository_id )
         repo_files_dir = repository.repo_path( trans.app )
-        repo = hg.repository( suc.get_configured_ui(), repo_files_dir )
+        repo = hg.repository( hg_util.get_configured_ui(), repo_files_dir )
         tool_metadata_dict = {}
         tool_lineage = []
         tool = None
         guid = None
         original_tool_data_path = trans.app.config.tool_data_path
-        revision_label = suc.get_revision_label( trans, repository, changeset_revision, include_date=False )
+        revision_label = hg_util.get_revision_label( trans, repository, changeset_revision, include_date=False )
         repository_metadata = suc.get_repository_metadata_by_changeset_revision( trans, repository_id, changeset_revision )
         if repository_metadata:
             repository_metadata_id = trans.security.encode_id( repository_metadata.id )

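The hunks above and below consistently redirect Mercurial helper calls from tool_shed.util.shed_util_common (imported as suc) to the new tool_shed.util.hg_util module, while repository-metadata helpers remain in suc. The hg_util module itself is not part of this excerpt, so the sketch below is only an assumed outline of the helpers implied by the call sites (get_configured_ui, get_changectx_for_changeset, get_readable_ctx_date): thin wrappers around Mercurial's ui and changectx APIs.

    # Hypothetical outline of tool_shed/util/hg_util.py; the real module is not
    # shown in this excerpt, so the bodies below are illustrative only.
    from datetime import datetime
    from time import gmtime

    from mercurial import ui

    def get_configured_ui():
        # Return a Mercurial ui object configured to run quietly, as the call
        # sites above expect when opening hg.repository( ... ).
        _ui = ui.ui()
        _ui.setconfig( 'ui', 'quiet', True )
        return _ui

    def get_changectx_for_changeset( repo, changeset_revision, **kwd ):
        # Walk the changelog and return the changectx whose hash matches
        # changeset_revision, or None if the repository does not include it.
        for changeset in repo.changelog:
            ctx = repo.changectx( changeset )
            if str( ctx ) == changeset_revision:
                return ctx
        return None

    def get_readable_ctx_date( ctx ):
        # Convert a changectx's commit date into a human-readable string.
        t, tz = ctx.date()
        date = datetime( *gmtime( float( t ) - tz )[ :6 ] )
        return date.strftime( "%Y-%m-%d" )

With helpers shaped roughly like this, the suc. to hg_util. substitutions in the surrounding hunks are mechanical renames that leave the call sites otherwise unchanged.
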
lib/galaxy/webapps/tool_shed/controllers/repository_review.py

 from galaxy.webapps.tool_shed.util import ratings_util
 from tool_shed.util.container_util import STRSEP
 import tool_shed.util.shed_util_common as suc
+from tool_shed.util import hg_util
 from tool_shed.util import review_util
 from galaxy.util.odict import odict
 import tool_shed.grids.repository_review_grids as repository_review_grids
         status = kwd.get( 'status', 'done' )
         review = review_util.get_review( trans, kwd[ 'id' ] )
         repository = review.repository
-        repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
-        rev, changeset_revision_label = suc.get_rev_label_from_changeset_revision( repo, review.changeset_revision )
+        repo = hg.repository( hg_util.get_configured_ui(), repository.repo_path( trans.app ) )
+        rev, changeset_revision_label = hg_util.get_rev_label_from_changeset_revision( repo, review.changeset_revision )
         return trans.fill_template( '/webapps/tool_shed/repository_review/browse_review.mako',
                                     repository=repository,
                                     changeset_revision_label=changeset_revision_label,
         for component in review_util.get_components( trans ):
             components_dict[ component.name ] = dict( component=component, component_review=None )
         repository = review.repository
-        repo = hg.repository( suc.get_configured_ui(), repository.repo_path( trans.app ) )
+        repo = hg.repository( hg_util.get_configured_ui(), repository.repo_path( trans.app ) )
         for component_review in review.component_reviews:
             if component_review and component_review.component:
                 component_name = component_review.component.name
                                                                                  name='revision_approved',
                                                                                  selected_value=selected_value,
                                                                                  for_component=False )
-        rev, changeset_revision_label = suc.get_rev_label_from_changeset_revision( repo, review.changeset_revision )
+        rev, changeset_revision_label = hg_util.get_rev_label_from_changeset_revision( repo, review.changeset_revision )
         return trans.fill_template( '/webapps/tool_shed/repository_review/edit_review.mako',
                                     repository=repository,
                                     review=review,
         if repository_id: