1. jdlogicman
  2. galaxy-central

Commits

Kelly Vincent committed 07dffb2 Merge

merging heads

  • Participants
  • Parent commits 61dd78c, aabcc79
  • Branches default

Comments (0)

Files changed (12)

File lib/galaxy/datatypes/data.py

View file
     
     is_binary = True #The dataset contains binary data --> do not space_to_tab or convert newlines, etc. Allow binary file uploads of this type when True.
     
+    allow_datatype_change = True #Allow user to change between this datatype and others. If False, this datatype cannot be changed from or into.
+    
     #Composite datatypes
     composite_type = None
     composite_files = odict()

File lib/galaxy/datatypes/genetics.py

View file
     
     file_ext="html"
     composite_type = 'auto_primary_file'
+    allow_datatype_change = False
     
     def missing_meta( self, dataset ):
         """Checks for empty meta values"""
     
     is_binary = True
     
+    allow_datatype_change = False
+    
     composite_type = 'basic'
     
     def __init__( self, **kwd ):

File lib/galaxy/model/migrate/versions/0011_v0010_mysql_index_fix.py

View file
+from sqlalchemy import *
+from sqlalchemy.orm import *
+from sqlalchemy.exceptions import *
+from migrate import *
+from migrate.changeset import *
+
+import datetime
+now = datetime.datetime.utcnow
+
+import sys, logging
+# Migration scripts run outside the application's normal logging setup,
+# so configure an explicit stdout handler here.
+log = logging.getLogger( __name__ )
+log.setLevel(logging.DEBUG)
+handler = logging.StreamHandler( sys.stdout )
+format = "%(name)s %(levelname)s %(asctime)s %(message)s"
+formatter = logging.Formatter( format )
+handler.setFormatter( formatter )
+log.addHandler( handler )
+
+# Need our custom types, but don't import anything else from model
+from galaxy.model.custom_types import *
+
+# NOTE(review): migrate_engine appears to be provided by the migrate framework
+# (wildcard import above or injected by the migration runner) -- confirm.
+metadata = MetaData( migrate_engine )
+db_session = scoped_session( sessionmaker( bind=migrate_engine, autoflush=False, transactional=False ) )
+
+# Table definition repeated here (rather than imported from galaxy.model) so the
+# migration is self-contained; upgrade()/downgrade() only use it to reference
+# the history_dataset_association_id column when building the named index.
+HistoryDatasetAssociationDisplayAtAuthorization_table = Table( "history_dataset_association_display_at_authorization", metadata,
+    Column( "id", Integer, primary_key=True ),
+    Column( "create_time", DateTime, default=now ),
+    Column( "update_time", DateTime, index=True, default=now, onupdate=now ),
+    Column( "history_dataset_association_id", Integer, ForeignKey( "history_dataset_association.id" ), index=True ),
+    Column( "user_id", Integer, ForeignKey( "galaxy_user.id" ), index=True ),
+    Column( "site", TrimmedString( 255 ) ) )
+
+def upgrade():
+    if migrate_engine.name == 'mysql':
+        # Load existing tables
+        metadata.reflect()
+        i = Index( "ix_hdadaa_history_dataset_association_id", HistoryDatasetAssociationDisplayAtAuthorization_table.c.history_dataset_association_id )
+        try:
+            i.create()
+        except Exception, e:
+            log.debug( "Adding index 'ix_hdadaa_history_dataset_association_id' to table 'history_dataset_association_display_at_authorization' table failed: %s" % str( e ) )  
+    
+def downgrade():
+    if migrate_engine.name == 'mysql':
+        # Load existing tables
+        metadata.reflect()
+        i = Index( "ix_hdadaa_history_dataset_association_id", HistoryDatasetAssociationDisplayAtAuthorization_table.c.history_dataset_association_id )
+        try:
+            i.drop()
+        except Exception, e:
+            log.debug( "Removing index 'ix_hdadaa_history_dataset_association_id' from table 'history_dataset_association_display_at_authorization' table failed: %s" % str( e ) )  

File lib/galaxy/web/controllers/admin.py

View file
             replace_dataset = None
         # Let's not overwrite the imported datatypes module with the variable datatypes?
         # The built-in 'id' is overwritten in lots of places as well
-        ldatatypes = [ x for x in trans.app.datatypes_registry.datatypes_by_extension.iterkeys() ]
+        ldatatypes = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ]
         ldatatypes.sort()
         if params.get( 'new_dataset_button', False ):
             upload_option = params.get( 'upload_option', 'upload_file' )
             elif action == 'edit_info':
                 if params.get( 'change', False ):
                     # The user clicked the Save button on the 'Change data type' form
-                    trans.app.datatypes_registry.change_datatype( ldda, params.datatype )
-                    trans.app.model.flush()
-                    msg = "Data type changed for library dataset '%s'" % ldda.name
-                    return trans.fill_template( "/admin/library/ldda_edit_info.mako", 
-                                                ldda=ldda,
-                                                library_id=library_id,
-                                                datatypes=ldatatypes,
-                                                restrict=params.get( 'restrict', True ),
-                                                render_templates=params.get( 'render_templates', False ),
-                                                msg=msg,
-                                                messagetype=messagetype )
+                    if ldda.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
+                        trans.app.datatypes_registry.change_datatype( ldda, params.datatype )
+                        trans.app.model.flush()
+                        msg = "Data type changed for library dataset '%s'" % ldda.name
+                        return trans.fill_template( "/admin/library/ldda_edit_info.mako", 
+                                                    ldda=ldda,
+                                                    library_id=library_id,
+                                                    datatypes=ldatatypes,
+                                                    restrict=params.get( 'restrict', True ),
+                                                    render_templates=params.get( 'render_templates', False ),
+                                                    msg=msg,
+                                                    messagetype=messagetype )
+                    else:
+                        return trans.show_error_message( "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( ldda.extension, params.datatype ) )
                 elif params.get( 'save', False ):
                     # The user clicked the Save button on the 'Edit Attributes' form
                     old_name = ldda.name

File lib/galaxy/web/controllers/dataset.py

View file
         redirect_url = kwd['redirect_url'] % urllib.quote_plus( kwd['display_url'] )
         if trans.app.security_agent.allow_action( None, data.permitted_actions.DATASET_ACCESS, dataset = data ):
             return trans.response.send_redirect( redirect_url ) # anon access already permitted by rbac
-        if trans.app.security_agent.allow_action( trans.user, data.permitted_actions.DATASET_MANAGE_PERMISSIONS, dataset = data ):
+        if trans.app.security_agent.allow_action( trans.user, data.permitted_actions.DATASET_ACCESS, dataset = data ):
             trans.app.host_security_agent.set_dataset_permissions( data, trans.user, site )
             return trans.response.send_redirect( redirect_url )
         else:

File lib/galaxy/web/controllers/history.py

View file
                     default_permissions[ default_action ] = [ private_user_role ]
                     trans.app.security_agent.history_set_default_permissions( history, default_permissions )
                 n_undeleted += 1
-                trans.log_event( "History (%s) %d marked as undeleted" % history.name )
+                trans.log_event( "History (%s) %d marked as undeleted" % ( history.name, history.id ) )
         status = SUCCESS
         message_parts = []
         if n_undeleted:

File lib/galaxy/web/controllers/library.py

View file
             replace_dataset = None
         # Let's not overwrite the imported datatypes module with the variable datatypes?
         # The built-in 'id' is overwritten in lots of places as well
-        ldatatypes = [ x for x in trans.app.datatypes_registry.datatypes_by_extension.iterkeys() ]
+        ldatatypes = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ]
         ldatatypes.sort()
         if id:
             if params.get( 'permissions', False ):
                     if trans.app.security_agent.allow_action( trans.user,
                                                               trans.app.security_agent.permitted_actions.LIBRARY_MODIFY,
                                                               library_item=ldda ):
-                        trans.app.datatypes_registry.change_datatype( ldda, params.datatype )
-                        trans.app.model.flush()
-                        msg = "Data type changed for library dataset '%s'" % ldda.name
-                        messagetype = 'done'
+                        if ldda.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
+                            trans.app.datatypes_registry.change_datatype( ldda, params.datatype )
+                            trans.app.model.flush()
+                            msg = "Data type changed for library dataset '%s'" % ldda.name
+                            messagetype = 'done'
+                        else:
+                            msg = "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( ldda.extension, params.datatype )
+                            messagetype = 'error'
                     else:
                         msg = "You are not authorized to change the data type of dataset '%s'" % ldda.name
                         messagetype = 'error'

File lib/galaxy/web/controllers/root.py

View file
             params = util.Params( kwd, safe=False )
             if params.change:
                 # The user clicked the Save button on the 'Change data type' form
-                trans.app.datatypes_registry.change_datatype( data, params.datatype )
-                trans.app.model.flush()
+                if data.datatype.allow_datatype_change and trans.app.datatypes_registry.get_datatype_by_extension( params.datatype ).allow_datatype_change:
+                    trans.app.datatypes_registry.change_datatype( data, params.datatype )
+                    trans.app.model.flush()
+                else:
+                    return trans.show_error_message( "You are unable to change datatypes in this manner. Changing %s to %s is not allowed." % ( data.extension, params.datatype ) )
             elif params.save:
                 # The user clicked the Save button on the 'Edit Attributes' form
                 data.name  = params.name
                 data.metadata.dbkey = data.dbkey
             # let's not overwrite the imported datatypes module with the variable datatypes?
             # the built-in 'id' is overwritten in lots of places as well
-            ldatatypes = [x for x in trans.app.datatypes_registry.datatypes_by_extension.iterkeys()]
+            ldatatypes = [ dtype_name for dtype_name, dtype_value in trans.app.datatypes_registry.datatypes_by_extension.iteritems() if dtype_value.allow_datatype_change ]
             ldatatypes.sort()
             trans.log_event( "Opened edit view on dataset %s" % str(id) )
             return trans.fill_template( "/dataset/edit_attributes.mako", data=data, datatypes=ldatatypes )

File scripts/cleanup_datasets/cleanup_datasets.py

View file
     parser.add_option( "-d", "--days", dest="days", action="store", type="int", help="number of days (60)", default=60 )
     parser.add_option( "-r", "--remove_from_disk", action="store_true", dest="remove_from_disk", help="remove datasets from disk when purged", default=False )
     parser.add_option( "-i", "--info_only", action="store_true", dest="info_only", help="info about the requested action", default=False )
+    parser.add_option( "-f", "--force_retry", action="store_true", dest="force_retry", help="performs the requested actions, but ignores whether it might have been done before. Useful when -r wasn't used, but should have been", default=False )
     
     parser.add_option( "-1", "--delete_userless_histories", action="store_true", dest="delete_userless_histories", default=False, help="delete userless histories and datasets" )
     
         print "# Datasets will NOT be removed from disk.\n"
     
     if options.delete_userless_histories:
-        delete_userless_histories( app, cutoff_time, info_only = options.info_only )
+        delete_userless_histories( app, cutoff_time, info_only = options.info_only, force_retry = options.force_retry )
     elif options.purge_histories:
-        purge_histories( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
+        purge_histories( app, cutoff_time, options.remove_from_disk, info_only = options.info_only, force_retry = options.force_retry )
     elif options.purge_datasets:
-        purge_datasets( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
+        purge_datasets( app, cutoff_time, options.remove_from_disk, info_only = options.info_only, force_retry = options.force_retry )
     elif options.purge_libraries:
-        purge_libraries( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
+        purge_libraries( app, cutoff_time, options.remove_from_disk, info_only = options.info_only, force_retry = options.force_retry )
     elif options.purge_folders:
-        purge_folders( app, cutoff_time, options.remove_from_disk, info_only = options.info_only )
+        purge_folders( app, cutoff_time, options.remove_from_disk, info_only = options.info_only, force_retry = options.force_retry )
     
     sys.exit(0)
 
-def delete_userless_histories( app, cutoff_time, info_only = False ):
+def delete_userless_histories( app, cutoff_time, info_only = False, force_retry = False ):
     # Deletes userless histories whose update_time value is older than the cutoff_time.
     # The purge history script will handle marking DatasetInstances as deleted. 
     # Nothing is removed from disk yet.
     history_count = 0
     print '# The following datasets and associated userless histories have been deleted'
     start = time.clock()
-    histories = app.model.History.filter( and_( app.model.History.table.c.user_id==None,
-                                app.model.History.table.c.deleted==False,
-                                app.model.History.table.c.update_time < cutoff_time ) ).all()# \
+    if force_retry:
+        histories = app.model.History.filter( and_( app.model.History.table.c.user_id==None,
+                                    app.model.History.table.c.update_time < cutoff_time ) ).all()
+    else:
+        histories = app.model.History.filter( and_( app.model.History.table.c.user_id==None,
+                                    app.model.History.table.c.deleted==False,
+                                    app.model.History.table.c.update_time < cutoff_time ) ).all()
     for history in histories:
         if not info_only:
             history.deleted = True
     print "Elapsed time: ", stop - start, "\n"
     
 
-def purge_histories( app, cutoff_time, remove_from_disk, info_only = False ):
+def purge_histories( app, cutoff_time, remove_from_disk, info_only = False, force_retry = False ):
     # Purges deleted histories whose update_time is older than the cutoff_time.
     # The dataset associations of each history are also marked as deleted.
     # The Purge Dataset method will purge each Dataset as necessary
     history_count = 0
     print '# The following datasets and associated deleted histories have been purged'
     start = time.clock()
-    histories = app.model.History.filter( and_( app.model.History.table.c.deleted==True,
-                                app.model.History.table.c.purged==False,
-                                app.model.History.table.c.update_time < cutoff_time ) ) \
-                 .options( eagerload( 'datasets' ) ).all()
+    if force_retry:
+        histories = app.model.History.filter( and_( app.model.History.table.c.deleted==True,
+                                    app.model.History.table.c.update_time < cutoff_time ) ) \
+                     .options( eagerload( 'datasets' ) ).all()
+    else:
+        histories = app.model.History.filter( and_( app.model.History.table.c.deleted==True,
+                                    app.model.History.table.c.purged==False,
+                                    app.model.History.table.c.update_time < cutoff_time ) ) \
+                     .options( eagerload( 'datasets' ) ).all()
     for history in histories:
         for dataset_assoc in history.datasets:
             _purge_dataset_instance( dataset_assoc, app, remove_from_disk, info_only = info_only ) #mark a DatasetInstance as deleted, clear associated files, and mark the Dataset as deleted if it is deletable
     print '# Purged %d histories.' % ( history_count ), '\n'
     print "Elapsed time: ", stop - start, "\n"
 
-def purge_libraries( app, cutoff_time, remove_from_disk, info_only = False ):
+def purge_libraries( app, cutoff_time, remove_from_disk, info_only = False, force_retry = False ):
     # Purges deleted libraries whose update_time is older than the cutoff_time.
     # The dataset associations of each library are also marked as deleted.
     # The Purge Dataset method will purge each Dataset as necessary
     library_count = 0
     print '# The following libraries and associated folders have been purged'
     start = time.clock()
-    libraries = app.model.Library.filter( and_( app.model.Library.table.c.deleted==True,
-                                app.model.Library.table.c.purged==False,
-                                app.model.Library.table.c.update_time < cutoff_time ) ).all()
+    if force_retry:
+        libraries = app.model.Library.filter( and_( app.model.Library.table.c.deleted==True,
+                                    app.model.Library.table.c.update_time < cutoff_time ) ).all()
+    else:
+        libraries = app.model.Library.filter( and_( app.model.Library.table.c.deleted==True,
+                                    app.model.Library.table.c.purged==False,
+                                    app.model.Library.table.c.update_time < cutoff_time ) ).all()
     for library in libraries:
         _purge_folder( library.root_folder, app, remove_from_disk, info_only = info_only )
         if not info_only:
     print '# Purged %d libraries .' % ( library_count ), '\n'
     print "Elapsed time: ", stop - start, "\n"
 
-def purge_folders( app, cutoff_time, remove_from_disk, info_only = False ):
+def purge_folders( app, cutoff_time, remove_from_disk, info_only = False, force_retry = False ):
     # Purges deleted folders whose update_time is older than the cutoff_time.
     # The dataset associations of each folder are also marked as deleted.
     # The Purge Dataset method will purge each Dataset as necessary
     folder_count = 0
     print '# The following folders have been purged'
     start = time.clock()
-    folders = app.model.LibraryFolder.filter( and_( app.model.LibraryFolder.table.c.deleted==True,
-                                app.model.LibraryFolder.table.c.purged==False,
-                                app.model.LibraryFolder.table.c.update_time < cutoff_time ) ).all()
+    if force_retry:
+        folders = app.model.LibraryFolder.filter( and_( app.model.LibraryFolder.table.c.deleted==True,
+                                    app.model.LibraryFolder.table.c.update_time < cutoff_time ) ).all()
+    else:
+        folders = app.model.LibraryFolder.filter( and_( app.model.LibraryFolder.table.c.deleted==True,
+                                    app.model.LibraryFolder.table.c.purged==False,
+                                    app.model.LibraryFolder.table.c.update_time < cutoff_time ) ).all()
     for folder in folders:
         _purge_folder( folder, app, remove_from_disk, info_only = info_only )
         print "%d" % folder.id
     print '# Purged %d folders.' % ( folder_count ), '\n'
     print "Elapsed time: ", stop - start, "\n"
 
-def purge_datasets( app, cutoff_time, remove_from_disk, info_only = False ):
+def purge_datasets( app, cutoff_time, remove_from_disk, info_only = False, repurge = False, force_retry = False ):
     # Purges deleted datasets whose update_time is older than cutoff_time.  Files may or may
     # not be removed from disk.
     dataset_count = 0
     disk_space = 0
     print '# The following deleted datasets have been purged'
     start = time.clock()
-    datasets = app.model.Dataset.filter( and_( app.model.Dataset.table.c.deleted==True,
-                               app.model.Dataset.table.c.purgable==True,
-                               app.model.Dataset.table.c.purged==False,
-                               app.model.Dataset.table.c.update_time < cutoff_time ) ).all()
+    if force_retry:
+        datasets = app.model.Dataset.filter( and_( app.model.Dataset.table.c.deleted==True,
+                                   app.model.Dataset.table.c.purgable==True,
+                                   app.model.Dataset.table.c.update_time < cutoff_time ) ).all()
+    else:
+        datasets = app.model.Dataset.filter( and_( app.model.Dataset.table.c.deleted==True,
+                                   app.model.Dataset.table.c.purgable==True,
+                                   app.model.Dataset.table.c.purged==False,
+                                   app.model.Dataset.table.c.update_time < cutoff_time ) ).all()
     for dataset in datasets:
         file_size = dataset.file_size
         _purge_dataset( dataset, remove_from_disk, info_only = info_only )

File templates/admin/library/ldda_edit_info.mako

View file
 <div class="toolForm">
     <div class="toolFormTitle">Change data type of ${ldda.name}</div>
     <div class="toolFormBody">
-        <form name="change_datatype" action="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library_id, folder_id=ldda.library_dataset.folder.id, edit_info=True )}" method="post">
-            <input type="hidden" name="id" value="${ldda.id}"/>
+        %if ldda.datatype.allow_datatype_change:
+            <form name="change_datatype" action="${h.url_for( controller='admin', action='library_dataset_dataset_association', library_id=library_id, folder_id=ldda.library_dataset.folder.id, edit_info=True )}" method="post">
+                <input type="hidden" name="id" value="${ldda.id}"/>
+                <div class="form-row">
+                    <label>New Type:</label>
+                    <div style="float: left; width: 250px; margin-right: 10px;">
+                        ${datatype( ldda, datatypes )}
+                    </div>
+                    <div class="toolParamHelp" style="clear: both;">
+                        This will change the datatype of the existing dataset
+                        but <i>not</i> modify its contents. Use this if Galaxy
+                        has incorrectly guessed the type of your dataset.
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <input type="submit" name="change" value="Save"/>
+                </div>
+            </form>
+        %else:
             <div class="form-row">
-                <label>New Type:</label>
-                <div style="float: left; width: 250px; margin-right: 10px;">
-                    ${datatype( ldda, datatypes )}
-                </div>
-                <div class="toolParamHelp" style="clear: both;">
-                    This will change the datatype of the existing dataset
-                    but <i>not</i> modify its contents. Use this if Galaxy
-                    has incorrectly guessed the type of your dataset.
-                </div>
-                <div style="clear: both"></div>
+                <div class="warningmessagesmall">${_('Changing the datatype of this dataset is not allowed.')}</div>
             </div>
-            <div class="form-row">
-                <input type="submit" name="change" value="Save"/>
-            </div>
-        </form>
+        %endif
     </div>
 </div>
 

File templates/dataset/edit_attributes.mako

View file
     </div>
     <p />
 %endif
+
 <div class="toolForm">
     <div class="toolFormTitle">${_('Change data type')}</div>
     <div class="toolFormBody">
-        <form name="change_datatype" action="${h.url_for( controller='root', action='edit' )}" method="post">
-            <input type="hidden" name="id" value="${data.id}"/>
+        %if data.datatype.allow_datatype_change:
+            <form name="change_datatype" action="${h.url_for( controller='root', action='edit' )}" method="post">
+                <input type="hidden" name="id" value="${data.id}"/>
+                <div class="form-row">
+                    <label>
+                        ${_('New Type')}:
+                    </label>
+                    <div style="float: left; width: 250px; margin-right: 10px;">
+                        ${datatype( data, datatypes )}
+                    </div>
+                    <div class="toolParamHelp" style="clear: both;">
+                        ${_('This will change the datatype of the existing dataset but <i>not</i> modify its contents. Use this if Galaxy has incorrectly guessed the type of your dataset.')}
+                    </div>
+                    <div style="clear: both"></div>
+                </div>
+                <div class="form-row">
+                    <input type="submit" name="change" value="${_('Save')}"/>
+                </div>
+            </form>
+        %else:
             <div class="form-row">
-                <label>
-                    ${_('New Type')}:
-                </label>
-                <div style="float: left; width: 250px; margin-right: 10px;">
-                    ${datatype( data, datatypes )}
-                </div>
-                <div class="toolParamHelp" style="clear: both;">
-                    ${_('This will change the datatype of the existing dataset but <i>not</i> modify its contents. Use this if Galaxy has incorrectly guessed the type of your dataset.')}
-                </div>
-                <div style="clear: both"></div>
+                <div class="warningmessagesmall">${_('Changing the datatype of this dataset is not allowed.')}</div>
             </div>
-            <div class="form-row">
-                <input type="submit" name="change" value="${_('Save')}"/>
-            </div>
-        </form>
+        %endif
     </div>
 </div>
 <p />

File templates/library/ldda_edit_info.mako

View file
     <div class="toolForm">
         <div class="toolFormTitle">Change data type</div>
         <div class="toolFormBody">
-            <form name="change_datatype" action="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library_id, folder_id=ldda.library_dataset.folder.id, edit_info=True )}" method="post">
-                <input type="hidden" name="id" value="${ldda.id}"/>
+            %if ldda.datatype.allow_datatype_change:
+                <form name="change_datatype" action="${h.url_for( controller='library', action='library_dataset_dataset_association', library_id=library_id, folder_id=ldda.library_dataset.folder.id, edit_info=True )}" method="post">
+                    <input type="hidden" name="id" value="${ldda.id}"/>
+                    <div class="form-row">
+                        <label>New Type:</label>
+                        <div style="float: left; width: 250px; margin-right: 10px;">
+                            ${datatype( ldda, datatypes )}
+                        </div>
+                        <div class="toolParamHelp" style="clear: both;">
+                            This will change the datatype of the existing dataset
+                            but <i>not</i> modify its contents. Use this if Galaxy
+                            has incorrectly guessed the type of your dataset.
+                        </div>
+                        <div style="clear: both"></div>
+                    </div>
+                    <div class="form-row">
+                        <input type="submit" name="change" value="Save"/>
+                    </div>
+                </form>
+            %else:
                 <div class="form-row">
-                    <label>New Type:</label>
-                    <div style="float: left; width: 250px; margin-right: 10px;">
-                        ${datatype( ldda, datatypes )}
-                    </div>
-                    <div class="toolParamHelp" style="clear: both;">
-                        This will change the datatype of the existing dataset
-                        but <i>not</i> modify its contents. Use this if Galaxy
-                        has incorrectly guessed the type of your dataset.
-                    </div>
-                    <div style="clear: both"></div>
+                    <div class="warningmessagesmall">${_('Changing the datatype of this dataset is not allowed.')}</div>
                 </div>
-                <div class="form-row">
-                    <input type="submit" name="change" value="Save"/>
-                </div>
-            </form>
+            %endif
         </div>
     </div>
     <p/>