Commits

Brad Chapman  committed 40b2520

Correctly set the history and handle output datasets for error cases in the tool API. Allow specification of the dataset name during uploads, exposing it through the API

  • Participants
  • Parent commits f3b183e

Comments (0)

Files changed (3)

File lib/galaxy/tools/parameters/grouping.py

                     dataset_name = get_file_name( data_file['filename'] )
                 if not dataset_info:
                     dataset_info = 'uploaded file'
-                return Bunch( type='file', path=data_file['local_filename'], name=get_file_name( data_file['filename'] ) )
+                return Bunch( type='file', path=data_file['local_filename'], name=dataset_name )
                 #return 'file', data_file['local_filename'], get_file_name( data_file.filename ), dataset_name, dataset_info
             except:
                 # The uploaded file should've been persisted by the upload tool action
                         if line:
                             if not line.lower().startswith( 'http://' ) and not line.lower().startswith( 'ftp://' ) and not line.lower().startswith( 'https://' ):
                                 continue # non-url line, ignore
-                            precreated_name = line
                             dataset_name = override_name
                             if not dataset_name:
                                 dataset_name = line
                             dataset_info = override_info
                             if not dataset_info:
                                 dataset_info = 'uploaded url'
-                            yield Bunch( type='url', path=line, name=precreated_name )
+                            yield Bunch( type='url', path=line, name=dataset_name )
                             #yield ( 'url', line, precreated_name, dataset_name, dataset_info )
                 else:
                     dataset_name = dataset_info = precreated_name = 'Pasted Entry' #we need to differentiate between various url pastes here

File lib/galaxy/webapps/galaxy/api/tools.py

         tool = trans.app.toolbox.get_tool( tool_id )
         if not tool:
             return { "message": { "type": "error", "text" : messages.NO_TOOL } }
+
+        # Set running history from payload parameters.
+        # History not set correctly as part of this API call for
+        # dataset upload.
+        history_id = payload.get("history_id", None)
+        if history_id:
+            target_history = trans.sa_session.query(trans.app.model.History).get(
+                trans.security.decode_id(history_id))
+            trans.galaxy_session.current_history = target_history
+        else:
+            target_history = None
         
         # Set up inputs.
         inputs = payload[ 'inputs' ]
         inputs['runtool_btn'] = 'Execute'
         # TODO: encode data ids and decode ids.
         params = util.Params( inputs, sanitize = False )
-        template, vars = tool.handle_input( trans, params.__dict__ )
-        
+        template, vars = tool.handle_input( trans, params.__dict__, history=target_history)
+
         # TODO: check for errors and ensure that output dataset(s) are available.
-        output_datasets = vars[ 'out_data' ].values()
+        output_datasets = vars.get('out_data', {}).values()
         rval = {
             "outputs": []
         }

File tools/data_source/upload.xml

       <param name="space_to_tab" type="select" display="checkboxes" multiple="True" label="Convert spaces to tabs" help="Use this option if you are entering intervals by hand."> 
         <option value="Yes">Yes</option>
       </param>
+      <param name="NAME" type="hidden" help="Name for dataset in upload"></param>
     </upload_dataset>
     <param name="dbkey" type="genomebuild" label="Genome" />
     <conditional name="files_metadata" title="Specify metadata" value_from="self:app.datatypes_registry.get_upload_metadata_params" value_ref="file_type" value_ref_in_group="False" />