
dahlo committed 8fbb242 (merge)

Merged Roman's changes


Files changed (1)

lib/galaxy/jobs/runners/drmaa.py

 
     def get_native_spec( self, url ):
         """Get any native DRM arguments specified by the site configuration
-           i.e: drmaa.py://-A prj_id
+           e.g.: drmaa://-A prj_id -p core
         """
        try:
            return url.split('/')[2] or None
        except:
            return None
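
# Illustration (not part of the file): given the runner URL format shown in
# the docstring, everything after the second slash is handed to the DRM as-is:
#
#   >>> "drmaa://-A prj_id -p core".split('/')[2]
#   '-A prj_id -p core'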
         job_wrapper.change_state( model.Job.states.QUEUED )
 
         # define job attributes
-        ofile = "%s/%s.o" % (self.app.config.cluster_files_directory, job_wrapper.job_id)
-        efile = "%s/%s.e" % (self.app.config.cluster_files_directory, job_wrapper.job_id)
+        ofile = "%s/%s.out" % (self.app.config.cluster_files_directory, job_wrapper.job_id)
+        efile = "%s/%s.err" % (self.app.config.cluster_files_directory, job_wrapper.job_id)
         jt = self.ds.createJobTemplate()
         jt.remoteCommand = "%s/database/pbs/galaxy_%s.sh" % (os.getcwd(), job_wrapper.get_id_tag())
         jt.outputPath = ":%s" % ofile
         jt.errorPath = ":%s" % efile
+        jt.jobName = "GalaxyJob_%s" % (job_wrapper.get_id_tag())  # set a recognizable name in the DRM queue
 
         
         native_spec = self.get_native_spec( runner_url )
         if native_spec is not None:
+            log.debug("Running with global job manager native specification: %s" % native_spec)
             jt.nativeSpecification = native_spec
-            # get user-specific job runner settings
-            project = job_wrapper.get_job().user.preferences.get('project', '')
-            time_limit = job_wrapper.get_job().user.preferences.get('time_limit', '')
-            partition = job_wrapper.get_job().user.preferences.get('partition', '')
+        # get user-specific job runner settings, overriding the global ones;
+        # looked up unconditionally so the template substitution below does
+        # not fail when no native specification is configured
+        job = job_wrapper.get_job()
+        project = job.user.preferences.get('project', '')
+        time_limit = job.user.preferences.get('time_limit', '')
+        partition = job.user.preferences.get('partition', '')
+
+        #jt.hardWallclockTimeLimit = time_limit
         
-        script = drm_template % (project, partition, time_limit, job_wrapper.galaxy_lib_dir, os.path.abspath( job_wrapper.working_directory ), command_line)
+        script = drm_template % (project, partition, time_limit, job_wrapper.galaxy_lib_dir,
+                                 os.path.abspath( job_wrapper.working_directory ), command_line)
         fh = file( jt.remoteCommand, "w" )
         fh.write( script )
         fh.close()
         os.chmod( jt.remoteCommand, 0750 )
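
# A minimal sketch of the kind of template the substitution above expects;
# drm_template is defined elsewhere in this module, so the exact SLURM
# directives below are an assumption inferred from the argument order
# (project, partition, time_limit, galaxy_lib_dir, working_directory,
# command_line):
#
#   drm_template = """#!/bin/sh
#   #SBATCH -A %s
#   #SBATCH -p %s
#   #SBATCH -t %s
#   GALAXY_LIB="%s"
#   export PYTHONPATH="$GALAXY_LIB:$PYTHONPATH"
#   cd %s
#   %s
#   """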
 
+        # XXX bad hack: $HOME is unset on the cluster nodes, so force it
+        # before running the job script (note the hardcoded home directory)
+        jt.remoteCommand = "export HOME=/home/roman ; %s/database/pbs/galaxy_%s.sh" % (os.getcwd(), job_wrapper.get_id_tag())
+
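# A possible cleaner alternative to the HOME hack above (an untested sketch,
# assuming the drmaa-python binding in use exposes jobEnvironment):
#
#   import pwd
#   jt.jobEnvironment = { 'HOME': pwd.getpwuid( os.getuid() ).pw_dir }
#
# This would avoid both the hardcoded home directory and turning
# remoteCommand into a shell one-liner that the DRM may refuse to exec.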
         # job was deleted while we were preparing it
         if job_wrapper.get_state() == model.Job.states.DELETED:
             log.debug( "Job %s deleted by user before it entered the queue" % job_wrapper.get_id_tag() )