Commits

Anonymous committed 13ffc69

QUARTZ-560, QUARTZ-556, QUARTZ-513 : error depersisting empty job datamaps with various delegates.

git-svn-id: http://svn.opensymphony.com/svn/quartz/branches/quartz_1-6@736 69f7d36a-ea1c-0410-88ea-9fd03e4c9665
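
The failure mode behind all three issues is the same: when a job or trigger is stored with an empty JobDataMap, the serialized-data column can come back as a zero-length value rather than a usable serialization stream, and constructing an ObjectInputStream over it fails immediately, because the constructor reads the serialization header before readObject() is ever called. A minimal sketch of that failure (plain java.io behaviour, not Quartz code):

    import java.io.ByteArrayInputStream;
    import java.io.EOFException;
    import java.io.ObjectInputStream;

    public class EmptyDataMapDemo {
        public static void main(String[] args) throws Exception {
            byte[] emptyColumn = new byte[0]; // what an empty JOB_DATA column can yield
            try {
                // Fails here: the constructor tries to read the stream header.
                new ObjectInputStream(new ByteArrayInputStream(emptyColumn));
            } catch (EOFException e) {
                System.out.println("depersisting empty job data fails: " + e);
            }
        }
    }

Each delegate below therefore gains a cheap emptiness check before building the ObjectInputStream, using whatever form the driver exposes the column in: byte-array length (Cloudscape), InputStream.available() (HSQLDB, MSSQL, Pointbase), or Blob length (StdJDBCDelegate).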


Files changed (5)

src/java/org/quartz/impl/jdbcjobstore/CloudscapeDelegate.java

-/* 
- * Copyright 2004-2005 OpenSymphony 
- * 
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not 
- * use this file except in compliance with the License. You may obtain a copy 
- * of the License at 
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0 
- *   
- * Unless required by applicable law or agreed to in writing, software 
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 
- * License for the specific language governing permissions and limitations 
+/*
+ * Copyright 2004-2005 OpenSymphony
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy
+ * of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
  * under the License.
- * 
+ *
  */
 
 /*
 
 /**
  * <p>
- * This is a driver delegate for the Cloudscape database, not surprisingly, 
+ * This is a driver delegate for the Cloudscape database, not surprisingly,
  * it is known to work with Derby as well.
  * </p>
- * 
+ *
  * @author James House
  * @author Sridhar Jawaharlal, Srinivas Venkatarangaiah
+ * @deprecated Use the StdJDBCDelegate for latest versions of Derby
  */
 public class CloudscapeDelegate extends StdJDBCDelegate {
     /**
      * <p>
      * Create new CloudscapeDelegate instance.
      * </p>
-     * 
+     *
      * @param log
      *          the logger to use during execution
      * @param tablePrefix
      * <p>
      * Create new CloudscapeDelegate instance.
      * </p>
-     * 
+     *
      * @param log
      *          the logger to use during execution
      * @param tablePrefix
      * special handling for BLOBs. The default implementation uses standard
      * JDBC <code>java.sql.Blob</code> operations.
      * </p>
-     * 
+     *
      * @param rs
      *          the result set, already queued to the correct row
      * @param colName
 
         byte[] inputBytes = rs.getBytes(colName);
 
-        if (null != inputBytes) {
+        if (null != inputBytes && inputBytes.length != 0) {
             ByteArrayInputStream bais = new
-            ByteArrayInputStream(inputBytes); 
+            ByteArrayInputStream(inputBytes);
 
             ObjectInputStream in = new ObjectInputStream(bais);
             try {
         }
 
         return obj;
-    }    
+    }
 }
 
 // EOF
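
Besides the empty-data guard, this file also gains a @deprecated tag steering users of recent Derby versions to StdJDBCDelegate. Switching delegates is a configuration change on the JDBC job store; the sketch below shows only the two relevant Quartz 1.x settings (the surrounding configuration such as instance name, datasource and thread pool is omitted, and in most deployments these settings live in quartz.properties rather than code):

    import java.util.Properties;

    // Sketch only: the settings that select the JDBC job store and its driver
    // delegate in Quartz 1.x. Merge these into the full scheduler configuration
    // (quartz.properties or a Properties object passed to StdSchedulerFactory).
    public final class DelegateConfigSketch {
        public static Properties delegateSettings() {
            Properties props = new Properties();
            props.setProperty("org.quartz.jobStore.class",
                    "org.quartz.impl.jdbcjobstore.JobStoreTX");
            props.setProperty("org.quartz.jobStore.driverDelegateClass",
                    "org.quartz.impl.jdbcjobstore.StdJDBCDelegate");
            return props;
        }
    }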

src/java/org/quartz/impl/jdbcjobstore/HSQLDBDelegate.java

-/* 
- * Copyright 2004-2005 OpenSymphony 
- * 
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not 
- * use this file except in compliance with the License. You may obtain a copy 
- * of the License at 
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0 
- *   
- * Unless required by applicable law or agreed to in writing, software 
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 
- * License for the specific language governing permissions and limitations 
+/*
+ * Copyright 2004-2005 OpenSymphony
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy
+ * of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
  * under the License.
- * 
+ *
  */
 
 /*
  * <p>
  * This is a driver delegate for the HSQLDB database.
  * </p>
- * 
+ *
  * @author James House
  * @author <a href="mailto:jeff@binaryfeed.org">Jeffrey Wescott</a>
  */
      * <p>
      * Create new HSQLDBDelegate instance.
      * </p>
-     * 
+     *
      * @param log
      *          the logger to use during execution
      * @param tablePrefix
      * <p>
      * Create new MSSQLDelegate instance.
      * </p>
-     * 
+     *
      * @param log
      *          the logger to use during execution
      * @param tablePrefix
      * special handling for BLOBs. The default implementation uses standard
      * JDBC <code>java.sql.Blob</code> operations.
      * </p>
-     * 
+     *
      * @param rs
      *          the result set, already queued to the correct row
      * @param colName
         throws ClassNotFoundException, IOException, SQLException {
         InputStream binaryInput = rs.getBinaryStream(colName);
 
-        if(binaryInput == null) {
+        if(binaryInput == null || binaryInput.available() == 0) {
             return null;
         }
-        
+
         Object obj = null;
-        
+
         ObjectInputStream in = new ObjectInputStream(binaryInput);
         try {
             obj = in.readObject();
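
Worth noting: the stream-returning delegates (this one and MSSQLDelegate below) test for an empty value with InputStream.available(). That is dependable when the driver hands back a fully buffered, in-memory stream, which appears to be the assumption here; in general, available() only reports how many bytes can be read without blocking, so 0 does not necessarily mean the stream is empty. A more defensive variant, shown purely as a sketch and not part of this commit, buffers the column first and tests its length:

    import java.io.ByteArrayInputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.ObjectInputStream;

    // Alternative sketch, not the committed fix: buffer the column fully, then
    // the emptiness test is just a length check on the resulting byte array.
    final class StreamGuardSketch {
        static Object readObjectOrNull(InputStream binaryInput)
                throws IOException, ClassNotFoundException {
            if (binaryInput == null) {
                return null;
            }
            ByteArrayOutputStream buf = new ByteArrayOutputStream();
            byte[] chunk = new byte[4096];
            for (int n; (n = binaryInput.read(chunk)) != -1; ) {
                buf.write(chunk, 0, n);
            }
            if (buf.size() == 0) {
                return null; // empty JobDataMap column: nothing to depersist
            }
            ObjectInputStream in = new ObjectInputStream(
                    new ByteArrayInputStream(buf.toByteArray()));
            try {
                return in.readObject();
            } finally {
                in.close();
            }
        }
    }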

src/java/org/quartz/impl/jdbcjobstore/MSSQLDelegate.java

-/* 
- * Copyright 2004-2005 OpenSymphony 
- * 
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not 
- * use this file except in compliance with the License. You may obtain a copy 
- * of the License at 
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0 
- *   
- * Unless required by applicable law or agreed to in writing, software 
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 
- * License for the specific language governing permissions and limitations 
+/*
+ * Copyright 2004-2005 OpenSymphony
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy
+ * of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
  * under the License.
- * 
+ *
  */
 
 /*
  * <p>
  * This is a driver delegate for the MSSQL JDBC driver.
  * </p>
- * 
+ *
  * @author <a href="mailto:jeff@binaryfeed.org">Jeffrey Wescott</a>
  */
 public class MSSQLDelegate extends StdJDBCDelegate {
      * <p>
      * Create new MSSQLDelegate instance.
      * </p>
-     * 
+     *
      * @param log
      *          the logger to use during execution
      * @param tablePrefix
      * special handling for BLOBs. The default implementation uses standard
      * JDBC <code>java.sql.Blob</code> operations.
      * </p>
-     * 
+     *
      * @param rs
      *          the result set, already queued to the correct row
      * @param colName
         throws ClassNotFoundException, IOException, SQLException {
         InputStream binaryInput = rs.getBinaryStream(colName);
 
-        if(binaryInput == null) {
+        if(binaryInput == null || binaryInput.available() == 0) {
             return null;
         }
 

src/java/org/quartz/impl/jdbcjobstore/PointbaseDelegate.java

-/* 
- * Copyright 2004-2005 OpenSymphony 
- * 
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not 
- * use this file except in compliance with the License. You may obtain a copy 
- * of the License at 
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0 
- *   
- * Unless required by applicable law or agreed to in writing, software 
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 
- * License for the specific language governing permissions and limitations 
+/*
+ * Copyright 2004-2005 OpenSymphony
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy
+ * of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
  * under the License.
- * 
+ *
  */
 
 /*
  * <p>
  * This is a driver delegate for the Pointbase JDBC driver.
  * </p>
- * 
+ *
  * @author Gregg Freeman
  */
 public class PointbaseDelegate extends StdJDBCDelegate {
      * <p>
      * Create new PointbaseJDBCDelegate instance.
      * </p>
-     * 
+     *
      * @param logger
      *          the logger to use during execution
      * @param tablePrefix
      * <p>
      * Create new PointbaseJDBCDelegate instance.
      * </p>
-     * 
+     *
      * @param logger
      *          the logger to use during execution
      * @param tablePrefix
      * <p>
      * Insert the job detail record.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param job
      * <p>
      * Update the job detail record.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param job
         ByteArrayOutputStream baos = serializeJobData(trigger.getJobDataMap());
         int len = baos.toByteArray().length;
         ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
-        
+
         PreparedStatement ps = null;
 
         int insertResult = 0;
             ps.setInt(14, trigger.getMisfireInstruction());
             ps.setBinaryStream(15, bais, len);
             ps.setInt(16, trigger.getPriority());
-            
+
             insertResult = ps.executeUpdate();
         } finally {
             closeStatement(ps);
 
         return insertResult;
     }
-    
+
     public int updateTrigger(Connection conn, Trigger trigger, String state,
             JobDetail jobDetail) throws SQLException, IOException {
 
         ByteArrayOutputStream baos = serializeJobData(trigger.getJobDataMap());
         int len = baos.toByteArray().length;
         ByteArrayInputStream bais = new ByteArrayInputStream(baos.toByteArray());
-                
+
         PreparedStatement ps = null;
 
         int insertResult = 0;
 
         try {
             ps = conn.prepareStatement(rtp(UPDATE_TRIGGER));
-                
+
             ps.setString(1, trigger.getJobName());
             ps.setString(2, trigger.getJobGroup());
             setBoolean(ps, 3, trigger.isVolatile());
             ps.setBigDecimal(10, new BigDecimal(String.valueOf(endTime)));
             ps.setString(11, trigger.getCalendarName());
             ps.setInt(12, trigger.getMisfireInstruction());
-            
+
             ps.setInt(13, trigger.getPriority());
             ps.setBinaryStream(14, bais, len);
             ps.setString(15, trigger.getName());
      * <p>
      * Update the job data map for the given job.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param job
      * <p>
      * Insert a new calendar.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param calendarName
      * <p>
      * Update a calendar.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param calendarName
      * special handling for BLOBs. The default implementation uses standard
      * JDBC <code>java.sql.Blob</code> operations.
      * </p>
-     * 
+     *
      * @param rs
      *          the result set, already queued to the correct row
      * @param colName
 
         InputStream binaryInput = new ByteArrayInputStream(binaryData);
 
-        if (null != binaryInput) {
+        if (null != binaryInput && binaryInput.available() != 0) {
             ObjectInputStream in = new ObjectInputStream(binaryInput);
             try {
                 obj = in.readObject();
      * special handling for BLOBs for job details. The default implementation
      * uses standard JDBC <code>java.sql.Blob</code> operations.
      * </p>
-     * 
+     *
      * @param rs
      *          the result set, already queued to the correct row
      * @param colName

src/java/org/quartz/impl/jdbcjobstore/StdJDBCDelegate.java

-/* 
- * Copyright 2004-2005 OpenSymphony 
- * 
- * Licensed under the Apache License, Version 2.0 (the "License"); you may not 
- * use this file except in compliance with the License. You may obtain a copy 
- * of the License at 
- * 
- *   http://www.apache.org/licenses/LICENSE-2.0 
- *   
- * Unless required by applicable law or agreed to in writing, software 
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT 
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the 
- * License for the specific language governing permissions and limitations 
+/*
+ * Copyright 2004-2005 OpenSymphony
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License"); you may not
+ * use this file except in compliance with the License. You may obtain a copy
+ * of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+ * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+ * License for the specific language governing permissions and limitations
  * under the License.
- * 
+ *
  */
 
 /*
  * implementations. Subclasses should override only those methods that need
  * special handling for the DBMS driver in question.
  * </p>
- * 
+ *
  * @author <a href="mailto:jeff@binaryfeed.org">Jeffrey Wescott</a>
  * @author James House
  * @author Eric Mueller
 
     /*
      * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-     * 
+     *
      * Data members.
-     * 
+     *
      * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
      */
 
 
     /*
      * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-     * 
+     *
      * Constructors.
-     * 
+     *
      * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
      */
 
      * <p>
      * Create new StdJDBCDelegate instance.
      * </p>
-     * 
+     *
      * @param logger
      *          the logger to use during execution
      * @param tablePrefix
      * <p>
      * Create new StdJDBCDelegate instance.
      * </p>
-     * 
+     *
      * @param logger
      *          the logger to use during execution
      * @param tablePrefix
 
     /*
      * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
-     * 
+     *
      * Interface.
-     * 
+     *
      * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
      */
 
      * <p>
      * Insert the job detail record.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param newState
      * <p>
      * Get the names of all of the triggers that have misfired.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @return an array of <code>{@link
      * <p>
      * Select all of the triggers in a given state.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param state
      * misfired - according to the given timestamp.  No more than count will
      * be returned.
      * </p>
-     * 
+     *
      * @param conn The DB Connection
      * @param count The most misfired triggers to return, negative for all
-     * @param resultList Output parameter.  A List of 
+     * @param resultList Output parameter.  A List of
      *      <code>{@link org.quartz.utils.Key}</code> objects.  Must not be null.
-     *          
+     *
      * @return Whether there are more misfired triggers left to find beyond
      *         the given count.
      */
                     resultList.add(new Key(triggerName, groupName));
                 }
             }
-            
+
             return hasReachedLimit;
         } finally {
             closeResultSet(rs);
             closeStatement(ps);
         }
     }
-    
+
     /**
      * <p>
      * Get the number of triggers in the given states that have
      * misfired - according to the given timestamp.
      * </p>
-     * 
+     *
      * @param conn the DB Connection
      */
     public int countMisfiredTriggersInStates(
      * Get the names of all of the triggers in the given group and state that
      * have misfired.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @return an array of <code>{@link
      * org.quartz.Scheduler}.DEFAULT_RECOVERY_GROUP</code>
      * trigger group.
      * </p>
-     * 
+     *
      * <p>
      * In order to preserve the ordering of the triggers, the fire time will be
      * set from the <code>COL_FIRED_TIME</code> column in the <code>TABLE_FIRED_TRIGGERS</code>
      * on each returned trigger. It is also up to the caller to insert the
      * returned triggers to ensure that they are fired.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @return an array of <code>{@link org.quartz.Trigger}</code> objects
                 jd.put(Scheduler.FAILED_JOB_ORIGINAL_TRIGGER_GROUP, trigGroup);
                 jd.put(Scheduler.FAILED_JOB_ORIGINAL_TRIGGER_FIRETIME_IN_MILLISECONDS, String.valueOf(firedTime));
                 rcvryTrig.setJobDataMap(jd);
-                
+
                 list.add(rcvryTrig);
             }
             Object[] oArr = list.toArray();
      * <p>
      * Delete all fired triggers.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @return the number of rows deleted
      * <p>
      * Insert the job detail record.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param job
      * <p>
      * Update the job detail record.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param job
      * <p>
      * Get the names of all of the triggers associated with the given job.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param jobName
      * <p>
      * Delete all job listeners for the given job.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param jobName
      * <p>
      * Delete the job detail record for the given job.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param jobName
      * <p>
      * Check whether or not the given job is stateful.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param jobName
      * <p>
      * Check whether or not the given job exists.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param jobName
      * <p>
      * Update the job data map for the given job.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param job
      * <p>
      * Associate a listener with a job.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param job
      * <p>
      * Get all of the listeners for a given job.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param jobName
      * <p>
      * Select the JobDetail object for a given job name / group name.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param jobName
      * <p>
      * Select the total number of jobs stored.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @return the total number of jobs stored
      * <p>
      * Select all of the job group names that are stored.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @return an array of <code>String</code> group names
      * <p>
      * Select all of the jobs contained in a given group.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param groupName
      * <p>
      * Insert the base trigger data.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param trigger
         if(trigger.getJobDataMap().size() > 0) {
             baos = serializeJobData(trigger.getJobDataMap());
         }
-        
+
         PreparedStatement ps = null;
 
         int insertResult = 0;
             ps.setInt(14, trigger.getMisfireInstruction());
             setBytes(ps, 15, baos);
             ps.setInt(16, trigger.getPriority());
-            
+
             insertResult = ps.executeUpdate();
         } finally {
             closeStatement(ps);
      * <p>
      * Insert the simple trigger data.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param trigger
      * <p>
      * Insert the cron trigger data.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param trigger
      * <p>
      * Insert the blob trigger data.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param trigger
      * <p>
      * Update the base trigger data.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param trigger
         if(updateJobData && trigger.getJobDataMap().size() > 0) {
             baos = serializeJobData(trigger.getJobDataMap());
         }
-                
+
         PreparedStatement ps = null;
 
         int insertResult = 0;
             } else {
                 ps = conn.prepareStatement(rtp(UPDATE_TRIGGER_SKIP_DATA));
             }
-                
+
             ps.setString(1, trigger.getJobName());
             ps.setString(2, trigger.getJobGroup());
             setBoolean(ps, 3, trigger.isVolatile());
      * <p>
      * Update the simple trigger data.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param trigger
      * <p>
      * Update the cron trigger data.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param trigger
      * <p>
      * Update the blob trigger data.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param trigger
      * <p>
      * Check whether or not a trigger exists.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param triggerName
      * <p>
      * Update the state for a given trigger.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param triggerName
      * Update the given trigger to the given new state, if it is one of the
      * given old states.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB connection
      * @param triggerName
      * Update all triggers in the given group to the given new state, if they
      * are in one of the given old states.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB connection
      * @param groupName
      * Update the given trigger to the given new state, if it is in the given
      * old state.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB connection
      * @param triggerName
      * Update all of the triggers of the given group to the given new state, if
      * they are in the given old state.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB connection
      * @param groupName
      * <p>
      * Update the states of all triggers associated with the given job.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param jobName
      * <p>
      * Delete all of the listeners associated with a given trigger.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param triggerName
      * <p>
      * Associate a listener with the given trigger.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param trigger
      * <p>
      * Select the listeners associated with a given trigger.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param triggerName
      * <p>
      * Delete the simple trigger data for a trigger.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param triggerName
      * <p>
      * Delete the cron trigger data for a trigger.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param triggerName
      * <p>
      * Delete the cron trigger data for a trigger.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param triggerName
      * <p>
      * Delete the base trigger data for a trigger.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param triggerName
      * <p>
      * Select the number of triggers associated with a given job.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param jobName
      * <p>
      * Select the job to which the trigger is associated.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param triggerName
                 job.setJobClass(loadHelper.loadClass(rs
                         .getString(4)));
                 job.setRequestsRecovery(getBoolean(rs, 5));
-                
+
                 return job;
             } else {
                 if (logger.isDebugEnabled()) {
      * <p>
      * Select the triggers for a job
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param jobName
 
             while (rs.next()) {
                 Trigger t = selectTrigger(conn,
-                        rs.getString(COL_TRIGGER_NAME), 
+                        rs.getString(COL_TRIGGER_NAME),
                         rs.getString(COL_TRIGGER_GROUP));
                 if(t != null) {
                     trigList.add(t);
 
         return (Trigger[]) trigList.toArray(new Trigger[trigList.size()]);
     }
-    
+
     public List selectStatefulJobsOfTriggerGroup(Connection conn,
             String groupName) throws SQLException {
         ArrayList jobList = new ArrayList();
      * <p>
      * Select a trigger.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param triggerName
                 } else {
                     map = (Map) getObjectFromBlob(rs, COL_JOB_DATAMAP);
                 }
-                
+
                 Date nft = null;
                 if (nextFireTime > 0) {
                     nft = new Date(nextFireTime);
      * <p>
      * Select a trigger's JobDataMap.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param triggerName
     public JobDataMap selectTriggerJobDataMap(Connection conn, String triggerName,
             String groupName) throws SQLException, ClassNotFoundException,
             IOException {
-        
+
         PreparedStatement ps = null;
         ResultSet rs = null;
 
             if (rs.next()) {
 
                 Map map = null;
-                if (canUseProperties()) { 
+                if (canUseProperties()) {
                     map = getMapFromProperties(rs);
                 } else {
                     map = (Map) getObjectFromBlob(rs, COL_JOB_DATAMAP);
                 }
-                
+
                 rs.close();
                 ps.close();
 
             closeResultSet(rs);
             closeStatement(ps);
         }
-        
+
         return new JobDataMap();
     }
-            
+
 
     /**
      * <p>
      * Select a trigger' state value.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param triggerName
      * <p>
      * Select a trigger' status (state & next fire time).
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param triggerName
      * <p>
      * Select the total number of triggers stored.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @return the total number of triggers stored
      * <p>
      * Select all of the trigger group names that are stored.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @return an array of <code>String</code> group names
      * <p>
      * Select all of the triggers contained in a given group.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param groupName
      * <p>
      * Insert a new calendar.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param calendarName
      * <p>
      * Update a calendar.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param calendarName
      * <p>
      * Check whether or not a calendar exists.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param calendarName
      * <p>
      * Select a calendar.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param calendarName
      * <p>
      * Check whether or not a calendar is referenced by any triggers.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param calendarName
      * <p>
      * Delete a calendar.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param calendarName
      * <p>
      * Select the total number of calendars stored.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @return the total number of calendars stored
      * <p>
      * Select all of the stored calendars.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @return an array of <code>String</code> calendar names
      * <p>
      * Select the next time that a trigger will be fired.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @return the next fire time, or 0 if no trigger will be fired
-     * 
+     *
      * @deprecated Does not account for misfires.
      */
     public long selectNextFireTime(Connection conn) throws SQLException {
      * <p>
      * Select the trigger that will be fired at the given fire time.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param fireTime
 
     /**
      * <p>
-     * Select the next trigger which will fire to fire between the two given timestamps 
+     * Select the next trigger which will fire to fire between the two given timestamps
      * in ascending order of fire time, and then descending by priority.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param noLaterThan
      *          highest value of <code>getNextFireTime()</code> of the triggers (exclusive)
-     * @param noEarlierThan 
+     * @param noEarlierThan
      *          highest value of <code>getNextFireTime()</code> of the triggers (inclusive)
-     *          
+     *
      * @return The next identifier of the next trigger to be fired.
      */
     public Key selectTriggerToAcquire(Connection conn, long noLaterThan, long noEarlierThan)
         ResultSet rs = null;
         try {
             ps = conn.prepareStatement(rtp(SELECT_NEXT_TRIGGER_TO_ACQUIRE));
-            
-            // Try to give jdbc driver a hint to hopefully not pull over 
+
+            // Try to give jdbc driver a hint to hopefully not pull over
             // more than the one row we actually need.
             ps.setFetchSize(1);
             ps.setMaxRows(1);
-            
+
             ps.setString(1, STATE_WAITING);
             ps.setBigDecimal(2, new BigDecimal(String.valueOf(noLaterThan)));
             ps.setBigDecimal(3, new BigDecimal(String.valueOf(noEarlierThan)));
             rs = ps.executeQuery();
-            
+
             if (rs.next()) {
                 return new Key(
                         rs.getString(COL_TRIGGER_NAME),
                         rs.getString(COL_TRIGGER_GROUP));
             }
-            
+
             return null;
         } finally {
             closeResultSet(rs);
             closeStatement(ps);
-        }      
+        }
     }
 
     /**
      * <p>
      * Insert a fired trigger.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param trigger
                 setBoolean(ps, 11, false);
             }
             ps.setInt(12, trigger.getPriority());
-            
+
 
             return ps.executeUpdate();
         } finally {
      * Select the states of all fired-trigger records for a given trigger, or
      * trigger group if trigger name is <code>null</code>.
      * </p>
-     * 
+     *
      * @return a List of FiredTriggerRecord objects.
      */
     public List selectFiredTriggerRecords(Connection conn, String triggerName,
      * Select the states of all fired-trigger records for a given job, or job
      * group if job name is <code>null</code>.
      * </p>
-     * 
+     *
      * @return a List of FiredTriggerRecord objects.
      */
     public List selectFiredTriggerRecordsByJob(Connection conn, String jobName,
      * <p>
      * Select the distinct instance names of all fired-trigger records.
      * </p>
-     * 
+     *
      * <p>
-     * This is useful when trying to identify orphaned fired triggers (a 
-     * fired trigger without a scheduler state record.) 
+     * This is useful when trying to identify orphaned fired triggers (a
+     * fired trigger without a scheduler state record.)
      * </p>
-     * 
+     *
      * @return a Set of String objects.
      */
-    public Set selectFiredTriggerInstanceNames(Connection conn) 
+    public Set selectFiredTriggerInstanceNames(Connection conn)
         throws SQLException {
         PreparedStatement ps = null;
         ResultSet rs = null;
             closeStatement(ps);
         }
     }
-    
+
     /**
      * <p>
      * Delete a fired trigger.
      * </p>
-     * 
+     *
      * @param conn
      *          the DB Connection
      * @param entryId
             closeStatement(ps);
         }
     }
-    
+
     public int insertSchedulerState(Connection conn, String instanceId,
             long checkInTime, long interval)
         throws SQLException {
             ps = conn.prepareStatement(rtp(UPDATE_SCHEDULER_STATE));
             ps.setLong(1, checkInTime);
             ps.setString(2, instanceId);
-        
+
             return ps.executeUpdate();
         } finally {
             closeStatement(ps);
         }
     }
-        
+
     public List selectSchedulerStateRecords(Connection conn, String instanceId)
         throws SQLException {
         PreparedStatement ps = null;
      * Replace the table prefix in a query by replacing any occurrences of
      * "{0}" with the table prefix.
      * </p>
-     * 
+     *
      * @param query
      *          the unsubstitued query
      * @return the query, with proper table prefix substituted
      * Create a serialized <code>java.util.ByteArrayOutputStream</code>
      * version of an Object.
      * </p>
-     * 
+     *
      * @param obj
      *          the object to serialize
      * @return the serialized ByteArrayOutputStream
      * Remove the transient data from and then create a serialized <code>java.util.ByteArrayOutputStream</code>
      * version of a <code>{@link org.quartz.JobDataMap}</code>.
      * </p>
-     * 
+     *
      * @param data
      *          the JobDataMap to serialize
      * @return the serialized ByteArrayOutputStream
             return serializeObject(data);
         } catch (NotSerializableException e) {
             throw new NotSerializableException(
-                "Unable to serialize JobDataMap for insertion into " + 
-                "database because the value of property '" + 
-                getKeyOfNonSerializableValue(data) + 
+                "Unable to serialize JobDataMap for insertion into " +
+                "database because the value of property '" +
+                getKeyOfNonSerializableValue(data) +
                 "' is not serializable: " + e.getMessage());
         }
     }
 
     /**
      * Find the key of the first non-serializable value in the given Map.
-     * 
-     * @return The key of the first non-serializable value in the given Map or 
+     *
+     * @return The key of the first non-serializable value in the given Map or
      * null if all values are serializable.
      */
     protected Object getKeyOfNonSerializableValue(Map data) {
         for (Iterator entryIter = data.entrySet().iterator(); entryIter.hasNext();) {
             Map.Entry entry = (Map.Entry)entryIter.next();
-            
+
             ByteArrayOutputStream baos = null;
             try {
                 serializeObject(entry.getValue());
                 }
             }
         }
-        
+
         // As long as it is true that the Map was not serializable, we should
         // not hit this case.
-        return null;   
+        return null;
     }
-    
+
     /**
      * serialize the java.util.Properties
      */
      */
     protected Properties convertToProperty(Map data) throws IOException {
         Properties properties = new Properties();
-        
+
         for (Iterator entryIter = data.entrySet().iterator(); entryIter.hasNext();) {
             Map.Entry entry = (Map.Entry)entryIter.next();
-            
+
             Object key = entry.getKey();
             Object val = (entry.getValue() == null) ? "" : entry.getValue();
-            
+
             if(!(key instanceof String)) {
-                throw new IOException("JobDataMap keys/values must be Strings " 
-                        + "when the 'useProperties' property is set. " 
+                throw new IOException("JobDataMap keys/values must be Strings "
+                        + "when the 'useProperties' property is set. "
                         + " offending Key: " + key);
             }
-            
+
             if(!(val instanceof String)) {
-                throw new IOException("JobDataMap values must be Strings " 
-                        + "when the 'useProperties' property is set. " 
+                throw new IOException("JobDataMap values must be Strings "
+                        + "when the 'useProperties' property is set. "
                         + " Key of offending value: " + key);
             }
-            
+
             properties.put(key, val);
         }
-        
+
         return properties;
     }
 
      * special handling for BLOBs. The default implementation uses standard
      * JDBC <code>java.sql.Blob</code> operations.
      * </p>
-     * 
+     *
      * @param rs
      *          the result set, already queued to the correct row
      * @param colName
         Object obj = null;
 
         Blob blobLocator = rs.getBlob(colName);
-        if (blobLocator != null) {
+        if (blobLocator != null && blobLocator.length() != 0) {
             InputStream binaryInput = blobLocator.getBinaryStream();
 
             if (null != binaryInput) {
      * special handling for BLOBs for job details. The default implementation
      * uses standard JDBC <code>java.sql.Blob</code> operations.
      * </p>
-     * 
+     *
      * @param rs
      *          the result set, already queued to the correct row
      * @param colName
         return getObjectFromBlob(rs, colName);
     }
 
-    /** 
+    /**
      * @see org.quartz.impl.jdbcjobstore.DriverDelegate#selectPausedTriggerGroups(java.sql.Connection)
      */
     public Set selectPausedTriggerGroups(Connection conn) throws SQLException {
             }
         }
     }
-    
+
 
     /**
      * Sets the designated parameter to the given Java <code>boolean</code> value.
     protected boolean getBoolean(ResultSet rs, String columnName) throws SQLException {
         return rs.getBoolean(columnName);
     }
-    
+
     /**
      * Retrieves the value of the designated column index in the current row as
      * a <code>boolean</code>.
     protected boolean getBoolean(ResultSet rs, int columnIndex) throws SQLException {
         return rs.getBoolean(columnIndex);
     }
-    
+
     /**
      * Sets the designated parameter to the byte array of the given
-     * <code>ByteArrayOutputStream</code>.  Will set parameter value to null if the 
+     * <code>ByteArrayOutputStream</code>.  Will set parameter value to null if the
      * <code>ByteArrayOutputStream</code> is null.
      * This just wraps <code>{@link PreparedStatement#setBytes(int, byte[])}</code>
      * by default, but it can be overloaded by subclass delegates for databases that