
Richard Gerkin committed 9d785dc


Files changed (10)

Acquisition/Acquisition.ipf

 #if exists("AxonTelegraphFindServers") && exists("InitAxon")
 	InitAxon()
 #endif
+// Urban Legend initialization hooks.  	
+#ifdef UL
+	UL#AcqInit()
+#endif
 	Variable /G root:status:acqInit=1
 	string how,init_msg = "Initialized experiment %s @ %s\r"
 	if(defaults)
 	if(currSweep==0)
 		ResetExperimentClock()
 	endif
-#if exists("CameraHome")==6
+#ifdef Img
 	AcquireMovie(now=0)
 #endif
 	if(StartClock(isi,DAQs=DAQ)) // If StartClock returns an error.  
 		default:
 			dfref chanDF=GetChanDF(chan)
 			if(parent<0 && !stringmatch(chanSaveMode,"No Save") && !stringmatch(saveMode,"Nothing"))
-				String destName="sweep"+num2str(currSweepNum)
-				Wave /Z Sweep=chanDF:$destName
+				string destName="sweep"+num2str(currSweepNum)
+				wave /Z Sweep=chanDF:$destName
 				if(WaveExists(Sweep) && WhichListItem(DAQ,MasterDAQ())!=0) // If the sweep exists and this is not the master DAQ.  
 					redimension /n=(-1,dimsize(Sweep,1)+1) Sweep // Add a column to the destination wave.  
 					Sweep[][dimsize(Sweep,1)-1]=InputWave[p] // And fill it.  
 				string timeStamp
 				sprintf timeStamp "TIME=%f",StopMSTimer(-2) // Time since boot. 
 				Note Sweep timeStamp
+#ifdef UL
+				UL#AcqBind(Sweep,"scope")
+#endif
 			endif
 			break
 	endswitch
 	if(InDynamicClamp())
 		UnloadDynamicClampMode()
 	endif
+// Urban Legend stop hooks.
+#ifdef UL
+	UL#AcqStop()
+#endif
 	Button Start title="\\Z10Start", win=$(DAQ+"_Selector")
 	variable /g df:acquiring=0
 End
 //		if(Core#IsCurrprofile("Rick"))
 //			SetAcqModeFromTelegraph(0)
 //		endif
-		
+// Urban Legend start hooks.  	
+#ifdef UL
+	UL#AcqStart()
+#endif
 		Button Start title="\\Z10Stop", win=$(DAQ+"_Selector")
 		StartSweep(DAQ=DAQ)
 	else	
 	
 	// Reset data for each channel.  Only works for current channels.  
 	variable numChannels=GetNumChannels()
-	make /o/df/n=0 chanDFs
+	make /free/df/n=0 chanDFs
 	for(i=0;i<numChannels;i+=1)
 		DAQ=Chan2DAQ(i)
 		dfref daqDF=GetDaqDF(DAQ)
 	if(!v_flag)
 		string full_path = strvarordefault("root:Packages:HDF:path","")
 		if(strlen(full_path))
-			newpath /o hdf_path,full_path
+			newpath /o/q hdf_path,full_path
 			pathinfo hdf_path
 		endif
 		if(!v_flag)
-			newpath /o hdf_path
+			newpath /o/q hdf_path
 		endif
 		path_name = "hdf_path"
 	endif

Basics/Wave Functions.ipf

 		name+=nameofwave(w)+"_"
 	endfor
 	copyscales w,matrix
-	Note matrix,"NAME:"+name
+	Note matrix,"NAME="+name
 	return matrix
 End
 
 #include "Neuralynx Analysis2"
 #endif
 
+#ifdef UL
+#include "UrbanLegend"
+#endif
+
 #endif
 
 #if exists("Core#SetProfile")

Core/Settings.ipf

 		mode=numtype(mode) ? 1 : min(1,mode)
 	endif
 	if(editing)
-		popupmenu $(info.ctrlName) mode=mode, win=$(info.win) // Update mode.  
+		string cmd = "popupmenu /z "+info.ctrlName+" mode="+num2str(mode)+", win="+info.win   
 		string module=stringbykey("MODULE",info.userData)
 		string package=stringfromlist(0,info.ctrlName,"_")
 		EditModule(module,package=package)
 	else
-		string cmd="popupmenu /z "+info.ctrlName+" userData=ReplaceStringByKey(\"MODE\",\""+info.userData+"\",\""+num2str(info.popNum)+"\"), win="+info.win // Update user data. 
-		Execute /Q/P cmd // Must go into operation queue because control will because this function returns control to the normal control handler, which will block updates of user data.   
+		cmd="popupmenu /z "+info.ctrlName+" userData=ReplaceStringByKey(\"MODE\",\""+info.userData+"\",\""+num2str(info.popNum)+"\"), win="+info.win // Update user data. 
 	endif
+	Execute /Q/P cmd // Must go into operation queue because this function returns control to the normal control handler, which will block updates of user data.   
 	return editing
 end
 

IgorExchange/Neuralynx/Load Neuralynx.ipf

 // Things we want to read out of each file's text header.  
 static constant NlxHeaderSize = 16384			// Size of header in version 1.25 and later files
 Structure NlxMetaData // Metadata in the header.  
-	double SampleFreq 
+	double sampleFreq 
 	double bitVolts
 	double fileOpenT[2]
 	double fileCloseT[2]
 	Struct NlxFeature feature[8]
-	double reserved[100]
+	uint32 numRecords
+	uint32 reserved2
+	double reserved[99]
 EndStructure
 
 constant NlxTimestampScale=1000000 // The timestamp units are in microseconds.  
 
 // --------------------------- Neuralynx Menus -------------------------------------
 
-// Add an item to Igor's Load Waves submenu (Data menu)
+// Add an item to Igor's Load Waves submenu (Data menu).
 Menu "Load Waves"
 	SubMenu "Load Neuralynx"
 		"Load Neuralynx Binary Continuous File...", LoadBinaryFile("ncs","")
 
 // ----------------------------------- Neuralynx Loading/Saving -------------------------------------
 
-// Loads the header of a neuralynx file.  Fills the NlxMetaData structure for use by other functions, creates a Name_h wave containing the binary header.  
+// Load a Neuralynx file.  
+Function /s LoadBinaryFile(type,fileName[,pathName,baseName,downSamp,quiet])
+	String type					// File type: continuous "ncs" or tetrode "ntt".   
+	String fileName				// file name, partial path and file name, or full path or "" for dialog.
+	String pathName				// Igor symbolic path name or "" for dialog.
+	String baseName				// Base name for the wave for this channel .  
+	Variable downSamp,quiet
+	
+	downSamp = downsamp>1 ? downsamp : 1
+	
+	strswitch(type)
+		case "ncs":
+			break
+		case "ntt":
+			break
+		case "nev":
+			break
+		default:
+			DoAlert 0, "Unknown format: "+type
+			return "-100" // Exit now so we don't have to deal with the default case again.  
+			break
+	endswitch
+	
+	if(ParamIsDefault(pathName))
+		PathInfo NlxPath
+		if(V_flag)
+			pathName="NlxPath"
+		else
+			pathName="home"
+		endif
+	endif
+
+	if(ParamIsDefault(baseName))
+		baseName=StrVarOrDefault(NlxFolder+"baseNameDefault",fileName)
+	endif
+	
+	String message = "Select a Nlx binary ."+type+" file"
+	Variable refNum
+	Open/R/Z=2/P=$pathName/M=message/T=("."+type) refNum as fileName+"."+type
+	// Save outputs from Open in a safe place.
+	if (V_flag != 0)
+		return num2str(V_flag)			// -1 means user canceled.
+	endif
+	String fullPath = S_fileName
+	String path=RemoveListItem(ItemsInList(fullPath,":")-1,fullPath,":")
+	fileName=StringFromList(ItemsInList(fullPath,":")-1,fullPath,":")
+	fileName=StringFromList(0,fileName,".")
+	if(!strlen(baseName))
+		baseName=fileName
+	endif
+	
+	// Load files with names like "TTA_E8.ntt" into data folders like "root:TTA:E8".  
+	variable i
+	string folder = "root"
+	for(i=0;i<itemsinlist(baseName,"_");i+=1)
+		folder += ":"+stringfromlist(i,baseName,"_")
+		NewDataFolder /O $folder
+	endfor
+	dfref df = $folder
+	string /g df:$"type"=type
+	
+	NewPath /O/Q/Z NlxPath path
+	
+	struct NlxMetaData NlxMetaData
+	duplicate /o LoadHeader(refNum,type,NlxMetaData) df:header
+	string /g df:headerValues=KeyValueHeader(refNum,type)
+	make /o/b/u df:metadata /wave=metadata // Metadata from the header.  
+	StructPut NlxMetaData MetaData // Save the parsed metadata in case we need to export a modified Nlx file.  
+	
+	fstatus refNum
+	if(V_logEOF<=16384) // Has only the header or even less.  
+		printf "%s has no data\r",fullPath
+		close refNum
+		return "-200"
+	else
+		wave data = LoadNlxData(refNum, df)	// Load the sample data
+	endif
+	
+	if(StringMatch(type,"ncs"))
+		variable loadedPoints=numpnts(data)
+		variable delta=deltax(data)
+		if(downSamp>1)
+			delta*=downSamp
+			if(loadedPoints>100000000) // Do it this way if the wave is large so we don't run out of memory.  
+				variable offset=leftx(data)
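+				// Block-average in place: each retained point becomes the mean of the original samples spanning one downsampled interval.  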
+				data[0,dimsize(data,0)/downSamp]=mean(data,offset+p*delta,offset+(p+1)*delta)
+				Redimension /n=(loadedPoints/downSamp,-1) Data
+				SetScale /P x,leftx(data),delta,data
+			else // Otherwise, do it the normal way.  
+				Resample /DOWN=(downSamp) data
+			endif
+		endif
+		// Fix time wave for continuous data so it is a monotonic representation of the time records.  
+		wave /sdfr=df times
+		duplicate /free/d times,temp
+		variable factor=NCSSamples/downsamp
+		redimension /d/n=(numpnts(times)*factor) times
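+		// Linearly interpolate between successive per-frame timestamps to give one timestamp per sample.  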
+		times=temp[floor(p/factor)]+(temp[floor(p/factor)+1]-temp[floor(p/factor)])*(mod(p,factor)/factor)
+	endif
+	
+	strswitch(type)
+		case "nev":
+			variable numSamples = numpnts(Data)
+			break
+		case "ncs":
+			numSamples = numpnts(Data)
+			break
+		case "ntt":
+			numSamples=dimsize(Data,1)
+			break
+	endswitch
+	if(!quiet)
+		printf "Loaded data for %s (%d samples).\r", GetDataFolder(0,df), numSamples
+	endif
+	
+	Close refNum
+	return folder
+End
+
+// Returns the header of a neuralynx file.  
+// Fills the NlxMetaData structure with information parsed from the header.  
 Function /WAVE LoadHeader(refNum, type, NlxMetaData)
 	Variable refNum
 	String type
 End
 
 // Loads the data (everything after the header) from a Neuralynx file.  
-static Function /wave LoadNlxData(refNum, type, baseName, NlxMetaData)
-	Variable refNum
-	String type,baseName
-	Struct NlxMetaData &NlxMetaData
+static Function /wave LoadNlxData(refNum, df)
+	variable refNum
+	dfref df
+	
+	struct NlxMetaData NlxMetaData
+	wave /b/u/sdfr=df metadata
+	structget NlxMetaData metadata
 		
-	FSetPos refNum, NlxHeaderSize													// Go to the end of the header (start of the data).  
+	FSetPos refNum, NlxHeaderSize // Go to the end of the header (start of the data).  
 	FStatus refNum
-	Variable i,j,bytes=V_logEOF-V_filePos
+	variable i,j,bytes=V_logEOF-V_filePos
+	svar /sdfr=df type
 	strswitch(type)
 		case "ncs":
-			Variable numRecords=bytes/(NCSInfoLength+NCSDataLength) // Number of 'NCSSamples' point frames of continuous data plus associated meta-information.  
-			Struct NlxNCSInfo NCSInfo
-			FBinRead/B=3 refNum, NCSInfo						// Read the info for the first frame of this continuous event.  
-			Variable firstFrameT=(NCSInfo.timestamp_low+(2^32)*NCSInfo.timestamp_high)/NlxTimestampScale // Only useful if there are no interruptions in acquisition.  
-			Variable chan=NCSInfo.ChannelNumber
-			FSetPos refNum, NlxHeaderSize+NCSInfoLength+NCSDataLength
-			FBinRead/B=3 refNum, NCSInfo						// Read the info for the second frame of this continuous event.  
-			variable secondFrameT=(NCSInfo.timestamp_low+(2^32)*NCSInfo.timestamp_high)/NlxTimestampScale
-			variable startT=firstFrameT
-			variable deltaT=(secondFrameT-firstFrameT)/NCSSamples
+			variable frameLength = NCSInfoLength+NCSDataLength
+			NlxMetaData.numRecords = bytes/frameLength // Number of 'NCSSamples' point frames of continuous data plus associated meta-information.  
 			break
 		case "ntt":
-			numRecords = bytes/(NTTInfoLength+NTTDataLength) // Number of distinct tetrode records.  
-			Struct NlxNTTInfo NTTInfo
-			FBinRead/B=3 refNum, NTTInfo						// Read the info for the first frame of this tetrode event.  
-			startT=0 										// Each event has its own timestamp.  
-			chan=NTTInfo.SpikeAcqNumber
-			deltaT=1/NlxMetaData.SampleFreq // Sampling frequency given in the header but not in the frame info for tetrode data.  
+			frameLength = NTTInfoLength+NTTDataLength
+			NlxMetaData.numRecords = bytes/frameLength // Number of distinct tetrode records.  
 			break
 		case "nev":
-			numRecords = bytes/(NEVInfoLength) // Number of distinct event records.  
-			Struct NlxNEVInfo NEVInfo
-			FBinRead/B=3 refNum, NEVInfo						// Read the info for the first frame of this tetrode event.  
-			startT=0
-			break
-	endswitch
-
-	if(!strlen(baseName))
-		baseName=type
-	endif
-	String wave_name= CleanupName(baseName,0)
-	Make/o/n=(bytes/numRecords,numRecords) /B/U data // uint8 wave (byte wave).  
-	FSetPos refNum,NlxHeaderSize 								// Go back to the beginning of the data.  
-	FBinRead/B=3 refNum, Data								// Read the rest of the file.  	
-	
-	// Now sort the wheat from the chaff (clean up the data wave to remove bytes containing metainformation, and resize/redimension it.)  
-	IgorizeData(:,type,numRecords)
-	
-	Note Data, "TYPE:"+type+";"
-	strswitch(type)
-		case "ncs":
-			SetScale/P x, startT, deltaT, "s", Data
-			break
-		case "ntt":
-			SetScale/P x, 0, deltaT, "s", Data
+			frameLength = NEVInfoLength
+			NlxMetaData.numRecords = bytes/frameLength // Number of distinct event records.  
 			break
 	endswitch
 	
-	return Data
+	structput NlxMetaData metadata
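+	// Each column of raw will hold one complete frame: its metadata bytes followed by its sample bytes.  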
+	make/o/n=(frameLength,NlxMetaData.numRecords)/b/u df:raw /wave=raw // uint8 wave (byte wave).  
+	FSetPos refNum,NlxHeaderSize 								// Go back to the beginning of the data (end of the header).  
+	FBinRead/B=3 refNum, raw								// Read the rest of the file.  	
+	
+	// Now sort the wheat from the chaff:
+	// -- Clean up the data wave to remove bytes containing metainformation
+	// -- Resize/redimension it.
+	// -- Set all scales.  
+	IgorizeData(df) // Creates "data" among other objects.  
+	wave /sdfr=df data
+	Note data, "TYPE="+type+";"
+	
+	return data
 End
 
-// Takes Igor waves of data and times, created by LoadNlxData, and saves it back into a Neuralynx file.  Assumes there will be already be an intact header in the file that will be overwritten.  
-// Use this if you have done some processing on the data in Igor, like removing bogus spikes or extracting a subset of the data, and now want to re-export it for analysis with Neuralynx tools.  
-static Function /S SaveNlxData(refNum,df)
-	Variable refNum
+// All the Neuralynx data after the header consists of a series of frames.  
+// Each frame contains data and metadata.  
+// This converts the raw frames (metadata+data) into just the data, as well 
+// as a wave of cluster numbers ('clusters') and a wave of times ('times').  
+Function IgorizeData(df)
 	dfref df
 	
-	string type=Nlx#DataType(df)
-	wave /sdfr=df Times,Header
+	svar /sdfr=df type
+	wave /b/u/sdfr=df metadata
+	struct NlxMetaData NlxMetaData
+	StructGet NlxMetaData metadata
+		
+	wave /B/U/sdfr=df raw // The raw data (without the header).  	
+	variable deltaT = 1/NlxMetaData.sampleFreq // Can't trust this because data may have been downsampled and then saved.  
+	variable numRecords = NlxMetaData.numRecords 
+	variable i	
+	duplicate /o raw,df:data /wave=data		
 	
 	strswitch(type)
 		case "ncs":
-			break
-		case "ntt":
-			Variable numRecords = numpnts(Times) // Number of distinct tetrode records.  
-	endswitch
-	
-	wave NlxFormatted=DeIgorizeData(df)
-	FSetPos refNum,0
-	FBinWrite /B=3 refNum, Header
-	FSetPos refNum,NlxHeaderSize 								// Go back to the beginning of the data.  
-	FBinWrite/B=3 refNum, NlxFormatted						// Write the new data.  
-End
-
-// All the Neuralynx data after the header consists of a series of frames.  Each frame contains data and metadata.  This converts the old metadata+data (Raw) into just the data (Raw) as well 
-// as a wave of cluster numbers (_c suffix) and a wave of times (_t suffix).  
-Function IgorizeData(df,type,numRecords)
-	dfref df
-	String type
-	Variable numRecords
-		
-	Variable i
-	//Make /o/n=(numRecords)/I NumValidSamples // Always 512 according to Brian at Nlx.  
-	strswitch(type)
-		case "ncs":
-			Make /o/D/U/n=(numRecords) df:times /wave=Times
+			Make /o/D/U/n=(numRecords) df:times /wave=Times	// This will later be upsampled by a factor of NCSSamples.  
+			Struct NlxNCSInfo NCSInfo
+			StructGet /B=3 NCSInfo raw[0] // Read the info for the first frame of this continuous event.  
+			variable startT=(NCSInfo.timestamp_low+(2^32)*NCSInfo.timestamp_high)/NlxTimestampScale
+			StructGet /B=3 NCSInfo raw[1] // Read the info for the second frame of this continuous event.  
+			variable nextT = (NCSInfo.timestamp_low+(2^32)*NCSInfo.timestamp_high)/NlxTimestampScale
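+			// Infer the per-sample interval from the gap between the first two frame timestamps (each frame holds NCSSamples samples).  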
+			deltaT=(nextT - startT)/NCSSamples
+			for(i=0;i<numRecords;i+=1)
+				StructGet /B=3 NCSInfo, raw[i]
+				variable timestamp=NCSInfo.timestamp_low+(2^32)*NCSInfo.timestamp_high
+				Times[i]=timestamp/NlxTimestampScale
+			endfor
+			DeletePoints /M=0 0,NCSInfoLength,data // Delete the metainformation. 
+			redimension /n=(numRecords*NCSSamples)/w/e=1 data // Convert into one long wave with the appropriate point size.    	
+			Redimension/s data // Change to floating point so we can represent data in volts.
+			data*=NlxMetaData.bitVolts // Convert to volts.  
+			SetScale/P x, startT, deltaT, "s", data // Time scale is useful only if there are no interruptions in acquisition.  
+			SetScale d, -2^(NTTDataSize*8-1), 2^(NTTDataSize*8-1), "V", data	// Note that the data units are volts with minimum and maximum given by bit depth of sampling.  
 			break
 		case "ntt":
 			Make /o/D/U/n=(numRecords) df:times /wave=Times
 			Make /o/W/U/n=(numRecords) df:clusters /wave=Clusters
 			Make /o/I/U/n=(numRecords,8) df:features /wave=Features 
-			break
-		case "nev":
-			Make /o/T/n=(numRecords) df:desc /wave=Events
-			Make /o/W/U/n=(numRecords) df:TTL /wave=TTL
-			Make /o/D/U/n=(numRecords) df:times /wave=Times
-			break
-	endswitch
-	
-	wave Raw=df:Data
-	for(i=0;i<numRecords;i+=1)
-		Variable timestamp=0
-		strswitch(type)
-			case "ncs":
-				Struct NlxNCSInfo NCSInfo
-				StructGet /B=3 NCSInfo, Raw[i]
-				timestamp=NCSInfo.timestamp_low+(2^32)*NCSInfo.timestamp_high
-				Times[i]=timestamp/NlxTimestampScale
-				//NumValidSamples[i]=NCSInfo.NumValidSamples
-				break
-			case "ntt":
+			startT=0 // Each event has its own timestamp.  
+			for(i=0;i<numRecords;i+=1)
 				Struct NlxNTTInfo NTTInfo
 				StructGet /B=3 NTTInfo, Raw[i]
 				timestamp=NTTInfo.timestamp_low+(2^32)*NTTInfo.timestamp_high
 				Times[i]=timestamp/NlxTimestampScale
 				Clusters[i]=NTTInfo.ClassCellNumber
 				Features[i][]=NTTInfo.FeatureParams[q]
-				break
-			case "nev":
+			endfor
+			DeletePoints /M=0 0,NTTInfoLength,data // Delete the metainformation.  
+			Redimension /n=(numpnts(data)/NTTDataSize)/E=1/W data // Convert into one long wave with the appropriate point size.    
+			Duplicate /FREE data, NTTTemp
+			Redimension /n=(NTTSamples,numRecords,4) data // Tetrode number (not sample number) is the first index, so redimensioning in one step doesn't work.  
+			data[][][]=NTTTemp[(p*4)+(128*q)+r] // Maybe use MatrixOp with WaveMap to improve speed?  No, it only supports 2 dimensional waves.  
+			Redimension/S data // Change to floating point so we can represent data in volts.
+			data*=NlxMetaData.bitVolts // Convert to volts.  
+			SetScale/P x, 0, deltaT, "s", data
+			SetScale d, -2^(NTTDataSize*8-1), 2^(NTTDataSize*8-1), "V", Data	// Note that the data units are volts with minimum and maximum given by bit depth of sampling.  
+			break
+		case "nev":
+			Make /o/T/n=(numRecords) df:desc /wave=Events
+			Make /o/W/U/n=(numRecords) df:TTL /wave=TTL
+			Make /o/D/U/n=(numRecords) df:times /wave=Times
+			startT=0
+			for(i=0;i<numRecords;i+=1)
 				Struct NlxNEVInfo NEVInfo
 				StructGet /B=3 NEVInfo, Raw[i]
 				String eventStr=NEVInfo.EventString_low+NEVInfo.EventString_high
 				Events[i]=eventStr
 				TTL[i]=NEVInfo.nttl
 				Times[i]=timestamp/NlxTimestampScale
-				break
-		endswitch
-	endfor
+			endfor
+			// Nothing left to do since TTL values and times are already extracted.    
+			break
+	endswitch
 	
-	strswitch(type)
-			case "ncs":
-				DeletePoints /M=0 0,NCSInfoLength,Raw // Delete the metainformation.  
-				Redimension /n=(numpnts(Raw)/NCSDataSize)/E=1/W Raw // Convert into one long wave with the appropriate point size.  
-				break
-			case "ntt":
-				DeletePoints /M=0 0,NTTInfoLength,Raw // Delete the metainformation.  
-				Redimension /n=(numpnts(Raw)/NTTDataSize)/E=1/W Raw // Convert into one long wave with the appropriate point size.    
-				Duplicate /FREE Raw, NTTTemp
-				Redimension /n=(NTTSamples,numRecords,4) Raw // Tetrode number (not sample number) is the first index, so redimensioning in one step doesn't work.  
-				Raw[][][]=NTTTemp[(p*4)+(128*q)+r] // Maybe use MatrixOp with WaveMap to improve speed?  No, it only supports 2 dimensional waves.  
-				break
-			case "nev":
-				// Nothing left to do since TTL values and times are already extracted.  If you want to do more with the data you should do it in the previous .nev case.  
-				break
-	endswitch
+	KillWaves /z raw
 	// Raw is now clean!  
 End
 
-// The opposite of IgorizeData.  This puts the Data and the Times (but not the clusters) back into a Neuralynx frame format for use with re-exporting Neuralynx data with SaveNlxData.  
-Function /wave DeIgorizeData(df)
-	dfref df
-	
-	Make/FREE/n=0 /B/U NlxFormatted // uint8 wave (byte wave).  
-	string type=DataType(df)	
-	Wave /sdfr=df Data,Times,MetaData
-	Variable i,j,k,numRecords=numpnts(Times)
-	Struct NlxMetaData NlxMetaData
-	
-	Wave /I/U /sdfr=df Features
-	Wave /W/U /sdfr=df Clusters
-	StructGet NlxMetaData MetaData
-	//Make/O/n=(bytes/numRecords,numRecords) /B/U $(wave_name) /WAVE=NxFormatted // uint8 wave (byte wave).
-	//Make /o/n=(numRecords)/I NumValidSamples // Always 512 according to Brian at Nlx.  
-	for(i=0;i<numRecords;i+=1)
-		Variable timestamp=0
-		strswitch(type)
-			case "ncs":
-				break
-			case "ntt":
-				Struct NlxNTTInfo NTTInfo
-				NTTInfo.timestamp_low=mod(Times[i]*NlxTimestampScale,2^32)
-				NTTInfo.timestamp_high=floor(Times[i]*NlxTimestampScale/(2^32))
-//				NTTInfo.SpikeAcqNumber=0 // Ignore this since it doesn't matter.  
-				NTTInfo.ClassCellNumber=Clusters[i]  
-				for(j=0;j<8;j+=1)
-					if(WaveExists(Features))
-						NTTInfo.FeatureParams[j]=Features[i][j]
-					else
-						NTTInfo.FeatureParams[j]=MetaData
-					endif
-				endfor
-				StructPut /B=3 NTTInfo, NlxFormatted[i]
-				break
-			case "nev":
-				break
-		endswitch
-	endfor
-	strswitch(type)
-			case "ncs":
-				break
-			case "ntt":  
-				Duplicate /FREE Data,NTTTemp
-				Redimension /n=(numpnts(Data))/W NTTTemp
-				NTTTemp=Data[floor(mod(p,128)/4)][floor(p/128)][mod(p,4)]/NlxMetaData.bitVolts
-				Redimension /n=(NTTDataSize*NTTSamples*4,numRecords)/E=1/B/U NTTTemp
-				Redimension /n=(NTTInfoLength+NTTSamples*4*NTTDataSize,numRecords) NlxFormatted
-				NlxFormatted[NTTInfoLength,][]=NTTTemp[p-NTTInfoLength][q] // Maybe use MatrixOp with WaveMap to improve speed?    
-				break
-			case "nev":
-				break
-	endswitch
-	return NlxFormatted // Clean is now raw!  
-End
-
-// Load a Neuralynx file.  
-Function /s LoadBinaryFile(type,fileName[,pathName,baseName,downSamp])
-	String type					// File type: continuous "ncs" or tetrode "ntt".   
-	String fileName				// file name, partial path and file name, or full path or "" for dialog.
-	String pathName				// Igor symbolic path name or "" for dialog.
-	String baseName				// Base name for the wave for this channel .  
-	Variable downSamp
-	
-	dfref currFolder=GetDataFolderDFR()
-	downSamp = downsamp>1 ? downsamp : 1
-	
-	strswitch(type)
-		case "ncs":
-			break
-		case "ntt":
-			break
-		case "nev":
-			break
-		default:
-			DoAlert 0, "Unknown format: "+type
-			return "-100" // Exit now so we don't have to deal with the default case again.  
-			break
-	endswitch
-	
-	if(ParamIsDefault(pathName))
-		PathInfo NlxPath
-		if(V_flag)
-			pathName="NlxPath"
-		else
-			pathName="home"
-		endif
-	endif
-
-	if(ParamIsDefault(baseName))
-		baseName=StrVarOrDefault(NlxFolder+"baseNameDefault",fileName)
-	endif
-	
-	String message = "Select a Nlx binary ."+type+" file"
-	Variable refNum
-	//print fileName+"."+type
-	Open/R/Z=2/P=$pathName/M=message/T=("."+type) refNum as fileName+"."+type
-	// Save outputs from Open in a safe place.
-	if (V_flag != 0)
-		return num2str(V_flag)			// -1 means user canceled.
-	endif
-	String fullPath = S_fileName
-	String path=RemoveListItem(ItemsInList(fullPath,":")-1,fullPath,":")
-	fileName=StringFromList(ItemsInList(fullPath,":")-1,fullPath,":")
-	fileName=StringFromList(0,fileName,".")
-	if(!strlen(baseName))
-		baseName=fileName
-	endif
-	
-	// Loaded files with names like "TTA_E8.ntt" into data folders like "TTA:E8" relative to the current folder (usually root).  
-	variable i
-	for(i=0;i<itemsinlist(baseName,"_");i+=1)
-		string folder=stringfromlist(i,baseName,"_")
-		NewDataFolder /O/S $folder
-	endfor
-	dfref df=GetDataFolderDFR()
-	
-	NewPath /O/Q/Z NlxPath path
-	
-	Struct NlxMetaData NlxMetaData
-	Duplicate /o LoadHeader(refNum,type,NlxMetaData) header
-	
-	FStatus refNum
-	if(V_logEOF<=16384) // Has only the header or even less.  
-		printf "%s has no data\r",fullPath
-		Close refNum
-		SetDataFolder currFolder
-		return "-200"
-	else
-		wave Data=LoadNlxData(refNum, type, baseName, NlxMetaData)	// Load the sample data
-	endif
-	
-	string /g $"type"=type
-	string /g headerValues=KeyValueHeader(refNum,type)
-	Make /o metadata // Metadata from the header.  
-	StructPut NlxMetaData MetaData // Save the header file in case we need to export a modified Nlx file.  
-	
-	if(StringMatch(type,"ncs"))
-		Variable loadedPoints=numpnts(Data)
-		Variable delta=deltax(Data)
-		if(downSamp>1)
-			delta*=downSamp
-			if(loadedPoints>100000000) // Do it this way if the wave is large so we don't run out of memory.  
-				Variable offset=leftx(Data)
-				Data[0,dimsize(Data,0)/downSamp]=mean(Data,offset+p*delta,offset+(p+1)*delta)
-				Redimension /n=(loadedPoints/downSamp,-1) Data
-				SetScale /P x,leftx(Data),delta,Data
-			else // Otherwise, do it the normal way.  
-				Resample /DOWN=(downSamp) Data
-			endif
-		endif
-	
-		// Fix time wave for continous data so it is a monotonic representation of the time records.  
-		if(StringMatch(type,"ncs"))
-			Wave Times // Currently just the times for each block of 512 samples.  
-			Duplicate /FREE/D Times,temp
-			Redimension /D/n=(loadedPoints/downSamp) Times
-			Variable factor=NCSSamples/downsamp
-			Times=temp[floor(p/factor)]+(temp[floor(p/factor)+1]-temp[floor(p/factor)])*(mod(p,factor)/factor)
-		endif
-	endif
-	
-	strswitch(type)
-		case "nev":
-			variable numSamples = numpnts(Data)
-			break
-		default: 
-			//Redimension/S Data												// Change to floating point so we can represent data in volts.
-			//Data*=NlxMetaData.bitVolts
-			SetScale d, 0, 0, "V", Data										// Note that the data units are volts
-			strswitch(type)
-				case "ncs":
-					numSamples = numpnts(Data)
-					break
-				case "ntt":
-					numSamples=dimsize(Data,1)
-					break
-			endswitch
-			break
-	endswitch
-	printf "Loaded data for %s (%d samples).\r", GetDataFolder(0), numSamples
-	
-	Close refNum
-	SetDataFolder currFolder
-
-	return getdatafolder(1)
-End
-
 // Using Igor components, overwrite the data component of a Neuralynx file.  
-Function SaveBinaryFile(df[,path,fileName,force])
+Function SaveBinaryFile(df[,pathName,fileName,force])
 	dfref df						// The folder which contains the data to be written to a file.  
-	String path					// Igor symbolic path name or "" for dialog.
+	String pathName			// Igor symbolic path name or "" for dialog.
 	String fileName				// file name, partial path and file name, or full path or "" for dialog.
 	variable force				// Force overwrite without prompting.  
 	
 	
 	Variable err
 	
-	if(ParamIsDefault(path))
-		path="NlxPath"
-		pathinfo $path
+	if(ParamIsDefault(pathName))
+		pathName="NlxPath"
+		pathinfo $pathName
 		if(!strlen(s_path))
 			Do
-				NewPath /O/M="Choose a default folder for Nlx files."/Q $path
+				NewPath /O/M="Choose a default folder for Nlx files."/Q $pathName
 			While(V_flag)
 		endif
 	endif
 	if(ParamIsDefault(fileName))
 		fileName=DF2FileName(df)
 	endif
-	Open /R/Z=1/P=$path/M=message/T=("."+type) refNum as fileName+"."+type
+	Open /R/Z=1/P=$pathName/M=message/T=("."+type) refNum as fileName+"."+type
 	variable fileExists=!V_flag
 	if(fileExists && !force) // File with this name already exists.  
 		DoAlert 1,"Overwrite existing file "+fileName+"?"
 			return -3
 		endif
 	endif
-	Open /P=$path/M=message/T=("."+type) refNum as fileName+"."+type
+	Open /P=$pathName/M=message/T=("."+type) refNum as fileName+"."+type
 	
 	// Save outputs from Open in a safe place.
 	err = V_Flag
 	return 0			// Zero signifies no error.	
 End
 
+// Takes Igor waves of data and times, created by LoadNlxData, and saves it back into a Neuralynx file.  
+// Assumes there will already be an intact header in the file that will be overwritten.  
+// Use this if you have done some processing on the data in Igor, like removing bogus spikes or extracting 
+// a subset of the data, and now want to re-export it for analysis with Neuralynx tools.  
+static Function /S SaveNlxData(refNum,df)
+	Variable refNum
+	dfref df
+	
+	string type=Nlx#DataType(df)
+	wave /sdfr=df Times,Header
+	
+	strswitch(type)
+		case "ncs":
+			break
+		case "ntt":
+			break  
+	endswitch
+	
+	wave Raw = DeIgorizeData(df)
+	FSetPos refNum,0
+	FBinWrite /B=3 refNum, Header
+	FSetPos refNum,NlxHeaderSize 								// Go back to the beginning of the data.  
+	FBinWrite/B=3 refNum, Raw						// Write the new data.  
+End
+
+// The opposite of IgorizeData.  
+// This puts the Data and the Times (but not the clusters) back into a Neuralynx frame format 
+// for use with re-exporting Neuralynx data with SaveNlxData.  
+Function /wave DeIgorizeData(df)
+	dfref df
+	
+	Make/FREE/n=0 /B/U raw // uint8 wave (byte wave).  
+	string type=DataType(df)	
+	wave /sdfr=df Data // Single-precision float, in volts.    
+	wave /D/sdfr=df Times
+	wave /B/U/sdfr=df MetaData
+	wave /B/sdfr=df Header
+	Variable i,j,k
+	Struct NlxMetaData NlxMetaData
+	StructGet NlxMetaData MetaData
+	//Make/O/n=(bytes/numRecords,numRecords) /B/U $(wave_name) /WAVE=NxFormatted // uint8 wave (byte wave).
+	//Make /o/n=(numRecords)/I NumValidSamples // Always 512 according to Brian at Nlx.  
+	
+	Variable timestamp=0
+	strswitch(type)
+		case "ncs":
+			variable numRecords = numpnts(Times)/NCSSamples
+			variable frameLength = NCSInfoLength+NCSDataLength
+			redimension /n=(NCSInfoLength,numRecords) raw
+			Struct NlxNCSInfo NCSInfo
+			for(i=0;i<numRecords;i+=1)
+				NCSInfo.timestamp_low=mod(times[NCSSamples*i]*NlxTimestampScale,2^32)
+				NCSInfo.timestamp_high=floor(times[NCSSamples*i]*NlxTimestampScale/(2^32))
+				NCSInfo.SampleFreq = 1/dimdelta(data,0) // Note that the data may have been downsampled in Igor.  
+				NCSInfo.NumValidSamples = NCSSamples
+				StructPut /B=3 NCSInfo, Raw[i] // Fill NCSInfoLength worth of Raw[i] with information about frame i.  
+			endfor
+			redimension /n=(frameLength,numRecords) raw
+			duplicate /free data,NCSTemp
+			NCSTemp = data/NlxMetaData.bitVolts // Convert back to a 16-bit integer.  
+			redimension /w NCSTemp // Rescale to 16-bit integer wave (rounding off data).  
+			redimension /n=(NCSDataLength,numRecords)/E=1/B/U NCSTemp // Reshape to 2D byte wave. 
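+			// Copy each frame's data bytes into its column of raw, after the NCSInfoLength bytes of frame metadata.  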
+			raw[NCSInfoLength,][]=NCSTemp[p-NCSInfoLength + NCSDataLength*q]
+			break
+		case "ntt":
+			numRecords = NlxMetaData.numRecords // Should also be numpnts(Times)
+			frameLength = NTTInfoLength+NTTDataLength
+			redimension /n=(NTTInfoLength,numRecords) raw
+			wave /I/U /sdfr=df Features
+			wave /W/U /sdfr=df Clusters
+			Struct NlxNTTInfo NTTInfo
+			for(i=0;i<numRecords;i+=1)
+				NTTInfo.timestamp_low=mod(Times[i]*NlxTimestampScale,2^32)
+				NTTInfo.timestamp_high=floor(Times[i]*NlxTimestampScale/(2^32))
+				NTTInfo.ClassCellNumber=Clusters[i] // This appears to set the cluster.  
+				for(j=0;j<8;j+=1)
+					if(WaveExists(Features))
+						NTTInfo.FeatureParams[j]=Features[i][j]
+					else
+						NTTInfo.FeatureParams[j]=MetaData
+					endif
+				endfor
+				StructPut /B=3 NTTInfo, Raw[i] // Fill NTTInfoLength worth of Raw[i] with information about frame i.  
+			endfor
+			redimension /n=(frameLength,numRecords) raw
+			duplicate /free data,NTTTemp
+			Redimension /n=(numpnts(Data))/W NTTTemp
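+			// Re-interleave the samples by channel within each record and convert from volts back to ADC counts.  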
+			NTTTemp=Data[floor(mod(p,128)/4)][floor(p/128)][mod(p,4)]/NlxMetaData.bitVolts
+			Redimension /n=(NTTDataSize*NTTSamples*4,numRecords)/E=1/B/U NTTTemp
+			raw[NTTInfoLength,][]=NTTTemp[p-NTTInfoLength][q] // Maybe use MatrixOp with WaveMap to improve speed?    
+			break
+		case "nev":
+			// Not implemented.  
+			break
+	endswitch
+	
+	// Clean is now raw!  
+	return raw 
+End
+
 // Loads all the neuralynx files of a given type in a directory.  
 Function /s LoadAllBinaryFiles(type,baseName[,pathName,downSamp])
 	String type				// File type.  
 			dfref df=$DFFromMenu()
 			NewPath /O/Q/C NeuralynxSavePath dirName
 			if(strlen(fileName))
-				SaveBinaryFile(df,fileName=fileName,path="NeuralynxSavePath")
+				SaveBinaryFile(df,fileName=fileName,pathName="NeuralynxSavePath")
 			endif
 			break
 		case "View":

IgorExchange/Neuralynx/Neuralynx Analysis.ipf

 		Concat-=offset // Must do this to prevent filtering from producing a ringing transient.  
 		FilterIIR /HI=(hi) Concat
 		Concat+=offset
-		Note Concat, "FilterIIR:"+num2str(hi/dimdelta(Concat,0))
+		Note Concat, "FilterIIR="+num2str(hi/dimdelta(Concat,0))
 	endif
-	Note Concat, "Timestamp:"+num2str(timestamp-cumul_duration)
+	Note Concat, "Timestamp="+num2str(timestamp-cumul_duration)
 	if(sayv)
 		Save /P=SaveLocation Concat as concat_name+".ibw"
 	endif
 		wave /sdfr=df times
 		crossings=times[crossings[p]]
 	endif
-	note /nocr crossings "PHASE:"+num2str(value)
+	note /nocr crossings "PHASE="+num2str(value)
 	return crossings
 End
 

IgorExchange/Neuralynx/Neuralynx Analysis2.ipf

 	for(i=0;i<numpnts(counts);i+=1)
 		colLabels+=getdimlabel(counts,0,i)+";"
 	endfor
-	note /nocr NlxPETH, "COLLABELS:"+collabels
+	note /nocr NlxPETH, "COLLABELS="+collabels
 	return NlxPETH
 end
 
 			PETH_sem=PETH*PETH_cv
 			break
 	endswitch
-	Note PETH, replacestringbykey("PETH_INSTANCE",note(PETH),pethInstance)
-	Note PETH, replacestringbykey("NORMALIZE",note(PETH),normalize)
-	//Note PETH, replacestringbykey("INTERVALS",note(PETH),intervals)
+	Note PETH, replacestringbykey("PETH_INSTANCE",note(PETH),pethInstance,"=")
+	Note PETH, replacestringbykey("NORMALIZE",note(PETH),normalize,"=")
+	//Note PETH, replacestringbykey("INTERVALS",note(PETH),intervals,"=")
 	if(!keepTrials)
 		killwaves /z Trials
 	endif
 			Variable i
 			for(i=0;i<numSegments;i+=1)
 				dfref Segment=df:$("E"+num2str(i))
-				SaveBinaryFile(Segment,fileName=getdatafolder(0,df)+"_E"+num2str(i),path="NeuralynxSavePath")
+				SaveBinaryFile(Segment,fileName=getdatafolder(0,df)+"_E"+num2str(i),pathName="NeuralynxSavePath")
 			endfor
 			break
 	endswitch
 	return ChopEpochs
 End
 
-static Function ChopData(dataDF[,epochsDF,type,killSource])
+static Function ChopData(dataDF[,epochsDF,type,killSource,quiet])
 	dfref dataDF,epochsDF
 	string type
 	variable killSource // To save memory, redimension to zero the data waves from the original folders after chopping.  
+	variable quiet
 	
 	if(paramisdefault(type))
 		type=Nlx#DataType(dataDF)
 			endif
 			count=finish-start+1
 		endif
-		printf "Epoch %d: Start (%d), Finish (%d), # Frames (%d)\r", i,start,finish,count
+		if(!quiet)
+			printf "Epoch %d: Start (%d), Finish (%d), # Frames (%d)\r", i,start,finish,count
+		endif
 		epochDuration[i]=ChopTimes[i+1]-ChopTimes[i]
 		epochEventCount[i]=count
 		Variable rows=dimsize(Data,0)
 End
 
 // Extract events with a particular stimulation message.   
-static Function /df ExtractEvents(eventType[,df,epoch])
+static Function /df ExtractEvents(eventType[,df,epoch,message,offset])
 	string eventType
 	dfref df
 	variable epoch // - Leave unspecified to operate on the Events folder
 				  // - epoch>=0 to operate on that epoch.  
 				  // - epoch=-1 to operate on each epoch.  
+	string message // Value of the 'desc' wave indicating an event.  
+	variable offset // Offset the index of the event occurrence from the index of the message.  
 	
 	variable i,j
 	if(paramisdefault(df))
 		case "stimulus":
 			eventType="stims"
 		case "stim":
-			string message=STIMULUS_MESSAGE
+			if(paramisdefault(message))
+				message = STIMULUS_MESSAGE
+			endif
 			break
 		case "epoch": // Extract events corresponding to the start of recording.  
 		case "epochs":
 			eventType="epochs"
-			message=START_MESSAGE
+			if(paramisdefault(message))
+				message = START_MESSAGE
+			endif
 			break
 		default:
 			if(stringmatch(eventType,"TTL=*"))
 					return NULL
 				else
 					wave /sdfr=df /t desc
-					message=desc[v_value]
+					if(paramisdefault(message))
+						message = desc[v_value]
+					endif
 				endif
 			else
 				DoAlert 0,"Unknown event type: "+eventType
 			string name=stringfromlist(i,waves)
 			wave /z w=df:$name
 			if(waveexists(w))
-				extract /o w,eventDF:$name, stringmatch(Desc,message)
+				extract /o w,eventDF:$name, stringmatch(Desc[p+offset],message)
 				string /g eventDF:type=type
 			endif
 		endfor
 	endif
 	svar /sdfr=events type
 	dfref epochs=events:epochs
-	if(datafolderrefstatus(epochs))
+	if(!datafolderrefstatus(epochs))
 		ExtractEvents("epoch",df=events)
 		dfref epochs=events:epochs
 	endif

Other/Olfaction.ipf

 		if(!v_flag)
 			loadwave /o/q/p=Correlation name+".ibw"
 			wave mu = df:$name
-			note /k/nocr mu replacestringbykey("delete",note(mu),"1")
+			note /k/nocr mu replacestringbykey("delete",note(mu),"1","=")
 		else
 			make /o/d/n=(100,10,10) df:$name /wave=mu
-			note /k/nocr mu replacestringbykey("delete",note(mu),"0")
+			note /k/nocr mu replacestringbykey("delete",note(mu),"0","=")
 		endif
 	else
-		note /k/nocr mu replacestringbykey("delete",note(mu),"0")
+		note /k/nocr mu replacestringbykey("delete",note(mu),"0","=")
 	endif
 	setdatafolder currDF
 	return mu

Users/Rick/Paul.ipf

 #pragma rtGlobals=1		// Use modern global access method.
 
+strconstant pathName = "PaulPath"
+constant epoch_offset = 3 // Column number of epoch 0 in all_experiments_list.  
+
+function All([i,j,k])
+	variable i,j,k
+	
+	string animals = "bee;"
+	string odors = "hpn;hpn0.1;hpn0.01;hpn0.001;hx;hx0.1;nol;iso;lem;lio;bom;6me"
+	string stimuli = "bb"
+	
+	for(i=i;i<itemsinlist(animals);i+=1)
+		Prog("Animal",i,itemsinlist(animals))
+		string animal = stringfromlist(i,animals)
+		for(j=j;j<itemsinlist(odors);j+=1)
+			Prog("Odor",j,itemsinlist(odors))
+			string odor = stringfromlist(j,odors)
+			for(k=0;k<itemsinlist(stimuli);k+=1)
+				Prog("Stimulus",k,itemsinlist(stimuli))
+				string stimulus = stringfromlist(k,stimuli)
+				Init(animal,stimulus,odor)
+				LoadAllEpochs()
+				string files_missing = CheckFileList() 
+				if(strlen(files_missing))
+					printf "Missing these files: %s\r",files_missing
+					return -1
+				endif
+				abort
+				Go()
+			endfor
+			k=0
+		endfor
+		j=0
+	endfor
+end
+
+function Go()
+	CleanData()
+	//MakeAllVTAs()
+	svar /sdfr=root: stimulus
+	FormatData()
+	MakeAllPeriodograms()
+	MakeAllSpectrograms()
+	if(stringmatch(stimulus,"BB"))
+		wave eag_bb = MergeBBs(subtract=1)
+		MergeBBs(subtract=0)
+		wave ticl_bb = root:bb:ticl_bb:x:data
+		EAG_TiCl_Coherence(EAG_bb,TiCl_bb)
+		EAG_TiCl_Coherogram(EAG_bb,TiCl_bb)
+	endif
+end
+
+function Init(animal,stimulus,odor)
+	string animal // e.g. "bee"
+	string stimulus // e.g. "FF" or "BB"
+	string odor // e.g. "hpn" or "hpn0.1"
+	
+	cd root:
+	pathinfo $pathName
+	if(!strlen(s_path))
+		newpath /o/q/m="Choose the data path" $pathName
+	endif
+	
+	wave /t/sdfr=root: w=all_experiments_list
+	make /o/t/n=0 active_experiments_list
+	string /g root:$"animal" = animal
+	string /g root:$"stimulus" = stimulus
+	string /g root:$"odor" = odor
+	
+	string species_list = "bee:MS_Apis mellifera;TiCl:MS_photodetector + TiCl;"
+	string stimulus_list = "FF:;BB:BB"
+	variable i,j,k,n=0
+	for(i=0;i<dimsize(w,0);i+=1)
+		string species = stringbykey(animal,species_list)
+		if(stringmatch(w[i][1],species)) // If this experiment corresponds to this species.  
+			n+=1
+			redimension /n=(n,3) active_experiments_list // File name, odor epochs, nearest blank epochs.  
+			active_experiments_list[n-1][0] = w[i][0] // File name.  
+			active_experiments_list[n-1][1] = "" // Epoch list.  
+		else
+			continue
+		endif
+		string stim = stringbykey(stimulus,stimulus_list)
+		if(!strlen(odor) && stringmatch(stim,""))
+			stim = "FF"
+		endif
+		for(j=0;j<dimsize(w,1);j+=1)
+			if(stringmatch(w[i][j],stim+odor) || stringmatch(w[i][j],stim+" "+odor))
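+				// Column j of the experiment table corresponds to epoch folder E(j-epoch_offset).  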
+				active_experiments_list[n-1][1] += "E"+num2str(j-epoch_offset)//+";"
+				for(k=0;k<=3;k+=1)
+					if(stringmatch(w[i][j+k],stim+"Blank") || stringmatch(w[i][j+k],stim+" Blank")) // If there is a blank k epochs after it.  
+						active_experiments_list[n-1][2] += "E"+num2str(j+k-epoch_offset)//+";"
+						break
+					endif
+					if(stringmatch(w[i][j-k],stim+"Blank") || stringmatch(w[i][j-k],stim+" Blank")) // If there is a blank k epochs before it.  
+						active_experiments_list[n-1][2] += "E"+num2str(j-k-epoch_offset)//+";"
+						break
+					endif
+				endfor
+				break // Ignore other matching epochs.  
+			endif
+		endfor
+		if(strlen(active_experiments_list[n-1][1])==0) // If there were no epochs matching these conditions.  
+			n-=1
+			redimension /n=(n,3) active_experiments_list // Remove this experiment from the list.  
+		endif
+	endfor
+end	
+
+function /s Environment()
+	svar /sdfr=root: animal,stimulus,odor
+	
+	return animal+"_"+stimulus+"_"+odor	
+end
+
+function DownsampleAllNCS(downSamp[,force])
+	variable downSamp
+	variable force // Make a downsampled version even when one already exists.  
+	
+	variable i
+	do
+		string fileName = IndexedFile($pathName,i,".ncs")
+		fileName = removeending(fileName,".ncs")
+		if(!strlen(fileName))
+			break
+		elseif(!stringmatch(fileName,"*ds"))
+			string ds_name = fileName + "ds.ncs"
+			GetFileFolderInfo /P=$pathName /Q /Z=1 ds_name
+			if(v_flag || force) // No downsampled version.  
+				dfref df = DownsampleNCS(fileName,downSamp)
+				KillDataFolder /z df
+			endif
+		endif
+		i+=1
+	while(1)
+end
+
+function /df DownsampleNCS(fileName,downSamp)
+	string fileName
+	variable downSamp
+	
+	string folder = LoadBinaryFile("ncs",fileName,pathName=pathName,downSamp=downSamp)
+	dfref df = $folder
+	SaveBinaryFile(df,pathName=pathName,fileName=fileName + "ds",force=1)
+	return df
+end
+
+function /s CheckFileList()
+	variable i,j,k
+	wave /t/sdfr=root: list = active_experiments_list
+	string kinds = "_EAGds,ncs;_Events,nev"
+	string files = ""
+	for(i=0;i<dimsize(list,0);i+=1)
+		for(j=0;j<itemsinlist(kinds);j+=1)
+			string kind = stringfromlist(j,kinds)
+			string suffix = stringfromlist(0,kind,",")
+			string type = stringfromlist(1,kind,",")
+			string fileName = list[i][0]+suffix+"."+type
+			GetFileFolderInfo /P=$pathName /Q /Z fileName
+			if(v_flag)
+				files += fileName+";"
+			endif
+		endfor
+	endfor
+	return files
+end
+
+function LoadAllEpochs()
+	cd root:
+	variable i,j,k
+	wave /t/sdfr=root: all_list = all_experiments_list
+	wave /t/sdfr=root: list = active_experiments_list
+	string kinds = "_EAGds,ncs;_Events,nev"
+	for(i=0;i<dimsize(list,0);i+=1)
+		Prog("Load",i,dimsize(list,0))
+		if(!stringmatch(list[i][0],"ps121125d"))
+			//continue
+		endif
+		for(j=0;j<itemsinlist(kinds);j+=1)
+			string kind = stringfromlist(j,kinds)
+			string suffix = stringfromlist(0,kind,",")
+			string type = stringfromlist(1,kind,",")
+			string fileName = list[i][0]+suffix
+			printf "Loading %s\r",fileName
+			//string folder = removeending("root:"+fileName,"ds")
+			string folder = LoadBinaryFile(type,fileName,baseName=removeending(fileName,"ds"),pathName=pathName,downSamp=1,quiet=1)
+		endfor
+		dfref df = $list[i][0]
+		printf "Chopping %s\r",list[i][0]
+		Chop(df,quiet=1)
+		for(j=0;j<itemsinlist(kinds);j+=1)
+			kind = stringfromlist(j,kinds)
+			suffix = stringfromlist(0,kind,",")
+			printf "Cleaning %s\r",list[i]+suffix
+			dfref df_ = df:$removeending(suffix[1,strlen(suffix)-1],"ds")
+			killwaves /z df_:data,df_:times,df_:TTL // Delete the whole experiment data since we have already extracted the data by epoch.  
+			k=0
+			do
+				string epoch = getindexedobjnamedfr(df_,4,k)
+				if(!strlen(epoch))
+					break
+				elseif(stringmatch(epoch,"epochs"))
+					k+=1
+				else
+					dfref dfi = df_:$epoch
+					if(stringmatch(type,"nev"))
+						wave /sdfr=dfi times
+						if(numpnts(times)<100)
+							string df_name = getdatafolder(1,dfi)
+							printf "%s probably has no usable data.\r", df_name
+							duplicate /o/r=[][0,0] all_experiments_list,experiment_names
+							findvalue /text=(list[i][0])/txop=4 experiment_names
+							variable epochNum = str2num(epoch[1,strlen(epoch)-1])-epoch_offset
+							if(!stringmatch(all_list[v_value][epochNum],"!!*"))
+								string msg
+								sprintf msg,"Epoch %d in experiment %s found to be unusable but not marked as such.\r",epochNum,list[i][0]
+								doalert 0,msg
+							endif
+						endif
+					endif 
+					if(stringmatch(epoch,list[i][1]) || stringmatch(epoch,list[i][2])) // Match the odor epoch or the corresponding blank epoch.  
+						k+=1
+					else
+						killdatafolder /z dfi // Otherwise delete since we don't need it right now.  
+					endif
+				endif
+			while(1)
+		endfor
+	endfor
+	Prog("Load",0,0)
+end
+
+// Some data have errors that need to be removed.  
+function CleanData()
+	variable i,j,k
+	wave /t/sdfr=root: list = active_experiments_list
+	for(i=0;i<dimsize(list,0);i+=1)
+		Prog("Clean",i,dimsize(list,0))
+		dfref df = root:$list[i][0]
+		string odorEpoch = list[i][1]
+		string airEpoch = list[i][2]
+		dfref eventsDF_ = df:Events
+		
+		string epochs = odorEpoch+";"+airEpoch
+		for(j=0;j<itemsinlist(epochs);j+=1)
+			string epoch = stringfromlist(j,epochs)
+			dfref eventsDF = eventsDF_:$epoch
+			wave /t/sdfr=eventsDF desc
+			wave /sdfr=eventsDF times,TTL
+			if(stringmatch(desc[12],"6ms"))
+				deletepoints 0,15,desc,times,TTL
+			endif
+			if(stringmatch(desc[numpnts(desc)-1],"50ms"))
+				redimension /n=(numpnts(desc)-1) desc,times,TTL
+			endif
+		endfor
+	endfor
+	Prog("Clean",0,0)
+end
+
+function MakeAllVTAs()
+	variable i,j,k
+	wave /t/sdfr=root: list = active_experiments_list
+	svar /sdfr=root: stimulus
+	strswitch(stimulus)
+		case "ff":
+			string conditions = "100ms;30ms;15ms;6ms"
+			break
+		case "bb":
+			conditions = ";"
+			break
+		default:
+			conditions = ""
+	endswitch
+	string nths = "-1;0;1"
+	for(i=0;i<dimsize(list,0);i+=1)
+		Prog("VTA",i,dimsize(list,0))
+		dfref df = root:$list[i][0]
+		string odorEpoch = list[i][1]
+		variable odorEpoch_ = str2num(odorEpoch[1,strlen(odorEpoch)-1])
+		string airEpoch = list[i][2]
+		variable airEpoch_ = str2num(airEpoch[1,strlen(airEpoch)-1])
+		printf "Making VTA for %s\r",list[i][0]
+		for(j=0;j<itemsinlist(conditions);j+=1)
+			string condition = stringfromlist(j,conditions)
+			for(k=0;k<itemsinlist(nths);k+=1)
+				variable nth = str2num(stringfromlist(k,nths))
+				//wave vta = MakeVTA(df,odorEpoch_,subtractEpoch=airEpoch_,condition=condition,nth=nth,invert=0)
+				//wave vta = MakeVTA(df,odorEpoch_,condition=condition,nth=nth,invert=0)
+			endfor
+		endfor
+	endfor
+	Prog("VTA",0,0)
+	string dfs = GetDFs()
+	for(j=0;j<itemsinlist(conditions);j+=1)
+			condition = stringfromlist(j,conditions)
+			for(k=0;k<itemsinlist(nths);k+=1)
+				nth = str2num(stringfromlist(k,nths))
+				wave vta = MergeVTAs(dfs,conditions=conditions,nth=nth,subtracted=1,pool_errors=1)
+				VTAStats(vta)
+				wave vta = MergeVTAs(dfs,conditions=conditions,nth=nth,subtracted=0,pool_errors=1)
+				VTAStats(vta)
+			endfor
+	endfor
+end
+
 // Formats data into a matrix. Requires odor_name and odor_code, which defines how e.g. "o1" maps onto an odor name.    
-function TrialOdors(df[,epoch])
+function /wave FormatFF(df,epoch)
 	dfref df
 	variable epoch
 	
-	dfref currDF = getdatafolderdfr()
-	setdatafolder df
-	wave /t odor_name,odor_code
+	//wave /t odor_name,odor_code
 	dfref eventsDF = df:Events
 	dfref eagDF = df:EAG
-	if(!paramisdefault(epoch))
-		dfref eagDF2 = eagDF:$("E"+num2str(epoch))
-		eagDF = eagDF2
-		dfref eventsDF2 = eventsDF:$("E"+num2str(epoch))
-		eventsDF = eventsDF2
-	endif
+	dfref eagDF2 = eagDF:$("E"+num2str(epoch))
+	eagDF = eagDF2
+	dfref eventsDF2 = eventsDF:$("E"+num2str(epoch))
+	eventsDF = eventsDF2
 	wave /t/sdfr=eventsDF desc
-	wave /sdfr=eventsDF times
+	wave /sdfr=eventsDF event_times = times, ttl
 	
-	extract /o times,starts,stringmatch(desc[p],"*ms*") || stringmatch(desc[p],"*cont*")
-	extract /o/t desc,durations,stringmatch(desc[p],"*ms*") || stringmatch(desc[p],"*cont*")
-	extract /free/t desc,code1,stringmatch(desc[p+1],"*ms*") || stringmatch(desc[p+1],"*cont*")
-	extract /free/t desc,code2,stringmatch(desc[p+2],"*ms*") || stringmatch(desc[p+2],"*cont*")
-	duplicate /o/t code1,trial_odor_codes,trial_odor_names
-	trial_odor_codes = selectstring(stringmatch(code2[p],"o*"),code1[p],code2[p]+";"+code1[p]) 
-	variable i
-	for(i=0;i<numpnts(trial_odor_names);i+=1)
-		findvalue /text=trial_odor_codes[i] /txop=4 odor_code
-		if(v_value==-1)
-			print "Couldn't find "+trial_odor_codes[i]
-		endif
-		//print trial_odor_codes[i],v_value
-		trial_odor_names[i] = odor_name[v_value]
-	endfor
+	extract /free event_times,starts,stringmatch(desc[p],"*ms*") || stringmatch(desc[p],"*cont*")
 	
-	wave /sdfr=eagDF times,data
+	wave /sdfr=eagDF data_times = times, data
 	variable x_scale = dimdelta(data,0)
 	variable n_pnts = ceil(1/x_scale)
 	variable n_freqs = 10
-	variable n_odors = ceil(numpnts(starts)/n_freqs)
-	make /free/n=(n_pnts,n_freqs,n_odors) matrix
-	setscale /p x,0,x_scale,matrix
+	if(numpnts(starts)!=n_freqs)
+		string msg
+		sprintf msg,"Number of starts not equal to number of frequencies for %s, epoch %d",getdatafolder(1,df),epoch
+		DoAlert 0,msg
+	endif
+	make /free/n=(n_pnts,n_freqs) indices,data_,times_
+	setscale /p x,0,x_scale,data_,times_
+	variable i
 	for(i=0;i<numpnts(starts);i+=1)
-		variable index = binarysearch(times,starts[i])
-		matrix[][mod(i,10)][floor(i/10)] = data[index+p]
+		variable index = binarysearch(data_times,starts[i])
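+		// Store the sample indices for this trial; data_ and times_ are filled from these after the loop.  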
+		indices[][mod(i,10)] = index+p
 	endfor
-	if(paramisdefault(epoch))
-		duplicate /o matrix $"matrix"
-	else
-		duplicate /o matrix $("matrix_"+num2str(epoch))
-	endif
-	setdatafolder currDF
+	data_ = data[indices[p][q]]
+	times_ = data_times[indices[p][q]]
+	
+	duplicate /o data_ eagDF:data /wave=formatted // Overwrite 1D data wave with 2D data wave (time x condition).  
+	duplicate /o times_ eagDF:times // Overwrite 1D times wave with 2D times wave (time x condition).  
+	return formatted
 end
 
-function MakePeriodograms(df[,epoch,air_subtracted])
+function FormatData()
+	svar /sdfr=root: stimulus
+	variable i,n = GetNumExperiments()
+	for(i=0;i<n;i+=1)
+		Prog("Format",i,n)
+		dfref df = GetDF(i)
+		variable odorEpoch = GetOdorEpoch(i)
+		variable airEpoch = GetAirEpoch(i)
+		printf "Formatting data for %s\r",getdatafolder(1,df)
+		strswitch(stimulus)
+			case "FF":
+				FormatFF(df,odorEpoch)
+				FormatFF(df,airEpoch)
+				break
+			case "BB":
+				FormatBB(df,odorEpoch)
+				FormatBB(df,airEpoch)
+				break
+		endswitch
+	endfor
+	Prog("Format",0,0)
+end
+
+// FormatData() must be run first.  
+function /wave MakePeriodograms(df,epoch[,subtractEpoch])
 	dfref df
-	variable epoch,air_subtracted
+	variable epoch
+	variable subtractEpoch
 	
-	dfref currDF = getdatafolderdfr()
-	setdatafolder df
-	if(air_subtracted)
-		wave matrix = matrix_airsubtracted
-	else
-		if(paramisdefault(epoch))
-			wave matrix
-		else
-			wave matrix = $("matrix_"+num2str(epoch))
-		endif
-	endif
-	variable n_pnts = dimsize(matrix,0)
-	variable n_freqs = dimsize(matrix,1)
-	variable n_odors = dimsize(matrix,2)
+	dfref eagDF_ = df:EAG
+	dfref eagDF = eagDF_:$("E"+num2str(epoch))
+	
+	wave /sdfr=eagDF data
+	variable n_pnts = dimsize(data,0)
+	variable n_freqs = dimsize(data,1)
 	variable seg_length = round(n_pnts/5)
 	seg_length -= mod(seg_length,2)==0 ? 0 : 1
 	variable seg_overlap = round(seg_length*0.99)
 	variable start = n_pnts/10
 	variable i,j
-	make /free/n=(1,n_freqs,n_odors) periodograms
+	string suffix = ""
+	if(!paramisdefault(subtractEpoch))
+		suffix = "_sub"
+	endif
+	make /o/n=(1,n_freqs) eagDF:$("periodograms"+suffix) /wave=periodograms
+	
 	for(i=0;i<n_freqs;i+=1)
 		prog("Freq",i,n_freqs)
-		for(j=0;j<n_odors;j+=1)
-			duplicate /free/r=[][i,i][j,j] matrix trial
-			copyscales /p matrix,trial
-			DSPPeriodogram /NODC=1 /Q /SEGN={(seg_length),(seg_overlap)} /R=[(start),(n_pnts)] /WIN=Hanning trial 
-			wave w_periodogram
-			redimension /n=(numpnts(w_periodogram),-1,-1) periodograms
-			copyscales /p w_periodogram,periodograms
-			periodograms[][i][j] = log(w_periodogram[p])
-			//timefrequency(test,0.2,0.99,maxfreq=100)
-		endfor
+		duplicate /free/r=[][i,i] data trial
+		copyscales /p data,trial
+		DSPPeriodogram /NODC=1 /Q /SEGN={(seg_length),(seg_overlap)} /R=[(start),(n_pnts)] /WIN=Hanning trial 
+		wave w_periodogram
+		redimension /n=(numpnts(w_periodogram),-1) periodograms
+		copyscales /p w_periodogram,periodograms
+		periodograms[][i] = log(w_periodogram[p])
 	endfor
+	killwaves /z w_periodogram
 	redimension /n=(200/dimdelta(periodograms,0),-1,-1) periodograms
-	if(paramisdefault(epoch))
-		duplicate /o periodograms $"periodograms"
-	else
-		duplicate /o periodograms $("periodograms_"+num2str(epoch))
+	
+	if(!paramisdefault(subtractEpoch))
+		wave air_periodograms = MakePeriodograms(df,subtractEpoch)
+		periodograms -= air_periodograms // Periodograms are already log-transformed so subtraction is correct here.  	
+//		for(i=0;i<n_freqs;i+=1)
+//			for(j=0;j<n_odors;j+=1)
+//				duplicate /free/r=[][i,i][j,j] periodograms one_periodogram
+//				wavestats /q/m=1 one_periodogram
+//				periodograms[][i,i][j,j] -= v_max // Normalize so that 0 is the maximum.  
+//			endfor
+//		endfor
 	endif
-	setdatafolder currDF
+	
+	return periodograms
+end
+
+// FormatData() must be run first.  
+function /wave MakeSpectrogram(df,epoch[,subtractEpoch])
+	dfref df
+	variable epoch
+	variable subtractEpoch
+	
+	dfref eagDF_ = df:EAG
+	dfref eagDF = eagDF_:$("E"+num2str(epoch))
+	
+	wave /sdfr=eagDF data
+	variable n_pnts = dimsize(data,0)
+	variable n_freqs = dimsize(data,1)
+	variable seg_length = round(n_pnts/5)
+	seg_length -= mod(seg_length,2)==0 ? 0 : 1
+	variable seg_overlap = round(seg_length*0.99)
+	variable start = n_pnts/10
+	variable i,j
+	string suffix = ""
+	if(!paramisdefault(subtractEpoch))
+		suffix = "_sub"
+	endif
+	
+	duplicate /free data data1D
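+	// Redimensioning to 1D concatenates the columns (conditions) into one long trace for the time-frequency transform.  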
+	redimension /n=(numpnts(data)) data1D
+	setscale /p x,0,dimdelta(data,0),data1D
+	duplicate /o timefrequency(data1D,0.2,0.95,maxfreq=200,logg=1) eagDF:$("spectrogram"+suffix) /wave=spectrogram
+	
+	if(!paramisdefault(subtractEpoch))
+		wave air_spectrogram = MakeSpectrogram(df,subtractEpoch)
+		spectrogram -= air_spectrogram // Spectrograms are already log-transformed so subtraction is correct here.  	
+	endif
+	
+	return spectrogram
+end
+
+function /wave MakeBBPeriodogram(df,epoch[,subtractEpoch])
+	dfref df
+	variable epoch
+	variable subtractEpoch
+	
+	dfref eagDF_ = df:EAG
+	dfref eagDF = eagDF_:$("E"+num2str(epoch))
+	wave /sdfr=eagDF data
+	variable n_pnts = dimsize(data,0)
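+	// Segments span ~1/5 of the record (forced to an even number of points) and overlap by 99%.  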
+	variable seg_length = round(n_pnts/5)
+	seg_length -= mod(seg_length,2)==0 ? 0 : 1
+	variable seg_overlap = round(seg_length*0.99)
+	variable start = n_pnts/10
+	variable i,j
+	string suffix = ""
+	if(!paramisdefault(subtractEpoch))
+		suffix = "_sub"
+	endif
+	
+	DSPPeriodogram /NODC=1 /Q /SEGN={(seg_length),(seg_overlap)} /R=[(start),(n_pnts)] /WIN=Hanning data 
+	wave w_periodogram
+	redimension /n=(200/dimdelta(w_periodogram,0),1) w_periodogram
+	w_periodogram = log(w_periodogram[p])
+	duplicate /o w_periodogram eagDF:$("periodogram"+suffix) /wave=periodogram
+	killwaves /z w_periodogram
+	
+	if(!paramisdefault(subtractEpoch))
+		wave air_periodogram = MakeBBPeriodogram(df,subtractEpoch)
+		periodogram -= air_periodogram // Periodograms are already log-transformed so subtraction is correct here.  	
+	endif
+	
+	return periodogram
+end
+
+function MakeAllPeriodograms()
+	wave /t/sdfr=root: list = active_experiments_list
+	svar /sdfr=root: stimulus
+	strswitch(stimulus)
+		case "ff":
+			string conditions = "100ms;30ms;15ms;6ms"
+			break
+		case "bb":
+			conditions = ";"
+			break
+		default:
+			conditions = ""
+	endswitch
+	variable i
+	for(i=0;i<dimsize(list,0);i+=1)
+		Prog("Periodograms",i,dimsize(list,0))
+		dfref df = root:$list[i][0]
+		string odorEpoch = list[i][1]
+		variable odorEpoch_ = str2num(odorEpoch[1,strlen(odorEpoch)-1])
+		string airEpoch = list[i][2]
+		variable airEpoch_ = str2num(airEpoch[1,strlen(airEpoch)-1])
+		printf "Making periodogram for %s\r",list[i][0]
+		MakePeriodograms(df,odorEpoch_,subtractEpoch=airEpoch_)
+		MakePeriodograms(df,odorEpoch_)
+	endfor
+	Prog("Periodograms",0,0)
+	string dfs = GetDFs()
+	wave periodogram = MergePeriodograms(dfs,subtracted=1)
+	PeriodogramStats(periodogram)
+	wave periodogram = MergePeriodograms(dfs,subtracted=0)
+end
+
+function MakeAllSpectrograms()
+	wave /t/sdfr=root: list = active_experiments_list
+	svar /sdfr=root: stimulus
+	strswitch(stimulus)
+		case "ff":
+			string conditions = "100ms;30ms;15ms;6ms"
+			break
+		case "bb":
+			conditions = ";"
+			break
+		default:
+			conditions = ""
+	endswitch
+	variable i
+	for(i=0;i<dimsize(list,0);i+=1)
+		Prog("Spectrograms",i,dimsize(list,0))
+		dfref df = root:$list[i][0]
+		string odorEpoch = list[i][1]
+		variable odorEpoch_ = str2num(odorEpoch[1,strlen(odorEpoch)-1])
+		string airEpoch = list[i][2]
+		variable airEpoch_ = str2num(airEpoch[1,strlen(airEpoch)-1])
+		printf "Making spectrogram for %s\r",list[i][0]
+		//MakeSpectrogram(df,odorEpoch_,subtractEpoch=airEpoch_)
+		//MakeSpectrogram(df,odorEpoch_)
+	endfor
+	Prog("Spectrograms",0,0)
+	string dfs = GetDFs()
+	wave spectrogram = MergeSpectrograms(dfs,subtracted=1)
+	wave spectrogram = MergeSpectrograms(dfs,subtracted=0)
+end
+
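+// Converts a merged periodogram into a z-like score by dividing each point by its SEM
+// (expects a companion wave named <periodogram>_sem in the same data folder).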
+function PeriodogramStats(periodogram)
+	wave periodogram
+	
+	dfref df = getwavesdatafolderdfr(periodogram)
+	
+	string /g df:$(nameofwave(periodogram)+"_stats")
+	svar /sdfr=df stats = $(nameofwave(periodogram)+"_stats")
+	wave periodogram_sem = df:$(nameofwave(periodogram)+"_sem")
+	duplicate /o periodogram df:periodogram_z /wave=periodogram_z
+	periodogram_z = periodogram/periodogram_sem
 end
 
 function RandomStim(duration)
 			count += 1
 			state = 0
 		endif
-		//print t,state
 	while(t<numpnts(stim))
 end
 
 	while(epoch<100)
 end
 
-function Chop(df)
+function Chop(df[,quiet])
 	dfref df
+	variable quiet
 	
 	dfref eagDF = df:EAG
 	dfref eventsDF = df:Events
 	NlxA#ExtractEvents("epochs",df=eventsDF)
 	dfref epochsDF = eventsDF:epochs
-	NlxA#ChopData(df:EAG,epochsDF=epochsDF)
+	wave /sdfr=epochsDF times
+	if(numpnts(times)<=1) // Epoch extraction failed, recording was probably continuous.  
+		NlxA#ExtractEvents("epochs",message="100ms",offset=2,df=eventsDF)
+	endif
+	NlxA#ChopData(df:EAG,epochsDF=epochsDF,quiet=quiet)
 	NlxA#ChopEvents(events=eventsDF) // Uses the epochs folder automatically.  
 end
 
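+// Computes the average response aligned to valve-opening events in the given epoch.
+// subtractEpoch subtracts the VTA of a background epoch; condition restricts the average to one
+// stimulus label (e.g. "100ms"); nth keeps only the nth valve opening of each trial; no_save
+// returns the result without writing vta* waves into the epoch folder.
+// Example (hypothetical folder and epoch numbers): MakeVTA(root:moth1,2,subtractEpoch=1,condition="100ms")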
-function MakeVTA(df[,epoch,t_min,t_max,e_min,e_max,condition,nth,resampled,invert]) // Valve-triggered average. 
+function /wave MakeVTA(df,epoch[,subtractEpoch,t_min,t_max,e_min,e_max,condition,nth,resampled,invert,no_save]) // Valve-triggered average. 
 	dfref df
-	variable epoch,resampled,invert,nth,t_min,t_max,e_min,e_max
+	variable epoch,subtractEpoch,resampled,invert,nth,t_min,t_max,e_min,e_max,no_save
 	string condition
 	
-	dfref currDF = getdatafolderdfr()
-	dfref eagDF = df:EAG
-	dfref eventsDF = df:Events
+	dfref eagDF_ = df:EAG
+	dfref eventsDF_ = df:Events
+	dfref eagDF = eagDF_:$("E"+num2str(epoch))
+	dfref eventsDF = eventsDF_:$("E"+num2str(epoch))
 	
 	// Set defaults.  
-	if(!paramisdefault(epoch))
-		dfref eagDF2 = eagDF:$("E"+num2str(epoch))
-		eagDF = eagDF2
-		dfref eventsDF2 = eventsDF:$("E"+num2str(epoch))
-		eventsDF = eventsDF2
-	endif
 	if(!paramisdefault(e_min))
 		dfref e_minDF = eagDF:$("E"+num2str(e_min))
 		wave /sdfr=e_minDF times
 	else
 		t_max = paramisdefault(t_max) ? Inf : t_max
 	endif
+	nth = paramisdefault(nth) ? -1 : nth
 	
-	// Find valve-opening times.  
+	// Find valve-opening times. 
 	wave /t/sdfr=eventsDF desc
 	wave /sdfr=eventsDF times
+	wave event_times = times
 	make /free/n=0 valve_open_times
 	if(!paramisdefault(condition))
-		variable i = 0, set = 0
+		variable i = 0, trial = 0
 		do
 			variable rank = 0
 			i = FindValue2T(desc,condition,start=i)
 			do
-				if(stringmatch(desc[i],"o*") || stringmatch(desc[i],"*ms*") || stringmatch(desc[i],"*cont*"))
-					redimension /n=(max(dimsize(valve_open_times,0),rank+1),set+1) valve_open_times
-					valve_open_times[rank][set] = times[i]
-					rank += 1
+				if(i<numpnts(desc))			
+					if(stringmatch(desc[i],"o*") || stringmatch(desc[i],"*ms*") || stringmatch(desc[i],"*cont*"))
+						redimension /n=(max(dimsize(valve_open_times,0),rank+1),trial+1) valve_open_times
+						valve_open_times[rank][trial] = event_times[i]
+						rank += 1
+					endif
+					i += 1
+				else
+					break
 				endif
-				i += 1
-			while(!stringmatch(desc[i+1],"*ms*") && !stringmatch(desc[i+1],"*cont*") && i<numpnts(desc))
-			set += 1
+			while(!stringmatch(desc[min(i+1,numpnts(desc)-1)],"*ms*") && !stringmatch(desc[min(i+1,numpnts(desc)-1)],"*cont*")) // Clamp the index so the last event can't cause an out-of-range read.  
+			trial += 1
 		while(i<numpnts(desc))
 	else
 		rank = 0
 		do
 			if(stringmatch(desc[i],"o*"))
 				redimension /n=(max(dimsize(valve_open_times,0),rank+1),1) valve_open_times
-				valve_open_times[rank][0] = times[i]
+				valve_open_times[rank][0] = event_times[i]
 				rank += 1
 			endif
 			i+=1
 		while(i<numpnts(desc))
 	endif
 	variable n_opens = dimsize(valve_open_times,0)
-	variable n_sets = max(1,dimsize(valve_open_times,1))
-	printf "%d candidates... ", n_opens*n_sets
+	variable n_trials = max(1,dimsize(valve_open_times,1))
+	if(n_trials !=1 && n_trials != 10)
+		string msg
+		sprintf msg,"Found %d trials in epoch %d of %s.\r",n_trials,epoch,getdatafolder(1,df)
+		doalert 0,msg
+	endif
+	printf "%d candidates... ", n_opens*n_trials
 	
 	// Filtering.  
-	wave /sdfr=eagDF data,times
+	wave /sdfr=eagDF data_times = times, data
 	variable x_scale = dimdelta(data,0)
 	duplicate /free data,copy
 	variable median_width = round(0.001/x_scale)
 	
 	// Make the VT matrix.  
 	variable n_pnts = 2000
-	make /free/n=(n_pnts) matrix
+	make /free/n=(n_pnts,1) matrix
 	i = 0
-	for(set=0;set<n_sets;set+=1)
+	for(trial=0;trial<n_trials;trial+=1)
 		for(rank=0;rank<n_opens;rank+=1)
 			if(!paramisdefault(nth) && nth>=0 && rank!=nth)
 				continue
 			endif
-			variable t = valve_open_times[rank][set]
+			variable t = valve_open_times[rank][trial]
 			if(t < t_min || t > t_max)
 				continue
 			endif
-			variable index =binarysearch(times,t)
+			wave trial_times = col(data_times,trial)
+			variable index = binarysearch(trial_times,t)
 			redimension /n=(-1,i+1) matrix
-			matrix[][i] = copy[index+p-n_pnts/2]
+			matrix[][i] = copy[index+p-n_pnts/2][trial]
 			i += 1
 		endfor
 	endfor
 	printf "%d selected.\r", i
-	setdatafolder eagDF
 	
 	// Upsample.  
 	if(resampled)
 		suffix2 = "_"+num2str(nth)+"th"
 	endif
 	string suffix = suffix1 + suffix2
-	duplicate /o vta,$cleanupname("vta"+suffix,0)
-	duplicate /o vta_sd,$cleanupname("vta"+suffix+"_sd",0)
-	duplicate /o vta_sem,$cleanupname("vta"+suffix+"_sem",0)
-	
-	setdatafolder currDF
+	if(!paramisdefault(subtractEpoch))
+		wave background = MakeVTA(df,subtractEpoch,condition=condition,nth=nth,resampled=resampled,invert=invert,no_save=1)
+		vta -= background
+		suffix += "_sub"
+	endif
+	if(!no_save)
+		duplicate /o vta,eagDF:$cleanupname("vta"+suffix,0)
+		duplicate /o vta_sd,eagDF:$cleanupname("vta"+suffix+"_sd",0)
+		duplicate /o vta_sem,eagDF:$cleanupname("vta"+suffix+"_sem",0)
+	endif
+	return vta
 end
 
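+// Averages per-experiment VTAs across data folders.  Each item of dfs is expected to be a
+// comma-separated "odorDF,airDF" pair (only the odor folder is read here); conditions is a
+// semicolon-separated list of stimulus labels.  The merged vta/vta_sd/vta_sem waves are written
+// under root:vta.
+// Example (hypothetical folders): MergeVTAs("root:moth1,root:moth1_air;",conditions="100ms;30ms",subtracted=1)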
-function MergeVTAs(dfs[,condition,nth,pool_errors,merge_num])
+function /wave MergeVTAs(dfs[,conditions,nth,subtracted,pool_errors,merge_num])
 	string dfs
-	string condition
+	string conditions
 	variable nth
+	variable subtracted // 1 if they are air subtracted (they should have "_sub" in the name).  
 	variable pool_errors // Compute sd and sem as mean across sd/sem's rather than sd/sem across means.   
 	variable merge_num
 	
 	dfref currDF = getdatafolderdfr()
-	string suffix1 = "", suffix2 = ""
-	if(!paramisdefault(condition))
-		suffix1 = "_"+condition
-	endif
+	string suffix1 = "", suffix2 = "", suffix3 = ""
 	if(!paramisdefault(nth) && nth>=0)
 		suffix2 = "_"+num2str(nth)+"th"
 	endif
-	string suffix = suffix1 + suffix2
+	if(subtracted)
+		suffix3 = "_sub"
+	endif
+	string suffix = suffix2 + suffix3
+	variable i,j
+	newdatafolder /o/s root:vta
+	newdatafolder /o/s $Environment()
+	if(paramisdefault(merge_num))
+		newdatafolder /o/s $("x"+suffix)
+	else
+		newdatafolder /o/s $("N"+num2str(merge_num))
+	endif
+	string /g sources = dfs
+	string /g $"conditions" = conditions
+	variable /g $"nth" = nth
+	variable /g $"subtracted" = subtracted
+	variable /g $"pool_errors" = pool_errors
+	make /o/n=0 vta,vta_sd,vta_sem
+	
+	conditions = selectstring(!paramisdefault(conditions),";",conditions)
+	for(j=0;j<itemsinlist(conditions);j+=1)
+		string condition = stringfromlist(j,conditions)
+		if(strlen(condition))
+			suffix1 = "_"+condition
+		endif
+		suffix = suffix1 + suffix2 + suffix3
+		make /free/n=0 vta_,vta_sd_,vta_sem_	
+		for(i=0;i<itemsinlist(dfs);i+=1)
+			string df_names = stringfromlist(i,dfs)
+			string odor_df_name = stringfromlist(0,df_names,",")
+			dfref df = $odor_df_name
+			wave /sdfr=df vtai = $("vta"+suffix)
+			wave /sdfr=df vtai_sd = $("vta"+suffix+"_sd")
+			wave /sdfr=df vtai_sem = $("vta"+suffix+"_sem")
+			concatenate {vtai},vta_
+			concatenate {vtai_sd},vta_sd_
+			concatenate {vtai_sem},vta_sem_
+		endfor	
+		if(pool_errors)
+			matrixop /free vta_sd__ = meancols(vta_sd_^t)
+			matrixop /free vta_sem__ = meancols(vta_sem_^t)/sqrt(i)
+		else
+			matrixop /free vta_sd__ = sqrt(varcols(vta_^t)^t)
+			matrixop /free vta_sem__ = vta_sd__/sqrt(i)
+		endif
+		matrixop /free vta__ = meancols(vta_^t)
+		wavestats /q/r=[0,dimsize(vta__,0)/2] vta__
+		vta__ -= v_avg
+		concatenate {vta__},vta
+		concatenate {vta_sd__},vta_sd
+		concatenate {vta_sem__},vta_sem
+	endfor
+	copyscales vtai,vta,vta_sd,vta_sem
+	if(paramisdefault(merge_num))
+		printf "Merged into vta%s\r",suffix
+	else
+		printf "Merged into number %d\r", merge_num
+	endif
+	setdatafolder currDF
+	
+	return vta
+end
+
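+// Trims one broadband epoch's data/times to the span between the first and last stimulus
+// (TTL bit 15) events, and saves the trimmed copies as data_/times_ in the epoch folder.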
+function /wave FormatBB(df,epoch)
+	dfref df 
+	variable epoch
+	
+	dfref eagDF_ = df:EAG
+	dfref eagDF = eagDF_:$("E"+num2str(epoch))
+	dfref eventsDF_ = df:Events
+	dfref eventsDF = eventsDF_:$("E"+num2str(epoch))
+	
+	wave /sdfr=eagDF data,data_times=times
+	wave /t/sdfr=eventsDF desc
+	wave /sdfr=eventsDF TTL,events_times=times
+	extract /free events_times,stim_times,(TTL & 32768)>0 && (stringmatch(desc[p],"o*") || stringmatch(desc[p-1],"o*"))
+	variable start = stim_times[0]
+	variable finish = stim_times[numpnts(stim_times)-1]
+	variable first_sample = 1+binarysearch(data_times,start)
+	variable last_sample = binarysearch(data_times,finish)
+	duplicate /free/r=[first_sample,last_sample] data,data_
+	duplicate /free/r=[first_sample,last_sample] data_times,times_
+	
+	duplicate /o data_ eagDF:data_ /wave=formatted // Save the trimmed data in the epoch folder.  
+	duplicate /o times_ eagDF:times_ // Save the trimmed timestamps in the epoch folder.  
+	
+	return formatted
+end
+
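+// Averages the formatted broadband responses across the experiments returned by GetDFs(),
+// optionally subtracting each experiment's air epoch first, and stores bb/bb_sd/bb_sem under
+// root:BB.  Example (hypothetical merge number): MergeBBs(subtract=1,merge_num=1)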
+function /wave MergeBBs([subtract,merge_num])
+	variable subtract // Subtract air epochs.  
+	variable merge_num
+	
+	string dfs = GetDFs()
+	
+	dfref currDF = getdatafolderdfr()
+	string suffix1 = "", suffix2 = "", suffix3 = ""
+	if(subtract)
+		suffix1 = "_sub"
+	endif
+	string suffix = suffix1 + suffix2 + suffix3
+	newdatafolder /o/s root:BB
+	newdatafolder /o/s $Environment()
+	if(paramisdefault(merge_num))
+		newdatafolder /o/s $("x"+suffix)
+	else
+		newdatafolder /o/s $("N"+num2str(merge_num))
+	endif
+	
+	string /g sources = dfs
+	make /o/n=0 bb,bb_sd,bb_sem
 	variable i
-	newdatafolder /o root:merged
-	merge_num = paramisdefault(merge_num) ? floor(abs(enoise(10000))) : merge_num
-	newdatafolder /o/s root:merged:$("N"+num2str(merge_num))
-	string /g sources = dfs
-	make /o/n=0 vta,vta_sd,vta_sem
 	for(i=0;i<itemsinlist(dfs);i+=1)
-		string df_name = stringfromlist(i,dfs)
-		if(stringmatch(df_name[0],":"))
-			df_name = getdatafolder(1,currDF)+df_name[1,strlen(df_name)-1]
+		string df_names = stringfromlist(i,dfs)
+		string odor_df_name = stringfromlist(0,df_names,",")
+		string air_df_name = stringfromlist(1,df_names,",")
+		if(stringmatch(odor_df_name[0],":"))
+			odor_df_name = getdatafolder(1,currDF)+odor_df_name[1,strlen(odor_df_name)-1]
 		endif
-		dfref df = $df_name
-		wave /sdfr=df vtai = $("vta"+suffix)
-		wave /sdfr=df vtai_sd = $("vta"+suffix+"_sd")
-		wave /sdfr=df vtai_sem = $("vta"+suffix+"_sem")
-		concatenate {vtai},vta
-		concatenate {vtai_sd},vta_sd
-		concatenate {vtai_sem},vta_sem
-	endfor	
-	if(pool_errors)
-		matrixop /o vta_sd = meancols(vta_sd^t)
-		matrixop /o vta_sem = meancols(vta_sem^t)/sqrt(i)
+		dfref odorDF = $odor_df_name
+		wave /sdfr=odorDF data
+		duplicate /free data,bbi
+		if(subtract)
+			dfref airDF = $air_df_name		
+			wave /sdfr=airDF data
+			bbi -= data
+		endif
+		if(i>0)
+			if(numpnts(bbi)<dimsize(bb,0))
+				redimension /n=(numpnts(bbi),dimsize(bb,1)) bb
+			elseif(numpnts(bbi)>dimsize(bb,0))
+				redimension /n=(dimsize(bb,0)) bbi
+			endif
+		endif
+		concatenate {bbi},bb
+	endfor
+	matrixop /o bb_sd = sqrt(varcols(bb^t)^t) // Compute errors once, after all experiments are concatenated.  
+	matrixop /o bb_sem = bb_sd/sqrt(i)
+	matrixop /o bb = meancols(bb^t)
+	copyscales bbi,bb,bb_sd,bb_sem
+	if(paramisdefault(merge_num))
+		printf "Merged into bb%s\r",suffix
 	else
-		matrixop /o vta_sd = sqrt(varcols(vta^t)^t)
-		matrixop /o vta_sem = vta_sd/sqrt(i)
+		printf "Merged into number %d\r", merge_num
 	endif
-	matrixop /o vta = meancols(vta^t)
-	copyscales vtai,vta,vta_sd,vta_sem
-	printf "Merged into number %d\r" merge_num
 	setdatafolder currDF
+	
+	return bb
+end
+