################################################################################ #### License and Permissions #### ################################################################################ # Herschel/SPIRE Spectral Feature Finder: A program to extract significant # spectral features from SPIRE spectral data. # Copyright (C) 2020 (authors listed below). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or any # later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see https://www.gnu.org/licenses/gpl-3.0.html # # Acknowledging or Citing the Feature Finder Software or Products: # If you plan to use this notebook for research or work publications we # ask that you please cite the following Feature Finder Paper: # # * R Hopwood, I Valtchanov, L D Spencer, J Scott, C Benson, N Marchili, # N Hładczuk, E T Polehampton, N Lu, G Makiwa, D A Naylor, B G Gom, G Noble, # M J Griffin, The Herschel SPIRE Fourier Transform Spectrometer Spectral # Feature Finder I. 
# The Spectral Feature Finder and Catalogue, Monthly
# Notices of the Royal Astronomical Society, Volume 496, Issue 4, August
# 2020, Pages 4874-4893, https://doi.org/10.1093/mnras/staa1612
#
# * and provide link and refer to DOI https://doi.org/10.5270/esa-lysf2yi
################################################################################
####               FTS Spectral Feature Finder sparse 9/4/20                ####
################################################################################
# Script to find and fit features in SPIRE FTS spectra
#
# The script is set out in the following sections
###### User options ######
# 1. Example obsids for testing
# 2. Input options
# 3. Gof/ToFE options
# 4. Product save options
# 5. Plotting options
######## Functions #######
##Compiled from a separate file
# 6. Import statements
# 7. Plotting
# 8. Products I/O
# 9. GoF/ToFE
# 10. Check fit/redshift
# 11. Feature finding
### Running the script ###
# 12. script execution
################################################################################
#### 1.
Example obsids for testing #### ################################################################################ ##IRAS17578-0400 #obsids = [1342231047] ##NGC6240 (partially resolved lines) #obsids = [1342214831] ##NGC6302 (semi-extended) #obsids = [1342265809] ##NGC7027 #obsids = [1342189124] ##CRL618 #obsids = [1342268302] #obsids = [1342227452] ##AFGL2688 #obsids = [1342245076] ##Arp220 #obsids = [1342190674] ##Neptune #obsids = [1342256096] ##Mrk231 #obsids = [0x50002975] ##Dark from OD209 #obsids = [0x50002972] ##Uranus HR #obsids = [1342259588] ##Uranus LR #obsids = [1342259587] ##Ceres #obsids = [1342249467] ##Hebe #obsids = [1342245850] ##AFGL4106 #obsids = [1342247560] ##CW Leo #obsids = [1342197466] ##IRAS F18293-3413 #obsids = [1342192830,1342192832,1342202270,1342204930] ##IRAS F05189-2524 #obsids = [1342192832] ##IRAS 12474-6119 #obsids = [1342202270] ##02570+6028 #obsids = [1342204930] ##Mrk 273 #bsids = [1342209850] ##Saturn #Takes many hours to run #obsids = [1342198279] ##LR NGC7027 #obsids = [1342247624] #obsids = [1342247107] #obsids = [1342257339] #obsids = [1342245074] ##LR AFGL4206 #obsids = [1342247561] ##LR AFGL2688 #obsids = [1342247104] #obsids = [1342257341] #obsids = [1342259589] ##LR CRL618 #obsids = [1342251298] ##LR Ceres #obsids = [0x500119FA] ##Weird LR Dark #obsids = [1342262859] ##L1689B_on #obsids = [1342191221] ##CY Tau #obsids = [1342250504] ##HR #obsids = [1342189124,1342268302,1342270191,1342190674,1342256096,\ # 1342231047,1342214831,1342265809,1342249467,1342245850] #obsids = [,1342214831, #obsids = [1342189124,1342268302,1342245076,1342256096,0x50002975,0x50002972,\ # 1342259588,1342249467,1342247560,1342197466,1342192830,1342209850] #obsids = [1342202270] ##LR #obsids = [1342191221,1342250504, 1342250503] #obsids = [1342245074] #obsids = [1342255283] #obsids = [1342255277] #RDor #obsids = [1342253669] #VY CMa #obsids = [1342253664] ##LR problem observations #obsids = [0x5000314C] obsids = [1342212319] 
################################################################################ #### 2. Input options #### ################################################################################ #Set useHsa to fetch observations from the archive #Or HIPE will look for a pool useHsa = True #Define the detectors to fit detectors = ['SLWC3','SSWD4'] #Set the resolution of the observation(s) res = 'HR' apod = False #Set printToScreen if you want all features printed to the console printToScreen = 0 #Set the overlap region (GHz) overlap = [944.0,1017.8] #The additional noisy regions (GHz), based on #HR sensitivity > 0.25 Jy #HR overlap > 0.45 Jy noisyRegions = [608.57868974,1504.6583467] ###FINDING PARAMETERS### snrCut = 5.0 #final cut using final noise estimate from full residual snrCap = 500. #Highest SNR allowed for the initial guess snrRange = [4.0,25.0] #(GHz) range either side of feature for final noise estimate avoid = 10 #(GHz) excludes the ends of the bands by this width mergeRadius = 20.0 #(GHz) merge radius for preliminary features found flagWidths = [8.0, 8.0, 5.0, 4.0, 2.0, 2.0] #(GHz) HALF the width of region flagged for feature found snrThresholds = [100.0, 50.0, 30.0, 10.0, 5.0, 3.0] #SNR thresholds for finding to loop over signs = ['pos']*len(snrThresholds) signs+= ['neg']*len(snrThresholds) flagWidths+=flagWidths snrThresholds+=snrThresholds if apod: flagWidths = [6.0, 6.0, 4.0, 4.0, 2.0, 2.0, 0.0] #(GHz) HALF the width of region flagged for feature found snrThresholds = [100.0, 50.0, 30.0, 10.0, 5.0, 3.0]#, 1.0] #SNR thresholds for finding to loop over snrThresholds = [x/3. 
for x in snrThresholds] snrCut = 4 #Default colours for plotting per snrThreshold are [black,lightOrange,darkGreen,purple,cyan,teal] polyOrder = 3 #order of polynomial fitted to the continuum (2 for LR, see below) #Initial continuum fit only parameters resampleResolution = 5.0 #(GHz) for continuum fitting only jumpThreshold = 3.5 #minimum jump (in RMS) to flag for continuum fitting only baselineOrder = 3 #order of poly used for final SNR estimate gOfFRng = 3 #range used by GofF to take the stats limitValue = 2. #To prevent features racing off or jumping outside of the frequency bands checkValue = 2. #To remove features moved too far ##SET THE MAXIMUM NUMBER OF ITERATIONS FOR THE SPECTRUM FITTER maxIter = None maxIter = 500 ##Set checkCutResults to a snrThreshold if you want to check the corresponding ##mid-cut residual and totalModel *produced for 1st detector in 1st obsid only* checkCutResults = False #checkCutResults = snrThresholds[3] ###LR needs a different set of some finding parameters #if res == 'LR': # resampleResolution = 1.0 # snrThresholds = [50.0, 30.0, 10.0, 5.0] # flagWidths = [30.0, 25.0, 15.0, 15.0] # snrRange = [4.0, 50.0] # mergeRadius = 150.0 # avoid = 30.0 # baselineOrder = 1 #order of poly used for final SNR estimate # gOfFRng=10.0 # polyOrder = 2 # #Removes features in the avoid region and outside of the band # #Removes features that have traveled too far # #Limits the feature position during the fit # extraConstraints=True # useSensitivityCurve = False #this is not working well yet #else: useSensitivityCurve = False extraConstraints = False #Set to test improving the initial parameter guess #Not set up for LR #if res != 'LR': ################## ##1. Set to attempt an estimate of the source redshift estimateRedshift = True maxVelocity, velocityDelta = 6000.0, 2000.0 ##2. 
Set to test notoriously blended lines with two sinc profiles testNeutralC = True CWindow, CLineTol = 20.0, 0.8 if testNeutralC: # velocity estimate is required estimateRedshift = True ################## workingDirectory = '/PATH/TO/WORKING/DIRECTORY/' pathToFunctions = '%s'%(workingDirectory) #log events and parameters used pathToLogFile = '%slogs/'%(workingDirectory) date = '%s'%(java.util.Date()) ###logfile name logFileName = ('%sFSFF_logfile_%s%s%s_%sh%sm%ss.txt'%(pathToLogFile,date[8:10],\ date[4:7],date[24:],date[11:13],date[14:16],date[17:19])) ###set to True to write the log to a text file called logFileName writeLogToFile = True ################################################################################ #### 3. Gof/ToFE options #### ################################################################################ ###GoF testGofFfinal = 1 testGofFcut = 0 testGofFcutLines=0 ###ToFE testFitEvidence = 1 limitFitEvidence = True plotFitEvidence = False verboseFitEvidence = False useWeights = False ################################################################################ #### 4. Product save options #### ################################################################################ ###FEATURES FOUND### ###set an output directory to write the feature tables to outDir = '%scatalogues/'%(workingDirectory) ###TABLE PER DETECTOR### ###Save a table per detector in a csv file writeResultsDet = 0 ###Save a table per detector to FITS writeResultsDetFits = 0 ###TABLE PER OBSID### ###Save a table per obsid in a csv file writeResultsObsid = 1 ###COMBINED TABLE FOR ALL OBSID### ###Save the results for all obsids in one csv file writeResultsCombined = 0 ###FITTED CONTINUUM### ###Set an output directory for the saved continuum/continua continuumPath = '%scontinuum/'%(workingDirectory) ###Save a table of fitted parameter per obsid ###Written to csv. 
saveContinuumTable = 1 ###Save the fitted continuum curve per obsid ###An SDS written to FITS saveContinuumFits = 0 ################################################################################ #### 5. Plotting options #### ################################################################################ ###MAIN DIAGNOSTIC PLOTS### ###Set plotIt if you want the main diagnostic plots plotIt = 1 ###Set to save the main diagnostic plots saveMainPlots = 0 #closePlots will only close plots if saveMainPlots is set to True closePlots = 1 ###Set a folder to save the main diagnostic plots to plotPath = '%splots/'%(workingDirectory) ###ZOOM PLOTS PER FEATURE FOUND### ###Set zoomPlots if you want the zoom plots #(plotIt must also be set to True) zoomPlots = 0 #set a folder to save the zoom plots to #(zoom plots are automatically saved, so a folder must be set) zoomPlotPath = '%splots/zoom/'%(workingDirectory) ###POSTCARDS### ###Set plotPostCards to plot a postcard per obsid plotPostCards = 1 ###Set to save the postcard plots savePostCards = 1 ###Set to close the postcard plots #(This is not linked to savePostCards) closePostCards = 1 ###Set a folder to save the postcards to postCardPlotPath = '%splots/postcards/'%(workingDirectory) ###If closePostCards is True, then the postcard plots are not shown during the script execution if closePostCards: seePostCards = 0 else: seePostCards = 1 ###If closePosts is True, then the diagnostic plots are not shown during the script execution if saveMainPlots and closePlots: seePlots = 0 else: seePlots = 1 ################################################################################ #### 6-11 functions #### ################################################################################ execfile('%sFeatureFinder_Functions.py'%(pathToFunctions)) ################################################################################ #### 12. 
script execution #### ################################################################################ if res == 'LR': threshColours = threshColours[0:len(snrThresholds)] plotCol = {} for go in range(len(snrThresholds)): plotCol[snrThresholds[go]]=threshColours[go] ####Setting up the log file with number of observations ####options chosen and parameters used if writeLogToFile: file = open(logFileName,'w') file.write('-----------------\n') file.write(' %s %s obsids\n'%(len(obsids),res)) file.write('-----------------\n') file.write('### Parameters used ###\n') file.write('snrCut:%s, snrCap:%s, snrRange:[%s,%s]\n'%(snrCut,snrCap,snrRange[0],snrRange[1])) file.write('avoid:%s, mergeRadius:%s, limitValue:%s, checkValue:%s \n'%(avoid,mergeRadius,limitValue,checkValue)) file.write('flagWidths:[%s'%(flagWidths[0])) for flag in range(1,len(flagWidths)): file.write(', %s'%flagWidths[flag]) file.write(']\n') file.write('snrThresholds:[%s'%(snrThresholds[0])) for thresh in range(1,len(snrThresholds)): file.write(', %s'%snrThresholds[thresh]) file.write(']\n') file.write('signs:[%s'%(signs[0])) for sign in range(1,len(signs)): file.write(', %s'%signs[sign]) file.write(']\n') file.write('polyOrder:%s, resampleResolution:%s,jumpThreshold:%s,\n'%(polyOrder,resampleResolution,jumpThreshold)) file.write('baselineOrder:%s\n'%(baselineOrder)) file.write('-----------------\n') file.write('### Testing options chosen ###\n') file.write('testNeutralC:%s, estimateRedshift:%s \n'%(testNeutralC,estimateRedshift)\ +'testGofFfinal:%s, testGofFcut:%s, testFitEvidence:%s\n'\ %(testGofFfinal,testGofFcut,testFitEvidence)) if res == 'LR': file.write('useSensitivityCurve:%s\n'%(useSensitivityCurve)) file.write('-----------------\n') file.write('### Saving options chosen ###\n') file.write('writeResultsDet:%s, writeResultsDetFits:%s, writeResultsObsid:%s, writeResultsCombined:%s\n'\ %(writeResultsDet, writeResultsDetFits, writeResultsObsid, writeResultsCombined)) file.write('saveContinuumTable:%s, 
saveContinuumFits:%s\n'%(saveContinuumTable, saveContinuumFits)) file.write('-----------------\n') file.write('### Plotting options chosen ###\n') file.write('plotIt:%s, saveMainPlots:%s, zoomPlots:%s\n'%(plotIt,saveMainPlots,zoomPlots)) file.write('plotPostCards:%s, savePostCards:%s\n'%(plotPostCards,savePostCards)) file.write('-----------------\n') file.write('### Features found ###\n') file.close() #Speed up loading observations archive = HsaReadPool() hsa_storage = ProductStorage([archive]) #Empty lists to fill for the combine results for all obsids freqDetsAll, indDetsAll ,freqErrDetsAll ,snrDetsAll ,detDetsAll = [],[],[],[],[] snrDetsAll2, snrDetsAll3 = [],[] obsidAll ,raAll ,decAll ,resolutionAll ,biasModeAll ,mapSamplingAll = [],[],[],[],[],[] snrIterDetsAll,threshDetsAll = [],[] if testGofFfinal: gOfFdetsAll, gOfFflagsDetsAll = [], [] combinedFlagsAll = [] resultOut = None if testFitEvidence: oddsAll, fitEvidenceFlagsAll = [], [] if estimateRedshift: finalZall =[] finalZerrAll =[] finalZflagAll = [] nMaxMatchAll = [] plotColPlot = 1 ###Loop over the obsids for obsid in obsids: #log events date = '%s'%(java.util.Date()) message = '%s: '%(date[11:19]) #Empty lists to fill and combine results per obsid for the output product freqDets, initialFreqDets, indDets, freqErrDets, snrDets = [],[],[],[],[] snrIterDets, detDets ,threshDets ,modelsDets ,residDets = [],[],[],[],[] sincDets, totalModDets, nFeatures = [], [], [] snrDets2, snrDets3 = [],[] if testGofFfinal: gOfFdets, gOfFdetsFlags = [], [] ###Load the observation context and retrive the necessary product fitsFile = '%sdata/%i.fits'%(workingDirectory,obsid) obs, spec, name = getProduct(obsid, res=res, useHsa=useHsa, mapping=False, fitsFile=fitsFile) message+='%s, '%(name) if spec == 'failed': print name if writeLogToFile: addToLog(message,logFileName) continue print '\n%s 0x%X %i'%(obs.meta['object'].value,obsid,obsid) #make a copy of the input sds to fill with the final continuum fit. 
continuum = spec.copy() continuum = filterChannels(continuum, keepChannels=detectors) #create the table to fill with the fitted polynominal parameters continuumTable = prepareOutputTable([String1d(['SLW','SSW'])],['detector'], tableDescription= 'Polynomial parameters fitted to the continuum, of the form p0 + p1*freq + p2*freq**2 + p3*freq**3',\ addMeta=True, metas=[obsid,spec.meta['raNominal'].value,spec.meta['decNominal'].value],metaType=['L','D','D'],\ metaNames=['obsid','raNominal','decNominal'],metaDescription=[spec.meta['obsid'].description,spec.meta['raNominal'].description,spec.meta['decNominal'].description]) #create a structure to put the continuum parameters in during the detector loop continuumCat = {} #NEW FLAGGING flags = {'SLWC3':{'p1':Double1d(),'fL':Double1d(),'fR':Double1d()},\ 'SSWD4':{'p1':Double1d(),'fL':Double1d(),'fR':Double1d()}} if plotPostCards: plot2,boxMinMax = preparePostcardPlots(spec,avoid,seePostCards=seePostCards) #Loop over the detectors resNoFlag, resOldFlag, resNewFlag = spec.copy(), spec.copy(), spec.copy() plotTitle='' for det_i in range(len(detectors)): det = detectors[det_i] plotName = '%s_%i_%s'%(spec.meta['object'].value,obsid,det) specIn = spec[0][det] if useSensitivityCurve: noiseOut = getLrSensitivityCurve(specIn,det,spec.meta['numRepetitions'].value) specIn.error = noiseOut ''' ## Main function is `catalogueFeatures' ## ## Which calls functions in this order: ## # For inital fit and subtraction of the contiuum - findJumps: Looks for jumps to indicate significant features Flags and weights spectrum for fitting - removeContinuum: Initial fit to the continuum Outputs a continuum subtracted spectrum and the fit # The finding and fitting process begins and loops over the snrThresholds - listFeatures: finds new features > snrThreshold - fitFeatures: fits the old+new features and keeps new features > snrThreshold # Outputs are then prepared, including taking the final SNRs - findResidualSnr: takes the SNR using the fitted peak 
and local noise from the residual ''' features, snrs, snrsIter, cutFeatures, cutSnrs, cutSnrsIter, thresholds, \ cutThresholds, finalModelArray, finalResidualArray, fitter, continuumModel,\ sincModels, cutSincModels, featuresErr, cutFeaturesErr, initialContinuumArray, \ peaks, cutPeaks, cutContinuumValues, outputInitialFeatureFreq, \ cutOutputInitialFeatureFreq, indexFeatures, cutIndexFeatures,\ threshSincs,threshResiduals,threshTotalModels,\ gOfFthreshold, linesForGofFthresholds, allSnrForGofFthresholds,\ flags, cutSnrs2, cutSnrs3,residualDet,residualDet2,residualDet3 = \ catalogueFeatures(specIn,snrThresholds,signs,jumpThreshold,det,flags,\ resampleResolution=resampleResolution,flagWidths=flagWidths, \ polyOrder=polyOrder, snrCut=snrCut, snrRange=snrRange,\ mergeRadius=mergeRadius, subtractBaseline=True, \ baselineOrder=baselineOrder, checkCutResults = checkCutResults,\ avoid=avoid, snrCap=snrCap, testGofFcut=testGofFcut,\ testGofFcutLines=testGofFcutLines,maxIter = maxIter,\ limitValue=limitValue,checkValue=checkValue,apod=apod,fwhm=None) resNoFlag[0][det] = residualDet resOldFlag[0][det] = residualDet2 resNewFlag[0][det] = residualDet3 if checkCutResults == False: #Append the detector array to the feature index for ll in range(indexFeatures.size): indexFeatures[ll]=indexFeatures[ll]+det[0:3] for ll in range(cutIndexFeatures.size): cutIndexFeatures[ll]=cutIndexFeatures[ll]+det[0:3] #Get the continuum from the results param = continuumModel.getFittedParameters() paramError = continuumModel.getFittedStdDev() poly = PolynomialModel(polyOrder) poly.setParameters(param) con1 = poly(spec[0][det].wave) continuum[0][det].flux = con1 #Get the maximum continuum level if det[:3]=='SSW': maxCon = MAX(con1) continuumCat[det] = {} continuumCat[det]['param']=param continuumCat[det]['error']=paramError for addPara in range(param.size): continuum[0][det].meta['p%s'%(addPara)]=DoubleParameter(param[addPara],'Coefficient p%s of order %s polynomial fitted to the 
continuum'%(addPara,polyOrder)) continuum.meta['%s_p%s'%(det[:3],addPara)]=DoubleParameter(param[addPara],'Coefficient p%s of order %s polynomial fitted to the %s continuum'%(addPara,polyOrder,det)) if testGofFfinal: ###When using the final fitted features, GoF can be run outside the main function specInSub = specIn.copy() specInSub.flux = specInSub.flux-con1 gOfFdet = gOfF(cutFeatures,specInSub,cutSincModels,rng=gOfFRng) ###PLOTTING if plotIt: if testGofFfinal: ###If plotting, run GoF for all features specInSub = specIn.copy() specInSub.flux = specInSub.flux-con1 #####This bit crashed for 0x500092AF #gOfFdetPlot = gOfF(features,specInSub,sincModels,rng=gOfFRng) ###GOF STILL TO BE ADDED TO A PLOT #plot a colour reference for the snrThreshold colours used if plotColPlot: colPlot = plotColours(threshColours,snrThresholds) if zoomPlots: colPlot.saveAsPDF('%scolourReference.pdf'%(zoomPlotPath)) if saveMainPlots: colPlot.saveAsPDF('%scolourReference.pdf'%(plotPath)) if closePlots: colPlot.close() plotColPlot = 0 if checkCutResults: if len(features) == 0: continue #Plot the main diagnostic plot and if requested, the zoomPlots plot1 = diagnosticPlots(specIn, con1, finalModelArray, features, indexFeatures, snrs, thresholds, plotCol,\ sincModels, obsid, name, det_i, zoomPlots=zoomPlots, saveMainPlots=saveMainPlots,\ closePlots=closePlots, plotPath=plotPath, zoomPlotPath=zoomPlotPath,\ checkCutResults=checkCutResults, snrCut=snrCut,\ seePlots=seePlots,res=res) ####END OF MAIN DIAGNOSTIC PLOTTING (postcards are plotted later) if checkCutResults: print 'Mid loop products are available for snrThreshold %s:'%(checkCutResults) print ' tempTotalMod, tempResidual, cutTotalModels, cutResiduals' print ' **Only cutSnrs is filled at this point (not snrs)**' tempTotalMod = finalModelArray #checkCutResults total model tempResidual = finalResidualArray #checkCutResults residual cutTotalModels = cutFeatures #All total models up to checkCutResults threshold cutResiduals = cutSnrs #All 
residuals up to checkCutResults threshold #Also: finalModelArray, residual, fitter, continuumModel, models stop #Print each feature to the console if requested if printToScreen: printFeaturesVerbose(cutFeatures, cutSnrs, name) #Print the number of features above the SNR cut that were found print 'SNR cut > %s found %s features for %s'%(snrCut,cutFeatures.size,det) message+='%s %s features found, '%(cutFeatures.size,det) if det == 'SLWC3': plotTitle+=' (%s/'%(cutFeatures.size) elif det == 'SSWD4': plotTitle+='%s)'%(cutFeatures.size) nFeatures.append(cutFeatures.size) #MOVE SORTING INSIDE FITFEATURES SO THE BITS ADDED FOR TESTING CAN BE SHORTER? ###SORT THE CUT RESULTS cutFeatures, [cutIndexFeatures,cutIndexFeatures,cutFeaturesErr,cutSnrs,\ cutSnrsIter, cutThresholds,cutPeaks,cutContinuumValues,cutOutputInitialFeatureFreq,\ cutSnrs2,cutSnrs3],\ indOut = sortArrays(cutFeatures,[cutIndexFeatures,cutIndexFeatures,\ cutFeaturesErr,cutSnrs,cutSnrsIter,cutThresholds,\ cutPeaks,cutContinuumValues,cutOutputInitialFeatureFreq,\ cutSnrs2,cutSnrs3]) if testGofFfinal: gOfFdet = gOfFdet[Selection(indOut)] gOfFdets+=gOfFdet gOfFdetsAll+=gOfFdet ###Create the output table for a single detector featureCat = prepareOutputTable([cutIndexFeatures,cutFeatures,cutFeaturesErr,cutSnrs,cutSnrs2,cutSnrs3,cutSnrsIter,cutThresholds],\ ['featureIndex','frequency','frequencyErr','cutSnr','cutSnr2','cutSnr3','cutSnrIter','interThreshold'],\ addMeta = True, metas=[name,obsid,res,det,snrCut,polyOrder,mergeRadius,jumpThreshold],\ metaNames=['object','obsid','resolution','detector','SNRcut','polyOrder','mergeRadius','jumpThreshold'],\ metaType=["S","L","S","S","D","L","D","D"]) ###Append output for the table per obsid freqDets+=cutFeatures indDets+=cutIndexFeatures initialFreqDets+=cutOutputInitialFeatureFreq freqErrDets+=cutFeaturesErr snrDets+=cutSnrs snrDets2+=cutSnrs2 snrDets3+=cutSnrs3 snrIterDets+=cutSnrsIter detDets+=[det]*cutFeatures.size threshDets+=cutThresholds modelsDets+=cutSincModels 
residDets.append(threshResiduals) totalModDets.append(threshTotalModels) sincDets.append(threshSincs) ###For the combined output for all obsids freqDetsAll+=cutFeatures indDetsAll+=cutIndexFeatures freqErrDetsAll+=cutFeaturesErr snrDetsAll+=cutSnrs snrDetsAll2+=cutSnrs2 snrDetsAll3+=cutSnrs3 detDetsAll+=[det[:3]]*cutFeatures.size snrIterDetsAll+=cutSnrsIter threshDetsAll+=cutThresholds ### if resampleResolution != None: featureCat.meta['resampleResolution']=DoubleParameter(resampleResolution) else: featureCat.meta['resampleResolution']=StringParameter('None') if writeResultsDetFits: simpleFitsWriter(product=featureCat, file='%s%s_%s_%ssig.fits'%(outDir,plotName,det,snrCut)) #Write out table per detector to csv if writeResultsDet: asciiTableWriter(file='%s%s_%s.csv'%(outDir,plotName,det),table=featureCat, writeMetadata=True) if plotPostCards: plot2 = postcardPlots(plot2, spec, det, det_i, cutFeatures, cutSnrs3, \ cutPeaks, cutContinuumValues, overlap, boxMinMax) #Add the fitted continuum plot2.addLayer(LayerXY(specIn.wave,con1,color=Color.GREEN,stroke=1)) #GoF flags if testGofFfinal: gOfFflagsOut = getGofFflags(gOfFdets) gOfFflagsDetsAll+=gOfFflagsOut ###ToFE if testFitEvidence: specx = spec.copy().getDefault() oddsOut = Double1d() resultOut = Double1d() dets = String1d(detDets) for det in UNIQ(String1d(dets)): sel = dets.where(dets == det) specDet = specx[det] if limitFitEvidence: if len(Double1d(freqDets)[sel]) > 35: print 'Number of features found for %s %s is > 35, so skipping fitEvidence'%(obsid,det) odds = Double1d((Double1d(freqDets)[sel]).size)-99.0 result = odds else: result, odds = fitEvidence(specDet,Double1d(freqDets)[sel],normalise=True, plotIt=plotFitEvidence, verbose=verboseFitEvidence, useWeights=useWeights, polyOrder=polyOrder) else: result, odds = fitEvidence(specDet,Double1d(freqDets)[sel],normalise=True, plotIt=plotFitEvidence, verbose=verboseFitEvidence, useWeights=useWeights, polyOrder=polyOrder) oddsOut.append(odds) resultOut.append(result) 
oddsAll+=oddsOut #ToFE flags flagsOut = getFitEvidenceFlags(resultOut) fitEvidenceFlagsAll+=flagsOut if testGofFfinal: combinedFlags = getCombinedFlags(gOfFdets, resultOut) combinedFlags = getCombinedFlags2(combinedFlags,freqDets,overlap,noisyRegions) combinedFlagsAll += combinedFlags #### Redshift and Neutral Carbon #### if estimateRedshift: velocity, velocityErr, zFlag, N, shiftFreq = \ redshiftFF(freqDets,freqErrDets,snrDets3,maxVelocity,velocityDelta,N_min=7) if testNeutralC and (velocity is not Double.NaN): removeFreq, newFreq, newErr, newSnr = \ checkNeutralC(spec[0]['SLWC3'], freqDets, snrDets3, continuumCat['SLWC3']['param'],\ velocity, window=25.0, apod=False, fwhm=None) nccFlag = [0]*len(freqDets) if len(newFreq) > 0: freqDets, freqErrDets, snrDets3, detDets, combinedFlags, nccFlag = \ updateProducts([removeFreq, newFreq, newErr, newSnr], freqDets, \ freqErrDets, snrDets3, detDets, combinedFlags, nccFlag) if plotPostCards: plot2, plotTitle = rePlot(spec, continuum, detectors, freqDets, snrDets3, detDets, \ overlap, boxMinMax, avoid, seePostCards) nFeatures[0] = String1d(detDets).where(String1d(detDets) == 'SLWC3').length() #######PREPARE AND SAVE THE OUTPUT PRODUCT PER OBSID####### featureCatPerObsid = prepareOutputTable([Double1d(freqDets),Double1d(freqErrDets),Double1d(snrDets3),String1d(detDets)],\ ['frequency','frequencyError','SNR','detector'],\ units=[Frequency.GIGAHERTZ,Frequency.GIGAHERTZ,None,None],\ description=['Measured frequency','Error on frequency','Peak/local noise','Detector'],\ addMeta = True, metas=[spec.obsid,spec.meta['object'].value,spec.meta['raNominal'].value,spec.meta['decNominal'].value,\ spec.meta['odNumber'].value,spec.meta['processResolution'].value,spec.meta['biasMode'].value,spec.meta['mapSampling'].value,snrCut,avoid,maxCon,nFeatures[0],nFeatures[1]],\ 
metaNames=['obsid','object','raNominal','decNominal','operationalDay','resolution','biasMode','mapSampling','minimumSnr','edgeMaskWidth','maxContinuum','nFeaturesSlw','nFeaturesSsw'],\ metaDescription=[spec.meta['obsid'].description,spec.meta['object'].description,spec.meta['raNominal'].description,spec.meta['decNominal'].description,\ spec.meta['odNumber'].description,'Resolution',spec.meta['biasMode'].description,\ spec.meta['mapSampling'].description,'Minimum SNR limit for features found','Width of spectral edges masked','Maximum of fitted continuum',\ 'n features found in SLW','n features found in SSW'],\ metaType=['L','S','D','D','L','S','S','S','D','D','D','L','L','D','D'],metaUnit=[None,None,None,None,None,None,None,None,None,Frequency.GIGAHERTZ,spec.fluxUnit,None,None]) ''' if testGofFfinal: featureCatPerObsid['gOfF'] = Column(Double1d(gOfFdets)) featureCatPerObsid['gOfF'].description = "Local goodness of fit" featureCatPerObsid['gOfFflag'] = Column(gOfFflagsOut) featureCatPerObsid['gOfFflag'].description = "Goodness of fit flag" if testFitEvidence: featureCatPerObsid['odds'] = Column(oddsOut) featureCatPerObsid['odds'].description = "Total fit evidence" featureCatPerObsid['oddsFlag'] = Column(flagsOut) featureCatPerObsid['oddsFlag'].description = "Total fit evidence flag" ''' if testGofFfinal: featureCatPerObsid['featureFlag'] = Column(combinedFlags) featureCatPerObsid['featureFlag'].description = "Feature flag" ###Write out the continuum table if requested if saveContinuumTable: for para in range(polyOrder+1): continuumTable['p%s'%(para)] = Column(Double1d([continuumCat['SLWC3']['param'][para],continuumCat['SSWD4']['param'][para]])) continuumTable['p%serr'%(para)] = Column(Double1d([continuumCat['SLWC3']['error'][para],continuumCat['SSWD4']['error'][para]])) asciiTableWriter(file='%s%s_fittedContinuumParameters.csv'%(continuumPath,obsid),table=continuumTable, writeMetadata=True) #### Redshift metadata #### if estimateRedshift: 
featureCatPerObsid.meta['radVelEstimate']=DoubleParameter(velocity) featureCatPerObsid.meta['radVelEstimate'].description = 'Radial velocity estimate based on features found' featureCatPerObsid.meta['radVelEstimate'].unit = Speed.KILOMETERS_PER_SECOND featureCatPerObsid.meta['radVelErr']=DoubleParameter(velocityErr) featureCatPerObsid.meta['radVelErr'].description = 'Radial velocity error' featureCatPerObsid.meta['radVelErr'].unit = Speed.KILOMETERS_PER_SECOND featureCatPerObsid.meta['radVelFlag']=StringParameter(zFlag) featureCatPerObsid.meta['radVelFlag'].description = 'Radial velocity flag: max number of matches' featureCatPerObsid.meta['nMaxMatch']=LongParameter(N) featureCatPerObsid.meta['nMaxMatch'].description = 'Number of features with max number of matches' if testNeutralC and (velocity is not Double.NaN): featureCatPerObsid['nccFlag'] = Column(Int1d(nccFlag)) featureCatPerObsid.meta['NCC_INFO'] = StringParameter('Column entry' ' true if the Neutral Carbon Check added or modified the feature') ###APPEND TO THE FULL OUTPUT obsidAll+=[spec.obsid]*len(freqDets) raAll+=[spec.meta['raNominal'].value]*len(freqDets) decAll+=[spec.meta['decNominal'].value]*len(freqDets) resolutionAll+=[spec.meta['processResolution'].value]*len(freqDets) biasModeAll+=[spec.meta['biasMode'].value]*len(freqDets) mapSamplingAll+=[spec.meta['mapSampling'].value]*len(freqDets) if estimateRedshift: finalZall+=[velocity]*len(freqDets) finalZerrAll+=[velocityErr]*len(freqDets) finalZflagAll+=[zFlag]*len(freqDets) nMaxMatchAll+=[N]*len(freqDets) if writeResultsObsid: asciiTableWriter(file='%s%s_featuresFound.csv'%(outDir,obsid),table=featureCatPerObsid, writeMetadata=True) writeFits(table=featureCatPerObsid, file='%s%s_featuresFound.fits'%(outDir,obsid)) if saveContinuumFits: simpleFitsWriter(continuum,'%s%s_continuum.fits.gz'%(continuumPath,obsid),compression='GZIP') if plotPostCards: plot2.titleText='%s Observation ID %s;%s'%(spec.meta['object'].value,obsid,plotTitle) if savePostCards: 
plot2.saveAsPDF('%s%s_postcard.pdf'%(postCardPlotPath,obsid)) if closePostCards: plot2.close() #print message if writeLogToFile: addToLog(message,logFileName) residual = spec.copy() residual[0]['SLWC3'].flux = residDets[0][-1] residual[0]['SSWD4'].flux = residDets[1][-1] if writeLogToFile: date = '%s'%(java.util.Date()) addToLog('-----------------\n%s: Finished\n-----------------'%(date[11:19]),logFileName) ################################################################################