rem (string lengths 0–322k) | add (string lengths 0–2.05M) | context (string lengths 8–228k) |
---|---|---|
if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= coarseres/2 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*abs(sin(fpt[1])) <= coarseres/2: | if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= ds*ds/4 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*abs(sin(fpt[1]))*abs(sin(fpt[1])) \ <= ds*ds/4: | def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid coarsedict = {} for cpt in coarsegrid: flist = [] for fpt in fgtemp: if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= coarseres/2 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*abs(sin(fpt[1])) <= coarseres/2: flist.append(fpt) coarsedict[cpt] = flist for rpt in flist: fgtemp.remove(pt) return coarsedict |
fgtemp.remove(pt) return coarsedict | fgtemp.remove(rpt) return coarsedict, fgtemp | def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid coarsedict = {} for cpt in coarsegrid: flist = [] for fpt in fgtemp: if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= coarseres/2 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*abs(sin(fpt[1])) <= coarseres/2: flist.append(fpt) coarsedict[cpt] = flist for rpt in flist: fgtemp.remove(pt) return coarsedict |
background_livetime[on_instruments].setdfault(key, 0) | background_livetime[on_instruments].setdefault(key, 0) | def background_livetime_ring_by_slide(connection, live_time_program, seglists, veto_segments, verbose = False): background_livetime = {} instruments = frozenset(seglists.keys()) offset_vectors = db_thinca_rings.get_background_offset_vectors(connection) # first work out time slide live time for on_instruments, livetimes in db_thinca_rings.get_thinca_livetimes(db_thinca_rings.get_thinca_rings_by_available_instruments(connection, program_name = live_time_program), veto_segments, offset_vectors, verbose = verbose).items(): on_instruments = frozenset(on_instruments)#lsctables.ifos_from_instrument_set(on_instruments) for offset, lt in zip(offset_vectors,livetimes): background_livetime.setdefault(on_instruments,{}) key = frozenset(offset.items()) background_livetime[on_instruments].setdfault(key, 0) background_livetime[on_instruments][key] += lt return background_livetime |
self.add_var_arg(p_nodes[1].outputFileName) | self.add_var_arg(p_nodes[1].localFileName) | def __init__(self, dag, job, cp, opts, ifo, time, p_nodes=[], type=""): |
input_cache_file = p_nodes[0].outputFileName | input_cache_file = p_nodes[0].localFileName | def __init__(self, dag, job, cp, opts, time, ifo, p_nodes=[], type="ht", variety="fg"): |
if cp.has_option('fu-remote-jobs','remote-jobs') and job.name in cp.get('fu-remote-jobs','remote-jobs') and cp.has_option('fu-remote-jobs','remote-ifos') and ifo in cp.get('fu-remote-jobs','remote-ifos'): self.add_var_arg('--server ldr-bologna.phys.uwm.edu') else: self.add_var_arg('') | | def __init__(self, dag, job, cp, opts, ifo, sngl=None, qscan=False, trigger_time=None, data_type="hoft", p_nodes=[]): |
qCache = lalCache.rstrip("cache") + "qcache" self.set_post_script(os.getcwd()+"/cacheconv.sh %s %s" %(lalCache,qCache) ) | qCache = lalCache.rstrip("lcf") + "qcache" if cp.has_option('fu-remote-jobs','remote-jobs') and job.name in cp.get('fu-remote-jobs','remote-jobs') and cp.has_option('fu-remote-jobs','remote-ifos') and ifo in cp.get('fu-remote-jobs','remote-ifos'): self.add_var_arg('--server ldr-bologna.phys.uwm.edu') postScriptTest = "y" self.localFileName = os.path.basename(qCache) else: self.add_var_arg('') postScriptTest = "n" self.set_post_script(os.getcwd()+"/cacheconv.sh %s %s %s" %(lalCache,qCache,postScriptTest) ) | def setup_qscan(self, job, cp, time, ifo, data_type): # 1s is substracted to the expected startTime to make sure the window # will be large enough. This is to be sure to handle the rouding to the # next sample done by qscan. type, channel = figure_out_type(time,ifo,data_type) self.set_type(type) self.q_time = float(cp.get("fu-q-"+data_type+"-datafind",ifo+"-search-time-range"))/2. self.set_observatory(ifo[0]) self.set_start(int( time - self.q_time - 1.)) self.set_end(int( time + self.q_time + 1.)) lalCache = self.get_output() qCache = lalCache.rstrip("cache") + "qcache" self.set_post_script(os.getcwd()+"/cacheconv.sh %s %s" %(lalCache,qCache) ) return(qCache) |
given an instance of skypoints populate and return skylocinjtable | record injection data in a skylocinjtable | def populate_SkyLocInjTable(skylocinjtable,coinc,dt_area,rank_area, \ dtrss_inj,dDrss_inj): """ given an instance of skypoints populate and return skylocinjtable """ row = skylocinjtable.RowType() row.end_time = coinc.time rhosquared = 0.0 for ifo in coinc.ifo_list: rhosquared += coinc.snr[ifo]*coinc.snr[ifo] row.comb_snr = sqrt(rhosquared) try: row.h1_snr = coinc.snr['H1'] except: row.h1_snr = None try: row.l1_snr = coinc.snr['L1'] except: row.l1_snr = None try: row.v1_snr = coinc.snr['V1'] except: row.v1_snr = None row.ra = coinc.longitude_inj row.dec = coinc.latitude_inj row.dt_area = dt_area row.rank_area = rank_area row.delta_t_rss = dtrss_inj row.delta_D_rss = dDrss_inj try: row.h1_eff_distance = coinc.eff_distances_inj['H1'] except: row.h1_eff_distance = None try: row.l1_eff_distance = coinc.eff_distances_inj['L1'] except: row.l1_eff_distance = None try: row.v1_eff_distance = coinc.eff_distances_inj['V1'] except: row.v1_eff_distance = None row.mass1 = coinc.mass1_inj row.mass2 = coinc.mass2_inj skylocinjtable.append(row) |
" --segment_url https://segdb.ligo.caltech.edu"+\ | " --segment-url https://segdb.ligo.caltech.edu"+\ | def grab_segments(start,end,flag): """ Returns a segmentlist containing the segments during which the given flag was active in the given period. """ #== construct segment query segment_cmd = "ligolw_segment_query --query-segments"+\ " --segment_url https://segdb.ligo.caltech.edu"+\ " --include-segments "+flag+\ " --gps-start-time "+str(start)+\ " --gps-end-time "+str(end)+\ ''' | ligolw_print -t segment -c start_time -c end_time --delimiter " "''' #== run segment query segs = GetCommandOutput(segment_cmd) #== construct segments as structure seglist=[] segs=segs.split('\n') for seg in segs: if seg=='': continue try: [seg_start,seg_end]=seg.split(' ') seglist.append(segment(int(seg_start),int(seg_end))) except: continue seglist = segmentlist([seglist]) return seglist |
" --gps-start-time "+str(start)+\ " --gps-end-time "+str(end)+\ | " --gps-start-time "+str(int(start))+\ " --gps-end-time "+str(int(end))+\ | def grab_segments(start,end,flag): """ Returns a segmentlist containing the segments during which the given flag was active in the given period. """ #== construct segment query segment_cmd = "ligolw_segment_query --query-segments"+\ " --segment_url https://segdb.ligo.caltech.edu"+\ " --include-segments "+flag+\ " --gps-start-time "+str(start)+\ " --gps-end-time "+str(end)+\ ''' | ligolw_print -t segment -c start_time -c end_time --delimiter " "''' #== run segment query segs = GetCommandOutput(segment_cmd) #== construct segments as structure seglist=[] segs=segs.split('\n') for seg in segs: if seg=='': continue try: [seg_start,seg_end]=seg.split(' ') seglist.append(segment(int(seg_start),int(seg_end))) except: continue seglist = segmentlist([seglist]) return seglist |
segs = GetCommandOutput(segment_cmd) | segs,status = GetCommandOutput(segment_cmd) | def grab_segments(start,end,flag): """ Returns a segmentlist containing the segments during which the given flag was active in the given period. """ #== construct segment query segment_cmd = "ligolw_segment_query --query-segments"+\ " --segment_url https://segdb.ligo.caltech.edu"+\ " --include-segments "+flag+\ " --gps-start-time "+str(start)+\ " --gps-end-time "+str(end)+\ ''' | ligolw_print -t segment -c start_time -c end_time --delimiter " "''' #== run segment query segs = GetCommandOutput(segment_cmd) #== construct segments as structure seglist=[] segs=segs.split('\n') for seg in segs: if seg=='': continue try: [seg_start,seg_end]=seg.split(' ') seglist.append(segment(int(seg_start),int(seg_end))) except: continue seglist = segmentlist([seglist]) return seglist |
segs=segs.split('\n') for seg in segs: if seg=='': continue try: [seg_start,seg_end]=seg.split(' ') seglist.append(segment(int(seg_start),int(seg_end))) except: continue | if status==0: segs=segs.split('\n') for seg in segs: if seg=='': continue try: [seg_start,seg_end]=seg.split(' ') seglist.append(segment(int(seg_start),int(seg_end))) except: continue seglist = segmentlist(seglist) else: print >>sys.stderr, "Warning: Call to ligolw_segment_query failed with "+\ "command:" print >>sys.stderr, "\n"+segment_cmd+"\n" | def grab_segments(start,end,flag): """ Returns a segmentlist containing the segments during which the given flag was active in the given period. """ #== construct segment query segment_cmd = "ligolw_segment_query --query-segments"+\ " --segment_url https://segdb.ligo.caltech.edu"+\ " --include-segments "+flag+\ " --gps-start-time "+str(start)+\ " --gps-end-time "+str(end)+\ ''' | ligolw_print -t segment -c start_time -c end_time --delimiter " "''' #== run segment query segs = GetCommandOutput(segment_cmd) #== construct segments as structure seglist=[] segs=segs.split('\n') for seg in segs: if seg=='': continue try: [seg_start,seg_end]=seg.split(' ') seglist.append(segment(int(seg_start),int(seg_end))) except: continue seglist = segmentlist([seglist]) return seglist |
seglist = segmentlist([seglist]) | | def grab_segments(start,end,flag): """ Returns a segmentlist containing the segments during which the given flag was active in the given period. """ #== construct segment query segment_cmd = "ligolw_segment_query --query-segments"+\ " --segment_url https://segdb.ligo.caltech.edu"+\ " --include-segments "+flag+\ " --gps-start-time "+str(start)+\ " --gps-end-time "+str(end)+\ ''' | ligolw_print -t segment -c start_time -c end_time --delimiter " "''' #== run segment query segs = GetCommandOutput(segment_cmd) #== construct segments as structure seglist=[] segs=segs.split('\n') for seg in segs: if seg=='': continue try: [seg_start,seg_end]=seg.split(' ') seglist.append(segment(int(seg_start),int(seg_end))) except: continue seglist = segmentlist([seglist]) return seglist |
plot(getinjpar(injection,0),getinjpar(injection,1),'go',scalex=False,scaley=False) | plot([getinjpar(injection,0)],[getinjpar(injection,1)],'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() |
plot(getinjpar(injection,5),getinjpar(injection,6),'go',scalex=False,scaley=False) | print 'getinjpar(5),getinjpar(6) = %f,%f\n'%(getinjpar(injection,5),getinjpar(injection,6)) plot([getinjpar(injection,5)],[getinjpar(injection,6)],'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() |
if injection and getinjpar(injection,7)<max(pos[:,7]) and getinjpar(injection,7)>min(pos[:,7]) and getinjpar(injection,8)<max(pos[:,8]) and getinjpar(injection,8)>min(pos[:,8]): plot(getinjpar(injection,7),getinjpar(injection,8),'go',scalex=False,scaley=False) | if injection and getinjpar(injection,7)<max(pos[:,7]) and getinjpar(injection,7)>min(pos[:,7]) and getinjpar(injection,8)<max(pos[:,8]) and getinjpar(injection,8)>min(pos[:,8]): plot([getinjpar(injection,7)],[getinjpar(injection,8)],'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() |
plot(injection.mass1,injection.mass2,'go',scalex=False,scaley=False) | plot([injection.mass1],[injection.mass2],'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() |
plot(injection.mass1,injection.distance,'go',scalex=False,scaley=False) | plot([injection.mass1],[injection.distance],'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() |
plot(getinjpar(injection,4),getinjpar(injection,8),'go',scalex=False,scaley=False) | plot([getinjpar(injection,4)],[getinjpar(injection,8)],'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() |
plot(getinjpar(injection,i),getinjpar(injection,j),'go',scalex=False,scaley=False) | plot([getinjpar(injection,i)],[getinjpar(injection,j)],'go',scalex=False,scaley=False) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() |
htmlfile.write('<td width=30%><img width=100% src="skymap.png"></td>') htmlfile.write('<td width=30%><img width=100% src="m2dist.png"></td>') | if opts.skyres is not None: htmlfile.write('<td width=30%><img width=100% src="skymap.png"></td>') else: htmlfile.write('<td width=30%><img width=100% src="psiiota.png"></td>') htmlfile.write('<td width=30%><img width=100% src="Diota.png"></td>') | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() |
gkde=stats.gaussian_kde(pos[:,i]) ind=linspace(min(pos[:,i]),max(pos[:,i]),101) kdepdf=gkde.evaluate(ind) plot(ind,kdepdf,label='density estimate') | if size(unique(pos[:,i]))>1: gkde=stats.gaussian_kde(pos[:,i]) ind=linspace(min(pos[:,i]),max(pos[:,i]),101) kdepdf=gkde.evaluate(ind) plot(ind,kdepdf,label='density estimate') | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() |
for x in self.ifos: if x not in self.__backgroundPickle__.keys(): sys.stderr.write("Could not either open or save DQ \ background in %s.\n"%(self.__backgroundPickle__)) self.__backgroundResults__=list() self.__backgroundTimesDict__=dict() self.__backgroundDict__=dict() self.__haveBackgroundDict__=bool(False) return | if not self.__haveBackgroundDict__: sys.stderr.write("Could not either open or save DQ \ background in %s no background data available!\n"%(self.__backgroundPickle__)) self.__backgroundResults__=list() self.__backgroundTimesDict__=dict() self.__backgroundDict__=dict() self.__haveBackgroundDict__=bool(False) return | def estimateDQbackground(self): """ This method looks at the self.resultlist inside the instance. Using this and 1000 generated time stamp it tabulates a ranking of flag prevelance, binomial probability 'p' """ if len(self.resultList) < 1: self.__backgroundResults__=list() self.__backgroundTimesDict__=dict() self.__backgroundDict__=dict() self.__haveBackgroundDict__=bool(False) return #Create DQ background, specify pickle locale to try and load first #Determine which IFOs from DQ listing uniqIfos=list() for ifo,b,c,d,e,f in self.resultList: if ifo not in uniqIfos: uniqIfos.append(ifo) ifoEpochList=[(x,getRunEpoch(self.triggerTime,x)) for x in self.ifos] self.createDQbackground(ifoEpochList,self.__backgroundPickle__) for x in self.ifos: if x not in self.__backgroundPickle__.keys(): sys.stderr.write("Could not either open or save DQ \ |
os.stdout.write("Path information to background pickle unchanged.\n") | sys.stdout.write("Path information to background pickle unchanged.\n") | def resetPicklePointer(self,filename=None): """ If you called the class definition with the wrong pickle path. You can reset it with this method. """ if filename==None: os.stdout.write("Path information to background pickle unchanged.\n") elif filename.__contains__("~"): self.__backgroundPickle__=os.path.expanduser(filename) else: self.__backgroundPickle__=filename |
os.stdout.write("Specify trigger time please.\n") | sys.stdout.write("Specify trigger time please.\n") | def fetchInformationDualWindow(self,triggerTime=None,frontWindow=300,\ backWindow=150,ifoList='DEFAULT'): """ This method is responsible for queries to the data server. The results of the query become an internal list that can be converted into an HTML table. The arguments allow you to query with trigger time of interest and to change the window with each call if desired. The version argument will fetch segments with that version or higher. """ if ifoList=="DEFAULT": ifoList=interferometers if (ifoList == None) or \ (len(ifoList) < 1): sys.stderr.write("Ifolist passed is malformed! : %s\n"%ifoList) return #Set the internal class variable self.ifos self.ifos=ifoList if sum([x.upper() in interferometers for x in ifoList]) < 1: sys.stderr.write("Valid ifos not specified for DQ lookups. %s\n"%ifoList) return triggerTime=float(triggerTime) if triggerTime==int(-1): os.stdout.write("Specify trigger time please.\n") return else: self.triggerTime = float(triggerTime) gpsEnd=int(triggerTime)+int(backWindow) gpsStart=int(triggerTime)-int(frontWindow) sqlString=self.dqvQueryLatestVersion%(gpsEnd,gpsStart) self.resultList=self.query(sqlString) if len(self.resultList) < 1: sys.stdout.write("Query Completed, Nothing Returned for time %s.\n"%(triggerTime)) #Coalesce the segments for each DQ flag #Reparse the information newDQSeg=list() if self.resultList.__len__() > 0: #Obtain list of all flags, ignore IFOs not specified uniqSegmentName=list() for ifo,name,version,comment,start,end in self.resultList: if (not uniqSegmentName.__contains__((ifo,name,version,comment))) and \ (ifo.strip().upper() in ifoList): uniqSegmentName.append((ifo,name,version,comment)) #Add the SCIENCE segment no matter which IFOs are specified! if ((name.lower().__contains__('science')) and \ not (ifo.strip().upper() in ifoList)): uniqSegmentName.append((ifo,name,version,comment)) #Save textKey for all uniq segments combos for uifo,uname,uversion,ucomment in uniqSegmentName: segmentIntervals=list() #Extra segments based on uniq textKey for ifo,name,version,comment,start,end in self.resultList: if (uifo,uname,uversion,ucomment)==(ifo,name,version,comment): segmentIntervals.append((start,end)) segmentIntervals.sort() #Coalesce those segments newSegmentIntervals=self.__merge__(segmentIntervals) #Write them to the object which we will return for newStart,newStop in newSegmentIntervals: newDQSeg.append([uifo,uname,uversion,ucomment,newStart,newStop]) newDQSeg.sort() del segmentIntervals #Reset the result list to the IFO restricted set self.resultList=newDQSeg return newDQSeg |
os.stdout.write("Problem saving pickle of DQ information.") os.stdout.write("Trying to place pickle in your home directory.") | sys.stdout.write("Problem saving pickle of DQ information.") sys.stdout.write("Trying to place pickle in your home directory.") | def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"\ %(ifoEpochList,type(ifoEpochList)) #Make sure epoch exists for reach ifo for ifo,epoch in ifoEpochList: if ifo not in runEpochs.keys(): raise Exception, "Bad ifo specified, %s"%ifo if epoch not in runEpochs[ifo].keys(): raise Exception, "Bad ifo epoch specified, %s:%s"%(ifo,epoch) #If pickle location given try to load that pickle first. backgroundPickle=False if pickleLocale!=None: #If pickle file exists read it if not make sure we can #generate it properly otherwise skip creating background if os.path.isfile(pickleLocale): try: self.__backgroundDict__=cPickle.load(file(pickleLocale,'r')) backgroundPickle=True except: backgroundPickle=False sys.stderr.write("Error importing the pickle file! %s\n"\ %(pickleLocale)) return for (ifo,epoch) in ifoEpochList: if (ifo.upper().strip(),epoch.upper().strip()) \ not in self.__backgroundDict__["ifoepoch"]: raise Exception,\ "Invalid ifo and epoch information in \ |
os.stdout.write("Really ignoring pickle generation now!\n") | sys.stdout.write("Really ignoring pickle generation now!\n") | def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"\ %(ifoEpochList,type(ifoEpochList)) #Make sure epoch exists for reach ifo for ifo,epoch in ifoEpochList: if ifo not in runEpochs.keys(): raise Exception, "Bad ifo specified, %s"%ifo if epoch not in runEpochs[ifo].keys(): raise Exception, "Bad ifo epoch specified, %s:%s"%(ifo,epoch) #If pickle location given try to load that pickle first. backgroundPickle=False if pickleLocale!=None: #If pickle file exists read it if not make sure we can #generate it properly otherwise skip creating background if os.path.isfile(pickleLocale): try: self.__backgroundDict__=cPickle.load(file(pickleLocale,'r')) backgroundPickle=True except: backgroundPickle=False sys.stderr.write("Error importing the pickle file! %s\n"\ %(pickleLocale)) return for (ifo,epoch) in ifoEpochList: if (ifo.upper().strip(),epoch.upper().strip()) \ not in self.__backgroundDict__["ifoepoch"]: raise Exception,\ "Invalid ifo and epoch information in \ |
coincTrigs = CoincInspiralUtils.coincInspiralTable(inspTrigs,'snr') | statistic = CoincInspiralUtils.coincStatistic('snr',None,None) coincTrigs = CoincInspiralUtils.coincInspiralTable(inspTrigs,statistic) | def get_coincs_from_coire(self,files): """ uses CoincInspiralUtils to get data from old-style (coire'd) coincs """ #coincTrigs = CoincInspiralUtils.coincInspiralTable() inspTrigs = SnglInspiralUtils.ReadSnglInspiralFromFiles(files, \ mangle_event_id = True,verbose=None) #note that it's hardcoded to use snr as the statistic coincTrigs = CoincInspiralUtils.coincInspiralTable(inspTrigs,'snr') try: inspInj = SimInspiralUtils.ReadSimInspiralFromFiles(files) coincTrigs.add_sim_inspirals(inspInj) #FIXME: name the exception! except: pass |
outdir=opts.outpath | | def histN(mat,N): Nd=size(N) histo=zeros(N) scale=array(map(lambda a,b:a/b,map(lambda a,b:(1*a)-b,map(max,mat),map(min,mat)),N)) axes=array(map(lambda a,N:linspace(min(a),max(a),N),mat,N)) bins=floor(map(lambda a,b:a/b , map(lambda a,b:a-b, mat, map(min,mat) ),scale*1.01)) hbins=reshape(map(int,bins.flat),bins.shape) for co in transpose(hbins): t=tuple(co) histo[t[::-1]]=histo[t[::-1]]+1 return (axes,histo) |
if debug: print "maxdx : %i , maxvalue: %f sample0 : %f sample1: %f"%(maxdx,maxvalue,sample[RAdim],sample[decdim]) | def skyhist_cart(skycarts,sky_samples): """ Histogram the list of samples into bins defined by Cartesian vectors in skycarts """ dot=np.dot N=len(skycarts) print 'operating on %d sky points'%(N) bins=np.zeros(N) for RAsample,decsample in sky_samples: sampcart=pol2cart(RAsample,decsample) maxdx=-1 maxvalue=-1 for i in xrange(0,N): dx=dot(sampcart,skycarts[i]) if dx>maxvalue: maxdx=i maxvalue=dx #if debug: #print "sky co : "+str(skycarts[i])+"long : "+str(sample[RAdim])+" "+str(sample[decdim]) #print "sample co: "+str(sampcart) if debug: print "maxdx : %i , maxvalue: %f sample0 : %f sample1: %f"%(maxdx,maxvalue,sample[RAdim],sample[decdim]) bins[maxdx]+=1 return bins |
|
summary_fo=open('summary.ini','w') summary_file=ConfigParser() summary_file.add_section('metadata') summary_file.set('metadata','group_id','X') if opts.eventnum: summary_file.set('metadata','event_id',str(opts.eventnum)) summary_file.add_section('Confidence levels') summary_file.set('Confidence levels','confidence levels',str(confidence_levels)) paramnames, pos=loadDataFile(opts.data[0]) if "m1" not in paramnames and "m2" not in paramnames and "mchirp" in paramnames and "eta" in paramnames: (m1,m2)=mc2ms(pos[:,paramnames.index('mchirp')],pos[:,paramnames.index('eta')]) pos=np.column_stack((pos,m1,m2)) paramnames.append("m1") paramnames.append("m2") Nd=len(paramnames) print "Number of posterior samples: " + str(size(pos,0)) means = mean(pos,axis=0) meanStr=map(str,means) out=reduce(lambda a,b:a+'||'+b,meanStr) print 'Means:' print '||'+out+'||' RAdim=paramnames.index('RA') decdim=paramnames.index('dec') injection=None if(opts.injfile is not None): import itertools injections = SimInspiralUtils.ReadSimInspiralFromFiles([opts.injfile]) if(opts.eventnum is not None): if(len(injections)<opts.eventnum): print "Error: You asked for event %d, but %s contains only %d injections" %(opts.eventnum,opts.opts.injfile,len(injections)) sys.exit(1) else: injection=injections[opts.eventnum] else: if(len(injections)<1): print 'Warning: Cannot find injection with end time %f' %(means[2]) else: injection = itertools.ifilter(lambda a: abs(a.get_end() - means[2]) < 0.1, injections).next() def getinjpar(inj,parnum): | def getinjpar(paramnames,inj,parnum): | def loadDataFile(filename): print filename infile=open(filename,'r') formatstr=infile.readline().lstrip() formatstr=formatstr.replace('#','') header=formatstr.split() llines=[] import re dec=re.compile(r'[^\d.-]+') line_count=0 for line in infile: sline=line.split() proceed=True if len(sline)<1: print 'Ignoring empty line in input file: %s'%(sline) proceed=False for s in sline: if dec.search(s) is not None: print 'Warning! Ignoring non-numeric data after the header: %s'%(sline) proceed=False if proceed: llines.append(array(map(float,sline))) flines=array(llines) for i in range(0,len(header)): if header[i].lower().find('log')!=-1 and header[i].lower()!='logl': print 'exponentiating %s'%(header[i]) flines[:,i]=exp(flines[:,i]) header[i]=header[i].replace('log','') if header[i].lower().find('sin')!=-1: print 'asining %s'%(header[i]) flines[:,i]=arcsin(flines[:,i]) header[i]=header[i].replace('sin','') if header[i].lower().find('cos')!=-1: print 'acosing %s'%(header[i]) flines[:,i]=arccos(flines[:,i]) header[i]=header[i].replace('cos','') header[i]=header[i].replace('(','') header[i]=header[i].replace(')','') print 'Read columns %s'%(str(header)) return header,flines |
if injection: injpoint=map(lambda a: getinjpar(injection,a),range(len(paramnames))) injvals=map(str,injpoint) out=reduce(lambda a,b:a+'||'+b,injvals) print 'Injected values:' print out summary_file.add_section('Injection values') for parnum in range(len(paramnames)): summary_file.set('Injection values',paramnames[parnum],getinjpar(injection,parnum)) | | def getinjpar(inj,parnum): if paramnames[parnum]=='mchirp' or paramnames[parnum]=='mc': return inj.mchirp if paramnames[parnum]=='mass1' or paramnames[parnum]=='m1': (m1,m2)=mc2ms(inj.mchirp,inj.eta) return m1 if paramnames[parnum]=='mass2' or paramnames[parnum]=='m2': (m1,m2)=mc2ms(inj.mchirp,inj.eta) return m2 if paramnames[parnum]=='eta': return inj.eta if paramnames[parnum]=='time': return inj.get_end() if paramnames[parnum]=='phi0': return inj.phi0 if paramnames[parnum]=='dist' or paramnames[parnum]=='distance': return inj.distance if paramnames[parnum]=='RA' or paramnames[parnum]=='long': return inj.longitude if paramnames[parnum]=='dec' or paramnames[parnum]=='lat': return inj.latitude if paramnames[parnum]=='psi': return inj.polarization if paramnames[parnum]=='iota': return inj.inclination return None |
def plot2Dbins(toppoints,cl,par1_name,par1_bin,par2_name,par2_bin,injpoint): | def plot2Dbins(toppoints,par1_bin,par2_bin,outdir,par1name=None,par2name=None,injpoint=None): | def plot2Dbins(toppoints,cl,par1_name,par1_bin,par2_name,par2_bin,injpoint): #Work out good bin size xbins=int(ceil((max(toppoints[:,0])-min(toppoints[:,0]))/par1_bin)) ybins=int(ceil((max(toppoints[:,1])-min(toppoints[:,1]))/par2_bin)) _dpi=120 xsize_in_inches=6. xsize_points = xsize_in_inches * _dpi points_per_bin_width=xsize_points/xbins ysize_points=ybins*points_per_bin_width ysize_in_inches=ysize_points/_dpi # myfig=plt.figure(1,figsize=(xsize_in_inches+2,ysize_in_inches+2),dpi=_dpi,autoscale_on=True) cnlevel=[1-tp for tp in toppoints[:,3]] # myfig.gca().scatter(toppoints[:,0],toppoints[:,1],s=int(points_per_bin_width*1.5),faceted=False,marker='s',c=cnlevel,cmap=matplotlib.cm.jet) plt.colorbar() #Determine limits based on injection point (if any) and min/max values min_xlim=min(toppoints[:,0]) max_xlim=max(toppoints[:,0]) min_ylim=min(toppoints[:,1]) max_ylim=max(toppoints[:,1]) if injpoint is not None: myfig.gca().plot([injpoint[0]],[injpoint[1]],'r*',ms=20.) if injpoint[0] < min(toppoints[:,0]): min_xlim=injpoint[0] elif injpoint[0] > max(toppoints[:,0]): max_xlim=injpoint[0] if injpoint[1] < min(toppoints[:,1]): min_ylim=injpoint[1] elif injpoint[1] > max(toppoints[:,1]): max_ylim=injpoint[1] |
plt.title("%s-%s histogram (greedy binning)"%(par1_name,par2_name)) | plt.title("%s-%s histogram (greedy binning)"%(par1name,par2name)) | def plot2Dbins(toppoints,cl,par1_name,par1_bin,par2_name,par2_bin,injpoint): #Work out good bin size xbins=int(ceil((max(toppoints[:,0])-min(toppoints[:,0]))/par1_bin)) ybins=int(ceil((max(toppoints[:,1])-min(toppoints[:,1]))/par2_bin)) _dpi=120 xsize_in_inches=6. xsize_points = xsize_in_inches * _dpi points_per_bin_width=xsize_points/xbins ysize_points=ybins*points_per_bin_width ysize_in_inches=ysize_points/_dpi # myfig=plt.figure(1,figsize=(xsize_in_inches+2,ysize_in_inches+2),dpi=_dpi,autoscale_on=True) cnlevel=[1-tp for tp in toppoints[:,3]] # myfig.gca().scatter(toppoints[:,0],toppoints[:,1],s=int(points_per_bin_width*1.5),faceted=False,marker='s',c=cnlevel,cmap=matplotlib.cm.jet) plt.colorbar() #Determine limits based on injection point (if any) and min/max values min_xlim=min(toppoints[:,0]) max_xlim=max(toppoints[:,0]) min_ylim=min(toppoints[:,1]) max_ylim=max(toppoints[:,1]) if injpoint is not None: myfig.gca().plot([injpoint[0]],[injpoint[1]],'r*',ms=20.) if injpoint[0] < min(toppoints[:,0]): min_xlim=injpoint[0] elif injpoint[0] > max(toppoints[:,0]): max_xlim=injpoint[0] if injpoint[1] < min(toppoints[:,1]): min_ylim=injpoint[1] elif injpoint[1] > max(toppoints[:,1]): max_ylim=injpoint[1] |
myfig.savefig(os.path.join(outdir,'2Dbins',par1_name+'-'+par2_name+'.png'),dpi=_dpi) | myfig.savefig(os.path.join(outdir,'2Dbins',par1name+'-'+par2name+'.png'),dpi=_dpi) | def plot2Dbins(toppoints,cl,par1_name,par1_bin,par2_name,par2_bin,injpoint): #Work out good bin size xbins=int(ceil((max(toppoints[:,0])-min(toppoints[:,0]))/par1_bin)) ybins=int(ceil((max(toppoints[:,1])-min(toppoints[:,1]))/par2_bin)) _dpi=120 xsize_in_inches=6. xsize_points = xsize_in_inches * _dpi points_per_bin_width=xsize_points/xbins ysize_points=ybins*points_per_bin_width ysize_in_inches=ysize_points/_dpi # myfig=plt.figure(1,figsize=(xsize_in_inches+2,ysize_in_inches+2),dpi=_dpi,autoscale_on=True) cnlevel=[1-tp for tp in toppoints[:,3]] # myfig.gca().scatter(toppoints[:,0],toppoints[:,1],s=int(points_per_bin_width*1.5),faceted=False,marker='s',c=cnlevel,cmap=matplotlib.cm.jet) plt.colorbar() #Determine limits based on injection point (if any) and min/max values min_xlim=min(toppoints[:,0]) max_xlim=max(toppoints[:,0]) min_ylim=min(toppoints[:,1]) max_ylim=max(toppoints[:,1]) if injpoint is not None: myfig.gca().plot([injpoint[0]],[injpoint[1]],'r*',ms=20.) if injpoint[0] < min(toppoints[:,0]): min_xlim=injpoint[0] elif injpoint[0] > max(toppoints[:,0]): max_xlim=injpoint[0] if injpoint[1] < min(toppoints[:,1]): min_ylim=injpoint[1] elif injpoint[1] > max(toppoints[:,1]): max_ylim=injpoint[1] |
def plotSkyMap(skypos,skyres,sky_injpoint): | def plotSkyMap(skypos,skyres,sky_injpoint,confidence_levels,pos,outdir): | def plot2Dbins(toppoints,cl,par1_name,par1_bin,par2_name,par2_bin,injpoint): #Work out good bin size xbins=int(ceil((max(toppoints[:,0])-min(toppoints[:,0]))/par1_bin)) ybins=int(ceil((max(toppoints[:,1])-min(toppoints[:,1]))/par2_bin)) _dpi=120 xsize_in_inches=6. xsize_points = xsize_in_inches * _dpi points_per_bin_width=xsize_points/xbins ysize_points=ybins*points_per_bin_width ysize_in_inches=ysize_points/_dpi # myfig=plt.figure(1,figsize=(xsize_in_inches+2,ysize_in_inches+2),dpi=_dpi,autoscale_on=True) cnlevel=[1-tp for tp in toppoints[:,3]] # myfig.gca().scatter(toppoints[:,0],toppoints[:,1],s=int(points_per_bin_width*1.5),faceted=False,marker='s',c=cnlevel,cmap=matplotlib.cm.jet) plt.colorbar() #Determine limits based on injection point (if any) and min/max values min_xlim=min(toppoints[:,0]) max_xlim=max(toppoints[:,0]) min_ylim=min(toppoints[:,1]) max_ylim=max(toppoints[:,1]) if injpoint is not None: myfig.gca().plot([injpoint[0]],[injpoint[1]],'r*',ms=20.) if injpoint[0] < min(toppoints[:,0]): min_xlim=injpoint[0] elif injpoint[0] > max(toppoints[:,0]): max_xlim=injpoint[0] if injpoint[1] < min(toppoints[:,1]): min_ylim=injpoint[1] elif injpoint[1] > max(toppoints[:,1]): max_ylim=injpoint[1] |
(skyinjectionconfidence,toppoints,skyreses)=bayespputils.calculateConfidenceLevels(shist,skypoints,injbin,float(opts.skyres),confidence_levels,len(pos)) | (skyinjectionconfidence,toppoints,skyreses)=bayespputils.calculateConfidenceLevels(shist,skypoints,injbin,float(skyres),confidence_levels,len(pos)) | def plotSkyMap(skypos,skyres,sky_injpoint): from pylal import skylocutils from mpl_toolkits.basemap import Basemap skypoints=array(skylocutils.gridsky(float(skyres))) skycarts=map(lambda s: pol2cart(s[1],s[0]),skypoints) skyinjectionconfidence=None shist=bayespputils.skyhist_cart(array(skycarts),skypos) #shist=skyhist_cart(skycarts,list(pos)) bins=skycarts # Find the bin of the injection if available injbin=None if sky_injpoint: injhist=skyhist_cart(skycarts,array([sky_injpoint])) injbin=injhist.tolist().index(1) print 'Found injection in bin %d with co-ordinates %f,%f .'%(injbin,skypoints[injbin,0],skypoints[injbin,1]) (skyinjectionconfidence,toppoints,skyreses)=bayespputils.calculateConfidenceLevels(shist,skypoints,injbin,float(opts.skyres),confidence_levels,len(pos)) if injbin and skyinjectionconfidence: i=list(np.nonzero(np.asarray(toppoints)[:,2]==injbin))[0] min_sky_area_containing_injection=float(opts.skyres)*float(opts.skyres)*i print 'Minimum sky area containing injection point = %f square degrees'%min_sky_area_containing_injection myfig=plt.figure() plt.clf() m=Basemap(projection='moll',lon_0=180.0,lat_0=0.0) plx,ply=m(np.asarray(toppoints)[::-1,1]*57.296,np.asarray(toppoints)[::-1,0]*57.296) cnlevel=[1-tp for tp in np.asarray(toppoints)[::-1,3]] plt.scatter(plx,ply,s=5,c=cnlevel,faceted=False,cmap=matplotlib.cm.jet) m.drawmapboundary() m.drawparallels(np.arange(-90.,120.,45.),labels=[1,0,0,0],labelstyle='+/-') # draw parallels m.drawmeridians(np.arange(0.,360.,90.),labels=[0,0,0,1],labelstyle='+/-') # draw meridians plt.title("Skymap") # add a title plt.colorbar() myfig.savefig(os.path.join(outdir,'skymap.png')) plt.clf() #Save skypoints np.savetxt('ranked_sky_pixels',column_stack([np.asarray(toppoints)[:,0:1],np.asarray(toppoints)[:,1],np.asarray(toppoints)[:,3]])) return skyreses,skyinjectionconfidence |
min_sky_area_containing_injection=float(opts.skyres)*float(opts.skyres)*i | min_sky_area_containing_injection=float(skyres)*float(skyres)*i | def plotSkyMap(skypos,skyres,sky_injpoint): from pylal import skylocutils from mpl_toolkits.basemap import Basemap skypoints=array(skylocutils.gridsky(float(skyres))) skycarts=map(lambda s: pol2cart(s[1],s[0]),skypoints) skyinjectionconfidence=None shist=bayespputils.skyhist_cart(array(skycarts),skypos) #shist=skyhist_cart(skycarts,list(pos)) bins=skycarts # Find the bin of the injection if available injbin=None if sky_injpoint: injhist=skyhist_cart(skycarts,array([sky_injpoint])) injbin=injhist.tolist().index(1) print 'Found injection in bin %d with co-ordinates %f,%f .'%(injbin,skypoints[injbin,0],skypoints[injbin,1]) (skyinjectionconfidence,toppoints,skyreses)=bayespputils.calculateConfidenceLevels(shist,skypoints,injbin,float(opts.skyres),confidence_levels,len(pos)) if injbin and skyinjectionconfidence: i=list(np.nonzero(np.asarray(toppoints)[:,2]==injbin))[0] min_sky_area_containing_injection=float(opts.skyres)*float(opts.skyres)*i print 'Minimum sky area containing injection point = %f square degrees'%min_sky_area_containing_injection myfig=plt.figure() plt.clf() m=Basemap(projection='moll',lon_0=180.0,lat_0=0.0) plx,ply=m(np.asarray(toppoints)[::-1,1]*57.296,np.asarray(toppoints)[::-1,0]*57.296) cnlevel=[1-tp for tp in np.asarray(toppoints)[::-1,3]] plt.scatter(plx,ply,s=5,c=cnlevel,faceted=False,cmap=matplotlib.cm.jet) m.drawmapboundary() m.drawparallels(np.arange(-90.,120.,45.),labels=[1,0,0,0],labelstyle='+/-') # draw parallels m.drawmeridians(np.arange(0.,360.,90.),labels=[0,0,0,1],labelstyle='+/-') # draw meridians plt.title("Skymap") # add a title plt.colorbar() myfig.savefig(os.path.join(outdir,'skymap.png')) plt.clf() #Save skypoints np.savetxt('ranked_sky_pixels',column_stack([np.asarray(toppoints)[:,0:1],np.asarray(toppoints)[:,1],np.asarray(toppoints)[:,3]])) return skyreses,skyinjectionconfidence |
skyreses=None if opts.skyres is not None: RAvec=array(pos)[:,paramnames.index('RA')] decvec=array(pos)[:,paramnames.index('dec')] skypos=column_stack([RAvec,decvec]) | def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1) if outdir is None: print "You must specify an output directory." exit(1) if not os.path.isdir(outdir): os.makedirs(outdir) summary_fo=open(os.path.join(outdir,'summary.ini'),'w') summary_file=ConfigParser() summary_file.add_section('metadata') summary_file.set('metadata','group_id','X') if eventnum: summary_file.set('metadata','event_id',str(eventnum)) summary_file.add_section('Confidence levels') summary_file.set('Confidence levels','confidence levels',str(confidence_levels)) paramnames, pos=loadDataFile(data[0]) if "m1" not in paramnames and "m2" not in paramnames and "mchirp" in paramnames and "eta" in paramnames: (m1,m2)=mc2ms(pos[:,paramnames.index('mchirp')],pos[:,paramnames.index('eta')]) pos=np.column_stack((pos,m1,m2)) paramnames.append("m1") paramnames.append("m2") Nd=len(paramnames) print "Number of posterior samples: " + str(size(pos,0)) means = mean(pos,axis=0) meanStr=map(str,means) out=reduce(lambda a,b:a+'||'+b,meanStr) print 'Means:' print '||'+out+'||' RAdim=paramnames.index('RA') decdim=paramnames.index('dec') injection=None if(injfile is not None): import itertools injections = SimInspiralUtils.ReadSimInspiralFromFiles([injfile]) if(eventnum is not None): if(len(injections)<eventnum): print "Error: You asked for event %d, but %s contains only %d injections" %(eventnum,injfile,len(injections)) sys.exit(1) else: injection=injections[eventnum] else: if(len(injections)<1): print 'Warning: Cannot find injection with end time %f' %(means[2]) else: injection = itertools.ifilter(lambda a: abs(a.get_end() - means[2]) < 0.1, injections).next() | def plotSkyMap(skypos,skyres,sky_injpoint): from pylal import skylocutils from mpl_toolkits.basemap import Basemap skypoints=array(skylocutils.gridsky(float(skyres))) skycarts=map(lambda s: pol2cart(s[1],s[0]),skypoints) skyinjectionconfidence=None shist=bayespputils.skyhist_cart(array(skycarts),skypos) #shist=skyhist_cart(skycarts,list(pos)) bins=skycarts # Find the bin of the injection if available injbin=None if sky_injpoint: injhist=skyhist_cart(skycarts,array([sky_injpoint])) injbin=injhist.tolist().index(1) print 'Found injection in bin %d with co-ordinates %f,%f .'%(injbin,skypoints[injbin,0],skypoints[injbin,1]) (skyinjectionconfidence,toppoints,skyreses)=bayespputils.calculateConfidenceLevels(shist,skypoints,injbin,float(opts.skyres),confidence_levels,len(pos)) if injbin and skyinjectionconfidence: i=list(np.nonzero(np.asarray(toppoints)[:,2]==injbin))[0] min_sky_area_containing_injection=float(opts.skyres)*float(opts.skyres)*i print 'Minimum sky area containing injection point = %f square degrees'%min_sky_area_containing_injection myfig=plt.figure() plt.clf() m=Basemap(projection='moll',lon_0=180.0,lat_0=0.0) plx,ply=m(np.asarray(toppoints)[::-1,1]*57.296,np.asarray(toppoints)[::-1,0]*57.296) cnlevel=[1-tp for tp in np.asarray(toppoints)[::-1,3]] plt.scatter(plx,ply,s=5,c=cnlevel,faceted=False,cmap=matplotlib.cm.jet) m.drawmapboundary() m.drawparallels(np.arange(-90.,120.,45.),labels=[1,0,0,0],labelstyle='+/-') # draw parallels m.drawmeridians(np.arange(0.,360.,90.),labels=[0,0,0,1],labelstyle='+/-') # draw meridians plt.title("Skymap") # add a title plt.colorbar() myfig.savefig(os.path.join(outdir,'skymap.png')) plt.clf() #Save skypoints np.savetxt('ranked_sky_pixels',column_stack([np.asarray(toppoints)[:,0:1],np.asarray(toppoints)[:,1],np.asarray(toppoints)[:,3]])) return skyreses,skyinjectionconfidence |
skyreses,skyinjectionconfidence=plotSkyMap(skypos,opts.skyres,(injpoint[RAdim],injpoint[decdim])) else: skyreses,skyinjectionconfidence=plotSkyMap(skypos,opts.skyres,None) myfig=plt.figure(1,figsize=(6,4),dpi=80) plt.clf() summary_file.add_section('2D greedy cl') ncon=len(confidence_levels) pos_array=np.array(pos) for par1_name,par2_name in twoDGreedyMenu: print "Binning %s-%s to determine confidence levels ..."%(par1_name,par2_name) try: par1_bin=GreedyRes[par1_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par1_name,par1_name,par2_name) continue try: par2_bin=GreedyRes[par2_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par2_name,par1_name,par2_name) continue try: par1_index=paramnames.index(par1_name) except ValueError: print "No input chain for %s, skipping %s/%s binning."%(par1_name,par1_name,par2_name) continue try: par2_index=paramnames.index(par2_name) except ValueError: print "No input chain for %s, skipping %s/%s binning."%(par2_name,par1_name,par2_name) continue par1pos=pos_array[:,par1_index] par2pos=pos_array[:,par2_index] par1pos_min=min(par1pos) par2pos_min=min(par2pos) par1pos_max=max(par1pos) par2pos_max=max(par2pos) par1pos_Nbins= int(ceil((par1pos_max - par1pos_min)/par1_bin))+1 par2pos_Nbins= int(ceil((par2pos_max - par2pos_min)/par2_bin))+1 greedyHist = np.zeros(par1pos_Nbins*par2pos_Nbins,dtype='i8') greedyPoints = np.zeros((par1pos_Nbins*par2pos_Nbins,2)) par1_point=par1pos_min par2_point=par2pos_min for i in range(par2pos_Nbins): | injpoint=map(lambda a: getinjpar(paramnames,injection,a),range(len(paramnames))) injvals=map(str,injpoint) out=reduce(lambda a,b:a+'||'+b,injvals) print 'Injected values:' print out summary_file.add_section('Injection values') for parnum in range(len(paramnames)): summary_file.set('Injection values',paramnames[parnum],getinjpar(paramnames,injection,parnum)) skyreses=None if skyres is not None: RAvec=array(pos)[:,paramnames.index('RA')] decvec=array(pos)[:,paramnames.index('dec')] skypos=column_stack([RAvec,decvec]) if injection: skyreses,skyinjectionconfidence=plotSkyMap(skypos,skyres,(injpoint[RAdim],injpoint[decdim]),confidence_levels,pos,outdir) else: skyreses,skyinjectionconfidence=plotSkyMap(skypos,skyres,None,confidence_levels,pos,outdir) myfig=plt.figure(1,figsize=(6,4),dpi=80) plt.clf() summary_file.add_section('2D greedy cl') ncon=len(confidence_levels) pos_array=np.array(pos) twoDGreedyCL={} for par1_name,par2_name in twoDGreedyMenu: print "Binning %s-%s to determine confidence levels ..."%(par1_name,par2_name) try: par1_bin=GreedyRes[par1_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par1_name,par1_name,par2_name) continue try: par2_bin=GreedyRes[par2_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par2_name,par1_name,par2_name) continue try: par1_index=paramnames.index(par1_name) except ValueError: print "No input chain for %s, skipping %s/%s binning."%(par1_name,par1_name,par2_name) continue try: par2_index=paramnames.index(par2_name) except ValueError: print "No input chain for %s, skipping %s/%s binning."%(par2_name,par1_name,par2_name) continue twoDGreedyCL["%s-%s"%(par1_name,par2_name)]={} par1pos=pos_array[:,par1_index] par2pos=pos_array[:,par2_index] par1pos_min=min(par1pos) par2pos_min=min(par2pos) par1pos_max=max(par1pos) par2pos_max=max(par2pos) par1pos_Nbins= int(ceil((par1pos_max - par1pos_min)/par1_bin))+1 par2pos_Nbins= int(ceil((par2pos_max - par2pos_min)/par2_bin))+1 greedyHist = np.zeros(par1pos_Nbins*par2pos_Nbins,dtype='i8') greedyPoints = np.zeros((par1pos_Nbins*par2pos_Nbins,2)) | def plotSkyMap(skypos,skyres,sky_injpoint): from pylal import skylocutils from mpl_toolkits.basemap import Basemap skypoints=array(skylocutils.gridsky(float(skyres))) skycarts=map(lambda s: pol2cart(s[1],s[0]),skypoints) skyinjectionconfidence=None shist=bayespputils.skyhist_cart(array(skycarts),skypos) #shist=skyhist_cart(skycarts,list(pos)) bins=skycarts # Find the bin of the injection if available injbin=None if sky_injpoint: injhist=skyhist_cart(skycarts,array([sky_injpoint])) injbin=injhist.tolist().index(1) print 'Found injection in bin %d with co-ordinates %f,%f .'%(injbin,skypoints[injbin,0],skypoints[injbin,1]) (skyinjectionconfidence,toppoints,skyreses)=bayespputils.calculateConfidenceLevels(shist,skypoints,injbin,float(opts.skyres),confidence_levels,len(pos)) if injbin and skyinjectionconfidence: i=list(np.nonzero(np.asarray(toppoints)[:,2]==injbin))[0] min_sky_area_containing_injection=float(opts.skyres)*float(opts.skyres)*i print 'Minimum sky area containing injection point = %f square degrees'%min_sky_area_containing_injection myfig=plt.figure() plt.clf() m=Basemap(projection='moll',lon_0=180.0,lat_0=0.0) plx,ply=m(np.asarray(toppoints)[::-1,1]*57.296,np.asarray(toppoints)[::-1,0]*57.296) cnlevel=[1-tp for tp in np.asarray(toppoints)[::-1,3]] plt.scatter(plx,ply,s=5,c=cnlevel,faceted=False,cmap=matplotlib.cm.jet) m.drawmapboundary() m.drawparallels(np.arange(-90.,120.,45.),labels=[1,0,0,0],labelstyle='+/-') # draw parallels m.drawmeridians(np.arange(0.,360.,90.),labels=[0,0,0,1],labelstyle='+/-') # draw meridians plt.title("Skymap") # add a title plt.colorbar() myfig.savefig(os.path.join(outdir,'skymap.png')) plt.clf() #Save skypoints np.savetxt('ranked_sky_pixels',column_stack([np.asarray(toppoints)[:,0:1],np.asarray(toppoints)[:,1],np.asarray(toppoints)[:,3]])) return skyreses,skyinjectionconfidence |
for j in range(par1pos_Nbins): greedyPoints[j+par1pos_Nbins*i,0]=par1_point greedyPoints[j+par1pos_Nbins*i,1]=par2_point par1_point+=par1_bin par2_point+=par2_bin injbin=None if injection: par1_injvalue=np.array(injpoint)[par1_index] par2_injvalue=np.array(injpoint)[par2_index] if par1_injvalue is not None and par2_injvalue is not None: par1_binNumber=floor((par1_injvalue-par1pos_min)/par1_bin) par2_binNumber=floor((par2_injvalue-par2pos_min)/par2_bin) injbin=int(par1_binNumber+par2_binNumber*par1pos_Nbins) elif par1_injvalue is None and par2_injvalue is not None: print "Injection value not found for %s!"%par1_name elif par1_injvalue is not None and par2_injvalue is None: print "Injection value not found for %s!"%par2_name for par1_samp,par2_samp in zip(par1pos,par2pos): par1_binNumber=floor((par1_samp-par1pos_min)/par1_bin) par2_binNumber=floor((par2_samp-par2pos_min)/par2_bin) greedyHist[par1_binNumber+par2_binNumber*par1pos_Nbins]+=1 (injectionconfidence,toppoints,reses)=bayespputils.calculateConfidenceLevels(greedyHist,greedyPoints,injbin,float(sqrt(par1_bin*par2_bin)),confidence_levels,int(len(par1pos))) areastr='' for (frac,area) in reses: areastr+='%s,'%str(area) areastr=areastr.rstrip(',') summary_file.set('2D greedy cl',par1_name+','+par2_name,str(areastr)) if injection is not None and injectionconfidence is not None: summary_file.set('2D greedy cl',par1_name+','+par2_name+'_inj',str(injectionconfidence)) if injection is not None and par1_injvalue is not None and par2_injvalue is not None: plot2Dbins(np.array(toppoints),confidence_levels,par1_name,par1_bin,par2_name,par2_bin,[par1_injvalue,par2_injvalue]) else: plot2Dbins(np.array(toppoints),confidence_levels,par1_name,par1_bin,par2_name,par2_bin,None) summary_file.add_section('1D mean') summary_file.add_section('1D median') summary_file.add_section('1D mode') summary_file.add_section('1D contigious cl') summary_file.add_section('1D greedy cl') summary_file.add_section('1D stacc') max_i=0 max_pos=pos_array[0,-1] par_samps=pos_array[:,-1] for i in range(len(pos_array)): if par_samps[i] > max_pos: max_pos=par_samps[i] max_i=i for par_name in oneDMenu: print "Binning %s to determine confidence levels ..."%par_name try: par_index=paramnames.index(par_name) except ValueError: print "No input chain for %s, skipping binning."%par_name continue try: par_bin=GreedyRes[par_name] except KeyError: print "Bin size is not set for %s, skipping binning."%par_name continue par_samps=pos_array[:,par_index] summary_file.set('1D mode',par_name,str(par_samps[max_i])) summary_file.set("1D mean",par_name,str(np.mean(par_samps))) summary_file.set("1D median",par_name,str(np.median(par_samps))) parpos_min=min(par_samps) parpos_max=max(par_samps) par_point=parpos_min parpos_Nbins= int(ceil((parpos_max - parpos_min)/par_bin))+1 greedyPoints=np.zeros((parpos_Nbins,2)) greedyHist=np.zeros(parpos_Nbins,dtype='i8') for i in range(parpos_Nbins): greedyPoints[i,0]=par_point greedyPoints[i,1]=par_point par_point+=par_bin for par_samp in par_samps: par_binNumber=int(floor((par_samp-parpos_min)/par_bin)) | par2_point=par2pos_min for i in range(par2pos_Nbins): par1_point=par1pos_min for j in range(par1pos_Nbins): greedyPoints[j+par1pos_Nbins*i,0]=par1_point greedyPoints[j+par1pos_Nbins*i,1]=par2_point par1_point+=par1_bin par2_point+=par2_bin injbin=None if injection: par1_injvalue=np.array(injpoint)[par1_index] par2_injvalue=np.array(injpoint)[par2_index] if par1_injvalue is not None and par2_injvalue is not None: par1_binNumber=floor((par1_injvalue-par1pos_min)/par1_bin) par2_binNumber=floor((par2_injvalue-par2pos_min)/par2_bin) injbin=int(par1_binNumber+par2_binNumber*par1pos_Nbins) elif par1_injvalue is None and par2_injvalue is not None: print "Injection value not found for %s!"%par1_name elif par1_injvalue is not None and par2_injvalue is None: print "Injection value not found for %s!"%par2_name for par1_samp,par2_samp in zip(par1pos,par2pos): par1_binNumber=floor((par1_samp-par1pos_min)/par1_bin) par2_binNumber=floor((par2_samp-par2pos_min)/par2_bin) greedyHist[par1_binNumber+par2_binNumber*par1pos_Nbins]+=1 (injectionconfidence,toppoints,reses)=bayespputils.calculateConfidenceLevels(greedyHist,greedyPoints,injbin,float(sqrt(par1_bin*par2_bin)),confidence_levels,int(len(par1pos))) areastr='' for (frac,area) in reses: areastr+='%s,'%str(area) twoDGreedyCL["%s-%s"%(par1_name,par2_name)][frac]=area areastr=areastr.rstrip(',') summary_file.set('2D greedy cl',par1_name+','+par2_name,str(areastr)) if injection is not None and injectionconfidence is not None: summary_file.set('2D greedy cl',par1_name+','+par2_name+'_inj',str(injectionconfidence)) if injection is not None and par1_injvalue is not None and par2_injvalue is not None: plot2Dbins(np.array(toppoints),par1_bin,par2_bin,outdir,par1name=par1_name,par2name=par2_name,injpoint=[par1_injvalue,par2_injvalue]) else: plot2Dbins(np.array(toppoints),par1_bin,par2_bin,outdir,par1name=par1_name,par2name=par2_name) pickle_to_file(twoDGreedyCL,os.path.join(outdir,'twoGreedyCL.pickle')) for par in twoDGreedyCL.keys(): oneD_dict_to_file(twoDGreedyCL[par],os.path.join(outdir,str(par)+'_greedy2.dat')) summary_file.add_section('1D mean') summary_file.add_section('1D median') summary_file.add_section('1D mode') summary_file.add_section('1D contigious cl') summary_file.add_section('1D greedy cl') summary_file.add_section('1D stacc') max_i=0 oneDStats={} oneDGreedyCL={} oneDContCL={} max_pos=pos_array[0,-1] par_samps=pos_array[:,-1] for i in range(len(pos_array)): if par_samps[i] > max_pos: max_pos=par_samps[i] max_i=i for par_name in oneDMenu: print "Binning %s to determine confidence levels ..."%par_name | def plotSkyMap(skypos,skyres,sky_injpoint): from pylal import skylocutils from mpl_toolkits.basemap import Basemap skypoints=array(skylocutils.gridsky(float(skyres))) skycarts=map(lambda s: pol2cart(s[1],s[0]),skypoints) skyinjectionconfidence=None shist=bayespputils.skyhist_cart(array(skycarts),skypos) #shist=skyhist_cart(skycarts,list(pos)) bins=skycarts # Find the bin of the injection if available injbin=None if sky_injpoint: injhist=skyhist_cart(skycarts,array([sky_injpoint])) injbin=injhist.tolist().index(1) print 'Found injection in bin %d with co-ordinates %f,%f .'%(injbin,skypoints[injbin,0],skypoints[injbin,1]) (skyinjectionconfidence,toppoints,skyreses)=bayespputils.calculateConfidenceLevels(shist,skypoints,injbin,float(opts.skyres),confidence_levels,len(pos)) if injbin and skyinjectionconfidence: i=list(np.nonzero(np.asarray(toppoints)[:,2]==injbin))[0] min_sky_area_containing_injection=float(opts.skyres)*float(opts.skyres)*i print 'Minimum sky area containing injection point = %f square degrees'%min_sky_area_containing_injection myfig=plt.figure() plt.clf() m=Basemap(projection='moll',lon_0=180.0,lat_0=0.0) plx,ply=m(np.asarray(toppoints)[::-1,1]*57.296,np.asarray(toppoints)[::-1,0]*57.296) cnlevel=[1-tp for tp in np.asarray(toppoints)[::-1,3]] plt.scatter(plx,ply,s=5,c=cnlevel,faceted=False,cmap=matplotlib.cm.jet) m.drawmapboundary() m.drawparallels(np.arange(-90.,120.,45.),labels=[1,0,0,0],labelstyle='+/-') # draw parallels m.drawmeridians(np.arange(0.,360.,90.),labels=[0,0,0,1],labelstyle='+/-') # draw meridians plt.title("Skymap") # add a title plt.colorbar() myfig.savefig(os.path.join(outdir,'skymap.png')) plt.clf() #Save skypoints np.savetxt('ranked_sky_pixels',column_stack([np.asarray(toppoints)[:,0:1],np.asarray(toppoints)[:,1],np.asarray(toppoints)[:,3]])) return skyreses,skyinjectionconfidence |
greedyHist[par_binNumber]+=1 except IndexError: print "IndexError: bin number: %i total bins: %i parsamp: %f bin: %f - %f"%(par_binNumber,parpos_Nbins,par_samp,greedyPoints[par_binNumber-1,0],greedyPoints[par_binNumber-1,0]+par_bin) injbin=None if injection: par_injvalue=np.array(injpoint)[par_index] if par_injvalue is not None: par_binNumber=floor((par_injvalue-parpos_min)/par_bin) injbin=par_binNumber j=0 print "Calculating contigious confidence intervals for %s..."%par_name len_par_samps=len(par_samps) while j < len(confidence_levels): confidence_level=confidence_levels[j] max_left=0 max_right=0 for i in range(len(greedyHist)): max_frac=None left=0 right=i while right<len(greedyHist): Npoints=sum(greedyHist[left:right]) frac=float(Npoints)/float(len_par_samps) if frac>confidence_level: if max_frac is None: max_frac=frac max_left=left max_right=right else: if frac>max_frac: | par_index=paramnames.index(par_name) except ValueError: print "No input chain for %s, skipping binning."%par_name continue try: par_bin=GreedyRes[par_name] except KeyError: print "Bin size is not set for %s, skipping binning."%par_name continue oneDGreedyCL[par_name]={} oneDStats[par_name]={} oneDContCL[par_name]={} par_samps=pos_array[:,par_index] summary_file.set('1D mode',par_name,str(par_samps[max_i])) summary_file.set("1D mean",par_name,str(np.mean(par_samps))) summary_file.set("1D median",par_name,str(np.median(par_samps))) oneDStats[par_name]['mode']=par_samps[max_i] oneDStats[par_name]['mean']=np.mean(par_samps) oneDStats[par_name]['median']=np.median(par_samps) parpos_min=min(par_samps) parpos_max=max(par_samps) par_point=parpos_min parpos_Nbins= int(ceil((parpos_max - parpos_min)/par_bin))+1 greedyPoints=np.zeros((parpos_Nbins,2)) greedyHist=np.zeros(parpos_Nbins,dtype='i8') for i in range(parpos_Nbins): greedyPoints[i,0]=par_point greedyPoints[i,1]=par_point par_point+=par_bin for par_samp in par_samps: par_binNumber=int(floor((par_samp-parpos_min)/par_bin)) try: greedyHist[par_binNumber]+=1 except IndexError: print "IndexError: bin number: %i total bins: %i parsamp: %f bin: %f - %f"%(par_binNumber,parpos_Nbins,par_samp,greedyPoints[par_binNumber-1,0],greedyPoints[par_binNumber-1,0]+par_bin) injbin=None if injection: par_injvalue=np.array(injpoint)[par_index] if par_injvalue is not None: par_binNumber=floor((par_injvalue-parpos_min)/par_bin) injbin=par_binNumber j=0 print "Calculating contigious confidence intervals for %s..."%par_name len_par_samps=len(par_samps) while j < len(confidence_levels): confidence_level=confidence_levels[j] max_left=0 max_right=0 for i in range(len(greedyHist)): max_frac=None left=0 right=i while right<len(greedyHist): Npoints=sum(greedyHist[left:right]) frac=float(Npoints)/float(len_par_samps) if frac>confidence_level: if max_frac is None: | def plotSkyMap(skypos,skyres,sky_injpoint): from pylal import skylocutils from mpl_toolkits.basemap import Basemap skypoints=array(skylocutils.gridsky(float(skyres))) skycarts=map(lambda s: pol2cart(s[1],s[0]),skypoints) skyinjectionconfidence=None shist=bayespputils.skyhist_cart(array(skycarts),skypos) #shist=skyhist_cart(skycarts,list(pos)) bins=skycarts # Find the bin of the injection if available injbin=None if sky_injpoint: injhist=skyhist_cart(skycarts,array([sky_injpoint])) injbin=injhist.tolist().index(1) print 'Found injection in bin %d with co-ordinates %f,%f .'%(injbin,skypoints[injbin,0],skypoints[injbin,1]) 
(skyinjectionconfidence,toppoints,skyreses)=bayespputils.calculateConfidenceLevels(shist,skypoints,injbin,float(opts.skyres),confidence_levels,len(pos)) if injbin and skyinjectionconfidence: i=list(np.nonzero(np.asarray(toppoints)[:,2]==injbin))[0] min_sky_area_containing_injection=float(opts.skyres)*float(opts.skyres)*i print 'Minimum sky area containing injection point = %f square degrees'%min_sky_area_containing_injection myfig=plt.figure() plt.clf() m=Basemap(projection='moll',lon_0=180.0,lat_0=0.0) plx,ply=m(np.asarray(toppoints)[::-1,1]*57.296,np.asarray(toppoints)[::-1,0]*57.296) cnlevel=[1-tp for tp in np.asarray(toppoints)[::-1,3]] plt.scatter(plx,ply,s=5,c=cnlevel,faceted=False,cmap=matplotlib.cm.jet) m.drawmapboundary() m.drawparallels(np.arange(-90.,120.,45.),labels=[1,0,0,0],labelstyle='+/-') # draw parallels m.drawmeridians(np.arange(0.,360.,90.),labels=[0,0,0,1],labelstyle='+/-') # draw meridians plt.title("Skymap") # add a title plt.colorbar() myfig.savefig(os.path.join(outdir,'skymap.png')) plt.clf() #Save skypoints np.savetxt('ranked_sky_pixels',column_stack([np.asarray(toppoints)[:,0:1],np.asarray(toppoints)[:,1],np.asarray(toppoints)[:,3]])) return skyreses,skyinjectionconfidence |
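plotSkyMap above calls a pol2cart helper that does not appear in this hunk. A minimal sketch, assuming it maps a (longitude, latitude) pair in radians onto a Cartesian unit vector so that skyhist_cart can bin sky positions:

from math import cos, sin

def pol2cart(longitude, latitude):
    # Assumed helper (not shown in this hunk): a sky position in radians
    # mapped onto a unit vector; the real pylal routine may differ in
    # argument order or conventions.
    x = cos(latitude) * cos(longitude)
    y = cos(latitude) * sin(longitude)
    z = sin(latitude)
    return x, y, z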
left+=1 right+=1 if max_frac is not None: break if max_frac is None: print "Cant determine intervals at %f confidence!"%confidence_level | else: if frac>max_frac: max_frac=frac max_left=left max_right=right left+=1 right+=1 if max_frac is not None: break if max_frac is None: print "Cant determine intervals at %f confidence!"%confidence_level else: summary_file.set('1D contigious cl',par_name,'['+str(max_left*par_bin)+','+str(max_right*par_bin)+','+str((max_right-max_left)*par_bin)+']') oneDContCL[par_name]['left']=max_left*par_bin oneDContCL[par_name]['right']=max_right*par_bin oneDContCL[par_name]['width']=(max_right-max_left)*par_bin k=j while k+1<len(confidence_levels) : if confidence_levels[k+1]<max_frac: j+=1 k+=1 j+=1 (injectionconfidence,toppoints,reses)=bayespputils.calculateConfidenceLevels(greedyHist,greedyPoints,injbin,sqrt(par_bin),confidence_levels,len(par_samps)) areastr='' for (frac,area) in reses: areastr+='%s,'%str(area) oneDGreedyCL[par_name][frac]=area areastr=areastr.rstrip(',') summary_file.set('1D greedy cl',par_name,'['+str(areastr)+']') if injection is not None and injectionconfidence is not None: summary_file.set('1D greedy cl',par_name+'_inj',str(injectionconfidence)) if injection: injvalue=np.array(injpoint)[par_index] if injvalue is not None: stacc=sqrt(np.var(par_samps)+pow((np.mean(par_samps)-injvalue),2) ) summary_file.set('1D stacc',par_name,str(stacc)) oneDStats[par_name]['stacc']=stacc pickle_to_file(oneDGreedyCL,os.path.join(outdir,'oneDGreedyCL.pickle')) pickle_to_file(oneDContCL,os.path.join(outdir,'oneDContCL.pickle')) pickle_to_file(oneDStats,os.path.join(outdir,'oneDStats.pickle')) for par in oneDGreedyCL.keys(): oneD_dict_to_file(oneDGreedyCL[par],os.path.join(outdir,str(par)+'_greedy1.dat')) for par in oneDGreedyCL.keys(): oneD_dict_to_file(oneDContCL[par],os.path.join(outdir,str(par)+'_cont.dat')) for par in oneDStats.keys(): oneD_dict_to_file(oneDStats[par],os.path.join(outdir,str(par)+'_stats.dat')) for par1,par2 in twoDplots: try: i=paramnames.index(par1) except ValueError: print "No input chain for %s, skipping 2D plot of %s-%s."%(par1,par1,par2) continue try: j=paramnames.index(par2) except ValueError: print "No input chain for %s, skipping 2D plot of %s-%s."%(par2,par1,par2) continue print 'Generating %s-%s plot'%(paramnames[i],paramnames[j]) if (size(np.unique(pos[:,i]))<2 or size(np.unique(pos[:,j]))<2): continue plot2Dkernel(pos[:,i],pos[:,j],50,50) if injection and reduce (lambda a,b: a and b, map(lambda idx: getinjpar(paramnames,injection,idx)>min(pos[:,idx]) and getinjpar(paramnames,injection,idx)<max(pos[:,idx]),[i,j])) : if getinjpar(paramnames,injection,i) is not None and getinjpar(paramnames,injection,j) is not None: plt.plot([getinjpar(paramnames,injection,i)],[getinjpar(paramnames,injection,j)],'go',scalex=False,scaley=False) plt.xlabel(paramnames[i]) plt.ylabel(paramnames[j]) plt.grid() margdir=os.path.join(outdir,'2D') if not os.path.isdir(margdir+'/'): os.mkdir(margdir) myfig.savefig(os.path.join(margdir,paramnames[i]+'-'+paramnames[j]+'_2Dkernel.png')) myfig.clear() htmlfile=open(os.path.join(outdir,'posplots.html'),'w') htmlfile.write('<HTML><HEAD><TITLE>Posterior PDFs</TITLE></HEAD><BODY><h3>'+str(means[2])+' Posterior PDFs</h3>') if(skyres is not None): htmlfile.write('<table border=1><tr><td>Confidence region<td>size (sq. 
deg)</tr>') for (frac,skysize) in skyreses: htmlfile.write('<tr><td>%f<td>%f</tr>'%(frac,skysize)) htmlfile.write('</table>') htmlfile.write('Produced from '+str(size(pos,0))+' posterior samples.<br>') htmlfile.write('Samples read from %s<br>'%(data[0])) htmlfile.write('<h4>Mean parameter estimates</h4>') htmlfile.write('<table border=1><tr>') paramline=reduce(lambda a,b:a+'<td>'+b,paramnames) htmlfile.write('<td>'+paramline+'<td>logLmax</tr><tr>') meanline=reduce(lambda a,b:a+'<td>'+b,meanStr) htmlfile.write('<td>'+meanline+'</tr>') if injection: htmlfile.write('<tr><th colspan='+str(len(paramnames))+'>Injected values</tr>') injline=reduce(lambda a,b:a+'<td>'+b,injvals) htmlfile.write('<td>'+injline+'<td></tr>') htmlfile.write('</table>') if injection: if skyinjectionconfidence: htmlfile.write('<p>Injection found at confidence interval %f in sky location</p>'%(skyinjectionconfidence)) | def plotSkyMap(skypos,skyres,sky_injpoint): from pylal import skylocutils from mpl_toolkits.basemap import Basemap skypoints=array(skylocutils.gridsky(float(skyres))) skycarts=map(lambda s: pol2cart(s[1],s[0]),skypoints) skyinjectionconfidence=None shist=bayespputils.skyhist_cart(array(skycarts),skypos) #shist=skyhist_cart(skycarts,list(pos)) bins=skycarts # Find the bin of the injection if available injbin=None if sky_injpoint: injhist=skyhist_cart(skycarts,array([sky_injpoint])) injbin=injhist.tolist().index(1) print 'Found injection in bin %d with co-ordinates %f,%f .'%(injbin,skypoints[injbin,0],skypoints[injbin,1]) (skyinjectionconfidence,toppoints,skyreses)=bayespputils.calculateConfidenceLevels(shist,skypoints,injbin,float(opts.skyres),confidence_levels,len(pos)) if injbin and skyinjectionconfidence: i=list(np.nonzero(np.asarray(toppoints)[:,2]==injbin))[0] min_sky_area_containing_injection=float(opts.skyres)*float(opts.skyres)*i print 'Minimum sky area containing injection point = %f square degrees'%min_sky_area_containing_injection myfig=plt.figure() plt.clf() m=Basemap(projection='moll',lon_0=180.0,lat_0=0.0) plx,ply=m(np.asarray(toppoints)[::-1,1]*57.296,np.asarray(toppoints)[::-1,0]*57.296) cnlevel=[1-tp for tp in np.asarray(toppoints)[::-1,3]] plt.scatter(plx,ply,s=5,c=cnlevel,faceted=False,cmap=matplotlib.cm.jet) m.drawmapboundary() m.drawparallels(np.arange(-90.,120.,45.),labels=[1,0,0,0],labelstyle='+/-') # draw parallels m.drawmeridians(np.arange(0.,360.,90.),labels=[0,0,0,1],labelstyle='+/-') # draw meridians plt.title("Skymap") # add a title plt.colorbar() myfig.savefig(os.path.join(outdir,'skymap.png')) plt.clf() #Save skypoints np.savetxt('ranked_sky_pixels',column_stack([np.asarray(toppoints)[:,0:1],np.asarray(toppoints)[:,1],np.asarray(toppoints)[:,3]])) return skyreses,skyinjectionconfidence |
summary_file.set('1D contigious cl',par_name,'['+str(max_left*par_bin)+','+str(max_right*par_bin)+','+str((max_right-max_left)*par_bin)+']') k=j while k+1<len(confidence_levels) : if confidence_levels[k+1]<max_frac: j+=1 k+=1 j+=1 (injectionconfidence,toppoints,reses)=bayespputils.calculateConfidenceLevels(greedyHist,greedyPoints,injbin,sqrt(par_bin),confidence_levels,len(par_samps)) areastr='' for (frac,area) in reses: areastr+='%s,'%str(area) areastr=areastr.rstrip(',') summary_file.set('1D greedy cl',par_name,'['+str(areastr)+']') if injection is not None and injectionconfidence is not None: summary_file.set('1D greedy cl',par_name+'_inj',str(injectionconfidence)) if injection: injvalue=np.array(injpoint)[par_index] if injvalue is not None: stacc=sqrt(np.var(par_samps)+pow((np.mean(par_samps)-injvalue),2) ) summary_file.set('1D stacc',par_name,str(stacc)) def plot2Dkernel(xdat,ydat,Nx,Ny): xax=np.linspace(min(xdat),max(xdat),Nx) yax=np.linspace(min(ydat),max(ydat),Ny) x,y=np.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = np.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) | htmlfile.write('<p>Injection not found in posterior bins in sky location!</p>') htmlfile.write('<h5>2D Marginal PDFs</h5><br>') htmlfile.write('<table border=1><tr>') if skyres is not None: htmlfile.write('<td width=30%><img width=100% src="skymap.png"></td>') else: htmlfile.write('<td width=30%><img width=100% src="m1dist.png:></td>') row_switch=1 for par1,par2 in twoDplots: if row_switch==3: row_switch=0 plot_path=None if os.path.isfile(os.path.join(outdir,'2D',par1+'-'+par2+'_2Dkernel.png')): plot_path='2D/'+par1+'-'+par2+'_2Dkernel.png' elif os.path.isfile(os.path.join(outdir,'2D',par2+'-'+par1+'_2Dkernel.png')): plot_path='2D/'+par2+'-'+par1+'_2Dkernel.png' if plot_path: if row_switch==0: htmlfile.write('<tr>') htmlfile.write('<td width=30%><img width=100% src="'+plot_path+'"></td>') if row_switch==2: htmlfile.write('</tr>') row_switch+=1 if row_switch==2: htmlfile.write('<td></td></tr>') elif row_switch==1: htmlfile.write('<td></td><td></td></tr>') htmlfile.write('</table>') htmlfile.write('<br><a href="2D/">All 2D Marginal PDFs</a><hr><h5>1D marginal posterior PDFs</h5><br>') summary_file.add_section('1D ranking kde') summary_file.add_section('1D ranking bins') for param in oneDMenu: try: par_index=paramnames.index(param) i=par_index except ValueError: print "No input chain for %s, skipping 1D plot."%param continue myfig=plt.figure(figsize=(4,3.5),dpi=80) histbins=50 (n, bins, patches)=plt.hist(pos[:,i],histbins,normed='true') histbinSize=bins[1]-bins[0] np.seterr(under='ignore') scipy.seterr(under='ignore') try: gkde=stats.kde.gaussian_kde(pos[:,i]) pass except np.linalg.linalg.LinAlgError: print "Error occured generating plot for parameter %s: %s ! 
Trying next parameter."%(paramnames[i],'LinAlgError') continue print "Generating 1D plot for %s."%param ind=np.linspace(min(pos[:,i]),max(pos[:,i]),101) kdepdf=gkde.evaluate(ind) plt.plot(ind,kdepdf,label='density estimate') if injection and min(pos[:,i])<getinjpar(paramnames,injection,i) and max(pos[:,i])>getinjpar(paramnames,injection,i): plt.plot([getinjpar(paramnames,injection,i),getinjpar(paramnames,injection,i)],[0,max(kdepdf)],'r-.',scalex=False,scaley=False) bins_to_inj=(getinjpar(paramnames,injection,i)-bins[0])/histbinSize injbinh=int(floor(bins_to_inj)) injbin_frac=bins_to_inj-float(injbinh) rbins=(sum(n[0:injbinh-1])+injbin_frac*n[injbinh])*histbinSize print "r of injected value of %s (bins) = %f"%(param, rbins) summary_file.set('1D ranking bins',param,rbins) plt.grid() plt.xlabel(paramnames[i]) plt.ylabel('Probability Density') myfig.savefig(os.path.join(outdir,paramnames[i]+ '.png')) myfig=plt.figure(figsize=(4,3.5),dpi=80) plt.plot(pos[:,i],'.') if injection and min(pos[:,i])<getinjpar(paramnames,injection,i) and max(pos[:,i])>getinjpar(paramnames,injection,i): plt.plot([0,len(pos)],[getinjpar(paramnames,injection,i),getinjpar(paramnames,injection,i)],'r-.') myfig.savefig(os.path.join(outdir,paramnames[i]+'_samps.png')) htmlfile.write('<img src="'+paramnames[i]+'.png"><img src="'+paramnames[i]+'_samps.png"><br>') htmlfile.write('<hr><br />Produced using cbcBayesSkyRes.py at '+strftime("%Y-%m-%d %H:%M:%S")) htmlfile.write('</BODY></HTML>') htmlfile.close() posfilename=os.path.join(outdir,'posterior_samples.dat') posfile=open(posfilename,'w') for row in pos: for i in row: posfile.write('%f\t'%(i)) posfile.write('\n') posfile.close() summary_file.write(summary_fo) if __name__=='__main__': parser=OptionParser() parser.add_option("-o","--outpath", dest="outpath",help="make page and plots in DIR", metavar="DIR") parser.add_option("-d","--data",dest="data",action="append",help="datafile") parser.add_option("-i","--inj",dest="injfile",help="SimInsipral injection file",metavar="INJ.XML",default=None) parser.add_option("--skyres",dest="skyres",help="Sky resolution to use to calculate sky box size",default=None) parser.add_option("--eventnum",dest="eventnum",action="store",default=None,help="event number in SimInspiral file of this signal",type="int",metavar="NUM") (opts,args)=parser.parse_args() oneDMenu=['mtotal','m1','m2','mchirp','mc','distance','distMPC','dist','iota','eta','RA','dec','a1','a2','phi1','theta1','phi2','theta2'] twoDGreedyMenu=[['mc','eta'],['mchirp','eta'],['m1','m2'],['mtotal','eta'],['distance','iota'],['dist','iota'],['dist','m1'],['RA','dec']] greedyRes={'mc':0.025,'m1':0.1,'m2':0.1,'mtotal':0.1,'eta':0.001,'iota':0.01,'time':1e-4,'distance':1.0,'dist':1.0,'mchirp':0.025,'a1':0.02,'a2':0.02,'phi1':0.05,'phi2':0.05,'theta1':0.05,'theta2':0.05,'RA':0.005,'dec':0.005} confidenceLevels=[0.67,0.9,0.95,0.99] twoDplots=[['mc','eta'],['mchirp','eta'],['m1','m2'],['mtotal','eta'],['distance','iota'],['dist','iota'],['RA','dec'],['m1','dist'],['m2','dist']] | def plotSkyMap(skypos,skyres,sky_injpoint): from pylal import skylocutils from mpl_toolkits.basemap import Basemap skypoints=array(skylocutils.gridsky(float(skyres))) skycarts=map(lambda s: pol2cart(s[1],s[0]),skypoints) skyinjectionconfidence=None shist=bayespputils.skyhist_cart(array(skycarts),skypos) #shist=skyhist_cart(skycarts,list(pos)) bins=skycarts # Find the bin of the injection if available injbin=None if sky_injpoint: injhist=skyhist_cart(skycarts,array([sky_injpoint])) injbin=injhist.tolist().index(1) 
print 'Found injection in bin %d with co-ordinates %f,%f .'%(injbin,skypoints[injbin,0],skypoints[injbin,1]) (skyinjectionconfidence,toppoints,skyreses)=bayespputils.calculateConfidenceLevels(shist,skypoints,injbin,float(opts.skyres),confidence_levels,len(pos)) if injbin and skyinjectionconfidence: i=list(np.nonzero(np.asarray(toppoints)[:,2]==injbin))[0] min_sky_area_containing_injection=float(opts.skyres)*float(opts.skyres)*i print 'Minimum sky area containing injection point = %f square degrees'%min_sky_area_containing_injection myfig=plt.figure() plt.clf() m=Basemap(projection='moll',lon_0=180.0,lat_0=0.0) plx,ply=m(np.asarray(toppoints)[::-1,1]*57.296,np.asarray(toppoints)[::-1,0]*57.296) cnlevel=[1-tp for tp in np.asarray(toppoints)[::-1,3]] plt.scatter(plx,ply,s=5,c=cnlevel,faceted=False,cmap=matplotlib.cm.jet) m.drawmapboundary() m.drawparallels(np.arange(-90.,120.,45.),labels=[1,0,0,0],labelstyle='+/-') # draw parallels m.drawmeridians(np.arange(0.,360.,90.),labels=[0,0,0,1],labelstyle='+/-') # draw meridians plt.title("Skymap") # add a title plt.colorbar() myfig.savefig(os.path.join(outdir,'skymap.png')) plt.clf() #Save skypoints np.savetxt('ranked_sky_pixels',column_stack([np.asarray(toppoints)[:,0:1],np.asarray(toppoints)[:,1],np.asarray(toppoints)[:,3]])) return skyreses,skyinjectionconfidence |
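The greedy step delegated to bayespputils.calculateConfidenceLevels amounts to ranking bins by population and accumulating until each requested fraction of samples is enclosed. A rough sketch of that idea, not the library's actual implementation (hist, bin_area, level and n_samples are placeholders):

import numpy as np

def greedy_area(hist, bin_area, level, n_samples):
    # Rank bins from most to least populated, then count how many are
    # needed before the enclosed fraction of samples reaches `level`.
    order = np.argsort(hist)[::-1]
    cumfrac = np.cumsum(np.asarray(hist, dtype=float)[order]) / n_samples
    nbins = int(np.searchsorted(cumfrac, level)) + 1
    return nbins * bin_area

# e.g. greedy_area(greedyHist, par_bin, 0.9, len(par_samps))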
z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() plt.imshow(z,extent=(xax[0],xax[-1],yax[0],yax[-1]),aspect=asp,origin='lower') plt.colorbar() for par1,par2 in twoDplots: try: i=paramnames.index(par1) except ValueError: print "No input chain for %s, skipping 2D plot of %s-%s."%(par1,par1,par2) continue try: j=paramnames.index(par2) except ValueError: print "No input chain for %s, skipping 2D plot of %s-%s."%(par2,par1,par2) continue print 'Generating %s-%s plot'%(paramnames[i],paramnames[j]) if (size(np.unique(pos[:,i]))<2 or size(np.unique(pos[:,j]))<2): continue plot2Dkernel(pos[:,i],pos[:,j],50,50) if injection and reduce (lambda a,b: a and b, map(lambda idx: getinjpar(injection,idx)>min(pos[:,idx]) and getinjpar(injection,idx)<max(pos[:,idx]),[i,j])) : if getinjpar(injection,i) is not None and getinjpar(injection,j) is not None: plt.plot([getinjpar(injection,i)],[getinjpar(injection,j)],'go',scalex=False,scaley=False) plt.xlabel(paramnames[i]) plt.ylabel(paramnames[j]) plt.grid() margdir=os.path.join(outdir,'2D') if not os.path.isdir(margdir+'/'): os.mkdir(margdir) myfig.savefig(os.path.join(margdir,paramnames[i]+'-'+paramnames[j]+'_2Dkernel.png')) myfig.clear() htmlfile=open(os.path.join(outdir,'posplots.html'),'w') htmlfile.write('<HTML><HEAD><TITLE>Posterior PDFs</TITLE></HEAD><BODY><h3>'+str(means[2])+' Posterior PDFs</h3>') if(opts.skyres is not None): htmlfile.write('<table border=1><tr><td>Confidence region<td>size (sq. deg)</tr>') for (frac,skysize) in skyreses: htmlfile.write('<tr><td>%f<td>%f</tr>'%(frac,skysize)) htmlfile.write('</table>') htmlfile.write('Produced from '+str(size(pos,0))+' posterior samples.<br>') htmlfile.write('Samples read from %s<br>'%(opts.data[0])) htmlfile.write('<h4>Mean parameter estimates</h4>') htmlfile.write('<table border=1><tr>') paramline=reduce(lambda a,b:a+'<td>'+b,paramnames) htmlfile.write('<td>'+paramline+'<td>logLmax</tr><tr>') meanline=reduce(lambda a,b:a+'<td>'+b,meanStr) htmlfile.write('<td>'+meanline+'</tr>') if injection: htmlfile.write('<tr><th colspan='+str(len(paramnames))+'>Injected values</tr>') injline=reduce(lambda a,b:a+'<td>'+b,injvals) htmlfile.write('<td>'+injline+'<td></tr>') htmlfile.write('</table>') if injection: if skyinjectionconfidence: htmlfile.write('<p>Injection found at confidence interval %f in sky location</p>'%(skyinjectionconfidence)) else: htmlfile.write('<p>Injection not found in posterior bins in sky location!</p>') htmlfile.write('<h5>2D Marginal PDFs</h5><br>') htmlfile.write('<table border=1><tr>') if opts.skyres is not None: htmlfile.write('<td width=30%><img width=100% src="skymap.png"></td>') else: htmlfile.write('<td width=30%><img width=100% src="m1dist.png:></td>') row_switch=1 for par1,par2 in twoDplots: if row_switch==3: row_switch=0 plot_path=None if os.path.isfile(os.path.join(outdir,'2D',par1+'-'+par2+'_2Dkernel.png')): plot_path='2D/'+par1+'-'+par2+'_2Dkernel.png' elif os.path.isfile(os.path.join(outdir,'2D',par2+'-'+par1+'_2Dkernel.png')): plot_path='2D/'+par2+'-'+par1+'_2Dkernel.png' if plot_path: if row_switch==0: htmlfile.write('<tr>') htmlfile.write('<td width=30%><img width=100% src="'+plot_path+'"></td>') if row_switch==2: htmlfile.write('</tr>') row_switch+=1 if row_switch==2: htmlfile.write('<td></td></tr>') elif row_switch==1: htmlfile.write('<td></td><td></td></tr>') htmlfile.write('</table>') htmlfile.write('<br><a href="2D/">All 2D Marginal PDFs</a><hr><h5>1D marginal posterior PDFs</h5><br>') summary_file.add_section('1D ranking kde') 
summary_file.add_section('1D ranking bins') for param in oneDMenu: try: par_index=paramnames.index(param) i=par_index except ValueError: print "No input chain for %s, skipping 1D plot."%param continue myfig=plt.figure(figsize=(4,3.5),dpi=80) histbins=50 (n, bins, patches)=plt.hist(pos[:,i],histbins,normed='true') histbinSize=bins[1]-bins[0] try: gkde=stats.kde.gaussian_kde(pos[:,i]) pass except np.linalg.linalg.LinAlgError: print "Error occured generating plot for parameter %s: %s ! Trying next parameter."%(paramnames[i],'LinAlgError') continue print "Generating 1D plot for %s."%param ind=np.linspace(min(pos[:,i]),max(pos[:,i]),101) kdepdf=gkde.evaluate(ind) plt.plot(ind,kdepdf,label='density estimate') if injection and min(pos[:,i])<getinjpar(injection,i) and max(pos[:,i])>getinjpar(injection,i): plt.plot([getinjpar(injection,i),getinjpar(injection,i)],[0,max(kdepdf)],'r-.',scalex=False,scaley=False) rkde=gkde.integrate_box_1d(min(pos[:,i]),getinjpar(injection,i)) print "r of injected value of %s (kde) = %f"%(param,rkde) bins_to_inj=(getinjpar(injection,i)-bins[0])/histbinSize injbinh=int(floor(bins_to_inj)) injbin_frac=bins_to_inj-float(injbinh) rbins=(sum(n[0:injbinh-1])+injbin_frac*n[injbinh])*histbinSize print "r of injected value of %s (bins) = %f"%(param, rbins) summary_file.set('1D ranking kde',param,rkde) summary_file.set('1D ranking bins',param,rbins) plt.grid() plt.xlabel(paramnames[i]) plt.ylabel('Probability Density') myfig.savefig(os.path.join(outdir,paramnames[i]+ '.png')) myfig=plt.figure(figsize=(4,3.5),dpi=80) plt.plot(pos[:,i],'.') if injection and min(pos[:,i])<getinjpar(injection,i) and max(pos[:,i])>getinjpar(injection,i): plt.plot([0,len(pos)],[getinjpar(injection,i),getinjpar(injection,i)],'r-.') myfig.savefig(os.path.join(outdir,paramnames[i]+'_samps.png')) htmlfile.write('<img src="'+paramnames[i]+'.png"><img src="'+paramnames[i]+'_samps.png"><br>') htmlfile.write('<hr><br />Produced using cbcBayesSkyRes.py at '+strftime("%Y-%m-%d %H:%M:%S")) htmlfile.write('</BODY></HTML>') htmlfile.close() posfilename=os.path.join(outdir,'posterior_samples.dat') posfile=open(posfilename,'w') for row in pos: for i in row: posfile.write('%f\t'%(i)) posfile.write('\n') posfile.close() summary_file.write(summary_fo) | cbcBayesSkyRes(opts.outpath,opts.data,oneDMenu,twoDGreedyMenu,greedyRes,confidenceLevels,twoDplots,injfile=opts.injfile,eventnum=opts.eventnum,skyres=opts.skyres) | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=np.linspace(min(xdat),max(xdat),Nx) yax=np.linspace(min(ydat),max(ydat),Ny) x,y=np.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = np.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() |
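The 1D ranking above uses gkde.integrate_box_1d to turn an injected value into the fraction of posterior mass lying below it. A standalone illustration with made-up samples and injection value:

import numpy as np
from scipy import stats

# Placeholder data standing in for pos[:,i] and getinjpar(injection,i).
samples = np.random.randn(1000)
inj_value = 0.5
gkde = stats.kde.gaussian_kde(samples)
# Fraction of the KDE's mass between the smallest sample and the injection.
r_kde = gkde.integrate_box_1d(min(samples), inj_value)
print "rank of injected value (kde) = %f" % r_kde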
cp.add_section("condor-max-jobs","remoteScan_FG_RDS.sh_FG_RDS_full_data","30") cp.add_section("condor-max-jobs","remoteScan_FG_SEIS_RDS.sh_FG_SEIS_RDS_full_data","30") | cp.set("condor-max-jobs","remoteScan_FG_RDS.sh_FG_RDS_full_data","30") cp.set("condor-max-jobs","remoteScan_FG_SEIS_RDS.sh_FG_SEIS_RDS_full_data","30") | def __init__(self, configfile=None): cp = ConfigParser.ConfigParser() self.cp = cp self.time_now = "_".join([str(i) for i in time_method.gmtime()[0:6]]) self.ini_file=self.time_now + ".ini" home_base = home_dirs() # CONDOR SECTION NEEDED BY THINGS IN INSPIRAL.PY cp.add_section("condor") cp.set("condor","datafind",self.which("ligo_data_find")) cp.set("condor","inspiral",self.which("lalapps_inspiral")) cp.set("condor","chia", self.which("lalapps_coherent_inspiral")) cp.set("condor","universe","standard") # SECTIONS TO SHUT UP WARNINGS cp.add_section("inspiral") cp.add_section("data") # DATAFIND SECTION cp.add_section("datafind") |
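The hunk above swaps three-argument cp.add_section(...) calls for cp.set(...): ConfigParser.add_section takes only the section name, so the removed calls raise TypeError, while option values are written with set(section, option, value). A minimal demonstration; note that set() itself raises NoSectionError unless the section exists, so the replacement code assumes "condor-max-jobs" is added elsewhere:

import ConfigParser

cp = ConfigParser.ConfigParser()
cp.add_section("condor-max-jobs")            # add_section takes the name only
cp.set("condor-max-jobs", "some_job", "30")  # set(section, option, value)
print cp.get("condor-max-jobs", "some_job")  # prints: 30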
def __init__(self, options): | def __init__(self, options, ifo_times = None, ifo_tag = None, user_tag = None,\ gps_start_time = None, gps_end_time = None): | def __init__(self, options): """ Initializes this class with the options. """ self.opts = options self.fname_list = [] self.tag_list = [] self.html_footer = "" |
initialise(self.opts, os.path.basename(sys.argv[0])) def add_plot(self, plot_fig, text): """ Add a plot to the page """ | def initialize(self): """ Extract information from the option structure. Does NOT alter the information in the option structure """ if hasattr(self.opts, 'ifo_times'): self.ifo_times = self.opts.ifo_times else: self.ifo_times = None if hasattr(self.opts, 'ifo_tag'): self.ifo_tag = self.opts.ifo_tag else: self.ifo_tag = None if hasattr(self.opts, 'user_tag'): self.user_tag = self.opts.user_tag else: self.user_tag = None if hasattr(self.opts, 'gps_start_time'): self.time_string = str(int(self.opts.gps_start_time))+"-"+\ str(int(math.ceil(self.opts.gps_end_time))-int(self.opts.gps_start_time)) else: self.time_string = "unspecified-gpstime" if hasattr(self.opts,'output_path'): self.output_path = self.opts.output_path if not self.output_path.endswith('/'): self.output_path+= '/' else: self.output_path = './' if hasattr(self.opts,'html_for_cbcweb'): self.html_for_cbcweb = self.opts.html_for_cbcweb else: self.html_for_cbcweb = None def create_affixes(self): """ Create the affixes (prefix/suffix) for the image naming """ self.prefix = self.name if self.ifo_times: self.prefix = self.ifo_times + "-" + self.prefix if self.ifo_tag: self.prefix = self.prefix + "_" + self.ifo_tag if self.user_tag: self.prefix = self.prefix + "_" + self.user_tag self.suffix = "-"+self.time_string def add_plot(self, plot_fig, fig_description, output_dir = None): """ Add a plot to the page. @param plot_fig: handle of the figure @param fig_description: descriptive figure text for the filename @param output_dir: alternate output directory [optional] """ fname = "Images/" + self.prefix + "_"+ fig_description + self.suffix if output_dir: fname = output_dir + '/' + fname else: fname = self.output_path + fname filename, filename_thumb = self.savefig(fname, plot_fig) | def __init__(self, options): """ Initializes this class with the options. """ self.opts = options self.fname_list = [] self.tag_list = [] self.html_footer = "" |
fname = set_figure_name(self.opts, text) fname_thumb = savefig_pylal(fname, fig=plot_fig) self.fname_list.append(fname) self.tag_list.append(fname) def write_page(self): """ create the page | self.fname_list.append(filename) self.tag_list.append(filename) def savefig(self, filename_base, fig, doThumb=True, dpi = None, dpi_thumb=50): """ Function to create the image file. @param filename_base: basename of the filename (without the .png ending) @param fig: handle to the figure to save @param doThumb: save the thumbnail or not (doThumb=True by default) @param dpi: resolution of the figure @param dpi_thumb: resolution of the thumbnail (dpi=50 by default) """ savefig_kwargs = {} if dpi is not None: savefig_kwargs["dpi"] = dpi filename = filename_base + '.png' fig.savefig(filename, **savefig_kwargs) if doThumb: filename_thumb = filename_base + '_thumb.png' fig.savefig(filename_thumb, dpi=dpi_thumb) else: filename_thumb = None return filename, filename_thumb def write_page(self, infix = None, doThumb = True, cbcweb = False, \ map_list = [], coinc_summ_table = None ): """ Create the pages if output is enabled | def add_plot(self, plot_fig, text): """ Add a plot to the page """ fname = set_figure_name(self.opts, text) fname_thumb = savefig_pylal(fname, fig=plot_fig) self.fname_list.append(fname) self.tag_list.append(fname) |
html_filename = write_html_output(self.opts, sys.argv[1:],\ self.fname_list, self.tag_list,\ comment=self.html_footer or None) write_cache_output(self.opts, html_filename, self.fname_list) | html_filename = self.create_htmlname(infix, cbcweb) self.write_html_output(html_filename, doThumb = doThumb, cbcweb = cbcweb, \ map_list = map_list, coinc_summ_table = coinc_summ_table,\ comment=self.html_footer or None) self.write_cache_output(html_filename) | def write_page(self): """ create the page """ if self.opts.enable_output: html_filename = write_html_output(self.opts, sys.argv[1:],\ self.fname_list, self.tag_list,\ comment=self.html_footer or None) write_cache_output(self.opts, html_filename, self.fname_list) return html_filename |
def create_htmlname(self, infix, cbcweb): """ Create the html filename """ if infix: html_filename = self.prefix + '_'+ infix +self.suffix else: html_filename = self.prefix + self.suffix if cbcweb: html_filename += "_publish.html" else: html_filename += ".html" if self.output_path: html_filename = self.output_path + html_filename return html_filename def write_html_output(self, html_filename, doThumb = True, cbcweb = False, map_list = [],\ comment = None, coinc_summ_table = None ): """ @param doThumb: Uses the thumbnail file as the sourcs for the images @param cbcweb: Creates the output as a CVS webpage @param map_list: A list of dictionaries to create the image maps @param comment: A comment that can be added to the page @param coinc_summ_table: A CoincSummTable that can be added to the page """ page = markup.page() try: page.init(title=__title__) except: page.init() if cbcweb: page.addheader("<%method title>" + self.name + " results</%method>") page.addheader("<%method headline>" + self.name + " results</%method>") page.addheader("<%method cvsid> $Id: InspiralUtils.py,v 1.41 2009/02/27 20:21:07 jclayton Exp $ </%method>") else: page.h1(self.name + " results") page.p(self.prefix + self.suffix) page.hr() html_file = file(html_filename, "w") for tag,filename in zip(self.tag_list, self.fname_list): if self.html_for_cbcweb: fname = opts.html_for_cbcweb + "/Images/" + os.path.basename(filename) else: fname = "Images/" + os.path.basename(filename) if doThumb: fname_thumb = fname[:-4] + "_thumb.png" else: fname_thumb = fname page.a(extra_oneliner.img(src=[fname_thumb], width=400, \ alt=tag, border="2"), title=tag, href=[ fname]) page.add("<hr/>") m=0 for map_dict in map_list: m+=1 page.add( map_dict['text']+'<br>' ) page.add( '<IMG src="%s" width=800px '\ 'usemap=" page.add( '<MAP name="map%d"> <P>' % m ) n=0 for px, py, link in zip( map_dict['xCoords'], \ map_dict['yCoords'], \ map_dict['links']): n+=1 page.add( '<area href="%s" shape="circle" '\ 'coords="%d, %d, 5"> Point%d</a>' %\ ( link, px, py, n) ) page.add('</P></MAP></OBJECT><br>') page.add("<hr/>") if comment: page.add("<div> "+comment+"</div>") page.hr() if coinc_summ_table: page.add(coinc_summ_table) page.hr() text = self.write_process_params() page.add(text) html_file.write(page(False)) html_file.close() def write_cache_output(self, html_filename): """ Write the output cache file of the plotting functions. @param: html_filename: the name of the html file """ output_cache_name = self.prefix + self.suffix +'.cache' if self.output_path: output_cache_name = self.output_path + output_cache_name cachefile = open(output_cache_name, 'w') cachefile.write(os.path.basename(html_filename) + '\n') for filename in self.fname_list: if filename.endswith('.png'): fname = "Images/"+os.path.basename(filename) elif filename.endswith('.html'): fname = os.path.basename(str(filename)) cachefile.write(fname + '\n') cachefile.close() def write_process_params(self): """ Returns the version and the full command run """ text = "Figure(s) produced with '" + self.name + "' with version: <br>" \ + self.version \ + '<br>\n<p style="width:80%; color:blue">'+ self.name for arg in self.arguments: text += " " + arg text+='</p>' return text | def write(self, text): """ Write some text to the standard output AND to the page. """ print text self.html_footer+=text+'<br>' |
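A hypothetical driver for the page class refactored above; the attributes mirror exactly what initialize() reads, and the class name and figure are placeholders rather than this module's documented interface:

# Every attribute below is just what initialize() looks for.
class FakeOpts(object):
    ifo_times = "H1L1"
    ifo_tag = "FULL_DATA"
    user_tag = None
    gps_start_time = 930960015
    gps_end_time = 930963615
    output_path = "./"
    html_for_cbcweb = None
    enable_output = True

# page = InspiralPage(FakeOpts())    # assumed class name
# page.add_plot(fig, "snr_vs_time")  # -> ./Images/<prefix>_snr_vs_time<suffix>.png
# page.write_page()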
|
def gridsky(resolution): | def gridsky(resolution,shifted=False): | def gridsky(resolution): """ grid the sky up into roughly square regions resolution is the length of a side the points get placed at the center of the squares and to first order each square has an area of resolution^2 """ latitude = 0.0 longitude = pi ds = pi*resolution/180.0 points = [(latitude-0.5*pi, longitude)] while latitude <= pi: latitude += ds longitude = 0.0 points.append((latitude-0.5*pi, longitude)) while longitude <= 2.0*pi: longitude += ds / abs(sin(latitude)) points.append((latitude-0.5*pi, longitude)) #there's some slop so get rid of it and only focus on points on the sphere sphpts = [] for pt in points: if pt[0] > pi/2 or pt[0] < -pi/2 \ or pt[1] > 2*pi or pt[1] < 0: pass else: sphpts.append(pt) return sphpts |
longitude = pi | longitude = 0.0 | def gridsky(resolution): """ grid the sky up into roughly square regions resolution is the length of a side the points get placed at the center of the squares and to first order each square has an area of resolution^2 """ latitude = 0.0 longitude = pi ds = pi*resolution/180.0 points = [(latitude-0.5*pi, longitude)] while latitude <= pi: latitude += ds longitude = 0.0 points.append((latitude-0.5*pi, longitude)) while longitude <= 2.0*pi: longitude += ds / abs(sin(latitude)) points.append((latitude-0.5*pi, longitude)) #there's some slop so get rid of it and only focus on points on the sphere sphpts = [] for pt in points: if pt[0] > pi/2 or pt[0] < -pi/2 \ or pt[1] > 2*pi or pt[1] < 0: pass else: sphpts.append(pt) return sphpts |
points = [(latitude-0.5*pi, longitude)] | points = [] if shifted: dlat = 0.0 else: dlat = 0.5*pi | def gridsky(resolution): """ grid the sky up into roughly square regions resolution is the length of a side the points get placed at the center of the squares and to first order each square has an area of resolution^2 """ latitude = 0.0 longitude = pi ds = pi*resolution/180.0 points = [(latitude-0.5*pi, longitude)] while latitude <= pi: latitude += ds longitude = 0.0 points.append((latitude-0.5*pi, longitude)) while longitude <= 2.0*pi: longitude += ds / abs(sin(latitude)) points.append((latitude-0.5*pi, longitude)) #there's some slop so get rid of it and only focus on points on the sphere sphpts = [] for pt in points: if pt[0] > pi/2 or pt[0] < -pi/2 \ or pt[1] > 2*pi or pt[1] < 0: pass else: sphpts.append(pt) return sphpts |
points.append((latitude-0.5*pi, longitude)) | points.append((latitude-dlat, longitude)) | def gridsky(resolution): """ grid the sky up into roughly square regions resolution is the length of a side the points get placed at the center of the squares and to first order each square has an area of resolution^2 """ latitude = 0.0 longitude = pi ds = pi*resolution/180.0 points = [(latitude-0.5*pi, longitude)] while latitude <= pi: latitude += ds longitude = 0.0 points.append((latitude-0.5*pi, longitude)) while longitude <= 2.0*pi: longitude += ds / abs(sin(latitude)) points.append((latitude-0.5*pi, longitude)) #there's some slop so get rid of it and only focus on points on the sphere sphpts = [] for pt in points: if pt[0] > pi/2 or pt[0] < -pi/2 \ or pt[1] > 2*pi or pt[1] < 0: pass else: sphpts.append(pt) return sphpts |
points.append((latitude-0.5*pi, longitude)) | points.append((latitude-dlat, longitude)) points.append((0.0-dlat,0.0)) | def gridsky(resolution): """ grid the sky up into roughly square regions resolution is the length of a side the points get placed at the center of the squares and to first order each square has an area of resolution^2 """ latitude = 0.0 longitude = pi ds = pi*resolution/180.0 points = [(latitude-0.5*pi, longitude)] while latitude <= pi: latitude += ds longitude = 0.0 points.append((latitude-0.5*pi, longitude)) while longitude <= 2.0*pi: longitude += ds / abs(sin(latitude)) points.append((latitude-0.5*pi, longitude)) #there's some slop so get rid of it and only focus on points on the sphere sphpts = [] for pt in points: if pt[0] > pi/2 or pt[0] < -pi/2 \ or pt[1] > 2*pi or pt[1] < 0: pass else: sphpts.append(pt) return sphpts |
if pt[0] > pi/2 or pt[0] < -pi/2 \ or pt[1] > 2*pi or pt[1] < 0: | if pt[0] > latmax or pt[0] < latmin or pt[1] > 2*pi or pt[1] < 0.0: | def gridsky(resolution): """ grid the sky up into roughly square regions resolution is the length of a side the points get placed at the center of the squares and to first order each square has an area of resolution^2 """ latitude = 0.0 longitude = pi ds = pi*resolution/180.0 points = [(latitude-0.5*pi, longitude)] while latitude <= pi: latitude += ds longitude = 0.0 points.append((latitude-0.5*pi, longitude)) while longitude <= 2.0*pi: longitude += ds / abs(sin(latitude)) points.append((latitude-0.5*pi, longitude)) #there's some slop so get rid of it and only focus on points on the sphere sphpts = [] for pt in points: if pt[0] > pi/2 or pt[0] < -pi/2 \ or pt[1] > 2*pi or pt[1] < 0: pass else: sphpts.append(pt) return sphpts |
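A usage sketch for gridsky above, exercising the half-cell "shifted" variant this change introduces (point counts depend on the resolution chosen):

from math import pi, sin

base = gridsky(4.0)                  # 4-degree grid centred as before
shifted = gridsky(4.0, shifted=True) # grid offset by half a cell in latitude
print "base grid: %d points, shifted grid: %d points" % (len(base), len(shifted))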
takes the two grids (lists of lat/lon tuples) and returns a dictionary | takes the two grids (lists of lat/lon tuples) and returns (1) a dictionary | def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid[:] coarsedict = {} ds = coarseres*pi/180.0 for cpt in coarsegrid: flist = [] for fpt in fgtemp: if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= ds*ds/4.0 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*sin(cpt[0])*sin(cpt[0]) \ <= ds*ds/4.0: flist.append(fpt) coarsedict[cpt] = flist for rpt in flist: fgtemp.remove(rpt) return coarsedict, fgtemp |
points in the fine grid are the values | points in the fine grid are the values and (2) (for debugging
purposes) a list of the fine-grid points that were not assigned to any coarse point
NB: *** should work all right as long as the resolution is not too coarse (the test uses the
infinitesimal form of the metric); a resolution of about 5 degrees is the coarsest that still behaves well ***
a fudge factor (epsilon) is included in the distance computation to compensate for not using the
integrated distance and to guard against floating-point comparison errors | def map_grids(coarsegrid,finegrid,coarseres=4.0):
    """
    takes the two grids (lists of lat/lon tuples) and returns a dictionary
    where the points in the coarse grid are the keys and lists of tuples of
    points in the fine grid are the values
    """
    fgtemp = finegrid[:]
    coarsedict = {}
    ds = coarseres*pi/180.0
    for cpt in coarsegrid:
        flist = []
        for fpt in fgtemp:
            if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= ds*ds/4.0 and \
            (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*sin(cpt[0])*sin(cpt[0]) \
            <= ds*ds/4.0:
                flist.append(fpt)
        coarsedict[cpt] = flist
        for rpt in flist:
            fgtemp.remove(rpt)
    return coarsedict, fgtemp
if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= ds*ds/4.0 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*sin(cpt[0])*sin(cpt[0]) \ <= ds*ds/4.0: | if (cpt[0]-fpt[0])*(cpt[0]-fpt[0])-ds*ds/4.0 <= epsilon and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*sin(cpt[0])*sin(cpt[0])-ds*ds/4.0 <= epsilon: | def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid[:] coarsedict = {} ds = coarseres*pi/180.0 for cpt in coarsegrid: flist = [] for fpt in fgtemp: if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= ds*ds/4.0 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*sin(cpt[0])*sin(cpt[0]) \ <= ds*ds/4.0: flist.append(fpt) coarsedict[cpt] = flist for rpt in flist: fgtemp.remove(rpt) return coarsedict, fgtemp |
first_col = [pt for pt in coarsegrid if pt[1] == 0.0] for cpt in first_col: flist = [] for fpt in fgtemp: if (cpt[0]-fpt[0])*(cpt[0]-fpt[0])-ds*ds/4.0 <= epsilon and \ (2*pi-fpt[1])*(2*pi-fpt[1])*sin(cpt[0])*sin(cpt[0])-ds*ds/4.0 <= epsilon: coarsedict[cpt].append(fpt) flist.append(fpt) for rpt in flist: fgtemp.remove(rpt) | def map_grids(coarsegrid,finegrid,coarseres=4.0): """ takes the two grids (lists of lat/lon tuples) and returns a dictionary where the points in the coarse grid are the keys and lists of tuples of points in the fine grid are the values """ fgtemp = finegrid[:] coarsedict = {} ds = coarseres*pi/180.0 for cpt in coarsegrid: flist = [] for fpt in fgtemp: if (cpt[0]-fpt[0])*(cpt[0]-fpt[0]) <= ds*ds/4.0 and \ (cpt[1]-fpt[1])*(cpt[1]-fpt[1])*sin(cpt[0])*sin(cpt[0]) \ <= ds*ds/4.0: flist.append(fpt) coarsedict[cpt] = flist for rpt in flist: fgtemp.remove(rpt) return coarsedict, fgtemp |
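A sketch of pairing the two grids with map_grids above and checking that no fine point fell through; both grids come from gridsky, and coarseres is assumed to match the resolution used to build the coarse grid:

coarse = gridsky(4.0)
fine = gridsky(0.4)
coarsedict, leftovers = map_grids(coarse, fine, coarseres=4.0)
npaired = sum([len(v) for v in coarsedict.values()])
print "paired %d of %d fine points; %d unmatched" % (npaired, len(fine), len(leftovers))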
|
"ISI-OMC_DISPPF_H1_IN1_DAQ",\ "PEM-LVEA2_V2",\ "OMC-ASC_POS_X_IN1_DAQ",\ "OMC-ASC_POS_Y_IN1_DAQ",\ "OMC-QPD3_SUM_IN1_DAQ",\ "OMC-QPD1_SUM_IN1_DAQ",\ "OMC-QPD2_SUM_IN1_DAQ",\ "OMC-QPD4_SUM_IN1_DAQ"\ | "XX:ISI-OMC_DISPPF_H1_IN1_DAQ",\ "XX:PEM-LVEA2_V2",\ "XX:OMC-ASC_POS_X_IN1_DAQ",\ "XX:OMC-ASC_POS_Y_IN1_DAQ",\ "XX:OMC-QPD3_SUM_IN1_DAQ",\ "XX:OMC-QPD1_SUM_IN1_DAQ",\ "XX:OMC-QPD2_SUM_IN1_DAQ",\ "XX:OMC-QPD4_SUM_IN1_DAQ"\ | def __filenameToChannelList__(self,filenameList=[]): """ This method attempts to construct a set of simplified channel names based of a list of image filenames. """ #Parsed filename channel list specialChannelList=[ "ISI-OMC_DISPPF_H1_IN1_DAQ",\ "PEM-LVEA2_V2",\ "OMC-ASC_POS_X_IN1_DAQ",\ "OMC-ASC_POS_Y_IN1_DAQ",\ "OMC-QPD3_SUM_IN1_DAQ",\ "OMC-QPD1_SUM_IN1_DAQ",\ "OMC-QPD2_SUM_IN1_DAQ",\ "OMC-QPD4_SUM_IN1_DAQ"\ ] fileBasenames=[os.path.basename(x) for x in filenameList] startREG=re.compile('_[H,V,L][0,1,2][:,-,_]') stopREG=re.compile('_(?=[0-9,a-z])') channelNames=[[x,re.split(stopREG,re.split(startREG,x).pop().strip())[0].strip()]\ for x in filenameList] #Correct badly parsed names finalChannelList=list() for myURL,myName in channelNames: for specialName in specialChannelList: if myURL.__contains__(specialName): finalChannelList.append(specialName) else: finalChannelList.append(myName) return [str(x).strip() for x in finalChannelList] |
return [str(x).strip() for x in finalChannelList] | return ["XX:"+str(x).strip() for x in finalChannelList] | def __filenameToChannelList__(self,filenameList=[]): """ This method attempts to construct a set of simplified channel names based of a list of image filenames. """ #Parsed filename channel list specialChannelList=[ "ISI-OMC_DISPPF_H1_IN1_DAQ",\ "PEM-LVEA2_V2",\ "OMC-ASC_POS_X_IN1_DAQ",\ "OMC-ASC_POS_Y_IN1_DAQ",\ "OMC-QPD3_SUM_IN1_DAQ",\ "OMC-QPD1_SUM_IN1_DAQ",\ "OMC-QPD2_SUM_IN1_DAQ",\ "OMC-QPD4_SUM_IN1_DAQ"\ ] fileBasenames=[os.path.basename(x) for x in filenameList] startREG=re.compile('_[H,V,L][0,1,2][:,-,_]') stopREG=re.compile('_(?=[0-9,a-z])') channelNames=[[x,re.split(stopREG,re.split(startREG,x).pop().strip())[0].strip()]\ for x in filenameList] #Correct badly parsed names finalChannelList=list() for myURL,myName in channelNames: for specialName in specialChannelList: if myURL.__contains__(specialName): finalChannelList.append(specialName) else: finalChannelList.append(myName) return [str(x).strip() for x in finalChannelList] |
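An illustration of the filename parsing in __filenameToChannelList__ above, with a made-up qscan image name; the regexes are copied from the method, and the "XX:" prefix is what the changed return line prepends. (As written, the inner loop also appends myName once per non-matching special channel, so duplicates can appear in the output; that behaviour is left as in the source.)

import re

startREG = re.compile('_[H,V,L][0,1,2][:,-,_]')
stopREG = re.compile('_(?=[0-9,a-z])')
# Hypothetical qscan image filename for demonstration only.
name = "932797512.0_H1:OMC-QPD1_SUM_IN1_DAQ_16.00_spectrogram_whitened.png"
# Drop everything up to the IFO marker, then cut at the first
# underscore followed by a digit or lowercase letter.
chan = re.split(stopREG, re.split(startREG, name).pop().strip())[0].strip()
print chan   # -> OMC-QPD1_SUM_IN1_DAQ ("XX:" is prepended on return)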
imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() | imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS() | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] 
dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ 
%(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search 
for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() 
zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its 
analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple 
interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ | indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\ | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: 
sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) 
thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search 
for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not found for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping PEM and not SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict()
zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its 
analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple 
interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ | imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\ | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) 
wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] 
indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Time Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
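The environment checks in the context above reuse one idiom for each checklist item: list the scan products with fnmatch, then reparse the Z-value channel list down to the class that item cares about (SEI for #5, PEM-and-not-SEI for #6, neither for #7). Below is a minimal standalone sketch of that idiom, not pipeline code; the helper names (keep_sei, keep_pem, keep_aux, reparse) and the toy channel tuples are invented here. It also demonstrates the two corrections applied above: the working list is rebuilt on every pass (the tmpList reset), and "neither PEM nor SEI" is written as (not PEM and not SEI) rather than the nearly-always-true (not PEM or not SEI).

def keep_sei(name):
    # Checklist #5: keep seismic channels only.
    return "SEI" in name.upper()

def keep_pem(name):
    # Checklist #6: keep PEM channels that are not seismic.
    return "PEM" in name.upper() and "SEI" not in name.upper()

def keep_aux(name):
    # Checklist #7: keep channels that are neither PEM nor SEI.
    # Note the boolean form: (not PEM) and (not SEI).
    return "PEM" not in name.upper() and "SEI" not in name.upper()

def reparse(zvalues, keep):
    # Equivalent of the tmpList idiom, rebuilt fresh on every call so
    # entries kept for the previous checklist item cannot leak through.
    return [chan for chan in zvalues if keep(chan[0])]

zvalues = [("L0:PEM-LVEA_MAGX", 3.1), ("L1:SEI-ETMX_X", 2.2), ("L1:ASC-QPDX", 1.4)]
print(reparse(zvalues, keep_sei))  # [('L1:SEI-ETMX_X', 2.2)]
print(reparse(zvalues, keep_pem))  # [('L0:PEM-LVEA_MAGX', 3.1)]
print(reparse(zvalues, keep_aux))  # [('L1:ASC-QPDX', 1.4)]

One caveat worth keeping in mind: because the tests are plain substring matches on the channel name, a PEM channel whose name embeds "SEIS" (for example H0:PEM-MY_SEISX) is classified as seismic, which is why the thresholds and patterns above filter on the channel string rather than on a channel type field.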
thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ | thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\ | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) 
wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] 
indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not found for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not found for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not found for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question")
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Time Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
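The rem/add pair at the head of this record swaps the direct wikiFileFinder.get_RDS_R_L1_SEIS() call in the seismic thumbnail filter for a precomputed list named filesOmega, so the scan directory is listed once and then pattern-matched repeatedly. The sketch below illustrates that caching pattern under the assumption that filesOmega is filled once per coincidence from wikiFileFinder.get_RDS_R_L1_SEIS() earlier in the revised function; that assignment is not visible in this record, and the helper name select_scan_products is invented here.

import fnmatch

def select_scan_products(filesOmega, ifo, time):
    # One cached directory listing, three pattern passes over it,
    # mirroring the index/image/thumb triplet built for checklist #5.
    index = fnmatch.filter(filesOmega, "*/%s_RDS_*/%s/*index.html" % (ifo, time))
    image = fnmatch.filter(filesOmega, "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png" % (ifo, time))
    thumb = fnmatch.filter(filesOmega, "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png" % (ifo, time))
    return index, image, thumb

# Toy listing; real paths come from the omega scan output tree.
filesOmega = [
    "/scans/L1_RDS_R_L1/941000000/a_index.html",
    "/scans/L1_RDS_R_L1/941000000/L1_SEI-ETMX_512.00_spectrogram_whitened.png",
    "/scans/L1_RDS_R_L1/941000000/L1_SEI-ETMX_512.00_spectrogram_whitened_thumb.png",
]
print(select_scan_products(filesOmega, "L1", 941000000))

Note the single-character "?" before thumb in the last pattern: it matches either "_thumb.png" or ".thumb.png" while keeping the full-size ".png" pattern from also matching the thumbnail, which is why the two patterns can safely run over the same cached list.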
zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator 
Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x 
in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not found for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not found for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not found for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question")
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
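The SNR/CHISQ table code in the context above fills a small grid keyed by interferometer across the columns and by the SNR/CHISQ labels down the rows, picking for each cell whichever image name matches both keys. A minimal sketch of that fill pattern, where Table is a hypothetical stand-in for wikiPage.wikiTable and the image names are illustrative (the real script links thumbnails through wikiPage.linkedRemoteImage and makeExternalLink):

# Hedged sketch: Table stands in for wikiPage.wikiTable; file names are made up.
class Table(object):
    def __init__(self, rows, cols):
        self.data = [["" for _ in range(cols)] for _ in range(rows)]

ifos = ["H1", "L1"]
rowLabel = {"SNR": 1, "CHISQ": 2}
imageList = ["pipe_H1_FOLLOWUP_snr-0.png", "pipe_H1_FOLLOWUP_chisq-0.png",
             "pipe_L1_FOLLOWUP_snr-0.png", "pipe_L1_FOLLOWUP_chisq-0.png"]

snrTable = Table(len(rowLabel) + 1, len(ifos) + 1)
for i, ifo in enumerate(ifos):
    snrTable.data[0][i + 1] = ifo                 # header row: one column per IFO
for col, ifo in enumerate(ifos):
    for row, label in enumerate(rowLabel.keys()):
        snrTable.data[row + 1][0] = label         # label column: SNR / CHISQ
        for image in imageList:
            # keep the image matching both the label and this IFO's FOLLOWUP tag
            if "_%s-" % label.lower() in image and "pipe_%s_FOLLOWUP" % ifo in image:
                snrTable.data[row + 1][col + 1] = image
for rowData in snrTable.data:
    print(rowData)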
|
if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) tmpList=list() for chan in zValueDict[sngl.ifo]: | for zFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product 
import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ 
"*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
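Read together with the next hunk, the change in this row drops the len(zValueFiles) > 0 guard and the extend/tmpList reparse in favour of globbing the Omega summary files directly and filtering channels as they are read. A minimal sketch of the new shape, assuming filesOmega is the precomputed list of Omega scan product paths the refactor introduces and read_summary stands in for wikiFileFinder.__readSummary__ (which yields (channel, z-value) pairs):

import fnmatch

# Hypothetical inputs; the glob pattern is taken verbatim from the added line.
filesOmega = ["scans/L1_RDS_R_L1/900000000/omega_summary.txt"]

def read_summary(path):
    # stand-in for __readSummary__: illustrative (channel, z-value) pairs
    return [("L1:SEI-ETMX_X", 12.3), ("L1:PEM-EX_MAG", 4.5)]

ifo, gpsTime = "L1", "900000000"
zValueDict = {ifo: []}
for zFile in fnmatch.filter(filesOmega, "*/%s_RDS_*/%s/*summary.txt" % (ifo, gpsTime)):
    for chan in read_summary(zFile):
        if "SEI" in chan[0]:              # filter in one pass, no tmpList reparse
            zValueDict[ifo].append(chan)
print(zValueDict)                         # {'L1': [('L1:SEI-ETMX_X', 12.3)]}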
tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) | zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import 
problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ 
"*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
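The checklist items in the context above recover each full-resolution SNR/CHISQ plot from its thumbnail by string substitution, because the pipeline emits two thumbnail suffix conventions ("_thumb.png" and ".thumb.png"). A minimal runnable sketch of that pairing step; the file names below are hypothetical stand-ins for the wikiFileFinder.get_plotsnrchisq() output:

import fnmatch

# Hypothetical file names; only the suffix conventions matter here.
files = [
    "lalapps_pipe_H1_FOLLOWUP_plotsnrchisq_snr-873739311_thumb.png",
    "lalapps_pipe_L1_FOLLOWUP_plotsnrchisq_chisq-873739311.thumb.png",
    "lalapps_pipe_H1_FOLLOWUP_plotsnrchisq.html",
]

# Collect SNR and CHISQ thumbnails, as in checklist item #12.
thumbList = fnmatch.filter(files, "*_snr-*thumb.png")
thumbList.extend(fnmatch.filter(files, "*_chisq-*thumb.png"))
thumbList.sort()

# Apply both substitutions so either suffix maps to the full-size image.
imageList = [x.replace("_thumb.png", ".png").replace(".thumb.png", ".png")
             for x in thumbList]

for thumb, image in zip(thumbList, imageList):
    print("%s -> %s" % (thumb, image))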
zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ | indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in 
dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk 
locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
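The replacement recorded here (for indexDictAQ, and in the following records for imageDictAQ and thumbDictAQ) swaps repeated wikiFileFinder.get_analyzeQscan_SEIS() calls for a precomputed filesAnalyze list, apparently so the product tree is listed once and each fnmatch pattern filters the cached result; filesAnalyze itself is defined outside this excerpt. A sketch under that reading; find_analyze_products and the stub listing are hypothetical:

import fnmatch

def find_analyze_products(filesAnalyze, ifo, timeString):
    # One cached listing, filtered per pattern, instead of re-running
    # the finder for every dictionary entry.
    index = fnmatch.filter(filesAnalyze,
                           "*_%s_%s_*.html" % (ifo, timeString))
    thumbs = fnmatch.filter(filesAnalyze,
        "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png" % (ifo, timeString))
    images = fnmatch.filter(filesAnalyze,
        "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png" % (ifo, timeString))
    return index, thumbs, images

# timeString is str(float(sngl.time)).replace(".", "_"); the sample file
# name is taken from a comment in the surrounding code.
filesAnalyze = [
    "L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png",
]
print(find_analyze_products(filesAnalyze, "L1", "932797512_687"))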
thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ | imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) 
for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # 
#Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
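Each environmental checklist item in the context reparses the summary rows by substring tests on the channel name (element 0 of each tuple): item #5 keeps SEI channels, item #6 keeps PEM channels without SEI, and item #7 tests not "PEM" in chan[0] or not "SEI" in chan[0], which only rejects names containing both substrings; given the adjacent comment "Reparse NOT keeping PEM or SEI channels", excluding either subsystem would need and. A small sketch of the three filters on hypothetical rows:

# Hypothetical (channel, z-value) rows standing in for the output of
# __readSummary__ / __readZranks__.
rows = [
    ("L1:SEI-ETMX_X", 12.3),    # seismic
    ("H0:PEM-MY_SEISX", 6.1),   # PEM seismometer: contains both substrings
    ("H0:PEM-LVEA_MAGX", 4.5),  # PEM, non-seismic
    ("L1:ASC-WFS1_QP", 2.0),    # other auxiliary channel
]

# Item #5: seismic channels only.
sei = [row for row in rows if "SEI" in row[0]]

# Item #6: PEM channels that are not seismometers.
pem = [row for row in rows if "PEM" in row[0] and "SEI" not in row[0]]

# Item #7 as written: passes everything except names containing both
# substrings, so pure PEM and pure SEI rows still get through.
aux_as_written = [row for row in rows
                  if "PEM" not in row[0] or "SEI" not in row[0]]

# The presumably intended filter, excluding both subsystems:
aux_intended = [row for row in rows
                if "PEM" not in row[0] and "SEI" not in row[0]]

print(sei)             # SEI-ETMX_X and PEM-MY_SEISX
print(pem)             # PEM-LVEA_MAGX
print(aux_as_written)  # everything except PEM-MY_SEISX
print(aux_intended)    # ASC-WFS1_QP only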
imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) 
wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] 
indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
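The row above concerns the fnmatch glob that collects SEI z-scatter images from the analyzeQscan output tree. A self-contained sketch of how that pattern selects files, with a hypothetical file list standing in for wikiFileFinder.get_analyzeQscan_SEIS():

import fnmatch

# Hypothetical analyzeQscan output names (the real list comes from
# wikiFileFinder.get_analyzeQscan_SEIS()).
files = [
    "L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png",
    "L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime_thumb.png",
    "H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png",
]
ifo, timeString = "L1", "932797512_687"
# Full-size images: the trailing literal excludes the *_thumb.png thumbnails.
images = fnmatch.filter(files, "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png" % (ifo, timeString))
thumbs = fnmatch.filter(files, "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png" % (ifo, timeString))
print(images)  # only the L1 SEI scatter plot
print(thumbs)  # only its thumbnail

The environmental and auxiliary-channel sections later in the same function use this glob without the SEI token and instead filter channel names in Python ("PEM" in myFile.upper(), and so on).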
|
if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) tmpList=list() for chan in zValueDictAQ[sngl.ifo]: | for zFile in fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import 
problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ 
"*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
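The row above replaces a len(zValueFiles) > 0 guard around wikiFileFinder.__readZranks__ with direct iteration over fnmatch.filter(filesAnalyze, ...) feeding wikiFileFinder.__readSummary__. The design point: iterating over a filtered list needs no emptiness guard, because a loop over an empty list is a no-op. A minimal sketch, with parse_summary as a hypothetical stand-in for wikiFileFinder.__readSummary__:

import fnmatch

def parse_summary(path):
    # Hypothetical parser standing in for wikiFileFinder.__readSummary__;
    # it would return (channel, z-value, ...) rows read from the file.
    return []

files_analyze = []  # hypothetical stand-in for the analyzeQscan file listing
ifo, time_string = "L1", "932797512_687"

z_values = []
# No "if len(...) > 0" guard is needed: an empty filter result skips the loop.
for z_file in fnmatch.filter(files_analyze, "*_%s_%s_*.txt" % (ifo, time_string)):
    for chan in parse_summary(z_file):
        z_values.append(chan)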
tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) | zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data 
product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ 
"*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
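
Note on the rem/add pair above: the new code drops the tmpList rebuild and the dangling else branch, appending matching channels straight into zValueDictAQ[sngl.ifo] and warning once afterwards if nothing survived the filter. A minimal sketch of that filter-then-check pattern, with illustrative channel tuples and IFO names (not the pipeline's real inputs):

import sys

def keep_sei_channels(channels, ifo):
    # Append matching entries directly, then test once for emptiness,
    # rather than building a temporary list under a separate else branch.
    kept = []
    for chan in channels:
        if "SEI" in chan[0]:
            kept.append(chan)
    if len(kept) == 0:
        sys.stdout.write("AnalyzeQscan summary file not found or empty for %s. ...continuing...\n" % ifo)
    return kept

print keep_sei_channels([("SEI-ETMX_X", 4.2), ("PEM-LVEA_MIC", 1.0)], "L1")  # keeps the SEI channel
print keep_sei_channels([("PEM-LVEA_MIC", 1.0)], "H1")                        # warns, returns []
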
imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() | imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") 
print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ 
"*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
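
The rem/add pair above collapses eight separate dict() statements into two tuple-unpacking assignments. A short sketch of why unpacking is the right tool here (variable contents are illustrative): each target gets its own independent dict, whereas a chained assignment would alias a single object across all names.

# Tuple unpacking: one independent dict per name.
imageDict, indexDict, thumbDict, zValueDict = dict(), dict(), dict(), dict()
imageDict["H1"] = ["spectrogram.png"]
assert indexDict == {}  # untouched; each name owns a distinct object

# Chained assignment would alias one dict, which is not wanted here:
aliasA = aliasB = dict()
aliasA["H1"] = ["spectrogram.png"]
assert aliasB == {"H1": ["spectrogram.png"]}  # same object seen through both names
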
imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ | indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] 
dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ 
%(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search 
for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not found for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict()
zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not found for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(wikiCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(wikiCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its
analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple 
interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
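# Illustrative sketch, not from the original scripts: the rows around here
# select Omega scan products for one detector and GPS time by glob-matching a
# flat listing of file paths, e.g.
#   fnmatch.filter(wikiFileFinder.get_RDS_R_L1(), "*/%s_RDS_*/%s/*html"%(ifo,time))
# A minimal, self-contained version of that pattern; "listing", "ifo" and
# "gpsTime" below are hypothetical stand-ins for the finder's output.
import fnmatch

def select_scan_products(listing, ifo, gpsTime, suffix="*index.html"):
    """Return paths under <ifo>_RDS_*/<gpsTime>/ that match suffix."""
    return fnmatch.filter(listing, "*/%s_RDS_*/%s/%s" % (ifo, gpsTime, suffix))

listing = ["/scans/L1_RDS_R_L1/932797512.687/index.html",
           "/scans/H1_RDS_R_L1/932797512.687/index.html"]
print(select_scan_products(listing, "L1", "932797512.687"))
# -> ['/scans/L1_RDS_R_L1/932797512.687/index.html']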
for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ | for myFile in fnmatch.filter(filesOmega,\ | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") 
wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in 
indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
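# Illustrative sketch, not from the original scripts: the analyzeQscan globs
# above embed the trigger GPS time with its decimal point replaced by an
# underscore, via str(float(sngl.time)).replace(".","_").  A minimal round
# trip (this assumes str(float(...)) yields the usual short decimal form,
# which holds for GPS times like the ones in the filenames above):
def gps_to_timestring(gpsTime):
    return str(float(gpsTime)).replace(".", "_")

assert gps_to_timestring(931176926.116) == "931176926_116"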
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
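# Illustrative sketch, not from the original scripts: rows parsed from an
# Omega summary.txt (via __readSummary__/__readZranks__) carry the channel
# name in element 0, and the seismic/PEM/AUX sections above keep or drop rows
# by substring tests on that name.  The three filters reduce to the helpers
# below; "rows" is a hypothetical parsed list of (channel, zvalue) tuples.
def seismic_only(rows):
    return [r for r in rows if "SEI" in r[0]]

def pem_only(rows):
    return [r for r in rows if "PEM" in r[0] and "SEI" not in r[0]]

def aux_only(rows):
    # neither PEM nor SEI; note "and", the corrected form of the exclusion
    return [r for r in rows if "PEM" not in r[0] and "SEI" not in r[0]]

rows = [("L1:SEI-ETMX_X", 5.1), ("H0:PEM-MY_MAG", 3.2), ("H1:ASC-QPDX", 2.7)]
print(pem_only(rows))  # [('H0:PEM-MY_MAG', 3.2)]
print(aux_only(rows))  # [('H1:ASC-QPDX', 2.7)]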
zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator 
Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x 
in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
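Every checklist item in the context above repeats one pattern: for each single-IFO trigger, glob the candidate data products with fnmatch.filter, convert disk-local paths to URLs, and insert the wiki table only when at least one IFO contributed both an image and an index page. Below is a minimal, self-contained sketch of that gate; gather_products and the url converter are hypothetical stand-ins for the pipeline's wikiFileFinder and file2URL objects, not their real APIs.

import fnmatch

def gather_products(all_files, ifo, gpstime, url=lambda p: "http://example.invalid/" + p):
    # Glob per-IFO scan products the way the context code does with fnmatch.filter.
    # Note fnmatch's "*" also matches "/", so these patterns cross directory levels.
    index = fnmatch.filter(all_files, "*/%s_RDS_*/%s/*index.html" % (ifo, gpstime))
    image = fnmatch.filter(all_files, "*/%s_RDS_*/%s/*_spectrogram_whitened.png" % (ifo, gpstime))
    # file2URL.convert in the real code; here a toy converter turns paths into URLs.
    return [url(x) for x in image], [url(x) for x in index]

files = ["scans/H1_RDS_R_L1/9311769/chan_index.html",
         "scans/H1_RDS_R_L1/9311769/chan_spectrogram_whitened.png"]
imageDict, indexDict = {}, {}
for ifo in ("H1", "L1"):
    imageDict[ifo], indexDict[ifo] = gather_products(files, ifo, "9311769")
    if len(indexDict[ifo]) < 1:
        print("Scans for %s not available." % ifo)
# Insert the table only if at least one IFO has both an image and an index page.
enoughImage = any(len(imageDict[k]) > 0 for k in imageDict)
enoughIndex = any(len(indexDict[k]) > 0 for k in indexDict)
print("insert table" if (enoughImage and enoughIndex) else "import problem")

The any() calls are just the idiomatic equivalent of the context's [len(d[key])>0 for key in d.keys()].count(True) >= 1 test.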
|
if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) tmpList=list() for chan in zValueDict[sngl.ifo]: | for zFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import 
problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ 
"*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
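The rem/add pair of this row replaces the "collect zValueFiles, guard on len()>0, extend in a loop" idiom with direct iteration over fnmatch.filter(filesOmega, ...): looping over an empty match list is a no-op, so the length guard (and its ...skipping... branch) becomes unnecessary. A before/after sketch under that reading; read_summary is a stand-in for wikiFileFinder.__readSummary__, and filesOmega here is a toy list.

import fnmatch

def read_summary(path):
    # Stand-in for wikiFileFinder.__readSummary__: one (channel, z-value) row per file.
    return [("H1:SEI-ETMX_X", 4.2)]

filesOmega = ["scans/H1_RDS_R_L1/9311769/channel_summary.txt"]
zValueDict = {"H1": []}

# Before (rem column): explicit list, length guard, then extend inside the loop.
zValueFiles = fnmatch.filter(filesOmega, "*/H1_RDS_*/9311769/*summary.txt")
if len(zValueFiles) > 0:
    for zFile in zValueFiles:
        zValueDict["H1"].extend(read_summary(zFile))

# After (add column): iterate the filter result directly; an empty match list
# simply skips the body, so no guard or else-branch is needed.
zValueDict["H1"] = []
for zFile in fnmatch.filter(filesOmega, "*/H1_RDS_*/9311769/*summary.txt"):
    for chan in read_summary(zFile):
        zValueDict["H1"].append(chan)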
imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): (context: same prepareChecklist listing, verbatim, as in the previous row)
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
|
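The add column in the two records that follow swaps the repeated wikiFileFinder.get_analyzeQscan_RDS() call inside fnmatch.filter() for a precomputed filesAnalyze list, so the results tree is listed once instead of once per glob. Below is a minimal, self-contained sketch of that caching pattern; the find_analyzeQscan_files() stub and the sample filenames are hypothetical stand-ins for the real file-tree walk.

import fnmatch

def find_analyzeQscan_files():
  # Stand-in for wikiFileFinder.get_analyzeQscan_RDS(), which is assumed to
  # return a flat list of file paths from the follow-up results tree.
  return [
    "H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html",
    "H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png",
    "H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png",
    ]

ifo="H1"
timeString="931176926_116"

#Fetch the listing once and reuse it, instead of re-walking the tree for
#every pattern the way the removed code did.
filesAnalyze=find_analyzeQscan_files()

indexFiles=fnmatch.filter(filesAnalyze,\
      "*%s-*_%s_*html"%(ifo,timeString))
imageFiles=fnmatch.filter(filesAnalyze,\
      "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%(ifo,timeString))

print indexFiles
print imageFiles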
for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\
      "*%s-*_%s_*html"%(sngl.ifo,timeString)):
  indexDictAQ[sngl.ifo].append(myFile)
zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\
      "*%s-*_%s_*txt"%(sngl.ifo,timeString))
zValueDictAQ[sngl.ifo]=list()
if len(zValueFiles)>0:
  for zFile in zValueFiles:
    zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile))
  for chan in zValueDictAQ[sngl.ifo]:
    if "PEM" in chan[0] and not "SEI" in chan[0]:
      tmpList.append(chan)
  zValueDictAQ[sngl.ifo]=tmpList
else:
  sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo)
for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\
| for myFile in fnmatch.filter(filesAnalyze,\ |
def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None):
  """
  Method to prepare a checklist where data products are isolated in directory.
  """
wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() 
for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search 
for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() 
zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its 
analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple 
interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
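The prepareChecklist context above re-filters (channel, z-value) rows with three near-identical tmpList loops: keep SEI channels, keep PEM-but-not-SEI channels, keep neither. A small helper capturing that idiom is sketched below; filter_channels() is hypothetical (not part of stfu_pipe), the sample rows are invented, and the (channel, value) row shape is assumed from the chan[0] indexing used on the output of __readSummary__() and __readZranks__() above. Building a fresh output list on every call also sidesteps the easy mistake of carrying a stale tmpList accumulator from one section into the next.

def filter_channels(rows, keep=(), drop=()):
  #Keep a row when its channel name contains every substring in `keep`
  #and none of the substrings in `drop`.
  out=list()
  for row in rows:
    chan=row[0]
    if all(k in chan for k in keep) and not any(d in chan for d in drop):
      out.append(row)
  return out

rows=[("H0:PEM-LVEA_MAGX",4.2),("H1:SEI-ETMX_X",3.1),("H1:ASC-QPDX",2.5)]
seiRows=filter_channels(rows,keep=("SEI",))                #seismic section
pemRows=filter_channels(rows,keep=("PEM",),drop=("SEI",))  #PEM section
auxRows=filter_channels(rows,drop=("PEM","SEI"))           #everything else

print seiRows
print pemRows
print auxRows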
for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ | for myFile in fnmatch.filter(filesAnalyze,\ | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator 
Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x 
in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in 
fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are 
consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
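Editor's note: the rows in this section repeatedly select analyzeQscan data products by globbing a file list against an IFO tag and a GPS-time string in which the decimal point is replaced by an underscore; the commented example filenames inside the code (e.g. H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html) show the naming convention. Below is a minimal runnable sketch of that selection pattern. The file list and trigger time are hypothetical stand-ins for wikiFileFinder.get_analyzeQscan_RDS() and sngl.time; only the glob patterns are taken from the rows themselves.

import fnmatch

# Hypothetical analyzeQscan products; the real list comes from
# wikiFileFinder.get_analyzeQscan_RDS().
files = [
    "H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html",
    "H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png",
    "H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png",
]
ifo = "H1"
trigger_time = 931176926.116                               # hypothetical sngl.time
timeString = str(float(trigger_time)).replace(".", "_")    # -> "931176926_116"

# Index pages, full-size scatter plots, and thumbnails for this IFO/time.
indexFiles = fnmatch.filter(files, "*%s-*_%s_*html" % (ifo, timeString))
imageFiles = fnmatch.filter(files, "*%s-*_%s_*_z_scat-unspecified-gpstime.png" % (ifo, timeString))
# "?" matches the single "_" or "." before "thumb", covering both thumbnail
# styles the script mentions ("_thumb.png" and ".thumb.png").
thumbFiles = fnmatch.filter(files, "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png" % (ifo, timeString))
print(indexFiles)
print(imageFiles)
print(thumbFiles)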
for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ | for myFile in fnmatch.filter(filesAnalyze,\ | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item 
wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() 
for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search 
for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() 
zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its 
analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple 
interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
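Editor's note on the row above: its diff replaces the repeated wikiFileFinder.get_analyzeQscan_RDS() calls with a precomputed filesAnalyze list (the full replacement body appears in the next row's add field). Two cautions for readers reusing this code. First, the channel-exclusion test in the rem field, `not "PEM" in chan[0] or not "SEI" in chan[0]`, is true for every channel that does not contain both substrings, so PEM-only and SEI-only channels leak into the "auxiliary" set; De Morgan gives the intended form. Second, in the item #6 analyzeQscan branch of the surrounding context, tmpList is reused without being reinitialised, so entries from the preceding Omega-summary filter carry over into zValueDictAQ. A short sketch of the corrected filtering, with hypothetical channel tuples assumed to be (name, z-value) pairs as produced by __readZranks__/__readSummary__:

# Hypothetical (channel, z-value) pairs; the tuple shape is assumed from
# how the script indexes chan[0] above.
channels = [
    ("H0:PEM-LVEA_MAGX", 4.2),   # PEM, non-seismic
    ("L1:SEI-ETMX_X",    3.1),   # seismic
    ("H1:ASC-QPDX",      2.7),   # auxiliary
]

# Checklist item #6 (PEM only): keep PEM, drop seismic.
pemOnly = [c for c in channels if "PEM" in c[0] and "SEI" not in c[0]]

# Checklist item #7 (AUX only): the original test
#     not "PEM" in chan[0] or not "SEI" in chan[0]
# keeps all three example channels above; the intended exclusion is the
# De Morgan complement of ("PEM" in x or "SEI" in x):
auxOnly = [c for c in channels if "PEM" not in c[0] and "SEI" not in c[0]]

print(pemOnly)   # [('H0:PEM-LVEA_MAGX', 4.2)]
print(auxOnly)   # [('H1:ASC-QPDX', 2.7)]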
for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not "PEM" in chan[0] or not "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") 
wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) 
imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() # for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if (len(zValueFiles) > 0): for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. 
...skipping...\n"%sngl.ifo) #Search for analyzeQscan files #/L1-analyseQscan_L1_932797512_687_seis_rds_L1_SEI-ETMX_X_z_scat-unspecified-gpstime.png timeString=str(float(sngl.time)).replace(".","_") zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)) indexDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_analyzeQscan_SEIS(),\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse keeping SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() zValueDictAQ=dict() #Select only PEM channels for sngl in wikiCoinc.sngls_in_coinc(): imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search 
for corresponding Omega summary.txt file zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse only keeping PEM and not SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) for chan in zValueDictAQ[sngl.ifo]: if "PEM" in chan[0] and not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Analyze Qscan Z ranking file not found for %s. ...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() zValueDict=dict() imageDictAQ=dict() indexDictAQ=dict() thumbDictAQ=dict() 
zValueDictAQ=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)) zValueDict[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDict[sngl.ifo].extend(wikiFileFinder.__readSummary__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDict[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDict[sngl.ifo]=tmpList else: sys.stdout.write("Omega scan summary file not for for %s. ...skipping...\n"%sngl.ifo) #Select associated analyzeQscans imageDictAQ[sngl.ifo]=list() indexDictAQ[sngl.ifo]=list() thumbDictAQ[sngl.ifo]=list() timeString=str(float(sngl.time)).replace(".","_") #H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.html for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueFiles=fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)) #Process zValue ranking file if found for IFO zValueDictAQ[sngl.ifo]=list() if len(zValueFiles)>0: for zFile in zValueFiles: zValueDictAQ[sngl.ifo].extend(wikiFileFinder.__readZranks__(zFile)) #Reparse NOT keeping PEM or SEI channels tmpList=list() for chan in zValueDictAQ[sngl.ifo]: if not "PEM" in chan[0] or not "SEI" in chan[0]: tmpList.append(chan) zValueDictAQ[sngl.ifo]=tmpList else: sys.stdout.write("Z ranking file not found for %s. 
...skipping...\n"%sngl.ifo) #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime_thumb.png #H1-analyseQscan_H1_931176926_116_rds_H0_PEM-MY_SEISX_z_scat-unspecified-gpstime.png for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_analyzeQscan_RDS(),\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its 
analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple 
interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
|
sys.stdout.write("Available via browser at %s\n"%(mapFileURL.convert(myDestPath+"/"+myChecklistFilename))) | def __init__(self,type=None,ifo=None,time=None,snr=None,chisqr=None,mass1=None,mass2=None,mchirp=None): """ """ self.type=str(type) self.ifo=str(ifo) self.time=float(time) self.snr=float(snr) self.chisqr=float(chisqr) self.mass1=float(mass1) self.mass2=float(mass2) self.mchirp=float(mchirp) |
|
i=list(nonzero(np.asarray(toppoints)[:,2]==injbin))[0] | i=list(np.nonzero(np.asarray(toppoints)[:,2]==injbin))[0] | def plotSkyMap(skypos,skyres,sky_injpoint): from pylal import skylocutils from mpl_toolkits.basemap import Basemap skypoints=array(skylocutils.gridsky(float(skyres))) skycarts=map(lambda s: pol2cart(s[1],s[0]),skypoints) skyinjectionconfidence=None shist=bayespputils.skyhist_cart(array(skycarts),skypos) #shist=skyhist_cart(skycarts,list(pos)) bins=skycarts # Find the bin of the injection if available injbin=None if sky_injpoint: injhist=skyhist_cart(skycarts,array([sky_injpoint])) injbin=injhist.tolist().index(1) print 'Found injection in bin %d with co-ordinates %f,%f .'%(injbin,skypoints[injbin,0],skypoints[injbin,1]) (skyinjectionconfidence,toppoints,skyreses)=bayespputils.calculateConfidenceLevels(shist,skypoints,injbin,float(opts.skyres),confidence_levels,len(pos)) if injbin and skyinjectionconfidence: i=list(nonzero(np.asarray(toppoints)[:,2]==injbin))[0] min_sky_area_containing_injection=float(opts.skyres)*float(opts.skyres)*i print 'Minimum sky area containing injection point = %f square degrees'%min_sky_area_containing_injection myfig=plt.figure() plt.clf() m=Basemap(projection='moll',lon_0=180.0,lat_0=0.0) plx,ply=m(np.asarray(toppoints)[::-1,1]*57.296,np.asarray(toppoints)[::-1,0]*57.296) cnlevel=[1-tp for tp in np.asarray(toppoints)[::-1,3]] plt.scatter(plx,ply,s=5,c=cnlevel,faceted=False,cmap=matplotlib.cm.jet) m.drawmapboundary() m.drawparallels(np.arange(-90.,120.,45.),labels=[1,0,0,0],labelstyle='+/-') # draw parallels m.drawmeridians(np.arange(0.,360.,90.),labels=[0,0,0,1],labelstyle='+/-') # draw meridians plt.title("Skymap") # add a title plt.colorbar() myfig.savefig(os.path.join(outdir,'skymap.png')) plt.clf() #Save skypoints np.savetxt('ranked_sky_pixels',column_stack([np.asarray(toppoints)[:,0:1],np.asarray(toppoints)[:,1],np.asarray(toppoints)[:,3]])) return skyreses,skyinjectionconfidence |
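The one-line fix in this row qualifies `nonzero` with its numpy namespace; without a wildcard `from numpy import *`, the bare name raises NameError. A small sketch with invented toppoints data (rows are lat, lon, bin index, cumulative confidence, as in the plotSkyMap context):

import numpy as np

toppoints = np.array([[0.1, 0.2, 3.0, 0.68],
                      [0.3, 0.4, 7.0, 0.90]])
injbin = 7
# np.nonzero returns a tuple of index arrays; taking element 0 recovers the
# row positions whose bin column equals the injection bin.
i = list(np.nonzero(np.asarray(toppoints)[:, 2] == injbin))[0]
print(i)  # array([1])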
self.cp.set("fu-condor","qscan",home_dirs()+"/rgouaty/opt/omega/omega_r3270_glnxa64_binary/bin/wpipeline") | self.cp.set("fu-condor","qscan",stfu_pipe.home_dirs()+"/rgouaty/opt/omega/omega_r3270_glnxa64_binary/bin/wpipeline") | def set_qscan_executable(self): host = stfu_pipe.get_hostname() if 'phy.syr.edu' in host: self.cp.set("fu-condor","qscan",home_dirs()+"/rgouaty/opt/omega/omega_r3270_glnxa64_binary/bin/wpipeline") else: self.cp.set("fu-condor","qscan",home_dirs()+"/romain/opt/omega/omega_r3270_glnxa64_binary/bin/wpipeline") |
self.cp.set("fu-condor","qscan",home_dirs()+"/romain/opt/omega/omega_r3270_glnxa64_binary/bin/wpipeline") | self.cp.set("fu-condor","qscan",stfu_pipe.home_dirs()+"/romain/opt/omega/omega_r3270_glnxa64_binary/bin/wpipeline") def get_cp(self): return self.cp def write(self): self.get_cp().write(open(self.ini_file,"w")) | def set_qscan_executable(self): host = stfu_pipe.get_hostname() if 'phy.syr.edu' in host: self.cp.set("fu-condor","qscan",home_dirs()+"/rgouaty/opt/omega/omega_r3270_glnxa64_binary/bin/wpipeline") else: self.cp.set("fu-condor","qscan",home_dirs()+"/romain/opt/omega/omega_r3270_glnxa64_binary/bin/wpipeline") |
parser.add_option("-f", "--config-file", default="followup_pipe.ini", help="the config file, default looks for stfu_pipe.ini in path, if none is found it makes one from your environment (only provide a config file if you know you must override something)") parser.add_option("-g", "--gps-times", default='', help="Specify gps times to follow up independently of any triggers. Format --gps-times=ifos:time,ifos:time (e.g. --gps-times=H1L1:888888888.999,H2L1:787787787.987,H1H2L1:999999999.999). No segment validation is done. If there is no data for these times it will crash.") | parser.add_option("-f", "--config-file", default="WOD_Bologna.ini", help="the config file, default looks for stfu_pipe.ini in path, if none is found it makes one from your environment (only provide a config file if you know you must override something)") parser.add_option("-g", "--gps-times", default='', help="Specify gps times to follow up. Format --gps-times=ifos:time,ifos:time (e.g. --gps-times=H1L1:888888888.999,H2L1:787787787.987,H1H2L1:999999999.999). No segment validation is done. If there is no data for these times it will crash.") parser.add_option("-i", "--input-file", default='', help="Specify gps times to follow up inside a text file. Format --gps-times=myfile.txt. No segment validation is done. If there is no data for these times it will crash.") | def parse_command_line(): parser = OptionParser( version = "%prog", description = "Pipeline to setup Remote Wscans On Demand" ) parser.add_option("-v", "--verbose", action = "store_true", help = "Be verbose.") parser.add_option("-f", "--config-file", default="followup_pipe.ini", help="the config file, default looks for stfu_pipe.ini in path, if none is found it makes one from your environment (only provide a config file if you know you must override something)") parser.add_option("-g", "--gps-times", default='', help="Specify gps times to follow up independently of any triggers. Format --gps-times=ifos:time,ifos:time (e.g. --gps-times=H1L1:888888888.999,H2L1:787787787.987,H1H2L1:999999999.999). No segment validation is done. If there is no data for these times it will crash.") parser.add_option("","--disable-dag-categories",action="store_true",\ default=False,help="disable the internal dag category maxjobs") parser.add_option("","--no-ht-qscan", action="store_true",\ default=False,help="disable hoft qscan nodes") parser.add_option("","--no-rds-qscan", action="store_true",\ default=False,help="disable rds qscan nodes") parser.add_option("","--no-seismic-qscan", action="store_true",\ default=False,help="disable seismic qscan nodes") parser.add_option("","--no-htQscan-datafind", action="store_true",\ default=False,help="disable hoft qscan datafind nodes") parser.add_option("","--no-rdsQscan-datafind", action="store_true",\ default=False,help="disable rds qscan datafind nodes") parser.add_option("","--do-remoteScans", action="store_true",\ default=True,help="enable the remote scans through condor flocking." \ " This option should be deprecated as soon as the condor flocking is" \ " set up on every LIGO cluster.") options, filenames = parser.parse_args() if not filenames: filenames = [] return options, (filenames or []) |
default_cp = stfu_pipe.create_default_config_wod(options.config_file) | default_cp = create_default_config_wod(options.config_file) | def parse_command_line(): parser = OptionParser( version = "%prog", description = "Pipeline to setup Remote Wscans On Demand" ) parser.add_option("-v", "--verbose", action = "store_true", help = "Be verbose.") parser.add_option("-f", "--config-file", default="followup_pipe.ini", help="the config file, default looks for stfu_pipe.ini in path, if none is found it makes one from your environment (only provide a config file if you know you must override something)") parser.add_option("-g", "--gps-times", default='', help="Specify gps times to follow up independently of any triggers. Format --gps-times=ifos:time,ifos:time (e.g. --gps-times=H1L1:888888888.999,H2L1:787787787.987,H1H2L1:999999999.999). No segment validation is done. If there is no data for these times it will crash.") parser.add_option("","--disable-dag-categories",action="store_true",\ default=False,help="disable the internal dag category maxjobs") parser.add_option("","--no-ht-qscan", action="store_true",\ default=False,help="disable hoft qscan nodes") parser.add_option("","--no-rds-qscan", action="store_true",\ default=False,help="disable rds qscan nodes") parser.add_option("","--no-seismic-qscan", action="store_true",\ default=False,help="disable seismic qscan nodes") parser.add_option("","--no-htQscan-datafind", action="store_true",\ default=False,help="disable hoft qscan datafind nodes") parser.add_option("","--no-rdsQscan-datafind", action="store_true",\ default=False,help="disable rds qscan datafind nodes") parser.add_option("","--do-remoteScans", action="store_true",\ default=True,help="enable the remote scans through condor flocking." \ " This option should be deprecated as soon as the condor flocking is" \ " set up on every LIGO cluster.") options, filenames = parser.parse_args() if not filenames: filenames = [] return options, (filenames or []) |
gpsevents = time_only_events(options.gps_times) | if options.gps_times: gpsevents = time_only_events(options.gps_times) elif options.input_file: gpsevents = extractTimesFromFile(options.input_file) else: print >> sys.stderr, "an argument is missing in the command:\n You need to use one of the options --gps-times or --input-file" sys.exit(1) | def parse_command_line(): parser = OptionParser( version = "%prog", description = "Pipeline to setup Remote Wscans On Demand" ) parser.add_option("-v", "--verbose", action = "store_true", help = "Be verbose.") parser.add_option("-f", "--config-file", default="followup_pipe.ini", help="the config file, default looks for stfu_pipe.ini in path, if none is found it makes one from your environment (only provide a config file if you know you must override something)") parser.add_option("-g", "--gps-times", default='', help="Specify gps times to follow up independently of any triggers. Format --gps-times=ifos:time,ifos:time (e.g. --gps-times=H1L1:888888888.999,H2L1:787787787.987,H1H2L1:999999999.999). No segment validation is done. If there is no data for these times it will crash.") parser.add_option("","--disable-dag-categories",action="store_true",\ default=False,help="disable the internal dag category maxjobs") parser.add_option("","--no-ht-qscan", action="store_true",\ default=False,help="disable hoft qscan nodes") parser.add_option("","--no-rds-qscan", action="store_true",\ default=False,help="disable rds qscan nodes") parser.add_option("","--no-seismic-qscan", action="store_true",\ default=False,help="disable seismic qscan nodes") parser.add_option("","--no-htQscan-datafind", action="store_true",\ default=False,help="disable hoft qscan datafind nodes") parser.add_option("","--no-rdsQscan-datafind", action="store_true",\ default=False,help="disable rds qscan datafind nodes") parser.add_option("","--do-remoteScans", action="store_true",\ default=True,help="enable the remote scans through condor flocking." \ " This option should be deprecated as soon as the condor flocking is" \ " set up on every LIGO cluster.") options, filenames = parser.parse_args() if not filenames: filenames = [] return options, (filenames or []) |
print >>sys.stderr, "following up %s @ %s" % (ifo, event.time) | print >>sys.stdout, "following up %s @ %s" % (ifo, event.time) | def parse_command_line(): parser = OptionParser( version = "%prog", description = "Pipeline to setup Remote Wscans On Demand" ) parser.add_option("-v", "--verbose", action = "store_true", help = "Be verbose.") parser.add_option("-f", "--config-file", default="followup_pipe.ini", help="the config file, default looks for stfu_pipe.ini in path, if none is found it makes one from your environment (only provide a config file if you know you must override something)") parser.add_option("-g", "--gps-times", default='', help="Specify gps times to follow up independently of any triggers. Format --gps-times=ifos:time,ifos:time (e.g. --gps-times=H1L1:888888888.999,H2L1:787787787.987,H1H2L1:999999999.999). No segment validation is done. If there is no data for these times it will crash.") parser.add_option("","--disable-dag-categories",action="store_true",\ default=False,help="disable the internal dag category maxjobs") parser.add_option("","--no-ht-qscan", action="store_true",\ default=False,help="disable hoft qscan nodes") parser.add_option("","--no-rds-qscan", action="store_true",\ default=False,help="disable rds qscan nodes") parser.add_option("","--no-seismic-qscan", action="store_true",\ default=False,help="disable seismic qscan nodes") parser.add_option("","--no-htQscan-datafind", action="store_true",\ default=False,help="disable hoft qscan datafind nodes") parser.add_option("","--no-rdsQscan-datafind", action="store_true",\ default=False,help="disable rds qscan datafind nodes") parser.add_option("","--do-remoteScans", action="store_true",\ default=True,help="enable the remote scans through condor flocking." \ " This option should be deprecated as soon as the condor flocking is" \ " set up on every LIGO cluster.") options, filenames = parser.parse_args() if not filenames: filenames = [] return options, (filenames or []) |
injected_cols.extend(['injected_end_time', 'injected_end_time_ns', 'injected_end_time_utc__Px_click_for_daily_ihope_xP_']) | injected_cols.extend(['injected_decisive_distance','injected_end_time', 'injected_end_time_ns', 'injected_end_time_utc__Px_click_for_daily_ihope_xP_']) | def convert_duration( duration ): return sqlutils.convert_duration( duration, convert_durations ) |
from glue.lal import LIGOTimeGPS | def get_pyvalue(self): return generic_get_pyvalue(self) |
|
AND rank(""", decisive_distance, """) <= """, str(limit), """ | %s""" % (limit is not None and ''.join(['AND rank(', decisive_distance, ') <= ', str(limit)]) or ''), """ | def get_decisive_distance( *args ): return sorted(args)[1] |
dag.add_node(self) | if opts.do_makeCheckList: dag.add_node(self) | def __init__(self,dag,job,cp,opts): pipeline.CondorDAGNode.__init__(self,job) #Specify pipe location self.add_var_opt('followup-directory',cp.get("makeCheckListWiki", "location").strip()) #Specify pipe ini file self.add_var_opt('ini-file',cp.get("makeCheckListWiki", "ini-file").strip()) if not opts.disable_dag_categories: self.set_category(job.name.lower()) #Add this as child of all known jobs for parentNode in dag.get_nodes(): self.add_parent(parentNode) dag.add_node(self) |
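The change above wires the checklist node to every existing job but registers it with the DAG only when its option is enabled. A generic sketch of that guard; the Dag/Node/Opts classes here are minimal stand-ins, not glue.pipeline's API.

class Dag(object):
    def __init__(self):
        self.nodes = []
    def get_nodes(self):
        return self.nodes
    def add_node(self, node):
        self.nodes.append(node)

class Node(object):
    def __init__(self):
        self.parents = []
    def add_parent(self, parent):
        self.parents.append(parent)

class Opts(object):
    do_makeCheckList = False

dag, opts = Dag(), Opts()
node = Node()
for parent in dag.get_nodes():   # node depends on every job already in the DAG
    node.add_parent(parent)
if opts.do_makeCheckList:        # the new guard: only schedule when requested
    dag.add_node(node)
print(len(dag.get_nodes()))      # 0 -- node prepared but never scheduled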
twoDGreedyCL['ra_sb,dec_sb']=skyinjectionconfidence if min_sky_area_containing_injection: twoDGreedyInj['ra_sb,dec_sb']=min_sky_area_containing_injection | twoDGreedyInj['ra_sb,dec_sb']={} twoDGreedyInj['ra_sb,dec_sb']['confidence']=min_sky_area_containing_injection if min_sky_area_containing_injection: twoDGreedyInj['ra_sb,dec_sb']['area']=min_sky_area_containing_injection | def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None,bayesfactornoise=None,bayesfactorcoherent=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1) # if outdir is None: print "You must specify an output directory." exit(1) if not os.path.isdir(outdir): os.makedirs(outdir) # summary_fo=open(os.path.join(outdir,'summary.ini'),'w') summary_file=ConfigParser() summary_file.add_section('metadata') summary_file.set('metadata','group_id','X') if eventnum: summary_file.set('metadata','event_id',str(eventnum)) summary_file.add_section('Confidence levels') summary_file.set('Confidence levels','confidence levels',str(confidence_levels)) # Load in the main data paramnames, pos=loadDataFile(data[0]) #Generate any required derived parameters if "m1" not in paramnames and "m2" not in paramnames and "mchirp" in paramnames and "eta" in paramnames: (m1,m2)=bppu.mc2ms(pos[:,paramnames.index('mchirp')],pos[:,paramnames.index('eta')]) pos=np.column_stack((pos,m1,m2)) paramnames.append("m1") paramnames.append("m2") # Nd=len(paramnames) print "Number of posterior samples: " + str(size(pos,0)) # Calculate means means = mean(pos,axis=0) meanStr=map(str,means) out=reduce(lambda a,b:a+'||'+b,meanStr) print 'Means:' print '||'+out+'||' RAdim=paramnames.index('RA') decdim=paramnames.index('dec') injection=None # Select injections using tc +/- 0.1s if it exists or eventnum from the injection file if injfile: import itertools injections = SimInspiralUtils.ReadSimInspiralFromFiles([injfile]) if(eventnum is not None): if(len(injections)<eventnum): print "Error: You asked for event %d, but %s contains only %d injections" %(eventnum,injfile,len(injections)) sys.exit(1) else: injection=injections[eventnum] else: if(len(injections)<1): print 'Warning: Cannot find injection with end time %f' %(means[2]) else: injection = itertools.ifilter(lambda a: abs(a.get_end() - means[2]) < 0.1, injections).next() #If injection parameter passed load object representation of injection #table entries. if injection: injpoint=map(lambda a: getinjpar(paramnames,injection,a),range(len(paramnames))) injvals=map(str,injpoint) out=reduce(lambda a,b:a+'||'+b,injvals) print 'Injected values:' print out #Add injection values to output file summary_file.add_section('Injection values') for parnum in range(len(paramnames)): summary_file.set('Injection values',paramnames[parnum],getinjpar(paramnames,injection,parnum)) # twoDGreedyCL={} twoDGreedyInj={} #If sky resolution parameter has been specified try and create sky map. 
skyreses=None if skyres is not None: RAvec=array(pos)[:,paramnames.index('RA')] decvec=array(pos)[:,paramnames.index('dec')] skypos=column_stack([RAvec,decvec]) injvalues=None if injection: injvalues=(injpoint[RAdim],injpoint[decdim]) skyreses,toppoints,skyinjectionconfidence,min_sky_area_containing_injection=bppu.plotSkyMap(skypos,skyres,injvalues,confidence_levels,outdir) if skyinjectionconfidence: twoDGreedyCL['ra_sb,dec_sb']=skyinjectionconfidence if min_sky_area_containing_injection: twoDGreedyInj['ra_sb,dec_sb']=min_sky_area_containing_injection # Add bayes factor information to summary file summary_file.add_section('bayesfactor') if bayesfactornoise is not None: bfile=open(bayesfactornoise,'r') BSN=bfile.read() bfile.close() summary_file.set('bayesfactor','BSN',BSN) if bayesfactorcoherent is not None: bfile=open(bayesfactorcoherent,'r') BCI=bfile.read() bfile.close() summary_file.set('bayesfactor','BCI',BCI) #Loop over parameter pairs in twoDGreedyMenu and bin the sample pairs #using a greedy algorithm . The ranked pixels (toppoints) are used #to plot 2D histograms and evaluate Bayesian confidence intervals. summary_file.add_section('2D greedy cl') summary_file.add_section('2D greedy cl inj') ncon=len(confidence_levels) pos_array=np.array(pos) for par1_name,par2_name in twoDGreedyMenu: print "Binning %s-%s to determine confidence levels ..."%(par1_name,par2_name) #Bin sizes try: par1_bin=GreedyRes[par1_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par1_name,par1_name,par2_name) continue try: par2_bin=GreedyRes[par2_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par2_name,par1_name,par2_name) continue #Get posterior samples try: par1_index=paramnames.index(par1_name) except ValueError: print "No input chain for %s, skipping %s/%s binning."%(par1_name,par1_name,par2_name) continue try: par2_index=paramnames.index(par2_name) except ValueError: print "No input chain for %s, skipping %s/%s binning."%(par2_name,par1_name,par2_name) continue pars_name=("%s,%s"%(par1_name,par2_name)).lower() par1pos=pos_array[:,par1_index] par2pos=pos_array[:,par2_index] injection_=None if injection: par1_injvalue=np.array(injpoint)[par1_index] par2_injvalue=np.array(injpoint)[par2_index] injection_=(par1_injvalue,par2_injvalue) posterior_array=column_stack([par1pos,par2pos]) toppoints,injectionconfidence,twoDGreedyCL[pars_name],twoDGreedyInj[pars_name]=bppu.greedyBin2(posterior_array,(par1_bin,par2_bin),confidence_levels,par_names=(par1_name,par2_name),injection=injection_) #Plot 2D histograms of greedily binned points if injection is not None and par1_injvalue is not None and par2_injvalue is not None: bppu.plot2Dbins(np.array(toppoints),(par1_bin,par2_bin),outdir,par_names=(par1_name,par2_name),injpoint=[par1_injvalue,par2_injvalue]) else: bppu.plot2Dbins(np.array(toppoints),(par1_bin,par2_bin),outdir,par_names=(par1_name,par2_name)) # summaryString='[' for frac,area in twoDGreedyCL[pars_name].items(): summaryString+=str(area) summary_file.set('2D greedy cl',pars_name,summaryString+']') summary_file.set('2D greedy cl inj',pars_name,str(injectionconfidence)) if not os.path.exists(os.path.join(outdir,'pickle')): os.makedirs(os.path.join(outdir,'pickle')) pickle_to_file(twoDGreedyCL,os.path.join(outdir,'pickle','GreedyCL2.pickle')) pickle_to_file(twoDGreedyInj,os.path.join(outdir,'pickle','GreedyInj2.pickle')) for par in twoDGreedyCL.keys(): oneD_dict_to_file(twoDGreedyCL[par],os.path.join(outdir,str(par)+'_greedy2.dat')) #1D binning 
summary_file.add_section('1D mean') summary_file.add_section('1D median') summary_file.add_section('1D mode') summary_file.add_section('1D contigious cl') summary_file.add_section('1D greedy cl') summary_file.add_section('1D stacc') oneDStats={} oneDGreedyCL={} oneDContCL={} oneDGreedyInj={} oneDContInj={} max_pos,max_i=posMode(pos_array) #Loop over each parameter and determine the contigious and greedy #confidence levels and some statistics. for par_name in oneDMenu: print "Binning %s to determine confidence levels ..."%par_name try: par_index=paramnames.index(par_name) except ValueError: print "No input chain for %s, skipping binning."%par_name continue try: par_bin=GreedyRes[par_name] except KeyError: print "Bin size is not set for %s, skipping binning."%par_name continue oneDGreedyCL[par_name]={} oneDStats[par_name]={} oneDContCL[par_name]={} oneDGreedyInj[par_name]={} oneDContInj[par_name]={} par_samps=pos_array[:,par_index] summary_file.set('1D mode',par_name,str(par_samps[max_i])) summary_file.set("1D mean",par_name,str(np.mean(par_samps))) summary_file.set("1D median",par_name,str(np.median(par_samps))) oneDStats[par_name]['mode']=par_samps[max_i] oneDStats[par_name]['mean']=np.mean(par_samps) oneDStats[par_name]['median']=np.median(par_samps) par_injvalue_=None if injection: par_injvalue_=np.array(injpoint)[par_index] oneDGreedyCL[par_name],oneDGreedyInj[par_name],toppoints,injectionconfidence = bppu.greedyBin1(par_samps,par_bin,confidence_levels,par_injvalue=par_injvalue_) oneDContCL[par_name],oneDContInj[par_name]=bppu.contCL1(par_samps,par_bin,confidence_levels,par_injvalue=par_injvalue_) #Ilya's standard accuracy statistic if injection: injvalue=np.array(injpoint)[par_index] if injvalue: stacc=bppu.stacc_stat(par_samps,injvalue) summary_file.set('1D stacc',par_name,str(stacc)) oneDStats[par_name]['stacc']=stacc pickle_to_file(oneDGreedyCL,os.path.join(outdir,'pickle','GreedyCL1.pickle')) pickle_to_file(oneDContCL,os.path.join(outdir,'pickle','ContCL1.pickle')) pickle_to_file(oneDStats,os.path.join(outdir,'pickle','Stats1.pickle')) pickle_to_file(oneDContInj,os.path.join(outdir,'pickle','ContInj1.pickle')) pickle_to_file(oneDGreedyInj,os.path.join(outdir,'pickle','GreedyInj1.pickle')) for par in oneDGreedyCL.keys(): oneD_dict_to_file(oneDGreedyCL[par],os.path.join(outdir,str(par)+'_greedy1.dat')) # for par in oneDGreedyCL.keys(): oneD_dict_to_file(oneDContCL[par],os.path.join(outdir,str(par)+'_cont.dat')) # for par in oneDStats.keys(): oneD_dict_to_file(oneDStats[par],os.path.join(outdir,str(par)+'_stats.dat')) # for par in oneDStats.keys(): oneD_dict_to_file(oneDContInj[par],os.path.join(outdir,str(par)+'_cont_inj.dat')) # #####Generate 2D kde plots and webpage######## margdir=os.path.join(outdir,'2D') if not os.path.isdir(margdir): os.makedirs(margdir) twoDKdePaths=[] for par1,par2 in twoDplots: try: i=paramnames.index(par1) except ValueError: print "No input chain for %s, skipping 2D plot of %s-%s."%(par1,par1,par2) continue try: j=paramnames.index(par2) except ValueError: print "No input chain for %s, skipping 2D plot of %s-%s."%(par2,par1,par2) continue print 'Generating %s-%s plot'%(paramnames[i],paramnames[j]) if (size(np.unique(pos[:,i]))<2 or size(np.unique(pos[:,j]))<2): continue par_injvalues_=None if injection and reduce (lambda a,b: a and b, map(lambda idx: getinjpar(paramnames,injection,idx)>min(pos[:,idx]) and getinjpar(paramnames,injection,idx)<max(pos[:,idx]),[i,j])) : if getinjpar(paramnames,injection,i) is not None and getinjpar(paramnames,injection,j) is not 
None: par_injvalues_=( getinjpar(paramnames,injection,i) , getinjpar(paramnames,injection,j) ) myfig=bppu.plot2Dkernel(pos[:,i],pos[:,j],50,50,par_names=(par1,par2),par_injvalues=par_injvalues_) twoDKdePath=os.path.join(margdir,paramnames[i]+'-'+paramnames[j]+'_2Dkernel.png') twoDKdePaths.append(twoDKdePath) myfig.savefig(twoDKdePath) htmlfile=open(os.path.join(outdir,'posplots.html'),'w') htmlfile.write('<HTML><HEAD><TITLE>Posterior PDFs</TITLE></HEAD><BODY><h3>'+str(means[2])+' Posterior PDFs</h3>') if(skyres is not None): htmlfile.write('<table border=1><tr><td>Confidence region<td>size (sq. deg)</tr>') for (frac,skysize) in skyreses: htmlfile.write('<tr><td>%f<td>%f</tr>'%(frac,skysize)) htmlfile.write('</table>') if bayesfactornoise is not None: htmlfile.write('<p>log Bayes factor ( coherent vs gaussian noise) = %s, Bayes factor=%f</p>'%(BSN,exp(float(BSN)))) if bayesfactorcoherent is not None: htmlfile.write('<p>log Bayes factor ( coherent vs incoherent OR noise ) = %s, Bayes factor=%f</p>'%(BCI,exp(float(BCI)))) htmlfile.write('Produced from '+str(size(pos,0))+' posterior samples.<br>') htmlfile.write('Samples read from %s<br>'%(data[0])) htmlfile.write('<h4>Mean parameter estimates</h4>') htmlfile.write('<table border=1><tr>') paramline=reduce(lambda a,b:a+'<td>'+b,paramnames) htmlfile.write('<td>'+paramline+'<td>logLmax</tr><tr>') meanline=reduce(lambda a,b:a+'<td>'+b,meanStr) htmlfile.write('<td>'+meanline+'</tr>') if injection: htmlfile.write('<tr><th colspan='+str(len(paramnames))+'>Injected values</tr>') injline=reduce(lambda a,b:a+'<td>'+b,injvals) htmlfile.write('<td>'+injline+'<td></tr>') htmlfile.write('</table>') if injection: if skyinjectionconfidence: htmlfile.write('<p>Injection found at confidence interval %f in sky location</p>'%(skyinjectionconfidence)) else: htmlfile.write('<p>Injection not found in posterior bins in sky location!</p>') htmlfile.write('<h5>2D Marginal PDFs</h5><br>') htmlfile.write('<table border=1 width=100%><tr>') #htmlfile.write('<td width=30%><img width=100% src="m1m2.png"></td>') #htmlfile.write('<td width=30%><img width=100% src="RAdec.png"></td>') #htmlfile.write('<td width=30%><img width=100% src="Meta.png"></td>') #htmlfile.write('</tr><tr><td width=30%><img width=100% src="2D/Mchirp (Msun)-geocenter time ISCO_2Dkernel.png"</td>') if skyres is not None: htmlfile.write('<td width=30%><img width=100% src="skymap.png"></td>') else: htmlfile.write('<td width=30%><img width=100% src="m1dist.png:></td>') #htmlfile.write('<td width=30%><img width=100% src="m2dist.png"></td>') row_switch=1 for par1,par2 in twoDplots: if row_switch==3: row_switch=0 plot_path=None if os.path.isfile(os.path.join(outdir,'2D',par1+'-'+par2+'_2Dkernel.png')): plot_path='2D/'+par1+'-'+par2+'_2Dkernel.png' elif os.path.isfile(os.path.join(outdir,'2D',par2+'-'+par1+'_2Dkernel.png')): plot_path='2D/'+par2+'-'+par1+'_2Dkernel.png' if plot_path: if row_switch==0: htmlfile.write('<tr>') htmlfile.write('<td width=30%><img width=100% src="'+plot_path+'"></td>') if row_switch==2: htmlfile.write('</tr>') row_switch+=1 # if row_switch==2: htmlfile.write('<td></td></tr>') elif row_switch==1: htmlfile.write('<td></td><td></td></tr>') htmlfile.write('</table>') htmlfile.write('<br><a href="2D/">All 2D Marginal PDFs</a><hr><h5>1D marginal posterior PDFs</h5><br>') summary_file.add_section('1D ranking kde') summary_file.add_section('1D ranking bins') oneDplotPaths=[] for param in oneDMenu: try: par_index=paramnames.index(param) i=par_index except ValueError: print "No input chain for 
%s, skipping 1D plot."%param continue pos_samps=pos[:,i] injpar_=None if injection: injpar_=getinjpar(paramnames,injection,i) print "Generating 1D plot for %s."%param rbins,plotFig=bppu.plot1DPDF(pos_samps,param,injpar=injpar_) figname=param+'.png' oneDplotPath=os.path.join(outdir,figname) plotFig.savefig(os.path.join(outdir,param+'.png')) if rbins: print "r of injected value of %s (bins) = %f"%(param, rbins) ##Produce plot of raw samples myfig=plt.figure(figsize=(4,3.5),dpi=80) plt.plot(pos_samps,'.',figure=myfig) if injpar_: if min(pos_samps)<injpar_ and max(pos_samps)>injpar_: plt.plot([0,len(pos_samps)],[injpar_,injpar_],'r-.') myfig.savefig(os.path.join(outdir,figname.replace('.png','_samps.png'))) #summary_file.set('1D ranking kde',param,rkde) summary_file.set('1D ranking bins',param,rbins) oneDplotPaths.append(figname) htmlfile.write('<table><tr><th>Histogram and Kernel Density Estimate</th><th>Samples used</th>') for plotPath in oneDplotPaths: htmlfile.write('<tr><td><img src="'+plotPath+'"></td><td><img src="'+plotPath.replace('.png','_samps.png')+'"></td>') htmlfile.write('</table>') htmlfile.write('<hr><br />Produced using cbcBayesSkyRes.py at '+strftime("%Y-%m-%d %H:%M:%S")) htmlfile.write('</BODY></HTML>') htmlfile.close() # Save posterior samples too... posfilename=os.path.join(outdir,'posterior_samples.dat') posfile=open(posfilename,'w') for row in pos: for i in row: posfile.write('%f\t'%(i)) posfile.write('\n') # #Close files posfile.close() summary_file.write(summary_fo) |
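Within the long cbcBayesSkyRes row above, the sky-localization fix stops storing a bare scalar under 'ra_sb,dec_sb' and keeps a small per-key dict instead, so downstream code can read both a confidence and an area for each parameter pair. The sketch below shows that layout with invented numbers; note the patch as recorded assigns the area to both fields, which reads like a copy-paste slip, so the sketch shows the structure rather than reproducing that value choice.

twoDGreedyInj = {}
skyinjectionconfidence = 0.68             # invented
min_sky_area_containing_injection = 12.5  # square degrees, invented

twoDGreedyInj['ra_sb,dec_sb'] = {}
twoDGreedyInj['ra_sb,dec_sb']['confidence'] = skyinjectionconfidence
if min_sky_area_containing_injection:
    twoDGreedyInj['ra_sb,dec_sb']['area'] = min_sky_area_containing_injection
print(twoDGreedyInj)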
while True: | while interval<len(np.asarray(toppoints)[:,3]): | def greedyBin1(par_samps,par_bin,confidence_levels,par_injvalue=None): oneDGreedyCL={} oneDGreedyInj={} parpos_min=min(par_samps) parpos_max=max(par_samps) par_point=parpos_min parpos_Nbins= int(ceil((parpos_max - parpos_min)/par_bin))+1 greedyPoints=np.zeros((parpos_Nbins,2)) #2D so it can be put through same confidence level function greedyHist=np.zeros(parpos_Nbins,dtype='i8') #Bin up for i in range(parpos_Nbins): greedyPoints[i,0]=par_point greedyPoints[i,1]=par_point par_point+=par_bin for par_samp in par_samps: par_binNumber=int(floor((par_samp-parpos_min)/par_bin)) try: greedyHist[par_binNumber]+=1 except IndexError: print "IndexError: bin number: %i total bins: %i parsamp: %f "%(par_binNumber,parpos_Nbins,par_samp) injbin=None #Find injection bin if par_injvalue: par_binNumber=floor((par_injvalue-parpos_min)/par_bin) injbin=par_binNumber #Determine confidence levels (injectionconfidence,toppoints,reses)=calculateConfidenceLevels(greedyHist,greedyPoints,injbin,sqrt(par_bin),confidence_levels,len(par_samps)) #areastr='' for (frac,area) in reses: oneDGreedyCL[frac]=area if par_injvalue and injectionconfidence: oneDGreedyInj['confidence']=injectionconfidence #Recover interval containing injection point interval=0 while True: if injectionconfidence<np.asarray(toppoints)[interval,3]: break interval+=1 interval=interval*par_bin oneDGreedyInj['interval']=interval return oneDGreedyCL,oneDGreedyInj,toppoints,injectionconfidence |
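The greedyBin1 fix above replaces an unbounded `while True` scan with an explicit bound, so an injection confidence larger than every ranked bin no longer walks off the end of the array (the old loop would raise IndexError on the step past the last row). Illustrative data:

import numpy as np

# toppoints rows: (lat, lon, bin index, cumulative confidence) -- invented
toppoints = np.array([[0., 0., 0., 0.3],
                      [0., 0., 1., 0.6],
                      [0., 0., 2., 0.9]])
injectionconfidence = 0.95  # beyond every entry: the old loop never breaks
par_bin = 0.5

interval = 0
while interval < len(np.asarray(toppoints)[:, 3]):
    if injectionconfidence < np.asarray(toppoints)[interval, 3]:
        break
    interval += 1
interval = interval * par_bin
print(interval)  # 1.5: interval capped at the full scanned range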
galpt = fbins[sbin(fbins,inj_pt,fine_res)] | galpt = fbins[skylocutils.sbin(fbins,inj_pt,fine_res)] | def get_unique_filename(name): """ use this to avoid name collisions """ counter = 1 base_name, ext = os.path.splitext(name) while os.path.isfile(name): name = base_name + '_' + str(counter) + ext counter += 1 return name |