rem (string, lengths 0–322k) | add (string, lengths 0–2.05M) | context (string, lengths 8–228k) |
---|---|---|
flines[:,i]=asin(flines[:,i]) | flines[:,i]=arcsin(flines[:,i]) | def loadDataFile(filename): print filename infile=open(filename,'r') formatstr=infile.readline().lstrip() header=formatstr.split() llines=[] import re dec=re.compile(r'[^\d.-]+') for line in infile: sline=line.split() proceed=True for s in sline: if dec.search(s) is not None: print 'Warning! Ignoring non-numeric data after the header: %s'%(sline) proceed=False if proceed: llines.append(array(map(float,sline))) flines=array(llines) for i in range(0,len(header)): if header[i].lower().find('log')!=-1 and header[i].lower()!='logl': print 'exponentiating %s'%(header[i]) flines[:,i]=exp(flines[:,i]) header[i]=header[i].replace('log','') if header[i].lower().find('sin')!=-1: print 'asining %s'%(header[i]) flines[:,i]=asin(flines[:,i]) header[i]=header[i].replace('sin','') if header[i].lower().find('cos')!=-1: print 'acosing %s'%(header[i]) flines[:,i]=acos(flines[:,i]) header[i]=header[i].replace('cos','') header[i]=header[i].replace('(','') header[i]=header[i].replace(')','') print 'Read columns %s'%(str(header)) return header,flines |
flines[:,i]=acos(flines[:,i]) | flines[:,i]=arccos(flines[:,i]) | def loadDataFile(filename): print filename infile=open(filename,'r') formatstr=infile.readline().lstrip() header=formatstr.split() llines=[] import re dec=re.compile(r'[^\d.-]+') for line in infile: sline=line.split() proceed=True for s in sline: if dec.search(s) is not None: print 'Warning! Ignoring non-numeric data after the header: %s'%(sline) proceed=False if proceed: llines.append(array(map(float,sline))) flines=array(llines) for i in range(0,len(header)): if header[i].lower().find('log')!=-1 and header[i].lower()!='logl': print 'exponentiating %s'%(header[i]) flines[:,i]=exp(flines[:,i]) header[i]=header[i].replace('log','') if header[i].lower().find('sin')!=-1: print 'asining %s'%(header[i]) flines[:,i]=asin(flines[:,i]) header[i]=header[i].replace('sin','') if header[i].lower().find('cos')!=-1: print 'acosing %s'%(header[i]) flines[:,i]=acos(flines[:,i]) header[i]=header[i].replace('cos','') header[i]=header[i].replace('(','') header[i]=header[i].replace(')','') print 'Read columns %s'%(str(header)) return header,flines |
def add(self, *args): packing.Bin.add(self, *args) | def add(self, cache_entry): packing.Bin.add(self, cache_entry, cache_entry.to_segmentlistdict()) | def add(self, *args): packing.Bin.add(self, *args) self.extent = self.size.extent_all() return self |
new.add(cache_entry, cache_entry.to_segmentlistdict()) | new.add(cache_entry) | def pack(self, cache_entry): """ Find all bins in which this glue.lal.CacheEntry instance belongs, merge them, and add this cache entry to the result. Create a new bin for this cache entry if it does not belong in any of the existing bins. |
splits = [-segments.infinity()] + [lsctables.LIGOTimeGPS(origbin.extent[0] + i * float(origbin.extent[1] - origbin.extent[0]) / n) for i in range(1, n)] + [+segments.infinity()] | extents = [-segments.infinity()] + [lsctables.LIGOTimeGPS(origbin.extent[0] + i * float(abs(origbin.extent)) / n) for i in range(1, n)] + [+segments.infinity()] | def split_bins(cafepacker, extentlimit, verbose = False): """ Split bins in CafePacker so that each bin has an extent no longer than extentlimit. """ # # loop over all bins in cafepacker.bins # idx = 0 while idx < len(cafepacker.bins): # # retrieve bin # origbin = cafepacker.bins[idx] # # how many pieces? if bin doesn't need splitting move to # next # n = int(math.ceil(float(abs(origbin.extent)) / extentlimit)) if n <= 1: idx += 1 continue # # calculate the times of the splits, and then build # segmentlistdicts for clipping. # splits = [-segments.infinity()] + [lsctables.LIGOTimeGPS(origbin.extent[0] + i * float(origbin.extent[1] - origbin.extent[0]) / n) for i in range(1, n)] + [+segments.infinity()] if verbose: print >>sys.stderr, "\tsplitting cache spanning %s at %s" % (str(origbin.extent), ", ".join(str(split) for split in splits[1:-1])) splits = [segments.segmentlist([segments.segment(*bounds)]) for bounds in zip(splits[:-1], splits[1:])] splits = [segments.segmentlistdict.fromkeys(origbin.size, seglist) for seglist in splits] # # build new bins, populate sizes and extents # newbins = [] for split in splits: newbins.append(LALCacheBin()) newbins[-1].size = origbin.size & split for key in tuple(newbins[-1].size): if not newbins[-1].size[key]: del newbins[-1].size[key] newbins[-1].extent = newbins[-1].size.extent_all() # # pack objects from origbin into new bins # for bin in newbins: bin_extent_plus_max_gap = bin.extent.protract(cafepacker.max_gap) for cache_entry in origbin.objects: # # quick check of gap # if cache_entry.segment.disjoint(bin_extent_plus_max_gap): continue # # apply each offset vector # cache_entry_segs = cache_entry.to_segmentlistdict() for offset_vector in cafepacker.offset_vectors: cache_entry_segs.offsets.update(offset_vector) # # test against bin # if cache_entry_segs.intersects_segment(bin.extent): # # object is coicident with # bin # bin.objects.append(cache_entry) break # # replace original bin with split bins. increment idx to # skip over all new bins # cafepacker.bins[idx:idx+1] = newbins idx += len(newbins) # # done # |
print >>sys.stderr, "\tsplitting cache spanning %s at %s" % (str(origbin.extent), ", ".join(str(split) for split in splits[1:-1])) splits = [segments.segmentlist([segments.segment(*bounds)]) for bounds in zip(splits[:-1], splits[1:])] splits = [segments.segmentlistdict.fromkeys(origbin.size, seglist) for seglist in splits] | print >>sys.stderr, "\tsplitting cache spanning %s at %s" % (str(origbin.extent), ", ".join(str(extent) for extent in extents[1:-1])) extents = [segments.segment(*bounds) & origbin.extent for bounds in zip(extents[:-1], extents[1:])] | def split_bins(cafepacker, extentlimit, verbose = False): """ Split bins in CafePacker so that each bin has an extent no longer than extentlimit. """ # # loop over all bins in cafepacker.bins # idx = 0 while idx < len(cafepacker.bins): # # retrieve bin # origbin = cafepacker.bins[idx] # # how many pieces? if bin doesn't need splitting move to # next # n = int(math.ceil(float(abs(origbin.extent)) / extentlimit)) if n <= 1: idx += 1 continue # # calculate the times of the splits, and then build # segmentlistdicts for clipping. # splits = [-segments.infinity()] + [lsctables.LIGOTimeGPS(origbin.extent[0] + i * float(origbin.extent[1] - origbin.extent[0]) / n) for i in range(1, n)] + [+segments.infinity()] if verbose: print >>sys.stderr, "\tsplitting cache spanning %s at %s" % (str(origbin.extent), ", ".join(str(split) for split in splits[1:-1])) splits = [segments.segmentlist([segments.segment(*bounds)]) for bounds in zip(splits[:-1], splits[1:])] splits = [segments.segmentlistdict.fromkeys(origbin.size, seglist) for seglist in splits] # # build new bins, populate sizes and extents # newbins = [] for split in splits: newbins.append(LALCacheBin()) newbins[-1].size = origbin.size & split for key in tuple(newbins[-1].size): if not newbins[-1].size[key]: del newbins[-1].size[key] newbins[-1].extent = newbins[-1].size.extent_all() # # pack objects from origbin into new bins # for bin in newbins: bin_extent_plus_max_gap = bin.extent.protract(cafepacker.max_gap) for cache_entry in origbin.objects: # # quick check of gap # if cache_entry.segment.disjoint(bin_extent_plus_max_gap): continue # # apply each offset vector # cache_entry_segs = cache_entry.to_segmentlistdict() for offset_vector in cafepacker.offset_vectors: cache_entry_segs.offsets.update(offset_vector) # # test against bin # if cache_entry_segs.intersects_segment(bin.extent): # # object is coicident with # bin # bin.objects.append(cache_entry) break # # replace original bin with split bins. increment idx to # skip over all new bins # cafepacker.bins[idx:idx+1] = newbins idx += len(newbins) # # done # |
for split in splits: | for extent in extents: | def split_bins(cafepacker, extentlimit, verbose = False): """ Split bins in CafePacker so that each bin has an extent no longer than extentlimit. """ # # loop over all bins in cafepacker.bins # idx = 0 while idx < len(cafepacker.bins): # # retrieve bin # origbin = cafepacker.bins[idx] # # how many pieces? if bin doesn't need splitting move to # next # n = int(math.ceil(float(abs(origbin.extent)) / extentlimit)) if n <= 1: idx += 1 continue # # calculate the times of the splits, and then build # segmentlistdicts for clipping. # splits = [-segments.infinity()] + [lsctables.LIGOTimeGPS(origbin.extent[0] + i * float(origbin.extent[1] - origbin.extent[0]) / n) for i in range(1, n)] + [+segments.infinity()] if verbose: print >>sys.stderr, "\tsplitting cache spanning %s at %s" % (str(origbin.extent), ", ".join(str(split) for split in splits[1:-1])) splits = [segments.segmentlist([segments.segment(*bounds)]) for bounds in zip(splits[:-1], splits[1:])] splits = [segments.segmentlistdict.fromkeys(origbin.size, seglist) for seglist in splits] # # build new bins, populate sizes and extents # newbins = [] for split in splits: newbins.append(LALCacheBin()) newbins[-1].size = origbin.size & split for key in tuple(newbins[-1].size): if not newbins[-1].size[key]: del newbins[-1].size[key] newbins[-1].extent = newbins[-1].size.extent_all() # # pack objects from origbin into new bins # for bin in newbins: bin_extent_plus_max_gap = bin.extent.protract(cafepacker.max_gap) for cache_entry in origbin.objects: # # quick check of gap # if cache_entry.segment.disjoint(bin_extent_plus_max_gap): continue # # apply each offset vector # cache_entry_segs = cache_entry.to_segmentlistdict() for offset_vector in cafepacker.offset_vectors: cache_entry_segs.offsets.update(offset_vector) # # test against bin # if cache_entry_segs.intersects_segment(bin.extent): # # object is coicident with # bin # bin.objects.append(cache_entry) break # # replace original bin with split bins. increment idx to # skip over all new bins # cafepacker.bins[idx:idx+1] = newbins idx += len(newbins) # # done # |
newbins[-1].size = origbin.size & split for key in tuple(newbins[-1].size): if not newbins[-1].size[key]: del newbins[-1].size[key] newbins[-1].extent = newbins[-1].size.extent_all() for bin in newbins: bin_extent_plus_max_gap = bin.extent.protract(cafepacker.max_gap) | extent_plus_max_gap = extent.protract(cafepacker.max_gap) | def split_bins(cafepacker, extentlimit, verbose = False): """ Split bins in CafePacker so that each bin has an extent no longer than extentlimit. """ # # loop over all bins in cafepacker.bins # idx = 0 while idx < len(cafepacker.bins): # # retrieve bin # origbin = cafepacker.bins[idx] # # how many pieces? if bin doesn't need splitting move to # next # n = int(math.ceil(float(abs(origbin.extent)) / extentlimit)) if n <= 1: idx += 1 continue # # calculate the times of the splits, and then build # segmentlistdicts for clipping. # splits = [-segments.infinity()] + [lsctables.LIGOTimeGPS(origbin.extent[0] + i * float(origbin.extent[1] - origbin.extent[0]) / n) for i in range(1, n)] + [+segments.infinity()] if verbose: print >>sys.stderr, "\tsplitting cache spanning %s at %s" % (str(origbin.extent), ", ".join(str(split) for split in splits[1:-1])) splits = [segments.segmentlist([segments.segment(*bounds)]) for bounds in zip(splits[:-1], splits[1:])] splits = [segments.segmentlistdict.fromkeys(origbin.size, seglist) for seglist in splits] # # build new bins, populate sizes and extents # newbins = [] for split in splits: newbins.append(LALCacheBin()) newbins[-1].size = origbin.size & split for key in tuple(newbins[-1].size): if not newbins[-1].size[key]: del newbins[-1].size[key] newbins[-1].extent = newbins[-1].size.extent_all() # # pack objects from origbin into new bins # for bin in newbins: bin_extent_plus_max_gap = bin.extent.protract(cafepacker.max_gap) for cache_entry in origbin.objects: # # quick check of gap # if cache_entry.segment.disjoint(bin_extent_plus_max_gap): continue # # apply each offset vector # cache_entry_segs = cache_entry.to_segmentlistdict() for offset_vector in cafepacker.offset_vectors: cache_entry_segs.offsets.update(offset_vector) # # test against bin # if cache_entry_segs.intersects_segment(bin.extent): # # object is coicident with # bin # bin.objects.append(cache_entry) break # # replace original bin with split bins. increment idx to # skip over all new bins # cafepacker.bins[idx:idx+1] = newbins idx += len(newbins) # # done # |
if cache_entry.segment.disjoint(bin_extent_plus_max_gap): | if cache_entry.segment.disjoint(extent_plus_max_gap): | def split_bins(cafepacker, extentlimit, verbose = False): """ Split bins in CafePacker so that each bin has an extent no longer than extentlimit. """ # # loop over all bins in cafepacker.bins # idx = 0 while idx < len(cafepacker.bins): # # retrieve bin # origbin = cafepacker.bins[idx] # # how many pieces? if bin doesn't need splitting move to # next # n = int(math.ceil(float(abs(origbin.extent)) / extentlimit)) if n <= 1: idx += 1 continue # # calculate the times of the splits, and then build # segmentlistdicts for clipping. # splits = [-segments.infinity()] + [lsctables.LIGOTimeGPS(origbin.extent[0] + i * float(origbin.extent[1] - origbin.extent[0]) / n) for i in range(1, n)] + [+segments.infinity()] if verbose: print >>sys.stderr, "\tsplitting cache spanning %s at %s" % (str(origbin.extent), ", ".join(str(split) for split in splits[1:-1])) splits = [segments.segmentlist([segments.segment(*bounds)]) for bounds in zip(splits[:-1], splits[1:])] splits = [segments.segmentlistdict.fromkeys(origbin.size, seglist) for seglist in splits] # # build new bins, populate sizes and extents # newbins = [] for split in splits: newbins.append(LALCacheBin()) newbins[-1].size = origbin.size & split for key in tuple(newbins[-1].size): if not newbins[-1].size[key]: del newbins[-1].size[key] newbins[-1].extent = newbins[-1].size.extent_all() # # pack objects from origbin into new bins # for bin in newbins: bin_extent_plus_max_gap = bin.extent.protract(cafepacker.max_gap) for cache_entry in origbin.objects: # # quick check of gap # if cache_entry.segment.disjoint(bin_extent_plus_max_gap): continue # # apply each offset vector # cache_entry_segs = cache_entry.to_segmentlistdict() for offset_vector in cafepacker.offset_vectors: cache_entry_segs.offsets.update(offset_vector) # # test against bin # if cache_entry_segs.intersects_segment(bin.extent): # # object is coicident with # bin # bin.objects.append(cache_entry) break # # replace original bin with split bins. increment idx to # skip over all new bins # cafepacker.bins[idx:idx+1] = newbins idx += len(newbins) # # done # |
if cache_entry_segs.intersects_segment(bin.extent): | if cache_entry_segs.intersects_segment(extent): | def split_bins(cafepacker, extentlimit, verbose = False): """ Split bins in CafePacker so that each bin has an extent no longer than extentlimit. """ # # loop over all bins in cafepacker.bins # idx = 0 while idx < len(cafepacker.bins): # # retrieve bin # origbin = cafepacker.bins[idx] # # how many pieces? if bin doesn't need splitting move to # next # n = int(math.ceil(float(abs(origbin.extent)) / extentlimit)) if n <= 1: idx += 1 continue # # calculate the times of the splits, and then build # segmentlistdicts for clipping. # splits = [-segments.infinity()] + [lsctables.LIGOTimeGPS(origbin.extent[0] + i * float(origbin.extent[1] - origbin.extent[0]) / n) for i in range(1, n)] + [+segments.infinity()] if verbose: print >>sys.stderr, "\tsplitting cache spanning %s at %s" % (str(origbin.extent), ", ".join(str(split) for split in splits[1:-1])) splits = [segments.segmentlist([segments.segment(*bounds)]) for bounds in zip(splits[:-1], splits[1:])] splits = [segments.segmentlistdict.fromkeys(origbin.size, seglist) for seglist in splits] # # build new bins, populate sizes and extents # newbins = [] for split in splits: newbins.append(LALCacheBin()) newbins[-1].size = origbin.size & split for key in tuple(newbins[-1].size): if not newbins[-1].size[key]: del newbins[-1].size[key] newbins[-1].extent = newbins[-1].size.extent_all() # # pack objects from origbin into new bins # for bin in newbins: bin_extent_plus_max_gap = bin.extent.protract(cafepacker.max_gap) for cache_entry in origbin.objects: # # quick check of gap # if cache_entry.segment.disjoint(bin_extent_plus_max_gap): continue # # apply each offset vector # cache_entry_segs = cache_entry.to_segmentlistdict() for offset_vector in cafepacker.offset_vectors: cache_entry_segs.offsets.update(offset_vector) # # test against bin # if cache_entry_segs.intersects_segment(bin.extent): # # object is coicident with # bin # bin.objects.append(cache_entry) break # # replace original bin with split bins. increment idx to # skip over all new bins # cafepacker.bins[idx:idx+1] = newbins idx += len(newbins) # # done # |
bin.objects.append(cache_entry) | newbins[-1].add(cache_entry) | def split_bins(cafepacker, extentlimit, verbose = False): """ Split bins in CafePacker so that each bin has an extent no longer than extentlimit. """ # # loop over all bins in cafepacker.bins # idx = 0 while idx < len(cafepacker.bins): # # retrieve bin # origbin = cafepacker.bins[idx] # # how many pieces? if bin doesn't need splitting move to # next # n = int(math.ceil(float(abs(origbin.extent)) / extentlimit)) if n <= 1: idx += 1 continue # # calculate the times of the splits, and then build # segmentlistdicts for clipping. # splits = [-segments.infinity()] + [lsctables.LIGOTimeGPS(origbin.extent[0] + i * float(origbin.extent[1] - origbin.extent[0]) / n) for i in range(1, n)] + [+segments.infinity()] if verbose: print >>sys.stderr, "\tsplitting cache spanning %s at %s" % (str(origbin.extent), ", ".join(str(split) for split in splits[1:-1])) splits = [segments.segmentlist([segments.segment(*bounds)]) for bounds in zip(splits[:-1], splits[1:])] splits = [segments.segmentlistdict.fromkeys(origbin.size, seglist) for seglist in splits] # # build new bins, populate sizes and extents # newbins = [] for split in splits: newbins.append(LALCacheBin()) newbins[-1].size = origbin.size & split for key in tuple(newbins[-1].size): if not newbins[-1].size[key]: del newbins[-1].size[key] newbins[-1].extent = newbins[-1].size.extent_all() # # pack objects from origbin into new bins # for bin in newbins: bin_extent_plus_max_gap = bin.extent.protract(cafepacker.max_gap) for cache_entry in origbin.objects: # # quick check of gap # if cache_entry.segment.disjoint(bin_extent_plus_max_gap): continue # # apply each offset vector # cache_entry_segs = cache_entry.to_segmentlistdict() for offset_vector in cafepacker.offset_vectors: cache_entry_segs.offsets.update(offset_vector) # # test against bin # if cache_entry_segs.intersects_segment(bin.extent): # # object is coicident with # bin # bin.objects.append(cache_entry) break # # replace original bin with split bins. increment idx to # skip over all new bins # cafepacker.bins[idx:idx+1] = newbins idx += len(newbins) # # done # |
injection_area=bin_size*i | injection_area=bin_size*(i+1) | def _greedy_bin(greedyHist,greedyPoints,injection_bin_index,bin_size,Nsamples,confidence_levels): """ An interal function representing the common, dimensionally-independent part of the greedy binning algorithms. """ #Now call confidence level C extension function to determine top-ranked pixels (injectionconfidence,toppoints)=_calculate_confidence_levels( greedyHist, greedyPoints, injection_bin_index, bin_size, Nsamples ) #Determine interval/area contained within given confidence intervals nBins=0 confidence_levels.sort() reses={} toppoints=np.array(toppoints) for printcl in confidence_levels: nBins=1 #Start at top of list of ranked pixels... accl=toppoints[0,3] #Loop over next significant pixels and their confidence levels while accl<printcl and nBins<=len(toppoints): nBins=nBins+1 accl=toppoints[nBins-1,3] reses[printcl]=nBins*bin_size #Find area injection_area=None if injection_bin_index and injectionconfidence: i=list(np.nonzero(np.asarray(toppoints)[:,2]==injection_bin_index))[0] injection_area=bin_size*i return toppoints,injectionconfidence,reses,injection_area |
for (ifo,epoch) in ifoEpochList: | except: backgroundPickle=False sys.stderr.write("Error importing the pickle file! %s\n"\ %(pickleLocale)) return for (ifo,epoch) in ifoEpochList: | def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"\ %(ifoEpochList,type(ifoEpochList)) #Make sure epoch exists for reach ifo for ifo,epoch in ifoEpochList: if ifo not in runEpochs.keys(): raise Exception, "Bad ifo specified, %s"%ifo if epoch not in runEpochs[ifo].keys(): raise Exception, "Bad ifo epoch specified, %s:%s"%(ifo,epoch) #If pickle location given try to load that pickle first. backgroundPickle=False if pickleLocale!=None: #If pickle file exists read it if not make sure we can #generate it properly otherwise skip creating background if os.path.isfile(pickleLocale): try: self.__backgroundDict__=cPickle.load(file(pickleLocale,'r')) backgroundPickle=True for (ifo,epoch) in ifoEpochList: if (ifo.upper().strip(),epoch.upper().strip()) \ not in self.__backgroundDict__["ifoepoch"]: raise Exception,\ "Invalid ifo and epoch information in \ |
generated background %s"%self.__backgroundDict__["ifoepoch"] except: backgroundPickle=False sys.stderr.write("Error importing the pickle file!\n") if os.access(os.path.split(pickleLocal)[0],os.W_OK): os.path.rename(pickleLocale,pickleLocale+".corrupt") | generated background expected %s got %s"%(\ self.__backgroundDict__["ifoepoch"], ifoEpochList) | def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"\ %(ifoEpochList,type(ifoEpochList)) #Make sure epoch exists for reach ifo for ifo,epoch in ifoEpochList: if ifo not in runEpochs.keys(): raise Exception, "Bad ifo specified, %s"%ifo if epoch not in runEpochs[ifo].keys(): raise Exception, "Bad ifo epoch specified, %s:%s"%(ifo,epoch) #If pickle location given try to load that pickle first. backgroundPickle=False if pickleLocale!=None: #If pickle file exists read it if not make sure we can #generate it properly otherwise skip creating background if os.path.isfile(pickleLocale): try: self.__backgroundDict__=cPickle.load(file(pickleLocale,'r')) backgroundPickle=True for (ifo,epoch) in ifoEpochList: if (ifo.upper().strip(),epoch.upper().strip()) \ not in self.__backgroundDict__["ifoepoch"]: raise Exception,\ "Invalid ifo and epoch information in \ |
ifoEpochList=[(x,getRunEpoch(self.triggerTime,x)) for x in uniqIfos] | ifoEpochList=[(x,getRunEpoch(self.triggerTime,x)) for x in self.ifos] | def estimateDQbackground(self): """ This method looks at the self.resultlist inside the instance. Using this and 1000 generated time stamp it tabulates a ranking of flag prevelance, binomial probability 'p' """ if len(self.resultList) < 1: self.__backgroundResults__=list() self.__backgroundTimesDict__=dict() self.__backgroundDict__=dict() self.__haveBackgroundDict__=bool(False) return #Create DQ background, specify pickle locale to try and load first #Determine which IFOs from DQ listing uniqIfos=list() for ifo,b,c,d,e,f in self.resultList: if ifo not in uniqIfos: uniqIfos.append(ifo) ifoEpochList=[(x,getRunEpoch(self.triggerTime,x)) for x in uniqIfos] self.createDQbackground(ifoEpochList,self.__backgroundPickle__) #Calculate the binomial 'p' value for the flags in the table. if self.resultList < 1: sys.stderr.write("Aborting tabulate of binomial P\n") os.abort() seenFlags=dict() for ifo,name,version,comment,start,stop in self.resultList: if ifo.strip() not in seenFlags.keys(): seenFlags[ifo]=list() seenFlags[ifo].append(name) for myIfo,flagList in seenFlags.iteritems(): tmpFlags=list() for backgroundTime,backgroundFlags in \ self.__backgroundDict__[myIfo.strip()].iteritems(): tmpFlags.extend([name for ifo,name,ver,com,start,stop in backgroundFlags]) if myIfo not in self.__backgroundResults__.keys(): self.__backgroundResults__[myIfo]=dict() for myFlag in seenFlags[myIfo]: self.__backgroundResults__[myIfo][myFlag]=tmpFlags.count(myFlag)/float(self.__backgroundPoints__) self.__haveBackgroundDict__=True #Return background estimating |
sys.stderr.write("Aborting tabulate of binomial P\n") | sys.stderr.write("Aborting tabulation of binomial P\n") | def estimateDQbackground(self): """ This method looks at the self.resultlist inside the instance. Using this and 1000 generated time stamp it tabulates a ranking of flag prevelance, binomial probability 'p' """ if len(self.resultList) < 1: self.__backgroundResults__=list() self.__backgroundTimesDict__=dict() self.__backgroundDict__=dict() self.__haveBackgroundDict__=bool(False) return #Create DQ background, specify pickle locale to try and load first #Determine which IFOs from DQ listing uniqIfos=list() for ifo,b,c,d,e,f in self.resultList: if ifo not in uniqIfos: uniqIfos.append(ifo) ifoEpochList=[(x,getRunEpoch(self.triggerTime,x)) for x in uniqIfos] self.createDQbackground(ifoEpochList,self.__backgroundPickle__) #Calculate the binomial 'p' value for the flags in the table. if self.resultList < 1: sys.stderr.write("Aborting tabulate of binomial P\n") os.abort() seenFlags=dict() for ifo,name,version,comment,start,stop in self.resultList: if ifo.strip() not in seenFlags.keys(): seenFlags[ifo]=list() seenFlags[ifo].append(name) for myIfo,flagList in seenFlags.iteritems(): tmpFlags=list() for backgroundTime,backgroundFlags in \ self.__backgroundDict__[myIfo.strip()].iteritems(): tmpFlags.extend([name for ifo,name,ver,com,start,stop in backgroundFlags]) if myIfo not in self.__backgroundResults__.keys(): self.__backgroundResults__[myIfo]=dict() for myFlag in seenFlags[myIfo]: self.__backgroundResults__[myIfo][myFlag]=tmpFlags.count(myFlag)/float(self.__backgroundPoints__) self.__haveBackgroundDict__=True #Return background estimating |
for myIfo,flagList in seenFlags.iteritems(): | for myIfo in seenFlags.keys(): | def estimateDQbackground(self): """ This method looks at the self.resultlist inside the instance. Using this and 1000 generated time stamp it tabulates a ranking of flag prevelance, binomial probability 'p' """ if len(self.resultList) < 1: self.__backgroundResults__=list() self.__backgroundTimesDict__=dict() self.__backgroundDict__=dict() self.__haveBackgroundDict__=bool(False) return #Create DQ background, specify pickle locale to try and load first #Determine which IFOs from DQ listing uniqIfos=list() for ifo,b,c,d,e,f in self.resultList: if ifo not in uniqIfos: uniqIfos.append(ifo) ifoEpochList=[(x,getRunEpoch(self.triggerTime,x)) for x in uniqIfos] self.createDQbackground(ifoEpochList,self.__backgroundPickle__) #Calculate the binomial 'p' value for the flags in the table. if self.resultList < 1: sys.stderr.write("Aborting tabulate of binomial P\n") os.abort() seenFlags=dict() for ifo,name,version,comment,start,stop in self.resultList: if ifo.strip() not in seenFlags.keys(): seenFlags[ifo]=list() seenFlags[ifo].append(name) for myIfo,flagList in seenFlags.iteritems(): tmpFlags=list() for backgroundTime,backgroundFlags in \ self.__backgroundDict__[myIfo.strip()].iteritems(): tmpFlags.extend([name for ifo,name,ver,com,start,stop in backgroundFlags]) if myIfo not in self.__backgroundResults__.keys(): self.__backgroundResults__[myIfo]=dict() for myFlag in seenFlags[myIfo]: self.__backgroundResults__[myIfo][myFlag]=tmpFlags.count(myFlag)/float(self.__backgroundPoints__) self.__haveBackgroundDict__=True #Return background estimating |
for backgroundTime,backgroundFlags in \ self.__backgroundDict__[myIfo.strip()].iteritems(): tmpFlags.extend([name for ifo,name,ver,com,start,stop in backgroundFlags]) if myIfo not in self.__backgroundResults__.keys(): self.__backgroundResults__[myIfo]=dict() for myFlag in seenFlags[myIfo]: self.__backgroundResults__[myIfo][myFlag]=tmpFlags.count(myFlag)/float(self.__backgroundPoints__) | if myIfo.strip() not in self.__backgroundDict__.keys(): if myIfo not in self.__backgroundResults__.keys(): self.__backgroundResults__[myIfo]=dict() for outsideFlag in seenFlags[myIfo]: self.__backgroundResults__[myIfo][outsideFlag]=float(-0.0) else: for backgroundTime,backgroundFlags in \ self.__backgroundDict__[myIfo.strip()].iteritems(): tmpFlags.extend([name for ifo,name,ver,com,start,stop in backgroundFlags]) if myIfo not in self.__backgroundResults__.keys(): self.__backgroundResults__[myIfo]=dict() for myFlag in seenFlags[myIfo]: self.__backgroundResults__[myIfo][myFlag]=tmpFlags.count(myFlag)/float(self.__backgroundPoints__) | def estimateDQbackground(self): """ This method looks at the self.resultlist inside the instance. Using this and 1000 generated time stamp it tabulates a ranking of flag prevelance, binomial probability 'p' """ if len(self.resultList) < 1: self.__backgroundResults__=list() self.__backgroundTimesDict__=dict() self.__backgroundDict__=dict() self.__haveBackgroundDict__=bool(False) return #Create DQ background, specify pickle locale to try and load first #Determine which IFOs from DQ listing uniqIfos=list() for ifo,b,c,d,e,f in self.resultList: if ifo not in uniqIfos: uniqIfos.append(ifo) ifoEpochList=[(x,getRunEpoch(self.triggerTime,x)) for x in uniqIfos] self.createDQbackground(ifoEpochList,self.__backgroundPickle__) #Calculate the binomial 'p' value for the flags in the table. if self.resultList < 1: sys.stderr.write("Aborting tabulate of binomial P\n") os.abort() seenFlags=dict() for ifo,name,version,comment,start,stop in self.resultList: if ifo.strip() not in seenFlags.keys(): seenFlags[ifo]=list() seenFlags[ifo].append(name) for myIfo,flagList in seenFlags.iteritems(): tmpFlags=list() for backgroundTime,backgroundFlags in \ self.__backgroundDict__[myIfo.strip()].iteritems(): tmpFlags.extend([name for ifo,name,ver,com,start,stop in backgroundFlags]) if myIfo not in self.__backgroundResults__.keys(): self.__backgroundResults__[myIfo]=dict() for myFlag in seenFlags[myIfo]: self.__backgroundResults__[myIfo][myFlag]=tmpFlags.count(myFlag)/float(self.__backgroundPoints__) self.__haveBackgroundDict__=True #Return background estimating |
tableString+=rowString%(myColor,ifo,name,version,start,offset1,stop,offset2,size) | tableString+=rowString%(myColor,ifo,name,version,\ start,offset1,stop,offset2,\ size,myBackgroundRank,myCategory) | def generateHTMLTable(self,tableType="BOTH"): """ Return a HTML table already formatted using the module MARKUP to keep the HTML tags complient. This method does nothing but return the result of the last call to self.fetchInformation() The flag names associated with LIGO will have links to the channel wiki in them also. Types that will invoke a not everything behaviour are DQ and VETO """ ligo=["L1","H1","H2","V1"] channelWiki="https://ldas-jobs.ligo.caltech.edu/cgi-bin/chanwiki?%s" if self.triggerTime==int(-1): return "" myColor="grey" tableString="<table bgcolor=grey border=1px>" titleString="<tr>" tableEmptyString="<tr bgcolor=%s>"%myColor rowString="<tr bgcolor=%s> " for col in self.__columns__: titleString+="<th>%s</th>"%col rowString+="<td>%s</td>" tableEmptyString+="<td>None</td>" titleString+="</tr>\n" tableEmptyString+="</tr>\n" rowString+="</tr>\n" tableString+=titleString if len(self.resultList) == 0: tableString+=tableEmptyString for ifo,name,version,comment,start,stop in self.resultList: #If we have background information fetch it if self.__haveBackgroundDict__: myBackgroundRank=str("%3.1f"%(100.0*self.__backgroundResults__[ifo][name])).rjust(5) else: myBackgroundRank="None" if self.__havecategories__: myCategory=self.__category__[ifo][name] else: myCategory="None" offset1=start-self.triggerTime offset2=stop-self.triggerTime size=int(stop-start) if (offset1>=0) and (offset2>=0): myColor="green" if (offset1<=0) and (offset2<=0): myColor="yellow" if (offset1<=0) and (offset2>=0): myColor="red" if name.lower().__contains__('science'): myColor="skyblue" if tableType.upper().strip() == "DQ": if not name.upper().startswith("UPV"): tableString+=rowString%(myColor,ifo,name,version,start,offset1,stop,offset2,size) elif tableType.upper().strip() == "VETO": if name.upper().startswith("UPV"): tableString+=rowString%(myColor,ifo,name,version,start,offset1,stop,offset2,size) elif tableType.upper().strip() not in ["VETO","DQ"]: tableString+=rowString%(myColor,ifo,name,version,\ start,offset1,stop,offset2,size,\ myBackgroundRank,myCategory) tableString+="</table>" return tableString |
tableString+=emptyRowString%myColor | tableString+=emptyRowString | def generateMOINMOINTable(self,tableType="BOTH"): """ Return a MOINMOIN table. """ ligo=["L1","H1","H2","V1"] channelWiki="https://ldas-jobs.ligo.caltech.edu/cgi-bin/chanwiki?%s" if self.triggerTime==int(-1): return "" myColor="grey" tableString="" titleString="" emptyRowString="" rowString="" for i,col in enumerate(self.__columns__): if i == 0: titleString+="""||<rowbgcolor="%s"> %s """%(myColor,col) rowString+="""||<rowbgcolor="%s"> %s """ emptyRowString+="""||<rowbgcolor="%s"> None """%myColor else: titleString+="""|| %s """%col rowString+="""|| %s """ emptyRowString+="""|| None """ titleString+="""||\n""" rowString+="""||\n""" emptyRowString+="""||\n""" tableString+=titleString #Extract only DQ row or only VETO rows tmpResultList=list() for myRow in self.resultList: ifo,name,version,comment,start,stop=myRow #Select base on table type if ((tableType.upper() == "DQ") and \ (not name.strip().upper().startswith("UPV"))): tmpResultList.append(myRow) elif ((tableType.upper() == "VETO") and \ (name.strip().upper().startswith("UPV"))): tmpResultList.append(myRow) elif tableType.upper().strip() not in ["VETO","DQ"]: tmpResultList.append(myRow) if len(tmpResultList) == 0: tableString+=emptyRowString%myColor for ifo,name,version,comment,start,stop in tmpResultList: #If we have background information fetch it if self.__haveBackgroundDict__: myBackgroundRank=str("%3.1f"%(100.0*self.__backgroundResults__[ifo][name])).rjust(5) else: myBackgroundRank="None" if self.__havecategories__: myCategory=self.__category__[ifo][name] else: myCategory="None" offset1=start-self.triggerTime offset2=stop-self.triggerTime size=int(stop-start) if (offset1>=0) and (offset2>=0): myColor="green" if (offset1<=0) and (offset2<=0): myColor="yellow" if (offset1<=0) and (offset2>=0): myColor="red" if name.lower().__contains__('science'): myColor="skyblue" tableString+=rowString%(myColor,str(ifo).strip(),name,version,\ start,offset1,stop,offset2,size,\ myBackgroundRank,myCategory) tableString+="\n" return tableString |
myAngle=arcsin(dY/dX) | myAngle=arctan(dY/dX) | def scatterPointer(A=None,B=None): """ Expects two ordered pairs as tuples (x1,y1) and (x2,y2)... Then this defines an angle. From this angle we orient a triangle point and return this as a tuple of requested size. This can be plotted by the scatter plot to point in a particular orientation. The direction right(0,0)->(1,0) is angle 0 radians, then we rotate counter clockwise from there... What is returned in a three element tuple (numsides,style,angle) which can be put into a plot call via marker=X as a **kwarg """ if A == None or B == None: return (3,0,0) if( A != type(tuple()) and len(A) != 2) \ or \ ( B != type(tuple()) and len(B) != 2): return (3,0,0) # # Calculate orientation of triangle # Ang = arcsin(dY/dX) dY=float(B[-1]-A[-1]) dX=float(B[0]-A[0]) if dY>=0 and dX>=0: myAngle=arcsin(dY/dX) elif dY>=0 and dX<0: myAngle=pi-arcsin(dY/dX) elif dY<0 and dX<0: myAngle=arcsin(dY/dX)+pi elif dY<0 and dX>0: myAngle=(2.0*pi)-arcsin(dY/dX) else: myAngle=0 return (3,0,myAngle) |
myAngle=pi-arcsin(dY/dX) | myAngle=pi-arctan(dY/dX) | def scatterPointer(A=None,B=None): """ Expects two ordered pairs as tuples (x1,y1) and (x2,y2)... Then this defines an angle. From this angle we orient a triangle point and return this as a tuple of requested size. This can be plotted by the scatter plot to point in a particular orientation. The direction right(0,0)->(1,0) is angle 0 radians, then we rotate counter clockwise from there... What is returned in a three element tuple (numsides,style,angle) which can be put into a plot call via marker=X as a **kwarg """ if A == None or B == None: return (3,0,0) if( A != type(tuple()) and len(A) != 2) \ or \ ( B != type(tuple()) and len(B) != 2): return (3,0,0) # # Calculate orientation of triangle # Ang = arcsin(dY/dX) dY=float(B[-1]-A[-1]) dX=float(B[0]-A[0]) if dY>=0 and dX>=0: myAngle=arcsin(dY/dX) elif dY>=0 and dX<0: myAngle=pi-arcsin(dY/dX) elif dY<0 and dX<0: myAngle=arcsin(dY/dX)+pi elif dY<0 and dX>0: myAngle=(2.0*pi)-arcsin(dY/dX) else: myAngle=0 return (3,0,myAngle) |
myAngle=arcsin(dY/dX)+pi | myAngle=arctan(dY/dX)+pi | def scatterPointer(A=None,B=None): """ Expects two ordered pairs as tuples (x1,y1) and (x2,y2)... Then this defines an angle. From this angle we orient a triangle point and return this as a tuple of requested size. This can be plotted by the scatter plot to point in a particular orientation. The direction right(0,0)->(1,0) is angle 0 radians, then we rotate counter clockwise from there... What is returned in a three element tuple (numsides,style,angle) which can be put into a plot call via marker=X as a **kwarg """ if A == None or B == None: return (3,0,0) if( A != type(tuple()) and len(A) != 2) \ or \ ( B != type(tuple()) and len(B) != 2): return (3,0,0) # # Calculate orientation of triangle # Ang = arcsin(dY/dX) dY=float(B[-1]-A[-1]) dX=float(B[0]-A[0]) if dY>=0 and dX>=0: myAngle=arcsin(dY/dX) elif dY>=0 and dX<0: myAngle=pi-arcsin(dY/dX) elif dY<0 and dX<0: myAngle=arcsin(dY/dX)+pi elif dY<0 and dX>0: myAngle=(2.0*pi)-arcsin(dY/dX) else: myAngle=0 return (3,0,myAngle) |
myAngle=(2.0*pi)-arcsin(dY/dX) | myAngle=(2.0*pi)-arctan(dY/dX) | def scatterPointer(A=None,B=None): """ Expects two ordered pairs as tuples (x1,y1) and (x2,y2)... Then this defines an angle. From this angle we orient a triangle point and return this as a tuple of requested size. This can be plotted by the scatter plot to point in a particular orientation. The direction right(0,0)->(1,0) is angle 0 radians, then we rotate counter clockwise from there... What is returned in a three element tuple (numsides,style,angle) which can be put into a plot call via marker=X as a **kwarg """ if A == None or B == None: return (3,0,0) if( A != type(tuple()) and len(A) != 2) \ or \ ( B != type(tuple()) and len(B) != 2): return (3,0,0) # # Calculate orientation of triangle # Ang = arcsin(dY/dX) dY=float(B[-1]-A[-1]) dX=float(B[0]-A[0]) if dY>=0 and dX>=0: myAngle=arcsin(dY/dX) elif dY>=0 and dX<0: myAngle=pi-arcsin(dY/dX) elif dY<0 and dX<0: myAngle=arcsin(dY/dX)+pi elif dY<0 and dX>0: myAngle=(2.0*pi)-arcsin(dY/dX) else: myAngle=0 return (3,0,myAngle) |
type="string",default=None,\ | type="string",default="dummy",\ | def convert2DHistTo3ColVectors(myMatrix,xBins=None,yBins=None): """ Create three vectors X,Y,Z for ease of use in functs like contour, and scatter instead of imshow! If [x,y]Bins==None use bin index for X, Y Input matrix assumed square!! """ if xBins==None: xBins=range(len(myMatrix)) if yBins==None: yBins=range(len(myMatrix[0])) if len(xBins)*len(yBins) != myMatrix.size: raise Exception, "Input matrix and bin vectors disagree!" tX=empty(myMatrix.size,type(xBins[0])) tY=empty(myMatrix.size,type(yBins[0])) tZ=empty(myMatrix.size,type(myMatrix[0][0])) myIndex=0 for ii,iVal in enumerate(xBins): for jj,jVal in enumerate(yBins): tX[myIndex]=iVal tY[myIndex]=jVal tZ[myIndex]=myMatrix[ii][jj] myIndex=myIndex+1 return tX,tY,tZ |
origData=dict() origData["beam"]=beamSpigot.getDataStream(beamName,gpsStart,gpsEnd) | origData=dict() | def convert2DHistTo3ColVectors(myMatrix,xBins=None,yBins=None): """ Create three vectors X,Y,Z for ease of use in functs like contour, and scatter instead of imshow! If [x,y]Bins==None use bin index for X, Y Input matrix assumed square!! """ if xBins==None: xBins=range(len(myMatrix)) if yBins==None: yBins=range(len(myMatrix[0])) if len(xBins)*len(yBins) != myMatrix.size: raise Exception, "Input matrix and bin vectors disagree!" tX=empty(myMatrix.size,type(xBins[0])) tY=empty(myMatrix.size,type(yBins[0])) tZ=empty(myMatrix.size,type(myMatrix[0][0])) myIndex=0 for ii,iVal in enumerate(xBins): for jj,jVal in enumerate(yBins): tX[myIndex]=iVal tY[myIndex]=jVal tZ[myIndex]=myMatrix[ii][jj] myIndex=myIndex+1 return tX,tY,tZ |
tmpData=beamSpigot.getDataStream(myLabel[myKey],gpsA,gpsB) mySnipData[myKey]=interp(mySnipData["time"], getTimeStamps(tmpData), tmpData) | if beamName == "dummy" and myKey == "beam": mySnipData[myKey]=ones(size(mySnipData["time"])) else: tmpData=beamSpigot.getDataStream(myLabel[myKey],gpsA,gpsB) mySnipData[myKey]=interp(mySnipData["time"], getTimeStamps(tmpData), tmpData) | def convert2DHistTo3ColVectors(myMatrix,xBins=None,yBins=None): """ Create three vectors X,Y,Z for ease of use in functs like contour, and scatter instead of imshow! If [x,y]Bins==None use bin index for X, Y Input matrix assumed square!! """ if xBins==None: xBins=range(len(myMatrix)) if yBins==None: yBins=range(len(myMatrix[0])) if len(xBins)*len(yBins) != myMatrix.size: raise Exception, "Input matrix and bin vectors disagree!" tX=empty(myMatrix.size,type(xBins[0])) tY=empty(myMatrix.size,type(yBins[0])) tZ=empty(myMatrix.size,type(myMatrix[0][0])) myIndex=0 for ii,iVal in enumerate(xBins): for jj,jVal in enumerate(yBins): tX[myIndex]=iVal tY[myIndex]=jVal tZ[myIndex]=myMatrix[ii][jj] myIndex=myIndex+1 return tX,tY,tZ |
size=50,color='white') | size=starSize,color='white') | def convert2DHistTo3ColVectors(myMatrix,xBins=None,yBins=None): """ Create three vectors X,Y,Z for ease of use in functs like contour, and scatter instead of imshow! If [x,y]Bins==None use bin index for X, Y Input matrix assumed square!! """ if xBins==None: xBins=range(len(myMatrix)) if yBins==None: yBins=range(len(myMatrix[0])) if len(xBins)*len(yBins) != myMatrix.size: raise Exception, "Input matrix and bin vectors disagree!" tX=empty(myMatrix.size,type(xBins[0])) tY=empty(myMatrix.size,type(yBins[0])) tZ=empty(myMatrix.size,type(myMatrix[0][0])) myIndex=0 for ii,iVal in enumerate(xBins): for jj,jVal in enumerate(yBins): tX[myIndex]=iVal tY[myIndex]=jVal tZ[myIndex]=myMatrix[ii][jj] myIndex=myIndex+1 return tX,tY,tZ |
facecolor=None,\ | | def convert2DHistTo3ColVectors(myMatrix,xBins=None,yBins=None): """ Create three vectors X,Y,Z for ease of use in functs like contour, and scatter instead of imshow! If [x,y]Bins==None use bin index for X, Y Input matrix assumed square!! """ if xBins==None: xBins=range(len(myMatrix)) if yBins==None: yBins=range(len(myMatrix[0])) if len(xBins)*len(yBins) != myMatrix.size: raise Exception, "Input matrix and bin vectors disagree!" tX=empty(myMatrix.size,type(xBins[0])) tY=empty(myMatrix.size,type(yBins[0])) tZ=empty(myMatrix.size,type(myMatrix[0][0])) myIndex=0 for ii,iVal in enumerate(xBins): for jj,jVal in enumerate(yBins): tX[myIndex]=iVal tY[myIndex]=jVal tZ[myIndex]=myMatrix[ii][jj] myIndex=myIndex+1 return tX,tY,tZ |
for offset_vector in self.offset_vectors: | for offset_vector in cafepacker.offset_vectors: | def split_bins(cafepacker, extentlimit): """ Split bins of stored in CafePacker until each bin has an extent no longer than extentlimit. """ # # loop overall the bins in cafepacker.bins. we pop items out of # cafepacker.bins and append new ones to the end so need a while loop # checking the extent of each bin in cafepacker.bins until all bins are # done being split # idx = 0 while idx < len(cafepacker.bins): if abs(cafepacker.bins[idx].extent) <= extentlimit: # # bin doesn't need splitting so move to next # idx += 1 continue # # split this bin so pop it out of the list # bigbin = cafepacker.bins.pop(idx) # # calculate the central time of the union of all the input # files in the bin # splittime = lsctables.LIGOTimeGPS(bigbin.extent[0] + (bigbin.extent[1] - bigbin.extent[0])/2) # # split the segmentlistdict at this time # splitseglistdict = segments.segmentlistdict() for key in bigbin.size.keys(): splitseglistdict[key] = segments.segmentlist([segments.segment(-segments.infinity(),splittime)]) # # create bins for the first and second halves # bin1 = LALCacheBin() bin1.size = bigbin.size & splitseglistdict bin1.extent = bigbin.extent & splitseglistdict.values()[0][0] bin2 = LALCacheBin() bin2.size = bigbin.size & ~splitseglistdict bin2.extent = bigbin.extent & (~splitseglistdict.values()[0])[0] # # remove unused keys from the smaller bins' segmentlistdicts # newsize = segments.segmentlistdict() for key in bin1.size.keys(): if len(bin1.size[key]): newsize[key] = bin1.size[key] bin1.size = newsize newsize = segments.segmentlistdict() for key in bin2.size.keys(): if len(bin2.size[key]): newsize[key] = bin2.size[key] bin2.size = newsize # # find which of the objects in bigbin.objects intersect the two # smaller bins # for cache in bigbin.objects: thisseglistdict = cache.to_segmentlistdict() coinc1 = 0 coinc2 = 0 for offset_vector in self.offset_vectors: # # loop over offset vectors updating the smaller # bins and the object we are checking # bin1.size.offsets.update(offset_vector) bin2.size.offsets.update(offset_vector) thisseglistdict.offsets.update(offset_vector) if not coinc1 and bin1.size.is_coincident(thisseglistdict, keys = offset_vector.keys()): # # object is coicident with bin1 # coinc1 = 1 bin1.objects.append(cache) if not coinc2 and bin2.size.is_coincident(thisseglistdict, keys = offset_vector.keys()): # # object is coincident with bin2 # coinc2 = 1 bin2.objects.append(cache) # # end loop if known to be coincident with both # bins # if coinc1 and coinc2: break # # clear offsets applied to object # thisseglistdict.offsets.clear() # # clear offsets applied to bins # bin1.size.offsets.clear() bin2.size.offsets.clear() # # append smaller bins to list of bins # cafepacker.bins.append(bin1) cafepacker.bins.append(bin2) # # do not increment idx as we popped the large bin out of # cafepacker.bins # # # sort the bins in cafepacker # cafepacker.bins.sort() return cafepacker |
num_days = int(round((duration)/86400)) | num_days = int(round((int(duration))/86400)) | def daily_ihope_cache(start,end,ifo,cluster=None): """ Generates cache list of daily ihope INSPIRAL xml files for give ifo and clustering (None,'30ms','100ms', or '16s') between start and end time """ #== daily path ihope_daily_path = '/archive/home/cbc/ihope_daily' #== set clustering tag if cluster==None or cluster.upper()=='UNCLUSTERED': cluster_tag='UNCLUSTERED' elif cluster.upper()=='100MS': cluster_tag='100MILLISEC_CLUSTERED' elif cluster.upper()=='30MS': cluster_tag='30MILLISEC_CLUSTERED' elif cluster.upper()=='16S': cluster_tag='16SEC_CLUSTERED' #== work out days day_start = int(GetCommandOutput('tconvert `tconvert '+str(start)+\ ' -f %D`')[0]) duration = end-start num_days = int(round((duration)/86400)) #== generate array of days day_end = day_start+num_days*86400 while day_end<end: day_end+=86400 days = numpy.arange(day_start,day_end,86400) cache=[] #== loop over days gathering files for day in days: date = GetCommandOutput('tconvert '+str(day)+' -f %Y%m%d')[0]\ .replace('\n','') day_path = os.path.join(ihope_daily_path,date[0:6],date) ls_cmd = 'ls '+day_path+'/'+ifo+'-INSPIRAL_'+cluster_tag+\ '*.xml.gz' cache_out = Popen(ls_cmd,shell=True,stdout=PIPE,stderr=PIPE) for line in cache_out.stdout.readlines(): trig_start = int(line.split('.xml')[0].split('-')[-2]) duration = int(line.split('.xml')[0].split('-')[-2]) if start<=trig_start<end or start<(trig_start+duration)<=end: cache.append(line.replace('\n','')) cache_out.stdout.close() return cache |
cellString=cellString+" %s "%self.linkedRemoteImage(thumbs[ifo][myOmegaIndex], | cellString=cellString+" %s "%self.linkedRemoteImage(thumbs[ifo][myOmegaIndexT], | def insertAnalyzeQscanTable(self, images=None, thumbs=None, indexes=None, imagesAQ=None, thumbsAQ=None, indexesAQ=None, channelRanks=None): """ Insert a multiple IFO table with 5 cols with the AQ underneath this depends on the numer of IFO keys in indexes dictionary. The option channelRanks is not required to change the plot order! Channel ranks is dict similar in shape to other args. Cells are shaded light grey if they are top N channels and that the trigger is greater in value that 0.5. Assuming the channelRanks dict is not empty. """ #channelRanks={'ifo':[[chan,Zvalue,rank]...[chan,Zvalue,rank]],'ifo2':[[ ]]} #Review the keys for Qscans and analyzeQscans. if not images.keys()==thumbs.keys()==indexes.keys(): sys.stderr.write("Error: Keys for Qscan tables creations inconsistent!\n") if not imagesAQ.keys()==thumbsAQ.keys()==indexesAQ.keys(): sys.stderr.write("Error: Keys for Qscan tables creations inconsistent!\n") |
cellString=cellString+" %s "%self.linkedRemoteImage(thumbsAQ[ifo][myAQIndex], | cellString=cellString+" %s "%self.linkedRemoteImage(thumbsAQ[ifo][myAQIndexT], | def insertAnalyzeQscanTable(self, images=None, thumbs=None, indexes=None, imagesAQ=None, thumbsAQ=None, indexesAQ=None, channelRanks=None): """ Insert a multiple IFO table with 5 cols with the AQ underneath this depends on the numer of IFO keys in indexes dictionary. The option channelRanks is not required to change the plot order! Channel ranks is dict similar in shape to other args. Cells are shaded light grey if they are top N channels and that the trigger is greater in value that 0.5. Assuming the channelRanks dict is not empty. """ #channelRanks={'ifo':[[chan,Zvalue,rank]...[chan,Zvalue,rank]],'ifo2':[[ ]]} #Review the keys for Qscans and analyzeQscans. if not images.keys()==thumbs.keys()==indexes.keys(): sys.stderr.write("Error: Keys for Qscan tables creations inconsistent!\n") if not imagesAQ.keys()==thumbsAQ.keys()==indexesAQ.keys(): sys.stderr.write("Error: Keys for Qscan tables creations inconsistent!\n") |
self.FAR = 99 | self.FAR = -1 | def __init__(self): """ here are all the things we need """ #start with data needed for every coinc self.ifo_list = [] self.ifo_coincs = [] self.snr = {} self.gps = {} self.eff_distances = {} self.mass1 = {} self.mass2 = {} self.time = None self.FAR = 99 #this stuff is only needed for injections self.is_injection = False self.latitude_inj = None self.longitude_inj = None self.mass1_inj = None self.mass2_inj = None self.distance_inj = None self.eff_distances_inj = {} |
| def set_FAR(self,FAR_per_day): self.FAR=FAR_per_day | def set_inj_params(self,lat,lon,m1,m2,dist,effDs): """ set all of the injection parameters at once """ self.latitude_inj = lat self.longitude_inj = lon self.mass1_inj = m1 self.mass2_inj = m2 self.distance_inj = dist self.eff_distances_inj = effDs |
| if ctab[0].false_alarm_rate is not None: coinc.set_FAR(ctab[0].false_alarm_rate) | def get_coincs_from_coinctable(self,files): """ read data from coinc tables (xml format) FIXME: currently assumes one coinc per file!!! """ for file in files: coinc = CoincData() xmldoc = utils.load_filename(file) sngltab = tab.get_table(xmldoc,lsctables.SnglInspiralTable.tableName) coinc.set_snr(dict((row.ifo, row.snr) for row in sngltab)) coinc.set_gps(dict((row.ifo, LIGOTimeGPS(row.get_end())) for row in sngltab)) coinc.set_effDs(dict((row.ifo,row.eff_distance) for row in sngltab)) coinc.set_masses(dict((row.ifo, row.mass1) for row in sngltab), \ dict((row.ifo, row.mass2) for row in sngltab)) ctab = tab.get_table(xmldoc,lsctables.CoincInspiralTable.tableName) coinc.set_ifos(list(ctab[0].get_ifos())) if ctab[0].false_alarm_rate is not None: coinc.set_FAR(ctab[0].false_alarm_rate) |
while os.path.isfile(base_name): base_name = base_name + '_' + str(counter) + ext | while os.path.isfile(name): name = base_name + '_' + str(counter) + ext | def get_unique_filename(name): """ use this to avoid name collisions """ counter = 1 base_name, ext = os.path.splitext(name) while os.path.isfile(base_name): base_name = base_name + '_' + str(counter) + ext counter += 1 return base_name + ext |
return base_name + ext | return name | def get_unique_filename(name): """ use this to avoid name collisions """ counter = 1 base_name, ext = os.path.splitext(name) while os.path.isfile(base_name): base_name = base_name + '_' + str(counter) + ext counter += 1 return base_name + ext |
def append_process(xmldoc, comment = None, force = None, ds_sq_threshold = None, save_small_coincs = None, vetoes_name = None, verbose = None): | def append_process(xmldoc, comment = None, force = None, ds_sq_threshold = None, save_small_coincs = None, vetoes_name = None, coinc_end_time_segment = None, verbose = None): | def append_process(xmldoc, comment = None, force = None, ds_sq_threshold = None, save_small_coincs = None, vetoes_name = None, verbose = None): process = llwapp.append_process(xmldoc, program = process_program_name, version = __version__, cvs_repository = u"lscsoft", cvs_entry_time = __date__, comment = comment) params = [ (u"--ds-sq-threshold", u"real_8", ds_sq_threshold) ] if comment is not None: params += [(u"--comment", u"lstring", comment)] if force is not None: params += [(u"--force", None, None)] if save_small_coincs is not None: params += [(u"--save-small-coincs", None, None)] if vetoes_name is not None: params += [(u"--vetoes-name", u"lstring", vetoes_name)] if verbose is not None: params += [(u"--verbose", None, None)] ligolw_process.append_process_params(xmldoc, process, params) return process |
def append_coinc(self, process_id, time_slide_id, coinc_def_id, events): | def append_coinc(self, process_id, node, coinc_def_id, events): | def append_coinc(self, process_id, time_slide_id, coinc_def_id, events): # # populate the coinc_event and coinc_event_map tables # |
| time_slide_id = node.time_slide_id | def append_coinc(self, process_id, time_slide_id, coinc_def_id, events): # # populate the coinc_event and coinc_event_map tables # |
tstart = events[0].get_start() + self.time_slide_index[time_slide_id][events[0].ifo] coinc_ringdown.set_start(tstart + sum(event.snr * float(event.get_start() + self.time_slide_index[time_slide_id][event.ifo] - tstart) for event in events) / sum(event.snr for event in events)) | tstart = coinc_ringdown_start(events, node.offset_vector) coinc_ringdown.set_start(tstart) | def append_coinc(self, process_id, time_slide_id, coinc_def_id, events): # # populate the coinc_event and coinc_event_map tables # |
tstart = coinc_ringdown.get_start() | | def append_coinc(self, process_id, time_slide_id, coinc_def_id, events): # # populate the coinc_event and coinc_event_map tables # |
coinc_tables.append_coinc(process_id, node.time_slide_id, coinc_def_id, ntuple) | coinc_tables.append_coinc(process_id, node, coinc_def_id, ntuple) | def ligolw_rinca( xmldoc, process_id, EventListType, CoincTables, coinc_definer_row, event_comparefunc, thresholds, ntuple_comparefunc = lambda events, offset_vector: False, small_coincs = False, veto_segments = None, verbose = False |
def __init__(self, config=None): | def __init__(self, configfile=None): | def __init__(self, config=None): cp = ConfigParser.ConfigParser() self.cp = cp self.time_now = "_".join([str(i) for i in time_method.gmtime()[0:6]]) self.ini_file=self.time_now + ".ini" home_base = home_dirs() # CONDOR SECTION NEEDED BY THINGS IN INSPIRAL.PY cp.add_section("condor") cp.set("condor","datafind",self.which("ligo_data_find")) cp.set("condor","inspiral",self.which("lalapps_inspiral")) cp.set("condor","chia", self.which("lalapps_coherent_inspiral")) cp.set("condor","universe","standard") # SECTIONS TO SHUT UP WARNINGS cp.add_section("inspiral") cp.add_section("data") # DATAFIND SECTION cp.add_section("datafind") |
if config: | if configfile: | def __init__(self, config=None): cp = ConfigParser.ConfigParser() self.cp = cp self.time_now = "_".join([str(i) for i in time_method.gmtime()[0:6]]) self.ini_file=self.time_now + ".ini" home_base = home_dirs() # CONDOR SECTION NEEDED BY THINGS IN INSPIRAL.PY cp.add_section("condor") cp.set("condor","datafind",self.which("ligo_data_find")) cp.set("condor","inspiral",self.which("lalapps_inspiral")) cp.set("condor","chia", self.which("lalapps_coherent_inspiral")) cp.set("condor","universe","standard") # SECTIONS TO SHUT UP WARNINGS cp.add_section("inspiral") cp.add_section("data") # DATAFIND SECTION cp.add_section("datafind") |
user_cp.read(config) | user_cp.read(configfile) | def __init__(self, config=None): cp = ConfigParser.ConfigParser() self.cp = cp self.time_now = "_".join([str(i) for i in time_method.gmtime()[0:6]]) self.ini_file=self.time_now + ".ini" home_base = home_dirs() # CONDOR SECTION NEEDED BY THINGS IN INSPIRAL.PY cp.add_section("condor") cp.set("condor","datafind",self.which("ligo_data_find")) cp.set("condor","inspiral",self.which("lalapps_inspiral")) cp.set("condor","chia", self.which("lalapps_coherent_inspiral")) cp.set("condor","universe","standard") # SECTIONS TO SHUT UP WARNINGS cp.add_section("inspiral") cp.add_section("data") # DATAFIND SECTION cp.add_section("datafind") |
if user_cp: self.overwrite_config(user_cp) | if user_cp: self.overwrite_config(user_cp,cp) | def __init__(self, config=None): cp = ConfigParser.ConfigParser() self.cp = cp self.time_now = "_".join([str(i) for i in time_method.gmtime()[0:6]]) self.ini_file=self.time_now + ".ini" home_base = home_dirs() # CONDOR SECTION NEEDED BY THINGS IN INSPIRAL.PY cp.add_section("condor") cp.set("condor","datafind",self.which("ligo_data_find")) cp.set("condor","inspiral",self.which("lalapps_inspiral")) cp.set("condor","chia", self.which("lalapps_coherent_inspiral")) cp.set("condor","universe","standard") # SECTIONS TO SHUT UP WARNINGS cp.add_section("inspiral") cp.add_section("data") # DATAFIND SECTION cp.add_section("datafind") |
def overwrite_config(self,config): | def overwrite_config(self,config,cp): | def overwrite_config(self,config): for section in config.sections(): if not cp.has_section(section): cp.add_section(section) for option in config.options(section): cp.set(section,option,config.get(section,option)) |
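The rows above rename the constructor argument from config to configfile (avoiding a clash with the parser it builds) and make overwrite_config() take its target parser explicitly instead of relying on a cp from an enclosing scope. The merge pattern, shown self-contained (ConfigParser is the Python 2 module name, configparser on Python 3; the .ini path is illustrative):

    import ConfigParser  # 'import configparser' on Python 3

    def overwrite_config(user_cp, cp):
        # Copy every section/option from the user's parser into the
        # default parser, overriding any preset value.
        for section in user_cp.sections():
            if not cp.has_section(section):
                cp.add_section(section)
            for option in user_cp.options(section):
                cp.set(section, option, user_cp.get(section, option))

    cp = ConfigParser.ConfigParser()       # parser pre-filled with defaults
    user_cp = ConfigParser.ConfigParser()
    user_cp.read("followup.ini")           # illustrative path; the script passes configfile
    overwrite_config(user_cp, cp)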
if dY>=0 and dX>=0: | if dY>=0 and dX>0: | def scatterPointer(A=None,B=None): """ Expects two ordered pairs as tuples (x1,y1) and (x2,y2)... Then this defines an angle. From this angle we orient a triangle point and return this as a tuple of requested size. This can be plotted by the scatter plot to point in a particular orientation. The direction right(0,0)->(1,0) is angle 0 radians, then we rotate counter clockwise from there... What is returned in a three element tuple (numsides,style,angle) which can be put into a plot call via marker=X as a **kwarg """ if A == None or B == None: return (3,0,0) if( A != type(tuple()) and len(A) != 2) \ or \ ( B != type(tuple()) and len(B) != 2): return (3,0,0) # # Calculate orientation of triangle # Ang = arcsin(dY/dX) dY=float(B[-1]-A[-1]) dX=float(B[0]-A[0]) if dY>=0 and dX>=0: myAngle=arctan(dY/dX) elif dY>=0 and dX<0: myAngle=pi-arctan(dY/dX) elif dY<0 and dX<0: myAngle=arctan(dY/dX)+pi elif dY<0 and dX>0: myAngle=(2.0*pi)-arctan(dY/dX) else: myAngle=0 return (3,0,myAngle) |
elif dY>=0 and dX<0: | elif dY>0 and dX<0: | def scatterPointer(A=None,B=None): """ Expects two ordered pairs as tuples (x1,y1) and (x2,y2)... Then this defines an angle. From this angle we orient a triangle point and return this as a tuple of requested size. This can be plotted by the scatter plot to point in a particular orientation. The direction right(0,0)->(1,0) is angle 0 radians, then we rotate counter clockwise from there... What is returned in a three element tuple (numsides,style,angle) which can be put into a plot call via marker=X as a **kwarg """ if A == None or B == None: return (3,0,0) if( A != type(tuple()) and len(A) != 2) \ or \ ( B != type(tuple()) and len(B) != 2): return (3,0,0) # # Calculate orientation of triangle # Ang = arcsin(dY/dX) dY=float(B[-1]-A[-1]) dX=float(B[0]-A[0]) if dY>=0 and dX>=0: myAngle=arctan(dY/dX) elif dY>=0 and dX<0: myAngle=pi-arctan(dY/dX) elif dY<0 and dX<0: myAngle=arctan(dY/dX)+pi elif dY<0 and dX>0: myAngle=(2.0*pi)-arctan(dY/dX) else: myAngle=0 return (3,0,myAngle) |
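The two rows above adjust quadrant boundaries in scatterPointer()'s hand-rolled angle computation; in particular, the removed dX>=0 branch divides by zero when dX == 0. math.atan2 covers all four quadrants, including the dX == 0 axis, in a single call, which is the usual way to avoid this class of boundary bug. An alternative sketch (pointer_angle is an illustrative name, not the project's function):

    from math import atan2, pi

    def pointer_angle(A, B):
        # Angle of the vector A->B, counter-clockwise from the +x axis,
        # in [0, 2*pi); atan2 needs no per-quadrant branching.
        dY = float(B[1] - A[1])
        dX = float(B[0] - A[0])
        return atan2(dY, dX) % (2.0 * pi)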
qscanList = stfu_pipe.getParamsFromCache(eval("opts.qscan_cache_" + type_string),type) | qscanList = getParamsFromCache(eval("opts.qscan_cache_" + type_string),type) | def getQscanTable(opts,type): summary = readSummaryFiles() if "FG" in type: type_string = "foreground" elif "BG" in type: type_string = "background" if eval("opts.qscan_cache_" + type_string): qscanList = stfu_pipe.getParamsFromCache(eval("opts.qscan_cache_" + type_string),type) else: try: inputPath = eval("opts." + type_string + "_input_path") qscanList = parseDirectoryList(inputPath) except: print >> sys.stderr, "cannot get input path for " + type_string print >> sys.stderr, "specify at least one of the following options:" print >> sys.stderr, "--qscan-cache, --" + type_string + "-input-path" sys.exit(1) table = summary.parseQscanList(qscanList) # perform a sanity check if not (len(table['channel_name']) == len(table['qscan_dir'])): print >> sys.stderr, "the length of channel_name does not match the length of qscan_dir in the " + type_string + " table" print >> sys.stderr, "check for data corruption in the qscan summary files in the " + type_string + " table" sys.exit(1) return table |
candidates_path = stfu_pipe.getParamsFromCache(opts.qscan_cache_foreground,type,ifo,time_string) | candidates_path = getParamsFromCache(opts.qscan_cache_foreground,type,ifo,time_string) | def plotHistogram(chan,opts,distribution,histoList,binList,percentiles=None,candidate=None,candidateRank=None): parameter = distribution.split('-')[0] step = binList[1] - binList[0] counter = sum(histoList) xlimInf = min(binList) if candidate and not parameter == 'dt': xlimSup = max(max(binList),candidate + 2.0) else: xlimSup = max(binList) pylab.figure() # semilogy(bins + step/2., z_dist+0.0001, 'r^',markerfacecolor="b",markersize=12) # plot(bins + step/2., z_dist) pylab.bar(binList[0:len(binList)-1], histoList, width=step, bottom=0) if percentiles: line1 = pylab.axvline(x=percentiles[0], ymin=0, ymax=max(histoList), color='g', label='50th percentile', linewidth=2, linestyle='--') line2 = pylab.axvline(x=percentiles[1], ymin=0, ymax=max(histoList), color='m', label='95th percentile', linewidth=2, linestyle='--') line3 = pylab.axvline(x=percentiles[2], ymin=0, ymax=max(histoList), color='r', label='99th percentile', linewidth=2, linestyle='--') if parameter == 'dt': pylab.axvline(x=-percentiles[0], ymin=0, ymax=max(histoList), color='g', label='50th percentile', linewidth=2, linestyle='--') pylab.axvline(x=-percentiles[1], ymin=0, ymax=max(histoList), color='m', label='95th percentile', linewidth=2, linestyle='--') pylab.axvline(x=-percentiles[2], ymin=0, ymax=max(histoList), color='r', label='99th percentile', linewidth=2, linestyle='--') if candidate: line0 = pylab.axvline(x=candidate, ymin=0, ymax=max(histoList), color='k', label='candidate value (%s percentile)' % (candidateRank), linewidth=2, linestyle='-') if percentiles and candidate: pylab.legend((line0,line1,line2,line3),('candidate','50%','95%','99%'),loc = 'upper right') if percentiles and not candidate: pylab.legend((line1,line2,line3),('50%','95%','99%'),loc = 'upper right') pylab.xlim(xlimInf,xlimSup) pylab.xlabel(parameter + ' value',size='large') # ylabel(r'#',size='x-large') pylab.grid() pylab.title("Histogram of the " + parameter + " value for " + chan + ', Statistics = ' + str(counter)) figText = chan.split(':')[0] + '_' + chan.split(':')[1] + '_' + parameter + '_dist' figFileName = InspiralUtils.set_figure_name(opts,figText) InspiralUtils.savefig_pylal(figFileName) pylab.close() return figFileName |
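Both rows above drop the stfu_pipe. prefix so the module-local getParamsFromCache() is called. Incidentally, the surrounding code reads options with eval("opts.qscan_cache_" + type_string); getattr does the same dynamic attribute lookup without evaluating a string as code. A self-contained sketch (the _Opts class and its value stand in for the script's real optparse result):

    class _Opts(object):                     # stand-in for the parsed options
        qscan_cache_foreground = "fg.cache"  # illustrative value

    opts = _Opts()
    type_string = "foreground"
    # equivalent to eval("opts.qscan_cache_" + type_string), minus eval
    cache_opt = getattr(opts, "qscan_cache_" + type_string)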
(home_dirs()+"/romain/followupbackgrounds/omega/VSR1b/background/background_937800015_947260815.cache",935798415,999999999,"V1") | (home_dirs()+"/romain/followupbackgrounds/omega/VSR2b/background/background_937800015_947260815.cache",935798415,999999999,"V1") | def figure_out_cache(time,ifo): cacheList=( (home_dirs()+"/romain/followupbackgrounds/omega/S5/background/background_815155213_875232014.cache",815155213,875232014,"H1H2L1"), (home_dirs()+"/romain/followupbackgrounds/omega/S6a/background/background_931035296_935798415.cache",931035296,935798415,"H1L1"), (home_dirs()+"/romain/followupbackgrounds/omega/S6b/background/background_937800015_944587815.cache",935798415,999999999,"H1L1"), (home_dirs()+"/romain/followupbackgrounds/omega/VSR1b/background/background_937800015_947260815.cache",935798415,999999999,"V1") ) foundCache = "" for cacheFile,start,stop,ifos in cacheList: if ((start<=time) and (time<stop) and ifo in ifos): foundCache = cacheFile break if 'phy.syr.edu' in get_hostname(): foundCache = foundCache.replace("romain","rgouaty") if foundCache == "": print ifo, time, " not found in method stfu_pipe.figure_out_cache" else: if not os.path.isfile(foundCache): print "file " + foundCache + " not found" foundCache = "" return foundCache |
if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): | if not "PEM" in myFile.upper() and not "SEI" in myFile.upper(): | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 sys.stdout.write("Available via browser for wiki upload at %s\n"\ %(file2URL.convert(wikiFilename))) # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print 
mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") wikiPage.putText("UTC Time of trigger :%s"%(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ 
"*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") # imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if "SEI" in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Search for analyzeQscan files timeString=str(float(sngl.time)).replace(".","_") indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) #Load of analyzeQscan z file if available zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only PEM channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if "PEM" in chan[0] and not "SEI" in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if "PEM" in chan[0] and not "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only AUX channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls: indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueDict[sngl.ifo]=list() for zFile in 
fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if not "PEM" in chan[0] or not "SEI" in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not "PEM" in chan[0] or not "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") 
if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit 
Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
if not "PEM" in chan[0] or not "SEI" in chan[0]: | if not "PEM" in chan[0] and not "SEI" in chan[0]: | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 sys.stdout.write("Available via browser for wiki upload at %s\n"\ %(file2URL.convert(wikiFilename))) # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: 
wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") wikiPage.putText("UTC Time of trigger :%s"%(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ 
"*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") # imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*SEI*_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if "SEI" in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Search for analyzeQscan files timeString=str(float(sngl.time)).replace(".","_") indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString)) thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_SEI*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString)) #Load of analyzeQscan z file if available zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only PEM channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if "PEM" in chan[0] and not "SEI" in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if "PEM" in myFile.upper() and not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if "PEM" in chan[0] and not "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only AUX channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls: indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDict[sngl.ifo].append(myFile) zValueDict[sngl.ifo]=list() for zFile in 
fnmatch.filter(filesOmega,\ "*/%s_RDS_*/%s/*summary.txt"%(sngl.ifo,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if not "PEM" in chan[0] or not "SEI" in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not "PEM" in myFile.upper() or not "SEI" in myFile.upper(): thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not "PEM" in chan[0] or not "SEI" in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") 
if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit 
Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
if param == 'channel-name': self.inputIfo = value[0:2] | def __init__(self, dag, job, cp, opts, sngl, frame_cache, chia, tag, p_nodes=[]): |
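This row concerns a line that derived self.inputIfo by slicing the first two characters off a channel-name option value. A hypothetical illustration of that slice (the channel name below is a made-up stand-in):

# Hypothetical stand-in value; only the slice is the point.
value = "H1:LDAS-STRAIN"   # a channel-name option value
inputIfo = value[0:2]      # -> "H1"
print(inputIfo)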
|
y.segment_def_cdb = x.creator_db AND \ NOT (y.start_time > %s OR %s > y.end_time) ) \ | y.segment_def_cdb = x.creator_db) AND \ NOT (segment.start_time > %s OR %s > segment.end_time) \ ORDER BY segment.start_time,segment_definer.segment_def_id,segment_definer.version \ | def getSciSegs(ifo=None, gpsStart=None, gpsStop=None, cut=bool(False), serverURL=None, segName="DMT-SCIENCE", seglenmin=None, segpading=0 |
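The corrected join keeps a segment only when it intersects the requested window: NOT (start_time > gpsStop OR gpsStart > end_time) is the standard closed-interval overlap test. A small runnable restatement with illustrative names (not the real schema objects):

# The SQL keeps a segment exactly when [seg_start, seg_end] intersects
# [gps_start, gps_stop]; variable names are illustrative only.
def overlaps(seg_start, seg_end, gps_start, gps_stop):
    return not (seg_start > gps_stop or gps_start > seg_end)

assert overlaps(100, 200, 150, 160)       # window inside the segment
assert overlaps(100, 200, 190, 300)       # partial overlap at the edge
assert not overlaps(100, 200, 201, 300)   # disjoint: filtered out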
def figure_out_site(ifo): siteDico = {"H1":"LHO","H2":"LHO","L1":"LLO","V1":"Virgo"} if ifo in siteDico: return siteDico[ifo] else: print >> sys.stderr, "ifo " + ifo + " is not defined in siteDico dictionary" sys.exit(1) | def figure_out_site(ifo): siteDico = {"H1":"LHO","H2":"LHO","L1":"LLO","V1":"Virgo"} if ifo in siteDico: return siteDico[ifo] else: print >> sys.stderr, "ifo " + ifo + " is not defined in siteDico dictionary" sys.exit(1) | def figure_out_site(ifo): siteDico = {"H1":"LHO","H2":"LHO","L1":"LLO","V1":"Virgo"} if ifo in siteDico: return siteDico[ifo] else: print >> sys.stderr, "ifo " + ifo + " is not defined in siteDico dictionary" sys.exit(1)
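figure_out_site() resolves an interferometer prefix through a small dict and exits the whole process on unknown input. A hedged alternative sketch, not part of stfu_pipe, that raises instead so library callers can decide how to recover:

# Hypothetical variant, not the stfu_pipe implementation: same mapping,
# but unknown ifos raise instead of calling sys.exit(1).
SITE_BY_IFO = {"H1": "LHO", "H2": "LHO", "L1": "LLO", "V1": "Virgo"}

def figure_out_site_strict(ifo):
    try:
        return SITE_BY_IFO[ifo]
    except KeyError:
        raise ValueError("ifo %s is not defined in siteDico dictionary" % ifo)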
|
preString = "omega/" + science_run(time).upper() + "/background/" + figure_out_site(ifo) | preString = "omega/" + science_run(time).upper() + "/background" | def __init__(self, dag, job, cp, opts, time, ifo, frame_cache, p_nodes=[], type="ht", variety="fg"): """ """ pipeline.CondorDAGNode.__init__(self,job) |
preString = "omega/" + science_run(time).upper() + "/background/" + figure_out_site(ifo) | preString = "omega/" + science_run(time).upper() + "/background" | def __init__(self, dag, job, cp, opts, time, ifo, p_nodes=[], type="ht", variety="fg"): |
self.output_cache = lal.CacheEntry(ifo, job.name.replace("remoteScan_"+job.tag_base+".sh","wpipeline").upper(), segments.segment(float(time), float(time)), "file://localhost/"+self.output_path+"/"+str(time)) | self.output_cache = lal.CacheEntry(ifo, job.name.replace("remoteScan_"+job.dir+"_"+job.tag_base+".sh","wpipeline").upper(), segments.segment(float(time), float(time)), "file://localhost/"+self.output_path+"/"+str(time)) | def __init__(self, dag, job, cp, opts, time, ifo, p_nodes=[], type="ht", variety="fg"): |
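With job names of the form remoteScan_<dir>_<tag>.sh, the old replace target ("remoteScan_" + job.tag_base + ".sh") never matched, so the CacheEntry description kept the raw script name; the fix inserts job.dir into the target. A string-only sketch with made-up name parts (glue.lal.CacheEntry itself takes observatory, description, segment, URL):

# Made-up stand-ins for job.name, job.dir and job.tag_base.
name, jdir, tag = "remoteScan_omega_ht.sh", "omega", "ht"
old = name.replace("remoteScan_" + tag + ".sh", "wpipeline").upper()
new = name.replace("remoteScan_" + jdir + "_" + tag + ".sh", "wpipeline").upper()
print(old)   # REMOTESCAN_OMEGA_HT.SH -- the old target never matched
print(new)   # WPIPELINE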
segment_definer AS x WHERE x.name = segment_definer.name )\ | segment_definer AS x, segment AS y \ WHERE x.name = segment_definer.name AND \ x.segment_def_id = y.segment_def_id AND \ y.segment_def_cdb = x.creator_db AND \ NOT (y.start_time > %s OR %s > y.end_time) ) \ | def getSciSegs(ifo=None, gpsStart=None, gpsStop=None, cut=bool(False), serverURL=None, segName="DMT-SCIENCE", seglenmin=None, segpading=0 |
sqlQuery=query01%(segName,ifo,gpsStop,gpsStart) | sqlQuery=query01%(segName,ifo,gpsStop,gpsStart,gpsStop,gpsStart) | def getSciSegs(ifo=None, gpsStart=None, gpsStop=None, cut=bool(False), serverURL=None, segName="DMT-SCIENCE", seglenmin=None, segpading=0 |
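The reworked query01 from the previous row carries six %s placeholders, so the old four-element argument tuple would fail outright. A minimal demonstration of the %-formatting arity rule (the query string here is a toy, not the real one):

# Six placeholders demand six arguments; the short tuple raises TypeError.
query = ("name=%s AND ifo=%s AND NOT (a > %s OR %s > b) "
         "AND NOT (c > %s OR %s > d)")
try:
    query % ("DMT-SCIENCE", "H1", 900000100, 900000000)
except TypeError as exc:
    print(exc)   # not enough arguments for format string
print(query % ("DMT-SCIENCE", "H1", 900000100, 900000000,
               900000100, 900000000))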
if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype | frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time) | def get_RDS_R_L1(self): """ """ tmpList=list() for sngl in self.coinc.sngls: #Determine file type frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype myMaskIndex="*/%s/*/%s/index.html"%(frametype,sngl.time) myMaskPNG="*/%s/*/%s/*.png"%(frametype,sngl.time) myMaskSummary="*/%s/*/%s/*summary.txt"%(frametype,sngl.time) tmpList.extend(fnmatch.filter(self.fsys,myMaskIndex)) tmpList.extend(fnmatch.filter(self.fsys,myMaskPNG)) tmpList.extend(fnmatch.filter(self.fsys,myMaskSummary)) return tmpList |
if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype | frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time) | def get_RDS_R_L1_SEIS(self): """ """ tmpList=list() for sngl in self.coinc.sngls: #Determine file type frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype frametype = frametype + "_SEIS" myMaskIndex="*/%s*/%s/*.html"%(frametype,sngl.time) myMaskPNG="*/%s*/%s/*.png"%(frametype,sngl.time) myMaskSummary="*/%s*/%s/*summary.txt"%(frametype,sngl.time) tmpList.extend(fnmatch.filter(self.fsys,myMaskIndex)) tmpList.extend(fnmatch.filter(self.fsys,myMaskPNG)) tmpList.extend(fnmatch.filter(self.fsys,myMaskSummary)) return tmpList |
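These two rows, and the prepareChecklist row that follows, replace the inline ifo-prefix check with a shared __patchFrameTypeDef__ helper whose body does not appear here. A hypothetical minimal reconstruction that reproduces the old inline behavior (the time argument suggests the real helper may also rename frame types by GPS epoch):

# Hypothetical reconstruction; the real stfu_pipe helper is not shown in
# these rows and may do more (time hints at epoch-dependent renaming).
def __patchFrameTypeDef__(frametype, ifo, time):
    if not frametype:
        return frametype
    if ifo not in frametype:   # same substring test the old inline code used
        return ifo + "_" + frametype
    return frametype

print(__patchFrameTypeDef__("RDS_R_L1", "H1", 942061200))     # H1_RDS_R_L1
print(__patchFrameTypeDef__("H1_RDS_R_L1", "H1", 942061200))  # unchanged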
if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype | frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time) | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 sys.stdout.write("Available via browser for wiki upload at %s\n"\ %(file2URL.convert(wikiFilename))) # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: 
print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") wikiPage.putText("UTC Time of trigger :%s"%(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) 
thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") # imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankey = "Em_SE" else: chankey = "SEI" indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened.png"%\ (frametype,sngl.time,chankey)) thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time,chankey)) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Search for analyzeQscan files timeString=str(float(sngl.time)).replace(".","_") indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString,chankey)) thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString,chankey)) #Load of analyzeQscan z file if available zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only PEM channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ 
(frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only AUX channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" 
indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile or not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile or not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective 
Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
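Throughout the prepareChecklist context above, file discovery is one pattern applied over and over: fnmatch.filter() runs a shell-style glob against an in-memory listing, so a single directory walk can be queried many times. A self-contained sketch with made-up paths:

import fnmatch

# Made-up listing standing in for wikiFileFinder's cached file-system walk.
fsys = [
    "/scans/H1_RDS_R_L1/942061200/index.html",
    "/scans/H1_RDS_R_L1/942061200/summary.txt",
    "/scans/L1_RDS_R_L1/942061200/summary.txt",
]
mask = "*/%s_RDS_*/%s/*summary.txt" % ("H1", 942061200)
print(fnmatch.filter(fsys, mask))
# ['/scans/H1_RDS_R_L1/942061200/summary.txt']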
sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) | sys.stdout.write("Omega scan summary file not found or seen empty for %s. ...continuing...\n"%sngl.ifo) | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 sys.stdout.write("Available via browser for wiki upload at %s\n"\ %(file2URL.convert(wikiFilename))) # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import 
problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") wikiPage.putText("UTC Time of trigger :%s"%(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ 
"*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") # imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankey = "Em_SE" else: chankey = "SEI" indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened.png"%\ (frametype,sngl.time,chankey)) thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time,chankey)) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Search for analyzeQscan files timeString=str(float(sngl.time)).replace(".","_") indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString,chankey)) thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString,chankey)) #Load of analyzeQscan z file if available zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only PEM channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ 
(frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only AUX channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') if not sngl.ifo in frametype: frametype = sngl.ifo + "_" + frametype if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" 
indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile or not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile or not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective 
Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
pud=os.path.abspath(publication_directory) minFileCount=1 for key,fileList in allSources.items(): if len(fileList) > minFileCount: commonPath=os.path.commonprefix(fileList) for singleFile in fileList: if not singleFile.__contains__(pud): myDestFile=singleFile.replace(commonPath,myDestPath+"/") if not os.path.exists(os.path.split(myDestFile)[0]): os.makedirs(os.path.split(myDestFile)[0]) shutil.copy2(singleFile,myDestFile) else: sys.stdout.write("Warning: Scanning (%s) found %s files.\n"%\ (key,len(fileList))) | def __init__(self,type=None,ifo=None,time=None,snr=None,chisqr=None,mass1=None,mass2=None,mchirp=None): """ """ self.type=str(type) self.ifo=str(ifo) self.time=float(time) self.snr=float(snr) self.chisqr=float(chisqr) self.mass1=float(mass1) self.mass2=float(mass2) self.mchirp=float(mchirp) |
|
self.output_cache = lal.CacheEntry(ifonames, job.name.upper(), segments.segment(p_nodes[0].start_time,p_nodes[0].end_time), "file://localhost/"+job.output_path+"/"+self.id) | self.output_cache = lal.CacheEntry(ifonames, job.name.upper(), segments.segment(p_nodes[0].start_time,p_nodes[0].end_time), "file://localhost/"+job.outputPath+"/"+self.id) | def __init__(self,job,coinc,cp,opts,dag,ifo,ifonames,p_nodes): pipeline.CondorDAGNode.__init__(self,job) |
self.add_var_opt("findVetoes",cp.get('findVetoes','blind')) | self.add_var_opt("blind",cp.get('findVetoes','blind')) | def __init__(self, dag, job, cp, opts, coincEvent=None): """ """ self.__conditionalLoadDefaults__(findVetosNode.defaults,cp) pipeline.CondorDAGNode.__init__(self,job) self.add_var_opt("trigger-time",coincEvent.time) #Output filename oFilename="%s-findVetos_%s_%s.wiki"%(coincEvent.instruments, coincEvent.ifos, coincEvent.time) self.add_var_opt("output-file",job.outputPath+'/DataProducts/'+oFilename) self.add_var_opt("segment-url",cp.get('findVetoes','segment-url')) self.add_var_opt("output-format",cp.get('findVetoes','output-format')) self.add_var_opt("window",cp.get('findVetoes','window')) if cp.has_option('findVetoes','estimate-background'): self.add_var_opt("estimate-background",cp.get('findVetoes','estimate-background')) if cp.has_option('findVetoes','background-location'): self.add_var_opt("background-location",cp.get('findVetoes','background-location')) if cp.has_option('findVetoes','blind'): self.add_var_opt("findVetoes",cp.get('findVetoes','blind')) |
a list of files or an empty list if nothing found | a list of files or an empty list if nothing found. It uses the pathing information from the files passed via cacheListing to aid in our filesystem search. | def __readCache__(self,cacheListing=list()): """ Simple method to read in a cache or list of cache files and return a list of files or an empty list if nothing found """ #Open the cache entry and search for those entries fileListing=list() for entry in cacheListing: #Cache files listed themselves comment out following line fileListing.append(entry) fileListing.extend([x.rstrip("\n") for x in file(entry)]) #PATCH START to add in the z distribution files for fname in fileListing: if ".html" in fname: zFile=fname.replace(".html",".txt") fileListing.append(zFile) #PATCH END finalList=list() for thisFile in fileListing: #Search filesystem for file full path finalList.extend(fnmatch.filter(self.fsys,"*%s"%thisFile)) #Look for potential matching thumbnails if thisFile.endswith(".png"): finalList.extend(fnmatch.filter(self.fsys,"*%s"%thisFile.replace(".png","?thumb?png"))) if len(finalList) < 1: return list() else: return finalList
fileListing=list() | finalList=list() | def __readCache__(self,cacheListing=list()): """ Simple method to read in a cache or list of cache files and return a list of files or an empty list if nothing found """ #Open the cache entry and search for those entries fileListing=list() for entry in cacheListing: #Cache files listed themselves comment out following line fileListing.append(entry) fileListing.extend([x.rstrip("\n") for x in file(entry)]) #PATCH START to add in the z distribution files for fname in fileListing: if ".html" in fname: zFile=fname.replace(".html",".txt") fileListing.append(zFile) #PATCH END finalList=list() for thisFile in fileListing: #Search filesystem for file full path finalList.extend(fnmatch.filter(self.fsys,"*%s"%thisFile)) #Look for potential matching thumbnails if thisFile.endswith(".png"): finalList.extend(fnmatch.filter(self.fsys,"*%s"%thisFile.replace(".png","?thumb?png"))) if len(finalList) < 1: return list() else: return finalList
finalList=list() for thisFile in fileListing: finalList.extend(fnmatch.filter(self.fsys,"*%s"%thisFile)) if thisFile.endswith(".png"): finalList.extend(fnmatch.filter(self.fsys,"*%s"%thisFile.replace(".png","?thumb?png"))) if len(finalList) < 1: return list() else: return finalList | pathingInfo=os.path.dirname(entry) for thisFile in fileListing: finalList.extend(fnmatch.filter(self.fsys,"*%s*%s"%(pathingInfo,thisFile))) if thisFile.endswith(".png"): finalList.extend(fnmatch.filter(self.fsys,"*%s"%thisFile.replace(".png","?thumb?png"))) return finalList | def __readCache__(self,cacheListing=list()): """ Simple method to read in a cache or list of cache files and return a list of files or an empty list if nothing found """ #Open the cache entry and search for those entries fileListing=list() for entry in cacheListing: #Cache files listed themselves comment out following line fileListing.append(entry) fileListing.extend([x.rstrip("\n") for x in file(entry)]) #PATCH START to add in the z distribution files for fname in fileListing: if ".html" in fname: zFile=fname.replace(".html",".txt") fileListing.append(zFile) #PATCH END finalList=list() for thisFile in fileListing: #Search filesystem for file full path finalList.extend(fnmatch.filter(self.fsys,"*%s"%thisFile)) #Look for potential matching thumbnails if thisFile.endswith(".png"): finalList.extend(fnmatch.filter(self.fsys,"*%s"%thisFile.replace(".png","?thumb?png"))) if len(finalList) < 1: return list() else: return finalList
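The three hunks above rewrite __readCache__ so that filesystem matches are narrowed by the cache file's own directory. A self-contained sketch of that os.path.dirname plus fnmatch.filter pattern, with a made-up file list standing in for self.fsys:

# Sketch of the pathing-aware match introduced by the fix; the paths
# below are hypothetical stand-ins for the flat list held in self.fsys.
import os, fnmatch

fsys = ['/data/runA/H1-scan/index.html',
        '/data/runB/H1-scan/index.html']
entry = '/data/runA/H1-scan.cache'
pathingInfo = os.path.dirname(entry)    # '/data/runA'
wanted = 'index.html'
# "*<dir>*<file>" keeps only matches that live under the cache's directory
hits = fnmatch.filter(fsys, "*%s*%s" % (pathingInfo, wanted))
print hits                              # ['/data/runA/H1-scan/index.html']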
myCacheMask="*/%s-analyseQscan_%s_%s*_seis_rds*.cache"%(sngl.ifo,sngl.ifo,timeString) | myCacheMask="*%s*/%s-analyseQscan_%s_%s*_seis_rds*.cache"%\ (self.coinc.type,sngl.ifo,sngl.ifo,timeString) | def get_analyzeQscan_SEIS(self): """ This seeks out the html and png files associated with SEIS result of an analyzeQscan job. """ cacheList=list() cacheFiles=list() for sngl in self.coinc.sngls: timeString=str(float(sngl.time)).replace(".","_") myCacheMask="*/%s-analyseQscan_%s_%s*_seis_rds*.cache"%(sngl.ifo,sngl.ifo,timeString) #Read the cache file or files cacheList.extend(fnmatch.filter(self.fsys,myCacheMask)) cacheFiles=self.__readCache__(cacheList) return cacheFiles |
myCacheMask="*/%s-analyseQscan_%s_%s_rds*.cache"%(sngl.ifo,sngl.ifo,timeString) | myCacheMask="*%s*/%s-analyseQscan_%s_%s_rds*.cache"%\ (self.coint.type,sngl.ifo,sngl.ifo,timeString) | def get_analyzeQscan_RDS(self): """ """ #analyseQscan.py_FG_RDS_full_data/H1-analyseQscan_H1_931176926_116_rds-unspecified-gpstime.cache cacheList=list() cacheFiles=list() for sngl in self.coinc.sngls: timeString=str(float(sngl.time)).replace(".","_") myCacheMask="*/%s-analyseQscan_%s_%s_rds*.cache"%(sngl.ifo,sngl.ifo,timeString) #Ignore the files with seis_rds in them for x in fnmatch.filter(self.fsys,myCacheMask): if not x.__contains__('seis_rds'): cacheList.append(x) #Read the cache file or files cacheFiles=self.__readCache__(cacheList) return cacheFiles |
myCacheMask="*/%s-analyseQscan_%s_%s*_ht*.cache"%(sngl.ifo,sngl.ifo,timeString) | myCacheMask="*%s*/%s-analyseQscan_%s_%s*_ht*.cache"\ %(self.coinc.type,sngl.ifo,sngl.ifo,timeString) | def get_analyzeQscan_HT(self): """ """ #analyseQscan.py_FG_HT_full_data/H1-analyseQscan_H1_931176926_116_ht-unspecified-gpstime.cache cacheList=list() cacheFiles=list() for sngl in self.coinc.sngls: timeString=str(float(sngl.time)).replace(".","_") myCacheMask="*/%s-analyseQscan_%s_%s*_ht*.cache"%(sngl.ifo,sngl.ifo,timeString) cacheList.extend(fnmatch.filter(self.fsys,myCacheMask)) #Read the cache file or files cacheFiles=self.__readCache__(cacheList) return cacheFiles |
if injection: | if injection is not None and injectionconfidence is not None and injection_area is not None: | def cbcBayesPostProc(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None,bayesfactornoise=None,bayesfactorcoherent=None): """ This is a demonstration script for using the functionality/data structures contained in pylal.bayespputils . It will produce a webpage from a file containing posterior samples generated by the parameter estimation codes with 1D/2D plots and stats from the marginal posteriors for each parameter/set of parameters. """ if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1) # if outdir is None: print "You must specify an output directory." exit(1) if not os.path.isdir(outdir): os.makedirs(outdir) # commonOutputFileObj=open(data[0]) #Select injections using tc +/- 0.1s if it exists or eventnum from the injection file if injfile: import itertools injections = SimInspiralUtils.ReadSimInspiralFromFiles([injfile]) if(eventnum is not None): if(len(injections)<eventnum): print "Error: You asked for event %d, but %s contains only %d injections" %(eventnum,injfile,len(injections)) sys.exit(1) else: injection=injections[eventnum] else: if(len(injections)<1): print 'Warning: Cannot find injection with end time %f' %(means[2]) else: injection = itertools.ifilter(lambda a: abs(a.get_end() - means[2]) < 0.1, injections).next() ## Load Bayes factors ## # Add Bayes factor information to summary file # if bayesfactornoise is not None: bfile=open(bayesfactornoise,'r') BSN=bfile.read() bfile.close() print 'BSN: %s'%BSN if bayesfactorcoherent is not None: bfile=open(bayesfactorcoherent,'r') BCI=bfile.read() bfile.close() print 'BCI: %s'%BCI #Create an instance of the posterior class using the posterior values loaded #from the file and any injection information (if given). 
pos = bppu.Posterior(commonOutputFileObj,SimInspiralTableEntry=injection) if ('mc' in pos.names or 'mchirp' in pos.names) and \ 'eta' in pos.names and \ ('mass1' not in pos.names or 'm1' not in pos.names) and\ ('m2' not in pos.names or 'm2' not in pos.names): if 'mc' in pos.names: mchirp_name='mc' else: mchirp_name='mchirp' if injection: inj_mass1,inj_mass2=bppu.mc2ms(injection.mchirp,injection.eta) mass1_samps,mass2_samps=bppu.mc2ms(pos[mchirp_name].samples,pos['eta'].samples) mass1_pos=bppu.OneDPosterior('m1',mass1_samps,injected_value=inj_mass1) mass2_pos=bppu.OneDPosterior('m2',mass2_samps,injected_value=inj_mass2) pos.append(mass1_pos) pos.append(mass2_pos) ##Print some summary stats for the user...## #Number of samples print "Number of posterior samples: %i"%len(pos) # Means print 'Means:' print str(pos.means) #Median print 'Median:' print str(pos.medians) #maxL print 'maxL:' max_pos,max_pos_co=pos.maxL print max_pos_co #==================================================================# #Create web page #==================================================================# html=bppu.htmlPage('Posterior PDFs') #Create a section for meta-data/run information html_meta=html.add_section('Summary') html_meta.p('Produced from '+str(len(pos))+' posterior samples.') html_meta.p('Samples read from %s'%(data[0])) #Create a section for model selection results (if they exist) if bayesfactornoise is not None: html_model=html.add_section('Model selection') html_model.p('log Bayes factor ( coherent vs gaussian noise) = %s, Bayes factor=%f'%(BSN,exp(float(BSN)))) if bayesfactorcoherent is not None: html_model.p('log Bayes factor ( coherent vs incoherent OR noise ) = %s, Bayes factor=%f'%(BCI,exp(float(BCI)))) #Create a section for summary statistics html_stats=html.add_section('Summary statistics') html_stats.write(str(pos)) #==================================================================# #Generate sky map #==================================================================# #If sky resolution parameter has been specified try and create sky map... skyreses=None sky_injection_cl=None if skyres is not None and 'ra' in pos.names and 'dec' in pos.names: #Greedy bin sky samples (ra,dec) into a grid on the sky which preserves #? top_ranked_sky_pixels,sky_injection_cl,skyreses,injection_area=bppu.greedy_bin_sky(pos,skyres,confidence_levels) print "BCI for sky area:" print skyreses #Create sky map in outdir bppu.plot_sky_map(top_ranked_sky_pixels,outdir) #Create a web page section for sky localization results/plots html_sky=html.add_section('Sky Localization') if injection: if sky_injection_cl: html_sky.p('Injection found at confidence interval %f in sky location'%(sky_injection_cl)) else: html_sky.p('Injection not found in posterior bins in sky location!') html_sky.write('<img width="35%" src="skymap.png"/>') if skyres is not None: html_sky_write='<table border="1"><tr><th>Confidence region</th><th>size (sq. deg)</th></tr>' fracs=skyreses.keys() fracs.sort() skysizes=[skyreses[frac] for frac in fracs] for frac,skysize in zip(fracs,skysizes): html_sky_write+=('<tr><td>%f</td><td>%f</td></tr>'%(frac,skysize)) html_sky_write+=('</table>') html_sky.write(html_sky_write) #==================================================================# #2D posteriors #==================================================================# #Loop over parameter pairs in twoDGreedyMenu and bin the sample pairs #using a greedy algorithm . 
The ranked pixels (toppoints) are used #to plot 2D histograms and evaluate Bayesian confidence intervals. #Make a folder for the 2D kde plots margdir=os.path.join(outdir,'2Dkde') if not os.path.isdir(margdir): os.makedirs(margdir) twobinsdir=os.path.join(outdir,'2Dbins') if not os.path.isdir(twobinsdir): os.makedirs(twobinsdir) #Add a section to the webpage for a table of the confidence interval #results. html_tcig=html.add_section('2D confidence intervals (greedy binning)') #Generate the top part of the table html_tcig_write='<table width="100%" border="1"><tr><th/>' confidence_levels.sort() for cl in confidence_levels: html_tcig_write+='<th>%f</th>'%cl if injection: html_tcig_write+='<th>Injection Confidence Level</th>' html_tcig_write+='<th>Injection Confidence Interval</th>' html_tcig_write+='</tr>' #= Add a section for a table of 2D marginal PDFs (kde) html_tcmp=html.add_section('2D Marginal PDFs') html_tcmp.br() #Table matter html_tcmp_write='<table border="1" width="100%">' row_count=0 for par1_name,par2_name in twoDGreedyMenu: par1_name=par1_name.lower() par2_name=par2_name.lower() print "Binning %s-%s to determine confidence levels ..."%(par1_name,par2_name) try: pos[par1_name.lower()] except KeyError: print "No input chain for %s, skipping binning."%par1_name continue try: pos[par2_name.lower()] except KeyError: print "No input chain for %s, skipping binning."%par2_name continue #Bin sizes try: par1_bin=GreedyRes[par1_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par1_name,par1_name,par2_name) continue try: par2_bin=GreedyRes[par2_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par2_name,par1_name,par2_name) continue #Form greedy binning input structure greedy2Params={par1_name:par1_bin,par2_name:par2_bin} #Greedy bin the posterior samples toppoints,injection_cl,reses,injection_area=\ bppu.greedy_bin_two_param(pos,greedy2Params,confidence_levels) print "BCI %s-%s:"%(par1_name,par2_name) print reses #Generate new BCI html table row BCItableline='<tr><td>%s-%s</td>'%(par1_name,par2_name) cls=reses.keys() cls.sort() for cl in cls: BCItableline+='<td>%f</td>'%reses[cl] if injection is not None and injection_cl is not None: BCItableline+='<td>%f</td>'%injection_cl BCItableline+='<td>%f</td>'%injection_area BCItableline+='</tr>' #Append new table line to section html html_tcig_write+=BCItableline #= Plot 2D histograms of greedily binned points =# #greedy2PlotFig=bppu.plot_two_param_greedy_bins(np.array(toppoints),pos,greedy2Params) #greedy2PlotFig.savefig(os.path.join(twobinsdir,'%s-%s_greedy2.png'%(par1_name,par2_name))) #= Generate 2D kde plots =# print 'Generating %s-%s plot'%(par1_name,par2_name) par1_pos=pos[par1_name].samples par2_pos=pos[par2_name].samples if (size(np.unique(par1_pos))<2 or size(np.unique(par2_pos))<2): continue plot2DkdeParams={par1_name:50,par2_name:50} myfig=bppu.plot_two_param_kde(pos,plot2DkdeParams) twoDKdePath=os.path.join(margdir,par1_name+'-'+par2_name+'_2Dkernel.png') if row_count==0: html_tcmp_write+='<tr>' html_tcmp_write+='<td width="30%"><img width="100%" src="'+twoDKdePath+'"/></td>' row_count+=1 if row_count==3: html_tcmp_write+='</tr>' row_count=0 myfig.savefig(twoDKdePath) #Finish off the BCI table and write it into the etree html_tcig_write+='</table>' html_tcig.write(html_tcig_write) #Finish off the 2D kde plot table while row_count!=0: html_tcmp_write+='<td/>' row_count+=1 if row_count==3: row_count=0 html_tcmp_write+='</tr>' html_tcmp_write+='</table>' 
html_tcmp.write(html_tcmp_write) #Add a link to all plots html_tcmp.br() html_tcmp.a("2D/",'All 2D Marginal PDFs') html_tcmp.hr() #==================================================================# #1D posteriors #==================================================================# #Loop over each parameter and determine the contigious and greedy #confidence levels and some statistics. #Add section for 1D confidence intervals html_ogci=html.add_section('1D confidence intervals (greedy binning)') #Generate the top part of the table html_ogci_write='<table width="100%" border="1"><tr><th/>' confidence_levels.sort() for cl in confidence_levels: html_ogci_write+='<th>%f</th>'%cl if injection: html_ogci_write+='<th>Injection Confidence Level</th>' html_ogci_write+='<th>Injection Confidence Interval</th>' html_ogci_write+='</tr>' #Add section for 1D marginal PDFs and sample plots html_ompdf=html.add_section('1D marginal posterior PDFs') html_ompdf.br() #Table matter html_ompdf_write= '<table><tr><th>Histogram and Kernel Density Estimate</th><th>Samples used</th></tr>' onepdfdir=os.path.join(outdir,'1Dpdf') if not os.path.isdir(onepdfdir): os.makedirs(onepdfdir) sampsdir=os.path.join(outdir,'1Dsamps') if not os.path.isdir(sampsdir): os.makedirs(sampsdir) for par_name in oneDMenu: par_name=par_name.lower() print "Binning %s to determine confidence levels ..."%par_name try: pos[par_name.lower()] except KeyError: print "No input chain for %s, skipping binning."%par_name continue try: par_bin=GreedyRes[par_name] except KeyError: print "Bin size is not set for %s, skipping binning."%par_name continue binParams={par_name:par_bin} toppoints,injectionconfidence,reses,injection_area=bppu.greedy_bin_one_param(pos,binParams,confidence_levels) oneDContCL,oneDContInj = bppu.contigious_interval_one_param(pos,binParams,confidence_levels) #Generate new BCI html table row BCItableline='<tr><td>%s</td>'%(par_name) cls=reses.keys() cls.sort() for cl in cls: BCItableline+='<td>%f</td>'%reses[cl] if injection: BCItableline+='<td>%f</td>'%injectionconfidence BCItableline+='<td>%f</td>'%injection_area BCItableline+='</tr>' #Append new table line to section html html_ogci_write+=BCItableline #Generate 1D histogram/kde plots print "Generating 1D plot for %s."%par_name oneDPDFParams={par_name:50} rbins,plotFig=bppu.plot_one_param_pdf(pos,oneDPDFParams) figname=par_name+'.png' oneDplotPath=os.path.join(onepdfdir,figname) plotFig.savefig(oneDplotPath) if rbins: print "r of injected value of %s (bins) = %f"%(par_name, rbins) ##Produce plot of raw samples myfig=plt.figure(figsize=(4,3.5),dpi=80) pos_samps=pos[par_name].samples plt.plot(pos_samps,'.',figure=myfig) injpar=pos[par_name].injval if injpar: if min(pos_samps)<injpar and max(pos_samps)>injpar: plt.plot([0,len(pos_samps)],[injpar,injpar],'r-.') myfig.savefig(os.path.join(sampsdir,figname.replace('.png','_samps.png'))) html_ompdf_write+='<tr><td><img src="1Dpdf/'+figname+'"/></td><td><img src="1Dsamps/'+figname.replace('.png','_samps.png')+'"/></td></tr>' html_ompdf_write+='</table>' html_ompdf.write(html_ompdf_write) html_ogci_write+='</table>' html_ogci.write(html_ogci_write) html_ogci.hr() html_ogci.br() html_ompdf.hr() html_ompdf.br() html_footer=html.add_section('') html_footer.p('Produced using cbcBayesPostProc.py at '+strftime("%Y-%m-%d %H:%M:%S")+' .') html_footer.p(git_version.verbose_msg) #Save results page resultspage=open(os.path.join(outdir,'posplots.html'),'w') resultspage.write(str(html)) # Save posterior samples too... 
posfilename=os.path.join(outdir,'posterior_samples.dat') posfile=open(posfilename,'w') input_file=open(data[0]) posfile.write(input_file.read()) # posfilename2=os.path.join(outdir,'posterior_samples2.dat') pos.write_to_file(posfilename2) #Close files input_file.close() posfile.close() resultspage.close() |
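The post-processing above derives m1/m2 posteriors from the chirp mass and eta chains via bppu.mc2ms. A standalone sketch of that standard inversion, assuming the usual conventions mc = M*eta**(3/5) and eta = m1*m2/M**2 rather than pylal's exact implementation:

# Standalone sketch of the chirp-mass inversion performed by bppu.mc2ms.
import numpy as np

def mc2ms(mc, eta):
    # total mass from chirp mass, then split it using eta
    mtot = mc * eta**(-3. / 5.)
    fac = np.sqrt(1. - 4. * eta)        # requires eta <= 0.25
    m1 = 0.5 * mtot * (1. + fac)
    m2 = 0.5 * mtot * (1. - fac)
    return m1, m2

# an equal-mass ~1.4/1.4 Msun binary has mc ~ 1.219, eta = 0.25
m1, m2 = mc2ms(np.array([1.219]), np.array([0.25]))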
np.savetxt('ranked_sky_pixels',np.column_stack([np.asarray(toppoints)[:,0:1],np.asarray(toppoints)[:,1],np.asarray(toppoints)[:,3]])) | np.savetxt(os.path.join(outdir,'ranked_sky_pixels.dat'),np.column_stack([np.asarray(toppoints)[:,0:1],np.asarray(toppoints)[:,1],np.asarray(toppoints)[:,3]])) | def plotSkyMap(skypos,skyres,sky_injpoint,confidence_levels,outdir): from mpl_toolkits.basemap import Basemap from pylal import skylocutils np.seterr(under='ignore') skypoints=np.array(skylocutils.gridsky(float(skyres))) skycarts=map(lambda s: pol2cart(s[1],s[0]),skypoints) skyinjectionconfidence=None shist=skyhist_cart(np.array(skycarts),skypos) #shist=skyhist_cart(skycarts,list(pos)) bins=skycarts # Find the bin of the injection if available injbin=None if sky_injpoint: injhist=skyhist_cart_slow(skycarts,np.array([sky_injpoint])) injbin=injhist.tolist().index(1) print 'Found injection in bin %d with co-ordinates %f,%f .'%(injbin,skypoints[injbin,0],skypoints[injbin,1]) (skyinjectionconfidence,toppoints,skyreses)=calculateConfidenceLevels(shist,skypoints,injbin,float(skyres),confidence_levels,len(skypos)) if injbin and skyinjectionconfidence: i=list(np.nonzero(np.asarray(toppoints)[:,2]==injbin))[0] min_sky_area_containing_injection=float(skyres)*float(skyres)*i print 'Minimum sky area containing injection point = %f square degrees'%min_sky_area_containing_injection myfig=plt.figure() plt.clf() m=Basemap(projection='moll',lon_0=180.0,lat_0=0.0) plx,ply=m(np.asarray(toppoints)[::-1,1]*57.296,np.asarray(toppoints)[::-1,0]*57.296) cnlevel=[1-tp for tp in np.asarray(toppoints)[::-1,3]] plt.scatter(plx,ply,s=5,c=cnlevel,faceted=False,cmap=mpl_cm.jet) m.drawmapboundary() m.drawparallels(np.arange(-90.,120.,45.),labels=[1,0,0,0],labelstyle='+/-') # draw parallels m.drawmeridians(np.arange(0.,360.,90.),labels=[0,0,0,1],labelstyle='+/-') # draw meridians plt.title("Skymap") # add a title plt.colorbar() myfig.savefig(os.path.join(outdir,'skymap.png')) plt.clf() #Save skypoints np.savetxt('ranked_sky_pixels',np.column_stack([np.asarray(toppoints)[:,0:1],np.asarray(toppoints)[:,1],np.asarray(toppoints)[:,3]])) return skyreses,skyinjectionconfidence |
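The fix above writes the ranked sky pixels under outdir instead of the current working directory. A minimal sketch of the np.column_stack/np.savetxt pattern with made-up pixel data:

# Sketch of writing ranked sky pixels into the output directory; the
# array contents here are illustrative only.
import os
import numpy as np

outdir = '/tmp/posplots'
if not os.path.isdir(outdir):
    os.makedirs(outdir)
toppoints = np.array([[0.1, 0.2, 7, 0.68],
                      [0.3, 0.4, 2, 0.95]])
np.savetxt(os.path.join(outdir, 'ranked_sky_pixels.dat'),
           np.column_stack([toppoints[:, 0:1], toppoints[:, 1], toppoints[:, 3]]))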
self.add_var_arg(p_nodes[1].name_output_file) | self.add_var_arg(p_nodes[1].outputFileName) | def __init__(self, dag, job, cp, opts, ifo, time, p_nodes=[], type=""): |
self.add_var_opt('server','ldr-bologna.phys.uwm.edu') if not(cp.has_option('fu-remote-jobs','remote-jobs') and job.name in cp.get('fu-remote-jobs','remote-jobs') and cp.has_option('fu-remote-jobs','remote-ifos') and ifo in cp.get('fu-remote-jobs','remote-ifos')) or if opts.do_remoteScans: | self.add_var_arg('--server ldr-bologna.phys.uwm.edu') if not(cp.has_option('fu-remote-jobs','remote-jobs') and job.name in cp.get('fu-remote-jobs','remote-jobs') and cp.has_option('fu-remote-jobs','remote-ifos') and ifo in cp.get('fu-remote-jobs','remote-ifos')) or opts.do_remoteScans: | def __init__(self, dag, job, cp, opts, ifo, sngl=None, qscan=False, trigger_time=None, data_type="hoft", p_nodes=[]): |
return map(lambda (a,b,c): detector_threshold(min_threshold,ifos,a,b,c,sensitivities), zip(RA,dec,gps_time)) | return map(lambda (a,b,c): detector_thresholds(min_threshold,ifos,a,b,c,sensitivities), zip(RA,dec,gps_time)) | def detector_thresholds(min_threshold, ifos, RA, dec, gps_time, sensitivities=None): """ Return a dictionary of sensitivity thresholds for each detector, based on a minimum threshold of min_threshold in the least sensitive one, for a source at position (RA,dec) specified in radians at time gps_time. Specifying a dictionary of sensitivities allows one to weight also by the relative SNR of a reference system in each detector to handle different noise curves. """ # Recurse if multiple RA, dec and GPS times are specified if type(gps_time)!=float or type(RA)!=float or type(dec)!=float: assert len(gps_time)==len(RA),len(gps_time)==len(dec) return map(lambda (a,b,c): detector_threshold(min_threshold,ifos,a,b,c,sensitivities), zip(RA,dec,gps_time)) from pylal import antenna # Sensitivities specifies relative SNRs of a reference signal (BNS) if sensitivities is None: sensitivities={} for det in ifos: sensitivities[det]=1.0 else: assert len(ifos)==len(sensitivities) # Normalise sensitivities minsens=min(sensitivities.values()) for det in ifos: sensitivities[det]/=minsens resps={} threshs={} # Make a dictionary of average responses for det in ifos: resps[det]=antenna.response(gps_time,RA,dec,0,0,'radians',det)[2] worst_resp=min(resps.values()) # Assuming that lowest threshold is in worst detector, return thresholds for det in ifos: threshs[det]=min_threshold*(resps[det]/worst_resp)*sensitivities[det] return threshs
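The bug fixed above is a misspelled self-call (detector_threshold for detector_thresholds) in the vectorised branch. A toy version of the recurse-on-sequences pattern, unrelated to real antenna responses:

# Toy recurse-on-sequences pattern: the vectorised branch must call the
# function itself by its correct name.
def threshold(min_threshold, x):
    if not isinstance(x, float):
        # sequence input: apply the scalar version elementwise
        return map(lambda xi: threshold(min_threshold, xi), x)
    return max(min_threshold, x)

print threshold(5.0, 3.0)          # 5.0
print threshold(5.0, [3.0, 7.0])   # [5.0, 7.0]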
for n, (coinc_event_id, time_slide_id) in enumerate(database.connection.cursor().execute("SELECT coinc_event_id, time_slide_id FROM coinc_event WHERE coinc_def_id == ?", (database.bb_definer_id,))): if verbose and not n % 200: print >>sys.stderr, "\t%.1f%%\r" % (100.0 * n / n_coincs), events = map(database.sngl_burst_table.row_from_cols, cursor.execute("""SELECT * FROM coinc_burst_map WHERE coinc_event_id == ?""", (coinc_event_id,))) cursor.execute(""" | def get_likelihood_ratio(coinc_event_id, time_slide_id, row_from_cols = database.sngl_burst_table.row_from_cols, cursor = cursor, time_slides = time_slides, params_func = params_func, params_func_extra_args = params_func_extra_args): events = map(row_from_cols, cursor.execute("""SELECT * FROM coinc_burst_map WHERE coinc_event_id == ?""", (coinc_event_id,))) return likelihood_ratio(params_func, events, time_slides[ilwd.get_ilwdchar(time_slide_id)], *params_func_extra_args) database.connection.create_function("likelihood_ratio", 2, get_likelihood_ratio) database.connection.cursor().execute(""" | def ligolw_burca2(database, likelihood_ratio, params_func, verbose = False, params_func_extra_args = ()): """ Assigns likelihood ratio values to excess power coincidences. database is pylal.SnglBurstUtils.CoincDatabase instance, and likelihood_ratio is a LikelihoodRatio class instance. """ # # Find document parts. # time_slides = database.time_slide_table.as_dict() # # Iterate over all coincs, assigning likelihood ratios to # burst+burst coincs if the document contains them. # if verbose: print >>sys.stderr, "computing likelihood ratios ..." n_coincs = len(database.coinc_table) cursor = database.connection.cursor() cursor.execute("""
likelihood = ? | likelihood = likelihood_ratio(coinc_event_id, time_slide_id) | def ligolw_burca2(database, likelihood_ratio, params_func, verbose = False, params_func_extra_args = ()): """ Assigns likelihood ratio values to excess power coincidences. database is pylal.SnglBurstUtils.CoincDatabase instance, and likelihood_ratio is a LikelihoodRatio class instance. """ # # Find document parts. # time_slides = database.time_slide_table.as_dict() # # Iterate over all coincs, assigning likelihood ratios to # burst+burst coincs if the document contains them. # if verbose: print >>sys.stderr, "computing likelihood ratios ..." n_coincs = len(database.coinc_table) cursor = database.connection.cursor() cursor.execute(""" |
coinc_event_id == ? """, (likelihood_ratio(params_func, events, time_slides[time_slide_id], *params_func_extra_args), coinc_event_id)) | coinc_def_id == ? """, (database.bb_definer_id,)) | def ligolw_burca2(database, likelihood_ratio, params_func, verbose = False, params_func_extra_args = ()): """ Assigns likelihood ratio values to excess power coincidences. database is pylal.SnglBurstUtils.CoincDatabase instance, and likelihood_ratio is a LikelihoodRatio class instance. """ # # Find document parts. # time_slides = database.time_slide_table.as_dict() # # Iterate over all coincs, assigning likelihood ratios to # burst+burst coincs if the document contains them. # if verbose: print >>sys.stderr, "computing likelihood ratios ..." n_coincs = len(database.coinc_table) cursor = database.connection.cursor() cursor.execute(""" |
os.path.join("bin", "checkPerformedInjections.py") | os.path.join("bin", "pylal_cbc_select_hardware_injections") | def run(self): # remove the automatically generated user env scripts for script in ["pylal-user-env.sh", "pylal-user-env.csh"]: log.info("removing " + script ) try: os.unlink(os.path.join("etc", script)) except: pass |
if not os.path.isdir(os.path.join(margdir,'/')): os.mkdir(margdir) | if not os.path.isdir(margdir): os.makedirs(margdir) | def cbcBayesSkyRes(outdir,data,oneDMenu,twoDGreedyMenu,GreedyRes,confidence_levels,twoDplots,injfile=None,eventnum=None,skyres=None): if eventnum is not None and injfile is None: print "You specified an event number but no injection file. Ignoring!" if data is None: print 'You must specify an input data file' exit(1) # if outdir is None: print "You must specify an output directory." exit(1) if not os.path.isdir(outdir): os.makedirs(outdir) # summary_fo=open(os.path.join(outdir,'summary.ini'),'w') summary_file=ConfigParser() summary_file.add_section('metadata') summary_file.set('metadata','group_id','X') if eventnum: summary_file.set('metadata','event_id',str(eventnum)) summary_file.add_section('Confidence levels') summary_file.set('Confidence levels','confidence levels',str(confidence_levels)) # Load in the main data paramnames, pos=loadDataFile(data[0]) #Generate any required derived parameters if "m1" not in paramnames and "m2" not in paramnames and "mchirp" in paramnames and "eta" in paramnames: (m1,m2)=bppu.mc2ms(pos[:,paramnames.index('mchirp')],pos[:,paramnames.index('eta')]) pos=np.column_stack((pos,m1,m2)) paramnames.append("m1") paramnames.append("m2") # Nd=len(paramnames) print "Number of posterior samples: " + str(size(pos,0)) # Calculate means means = mean(pos,axis=0) meanStr=map(str,means) out=reduce(lambda a,b:a+'||'+b,meanStr) print 'Means:' print '||'+out+'||' RAdim=paramnames.index('RA') decdim=paramnames.index('dec') injection=None # Select injections using tc +/- 0.1s if it exists or eventnum from the injection file if injfile: import itertools injections = SimInspiralUtils.ReadSimInspiralFromFiles([injfile]) if(eventnum is not None): if(len(injections)<eventnum): print "Error: You asked for event %d, but %s contains only %d injections" %(eventnum,injfile,len(injections)) sys.exit(1) else: injection=injections[eventnum] else: if(len(injections)<1): print 'Warning: Cannot find injection with end time %f' %(means[2]) else: injection = itertools.ifilter(lambda a: abs(a.get_end() - means[2]) < 0.1, injections).next() #If injection parameter passed load object representation of injection #table entries. if injection: injpoint=map(lambda a: getinjpar(paramnames,injection,a),range(len(paramnames))) injvals=map(str,injpoint) out=reduce(lambda a,b:a+'||'+b,injvals) print 'Injected values:' print out #Add injection values to output file summary_file.add_section('Injection values') for parnum in range(len(paramnames)): summary_file.set('Injection values',paramnames[parnum],getinjpar(paramnames,injection,parnum)) # #If sky resolution parameter has been specified try and create sky map. skyreses=None if skyres is not None: RAvec=array(pos)[:,paramnames.index('RA')] decvec=array(pos)[:,paramnames.index('dec')] skypos=column_stack([RAvec,decvec]) injvalues=None if injection: injvalues=(injpoint[RAdim],injpoint[decdim]) skyreses,skyinjectionconfidence=bppu.plotSkyMap(skypos,skyres,injvalues,confidence_levels,outdir) #Loop over parameter pairs in twoDGreedyMenu and bin the sample pairs #using a greedy algorithm . The ranked pixels (toppoints) are used #to plot 2D histograms and evaluate Bayesian confidence intervals. 
summary_file.add_section('2D greedy cl') summary_file.add_section('2D greedy cl inj') ncon=len(confidence_levels) pos_array=np.array(pos) twoDGreedyCL={} twoDGreedyInj={} for par1_name,par2_name in twoDGreedyMenu: print "Binning %s-%s to determine confidence levels ..."%(par1_name,par2_name) #Bin sizes try: par1_bin=GreedyRes[par1_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par1_name,par1_name,par2_name) continue try: par2_bin=GreedyRes[par2_name] except KeyError: print "Bin size is not set for %s, skipping %s/%s binning."%(par2_name,par1_name,par2_name) continue #Get posterior samples try: par1_index=paramnames.index(par1_name) except ValueError: print "No input chain for %s, skipping %s/%s binning."%(par1_name,par1_name,par2_name) continue try: par2_index=paramnames.index(par2_name) except ValueError: print "No input chain for %s, skipping %s/%s binning."%(par2_name,par1_name,par2_name) continue pars_name=("%s,%s"%(par1_name,par2_name)).lower() par1pos=pos_array[:,par1_index] par2pos=pos_array[:,par2_index] injection_=None if injection: par1_injvalue=np.array(injpoint)[par1_index] par2_injvalue=np.array(injpoint)[par2_index] injection_=(par1_injvalue,par2_injvalue) posterior_array=column_stack([par1pos,par2pos]) toppoints,injectionconfidence,twoDGreedyCL[pars_name],twoDGreedyInj[pars_name]=bppu.greedyBin2(posterior_array,(par1_bin,par2_bin),confidence_levels,par_names=(par1_name,par2_name),injection=injection_) #Plot 2D histograms of greedily binned points if injection is not None and par1_injvalue is not None and par2_injvalue is not None: bppu.plot2Dbins(np.array(toppoints),(par1_bin,par2_bin),outdir,par_names=(par1_name,par2_name),injpoint=[par1_injvalue,par2_injvalue]) else: bppu.plot2Dbins(np.array(toppoints),(par1_bin,par2_bin),outdir,par_names=(par1_name,par2_name)) # summaryString='[' for frac,area in twoDGreedyCL[pars_name].items(): summaryString+=str(area) summary_file.set('2D greedy cl',pars_name,summaryString+']') summary_file.set('2D greedy cl inj',pars_name,str(injectionconfidence)) if not os.path.exists(os.path.join(outdir,'pickle')): os.makedirs(os.path.join(outdir,'pickle')) pickle_to_file(twoDGreedyCL,os.path.join(outdir,'pickle','GreedyCL2.pickle')) pickle_to_file(twoDGreedyInj,os.path.join(outdir,'pickle','GreedyInj2.pickle')) for par in twoDGreedyCL.keys(): oneD_dict_to_file(twoDGreedyCL[par],os.path.join(outdir,str(par)+'_greedy2.dat')) #1D binning summary_file.add_section('1D mean') summary_file.add_section('1D median') summary_file.add_section('1D mode') summary_file.add_section('1D contigious cl') summary_file.add_section('1D greedy cl') summary_file.add_section('1D stacc') oneDStats={} oneDGreedyCL={} oneDContCL={} oneDGreedyInj={} oneDContInj={} max_pos,max_i=posMode(pos_array) #Loop over each parameter and determine the contigious and greedy #confidence levels and some statistics. 
for par_name in oneDMenu: print "Binning %s to determine confidence levels ..."%par_name try: par_index=paramnames.index(par_name) except ValueError: print "No input chain for %s, skipping binning."%par_name continue try: par_bin=GreedyRes[par_name] except KeyError: print "Bin size is not set for %s, skipping binning."%par_name continue oneDGreedyCL[par_name]={} oneDStats[par_name]={} oneDContCL[par_name]={} oneDGreedyInj[par_name]={} oneDContInj[par_name]={} par_samps=pos_array[:,par_index] summary_file.set('1D mode',par_name,str(par_samps[max_i])) summary_file.set("1D mean",par_name,str(np.mean(par_samps))) summary_file.set("1D median",par_name,str(np.median(par_samps))) oneDStats[par_name]['mode']=par_samps[max_i] oneDStats[par_name]['mean']=np.mean(par_samps) oneDStats[par_name]['median']=np.median(par_samps) par_injvalue_=None if injection: par_injvalue_=np.array(injpoint)[par_index] oneDGreedyCL[par_name],oneDGreedyInj[par_name],toppoints,injectionconfidence = bppu.greedyBin1(par_samps,par_bin,confidence_levels,par_injvalue=par_injvalue_) oneDContCL[par_name],oneDContInj[par_name]=bppu.contCL1(par_samps,par_bin,confidence_levels,par_injvalue=par_injvalue_) #Ilya's standard accuracy statistic if injection: injvalue=np.array(injpoint)[par_index] if injvalue: stacc=bppu.stacc_stat(par_samps,injvalue) summary_file.set('1D stacc',par_name,str(stacc)) oneDStats[par_name]['stacc']=stacc pickle_to_file(oneDGreedyCL,os.path.join(outdir,'pickle','GreedyCL1.pickle')) pickle_to_file(oneDContCL,os.path.join(outdir,'pickle','ContCL1.pickle')) pickle_to_file(oneDStats,os.path.join(outdir,'pickle','Stats1.pickle')) pickle_to_file(oneDContInj,os.path.join(outdir,'pickle','ContInj1.pickle')) pickle_to_file(oneDGreedyInj,os.path.join(outdir,'pickle','GreedyInj1.pickle')) for par in oneDGreedyCL.keys(): oneD_dict_to_file(oneDGreedyCL[par],os.path.join(outdir,str(par)+'_greedy1.dat')) # for par in oneDGreedyCL.keys(): oneD_dict_to_file(oneDContCL[par],os.path.join(outdir,str(par)+'_cont.dat')) # for par in oneDStats.keys(): oneD_dict_to_file(oneDStats[par],os.path.join(outdir,str(par)+'_stats.dat')) # for par in oneDStats.keys(): oneD_dict_to_file(oneDContInj[par],os.path.join(outdir,str(par)+'_cont_inj.dat')) # #####Generate 2D kde plots and webpage######## margdir=os.path.join(outdir,'2D') if not os.path.isdir(os.path.join(margdir,'/')): os.mkdir(margdir) twoDKdePaths=[] for par1,par2 in twoDplots: try: i=paramnames.index(par1) except ValueError: print "No input chain for %s, skipping 2D plot of %s-%s."%(par1,par1,par2) continue try: j=paramnames.index(par2) except ValueError: print "No input chain for %s, skipping 2D plot of %s-%s."%(par2,par1,par2) continue print 'Generating %s-%s plot'%(paramnames[i],paramnames[j]) if (size(np.unique(pos[:,i]))<2 or size(np.unique(pos[:,j]))<2): continue par_injvalues_=None if injection and reduce (lambda a,b: a and b, map(lambda idx: getinjpar(paramnames,injection,idx)>min(pos[:,idx]) and getinjpar(paramnames,injection,idx)<max(pos[:,idx]),[i,j])) : if getinjpar(paramnames,injection,i) is not None and getinjpar(paramnames,injection,j) is not None: par_injvalues_=( getinjpar(paramnames,injection,i) , getinjpar(paramnames,injection,j) ) myfig=bppu.plot2Dkernel(pos[:,i],pos[:,j],50,50,par_names=(par1,par2),par_injvalues=par_injvalues_) twoDKdePath=os.path.join(margdir,paramnames[i]+'-'+paramnames[j]+'_2Dkernel.png') twoDKdePaths.append(twoDKdePath) myfig.savefig(twoDKdePath) htmlfile=open(os.path.join(outdir,'posplots.html'),'w') 
htmlfile.write('<HTML><HEAD><TITLE>Posterior PDFs</TITLE></HEAD><BODY><h3>'+str(means[2])+' Posterior PDFs</h3>') if(skyres is not None): htmlfile.write('<table border=1><tr><td>Confidence region<td>size (sq. deg)</tr>') for (frac,skysize) in skyreses: htmlfile.write('<tr><td>%f<td>%f</tr>'%(frac,skysize)) htmlfile.write('</table>') htmlfile.write('Produced from '+str(size(pos,0))+' posterior samples.<br>') htmlfile.write('Samples read from %s<br>'%(data[0])) htmlfile.write('<h4>Mean parameter estimates</h4>') htmlfile.write('<table border=1><tr>') paramline=reduce(lambda a,b:a+'<td>'+b,paramnames) htmlfile.write('<td>'+paramline+'<td>logLmax</tr><tr>') meanline=reduce(lambda a,b:a+'<td>'+b,meanStr) htmlfile.write('<td>'+meanline+'</tr>') if injection: htmlfile.write('<tr><th colspan='+str(len(paramnames))+'>Injected values</tr>') injline=reduce(lambda a,b:a+'<td>'+b,injvals) htmlfile.write('<td>'+injline+'<td></tr>') htmlfile.write('</table>') if injection: if skyinjectionconfidence: htmlfile.write('<p>Injection found at confidence interval %f in sky location</p>'%(skyinjectionconfidence)) else: htmlfile.write('<p>Injection not found in posterior bins in sky location!</p>') htmlfile.write('<h5>2D Marginal PDFs</h5><br>') htmlfile.write('<table border=1><tr>') #htmlfile.write('<td width=30%><img width=100% src="m1m2.png"></td>') #htmlfile.write('<td width=30%><img width=100% src="RAdec.png"></td>') #htmlfile.write('<td width=30%><img width=100% src="Meta.png"></td>') #htmlfile.write('</tr><tr><td width=30%><img width=100% src="2D/Mchirp (Msun)-geocenter time ISCO_2Dkernel.png"</td>') if skyres is not None: htmlfile.write('<td width=30%><img width=100% src="skymap.png"></td>') else: htmlfile.write('<td width=30%><img width=100% src="m1dist.png:></td>') #htmlfile.write('<td width=30%><img width=100% src="m2dist.png"></td>') row_switch=1 for par1,par2 in twoDplots: if row_switch==3: row_switch=0 plot_path=None if os.path.isfile(os.path.join(outdir,'2D',par1+'-'+par2+'_2Dkernel.png')): plot_path='2D/'+par1+'-'+par2+'_2Dkernel.png' elif os.path.isfile(os.path.join(outdir,'2D',par2+'-'+par1+'_2Dkernel.png')): plot_path='2D/'+par2+'-'+par1+'_2Dkernel.png' if plot_path: if row_switch==0: htmlfile.write('<tr>') htmlfile.write('<td width=30%><img width=100% src="'+plot_path+'"></td>') if row_switch==2: htmlfile.write('</tr>') row_switch+=1 # if row_switch==2: htmlfile.write('<td></td></tr>') elif row_switch==1: htmlfile.write('<td></td><td></td></tr>') htmlfile.write('</table>') htmlfile.write('<br><a href="2D/">All 2D Marginal PDFs</a><hr><h5>1D marginal posterior PDFs</h5><br>') summary_file.add_section('1D ranking kde') summary_file.add_section('1D ranking bins') oneDplotPaths=[] for param in oneDMenu: try: par_index=paramnames.index(param) i=par_index except ValueError: print "No input chain for %s, skipping 1D plot."%param continue pos_samps=pos[:,i] injpar_=None if injection: injpar_=getinjpar(paramnames,injection,i) print "Generating 1D plot for %s."%param rbins,plotFig=bppu.plot1DPDF(pos_samps,param,injpar=injpar_) oneDplotPath=os.path.join(outdir,param+'.png') plotFig.savefig(os.path.join(outdir,param+'.png')) if rbins: print "r of injected value of %s (bins) = %f"%(param, rbins) ##Produce plot of raw samples myfig=plt.figure(figsize=(4,3.5),dpi=80) plt.plot(pos_samps,'.',figure=myfig) if injpar_: if min(pos_samps)<injpar_ and max(pos_samps)>injpar_: plt.plot([0,len(pos_samps)],[injpar_,injpar_],'r-.') myfig.savefig(os.path.join(outdir,param+'_samps.png')) #summary_file.set('1D ranking 
kde',param,rkde) summary_file.set('1D ranking bins',param,rbins) oneDplotPaths.append(oneDplotPath) for plotPath in oneDplotPaths: htmlfile.write('<img src="'+plotPath+'"><img src="'+plotPath.replace('.png','_samps.png')+'"><br>') htmlfile.write('<hr><br />Produced using cbcBayesSkyRes.py at '+strftime("%Y-%m-%d %H:%M:%S")) htmlfile.write('</BODY></HTML>') htmlfile.close() # Save posterior samples too... posfilename=os.path.join(outdir,'posterior_samples.dat') posfile=open(posfilename,'w') for row in pos: for i in row: posfile.write('%f\t'%(i)) posfile.write('\n') # #Close files posfile.close() summary_file.write(summary_fo) |
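cbcBayesSkyRes above builds its 2D marginal figures with bppu.plot2Dkernel, which fits a Gaussian kernel density estimate to the sample pairs and evaluates it on a grid. A minimal standalone sketch of that KDE-on-meshgrid step, on synthetic samples:

# Sketch of the 2D KDE step behind plot2Dkernel, using synthetic data.
import numpy as np
from scipy import stats

xdat = np.random.randn(500)
ydat = 0.5 * xdat + np.random.randn(500)
kde = stats.gaussian_kde(np.vstack([xdat, ydat]))
xax = np.linspace(xdat.min(), xdat.max(), 50)
yax = np.linspace(ydat.min(), ydat.max(), 50)
x, y = np.meshgrid(xax, yax)
z = kde(np.vstack([x.ravel(), y.ravel()])).reshape(x.shape)
# z now holds the density surface that gets rendered with imshow/contour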
extent = off_segs.extent() | extent = (off_segs | on_segs).extent() | def get_exttrig_trials(on_segs, off_segs, veto_files): """ Return a tuple of (off-source time bins, off-source veto mask, index of trial that is on source). The off-source veto mask is a one-dimensional boolean array where True means vetoed. @param on_segs: On-source segments @param off_segs: Off-source segments @param veto_files: List of filenames containing vetoes """ # Check that offsource length is a multiple of the onsource segment length trial_len = int(abs(on_segs)) if abs(off_segs) % trial_len != 0: raise ValueError, "The provided file's analysis segment is not "\ "divisible by the fold time." extent = off_segs.extent() # generate bins for trials num_trials = int(abs(extent)) // trial_len trial_bins = rate.LinearBins(extent[0], extent[1], num_trials) # incorporate veto file; in trial_veto_mask, True means vetoed. trial_veto_mask = numpy.zeros(num_trials, dtype=numpy.bool8) for veto_file in veto_files: new_veto_segs = segmentsUtils.fromsegwizard(open(veto_file), coltype=int) if new_veto_segs.intersects(on_segs): print >>sys.stderr, "warning: %s overlaps on-source segment" \ % veto_file trial_veto_mask |= rate.bins_spanned(trial_bins, new_veto_segs, dtype=numpy.bool8) # identify onsource trial index onsource_mask = rate.bins_spanned(trial_bins, on_segs, dtype=numpy.bool8) if sum(onsource_mask) != 1: raise ValueError, "on-source segment spans more or less than one trial" onsource_ind = numpy.arange(len(onsource_mask))[onsource_mask] return trial_bins, trial_veto_mask, onsource_ind |
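The fix above takes the extent of the union of both segment lists, so an on-source segment lying outside the off-source span cannot be clipped out of the trial bins. A small glue.segments illustration with made-up GPS times:

# Why the union matters: if the on-source segment sticks out past the
# off-source list, off_segs.extent() alone misses it.  Times are made up.
from glue import segments

on_segs = segments.segmentlist([segments.segment(200, 210)])
off_segs = segments.segmentlist([segments.segment(0, 100),
                                 segments.segment(100, 200)])
print off_segs.extent()              # segment(0, 200): misses the on-source trial
print (off_segs | on_segs).extent()  # segment(0, 210): spans both lists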
htmlfile.write('<img src="'+paramnames[i]+'.png"><img src="'+paramnames[i]+'_samps.png><br>') | htmlfile.write('<img src="'+paramnames[i]+'.png"><img src="'+paramnames[i]+'_samps.png"><br>') | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() |
def append_process(xmldoc, comment = None, force = None, e_thinca_parameter = None, effective_snr_factor = None, vetoes_name = None, trigger_program = None, effective_snr = None, verbose = None): | def append_process(xmldoc, comment = None, force = None, e_thinca_parameter = None, effective_snr_factor = None, vetoes_name = None, trigger_program = None, effective_snr = None, coinc_end_time_segment = None, verbose = None): | def append_process(xmldoc, comment = None, force = None, e_thinca_parameter = None, effective_snr_factor = None, vetoes_name = None, trigger_program = None, effective_snr = None, verbose = None): process = llwapp.append_process(xmldoc, program = process_program_name, version = __version__, cvs_repository = u"lscsoft", cvs_entry_time = __date__, comment = comment) params = [ (u"--e-thinca-parameter", u"real_8", e_thinca_parameter) ] if comment is not None: params += [(u"--comment", u"lstring", comment)] if force is not None: params += [(u"--force", None, None)] if effective_snr_factor is not None: params += [(u"--effective-snr-factor", u"real_8", effective_snr_factor)] if vetoes_name is not None: params += [(u"--vetoes-name", u"lstring", vetoes_name)] if trigger_program is not None: params += [(u"--trigger-program", u"lstring", trigger_program)] if effective_snr is not None: params += [(u"--effective-snr", u"lstring", effective_snr)] if verbose is not None: params += [(u"--verbose", None, None)] ligolw_process.append_process_params(xmldoc, process, params) return process |
coinc_inspiral.set_end(events[0].get_end() + self.time_slide_index[time_slide_id][events[0].ifo]) | coinc_inspiral.set_end(coinc_inspiral_end_time(events, self.time_slide_index[time_slide_id])) | def append_coinc(self, process_id, time_slide_id, coinc_def_id, events, effective_snr_factor): # # populate the coinc_event and coinc_event_map tables # |
ntuple_comparefunc = lambda events, offset_vector: False, | ntuple_comparefunc = default_ntuple_comparefunc, | def ligolw_thinca( xmldoc, process_id, EventListType, CoincTables, coinc_definer_row, event_comparefunc, thresholds, ntuple_comparefunc = lambda events, offset_vector: False, effective_snr_factor = 250.0, veto_segments = None, trigger_program = u"inspiral", verbose = False |
self.name_output_file = job.tag_base + "-" + str(time) + ".tgz" | self.name_output_file = job.tag_base + "-" + repr(time) + ".tgz" | def __init__(self, dag, job, cp, opts, time, ifo, p_nodes=[], type="ht", variety="fg"): |
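The swap from str() to repr() above matters because Python 2 (pylal's platform) rounds a float to 12 significant digits in str(), so two nearby trigger times can yield the same tarball name. For example:

# Python 2: str() keeps 12 significant digits, repr() keeps full precision.
t1 = 931176926.12341
t2 = 931176926.12346
print str(t1), str(t2)    # both '931176926.123' -- the names would collide
print repr(t1), repr(t2)  # full precision, the two times stay distinct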
return np.fmin(c, 2 * LAL_PI - c) | return np.where(c < LAL_PI, c, 2 * LAL_PI - c) | def _abs_diff(c): """ For some angular difference c = |a - b| in radians, find the magnitude of the difference, taking into account the wrap-around at 2*pi. """ c = abs(c) % (2 * LAL_PI) return np.fmin(c, 2 * LAL_PI - c) |
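Elementwise, the np.where form introduced above picks the short way around the circle exactly as the np.fmin form did; a worked example on sample angles (values made up), assuming c has already been folded into [0, 2*pi):

# Wrap-around angular difference, spelled out on sample angles.
import numpy as np

LAL_PI = np.pi
a = np.array([0.1, 6.0, 3.0])
b = np.array([6.2, 0.2, 0.0])
c = np.abs(a - b) % (2 * LAL_PI)
d = np.where(c < LAL_PI, c, 2 * LAL_PI - c)
# d[0] is ~0.183 rad, not ~6.1: the short way around the circle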
pol PDF: kappa / (2 * np.sinh(kappa)) * np.exp(kappa * np.cos(theta)) * np.sin(theta)) az PDF: uniform(0, 2*pi) | References: * http://en.wikipedia.org/wiki/Von_Mises–Fisher_distribution * http://arxiv.org/pdf/0902.0737v1 (states the Rayleigh limit) | def fisher_rvs(mu, sigma, size=1): """ Return a random (polar, azimuthal) angle drawn from the Fisher distribution. Assume that the concentration parameter (kappa) is large so that we can use a Rayleigh distribution about the north pole and rotate it to be centered at the (polar, azimuthal) coordinate mu. Assume kappa = 1 / sigma**2 pol PDF: kappa / (2 * np.sinh(kappa)) * np.exp(kappa * np.cos(theta)) * np.sin(theta)) az PDF: uniform(0, 2*pi) """ rayleigh_rv = \ np.array((np.random.rayleigh(scale=sigma, size=size), np.random.uniform(low=0, high=2*LAL_PI, size=size)))\ .reshape((2, size)).T # guarantee 2D and transpose a, b = new_z_to_euler(mu) return rotate_euler(rayleigh_rv, a, b, 0) |
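A usage sketch of the small-kappa-error approximation the revised docstring cites: polar angles drawn from a Rayleigh distribution with scale sigma = 1/sqrt(kappa), azimuths uniform. The parameter values below are illustrative:

# Sketch of the Rayleigh-limit sampling step inside fisher_rvs.
import numpy as np

sigma = 0.05    # ~3 degrees; corresponds to kappa = 1/sigma**2 = 400
size = 1000
polar = np.random.rayleigh(scale=sigma, size=size)
azimuth = np.random.uniform(low=0, high=2 * np.pi, size=size)
# (polar, azimuth) scatter around the north pole; fisher_rvs then rotates
# them so the distribution is centred on the requested direction mu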
if len(indexList) > 1: | if len(indexList) >= 1: | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # # Check to see if wiki file with name already exists # maxCount=0 while os.path.exists(wikiFilename) and maxCount < 10: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # 
#Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) if elems%3 != 0: sys.stdout.write("Generation of FOM links seems incomplete!\n") cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] 
indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEIS?_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEIS?_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() #Select only PEM channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if myFile.upper().__contains__("PEM"): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if myFile.upper().__contains__("PEM"): thumbDict[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in 
imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not myFile.upper().__contains__("PEM"): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not myFile.upper().__contains__("PEM"): thumbDict[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") 
wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(thumbList[k]) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots 
not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) > 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImaage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
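The one-character fix in this row (len(indexList) > 1 becomes >= 1) matters because a single matching index page is the normal case; with the strict inequality the code fell through to the "plots not found" branch. A toy reduction of the guard (build_table and its strings are hypothetical, not the module's API):

def build_table(indexList):
    if len(indexList) >= 1:   # was "> 1": silently rejected a lone index file
        return "table built from %d file(s)" % len(indexList)
    return "SNR and CHISQ plots not found."

print(build_table(["H1_pipe_FOLLOWUP_index.html"]))  # now builds the table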
cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImaage(image,thumbList[i]) | cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # # Check to see if wiki file with name already exists # maxCount=0 while os.path.exists(wikiFilename) and maxCount < 10: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: 
wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) if elems%3 != 0: sys.stdout.write("Generation of FOM links seems incomplete!\n") cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ 
%(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*index.html"%(sngl.ifo,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEIS?_512.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)) thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_RDS_R_L1_SEIS(),\ "*/%s_RDS_*/%s/*SEIS?_512.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict=dict() indexDict=dict() thumbDict=dict() #Select only PEM channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if myFile.upper().__contains__("PEM"): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if 
myFile.upper().__contains__("PEM"): thumbDict[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() #Select only AUX channels for sngl in wikiCoinc.sngls: imageDict[sngl.ifo]=list() indexDict[sngl.ifo]=list() thumbDict[sngl.ifo]=list() for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*html"%(sngl.ifo,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened.png"%\ (sngl.ifo,sngl.time)): if not myFile.upper().__contains__("PEM"): imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(wikiFileFinder.get_RDS_R_L1(),\ "*/%s_RDS_*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (sngl.ifo,sngl.time)): if not myFile.upper().__contains__("PEM"): thumbDict[sngl.ifo].append(myFile) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") 
wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(thumbList[k]) 
wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) > 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImaage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
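This row corrects a misspelled method name (linkedRemoteImaage to linkedRemoteImage). Python resolves attribute names at call time, so the typo passed import and only raised once the coherent-study branch actually executed. A self-contained illustration with a toy Wiki class (not the module's real wiki object):

class Wiki(object):
    def linkedRemoteImage(self, image, thumb):
        return "[[%s|%s]]" % (image, thumb)

page = Wiki()
try:
    page.linkedRemoteImaage("chia.png", "chia_thumb.png")  # the original typo
except AttributeError as e:
    print(e)  # 'Wiki' object has no attribute 'linkedRemoteImaage'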
os.path.join("bin", "search_upper_limit_by_s1_s2"), | os.path.join("bin", "search_upper_limit_by_s1z_s2z"), os.path.join("bin", "search_volume_by_s1z_s2z"), | def run(self): # remove the automatically generated user env scripts for script in ["pylal-user-env.sh", "pylal-user-env.csh"]: log.info("removing " + script ) try: os.unlink(os.path.join("etc", script)) except: pass |
(home_dirs()+"/romain/followupbackgrounds/omega/VSR2a/background/background_931035296_935798415.cache",931035296,935798415,"V1") | (home_dirs()+"/romain/followupbackgrounds/omega/VSR2a/background/background_931035296_935798415.cache",931035296,935798415,"V1"), | def figure_out_cache(time,ifo): cacheList=( (home_dirs()+"/romain/followupbackgrounds/omega/S5/background/background_815155213_875232014.cache",815155213,875232014,"H1H2L1"), (home_dirs()+"/romain/followupbackgrounds/omega/S6a/background/background_931035296_935798415.cache",931035296,935798415,"H1L1"), (home_dirs()+"/romain/followupbackgrounds/omega/S6b/background/background_937800015_944587815.cache",935798415,944587815,"H1L1"), (home_dirs()+"/romain/followupbackgrounds/omega/S6b/background/background_944587815_947260815.cache",944587815,999999999,"H1L1"), (home_dirs()+"/romain/followupbackgrounds/omega/VSR2a/background/background_931035296_935798415.cache",931035296,935798415,"V1") (home_dirs()+"/romain/followupbackgrounds/omega/VSR2b/background/background_937800015_947260815.cache",935798415,999999999,"V1") ) foundCache = "" for cacheFile,start,stop,ifos in cacheList: if ((start<=time) and (time<stop) and ifo in ifos): foundCache = cacheFile break if 'phy.syr.edu' in get_hostname(): foundCache = foundCache.replace("romain","rgouaty") if foundCache == "": print ifo, time, " not found in method stfu_pipe.figure_out_cache" else: if not os.path.isfile(foundCache): print "file " + foundCache + " not found" foundCache = "" return foundCache |
self.unused_coincs = reduce(lambda a, b: a | b, (set(component.get_coincs(eventlists, event_comparefunc, thresholds, verbose = verbose)) for component in self.components)) self.unused_coincs |= reduce(lambda a, b: a & b, (component.unused_coincs for component in self.components)) | for component in self.components: self.unused_coincs |= set(component.get_coincs(eventlists, event_comparefunc, thresholds, verbose = verbose)) for componenta, componentb in iterutils.choices(self.components, 2): self.unused_coincs |= componenta.unused_coincs & componentb.unused_coincs | def get_coincs(self, eventlists, event_comparefunc, thresholds, verbose = False): # # has this node already been visited? if so, return the # answer we already know # |
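This row replaces two reduce() expressions with an explicit accumulation loop plus pairwise intersections over iterutils.choices(self.components, 2). Note the semantics change: the old code intersected unused_coincs across all components at once, while the new code keeps anything unused by at least one pair of components. A sketch of the difference, with itertools.combinations standing in for glue.iterutils.choices (assumed equivalent for pairs):

from functools import reduce
from itertools import combinations

unused = [set("abc"), set("bcd"), set("cde")]

old = reduce(lambda a, b: a & b, unused)   # {'c'}: unused by every component
new = set()
for ua, ub in combinations(unused, 2):
    new |= ua & ub                         # {'b','c','d'}: unused by some pair
print(old, new)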