rem (string, lengths 0–322k) | add (string, lengths 0–2.05M) | context (string, lengths 8–228k) |
---|---|---|
def __init__(self,cp,ifo): | def __init__(self,cp,ifo,timeref): | def __init__(self,cp,ifo): depIfoIniConfig = ifo+'config' self.depIfoDir = ifo+'_qscans_config' depQscanList = ['bg-rds-qscan', 'bg-seismic-qscan'] |
qscanConfig = string.strip(cp.get("fu-"+depQscan, depIfoIniConfig)) | qscanConfig = self.fix_config_for_science_run( cp.get("fu-"+depQscan, depIfoIniConfig).strip(), timeref ) | def __init__(self,cp,ifo): depIfoIniConfig = ifo+'config' self.depIfoDir = ifo+'_qscans_config' depQscanList = ['bg-rds-qscan', 'bg-seismic-qscan'] |
if opts.prepare_scan_ccin2p3: for ifo in cp.get("fu-remote-jobs","remote-ifos").strip().split(","): CCRemoteScans = prepareLyonRemoteScans(cp,ifo) | def __init__(self,cp,ifo): depIfoIniConfig = ifo+'config' self.depIfoDir = ifo+'_qscans_config' depQscanList = ['bg-rds-qscan', 'bg-seismic-qscan'] |
|
if ifo in cp.get("fu-remote-jobs","remote-ifos").strip().split(",") and timeListFile: | if ifo in cp.get("fu-remote-jobs","remote-ifos").strip().split(",") and timeListFile and CCRemoteScans: | def __init__(self,cp,ifo): depIfoIniConfig = ifo+'config' self.depIfoDir = ifo+'_qscans_config' depQscanList = ['bg-rds-qscan', 'bg-seismic-qscan'] |
if opts.prepare_scan_ccin2p3: | if opts.prepare_scan_ccin2p3 and CCRemoteScans: | def __init__(self,cp,ifo): depIfoIniConfig = ifo+'config' self.depIfoDir = ifo+'_qscans_config' depQscanList = ['bg-rds-qscan', 'bg-seismic-qscan'] |
frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time) | def prepareChecklist(wikiFilename=None,wikiCoinc=None,wikiTree=None,file2URL=None): """ Method to prepare a checklist where data products are isolated in directory. """ endOfS5=int(875232014) wikiFileFinder=findFileType(wikiTree,wikiCoinc) # Check to see if wiki file with name already exists maxCount=0 while os.path.exists(wikiFilename) and maxCount < 15: sys.stdout.write("File %s already exists.\n"%\ os.path.split(wikiFilename)[1]) wikiFilename=wikiFilename+".wiki" maxCount=maxCount+1 sys.stdout.write("Available via browser for wiki upload at %s\n"\ %(file2URL.convert(wikiFilename))) # #Create the wikipage object etc # wikiPage=wiki(wikiFilename) # # Create top two trigger params tables # cTable=wikiPage.wikiTable(2,9) cTable.data=[ ["Trigger Type", "Rank", "FAR", "SNR", "IFOS(Coinc)", "Instruments(Active)", "Coincidence Time (s)", "Total Mass (mSol)", "Chirp Mass (mSol)" ], ["%s"%(wikiCoinc.type), "%s"%(wikiCoinc.rank), "%s"%(wikiCoinc.far), "%s"%(wikiCoinc.snr), "%s"%(wikiCoinc.ifos), "%s"%(wikiCoinc.instruments), "%s"%(wikiCoinc.time), "%s"%(wikiCoinc.mass), "%s"%(wikiCoinc.mchirp) ] ] pTable=wikiPage.wikiTable(len(wikiCoinc.sngls_in_coinc())+1,7) pTable.data[0]=[ "IFO", "GPS Time(s)", "SNR", "CHISQR", "Mass 1", "Mass 2", "Chirp Mass" ] for row,cSngl in enumerate(wikiCoinc.sngls_in_coinc()): pTable.data[row+1]=[ "%s"%(cSngl.ifo), "%s"%(cSngl.time), "%s"%(cSngl.snr), "%s"%(cSngl.chisqr), "%s"%(cSngl.mass1), "%s"%(cSngl.mass2), "%s"%(cSngl.mchirp) ] #Write the tables into the Wiki object wikiPage.putText("Coincident Trigger Event Information: %s\n"\ %(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) wikiPage.insertTable(cTable) wikiPage.putText("Corresponding Coincident Single IFO Trigger Information\n") wikiPage.insertTable(pTable) #Generate a table of contents to appear after candidate params table wikiPage.tableOfContents(3) #Begin including each checklist item as section with subsections wikiPage.section("Follow-up Checklist") #Put each checklist item wikiPage.subsection("Checklist Summary") wikiPage.subsubsection("Does this candidate pass this checklist?") wikiPage.subsubsection("Answer") wikiPage.subsubsection("Relevant Information and Comments") wikiPage.insertHR() # #First real checklist item wikiPage.subsection("#0 False Alarm Probability") wikiPage.subsubsection("Question") wikiPage.putText("What is the false alarm rate associated with this candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") farTable=wikiPage.wikiTable(2,1) farTable.setTableStyle("background-color: yellow; text-align center;") farTable.data[0][0]="False Alarm Rate" farTable.data[1][0]="%s"%(wikiCoinc.far) wikiPage.insertTable(farTable) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#1 Data Quality Flags") wikiPage.subsubsection("Question") wikiPage.putText("Can the data quality flags coincident with this candidate be safely disregarded?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPath=os.path.split(wikiFilename)[0] dqFileList=wikiFileFinder.get_findFlags() if len(dqFileList) != 1: sys.stdout.write("Warning: DQ flags data product import problem.\n") print "Found %i files."%len(dqFileList) for mf in dqFileList: print mf for myFile in dqFileList: 
wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#2 Veto Investigations") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate survive the veto investigations performed at its time?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") vetoFileList=wikiFileFinder.get_findVetos() if len(vetoFileList) != 1: sys.stdout.write("Warning: Veto flags data product import problem.\n") for myFile in vetoFileList:print myFile for myFile in vetoFileList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#3 IFO Status") wikiPage.subsubsection("Question") wikiPage.putText("Are the interferometers operating normally with a reasonable level of sensitivity around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") #Add link to Daily Stats if wikiCoinc.time <= endOfS5: statsLink=wikiPage.makeExternalLink("http://blue.ligo-wa.caltech.edu/scirun/S5/DailyStatistics/",\ "S5 Daily Stats Page") else: statsLink="This should be a link to S6 Daily Stats!\n" wikiPage.putText(statsLink) #Link figures of merit #Get link for all members of wikiCoinc wikiPage.putText("Figures of Merit\n") wikiPage.putText("UTC Time of trigger :%s"%(stfu_pipe.gpsTimeToReadableDate(wikiCoinc.time))) if wikiCoinc.time > endOfS5: fomLinks=dict() elems=0 for wikiSngl in wikiCoinc.sngls: if not(wikiSngl.ifo.upper().rstrip().lstrip() == 'V1'): fomLinks[wikiSngl.ifo]=stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo) elems=elems+len(fomLinks[wikiSngl.ifo]) else: for myLabel,myLink,myThumb in stfu_pipe.getFOMLinks(wikiCoinc.time,wikiSngl.ifo): wikiPage.putText("%s\n"%(wikiPage.makeExternalLink(myLink,myLabel))) cols=4 rows=(elems/3)+1 fTable=wikiPage.wikiTable(rows,cols) fTable.data[0]=["IFO,Shift","FOM1","FOM2","FOM3"] currentIndex=0 for myIFOKey in fomLinks.keys(): for label,link,thumb in fomLinks[myIFOKey]: myRow=currentIndex/int(3)+1 myCol=currentIndex%int(3)+1 fTable.data[myRow][0]=label thumbURL=thumb fTable.data[myRow][myCol]="%s"%(wikiPage.linkedRemoteImage(thumb,link)) currentIndex=currentIndex+1 wikiPage.insertTable(fTable) else: wikiPage.putText("Can not automatically fetch S5 FOM links.") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # #Additional Checklist Item #First real checklist item wikiPage.subsection("#4 Candidate Appearance") wikiPage.subsubsection("Question") wikiPage.putText("Do the Qscan figures show what we would expect for a gravitational-wave event?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict=dict() indexDict=dict() thumbDict=dict() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'hoft') frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time) indexDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*/%s/*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened.png"\ %(sngl.time,channelName)) 
thumbDict[sngl.ifo]=fnmatch.filter(wikiFileFinder.get_hoft_frame(),\ "*%s*_%s_16.00_spectrogram_whitened?thumb.png"\ %(sngl.time,channelName)) # #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("GW data channel scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >= 1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >= 1 if enoughImage and enoughIndex: wikiPage.insertQscanTable(imageDict,\ thumbDict,\ indexDict) else: sys.stdout.write("Warning: Candidate appearance plot import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#5 Seismic Plots") wikiPage.subsubsection("Question") wikiPage.putText("Is the seismic activity insignificant around the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") # imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() filesOmega=wikiFileFinder.get_RDS_R_L1_SEIS() filesAnalyze=wikiFileFinder.get_analyzeQscan_SEIS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time) if sngl.ifo == "V1": chankey = "Em_SE" else: chankey = "SEI" indexDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*index.html"%(frametype,sngl.time)) imageDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened.png"%\ (frametype,sngl.time,chankey)) thumbDict[sngl.ifo]=fnmatch.filter(filesOmega,\ "*/%s_*/%s/*%s*_512.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time,chankey)) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s_*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not found or seen empty for %s. 
...continuing...\n"%sngl.ifo) #Search for analyzeQscan files timeString=str(float(sngl.time)).replace(".","_") indexDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.html"%(sngl.ifo,timeString)) imageDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime.png"\ %(sngl.ifo,timeString,chankey)) thumbDictAQ[sngl.ifo]=fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_%s*_z_scat-unspecified-gpstime_thumb.png"\ %(sngl.ifo,timeString,chankey)) #Load of analyzeQscan z file if available zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*_%s_%s_*.txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankey in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Seismic scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: Seismic plots product import problem.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#6 Other environmental causes") wikiPage.subsubsection("Question") wikiPage.putText("Were the environmental disturbances (other than seismic) insignificant at the time of the candidate?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only PEM channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls_in_coinc(): indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time) if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ 
(frametype,sngl.time)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) #Search for corresponding Omega summary.txt file zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not found or seen empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. ...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(imageDict[sngl.ifo]) < 1: wikiPage.putText("PEM scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: PEM plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#7 Auxiliary degree of freedom") wikiPage.subsubsection("Question") wikiPage.putText("Were the auxiliary channel transients coincident with the candidate insignificant?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") imageDict,indexDict,thumbDict,zValueDict = dict(),dict(),dict(),dict() imageDictAQ,indexDictAQ,thumbDictAQ,zValueDictAQ = dict(),dict(),dict(),dict() #Select only AUX channels filesOmega=wikiFileFinder.get_RDS_R_L1() filesAnalyze=wikiFileFinder.get_analyzeQscan_RDS() for sngl in wikiCoinc.sngls: frametype,channelName=stfu_pipe.figure_out_type(sngl.time,sngl.ifo,'rds') frametype=__patchFrameTypeDef__(frametype,sngl.ifo,sngl.time) if sngl.ifo == "V1": chankeyseis = "Em_SE" chankeyenv = "Em_" else: chankeyseis = "SEI" chankeyenv = "PEM" 
indexDict[sngl.ifo],imageDict[sngl.ifo],thumbDict[sngl.ifo],zValueDict[sngl.ifo]=list(),list(),list(),list() indexDictAQ[sngl.ifo],imageDictAQ[sngl.ifo],thumbDictAQ[sngl.ifo],zValueDictAQ[sngl.ifo]=list(),list(),list(),list() for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*html"%(frametype,sngl.time)): indexDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile or not chankeyseis in myFile: imageDict[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*_16.00_spectrogram_whitened?thumb.png"%\ (frametype,sngl.time)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDict[sngl.ifo].append(myFile) zValueDict[sngl.ifo]=list() for zFile in fnmatch.filter(filesOmega,\ "*/%s/*/%s/*summary.txt"%(frametype,sngl.time)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDict[sngl.ifo].append(chan) if len(zValueDict[sngl.ifo]) == 0: sys.stdout.write("Omega scan summary file not found or seen empty for %s. ...continuing...\n"%sngl.ifo) #Select associated analyzeQscans timeString=str(float(sngl.time)).replace(".","_") for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile or not chankeyseis in myFile: imageDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*_z_scat-unspecified-gpstime?thumb.png"%\ (sngl.ifo,timeString)): if not chankeyenv in myFile and not chankeyseis in myFile: thumbDictAQ[sngl.ifo].append(myFile) for myFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*html"%(sngl.ifo,timeString)): indexDictAQ[sngl.ifo].append(myFile) zValueDictAQ[sngl.ifo]=list() for zFile in fnmatch.filter(filesAnalyze,\ "*%s-*_%s_*txt"%(sngl.ifo,timeString)): for chan in wikiFileFinder.__readSummary__(zFile): if not chankeyenv in chan[0] and not chankeyseis in chan[0]: zValueDictAQ[sngl.ifo].append(chan) if len(zValueDictAQ[sngl.ifo]) == 0: sys.stdout.write("AnalyzeQscan summary file not or empty for %s. 
...continuing...\n"%sngl.ifo) #Convert disk locals to URLs imageDict[sngl.ifo]=[file2URL.convert(x) for x in imageDict[sngl.ifo]] indexDict[sngl.ifo]=[file2URL.convert(x) for x in indexDict[sngl.ifo]] thumbDict[sngl.ifo]=[file2URL.convert(x) for x in thumbDict[sngl.ifo]] imageDictAQ[sngl.ifo]=[file2URL.convert(x) for x in imageDictAQ[sngl.ifo]] indexDictAQ[sngl.ifo]=[file2URL.convert(x) for x in indexDictAQ[sngl.ifo]] thumbDictAQ[sngl.ifo]=[file2URL.convert(x) for x in thumbDictAQ[sngl.ifo]] if len(indexDict[sngl.ifo]) < 1: wikiPage.putText("Other scans for %s not available.\n"%sngl.ifo) enoughImage=[len(imageDict[key])>0 for key in imageDict.keys()].count(True) >=1 enoughIndex=[len(indexDict[key])>0 for key in indexDict.keys()].count(True) >=1 if enoughImage and enoughIndex: wikiPage.insertAnalyzeQscanTable(imageDict, thumbDict, indexDict, zValueDict, imageDictAQ, thumbDictAQ, indexDictAQ, zValueDictAQ) else: sys.stdout.write("Warning: AUX plots import trouble.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#8 Electronic Log Book") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the comments posted by the sci-mons or the operators in the e-log?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiLinkLHOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"H1"), "Hanford eLog") wikiLinkLLOlog=wikiPage.makeExternalLink(stfu_pipe.getiLogURL(myCoinc.time,"L1"), "Livingston eLog") wikiPage.putText("%s\n\n%s\n\n"%(wikiLinkLHOlog,wikiLinkLLOlog)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#9 Glitch Report") wikiPage.subsubsection("Question") wikiPage.putText("Were the instruments behaving normally according to the weekly glitch report?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") if int(wikiCoinc.time) >= endOfS5: wikiLinkGlitch=wikiPage.makeExternalLink( "https://www.lsc-group.phys.uwm.edu/twiki/bin/view/DetChar/GlitchStudies", "Glitch Reports for S6" ) else: wikiLinkGlitch=wikiPage.makeExternalLink( "http://www.lsc-group.phys.uwm.edu/glitch/investigations/s5index.html#shift", "Glitch Reports for S5" ) wikiPage.putText("%s\n"%(wikiLinkGlitch)) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#10 Snr versus time") wikiPage.subsubsection("Question") wikiPage.putText("Is this trigger significant in a SNR versus time plot of all triggers in its analysis chunk?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#11 Parameters of the candidate") wikiPage.subsubsection("Question") wikiPage.putText("Does the candidate have a high likelihood of being a gravitational-wave according to its parameters?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Effective 
Distance Ratio Test\n") effDList=wikiFileFinder.get_effDRatio() if len(effDList) != 1: sys.stdout.write("Warning: Effective Distance Test import problem.\n") for myFile in effDList: wikiPage.putText("%s\n"%(file(myFile).read())) wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#12 Snr and Chisq") wikiPage.subsubsection("Question") wikiPage.putText("Are the SNR and CHISQ time series consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") # #Put plots SNR and Chi sqr # indexList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*.html") thumbList=fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_snr-*thumb.png") thumbList.extend(fnmatch.filter(wikiFileFinder.get_plotsnrchisq(),"*_chisq-*thumb.png")) thumbList.sort() indexList=[file2URL.convert(x) for x in indexList] thumbList=[file2URL.convert(x) for x in thumbList] #Two thumb types possible "_thumb.png" or ".thumb.png" imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] ifoCount=len(wikiCoinc.sngls) rowLabel={"SNR":1,"CHISQ":2} rowCount=len(rowLabel) colCount=ifoCount if len(indexList) >= 1: snrTable=wikiPage.wikiTable(rowCount+1,colCount+1) for i,sngl in enumerate(wikiCoinc.sngls): myIndex="" for indexFile in indexList: if indexFile.__contains__("_pipe_%s_FOLLOWUP_"%sngl.ifo): myIndex=indexFile if myIndex=="": snrTable.data[0][i+1]=" %s "%sngl.ifo else: snrTable.data[0][i+1]=wikiPage.makeExternalLink(myIndex,sngl.ifo) for col,sngl in enumerate(wikiCoinc.sngls): for row,label in enumerate(rowLabel.keys()): snrTable.data[row+1][0]=label for k,image in enumerate(imageList): if (image.__contains__("_%s-"%label.lower()) \ and image.__contains__("pipe_%s_FOLLOWUP"%sngl.ifo)): snrTable.data[row+1][col+1]=" %s "%(wikiPage.linkedRemoteImage(thumbList[k],thumbList[k])) wikiPage.insertTable(snrTable) else: sys.stdout.write("Warning: SNR and CHISQ plots not found.\n") wikiPage.putText("SNR and CHISQ plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#13 Template bank veto") wikiPage.subsubsection("Question") wikiPage.putText("Is the bank veto value consistent with our expectations for a gravitational wave?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#14 Coherent studies") wikiPage.subsubsection("Question") wikiPage.putText("Are the triggers found in multiple interferometers coherent with each other?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") indexList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),"*.html") if len(indexList) >= 1: myIndex=file2URL.convert(indexList[0]) wikiPage.putText(wikiPage.makeExternalLink(myIndex,\ "%s Coherence Study Results"%(wikiCoinc.ifos))) thumbList=fnmatch.filter(wikiFileFinder.get_plotchiatimeseries(),\ "PLOT_CHIA_%s_snr-squared*thumb.png"%(wikiCoinc.time)) imageList=[x.replace("_thumb.png",".png").replace(".thumb.png",".png") for x in thumbList] 
rowCount=len(imageList) colCount=1 cohSnrTimeTable=wikiPage.wikiTable(rowCount+1,colCount) cohSnrTimeTable.data[0][0]="%s Coherent SNR Squared Times Series"%(wikiCoinc.ifos) for i,image in enumerate(imageList): cohSnrTimeTable.data[i+1][0]=wikiPage.linkedRemoteImage(image,thumbList[i]) wikiPage.insertTable(cohSnrTimeTable) else: sys.stdout.write("Warning: Coherent plotting jobs not found.\n") wikiPage.putText("Coherent Studies plots not found.\n") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#15 Segmentation Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in segmentation?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # #Additional Checklist Item wikiPage.subsection("#16 Calibration Stability") wikiPage.subsubsection("Question") wikiPage.putText("Is the candidate stable against changes in calibration that are consistent with systematic uncertainties?") wikiPage.subsubsection("Answer") wikiPage.putText("Edit Here") wikiPage.subsubsection("Relevant Information") wikiPage.putText("Plots and pipeline data go here!") wikiPage.subsubsection("Investigator Comments") wikiPage.putText("Edit Here") wikiPage.insertHR() # # |
|
if int(gpstime)<=endOfS5: | if int(gpstime)<=endOfS5 or ifo=="V1": | def __patchFrameTypeDef__(frametype=None,ifo=None,gpstime=None): """ Temporary patch function, to adjust specfied frame type used in searching the filesystem for files to display in followup. """ if frametype == None: raise Exception, "input to __patchFrameTypeDef__ included a \ frametype argument specified as None\n" return None if gpstime == None: raise Warning, "input to __patchFrameTypeDef__ included a \ gps time argument specified as None\n" return frametype if ifo == None: raise Warning, "input to __patchFrameTypeDef__ included an \ ifo argument specified as None\n" return frametype endOfS5=int(875232014) if int(gpstime)<=endOfS5: if not frametype.lower().startswith(ifo.lower()): return ifo+"_"+frametype return frametype |
tm = date.XLALGPSToUTC(date.LIGOTimeGPS(grb.time)) | tm = date.XLALGPSToUTC(LIGOTimeGPS(grb.time)) | coldef = create_col(coldict['nolong']) |
if code>0: | if code>0 and len(err)>0: | def system_call(item, command, divert_output_to_log = True): """ Makes a system call. @params item: a text specifying the content of the text (e.g. number of the GRB the message is associated with) (see also 'info') @params command: the command to be executed on the bash @params divert_output_to_log: If this flag is set to True the output of the given command is automatically put into the log-file. If the output of some command itself is further used, like science segments, this flag must be set to False, so that the output is diverted where it should go. """ l = logfile_name() # put the command used into the log file info(item, ">>> "+command) # and the output (and error) of the command as well if divert_output_to_log: command_actual = command+' >>%s 2>>%s '%(l,l) else: command_actual = command +' 2>>%s '%l # perform the command code, out, err = external_call(command_actual) if code>0: info(item, "ERROR: " +err) |
output, error = internal_call(cmdtmp) | code, output, error = external_call(cmdtmp) | def make_cvs_copy(self, files, dest_dir): """ Copies all the files given in the list 'files' to dest_dir and creates a file 'cvs_versions.txt' in dest_dir containing the actual CVS version of the files @param files: list of files to be copied from self.input_dir @param dest_dir: destination directory """ |
if cp.has_option('followup-plotmcmc','burnin'): burnin = string.strip(cp.get('followup-plotmcmc','burnin')) | if cp.has_option('fu-plotmcmc','burnin'): burnin = string.strip(cp.get('fu-plotmcmc','burnin')) | def __init__(self,job,coinc,cp,opts,dag,ifo,ifonames,p_nodes): pipeline.CondorDAGNode.__init__(self,job) |
plot_routine = string.strip(cp.get('followup-plotmcmc','plot_routine')) executable = string.strip(cp.get('followup-plotmcmc','executable')) | plot_routine = string.strip(cp.get('fu-plotmcmc','plot_routine')) executable = string.strip(cp.get('fu-plotmcmc','executable')) | def __init__(self,job,coinc,cp,opts,dag,ifo,ifonames,p_nodes): pipeline.CondorDAGNode.__init__(self,job) |
def fisher_rvs(mu, sigma, size=None): | def fisher_rvs(mu, sigma, size=1): | def fisher_rvs(mu, sigma, size=None): """ Return a random (polar, azimuthal) angle drawn from the Fisher distribution. Assume that the concentration parameter (kappa) is large so that we can use a Rayleigh distribution about the north pole and rotate it to be centered at the (polar, azimuthal) coordinate mu. Assume kappa = 1 / sigma**2 pol PDF: kappa / (2 * np.sinh(kappa)) * np.exp(kappa * np.cos(theta)) * np.sin(theta)) az PDF: uniform(0, 2*pi) """ rayleigh_rv = \ np.array((np.random.rayleigh(scale=sigma, size=size), np.random.uniform(low=0, high=2*LAL_PI, size=size)))\ .reshape((2, size)).T # guarantee 2D and transpose a, b = new_z_to_euler(mu) return rotate_euler(rayleigh_rv, a, b, 0) |
try: cPickle.dump(self.__backgroundDict__,file(pickleLocale,'w')) except: sys.stdout.write("Problem saving pickle of DQ information.") sys.stdout.write("Trying to place pickle in your home directory.") | if not backgroundPickle: | def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"\ %(ifoEpochList,type(ifoEpochList)) #Make sure epoch exists for reach ifo for ifo,epoch in ifoEpochList: if ifo not in runEpochs.keys(): raise Exception, "Bad ifo specified, %s"%ifo if epoch not in runEpochs[ifo].keys(): raise Exception, "Bad ifo epoch specified, %s:%s"%(ifo,epoch) #If pickle location given try to load that pickle first. backgroundPickle=False if pickleLocale!=None: #If pickle file exists read it if not make sure we can #generate it properly otherwise skip creating background if os.path.isfile(pickleLocale): try: self.__backgroundDict__=cPickle.load(file(pickleLocale,'r')) backgroundPickle=True except: backgroundPickle=False sys.stderr.write("Error importing the pickle file! %s\n"\ %(pickleLocale)) return for (ifo,epoch) in ifoEpochList: if (ifo.upper().strip(),epoch.upper().strip()) \ not in self.__backgroundDict__["ifoepoch"]: raise Exception,\ "Invalid ifo and epoch information in \ |
cPickle.dump(self.__backgroundDict__, file(home_dir()+"/"+os.path.basename(pickleLocale),'w')) | cPickle.dump(self.__backgroundDict__,file(pickleLocale,'w')) | def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"\ %(ifoEpochList,type(ifoEpochList)) #Make sure epoch exists for reach ifo for ifo,epoch in ifoEpochList: if ifo not in runEpochs.keys(): raise Exception, "Bad ifo specified, %s"%ifo if epoch not in runEpochs[ifo].keys(): raise Exception, "Bad ifo epoch specified, %s:%s"%(ifo,epoch) #If pickle location given try to load that pickle first. backgroundPickle=False if pickleLocale!=None: #If pickle file exists read it if not make sure we can #generate it properly otherwise skip creating background if os.path.isfile(pickleLocale): try: self.__backgroundDict__=cPickle.load(file(pickleLocale,'r')) backgroundPickle=True except: backgroundPickle=False sys.stderr.write("Error importing the pickle file! %s\n"\ %(pickleLocale)) return for (ifo,epoch) in ifoEpochList: if (ifo.upper().strip(),epoch.upper().strip()) \ not in self.__backgroundDict__["ifoepoch"]: raise Exception,\ "Invalid ifo and epoch information in \ |
sys.stdout.write("Really ignoring pickle generation now!\n") | sys.stdout.write("Problem saving pickle of DQ information.") sys.stdout.write("Trying to place pickle in your home directory.") try: cPickle.dump(self.__backgroundDict__, file(home_dir()+"/"+os.path.basename(pickleLocale),'w')) except: sys.stdout.write("Really ignoring pickle generation now!\n") | def createDQbackground(self,ifoEpochList=list(),pickleLocale=None): """ Two inputs a list of tuples (ifo,epochname) for each instrument. Also a place to save the potential pickle to for quick access later. """ if type(ifoEpochList) != type(list()): raise Exception, \ "Invalid input argument ifoEpochList,%s type(%s)"\ %(ifoEpochList,type(ifoEpochList)) #Make sure epoch exists for reach ifo for ifo,epoch in ifoEpochList: if ifo not in runEpochs.keys(): raise Exception, "Bad ifo specified, %s"%ifo if epoch not in runEpochs[ifo].keys(): raise Exception, "Bad ifo epoch specified, %s:%s"%(ifo,epoch) #If pickle location given try to load that pickle first. backgroundPickle=False if pickleLocale!=None: #If pickle file exists read it if not make sure we can #generate it properly otherwise skip creating background if os.path.isfile(pickleLocale): try: self.__backgroundDict__=cPickle.load(file(pickleLocale,'r')) backgroundPickle=True except: backgroundPickle=False sys.stderr.write("Error importing the pickle file! %s\n"\ %(pickleLocale)) return for (ifo,epoch) in ifoEpochList: if (ifo.upper().strip(),epoch.upper().strip()) \ not in self.__backgroundDict__["ifoepoch"]: raise Exception,\ "Invalid ifo and epoch information in \ |
if paramnames(parnum)=='mchirp': return inj.mchirp if paramname(parnum)=='eta': return inj.eta if paramname(parnum)=='time': return inj.get_end() if paramname(parnum)=='phi0': return inj.phi0 if paramname(parnum)=='dist': return inj.distance if paramname(parnum)=='RA': return inj.longitude if paramname(parnum)=='dec': return inj.latitude if paramname(parnum)=='psi': return inj.polarization if paramname(parnum)=='iota': return inj.inclination | if paramnames[parnum]=='mchirp': return inj.mchirp if paramnames[parnum]=='eta': return inj.eta if paramnames[parnum]=='time': return inj.get_end() if paramnames[parnum]=='phi0': return inj.phi0 if paramnames[parnum]=='dist': return inj.distance if paramnames[parnum]=='RA': return inj.longitude if paramnames[parnum]=='dec': return inj.latitude if paramnames[parnum]=='psi': return inj.polarization if paramnames[parnum]=='iota': return inj.inclination | def getinjpar(inj,parnum): if paramnames(parnum)=='mchirp': return inj.mchirp if paramname(parnum)=='eta': return inj.eta if paramname(parnum)=='time': return inj.get_end() if paramname(parnum)=='phi0': return inj.phi0 if paramname(parnum)=='dist': return inj.distance if paramname(parnum)=='RA': return inj.longitude if paramname(parnum)=='dec': return inj.latitude if paramname(parnum)=='psi': return inj.polarization if paramname(parnum)=='iota': return inj.inclination return None |
if injection and getinjpar(injection,paramnames.index('mchirp'))<max(pos[:,paranmanes.index('mchirp')]) and getinjpar(injection,paramnames.index('mchirp'))>min(pos[:,paramnames.index('mchirp')]) and getinjpar(injection,paramnames.index('eta'))>min(pos[:,paramnames.index('eta')]) and getinjpar(injection,paramnames.index('eta'))<max(pos[:,paramnames.index('eta')]): | if injection and getinjpar(injection,paramnames.index('mchirp'))<max(pos[:,paramnames.index('mchirp')]) and getinjpar(injection,paramnames.index('mchirp'))>min(pos[:,paramnames.index('mchirp')]) and getinjpar(injection,paramnames.index('eta'))>min(pos[:,paramnames.index('eta')]) and getinjpar(injection,paramnames.index('eta'))<max(pos[:,paramnames.index('eta')]): | def plot2Dkernel(xdat,ydat,Nx,Ny): xax=linspace(min(xdat),max(xdat),Nx) yax=linspace(min(ydat),max(ydat),Ny) x,y=numpy.meshgrid(xax,yax) samp=array([xdat,ydat]) kde=stats.kde.gaussian_kde(samp) grid_coords = numpy.append(x.reshape(-1,1),y.reshape(-1,1),axis=1) z = kde(grid_coords.T) z = z.reshape(Nx,Ny) asp=xax.ptp()/yax.ptp() |
xmldoc,digest = ligolw_utils.load_fileobj(file) | xmldoc,digest = utils.load_fileobj(file) | def fromsegmentxml(file): """ Read a segmentlist from the file object file containing an xml segment table. """ xmldoc,digest = ligolw_utils.load_fileobj(file) seg_table = table.get_table(xmldoc,lsctables.SegmentTable.tableName) segs = segmentlist() for seg in seg_table: segs.append(segment(seg.start_time,seg.end_time)) segs = segs.coalesce() return segs |
def fromsegmentcsvCSV(csvfile): | def fromsegmentcsv(csvfile): | def fromsegmentcsvCSV(csvfile): """ Read a segmentlist from the file object file containing a comma separated list of segments. """ def CSVLineToSeg(line): tstart, tend = map(int, line.split(',')) return segment(tstart, tend) segs = segmentlist([CSVLineToSeg(line) for line in csvfile]) return segs.coalesce() |
exe = make_external_call('which ligolw_segment_query')[0] segment_cmd = ' '.join([exe,'--query-segments',\ '--database','--include-segments',flag,\ '--gps-start-time',str(start),\ '--gps-end-time',str(end)]) segxmlout,segerr = make_external_call(segment_cmd) segs = segmentlist() if not segerr: tmpfile = tempfile.TemporaryFile() tmpfile.write(segxmlout) tmpfile.seek(0) segs = fromsegmentxml(tmpfile) | start = int(start) end = int(end) database_location = os.environ['S6_SEGMENT_SERVER'] connection = segmentdb_utils.setup_database(database_location) engine = query_engine.LdbdQueryEngine(connection) spec = flag.split(':') if len(spec) < 2 or len(spec) > 3: print >>sys.stderr, "Included segements must be of the form ifo:name:version or ifo:name:*" sys.exit(1) ifo = spec[0] name = spec[1] if len(spec) is 3 and spec[2] is not '*': version = int(spec[2]) if version < 1: print >>sys.stderr, "Segment version numbers must be greater than zero" sys.exit(1) | def grab_segments(start,end,flag): """ Returns a segmentlist containing the segments during which the given flag was active in the given period. """ exe = make_external_call('which ligolw_segment_query')[0] #== construct segment query segment_cmd = ' '.join([exe,'--query-segments',\ '--database','--include-segments',flag,\ '--gps-start-time',str(start),\ '--gps-end-time',str(end)]) #== run segment query segxmlout,segerr = make_external_call(segment_cmd) segs = segmentlist() if not segerr: tmpfile = tempfile.TemporaryFile() tmpfile.write(segxmlout) tmpfile.seek(0) segs = fromsegmentxml(tmpfile) else: print >>sys.stderr, "Warning: Call to ligolw_segment_query failed with "+\ "command:" print >>sys.stderr, "\n"+segment_cmd+"\n" return segs |
print >>sys.stderr, "Warning: Call to ligolw_segment_query failed with "+\ "command:" print >>sys.stderr, "\n"+segment_cmd+"\n" | version = '*' segdefs = segmentdb_utils.expand_version_number(engine,(ifo,name,version,\ start,end,0,0)) segs = segmentdb_utils.query_segments(engine, 'segment', segdefs) segs = reduce(operator.or_, segs).coalesce() | def grab_segments(start,end,flag): """ Returns a segmentlist containing the segments during which the given flag was active in the given period. """ exe = make_external_call('which ligolw_segment_query')[0] #== construct segment query segment_cmd = ' '.join([exe,'--query-segments',\ '--database','--include-segments',flag,\ '--gps-start-time',str(start),\ '--gps-end-time',str(end)]) #== run segment query segxmlout,segerr = make_external_call(segment_cmd) segs = segmentlist() if not segerr: tmpfile = tempfile.TemporaryFile() tmpfile.write(segxmlout) tmpfile.seek(0) segs = fromsegmentxml(tmpfile) else: print >>sys.stderr, "Warning: Call to ligolw_segment_query failed with "+\ "command:" print >>sys.stderr, "\n"+segment_cmd+"\n" return segs |
"a60dt60dD": "real_4", "a90dt90dD": "real_4", | "a60rank": "real_4", "a90rank": "real_4", | def get_coincs_from_coire(self,files,stat='snr'): """ uses CoincInspiralUtils to get data from old-style (coire'd) coincs """ coincTrigs = CoincInspiralUtils.coincInspiralTable() inspTrigs = SnglInspiralUtils.ReadSnglInspiralFromFiles(files, \ mangle_event_id = True,verbose=None) statistic = CoincInspiralUtils.coincStatistic(stat,None,None) coincTrigs = CoincInspiralUtils.coincInspiralTable(inspTrigs,statistic) |
def populate_SkyLocTable(skyloctable,coinc,adt60,adt90,adt60dD60,adt90dD90,\ | def populate_SkyLocTable(skyloctable,coinc,adt60,adt90,arank60,arank90,\ | def populate_SkyLocTable(skyloctable,coinc,adt60,adt90,adt60dD60,adt90dD90,\ pt,grid_fname,skymap_fname=None): """ populate a row in a skyloctable """ row = skyloctable.RowType() row.end_time = coinc.time row.set_ifos(coinc.ifo_list) rhosquared = 0.0 for ifo in coinc.ifo_list: rhosquared += coinc.snr[ifo]*coinc.snr[ifo] row.comb_snr = sqrt(rhosquared) row.dec,row.ra = pt[0],pt[1] row.a60dt = adt60 row.a90dt = adt90 row.a60dt60dD = adt60dD60 row.a90dt90dD = adt90dD90 row.min_eff_distance = min(effD for effD in coinc.eff_distances.values()) if skymap_fname: row.skymap = os.path.basename(str(skymap_fname)) else: row.skymap = skymap_fname row.grid = os.path.basename(str(grid_fname)) skyloctable.append(row) |
row.a60dt60dD = adt60dD60 row.a90dt90dD = adt90dD90 | row.a60rank = arank60 row.a90rank = arank90 | def populate_SkyLocTable(skyloctable,coinc,adt60,adt90,adt60dD60,adt90dD90,\ pt,grid_fname,skymap_fname=None): """ populate a row in a skyloctable """ row = skyloctable.RowType() row.end_time = coinc.time row.set_ifos(coinc.ifo_list) rhosquared = 0.0 for ifo in coinc.ifo_list: rhosquared += coinc.snr[ifo]*coinc.snr[ifo] row.comb_snr = sqrt(rhosquared) row.dec,row.ra = pt[0],pt[1] row.a60dt = adt60 row.a90dt = adt90 row.a60dt60dD = adt60dD60 row.a90dt90dD = adt90dD90 row.min_eff_distance = min(effD for effD in coinc.eff_distances.values()) if skymap_fname: row.skymap = os.path.basename(str(skymap_fname)) else: row.skymap = skymap_fname row.grid = os.path.basename(str(grid_fname)) skyloctable.append(row) |
while True: | while areasize<len(np.asarray(toppoints)[:,3]): | def greedyBin2(posterior_array,par_bins,confidence_levels,par_names=None,injection=None): if par_names: par1_name,par2_name=par_names else: par1_name="Parameter 1" par2_name="Parameter 2" par1pos=posterior_array[:,0] par2pos=posterior_array[:,1] par1_bin,par2_bin=par_bins if injection: par1_injvalue,par2_injvalue=injection twoDGreedyCL={} twoDGreedyInj={} #Create 2D bin array par1pos_min=min(par1pos) par2pos_min=min(par2pos) par1pos_max=max(par1pos) par2pos_max=max(par2pos) par1pos_Nbins= int(ceil((par1pos_max - par1pos_min)/par1_bin))+1 par2pos_Nbins= int(ceil((par2pos_max - par2pos_min)/par2_bin))+1 greedyHist = np.zeros(par1pos_Nbins*par2pos_Nbins,dtype='i8') greedyPoints = np.zeros((par1pos_Nbins*par2pos_Nbins,2)) #Fill bin values par1_point=par1pos_min par2_point=par2pos_min for i in range(par2pos_Nbins): par1_point=par1pos_min for j in range(par1pos_Nbins): greedyPoints[j+par1pos_Nbins*i,0]=par1_point greedyPoints[j+par1pos_Nbins*i,1]=par2_point par1_point+=par1_bin par2_point+=par2_bin injbin=None #if injection point given find which bin its in if injection: if par1_injvalue is not None and par2_injvalue is not None: par1_binNumber=floor((par1_injvalue-par1pos_min)/par1_bin) par2_binNumber=floor((par2_injvalue-par2pos_min)/par2_bin) injbin=int(par1_binNumber+par2_binNumber*par1pos_Nbins) elif par1_injvalue is None and par2_injvalue is not None: print "Injection value not found for %s!"%par1_name elif par1_injvalue is not None and par2_injvalue is None: print "Injection value not found for %s!"%par2_name #Bin posterior samples for par1_samp,par2_samp in zip(par1pos,par2pos): par1_binNumber=floor((par1_samp-par1pos_min)/par1_bin) par2_binNumber=floor((par2_samp-par2pos_min)/par2_bin) greedyHist[par1_binNumber+par2_binNumber*par1pos_Nbins]+=1 #Now call usual confidence level function #print greedyHist,greedyPoints,injbin,sqrt(par1_bin*par2_bin),confidence_levels,len(par1pos) (injectionconfidence,toppoints,reses)=calculateConfidenceLevels(greedyHist,greedyPoints,injbin,float(sqrt(par1_bin*par2_bin)),confidence_levels,int(len(par1pos))) #Print confidence levels to file areastr='' for (frac,area) in reses: areastr+='%s,'%str(area) twoDGreedyCL[str(frac)]=area areastr=areastr.rstrip(',') if injection is not None and injectionconfidence is not None: twoDGreedyInj['confidence']=injectionconfidence #Recover area contained within injection point interval areasize=0 while True: if injectionconfidence<np.asarray(toppoints)[areasize,3]: break areasize+=1 areasize=areasize*par1_bin*par2_bin twoDGreedyInj['area']=areasize return toppoints,injectionconfidence,twoDGreedyCL,twoDGreedyInj |
self.coincs = [tuple(event.event_id for event in sorted(double, lambda a, b: cmp(a.ifo, b.ifo))) for double in CoincidentNTuples(eventlists, event_comparefunc, offset_instruments, thresholds, verbose = verbose)] self.coincs.sort() | self.coincs = sorted(tuple(event.event_id for event in sorted(double, lambda a, b: cmp(a.ifo, b.ifo))) for double in CoincidentNTuples(eventlists, event_comparefunc, offset_instruments, thresholds, verbose = verbose)) | def get_coincs(self, eventlists, event_comparefunc, thresholds, verbose = False): # # has this node already been visited? if so, return the # answer we already know # |
self.unused_coincs = reduce(lambda a, b: a & b, (component.unused_coincs for component in self.components)) | reduce(lambda a, b: a | b, (set(component.coincs) for component in self.components)) | self.unused_coincs = reduce(lambda a, b: a | b, (set(component.get_coincs(eventlists, event_comparefunc, thresholds, verbose = verbose)) for component in self.components)) self.unused_coincs |= reduce(lambda a, b: a & b, (component.unused_coincs for component in self.components)) | def get_coincs(self, eventlists, event_comparefunc, thresholds, verbose = False): # # has this node already been visited? if so, return the # answer we already know # |
return re.sub(r"([+-]?[.0-9]+)[Ee]?([+-]?[0-9]+)", r"\1 \\times 10^{\2}", s) | m, e = floatpattern.match(s).groups() return r"%s \\times 10^{%d}" % (m, int(e)) | def latexnumber(s): """ Convert a string of the form "d.dddde-dd" to "d.dddd \times 10^{-dd}" """ return re.sub(r"([+-]?[.0-9]+)[Ee]?([+-]?[0-9]+)", r"\1 \\times 10^{\2}", s) |
mcmcfilelist += node.outputName' | mcmcfilelist += node.outputName | def __init__(self,job,coinc,cp,opts,dag,ifo,ifonames,p_nodes): pipeline.CondorDAGNode.__init__(self,job) |
cp.set("fu-condor","mcmc", self.which("lalapps_spinspiral")) | cp.set("fu-condor","spinmcmc", self.which("lalapps_spinspiral")) | def __init__(self, configfile=None): cp = ConfigParser.ConfigParser() self.cp = cp self.time_now = "_".join([str(i) for i in time_method.gmtime()[0:6]]) self.ini_file=self.time_now + ".ini" home_base = home_dirs() # CONDOR SECTION NEEDED BY THINGS IN INSPIRAL.PY cp.add_section("condor") cp.set("condor","datafind",self.which("ligo_data_find")) cp.set("condor","inspiral",self.which("lalapps_inspiral")) cp.set("condor","chia", self.which("lalapps_coherent_inspiral")) cp.set("condor","universe","standard") # SECTIONS TO SHUT UP WARNINGS cp.add_section("inspiral") cp.add_section("data") # DATAFIND SECTION cp.add_section("datafind") |
cp.set("makeCheckListWiki","ini-file",self.ini_file) | cp.set("makeCheckListWiki","ini-file",os.path.abspath(self.ini_file)) | def __init__(self, configfile=None): cp = ConfigParser.ConfigParser() self.cp = cp self.time_now = "_".join([str(i) for i in time_method.gmtime()[0:6]]) self.ini_file=self.time_now + ".ini" home_base = home_dirs() # CONDOR SECTION NEEDED BY THINGS IN INSPIRAL.PY cp.add_section("condor") cp.set("condor","datafind",self.which("ligo_data_find")) cp.set("condor","inspiral",self.which("lalapps_inspiral")) cp.set("condor","chia", self.which("lalapps_coherent_inspiral")) cp.set("condor","universe","standard") # SECTIONS TO SHUT UP WARNINGS cp.add_section("inspiral") cp.add_section("data") # DATAFIND SECTION cp.add_section("datafind") |
if trigger.chisq_h1 < 4 or trigger.chisq_l < 4: | if trigger.chisq_h1 < 4 or trigger.chisq_v < 4: | def get_signal_vetoes(trigger,bankq=0,bankn=0,autoq=0,auton=0,chiq=0,chin=0,sigmaVals = None,fResp = None): sbvs = {} q = bankq nhigh = bankn q2 = autoq nhigh2 = auton if trigger.chisq == 0: sbvs['BestNR1'] = 0 else: if trigger.chisq < 60: sbvs['BestNR1'] = trigger.snr else: sbvs['BestNR1'] = trigger.snr/((1 + (trigger.chisq/60.)**(chiq/chin))/2.)**(1./chiq) |
df = float((event1.central_freq + 0.5*event1.bandwidth - event2.central_freq - 0.5*event2.bandwidth)/(event1.central_freq + 0.5*event1.bandwidth + event2.central_freq + 0.5*event2.bandwidth)) | f_cut1 = event1.central_freq + event1.bandwidth / 2 f_cut2 = event2.central_freq + event2.bandwidth / 2 df = float((f_cut1 - f_cut2) / (f_cut1 + f_cut2)) | def coinc_params_func(events, offsetvector): # # check for coincs that have been vetoed entirely # if len(events) < 2: return None params = {} # # zero-instrument parameters # params["nevents"] = (len(events),) # # one-instrument parameters # for event in events: prefix = "%s_" % event.ifo params["%ssnr2_chi2" % prefix] = (event.snr**2.0, event.chisq / event.chisq_dof) # # two-instrument parameters # for event1, event2 in iterutils.choices(sorted(events, key = lambda event: event.ifo), 2): assert event1.ifo != event2.ifo prefix = "%s_%s_" % (event1.ifo, event2.ifo) dt = float((event1.get_peak() + offsetvector[event1.ifo]) - (event2.get_peak() + offsetvector[event2.ifo])) params["%sdt" % prefix] = (dt,) dA = math.log10(abs(event1.amplitude / event2.amplitude)) params["%sdA" % prefix] = (dA,) df = float((event1.central_freq + 0.5*event1.bandwidth - event2.central_freq - 0.5*event2.bandwidth)/(event1.central_freq + 0.5*event1.bandwidth + event2.central_freq + 0.5*event2.bandwidth)) params["%sdf" % prefix] = (df,) # # done # return params |
def get_coincparamsdistributions(xmldoc): | def get_coincparamsdistributions(xmldoc, seglists = None): | def get_coincparamsdistributions(xmldoc): coincparamsdistributions, process_id = ligolw_burca_tailor.coinc_params_distributions_from_xml(xmldoc, u"string_cusp_likelihood") return coincparamsdistributions |
def load_likelihood_data(filenames, verbose = False): | def load_likelihood_data(filenames, seglists = None, verbose = False): | def load_likelihood_data(filenames, verbose = False): coincparamsdistributions = None for n, filename in enumerate(filenames): if verbose: print >>sys.stderr, "%d/%d:" % (n + 1, len(filenames)), xmldoc = utils.load_filename(filename, gz = (filename or "stdin").endswith(".gz"), verbose = verbose) if coincparamsdistributions is None: coincparamsdistributions = get_coincparamsdistributions(xmldoc) else: coincparamsdistributions += get_coincparamsdistributions(xmldoc) xmldoc.unlink() return coincparamsdistributions |
coincparamsdistributions = get_coincparamsdistributions(xmldoc) | coincparamsdistributions = get_coincparamsdistributions(xmldoc, seglists = seglists) | def load_likelihood_data(filenames, verbose = False): coincparamsdistributions = None for n, filename in enumerate(filenames): if verbose: print >>sys.stderr, "%d/%d:" % (n + 1, len(filenames)), xmldoc = utils.load_filename(filename, gz = (filename or "stdin").endswith(".gz"), verbose = verbose) if coincparamsdistributions is None: coincparamsdistributions = get_coincparamsdistributions(xmldoc) else: coincparamsdistributions += get_coincparamsdistributions(xmldoc) xmldoc.unlink() return coincparamsdistributions |
coincparamsdistributions += get_coincparamsdistributions(xmldoc) | coincparamsdistributions += get_coincparamsdistributions(xmldoc, seglists = seglists) | def load_likelihood_data(filenames, verbose = False): coincparamsdistributions = None for n, filename in enumerate(filenames): if verbose: print >>sys.stderr, "%d/%d:" % (n + 1, len(filenames)), xmldoc = utils.load_filename(filename, gz = (filename or "stdin").endswith(".gz"), verbose = verbose) if coincparamsdistributions is None: coincparamsdistributions = get_coincparamsdistributions(xmldoc) else: coincparamsdistributions += get_coincparamsdistributions(xmldoc) xmldoc.unlink() return coincparamsdistributions |
pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable) self.setupJob(name=self.name,dir=dir,cp=cp,tag_base=tag_base) | self.setupJob(name=self.name,dir=dir,cp=cp,tag_base=tag_base) | def __init__(self,opts,cp,dir='',tag_base=''): """ """ self.__executable = string.strip(cp.get('fu-condor','plotmcmc')) self.name = os.path.split(self.__executable.rstrip('/'))[1] self.__universe = "vanilla" |
os.path.join("bin", "search_volume_by_s1_s2"), | def run(self): # remove the automatically generated user env scripts for script in ["pylal-user-env.sh", "pylal-user-env.csh"]: log.info("removing " + script ) try: os.unlink(os.path.join("etc", script)) except: pass |
|
self.FAR = -1 | self.FAR = 99 | def __init__(self): """ here are all the things we need """ #start with data needed for every coinc self.ifo_list = [] self.ifo_coincs = [] self.snr = {} self.gps = {} self.eff_distances = {} self.mass1 = {} self.mass2 = {} self.time = None self.FAR = -1 #this stuff is only needed for injections self.is_injection = False self.latitude_inj = None self.longitude_inj = None self.mass1_inj = None self.mass2_inj = None self.distance_inj = None self.eff_distances_inj = {} |
if ctab[0].false_alarm_rate is not None: coinc.set_FAR(ctab[0].false_alarm_rate) | def get_coincs_from_coinctable(self,files): """ read data from coinc tables (xml format) FIXME: currently assumes one coinc per file!!! """ for file in files: coinc = CoincData() xmldoc = utils.load_filename(file) sngltab = tab.get_table(xmldoc,lsctables.SnglInspiralTable.tableName) coinc.set_snr(dict((row.ifo, row.snr) for row in sngltab)) coinc.set_gps(dict((row.ifo, LIGOTimeGPS(row.get_end())) for row in sngltab)) coinc.set_effDs(dict((row.ifo,row.eff_distance) for row in sngltab)) coinc.set_masses(dict((row.ifo, row.mass1) for row in sngltab), \ dict((row.ifo, row.mass2) for row in sngltab)) ctab = tab.get_table(xmldoc,lsctables.CoincInspiralTable.tableName) coinc.set_ifos(list(ctab[0].get_ifos())) try: simtab = tab.get_table(xmldoc,lsctables.SimInspiralTable.tableName) row = siminsptab[0] effDs_inj = {} for ifo in coinc.ifo_list: if ifo == 'H1': effDs_inj[ifo] = row.eff_dist_h elif ifo == 'L1': effDs_inj[ifo] = row.eff_dist_l elif ifo == 'V1': effDs_inj[ifo] = row.eff_dist_v dist_inj = row.distance coinc.set_inj_params(row.latitude,row.longitude,row.mass1,row.mass2, \ dist_inj,effDs_inj) coinc.is_injection = True #FIXME: name the exception! except: pass |
|
coincTrigs = CoincInspiralUtils.coincInspiralTable(inspTrigs,'snr') | statistic = CoincInspiralUtils.coincStatistic('snr',None,None) coincTrigs = CoincInspiralUtils.coincInspiralTable(inspTrigs,statistic) | def get_coincs_from_coire(self,files): """ uses CoincInspiralUtils to get data from old-style (coire'd) coincs """ coincTrigs = CoincInspiralUtils.coincInspiralTable() inspTrigs = SnglInspiralUtils.ReadSnglInspiralFromFiles(files, \ mangle_event_id = True,verbose=None) #note that it's hardcoded to use snr as the statistic coincTrigs = CoincInspiralUtils.coincInspiralTable(inspTrigs,'snr') try: inspInj = SimInspiralUtils.ReadSimInspiralFromFiles(files) coincTrigs.add_sim_inspirals(inspInj) #FIXME: name the exception! except: pass |
cacheSelected = cacheSelected.sieve(segment=segments.segment(math.floor(float(time)), math.ceil(float(time)))) | if math.floor(float(time)) != math.ceil(float(time)): cacheSelected = cacheSelected.sieve(segment=segments.segment(math.floor(float(time)), math.ceil(float(time)))) else: cacheSelected = cacheSelected.sieve(segment=segments.segment(math.floor(float(time))-0.5, math.floor(float(time))+0.5)) | def getParamsFromCache(fileName,type,ifo=None,time=None): qscanList = [] cacheList = lal.Cache.fromfile(open(fileName)) if not cacheList: return qscanList cacheSelected = cacheList.sieve(description=type,ifos=ifo) if time: cacheSelected = cacheSelected.sieve(segment=segments.segment(math.floor(float(time)), math.ceil(float(time)))) for cacheEntry in cacheSelected: path_output = cacheEntry.path() time_output = str(cacheEntry.segment[0]) type_output = cacheEntry.description ifo_output = cacheEntry.observatory qscanList.append([path_output,time_output,type_output,ifo_output]) return qscanList |
if re.search( table_name+r'[.]', table_param ) is None: | if table_param.find( table_name+'.' ) == -1: | def __init__( self, table_name, table_param, param_ranges_opt, verbose = False ): """ Parse --param-ranges option. Creates self.param which is the table_name and the table_param appended together (with a '.') and self.param_ranges, which is a list of tuples that give the lower parameter value, whether it is an open or closed boundary, and the same for the upper parameter. For example, if table_name is coinc_inspiral, table_param is mchirp and param_ranges_opt is '[2,8);[8,17]' will get: self.param = 'coinc_inspiral.mchirp' self.param_ranges = [ ( ('>=',2.0), ('<',8.0) ), ( ('>=',8.0), ('<=', 17.0) ) ] |
def __init__(self, xmldoc, b_b_def, sb_b_def, si_b_def, sb_c_e_def, sb_c_n_def, si_c_e_def, si_c_n_def, process): | def __init__(self, xmldoc, b_b_def, sb_b_def, si_b_def, sb_c_e_def, sb_c_n_def, si_c_e_def, si_c_n_def, process, livetime_program): | def __init__(self, xmldoc, b_b_def, sb_b_def, si_b_def, sb_c_e_def, sb_c_n_def, si_c_e_def, si_c_n_def, process): # # store the process row # |
self.burst_peak_time_window = 6.378140e6 / 299792458 * 1.25 if len(self.snglbursttable): self.burst_peak_time_window += max(self.snglbursttable.getColumnByName("duration")) self.coinc_peak_time_window = self.burst_peak_time_window + SimBurstUtils.burst_is_near_injection_window def bursts_near_peaktime(self, t): | def bursts_near_peaktime(self, t, window): | def __init__(self, xmldoc, b_b_def, sb_b_def, si_b_def, sb_c_e_def, sb_c_n_def, si_c_e_def, si_c_n_def, process): # # store the process row # |
within self.burst_peak_time_window of t. | within window seconds of t. This is not used to define any coincidences, only to provide a short list of burst events for use in more costly comparison tests. | def bursts_near_peaktime(self, t): """ Return a list of the burst events whose peak times are within self.burst_peak_time_window of t. """ return self.snglbursttable[bisect.bisect_left(self.snglbursttable, t - self.burst_peak_time_window):bisect.bisect_right(self.snglbursttable, t + self.burst_peak_time_window)] |
return self.snglbursttable[bisect.bisect_left(self.snglbursttable, t - self.burst_peak_time_window):bisect.bisect_right(self.snglbursttable, t + self.burst_peak_time_window)] def coincs_near_peaktime(self, t): | return self.snglbursttable[bisect.bisect_left(self.snglbursttable, t - window):bisect.bisect_right(self.snglbursttable, t + window)] def coincs_near_peaktime(self, t, window): | def bursts_near_peaktime(self, t): """ Return a list of the burst events whose peak times are within self.burst_peak_time_window of t. """ return self.snglbursttable[bisect.bisect_left(self.snglbursttable, t - self.burst_peak_time_window):bisect.bisect_right(self.snglbursttable, t + self.burst_peak_time_window)] |
which at least one burst event's peak time is within self.coinc_peak_time_window of t. | which at least one burst event's peak time is within window seconds of t. This is not used to define any coincidences, only to provide a short list of coinc events for use in more costly comparison tests. | def coincs_near_peaktime(self, t): """ Return a list of the (coinc_event_id, event list) tuples in which at least one burst event's peak time is within self.coinc_peak_time_window of t. """ # FIXME: this test does not consider the time slide # offsets that should be applied to the coinc, but for now # injections are done at zero lag so this isn't a problem # yet return [(coinc_event_id, events) for coinc_event_id, events in self.coincs if (t - self.coinc_peak_time_window <= events[-1].get_peak()) and (events[0].get_peak() <= t + self.coinc_peak_time_window)] |
return [(coinc_event_id, events) for coinc_event_id, events in self.coincs if (t - self.coinc_peak_time_window <= events[-1].get_peak()) and (events[0].get_peak() <= t + self.coinc_peak_time_window)] | near_events = set(self.bursts_near_peaktime(t, window)) return [(coinc_event_id, events) for coinc_event_id, events in self.coincs if set(events) & near_events] | def coincs_near_peaktime(self, t): """ Return a list of the (coinc_event_id, event list) tuples in which at least one burst event's peak time is within self.coinc_peak_time_window of t. """ # FIXME: this test does not consider the time slide # offsets that should be applied to the coinc, but for now # injections are done at zero lag so this isn't a problem # yet return [(coinc_event_id, events) for coinc_event_id, events in self.coincs if (t - self.coinc_peak_time_window <= events[-1].get_peak()) and (events[0].get_peak() <= t + self.coinc_peak_time_window)] |
Return False if the peak time of the injection sim lies within the time interval of burst. """ return SimBurstUtils.time_at_instrument(sim, burst.ifo) not in burst.get_period() | Return False (injection matches event) if an autocorrelation-width window centred on the injection is continuous with the time interval of the burst. """ tinj = SimBurstUtils.time_at_instrument(sim, burst.ifo) window = SimBurstUtils.stringcusp_autocorrelation_width / 2 return segments.segment(tinj - window, tinj + window).disjoint(burst.get_period()) | def StringCuspSnglCompare(sim, burst): """ Return False if the peak time of the injection sim lies within the time interval of burst. """ return SimBurstUtils.time_at_instrument(sim, burst.ifo) not in burst.get_period() |
Return False if the peak time and centre frequency of sim lie within the time-frequency tile of burst. """ return StringCuspSnglCompare(sim, burst) or (sim.frequency not in burst.get_band()) | Return False (injection matches event) if the peak time and centre frequency of sim lie within the time-frequency tile of burst. """ return (SimBurstUtils.time_at_instrument(sim, burst.ifo) not in burst.get_period()) or (sim.frequency not in burst.get_band()) | def ExcessPowerSnglCompare(sim, burst): """ Return False if the peak time and centre frequency of sim lie within the time-frequency tile of burst. """ return StringCuspSnglCompare(sim, burst) or (sim.frequency not in burst.get_band()) |
Return False if the peak time and centre frequency of sim lie within the time-frequency tile of burst. | Return False (injection matches event) if the time of the sim and the peak time of the burst event differ by less than or equal to delta_t seconds. | def OmegaSnglCompare(sim, burst, delta_t = 10.0): """ Return False if the peak time and centre frequency of sim lie within the time-frequency tile of burst. """ return abs(float(SimBurstUtils.time_at_instrument(sim, burst.ifo) - burst.get_peak())) > delta_t |
Return False if the peak time of the sim is "near" the burst event. """ return OmegaNearCoincCompare(sim, burst) | Return False (injection matches coinc) if the peak time of the sim is "near" the burst event. """ tinj = SimBurstUtils.time_at_instrument(sim, burst.ifo) window = SimBurstUtils.stringcusp_autocorrelation_width / 2 + SimBurstUtils.burst_is_near_injection_window return segments.segment(tinj - window, tinj + window).disjoint(burst.get_period()) | def StringCuspNearCoincCompare(sim, burst): """ Return False if the peak time of the sim is "near" the burst event. """ return OmegaNearCoincCompare(sim, burst) |
Return False if the peak time of the sim is "near" the burst event. | Return False (injection matches coinc) if the peak time of the sim is "near" the burst event. | def ExcessPowerNearCoincCompare(sim, burst): """ Return False if the peak time of the sim is "near" the burst event. """ return not SimBurstUtils.burst_is_near_injection(sim, burst.start_time, burst.start_time_ns, burst.duration, burst.ifo) |
Return False if the peak time of the sim is "near" the burst event. """ start_time = burst.get_peak() - burst.duration / 2.0 return not SimBurstUtils.burst_is_near_injection(sim, start_time.seconds, start_time.nanoseconds, burst.duration, burst.ifo) | Return False (injection matches coinc) if the peak time of the sim is "near" the burst event. """ return OmegaSnglCompare(sim, burst, delta_t = 20.0 + burst.duration / 2) | def OmegaNearCoincCompare(sim, burst): """ Return False if the peak time of the sim is "near" the burst event. """ start_time = burst.get_peak() - burst.duration / 2.0 return not SimBurstUtils.burst_is_near_injection(sim, start_time.seconds, start_time.nanoseconds, burst.duration, burst.ifo) |
def find_sngl_burst_matches(contents, sim, comparefunc): """ Scan the burst table for triggers matching sim. """ return [burst for burst in contents.bursts_near_peaktime(sim.get_time_geocent()) if not comparefunc(sim, burst)] | def find_sngl_burst_matches(contents, sim, comparefunc, sieve_window): """ Scan the burst table for triggers matching sim. sieve_window is used in a bisection search to quickly identify burst events within that many seconds of the injection's peak time at the geocentre; it should be larger than the greatest time difference that can separate a burst event's peak time from an injection's peak time at the geocentre and the two still be considered a match. """ return [burst for burst in contents.bursts_near_peaktime(sim.get_time_geocent(), sieve_window) if not comparefunc(sim, burst)] | def find_sngl_burst_matches(contents, sim, comparefunc): """ Scan the burst table for triggers matching sim. """ return [burst for burst in contents.bursts_near_peaktime(sim.get_time_geocent()) if not comparefunc(sim, burst)] |
def find_exact_coinc_matches(coincs, sim, comparefunc): | def find_exact_coinc_matches(coincs, sim, comparefunc, seglists): | def find_exact_coinc_matches(coincs, sim, comparefunc): """ Return a list of the coinc_event_ids of the burst<-->burst coincs in which all burst events match sim. """ # FIXME: this test does not consider the time slide offsets that # should be applied to the coinc, but for now injections are done # at zero lag so this isn't a problem yet return [coinc_event_id for coinc_event_id, events in coincs if True not in (bool(comparefunc(sim, event)) for event in events)] |
in which all burst events match sim. | in which all burst events match sim and to which all instruments on at the time of the sim contributed events. | def find_exact_coinc_matches(coincs, sim, comparefunc): """ Return a list of the coinc_event_ids of the burst<-->burst coincs in which all burst events match sim. """ # FIXME: this test does not consider the time slide offsets that # should be applied to the coinc, but for now injections are done # at zero lag so this isn't a problem yet return [coinc_event_id for coinc_event_id, events in coincs if True not in (bool(comparefunc(sim, event)) for event in events)] |
return [coinc_event_id for coinc_event_id, events in coincs if True not in (bool(comparefunc(sim, event)) for event in events)] | on_instruments = SimBurstUtils.on_instruments(sim, seglists) return set(coinc_event_id for coinc_event_id, events in coincs if on_instruments.issubset(set(event.ifo for event in events)) and not any(comparefunc(sim, event) for event in events)) | def find_exact_coinc_matches(coincs, sim, comparefunc): """ Return a list of the coinc_event_ids of the burst<-->burst coincs in which all burst events match sim. """ # FIXME: this test does not consider the time slide offsets that # should be applied to the coinc, but for now injections are done # at zero lag so this isn't a problem yet return [coinc_event_id for coinc_event_id, events in coincs if True not in (bool(comparefunc(sim, event)) for event in events)] |
return [coinc_event_id for coinc_event_id, events in coincs if False in (bool(comparefunc(sim, event)) for event in events)] | return set(coinc_event_id for coinc_event_id, events in coincs if not all(comparefunc(sim, event) for event in events)) | def find_near_coinc_matches(coincs, sim, comparefunc): """ Return a list of the coinc_event_ids of the burst<-->burst coincs in which at least one burst event matches sim. """ # FIXME: this test does not consider the time slide offsets that # should be applied to the coinc, but for now injections are done # at zero lag so this isn't a problem yet return [coinc_event_id for coinc_event_id, events in coincs if False in (bool(comparefunc(sim, event)) for event in events)] |
process = process | process = process, livetime_program = { "StringCusp": "StringSearch", "excesspower": "lalapps_power", "omega": None }[search] | si_c_n_def = si_c_n_def, |
events = find_sngl_burst_matches(contents, sim, snglcomparefunc) | events = find_sngl_burst_matches(contents, sim, snglcomparefunc, burst_peak_time_window) | si_c_n_def = si_c_n_def, |
coincs = contents.coincs_near_peaktime(sim.get_time_geocent()) coinc_event_ids = find_exact_coinc_matches(coincs, sim, snglcomparefunc) if coinc_event_ids: add_sim_coinc_coinc(contents, sim, coinc_event_ids, contents.sb_c_e_coinc_def_id) coinc_event_ids = find_near_coinc_matches(coincs, sim, nearcoinccomparefunc) if coinc_event_ids: add_sim_coinc_coinc(contents, sim, coinc_event_ids, contents.sb_c_n_coinc_def_id) | coincs = contents.coincs_near_peaktime(sim.get_time_geocent(), coinc_peak_time_window) exact_coinc_event_ids = find_exact_coinc_matches(coincs, sim, snglcomparefunc, contents.seglists) near_coinc_event_ids = find_near_coinc_matches(coincs, sim, nearcoinccomparefunc) assert exact_coinc_event_ids.issubset(near_coinc_event_ids) if exact_coinc_event_ids: add_sim_coinc_coinc(contents, sim, exact_coinc_event_ids, contents.sb_c_e_coinc_def_id) if near_coinc_event_ids: add_sim_coinc_coinc(contents, sim, near_coinc_event_ids, contents.sb_c_n_coinc_def_id) | si_c_n_def = si_c_n_def, |
coincs = contents.coincs_near_peaktime(sim.get_time_geocent()) coinc_event_ids = find_exact_coinc_matches(coincs, sim, snglcomparefunc) if coinc_event_ids: add_sim_coinc_coinc(contents, sim, coinc_event_ids, contents.si_c_e_coinc_def_id) coinc_event_ids = find_near_coinc_matches(coincs, sim, nearcoinccomparefunc) if coinc_event_ids: add_sim_coinc_coinc(contents, sim, coinc_event_ids, contents.si_c_n_coinc_def_id) | coincs = contents.coincs_near_peaktime(sim.get_time_geocent(), coinc_peak_time_window) exact_coinc_event_ids = find_exact_coinc_matches(coincs, sim, snglcomparefunc, contents.seglists) near_coinc_event_ids = find_near_coinc_matches(coincs, sim, nearcoinccomparefunc) assert exact_coinc_event_ids.issubset(near_coinc_event_ids) if exact_coinc_event_ids: add_sim_coinc_coinc(contents, sim, exact_coinc_event_ids, contents.si_c_e_coinc_def_id) if near_coinc_event_ids: add_sim_coinc_coinc(contents, sim, near_coinc_event_ids, contents.si_c_n_coinc_def_id) | si_c_n_def = si_c_n_def, |
sys.stdout.write("Found: %s\n",publication_directory) | sys.stdout.write("Found: %s\n" %(publication_directory)) | def __init__(self,type=None,ifo=None,time=None,snr=None,chisqr=None,mass1=None,mass2=None,mchirp=None): """ """ self.type=str(type) self.ifo=str(ifo) self.time=float(time) self.snr=float(snr) self.chisqr=float(chisqr) self.mass1=float(mass1) self.mass2=float(mass2) self.mchirp=float(mchirp) |
sys.stdout.write("Found: %s\n",publication_url) | sys.stdout.write("Found: %s\n" %(publication_url)) | def __init__(self,type=None,ifo=None,time=None,snr=None,chisqr=None,mass1=None,mass2=None,mchirp=None): """ """ self.type=str(type) self.ifo=str(ifo) self.time=float(time) self.snr=float(snr) self.chisqr=float(chisqr) self.mass1=float(mass1) self.mass2=float(mass2) self.mchirp=float(mchirp) |
coincTrigs = CoincInspiralUtils.coincInspiralTable(inspTriggers,'snr') | coincTrigs = CoincInspiralUtils.coincInspiralTable(inspTrigs,'snr') | def get_coincs_from_coire(self,files): """ uses CoincInspiralUtils to get data from old-style (coire'd) coincs """ coincTrigs = CoincInspiralUtils.coincInspiralTable() inspTrigs = SnglInspiralUtils.ReadSnglInspiralFromFiles(files, \ mangle_event_id = True,verbose=None) #note that it's hardcoded to use snr as the statistic coincTrigs = CoincInspiralUtils.coincInspiralTable(inspTriggers,'snr') try: inspInj = SimInspiralUtils.ReadSimInspiralFromFiles(files) coincTrigs.add_sim_inspirals(inspInj) #FIXME: name the exception! except: pass |
CODEPOINTS = {\n\ | def print_header(): print "\ |
|
print "],\nu'x%s':["%code[0], | if firsttime: firsttime = False print "CODEPOINTS = { \nu'x%s':["%code[0], else: print "],\nu'x%s':["%code[0], | def process_readings(): oucode = 0 olcode = 0 for line in open(source,'r'): items = line[:-1].split('\t') try: r = re.match(r'kKorea', items[1]) if r is not None: code = re.sub(r'U\+([0-9A-F]{2})([0-9A-F]{2})',r'\1\t\2',items[0]).split('\t') ucode = int(code[0],16) lcode = int(code[1],16) pron = items[2].split(' ')[0].capitalize() if oucode != ucode: print "],\nu'x%s':["%code[0], oucode = ucode olcode = -1 if (lcode - olcode) > 1: for i in range(lcode-olcode-1): print '"[?]",', olcode = lcode print '"'+pron+'",', except: continue |
print '"[?]",', | print "'',", | def process_readings(): oucode = 0 olcode = 0 for line in open(source,'r'): items = line[:-1].split('\t') try: r = re.match(r'kKorea', items[1]) if r is not None: code = re.sub(r'U\+([0-9A-F]{2})([0-9A-F]{2})',r'\1\t\2',items[0]).split('\t') ucode = int(code[0],16) lcode = int(code[1],16) pron = items[2].split(' ')[0].capitalize() if oucode != ucode: print "],\nu'x%s':["%code[0], oucode = ucode olcode = -1 if (lcode - olcode) > 1: for i in range(lcode-olcode-1): print '"[?]",', olcode = lcode print '"'+pron+'",', except: continue |
print '"'+pron+'",', | print "'"+pron+"',", | def process_readings(): oucode = 0 olcode = 0 for line in open(source,'r'): items = line[:-1].split('\t') try: r = re.match(r'kKorea', items[1]) if r is not None: code = re.sub(r'U\+([0-9A-F]{2})([0-9A-F]{2})',r'\1\t\2',items[0]).split('\t') ucode = int(code[0],16) lcode = int(code[1],16) pron = items[2].split(' ')[0].capitalize() if oucode != ucode: print "],\nu'x%s':["%code[0], oucode = ucode olcode = -1 if (lcode - olcode) > 1: for i in range(lcode-olcode-1): print '"[?]",', olcode = lcode print '"'+pron+'",', except: continue |
self.kakasi = CDLL("libkakasi") | self.kakasi = CDLL("\\kakasi\\lib\\kakasi") | def __init__(self): self.codepoints = CODEPOINTS self.codepoints.update(JACODES) |
pmap = { ord(u'â'):'a',ord(u'à'):'a',ord(u'ắ'):'a',ord(u'ă'):'a',ord(u'ấ'):'a', ord(u'ü'):'u',ord(u'ụ'):'u',ord(u'ú'):'u',ord(u'ử'):'u',ord(u'ư'):'u', ord(u'ù'):'u', ord(u'é'):'e', ord(u'ọ'):'o',ord(u'ố'):'o',ord(u'ộ'):'o',ord(u'ơ'):'o',ord(u'ớ'):'o', ord(u'ớ'):'o', } r1 = re.compile(r'U\+([0-9A-F]{2})([0-9A-F]{2}\b)') | def process_readings(self, source, fout): oucode = 0 |
|
code = re.sub(r'U\+([0-9A-F]{2})([0-9A-F]{2}\b)',r'\1\t\2',items[0]).split('\t') | code = r1.sub(r'\1\t\2',items[0]).split('\t') | def process_readings(self, source, fout): oucode = 0 |
pron = items[2].split(' ')[0].capitalize() if not all(ord(c) < 128 for c in pron): pron = re.sub('[^\x00-\x7f]',lambda x: self.pmap[ord(x)], pron) | ptmp = items[2].split(' ')[0].capitalize() pron = re.sub('[^\00-\x7f]', lambda x: pmap[ord(x.group())], ptmp) | def process_readings(self, source, fout): oucode = 0 |
from ipdb import set_trace; set_trace() debug(title) | def main_upload(arguments): """Upload video to Youtube.""" usage = """Usage: %prog [OPTIONS] EMAIL PASSWORD FILE TITLE DESCRIPTION CATEGORY KEYWORDS Upload a video to youtube spliting it if necessary (uses ffmpeg).""" parser = optparse.OptionParser(usage, version=VERSION) parser.add_option('-c', '--get-categories', dest='get_categories', action="store_true", default=False, help='Show video categories') parser.add_option('-s', '--split-only', dest='split_only', action="store_true", default=False, help='Split videos without uploading') parser.add_option('-n', '--no-split', dest='no_split', action="store_true", default=False, help='Skip video split') parser.add_option('-u', '--get-upload-form-info', dest='get_upload_form_data', action="store_true", default=False, help="Don't upload, just get the form info") parser.add_option('', '--private', dest='private', action="store_true", default=False, help='Set uploaded video as private') parser.add_option('', '--location', dest='location', type="string", default=None, metavar="COORDINATES", help='Video location (lat, lon). example: "37.0,-122.0"') options, args = parser.parse_args(arguments) if options.get_categories: print " ".join(Youtube.get_categories().keys()) return elif options.split_only: video_path, = args for path in split_youtube_video(video_path): print path return elif len(args) != 7: parser.print_usage() return 1 encoding = get_encoding() email, password0, video_path, title, description, category, skeywords = \ [unicode(s, encoding) for s in args] from ipdb import set_trace; set_trace() debug(title) password = (sys.stdin.readline().strip() if password0 == "-" else password0) videos = ([video_path] if options.no_split else list(split_youtube_video(video_path))) debug("connecting to Youtube API") yt = Youtube(DEVELOPER_KEY, email, password) keywords = filter(bool, [s.strip() for s in re.split('[,;\s]+', skeywords)]) for index, splitted_video_path in enumerate(videos): complete_title = ("%s [%d/%d]" % (title, index+1, len(videos)) if len(videos) > 1 else title) args = [splitted_video_path, complete_title, description, category, keywords] kwargs = dict(private=options.private, location=parse_location(options.location)) if options.get_upload_form_data: data = yt.get_upload_form_data(*args, **kwargs) print "|".join([splitted_video_path, data["token"], data["post_url"]]) else: debug("start upload: %s (%s)" % (splitted_video_path, complete_title)) entry = yt.upload_video(*args, **kwargs) print entry.GetHtmlLink().href.replace("&feature=youtube_gdata", "") |
|
password = (sys.stdin.read().strip() if password0 == "-" else password0) | password = (sys.stdin.readline().strip() if password0 == "-" else password0) | def main_upload(arguments): """Upload video to Youtube.""" usage = """Usage: %prog [OPTIONS] EMAIL PASSWORD FILE TITLE DESCRIPTION CATEGORY KEYWORDS Upload a video to youtube spliting it if necessary (uses ffmpeg).""" parser = optparse.OptionParser(usage, version=VERSION) parser.add_option('-c', '--get-categories', dest='get_categories', action="store_true", default=False, help='Show video categories') parser.add_option('-s', '--split-only', dest='split_only', action="store_true", default=False, help='Split videos without uploading') parser.add_option('-n', '--no-split', dest='no_split', action="store_true", default=False, help='Skip video split') parser.add_option('-u', '--get-upload-form-info', dest='get_upload_form_data', action="store_true", default=False, help="Don't upload, just get the form info") options, args = parser.parse_args(arguments) if options.get_categories: print " ".join(Youtube.get_categories().keys()) return elif options.split_only: video_path, = args for path in split_youtube_video(video_path): print path return elif len(args) != 7: parser.print_usage() return 1 email, password0, video_path, title, description, category, skeywords = args password = (sys.stdin.read().strip() if password0 == "-" else password0) videos = ([video_path] if options.no_split else list(split_youtube_video(video_path))) debug("connecting to Youtube API") yt = Youtube(DEVELOPER_KEY, email, password) keywords = filter(bool, map(str.strip, re.split('[,;\s]+', skeywords))) for index, splitted_video_path in enumerate(videos): if len(videos) > 1: complete_title = "%s [%d/%d]" % (title, index+1, len(videos)) else: complete_title = title args = [splitted_video_path, complete_title, description, category, keywords] if options.get_upload_form_data: data = yt.get_upload_form_data(*args) print "|".join([splitted_video_path, data["token"], data["post_url"]]) else: debug("start upload: %s (%s)" % (splitted_video_path, complete_title)) entry = yt.upload_video(*args) print entry.GetHtmlLink().href.replace("&feature=youtube_gdata", "") |
sys.stderr.write("--- " + str(obj) + "\n") | string = str(obj.encode(get_encoding()) if isinstance(obj, unicode) else obj) sys.stderr.write("--- " + string + "\n") def get_encoding(): return sys.stdout.encoding or locale.getpreferredencoding() | def debug(obj): """Write obj to standard error.""" sys.stderr.write("--- " + str(obj) + "\n") |
encoding = sys.stdout.encoding or locale.getpreferredencoding() | encoding = get_encoding() | def main_upload(arguments): """Upload video to Youtube.""" usage = """Usage: %prog [OPTIONS] EMAIL PASSWORD FILE TITLE DESCRIPTION CATEGORY KEYWORDS Upload a video to youtube spliting it if necessary (uses ffmpeg).""" parser = optparse.OptionParser(usage, version=VERSION) parser.add_option('-c', '--get-categories', dest='get_categories', action="store_true", default=False, help='Show video categories') parser.add_option('-s', '--split-only', dest='split_only', action="store_true", default=False, help='Split videos without uploading') parser.add_option('-n', '--no-split', dest='no_split', action="store_true", default=False, help='Skip video split') parser.add_option('-u', '--get-upload-form-info', dest='get_upload_form_data', action="store_true", default=False, help="Don't upload, just get the form info") parser.add_option('', '--private', dest='private', action="store_true", default=False, help='Set uploaded video as private') parser.add_option('', '--location', dest='location', type="string", default=None, metavar="COORDINATES", help='Video location (lat, lon). example: "37.0,-122.0"') options, args = parser.parse_args(arguments) if options.get_categories: print " ".join(Youtube.get_categories().keys()) return elif options.split_only: video_path, = args for path in split_youtube_video(video_path): print path return elif len(args) != 7: parser.print_usage() return 1 encoding = sys.stdout.encoding or locale.getpreferredencoding() email, password0, video_path, title, description, category, skeywords = \ [unicode(s, encoding) for s in args] password = (sys.stdin.readline().strip() if password0 == "-" else password0) videos = ([video_path] if options.no_split else list(split_youtube_video(video_path))) debug("connecting to Youtube API") yt = Youtube(DEVELOPER_KEY, email, password) keywords = filter(bool, [s.strip() for s in re.split('[,;\s]+', skeywords)]) for index, splitted_video_path in enumerate(videos): complete_title = ("%s [%d/%d]" % (title, index+1, len(videos)) if len(videos) > 1 else title) args = [splitted_video_path, complete_title, description, category, keywords] kwargs = dict(private=options.private, location=parse_location(options.location)) debug("kwargs = %s" % kwargs) if options.get_upload_form_data: data = yt.get_upload_form_data(*args, **kwargs) print "|".join([splitted_video_path, data["token"], data["post_url"]]) else: debug("start upload: %s (%s)" % (splitted_video_path, complete_title)) entry = yt.upload_video(*args, **kwargs) print entry.GetHtmlLink().href.replace("&feature=youtube_gdata", "") |
debug("kwargs = %s" % kwargs) | def main_upload(arguments): """Upload video to Youtube.""" usage = """Usage: %prog [OPTIONS] EMAIL PASSWORD FILE TITLE DESCRIPTION CATEGORY KEYWORDS Upload a video to youtube spliting it if necessary (uses ffmpeg).""" parser = optparse.OptionParser(usage, version=VERSION) parser.add_option('-c', '--get-categories', dest='get_categories', action="store_true", default=False, help='Show video categories') parser.add_option('-s', '--split-only', dest='split_only', action="store_true", default=False, help='Split videos without uploading') parser.add_option('-n', '--no-split', dest='no_split', action="store_true", default=False, help='Skip video split') parser.add_option('-u', '--get-upload-form-info', dest='get_upload_form_data', action="store_true", default=False, help="Don't upload, just get the form info") parser.add_option('', '--private', dest='private', action="store_true", default=False, help='Set uploaded video as private') parser.add_option('', '--location', dest='location', type="string", default=None, metavar="COORDINATES", help='Video location (lat, lon). example: "37.0,-122.0"') options, args = parser.parse_args(arguments) if options.get_categories: print " ".join(Youtube.get_categories().keys()) return elif options.split_only: video_path, = args for path in split_youtube_video(video_path): print path return elif len(args) != 7: parser.print_usage() return 1 encoding = sys.stdout.encoding or locale.getpreferredencoding() email, password0, video_path, title, description, category, skeywords = \ [unicode(s, encoding) for s in args] password = (sys.stdin.readline().strip() if password0 == "-" else password0) videos = ([video_path] if options.no_split else list(split_youtube_video(video_path))) debug("connecting to Youtube API") yt = Youtube(DEVELOPER_KEY, email, password) keywords = filter(bool, [s.strip() for s in re.split('[,;\s]+', skeywords)]) for index, splitted_video_path in enumerate(videos): complete_title = ("%s [%d/%d]" % (title, index+1, len(videos)) if len(videos) > 1 else title) args = [splitted_video_path, complete_title, description, category, keywords] kwargs = dict(private=options.private, location=parse_location(options.location)) debug("kwargs = %s" % kwargs) if options.get_upload_form_data: data = yt.get_upload_form_data(*args, **kwargs) print "|".join([splitted_video_path, data["token"], data["post_url"]]) else: debug("start upload: %s (%s)" % (splitted_video_path, complete_title)) entry = yt.upload_video(*args, **kwargs) print entry.GetHtmlLink().href.replace("&feature=youtube_gdata", "") |
|
status, v = STATUS.OK, specifier | status, v = STATUS.OK, obj.specifier | def execute_show_command (objs, db, tags, options): actions = { 'id': db.get_tag_value_by_id, 'about': db.get_tag_value_by_about, } for obj in objs: description = describe_by_mode (obj.specifier, obj.mode) print 'Object %s:' % description for tag in tags: fulltag = db.abs_tag_path (tag) if tag == '/id': if obj.mode == 'about': o = db.query ('fluiddb/about = "%s"' % obj.specifier) if type (o) == types.IntType: # error status, v = o, None else: status, v = STATUS.OK, o[0] else: status, v = STATUS.OK, specifier else: status, v = actions[obj.mode](obj.specifier, tag) if status == STATUS.OK: print ' %s' % formatted_tag_value (fulltag, v) elif status == STATUS.NOT_FOUND: print ' %s' % cli_bracket ('tag %s not present' % fulltag) else: print cli_bracket ('error code %d getting tag %s' % (status, fulltag)) |
o = db.create_object () self.assertEqual (type (o) != types.IntType, True) def testCreateObjectFail (self): bad = Credentials ('doesnotexist', 'certainlywiththispassword') db = FluidDB (bad) o = db.create_object ('DADGAD') self.assertEqual (o, STATUS.INTERNAL_SERVER_ERROR) def testCreateTag (self): | o = db.create_object() self.assertEqual(type(o) != types.IntType, True) def testCreateObjectFail(self): bad = Credentials('doesnotexist', 'certainlywiththispassword') db = FluidDB(bad) o = db.create_object('DADGAD') self.assertEqual(o, STATUS.UNAUTHORIZED) def testCreateTag(self): | def testCreateObjectNoAbout (self): db = self.db o = db.create_object () self.assertEqual (type (o) != types.IntType, True) |
def frag_ranking(): global db_conn print """\ <a name=" <ol>\ """ curs = db_conn.cursor() curs.execute(''' select fragger, count(*) as frags from frags where fragger != fragged group by lower(fragger) order by count(*) desc, lower(fragger) asc ''') for row in curs: print " <li>%s (%s)</li>" % (row[0], row[1]) print " </ol>" | def frag_ranking(): global db_conn print """\ <a name="#3"><h2>Frag-based ranking</h2></a> <ol>\ |
|
<a name=" | <a name="4"><h2>Frag/death ratio-based ranking</h2></a> | def fdratio_ranking(): global db_conn print """\ <a name="#4"><h2>Frag/death ratio-based ranking</h2></a> <ol>\ |
fdratio_ranking() | def main(): global db_conn if (len(sys.argv) < 2): sys.exit(1) create_db() if os.path.isdir(sys.argv[1]): for logrpath in os.listdir(sys.argv[1]): logfpath = ''.join([sys.argv[1], '/', logrpath]) parse_log(logfpath) else: parse_log(sys.argv[1]) print """\ |
|
ratios.append((players_row[0], float(frags_row[0]) / float(deaths_row[0]))) | try: ratios.append((players_row[0], float(frags_row[0]) / float(deaths_row[0]))) except ZeroDivisionError: ratios.append((players_row[0], 666.0)) | def fdratio_ranking(): global db_conn print """\ <a name="#4"><h2>Frag/death ratio-based ranking</h2></a> <ol>\ |
print " <li>%s (%i:%i:%i)</li>" % (row[0], int(row[1]) / 3600, int(row[1]) / 60, int(row[1]) % 60) | hours = int(row[1]) / 3600 minutes = (int(row[1]) - hours*3600) / 60 seconds = (int(row[1]) - minutes*60) % 60 print " <li>%s (%i:%i:%i)</li>" % (row[0], hours, minutes, seconds) | def presence_ranking(): global db_conn print """\ <a name="5"><h2>Presence-based ranking</h2></a> <ol>\ |
presence_ranking() | def main(): global db_conn if (len(sys.argv) < 2): sys.exit(1) create_db() if os.path.isdir(sys.argv[1]): for logrpath in os.listdir(sys.argv[1]): logfpath = ''.join([sys.argv[1], '/', logrpath]) parse_log(logfpath) else: parse_log(sys.argv[1]) print """\ |
|
db_conn.execute( '''update games set stop=? where player = ? and stop = -1''', (time, idd[m.group(3)])) del idd[m.group(3)] | try: db_conn.execute( '''update games set stop=? where player = ? and stop = -1''', (time, idd[m.group(3)])) del idd[m.group(3)] except KeyError: pass | def parse_log(logpath): global db_conn idd = {} logf = open(logpath, 'r') while 1: logline = logf.readline() if (not logline): break m = frag_prog.match(logline) if (m): # Update the frags table db_conn.execute( '''insert into frags values (?, ?, ?)''', (m.group(1), m.group(2), m.group(3))) continue m = playerjoins_prog.match(logline) if (m): if (m.group(3) not in idd): playerinfos = re.split(r"\\", m.group(4)) playername = playerinfos[playerinfos.index('name')+1] time = int(m.group(1))*60 + int(m.group(2)) # Update the players id dictionary idd[m.group(3)] = playername # And the player games table db_conn.execute( '''insert into games values (?, ?, -1)''', (playername, time)) continue m = playerquits_prog.match(logline) if (m): time = int(m.group(1))*60 + int(m.group(2)) # Update the games table db_conn.execute( '''update games set stop=? where player = ? and stop = -1''', (time, idd[m.group(3)])) # And the players id dictionary del idd[m.group(3)] continue m = endgame_prog.match(logline) if (m): time = int(m.group(1))*60 + int(m.group(2)) # New game, make everybody quits for k,v in idd.iteritems(): db_conn.execute( '''update games set stop=? where player = ? and stop = -1''', (time, v)) pass idd = {} continue m = item_prog.match(logline) if (m): if( m.group(2) == "team_CTF_redflag" or m.group(2) == "team_CTF_blueflag" ): db_conn.execute( '''insert into flags values (?, ?)''', (idd[m.group(1)], "CATCH")) pass continue m = flag_prog.match(logline) if (m): if int(m.group(2)) == 0 : db_conn.execute( '''insert into flags values (?, ?)''', (idd[m.group(1)], "DROP")) pass elif int(m.group(2)) == 1 : db_conn.execute( '''insert into flags values (?, ?)''', (idd[m.group(1)], "RETURN")) pass elif int(m.group(2)) == 2 : db_conn.execute( '''insert into flags values (?, ?)''', (idd[m.group(1)], "CAPTURE")) pass continue db_conn.commit() logf.close() |
mime = self.getContentType() if mime.startswith("text"): return file(self._filePath, "r", BUFFER_SIZE) | def getContent(self): """Open content as a stream for reading. See DAVResource.getContent() """ assert not self.isCollection mime = self.getContentType() if mime.startswith("text"): return file(self._filePath, "r", BUFFER_SIZE) return file(self._filePath, "rb", BUFFER_SIZE) |
|
object_to_etree(prop_set, set_props, namespace=namespace) | for p in set_props: prop_prop = ElementTree.SubElement(prop_set, '{DAV:}prop') object_to_etree(prop_prop, p, namespace=namespace) | def proppatch(self, path, set_props=None, remove_props=None, namespace='DAV:', headers=None): """Patch properties on a DAV resource. If namespace is not specified the DAV namespace is used for all properties""" root = ElementTree.Element('{DAV:}propertyupdate') if set_props is not None: prop_set = ElementTree.SubElement(root, '{DAV:}set') |
object_to_etree(prop_remove, remove_props, namespace=namespace) | for p in remove_props: prop_prop = ElementTree.SubElement(prop_remove, '{DAV:}prop') object_to_etree(prop_prop, p, namespace=namespace) | def proppatch(self, path, set_props=None, remove_props=None, namespace='DAV:', headers=None): """Patch properties on a DAV resource. If namespace is not specified the DAV namespace is used for all properties""" root = ElementTree.Element('{DAV:}propertyupdate') if set_props is not None: prop_set = ElementTree.SubElement(root, '{DAV:}set') |
lock["timeout"] = time.time() + timeout | if timeout < 0: lock["timeout"] = -1 else: lock["timeout"] = time.time() + timeout | def refresh(self, locktoken, timeout=None): """Set new timeout for lock, if existing and valid.""" if timeout is None: timeout = LockManager.LOCK_TIME_OUT_DEFAULT self._lock.acquireWrite() try: lock = self.getLock(locktoken) _logger.debug("refresh %s" % _lockString(lock)) if lock: lock["timeout"] = time.time() + timeout self._dict[locktoken] = lock self._sync() return lock finally: self._lock.release() |
if (not self.wsgiSentHeaders): | if not self.wsgiSentHeaders: | def runWSGIApp (self, application, scriptName, pathInfo, query): logging.info ("Running application with SCRIPT_NAME %s PATH_INFO %s" % (scriptName, pathInfo)) if self.command == "PUT": pass # breakpoint env = {"wsgi.version": (1, 0), "wsgi.url_scheme": "http", "wsgi.input": self.rfile, "wsgi.errors": sys.stderr, "wsgi.multithread": 1, "wsgi.multiprocess": 0, "wsgi.run_once": 0, "REQUEST_METHOD": self.command, "SCRIPT_NAME": scriptName, "PATH_INFO": pathInfo, "QUERY_STRING": query, "CONTENT_TYPE": self.headers.get("Content-Type", ""), "CONTENT_LENGTH": self.headers.get("Content-Length", ""), "REMOTE_ADDR": self.client_address[0], "SERVER_NAME": self.server.server_address[0], "SERVER_PORT": str(self.server.server_address[1]), "SERVER_PROTOCOL": self.request_version, } for httpHeader, httpValue in self.headers.items(): if not httpHeader.lower() in ("content-type", "content-length"): env ["HTTP_%s" % httpHeader.replace ("-", "_").upper()] = httpValue |
def do_SHUTDOWN (self): """Send 200 OK response, and set server.stop to True. http://code.activestate.com/recipes/336012/ """ print "got SHUTDOWN" self.send_response(200) self.end_headers() self.server.stop = True | def do_SHUTDOWN (self): """Send 200 OK response, and set server.stop to True. http://code.activestate.com/recipes/336012/ """ print "got SHUTDOWN" self.send_response(200) self.end_headers() self.server.stop = True |
|
assert hasattr(self, "stop"), "serve_forever_stoppable() must be called" | assert hasattr(self, "stop_request"), "serve_forever_stoppable() must be called before" assert not self.stop_request, "stop_serve_forever() must only be called once" self.stop_request = True time.sleep(.01) if self.stopped: return def _shutdownHandler(self): """Send 200 OK response, and set server.stop_request to True. http://code.activestate.com/recipes/336012/ """ self.send_response(200) self.end_headers() self.server.stop_request = True if not hasattr(ExtHandler, "do_SHUTDOWN"): setattr(ExtHandler, "do_SHUTDOWN", _shutdownHandler) | def stop_serve_forever(self): """Stop serve_forever_stoppable().""" assert hasattr(self, "stop"), "serve_forever_stoppable() must be called" (host, port) = self.server_address |
self.stop = True print "stopping serve_forever_stoppable... Sending %s:%s/ SHUTDOWN" % (host, port) | def stop_serve_forever(self): """Stop serve_forever_stoppable().""" assert hasattr(self, "stop"), "serve_forever_stoppable() must be called" (host, port) = self.server_address |
|
print "serve_forever_stoppable... stopped.", self.stop | assert self.stop_request | def stop_serve_forever(self): """Stop serve_forever_stoppable().""" assert hasattr(self, "stop"), "serve_forever_stoppable() must be called" (host, port) = self.server_address |