1  """ 
   2  This module contains condor jobs / node classes for the followup dag 
   3   
   4  This program creates cache files for the output of inspiral hipe 
   5  """ 
   6   
   7  __author__ = 'Chad Hanna <channa@phys.lsu.edu>' 
   8   
   9   
  10   
  11  import sys, os, copy, math 
  12  import math 
  13  import socket, time 
  14  import re, string 
  15  from optparse import * 
  16  import tempfile 
  17  import ConfigParser 
  18  import urlparse 
  19  from UserDict import UserDict 
  20  sys.path.append('@PYTHONLIBDIR@') 
  21   
  22   
  23   
  24  from glue import pipeline 
  25  from glue import lal 
  26  from glue import segments 
  27  from glue import segmentsUtils 
  28  from pylal.webUtils import * 
  29  from pylal.webCondor import * 
  30  from lalapps import inspiral 
  31  from pylal import fu_utils 
  32  from glue.ligolw import lsctables 
  33   
  34   
  35   
  36   
  38   
class followUpDAG(pipeline.CondorDAG, webTheDAG):

  def __init__(self, config_file, log_path):
    self.basename = re.sub(r'\.ini',r'', config_file)
    tempfile.tempdir = log_path
    tempfile.template = self.basename + '.dag.log.'
    logfile = tempfile.mktemp()
    fh = open( logfile, "w" )
    fh.close()
    pipeline.CondorDAG.__init__(self,logfile)
    self.set_dag_file(self.basename)
    self.jobsDict = {}
    self.remote_nodes = []


def checkHipeCachePath(cp):
  try:
    if len(string.strip(cp.get('followup-hipe-cache','hipe-cache-path'))) > 0:
      hipeCachePath = string.strip(cp.get('followup-hipe-cache','hipe-cache-path'))
    else:
      hipeCachePath = None
    return(hipeCachePath)
  except:
    print >> sys.stderr, "ERROR: failure in checkHipeCachePath()"
    return None
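# Example (sketch): the ini stanza checkHipeCachePath() looks for; the path
# shown here is hypothetical.  An empty value makes the followup jobs fall
# back to their own datafind caches instead of the hipe frame caches.
#
#   [followup-hipe-cache]
#   hipe-cache-path = /home/user/hipe/full_data/cache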


def verifyCP(cp,defaults):
  """
  This method takes in a cp object and a set of defaults and checks to
  make sure the section in question exists and that at least the options
  in the defaults are specified with some value.  It returns TRUE or
  FALSE depending on whether the cp object contains the sections and
  options specified by the input DEFAULTS.
  """
  return cp.has_section(defaults["section"]) and \
  all(cp.has_option(defaults["section"], opt) for opt in defaults["options"])
def modifyCP(cp,defaults):
  """
  Append the configuration information in defaults to the config
  parser (cp) object, modifying it in place.  Options that are already
  present are left untouched.
  """
  if not(cp.has_section(defaults["section"])):
    cp.add_section(defaults["section"])
  for key, val in defaults["options"].iteritems():
    if not cp.has_option(defaults["section"], key):
      cp.set(defaults["section"], key, val)
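# Example (sketch, assuming `cp` is a ConfigParser object built from the
# followup ini file): every job class below carries a `defaults` dict in
# this shape and calls verifyCP()/modifyCP() before reading [condor].
#
#   defaults = {"section": "condor",
#               "options": {"universe": "vanilla",
#                           "qscan": "wpipeline"}}
#   if not verifyCP(cp, defaults):
#     modifyCP(cp, defaults)   # fills in only the missing options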


class followUpInspJob(inspiral.InspiralJob,webTheJob):

  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "inspiral_head":"lalapps_inspiral"
      }
    }

  def __init__(self,cp,type='plot'):
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    inspiral.InspiralJob.__init__(self,cp)
    if type == 'head':
      self.set_executable(string.strip(cp.get('condor','inspiral_head')))
    self.name = 'followUpInspJob' + type
    self.setupJobWeb(self.name)


class followUpInspNode(inspiral.InspiralNode,webTheNode):

  def __init__(self, inspJob, procParams, ifo, trig, cp,opts,dag, datafindCache, d_node, datafindCommand, type='plot', sngl_table = None):
    self.sample_rate = string.strip(cp.get('coh-inspiral','sample-rate'))
    try:
      self.output_file_name = ""
      pipeline.CondorDAGNode.__init__(self,inspJob)
      injFile = self.checkInjections(cp)
      hipeCache = checkHipeCachePath(cp)

      if type == "plot" or type == "notrig" or type == "coh" or type == "chia":
        # seconds of data around the trigger to analyze
        hLengthAnalyzed = 1
        if type == "coh" or type == "chia": hLengthAnalyzed = 1.0
        self.set_trig_start( int(trig.gpsTime[ifo] - hLengthAnalyzed + 0.5) )
        self.set_trig_end( int(trig.gpsTime[ifo] + hLengthAnalyzed + 0.5) )

      if type == "plot" or type == "notrig" or type == "coh":
        self.add_var_opt("write-snrsq","")
        self.add_var_opt("write-chisq","")
        self.add_var_opt("write-spectrum","")
        self.add_var_opt("write-template","")
      if type == "chia" or type == "notrig" or type == "coh":
        self.add_var_opt("write-cdata","")

      if injFile:
        self.set_injections( injFile )

      skipParams = ['minimal-match', 'bank-file', 'user-tag', 'injection-file', 'trig-start-time', 'trig-end-time']
      if not hipeCache:
        skipParams.append('frame-cache')
        self.add_var_opt('frame-cache',datafindCache)

      # add the arguments that were used in the original hipe analysis,
      # overriding a few of them for the special followup job types
      extension = ".xml"
      for row in procParams:
        param = row.param.strip("-")
        value = row.value
        # for coherent runs override options from the [coh-inspiral] section
        if type == "coh" and cp.has_option("coh-inspiral",param):
          value = cp.get("coh-inspiral",param)
        if type == "chia" and cp.has_option("coh-inspiral",param):
          value = cp.get("coh-inspiral",param)
        if param == 'bank-file':
          bankFile = value
        if type == "notrig" or type == "coh" or type == "chia":
          # drop the snr threshold and effectively disable the chisq and
          # r-squared vetoes so that the template always produces output
          if param == 'snr-threshold': value = "0.1"
          if param == 'do-rsq-veto': continue
          if param == 'enable-rsq-veto': continue
          if param == 'chisq-threshold': value = "1.0e+06"
          # override the sngl_inspiral clustering
          if param == 'cluster-method': value = 'window'
          if param == 'cluster-window': continue
        if param in skipParams: continue
        self.add_var_opt(param,value)
        # record some of the hipe parameters needed to construct the
        # output file name; the mangled names mirror the attributes set
        # by the AnalysisNode / InspiralAnalysisNode base classes
        if param == 'gps-end-time':
          self.__end = value
          self._AnalysisNode__end = int(value)
        if param == 'gps-start-time':
          self.__start = value
          self._AnalysisNode__start = int(value)
        if param == 'pad-data':
          self._InspiralAnalysisNode__pad_data = int(value)
        if param == 'ifo-tag':
          self.__ifotag = value
        if param == 'channel-name': self.inputIfo = value[0:2]
        if param == 'write-compress':
          extension = '.xml.gz'

      if type == "notrig" or type == "coh" or type == "chia":
        self.add_var_opt('cluster-window',str(hLengthAnalyzed/2.))
        self.add_var_opt('disable-rsq-veto',' ')

      # pass along any extra options given in the ini file
      if cp.has_section("followup-inspiral-extra"):
        for (name,value) in cp.items("followup-inspiral-extra"):
          self.add_var_opt(name,value)

      if type == "plot" or type == "coh":
        bankFile = 'trigTemplateBank/' + self.inputIfo + '-TRIGBANK_FOLLOWUP_' + type + str(trig.eventID) + '.xml.gz'
      if type == "chia":
        bankFile = 'trigTemplateBank/' + self.inputIfo + '-TRIGBANK_FOLLOWUP_coh' + str(trig.eventID) + '.xml.gz'
      if type == "notrig":
        bankFile = 'trigTemplateBank/' + ifo + '-TRIGBANK_FOLLOWUP_' + type + str(trig.eventID) + '.xml.gz'
      self.set_bank(bankFile)

      if not ifo == self.inputIfo and not type == "coh" and not type == "chia":
        second_user_tag = "_" + ifo + "tmplt"
      else:
        second_user_tag = ""
      self.set_user_tag("FOLLOWUP_" + str(trig.eventID) + second_user_tag)
      self.__usertag = "FOLLOWUP_" + str(trig.eventID) + second_user_tag

      # the 'head' job runs the bank veto development code on a subbank
      if (type == 'head'):
        subBankSize = string.strip(cp.get('followup-inspiral-head','bank-veto-subbank-size'))
        if opts.inspiral_head:
          bankFileName = fu_utils.generateBankVetoBank(trig, ifo, str(trig.gpsTime[ifo]), sngl_table[ifo],int(subBankSize),'BankVetoBank')
        else: bankFileName = 'none'
        self.add_var_opt("bank-veto-subbank-size", subBankSize)
        self.add_var_opt("order", string.strip(cp.get('followup-inspiral-head','order')))
        self.set_bank(bankFileName)

      # expected output file name (needed by the plotting codes)
      if type == "plot" or type == "notrig" or type == "coh" or type == "chia":
        self.output_file_name = inspJob.outputPath + self.inputIfo + "-INSPIRAL_" + self.__ifotag + "_" + self.__usertag + "-" + self.__start + "-" + str(int(self.__end)-int(self.__start)) + extension

      self.set_id(self.inputIfo + "-INSPIRAL_" + self.__ifotag + "_" + self.__usertag + "-" + self.__start + "-" + str(int(self.__end)-int(self.__start)))

      self.outputCache = self.inputIfo + ' ' + 'INSPIRAL' + ' ' + str(self.__start) + ' ' + str(int(self.__end)-int(self.__start)) + ' ' + self.output_file_name  + '\n' + self.inputIfo + ' ' + 'INSPIRAL-FRAME' + ' ' + str(self.__start) + ' ' + str(int(self.__end)-int(self.__start)) + ' ' + self.output_file_name.replace(extension,".gwf") + '\n'

      self.setupNodeWeb(inspJob,False,None,None,None,dag.cache)
      self.add_var_opt("output-path",inspJob.outputPath)

      if not opts.disable_dag_categories:
        self.set_category(inspJob.name.lower())

      try:
        if d_node.validNode and eval('opts.' + datafindCommand):
          self.add_parent(d_node)
      except:
        print >> sys.stderr, "Didn't find a datafind job, I'll assume I don't need it"

      if type == "plot" or type == "notrig":
        if opts.inspiral:
          dag.addNode(self,'inspiral')
          self.validate()
        else: self.invalidate()

      if type == 'head':
        if opts.inspiral_head:
          dag.addNode(self,'inspiral-head')
          self.validate()
        else: self.invalidate()

      if type == 'coh':
        if opts.coh_inspiral:
          dag.addNode(self,'coh-inspiral')
          self.validate()
        else: self.invalidate()

      if type == "chia":
        if opts.plot_chia:
          dag.addNode(self,'chia-inspiral')
          self.validate()
        else: self.invalidate()

    except:
      try:
        print "couldn't add inspiral job for " + self.inputIfo + "@ "+ str(trig.gpsTime[ifo])
      except:
        print "couldn't add inspiral job for " + ifo + "@ "+ str(trig.gpsTime[ifo])

  def checkInjections(self,cp):
    try:
      if len(string.strip(cp.get('followup-triggers','injection-file'))) > 0:
        injectionFile = string.strip(cp.get('followup-triggers','injection-file'))
      else:
        injectionFile = None
      return(injectionFile)
    except:
      print >> sys.stderr, "ERROR: failure in followUpInspNode.checkInjections()"
      return None
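  # Example (sketch): the stanza checkInjections() reads; the file name
  # below is hypothetical.  An empty value means no injection file is
  # passed to the followup inspiral jobs.
  #
  #   [followup-triggers]
  #   injection-file = HL-INJECTIONS_1234-873247000-86400.xml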


class plotSNRCHISQJob(pipeline.CondorDAGJob,webTheJob):
  """
  A followup plotting job for snr and chisq time series
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "plotsnrchisq":"plotsnrchisq_pipe"
      }
    }
  def __init__(self, options, cp, tag_base='PLOT_FOLLOWUP'):
    """
    """
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__prog__ = 'plotSNRCHISQJob'
    self.__executable = string.strip(cp.get('condor','plotsnrchisq'))
    self.__universe = "vanilla"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJobWeb(self.__prog__,tag_base)



class plotSNRCHISQNode(pipeline.CondorDAGNode,webTheNode):
  """
  Runs an instance of a plotSNRCHISQ followup job
  """
  def __init__(self,job,ifo,fileName,trig,page,dag,inspiralNode,opts,ifoString=None):
    """
    job = A CondorDAGJob that can run an instance of plotSNRCHISQ followup.
    """
    if ifoString:
      time = trig.gpsTime[ifoString]
    else:
      time = trig.gpsTime[ifo]
    self.friendlyName = 'Plot SNR/CHISQ/PSD'
    try:
      pipeline.CondorDAGNode.__init__(self,job)
      self.output_file_name = ""
      self.add_var_opt("frame-file",fileName.replace(".xml",".gwf").strip(".gz"))
      self.add_var_opt("inspiral-xml-file",fileName)

      duration = 2.0
      self.add_var_opt("plot-width",duration)

      self.add_var_opt("gps",time)
      self.add_var_opt("gps-start-time",time-duration*.5)
      self.add_var_opt("gps-end-time",time+duration*.5)

      self.add_var_opt("ifo-times",ifo)
      self.add_var_opt("ifo-tag","FOLLOWUP_" + ifo)

      if ifoString:
        self.add_var_opt("user-tag",ifoString+'tmplt_'+str(trig.eventID))
        self.id = job.name + '-' + ifo + '-' + ifoString + 'tmplt' + '-' + str(trig.statValue) + '_' + str(trig.eventID)
      else:
        self.add_var_opt("user-tag",str(trig.eventID))
        self.id = job.name + '-' + ifo + '-' + str(trig.statValue) + '_' + str(trig.eventID)
      self.setupNodeWeb(job,True, dag.webPage.lastSection.lastSub,page,None,None)

      if not opts.disable_dag_categories:
        self.set_category(job.name.lower())

      if inspiralNode.validNode: self.add_parent(inspiralNode)
      if opts.plots:
        dag.addNode(self,self.friendlyName)
        self.validate()
      else: self.invalidate()
    except:
      self.invalidate()
      print "couldn't add plot job for " + str(ifo) + "@ "+ str(time)


class lalapps_skyMapJob(pipeline.CondorDAGJob,webTheJob):
  """
  Generates sky map data
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "lalapps_skymap":"lalapps_skymap"
      }
    }
  def __init__(self, options, cp, tag_base='SKY_MAP'):
    """
    """
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__prog__ = 'lalapps_skyMapJob'
    self.__executable = string.strip(cp.get('condor','lalapps_skymap'))
    self.__universe = "standard"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJobWeb(self.__prog__,tag_base)
    self.ra_res = string.strip(cp.get('skymap','ra-res'))
    self.dec_res = string.strip(cp.get('skymap','dec-res'))
    self.sample_rate = string.strip(cp.get('coh-inspiral','sample-rate'))



class pylal_skyPlotJob(pipeline.CondorDAGJob,webTheJob):
  """
  Plots the sky map output of lalapps_skymap
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "pylal_skyPlotJob":"pylal_plot_inspiral_skymap"
      }
    }
  def __init__(self, options, cp, tag_base='SKY_PLOT'):
    """
    """
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__prog__ = 'pylal_skyPlotJob'
    self.__executable = string.strip(cp.get('condor','pylal_skyPlotJob'))
    self.__universe = "vanilla"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJobWeb(self.__prog__,tag_base)
    self.ra_res = string.strip(cp.get('skymap','ra-res'))
    self.dec_res = string.strip(cp.get('skymap','dec-res'))
    self.sample_rate = string.strip(cp.get('coh-inspiral','sample-rate'))




class lalapps_skyMapNode(pipeline.CondorDAGNode,webTheNode):
  """
  A C code for computing the sky map
  An example command line is:

lalapps_skymap --h1-frame-file H1-INSPIRAL_SECOND_H1H2L1V1_FOLLOWUP_866088314000001908-866088022-2048.gwf --l1-frame-file L1-INSPIRAL_SECOND_H1H2L1V1_FOLLOWUP_866088314000001908-866088022-2048.gwf --v1-frame-file V1-INSPIRAL_SECOND_H1H2L1V1_FOLLOWUP_866088314000001908-866088205-2048.gwf --event-id 866088314000001908 --ra-res 512 --dec-res 256 --h1-xml-file H1-INSPIRAL_SECOND_H1H2L1V1_FOLLOWUP_866088314000001908-866088022-2048.xml.gz --l1-xml-file L1-INSPIRAL_SECOND_H1H2L1V1_FOLLOWUP_866088314000001908-866088022-2048.xml.gz --v1-xml-file V1-INSPIRAL_SECOND_H1H2L1V1_FOLLOWUP_866088314000001908-866088205-2048.xml.gz --output-file chad.txt
  """
  def __init__(self,job,trig,opts):
    self.ifo_list = ["H1","L1","V1"]

    self.ra_res = job.ra_res
    self.dec_res = job.dec_res
    self.sample_rate = job.sample_rate
    pipeline.CondorDAGNode.__init__(self,job)
    self.friendlyName = 'Produce sky map of event'
    self.id = job.name + '-skymap-' + str(trig.statValue) + '_' + str(trig.eventID)
    self.setupNodeWeb(job)

    self.output_file_name = job.outputPath + self.id+".txt.gz"
    self.add_var_opt("output-file",self.output_file_name)
    self.add_var_opt("ra-res",self.ra_res)
    self.add_var_opt("dec-res",self.dec_res)
    self.add_var_opt("event-id",trig.eventID)
    # all files default to "none"; append_insp_node() fills in the
    # entries for the ifos that participated in the coincidence
    self.add_var_opt("h1-frame-file","none")
    self.add_var_opt("h1-xml-file","none")
    self.add_var_opt("h2-frame-file","none")
    self.add_var_opt("h2-xml-file","none")
    self.add_var_opt("l1-frame-file","none")
    self.add_var_opt("l1-xml-file","none")
    self.add_var_opt("v1-frame-file","none")
    self.add_var_opt("v1-xml-file","none")
    self.add_var_opt("sample-rate",self.sample_rate)

    if not opts.disable_dag_categories:
      self.set_category(job.name.lower())

  def append_insp_node(self,inspNode,ifo):
    if ifo in self.ifo_list:
      fileName = str(inspNode.output_file_name)
      self.add_var_opt(ifo.lower()+"-frame-file",str(fileName.replace(".xml",".gwf").strip(".gz")))
      self.add_var_opt(ifo.lower()+"-xml-file",str(fileName))
      if inspNode.validNode: self.add_parent(inspNode)
    else: pass

  def add_node_to_dag(self,dag,opts,trig):
    if opts.sky_map:
      dag.addNode(self,self.friendlyName)
      self.validate()
    else: self.invalidate()


class pylal_skyPlotNode(pipeline.CondorDAGNode,webTheNode):
  """
  A python code for plotting the sky map
  An example command line is

  /pylal_plot_inspiral_skymap --event-id 866088314000001908 --ra-res 512 --dec-res 256 --output-path . --page-rel-path . --output-web-file test.html --page . --injection-right-ascension 0 --injection-declination 0 --map-data-file chad.txt
  """
  def __init__(self,job,trig,skyMapNode,dag,page,opts):

    pipeline.CondorDAGNode.__init__(self,job)

    self.friendlyName = 'Produce a plot of the sky map of an event'

    self.id = job.name + '-skymap-plot' + str(trig.statValue) + '_' + str(trig.eventID)

    self.setupNodeWeb(job,True, dag.webPage.lastSection,page,None,None)

    self.add_var_opt("map-data-file",skyMapNode.output_file_name)
    self.add_var_opt("user-tag",str(trig.eventID))
    self.add_var_opt("ifo-tag",trig.ifos)
    self.add_var_opt("ifo-times",trig.ifos)
    self.add_var_opt("ra-res",str(skyMapNode.ra_res))
    self.add_var_opt("dec-res",str(skyMapNode.dec_res))
    self.add_var_opt("stat-value", str(trig.statValue))

    # if this is a software injection, pass the injected sky position
    # so it can be marked on the plot
    if trig.is_found():
      inj_ra = trig.coincs.sim.longitude
      inj_dec = trig.coincs.sim.latitude
      self.add_var_opt("injection-right-ascension",str(inj_ra))
      self.add_var_opt("injection-declination",str(inj_dec))

    if not opts.disable_dag_categories:
      self.set_category(job.name.lower())

    try:
      if skyMapNode.validNode: self.add_parent(skyMapNode)
    except: pass
    if opts.sky_map_plot:
      dag.addNode(self,self.friendlyName)
      self.validate()
    else: self.invalidate()




class followupDataFindJob(pipeline.LSCDataFindJob,webTheJob):
  defaults={
    "section":"condor",
    "options":
      {
      "universe":"vanilla",
      "datafind":"ligo_data_find"
      }
    }

  def __init__(self, config_file, source):

    if source == 'futrig':
      self.name = 'qscanDataFindJob'
    if source == 'inspiral':
      self.name = 'inspiralDataFindJob'

    # make the output directories if they do not already exist
    try:
      os.mkdir(self.name)
      os.mkdir(self.name + '/logs')
    except: pass
    pipeline.LSCDataFindJob.__init__(self, self.name, self.name + '/logs', config_file)
    if source == 'futrig':
      self.setup_cacheconv(config_file)
    self.setupJobWeb(self.name)

  def setup_cacheconv(self,cp):
    # write a shell script that converts the LAL cache to a qscan cache,
    # but only if the datafind job exited successfully
    convert_script = open(self.name + '/cacheconv.sh','w')
    convert_script.write("""#!/bin/bash
    if [ ${1} -ne 0 ] ; then
      exit 1
    else
      %s ${2} ${3}
    fi
    """ % string.strip(cp.get('condor','convertcache')))
    convert_script.close()
    os.chmod(self.name + '/cacheconv.sh',0755)
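  # Example (sketch): Condor invokes this script as a post script,
  #
  #   cacheconv.sh $RETURN <lal cache> <qcache>
  #
  # (see setup_fu_trig() below), so the cache is only converted with the
  # 'convertcache' executable from [condor] when the datafind exited 0.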
class followupDataFindNode(pipeline.LSCDataFindNode,webTheNode):

  def __init__(self, job, source, type, cp, time, ifo, opts, dag, datafindCommand, procParams=None):
    try:
      self.outputFileName = ""
      pipeline.LSCDataFindNode.__init__(self,job)
      self.id = str(ifo) + '-' + repr(time) + '-' + str(type)
      self.setupNodeWeb(job,False,None,None,None,dag.cache)
      if source == 'futrig':
        self.outputFileName = self.setup_fu_trig(job, cp, time, ifo, type)
        nodeName = "qscan data find"
      if source == 'inspiral':
        self.outputFileName = self.setup_inspiral(cp,ifo,type,procParams)
        nodeName = "inspiral data find"

      if not opts.disable_dag_categories:
        self.set_category(job.name.lower())

      # do not run a datafind job for an ifo that is handled remotely
      if eval('opts.' + datafindCommand) and \
        not( cp.has_option("followup-"+type,"remote-ifo") and \
        cp.get("followup-"+type,"remote-ifo")==ifo ):
          dag.addNode(self,nodeName)
          self.validNode = True
      else: self.validNode = False
    except:
      self.validNode = False
      print >> sys.stderr, "could not set up the datafind jobs for " + type

  def setup_inspiral(self,cp,ifo,type,procParams):
    # take the start and end time of the data to find from the hipe
    # process params, padded by the pad-data value
    for row in procParams:
      param = row.param.strip("-")
      value = row.value
      if param == 'gps-start-time': startTime = value
      if param == 'gps-end-time': endTime = value
      if param == 'pad-data': paddataTime = value
    self.set_observatory(ifo[0])
    self.set_start(int(startTime) - int(paddataTime))
    self.set_end(int(endTime) + int(paddataTime))
    self.set_type(cp.get("followup-"+type,ifo + '_type'))
    lalCache = self.get_output()
    return(lalCache)
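  # Example (sketch): with hipe process params gps-start-time=873247000,
  # gps-end-time=873249048 and pad-data=8, setup_inspiral() queries the
  # segment [873246992, 873249056] so the followup inspiral job can
  # re-read the padded data.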

  def setup_fu_trig(self, job, cp, time, ifo, type):
    # the data to find is centered on the trigger time with half the
    # qscan search-time-range on either side, plus a second of slop
    self.q_time = cp.getint("followup-"+type,'search-time-range')/2
    self.set_observatory(ifo[0])
    self.set_start(int( time - self.q_time - 1))
    self.set_end(int( time + self.q_time + 1))
    if cp.has_option("followup-"+type, ifo + '_type'):
      self.set_type( cp.get("followup-"+type, ifo + '_type' ))
    else:
      if not( cp.has_option("followup-"+type,"remote-ifo") and \
      cp.get("followup-"+type,"remote-ifo")==ifo ):
        self.set_type( cp.get("followup-"+type, 'type' ))
      else: self.set_type("dummy")
    lalCache = self.get_output()
    qCache = lalCache.rstrip("cache") + "qcache"
    self.set_post_script(job.name + "/cacheconv.sh $RETURN %s %s" %(lalCache,qCache) )
    return(qCache)
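  # Example (sketch): with search-time-range = 128 in the corresponding
  # [followup-...] section, q_time is 64 and a trigger at GPS time t is
  # covered by the segment [t - 65, t + 65].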


class qscanJob(pipeline.CondorDAGJob, webTheJob):
  """
  A qscan job
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "qscan":"wpipeline"
      }
    }

  def __init__(self, opts, cp, tag_base='QSCAN'):
    """
    """
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__executable = string.strip(cp.get('condor','qscan'))
    self.__universe = "vanilla"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.setupJobWeb(tag_base)
    self.setup_checkForDir()

  def setup_checkForDir(self):
    # write a shell script that moves an existing output directory out
    # of the way (to a .bk.N backup) before the scan runs
    checkdir_script = open(self.name + '/checkForDir.sh','w')
    checkdir_script.write("""#!/bin/bash
    if [ -d $1/$2 ]
    then
      matchingList=$(echo $(find $1 -name $2.bk*))
      COUNTER=1
      for file in $matchingList
      do
        let COUNTER=COUNTER+1
      done
      mv $1/$2 $1/$2.bk.$COUNTER
    fi
    """)
    checkdir_script.close()
    os.chmod(self.name + '/checkForDir.sh',0755)
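  # Example (sketch, hypothetical paths): given an existing output
  # directory QSCAN/foreground-qscan/H1/873247000.0, the pre script
  # renames it to 873247000.0.bk.N, where N is one more than the number
  # of existing .bk* backups, before the new scan writes its output.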


class qscanNode(pipeline.CondorDAGNode,webTheNode):
  """
  Runs an instance of a qscan job
  """
  def __init__(self,job,time,cp,qcache,ifo,name, opts, d_node, dag, datafindCommand, qscanCommand, trig=None,qFlag=None):
    """
    job = A CondorDAGJob that can run an instance of qscan.
    """
    self.friendlyName = name
    self.id = ifo + '-' + name + '-' + repr(time)

    pipeline.CondorDAGNode.__init__(self,job)
    if name.split('-')[0]=='background':
      self.add_var_arg('scan')
    else:
      self.add_var_arg('scan -r')
    qscanConfig = string.strip(cp.get("followup-"+name, ifo + 'config-file'))
    self.add_var_arg("-c "+qscanConfig)
    self.add_var_arg("-f "+qcache)

    if cp.has_option("followup-"+name, ifo + 'output') and string.strip(cp.get("followup-"+name, ifo + 'output')):
      output = string.strip(cp.get("followup-"+name, ifo + 'output'))
    else:
      output = job.name + '/' + name + '/' + ifo
    if not os.access(output,os.F_OK):
      os.makedirs(output)
    else:
      if not os.access(output,os.W_OK):
        print >> sys.stderr, 'path '+output+' is not writable'
        sys.exit(1)

    self.add_var_arg("-o "+output+"/"+repr(time))
    self.add_var_arg(repr(time))

    self.set_pre_script(job.name + "/checkForDir.sh %s %s" \
    %(output, repr(time)))

    self.outputName = output + '/' + repr(time)

    self.outputCache = ifo + ' ' + name + ' ' + repr(time) + ' ' + self.outputName + '\n'

    if job.name == 'QSCAN':
      if cp.has_option("followup-"+name,ifo+'web') and string.strip(cp.get("followup-"+name,ifo+'web')):
        pageOverride = string.strip(cp.get("followup-"+name,ifo+'web'))+'/'+repr(time)
      else:
        pageOverride = job.name + '/' + name + '/' + ifo + '/' + repr(time)
      self.setupNodeWeb(job,False,dag.webPage.lastSection.lastSub,dag.page,pageOverride,dag.cache)
    else:
      self.setupNodeWeb(job,False,None,None,None,dag.cache)

    if not opts.disable_dag_categories:
      self.set_category(job.name.lower())

    try:
      if d_node.validNode and eval('opts.' + datafindCommand):
        self.add_parent(d_node)
    except: pass

    # only add the node to the dag if the ifo is not handled remotely
    if eval('opts.' + qscanCommand):
      if not(cp.has_option("followup-"+name,"remote-ifo") and \
      cp.get("followup-"+name,"remote-ifo")==ifo):
        dag.addNode(self,self.friendlyName)
        self.validNode = True
      else:
        dag.remote_nodes.append(self)
    else: self.validNode = False


class remoteQscanJob(pipeline.CondorDAGJob, webTheJob):
  """
  A remote qscan job
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "submit_remote_scan":"submit_remote_scan.py"
      }
    }

  def __init__(self, opts, cp, tag_base='REMOTESCAN'):
    """
    """
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)

    if not os.path.exists(tag_base):
      os.mkdir(tag_base)
    self.setup_executable(tag_base)
    self.__executable = tag_base + '/remote_scan_wrapper.sh'
    self.__universe = "scheduler"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.setupJobWeb(tag_base,None)

  def setup_executable(self,tag_base):
    # write the wrapper script that sources the remote environment and
    # then calls the remote scan submission executable
    starter_script = open(tag_base + '/remote_scan_wrapper.sh','w')
    starter_script.write("""#!/bin/bash
    dotbashrc=$1
    executable=$2
    gpstime=$3
    configfile=$4
    qscantype=$5
    remoteoutput=$6
    remotereceiver=$7
    outputpath=$8
    shift 8
    source $dotbashrc
    $executable --gps-time $gpstime --config-file $configfile --qscan-type $qscantype --remote-output $remoteoutput --remote-receiver $remotereceiver --output-path $outputpath
    """)
    starter_script.close()
    os.chmod(tag_base + '/remote_scan_wrapper.sh',0755)
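  # Example (sketch): the wrapper takes eight positional arguments, e.g.
  #
  #   remote_scan_wrapper.sh ~/.bashrc submit_remote_scan.py 873247000.0 \
  #     V1config.txt qscan gsiftp://remote/out user@remote.host \
  #     QSCAN/foreground-qscan/V1/873247000.0
  #
  # The values shown are hypothetical; remoteQscanFgNode below supplies
  # the real ones from the [followup-remote-scan] section.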


class remoteQscanFgNode(pipeline.CondorDAGNode,webTheNode):
  """
  Runs an instance of a remote qscan job
  """
  def __init__(self,job,time,cp,ifo,name,opts,dag,qscanCommand):
    """
    job = A CondorDAGJob that can run an instance of remote qscan.
    """
    self.friendlyName = name
    self.id = ifo + '-' + name + '-' + repr(time)
    pipeline.CondorDAGNode.__init__(self,job)
    self.add_macro("macroid", self.id)
    self.jobName = job.name

    # the wrapper script arguments, in the order remote_scan_wrapper.sh
    # expects them
    self.add_var_arg(string.strip(cp.get("followup-remote-scan","virgo-env-path")))
    self.add_var_arg(string.strip(cp.get("condor","submit_remote_scan")))
    self.add_var_arg(repr(time))
    self.add_var_arg(string.strip(cp.get("followup-"+name,ifo+"config-file")))
    self.add_var_arg("_".join(name.split("-")[1:len(name.split("-"))]))
    self.add_var_arg(string.strip(cp.get("followup-remote-scan","remote-output")))
    self.add_var_arg(string.strip(cp.get("followup-remote-scan","remote-server")))

    outputdir = 'QSCAN' + '/' + name + '/' + ifo + '/' + repr(time)
    self.add_var_arg(outputdir)

    if not opts.disable_dag_categories:
      self.set_category(job.name.lower())

    if eval('opts.' + qscanCommand):
      dag.addNode(self,"Remote " + self.friendlyName)
      self.validate()
    else: self.invalidate()


class distributeQscanJob(pipeline.CondorDAGJob, webTheJob):
  """
  A job to distribute the results of the qscans that have been run remotely (for LV search)
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "distribute_q":"distrib_fu_qscan_results.py"
      }
    }

  def __init__(self, opts, cp):
    """
    """
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__prog__ = 'distributeQscanJob'
    self.__executable = string.strip(cp.get('condor','distribute_q'))
    self.__universe = "vanilla"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJobWeb(self.__prog__)


class distributeQscanNode(pipeline.CondorDAGNode, webTheNode):
  """
  A node to distribute the results of the qscans that have been run remotely (for LV search)
  """
  def __init__(self,job,foregroundCache,backgroundCache,ifo,inputFile,opts,dag):

    self.friendlyName = "distributeQscanResults"

    pipeline.CondorDAGNode.__init__(self,job)
    self.add_var_opt('qscan-input-file',inputFile)
    self.add_var_opt('qscan-cache-background',backgroundCache)
    self.add_var_opt('qscan-cache-foreground',foregroundCache)
    self.add_var_opt('remote-ifo',ifo)

    typeList=""
    for type in ["qscan","seismic-qscan"]:
      typeList += type + ","
    self.add_var_opt('qscan-type-list',typeList.strip(','))

    if opts.distrib_remote_q:
      dag.addNode(self,self.friendlyName)
      self.validNode = True
    else: self.validNode = False


class analyseQscanJob(pipeline.CondorDAGJob, webTheJob):
  """
  A followup analyseQscan job to interpret the qscans
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "analyseQscan":"analyseQscan.py"
      }
    }
  def __init__(self,options,cp,tag_base='ANALYSE_QSCAN'):
    """
    """
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__prog__ = 'analyseQscanJob'
    self.__executable = string.strip(cp.get('condor','analyseQscan'))
    self.__universe = "vanilla"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJobWeb(self.__prog__,tag_base)


class analyseQscanNode(pipeline.CondorDAGNode,webTheNode):
  """
  Runs an instance of a followup analyseQscan job
  """
  def __init__(self,job,time,ifo,name,foregroundCache,backgroundCache,cp,opts,dag,command):
    """
    job = A CondorDAGJob that can run an instance of analyseQscan followup.
    """
    self.friendlyName = 'analyse ' + name
    self.id = ifo + '-' + name + '-' + repr(time)

    nameList = name.split('-')[1:len(name.split('-'))]
    shortName = ''
    for word in nameList:
      shortName = shortName + word + '-'

    try:
      pipeline.CondorDAGNode.__init__(self,job)
      if cp.has_option('followup-analyse-qscan','generate-qscan-xml'):
        self.add_var_opt('generate-qscan-xml','')
      self.add_var_opt('z-threshold',cp.getfloat('followup-analyse-qscan','z-threshold'))
      if cp.has_option('followup-analyse-qscan','plot-z-distribution'):
        self.add_var_opt('plot-z-distribution','')
        self.add_var_opt('z-min',cp.getfloat('followup-analyse-qscan','z-min'))
        self.add_var_opt('z-max',cp.getfloat('followup-analyse-qscan','z-max'))
        self.add_var_opt('z-bins',cp.getfloat('followup-analyse-qscan','z-bins'))
      if cp.has_option('followup-analyse-qscan','plot-dt-distribution'):
        self.add_var_opt('plot-dt-distribution','')
        self.add_var_opt('dt-min',cp.getfloat('followup-analyse-qscan',shortName + 'dt-min'))
        self.add_var_opt('dt-max',cp.getfloat('followup-analyse-qscan',shortName + 'dt-max'))
        self.add_var_opt('dt-bins',cp.getfloat('followup-analyse-qscan','dt-bins'))
      if cp.has_option('followup-analyse-qscan','plot-z-scattered'):
        self.add_var_opt('plot-z-scattered','')
      if cp.has_option('followup-analyse-qscan','plot-z-scattered') or cp.has_option('followup-analyse-qscan','plot-dt-distribution'):
        if not ifo=='V1':
          refChannel = cp.get('followup-analyse-qscan',shortName + 'ref-channel').split(',')[0].strip()
        else:
          refChannel = cp.get('followup-analyse-qscan',shortName + 'ref-channel').split(',')[1].strip()
        self.add_var_opt('ref-channel',refChannel)
      self.add_var_opt('ifo-times',ifo)
      self.add_var_opt('type',name)
      self.add_var_opt('gps-string',repr(time))
      self.add_var_opt('ifo-tag',ifo)
      self.add_var_opt('user-tag',repr(time).replace('.','_') + "_" + shortName.replace('-','_').strip("_"))

      self.add_var_opt('qscan-cache-foreground',foregroundCache)
      self.add_var_opt('qscan-cache-background',backgroundCache)

      self.setupNodeWeb(job,True,None,dag.page,None,None)

      if not opts.disable_dag_categories:
        self.set_category(job.name.lower())

      # add the parents: the (local or remote) qscan nodes that produced
      # the scans being analysed
      for node in dag.get_nodes():
        if isinstance(node,distributeQscanNode):
          if cp.has_option("followup-"+name,"remote-ifo") and cp.get("followup-"+name,"remote-ifo")==ifo:
            if node.validNode:
              self.add_parent(node)
        if isinstance(node,remoteQscanFgNode):
          if cp.has_option("followup-"+name,"remote-ifo") and cp.get("followup-"+name,"remote-ifo")==ifo:
            if node.friendlyName == name and node.validNode:
              self.add_parent(node)
        if isinstance(node,qscanNode):
          if node.validNode:
            if (node.friendlyName == name or \
            node.friendlyName.replace('background','foreground') == name) \
            and node.id.split('-')[0] == ifo:
              self.add_parent(node)

      if eval('opts.' + command):
        dag.addNode(self,self.friendlyName)
        self.validNode = True
      else: self.validNode = False

    except:
      self.validNode = False
      print "couldn't add " + name + " analyseQscan job for " + ifo + "@ "+ repr(time)


class h1h2QeventJob(pipeline.CondorDAGJob, webTheJob):
  """
  A h1h2 qevent job
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "qscan":"wpipeline"
      }
    }

  def __init__(self, opts, cp, tag_base='QEVENT'):
    """
    """
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__executable = string.strip(cp.get('condor','qscan'))
    self.__universe = "vanilla"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.setupJobWeb(tag_base)
    self.setup_checkForDir()

  def setup_checkForDir(self):
    # write a shell script that concatenates the two single-ifo cache
    # files and backs up any existing output directory before the
    # qevent scan runs
    checkdir_script = open(self.name + '/checkForDir.sh','w')
    checkdir_script.write("""#!/bin/bash
    cat ${1} ${2} > ${3}
    if [ -d $4/$5 ]
    then
      matchingList=$(echo $(find $4 -name $5.bk*))
      COUNTER=1
      for file in $matchingList
      do
        let COUNTER=COUNTER+1
      done
      mv $4/$5 $4/$5.bk.$COUNTER
    fi
    """)
    checkdir_script.close()
    os.chmod(self.name + '/checkForDir.sh',0755)


class h1h2QeventNode(pipeline.CondorDAGNode,webTheNode):
  """
  Runs an instance of a H1H2 qevent job
  """
  def __init__(self,job,dNode,times,ifoList,name,cp,opts,dag,qeventCommand):
    """
    job = A CondorDAGJob that can run an instance of H1H2 qevent.
    """

    ifoString = ''
    for ifo in ifoList:
      ifoString = ifoString + ifo

    self.friendlyName = name
    self.id = ifoString + '-' + name + '-' + str(times[ifoList[0]])

    pipeline.CondorDAGNode.__init__(self,job)

    # work out the common span of the single-ifo cache files
    cache_type_temp = dNode[ifoList[0]].outputFileName.split('-')[1]
    cache_type = cache_type_temp[3:len(cache_type_temp)]
    cache_start = []
    cache_end = []
    for ifo in ifoList:
      cache_temp = dNode[ifo].outputFileName.split('.')[0]
      cache_start.append(cache_temp.split('-')[2])
      cache_end.append(cache_temp.split('-')[-1])

    qeventcache = job.name + '/' + ifoString + '_' + cache_type + '-' + \
    str(max(cache_start)) + '-' + str(min(cache_end)) + '.qcache'

    if cp.has_option("followup-"+name, ifoString + '-output') and string.strip(cp.get("followup-"+name, ifoString + '-output')):
      output = string.strip(cp.get("followup-"+name, ifoString + '-output'))
    else:
      output = job.name + '/' + name + '/' + ifoString
    if not os.access(output,os.F_OK):
      os.makedirs(output)
    else:
      if not os.access(output,os.W_OK):
        print >> sys.stderr, 'path '+output+' is not writable'
        sys.exit(1)

    self.add_var_arg('event')
    qeventConfig = string.strip(cp.get("followup-"+name, ifoString + '-config-file'))
    self.add_var_arg('-p '+qeventConfig)
    self.add_file_arg('-f '+qeventcache)
    self.add_var_arg('-o '+output+'/'+repr(times[ifoList[0]]))
    self.add_var_arg(repr(times[ifoList[0]]))
    eventDuration = string.strip(cp.get("followup-"+name, 'duration'))
    self.add_var_arg(eventDuration)

    # the pre script concatenates the caches and backs up any existing
    # output directory (see h1h2QeventJob.setup_checkForDir)
    self.set_pre_script(job.name + "/checkForDir.sh %s %s %s %s %s" \
    %(dNode[ifoList[0]].outputFileName, dNode[ifoList[1]].outputFileName, \
    qeventcache, output, repr(times[ifoList[0]])))

    absoutput = os.path.abspath(output)
    self.outputName = absoutput + '/' + repr(times[ifoList[0]])

    self.outputCache = ifoString + ' ' + name + ' ' + repr(times[ifoList[0]]) + ' ' + self.outputName + '\n'

    if cp.has_option("followup-"+name,ifoString+'-web') and string.strip(cp.get("followup-"+name,ifoString+'-web')):
      pageOverride = string.strip(cp.get("followup-"+name,ifoString+'-web'))+'/'+repr(times[ifoList[0]])
    else:
      pageOverride = job.name + '/' + name + '/' + ifoString + '/' + repr(times[ifoList[0]])
    self.setupNodeWeb(job,False,dag.webPage.lastSection.lastSub,dag.page,pageOverride,dag.cache)

    if not opts.disable_dag_categories:
      self.set_category(job.name.lower())

    for ifo in ifoList:
      if dNode[ifo].validNode: self.add_parent(dNode[ifo])
      else: pass

    if eval('opts.' + qeventCommand):
      dag.addNode(self,self.friendlyName)
      self.validNode = True
    else: self.validNode = False


class FrCheckJob(pipeline.CondorDAGJob, webTheJob):
  """
  A followup job for checking frames
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "frame_check":"frame_check"
      }
    }
  def __init__(self, options, cp, tag_base='FRCHECK'):
    """
    """
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__prog__ = 'FrCheckJob'
    self.__executable = string.strip(cp.get('condor','frame_check'))
    self.__universe = "vanilla"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJobWeb(self.__prog__,tag_base)


class FrCheckNode(pipeline.CondorDAGNode,webTheNode):
  """
  Runs an instance of a FrCheck followup job
  """
  def __init__(self, FrCheckJob, procParams, ifo, trig, cp, opts, dag, datafindCache, d_node, datafindCommand):

    try:
      hipeCache = checkHipeCachePath(cp)

      if not hipeCache:
        cacheFile = datafindCache
      else:
        for row in procParams:
          param = row.param.strip("-")
          value = row.value
          if param == 'frame-cache': cacheFile = value

      self.friendlyName = 'Frame Check'

      pipeline.CondorDAGNode.__init__(self,FrCheckJob)
      self.add_var_opt("frame-cache", cacheFile)
      self.add_var_opt("frame-check-executable", string.strip(cp.get('followup-frameCheck','executable')))
      self.add_var_opt("ifo-times",ifo)
      self.add_var_opt("ifo-tag","FOLLOWUP_"+ifo)
      self.add_var_opt("user-tag",trig.eventID)
      self.id = FrCheckJob.name + '-' + ifo + '-' + str(trig.statValue) + '_' + str(trig.eventID)
      self.setupNodeWeb(FrCheckJob,True, dag.webPage.lastSection.lastSub,dag.page,None,None)

      if not opts.disable_dag_categories:
        self.set_category(FrCheckJob.name.lower())

      try:
        if d_node.validNode and eval('opts.' + datafindCommand):
          self.add_parent(d_node)
      except: pass

      if opts.frame_check:
        dag.addNode(self,self.friendlyName)
        self.validate()
      else: self.invalidate()

    except:
      self.invalidate()
      print "couldn't add frame check job for " + str(ifo) + "@ "+ str(trig.gpsTime[ifo])

class IFOstatus_checkJob(pipeline.CondorDAGJob, webTheJob):
  """
  A followup job for downloading summary plots
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "IFOstatus_check":"IFOstatus_check"
      }
    }
  def __init__(self, options, cp, tag_base='IFOSTATUS'):
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__prog__ = 'IFOstatus_checkJob'
    self.__executable = string.strip(cp.get('condor','IFOstatus_check'))
    self.__universe = "local"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJobWeb(self.__prog__,tag_base)

class IFOstatus_checkNode(pipeline.CondorDAGNode, webTheNode):
  """
  Runs an instance of an IFO status followup job
  """
  def __init__(self, IFOstatus_checkJob, ifo, trig, cp,opts,dag):

    self.friendlyName = 'IFO status summary plots'
    pipeline.CondorDAGNode.__init__(self,IFOstatus_checkJob)
    self.add_var_opt("ifo-times", ifo)
    self.add_var_opt("gps-time", trig.gpsTime[ifo])
    self.add_var_opt("ifo-tag", "FOLLOWUP_"+ifo)
    self.add_var_opt("user-tag", str(trig.eventID))
    self.id = IFOstatus_checkJob.name + '-' + str(ifo) + '-' + str(trig.statValue) + '_' + str(trig.eventID)
    self.setupNodeWeb(IFOstatus_checkJob,True, dag.webPage.lastSection.lastSub,dag.page,None,None)

    if not opts.disable_dag_categories:
      self.set_category(IFOstatus_checkJob.name.lower())

    if opts.ifo_status_check:
      dag.addNode(self,self.friendlyName)
      self.validate()
    else: self.invalidate()


class followupoddsJob(pipeline.CondorDAGJob,webTheJob):
  """
  A model selection job
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "followupodds":"lalapps_inspnest"
      }
    }
  def __init__(self,options,cp,tag_base='FOLLOWUPODDS'):
    """
    """
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__prog__ = 'followupoddsJob'
    self.__executable = string.strip(cp.get('condor','followupodds'))
    self.__universe = "vanilla"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJobWeb(self.__prog__,tag_base)

class followupoddsNode(pipeline.CondorDAGNode,webTheNode):
  """
  Runs an instance of the model selection followup job
  """
  def __init__(self,followupoddsJob,procParamsTable,trig,randomseed,cp,opts,dag):
    try:
      IFOs = trig.ifolist_in_coinc
      time_prior = string.strip(cp.get('followup-odds','time_prior'))
      Nlive = string.strip(cp.get('followup-odds','min-live'))
      Nmcmc = string.strip(cp.get('followup-odds','Nmcmc'))
      srate = string.strip(cp.get('followup-odds','sample_rate'))
      Approximant = string.strip(cp.get('followup-odds','approximant'))
      self.friendlyName = 'Odds followup job'
      pipeline.CondorDAGNode.__init__(self,followupoddsJob)
      cacheFiles=[]
      GPSstarts=[]
      GPSends=[]
      for ifo in IFOs:
        for row in procParamsTable[ifo]:
          param=row.param.strip("-")
          value=row.value
          if param == 'frame-cache': cacheFile=value
          if param == 'gps-start-time':
            GPSstarts.append(float(value))
          if param == 'gps-end-time':
            GPSends.append(float(value))
        self.add_var_arg("--IFO "+str(ifo))
        self.add_var_arg("--cache " +str(cacheFile))

      # use the intersection of the single-ifo chunks, trimmed by 64 s
      # on either side, as the analysis segment
      GPSstart=str(max(GPSstarts)+64)
      GPSend=str(min(GPSends)-64)

      outputname = followupoddsJob.name + '/'+followupoddsJob.name+'-' \
                   +trig.ifos+'-'+str(trig.statValue)+'_'+str(trig.eventID)+'_'+randomseed[0]+'.dat'
      self.add_var_opt("Nlive",Nlive)
      self.add_var_opt("GPSstart",GPSstart)
      self.add_var_opt("length",str(float(GPSend)-float(GPSstart)))
      self.add_var_opt("approximant",Approximant)
      self.add_var_opt("out",outputname)
      self.add_var_opt("Nsegs",str((int(float(GPSend))-int(float(GPSstart)))/8))
      self.add_var_opt("dt",time_prior)
      self.add_var_opt("end_time",trig.gpsTime[ifo])
      self.add_var_opt("Mmin",2.8)
      self.add_var_opt("Mmax",30)
      self.add_var_opt("srate",srate)
      self.add_var_opt("seed",randomseed[0])

      self.id = followupoddsJob.name + '-' + trig.ifos + '-' + str(trig.statValue) + '_' + str(trig.eventID) + '_' + randomseed[0]
      self.outputCache = trig.ifos + ' ' + followupoddsJob.name + ' ' +\
                         self.id.split('-')[-1]+' '+outputname+'\n'
      self.add_var_opt("channel",string.strip(cp.get("followup-coh-trigbank",trig.ifos[0:2]+"_channel")))

      self.setupNodeWeb(followupoddsJob,False,None,None,None,dag.cache)

      # only run if at least 24 s of data survive the trimming
      if opts.odds and float(GPSend)-float(GPSstart)>=24:
        dag.addNode(self,self.friendlyName)
        self.validate()
      else: self.invalidate()

    except:
      self.invalidate()
      print "Couldn't add followupOdds job for " + str(trig.gpsTime[ifo])
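# Example (sketch) of the followupoddsNode segment arithmetic: for a
# single 2048 s hipe chunk starting at GPS 873247000, GPSstart is
# 873247064 and GPSend is 873248984, giving length 1920 s and
# Nsegs = 1920/8 = 240.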


class followupOddsPostJob(pipeline.CondorDAGJob,webTheJob):
  """
  The post-processing of odds jobs
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "oddsPostScript":"OddsPostProc.py"
      }
    }
  def __init__(self,options,cp,tag_base='FOLLOWUPODDSPOST'):
    """
    """
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__prog__='followupOddsPostJob'
    self.__executable=string.strip(cp.get('condor','oddsPostScript'))
    self.__universe="vanilla"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.setupJobWeb(self.__prog__,tag_base)




class followupOddsPostNode(pipeline.CondorDAGNode,webTheNode):
  """
  Runs the post-processing script
  """
  def __init__(self,oddsPostJob,procParams,trig,oddsjoblist,cp,opts,dag):
    try:
      self.friendlyName = 'Odds plotting job'
      pipeline.CondorDAGNode.__init__(self,oddsPostJob)

      # add a --data argument for each odds job in the list
      for oddsjobId in oddsjoblist:
        oddsfile = oddsjobId.split('-')[0]+'/' + oddsjobId + '.dat'
        self.add_var_arg("--data " + oddsfile)

      Nlive = string.strip(cp.get('followup-odds','min-live'))
      self.add_var_opt("Nlive",Nlive)

      if cp.has_option('followup-odds','web') and string.strip(cp.get('followup-odds','web')):
        outputpath = string.strip(cp.get('followup-odds','web'))
      else:
        outputpath = oddsPostJob.name + "/" + str(trig.eventID)
      if not os.access(outputpath,os.F_OK):
        os.mkdir(outputpath)

      self.add_var_opt("outpath",outputpath)

      self.id = oddsPostJob.name + '-' + trig.ifos + '-' + str(trig.statValue) + '_' + str(trig.eventID)

      self.outputCache = self.id.replace('-',' ') + " " + os.path.abspath(outputpath) + "/" + self.id + "\n"

      self.setupNodeWeb(oddsPostJob,False,dag.webPage.lastSection.lastSub,None,None,dag.cache)

      # the parents of this node are the followupodds nodes for the
      # same event
      for node in dag.get_nodes():
        if isinstance(node,followupoddsNode):
          if not node.id.find(trig.ifos + '-' + str(trig.statValue) + '_' + str(trig.eventID)) == -1:
            try:
              if node.validNode: self.add_parent(node)
            except: pass

      if opts.odds:
        dag.addNode(self,self.friendlyName)
        self.validate()
      else:
        self.invalidate()

    except:
      self.invalidate()
      print "couldn't add odds post job for " + str(trig.ifos) + "@ "+ str(trig.gpsTime[trig.ifolist_in_coinc[-1]])


class followupmcmcJob(pipeline.CondorDAGJob, webTheJob):
  """
  An mcmc job
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "followupmcmc":"lalapps_followupMcmc"
      }
    }
  def __init__(self, options, cp, tag_base='FOLLOWUPMCMC'):
    """
    """
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__prog__ = 'followupmcmcJob'
    self.__executable = string.strip(cp.get('condor','followupmcmc'))
    self.__universe = "vanilla"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJobWeb(self.__prog__,tag_base)

class followupmcmcNode(pipeline.CondorDAGNode, webTheNode):
  """
  Runs an instance of an MCMC followup job
  """
  def __init__(self, followupmcmcJob, procParams, trig, randomseed, cp, opts, dag, ifo=None):

    try:
      time_margin = string.strip(cp.get('followup-mcmc','prior-coal-time-marg'))
      iterations = string.strip(cp.get('followup-mcmc','iterations'))
      tbefore = string.strip(cp.get('followup-mcmc','tbefore'))
      tafter = string.strip(cp.get('followup-mcmc','tafter'))
      massmin = string.strip(cp.get('followup-mcmc','massmin'))
      massmax = string.strip(cp.get('followup-mcmc','massmax'))
      dist90 = string.strip(cp.get('followup-mcmc','dist90'))
      dist10 = string.strip(cp.get('followup-mcmc','dist10'))

      self.friendlyName = 'MCMC followup'
      pipeline.CondorDAGNode.__init__(self,followupmcmcJob)

      if ifo:
        IFOs = [ifo]
        self.ifonames = ifo
      else:
        IFOs = trig.ifolist_in_coinc
        self.ifonames = trig.ifos

      # collect the frame cache, channel name and chunk boundaries of each IFO
      cacheFiles = ""
      channelNames = ""
      chunk_end_list = {}
      chunk_start_list = {}
      for itf in IFOs:
        for row in procParams[itf]:
          param = row.param.strip("-")
          value = row.value
          if param == 'frame-cache': cacheFile = value
          if param == 'channel-name': channel = value
          if param == 'gps-end-time': chunk_end = value
          if param == 'gps-start-time': chunk_start = value
        cacheFiles += cacheFile + ","
        channelNames += channel + ","
        chunk_end_list[itf] = chunk_end
        chunk_start_list[itf] = chunk_start

      # use the coincidence member with the largest SNR as the reference IFO
      if len(IFOs) > 1:
        maxSNR = 0
        maxIFO = ""
        for trigger in trig.coincs:
          snr = trigger.snr
          if snr > maxSNR:
            maxSNR = snr
            maxIFO = trigger.ifo
      else:
        maxIFO = IFOs[0]
      trig_tempo = getattr(trig.coincs,maxIFO)
      triggerRef = copy.deepcopy(trig_tempo)
      self.ifoRef = maxIFO

      self.add_var_opt("template",string.strip(cp.get('followup-mcmc','template')))
      self.add_var_opt("iterations",iterations)
      self.add_var_opt("randomseed",randomseed)
      self.add_var_opt("tcenter","%0.3f"%trig.gpsTime[maxIFO])
      self.add_var_opt("tbefore",tbefore)
      self.add_var_opt("tafter",tafter)

      tmin = trig.gpsTime[maxIFO] - float(time_margin)
      tmax = trig.gpsTime[maxIFO] + float(time_margin)
      self.add_var_opt("priorparameters","[" + massmin + "," + massmax + "," + str(tmin) + "," + str(tmax) + "," + dist90 + "," + dist10 + "]")

      # start the chain at the parameters of the reference trigger
      param_mchirp = triggerRef.mchirp
      param_eta = triggerRef.eta
      param_distance = triggerRef.eff_distance
      self.add_var_opt("guess","[" + str(param_mchirp) + "," + str(param_eta) + "," + str(trig.gpsTime[maxIFO]) + "," + str(param_distance) + "]")

      self.add_var_opt("cachefile","["+cacheFiles.strip(",")+"]")
      self.add_var_opt("filechannel","["+channelNames.strip(",")+"]")

      # estimate the PSD on whichever side of the trigger offers the longer
      # stretch of the chunk, keeping 64 s clear of the chunk boundaries and
      # 75 s (before) / 32 s (after) clear of the trigger itself
      psdEstimateStart = ""
      psdEstimateEnd = ""
      for itf in IFOs:
        datainchunk_before = int(trig.gpsTime[maxIFO]) - 75 - 64 - int(chunk_start_list[itf])
        datainchunk_after = int(chunk_end_list[itf]) - 64 - int(trig.gpsTime[maxIFO]) - 32
        if datainchunk_after > datainchunk_before:
          psdEstimateStart += str(int(trig.gpsTime[maxIFO]) + 32) + ","
          psdEstimateEnd += str(int(chunk_end_list[itf]) - 64) + ","
        else:
          psdEstimateStart += str(int(chunk_start_list[itf]) + 64) + ","
          psdEstimateEnd += str(int(trig.gpsTime[maxIFO]) - 75) + ","

      self.add_var_opt("psdestimatestart","["+psdEstimateStart.strip(",")+"]")
      self.add_var_opt("psdestimateend","["+psdEstimateEnd.strip(",")+"]")

      self.add_var_opt("importanceresample",10000)

      self.id = followupmcmcJob.name + '-' + self.ifonames + '-' + str(trig.statValue) + '_' + str(trig.eventID) + '_' + randomseed
      outputName = followupmcmcJob.name+'/'+self.id
      self.outputCache = self.ifonames + ' ' + followupmcmcJob.name + ' ' + self.id.split('-')[-1] + ' ' + outputName + '.csv\n'

      self.setupNodeWeb(followupmcmcJob,False,None,None,None,dag.cache)
      self.add_var_opt("outfilename",outputName)

      if not opts.disable_dag_categories:
        self.set_category(followupmcmcJob.name.lower())

      if opts.mcmc:
        dag.addNode(self,self.friendlyName)
        self.validate()
      else: self.invalidate()

    except:
      self.invalidate()
      print "couldn't add followupmcmc job for " + self.ifonames + " @ " + str(trig.gpsTime[maxIFO])

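# followupmcmcNode estimates the PSD on whichever side of the trigger offers
# the longer stretch of the analysis chunk, keeping 64 s clear of the chunk
# boundaries and 75 s (before) / 32 s (after) clear of the trigger.  The same
# arithmetic in isolation (GPS seconds; the values below are illustrative):
def _example_psd_window(trig_time, chunk_start, chunk_end):
  """
  >>> _example_psd_window(1000, 0, 2048)
  (1032, 1984)
  >>> _example_psd_window(1800, 0, 2048)
  (64, 1725)
  """
  datainchunk_before = trig_time - 75 - 64 - chunk_start
  datainchunk_after = chunk_end - 64 - trig_time - 32
  if datainchunk_after > datainchunk_before:
    return (trig_time + 32, chunk_end - 64)
  else:
    return (chunk_start + 64, trig_time - 75)
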
class followupspinmcmcJob(pipeline.CondorDAGJob, webTheJob):
  """
  A spinning MCMC job
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "followupspinspiral":"SPINspiral"
      }
    }
  def __init__(self, options, cp, tag_base='FOLLOWUPSPINMCMC'):
    """
    Sets up the SPINspiral Condor job.
    """
    # NOTE: this body mirrors the other *Job constructors in this module;
    # the universe choice is an assumption.
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__prog__ = 'followupspinmcmcJob'
    self.__executable = string.strip(cp.get('condor','followupspinspiral'))
    self.__universe = "standard"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJobWeb(self.__prog__,tag_base)

class followupspinmcmcNode(pipeline.CondorDAGNode, webTheNode):
  """
  Runs an instance of a spinning MCMC followup job
  """
  def __init__(self, followupspinmcmcJob, procParams, trig, cp, opts, dag, chain_number):

    try:

      self.friendlyName = 'SPIN MCMC followup'
      pipeline.CondorDAGNode.__init__(self,followupspinmcmcJob)

      IFOs = trig.ifolist_in_coinc

      # use the coincidence member with the largest SNR as the reference IFO
      maxSNR = 0
      maxIFO = ""
      for trigger in trig.coincs:
        snr = trigger.snr
        if snr > maxSNR:
          maxSNR = snr
          maxIFO = trigger.ifo
      trig_tempo = getattr(trig.coincs,maxIFO)
      triggerRef = copy.deepcopy(trig_tempo)
      self.ifoRef = maxIFO

      self.add_var_arg(string.strip(cp.get("followup-spin-mcmc","input_file")))

      self.id = followupspinmcmcJob.name + '-' + trig.ifos + '-' + str(trig.statValue) + '_' + str(trig.eventID) + '_' + str(chain_number)

      self.setupNodeWeb(followupspinmcmcJob,False,None,None,None,dag.cache)

      if not opts.disable_dag_categories:
        self.set_category(followupspinmcmcJob.name.lower())

      if opts.spin_mcmc:
        dag.addNode(self,self.friendlyName)
        self.validate()
      else: self.invalidate()

    except:
      self.invalidate()
      print "couldn't add followupspinmcmc job for " + trig.ifos + " @ " + str(trig.gpsTime[maxIFO])

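# Both MCMC node classes pick their reference IFO as the coincidence member
# with the largest SNR.  The loop above is equivalent to max() with a key
# function (the namedtuple triggers below are illustrative only):
def _example_loudest_ifo(coincs):
  """
  >>> from collections import namedtuple
  >>> T = namedtuple('T', ['ifo', 'snr'])
  >>> _example_loudest_ifo([T('H1', 8.0), T('L1', 11.2), T('V1', 6.5)])
  'L1'
  """
  return max(coincs, key=lambda t: t.snr).ifo
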
class plotmcmcJob(pipeline.CondorDAGJob, webTheJob):
  """
  A plot MCMC job
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "plotmcmc":"plotmcmc.py"
      }
    }
  def __init__(self, options, cp, tag_base='PLOTMCMC'):
    """
    Sets up the plotmcmc.py Condor job.
    """
    # NOTE: this body mirrors the other *Job constructors in this module;
    # the universe choice is an assumption.
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__prog__ = 'plotmcmcJob'
    self.__executable = string.strip(cp.get('condor','plotmcmc'))
    self.__universe = "vanilla"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJobWeb(self.__prog__,tag_base)

class plotmcmcNode(pipeline.CondorDAGNode, webTheNode):
  """
  Runs an instance of a plotmcmc job
  """
  def __init__(self, plotmcmcjob, trig, mcmcIdList, cp, opts, dag, ifo, ifonames):

    try:
      self.friendlyName = 'plot MCMC'
      pipeline.CondorDAGNode.__init__(self,plotmcmcjob)

      if cp.has_option('followup-plotmcmc','burnin'):
        burnin = string.strip(cp.get('followup-plotmcmc','burnin'))
        if burnin.strip():
          self.add_var_opt("burnin",burnin)

      plot_routine = string.strip(cp.get('followup-plotmcmc','plot_routine'))
      executable = string.strip(cp.get('followup-plotmcmc','executable'))

      # if the candidate is a software injection, reference the injected
      # parameters; otherwise fall back on the trigger parameters
      sim = None
      try:
        sim = isinstance(trig.coincs.sim,lsctables.SimInspiral)
      except:
        pass
      if sim:
        time = getattr(trig.coincs.sim, ifo[0:1].lower() + "_end_time")
        time_ns = getattr(trig.coincs.sim, ifo[0:1].lower() + "_end_time_ns")
        gps = float(time) + float(time_ns)/1000000000.
        mchirp = trig.coincs.sim.mchirp
        eta = trig.coincs.sim.eta
        distance = trig.coincs.sim.distance
        phi = trig.coincs.sim.phi0
      else:
        gps = trig.gpsTime[ifo]
        mchirp = getattr(trig.coincs,ifo).mchirp
        eta = getattr(trig.coincs,ifo).eta
        distance = getattr(trig.coincs,ifo).eff_distance
        phi = "0.0"

      self.add_var_opt("plot-routine",plot_routine)
      self.add_var_opt("executable",executable)
      self.add_var_opt("reference-time",gps)
      self.add_var_opt("reference-mchirp",mchirp)
      self.add_var_opt("reference-eta",eta)
      self.add_var_opt("reference-distance",distance)
      self.add_var_opt("reference-phi",phi)

      mcmcfilelist = ""
      for mcmcId in mcmcIdList:
        mcmcfilelist += mcmcId.split('-')[0]+'/' + mcmcId + '.csv,'
      self.add_var_opt("mcmc-file",mcmcfilelist.strip(','))

      self.id = plotmcmcjob.name + '-' + ifonames + '-' + str(trig.statValue) + '_' + str(trig.eventID)
      self.add_var_opt("identity",self.id)

      if cp.has_option('followup-plotmcmc', 'output') and string.strip(cp.get('followup-plotmcmc', 'output')):
        outputpath = string.strip(cp.get('followup-plotmcmc', 'output'))
      else:
        outputpath = plotmcmcjob.name
      if not os.access(outputpath,os.F_OK):
        os.makedirs(outputpath)
      else:
        if not os.access(outputpath,os.W_OK):
          print >> sys.stderr, 'path '+outputpath+' is not writable'
          sys.exit(1)

      if cp.has_option('followup-plotmcmc','web') and string.strip(cp.get('followup-plotmcmc','web')):
        webpath = string.strip(cp.get('followup-plotmcmc','web'))
      else:
        webpath = plotmcmcjob.name

      output_page = webpath + '/' + self.id
      self.outputCache = self.id.replace('-',' ') + " " + os.path.abspath(outputpath) + "/" + self.id + "\n"
      self.setupNodeWeb(plotmcmcjob,False,dag.webPage.lastSection.lastSub,None,output_page,dag.cache)

      self.add_var_opt("output-path",outputpath)

      if not opts.disable_dag_categories:
        self.set_category(plotmcmcjob.name.lower())

      # depend on every MCMC node that analysed the same candidate
      for node in dag.get_nodes():
        if isinstance(node,followupmcmcNode):
          if ifonames + '-' + str(trig.statValue) + '_' + str(trig.eventID) in node.id:
            try:
              if node.validNode: self.add_parent(node)
            except: pass

      if opts.plot_mcmc:
        dag.addNode(self,self.friendlyName)
        self.validate()
      else:
        self.invalidate()

    except:
      self.invalidate()
      print "couldn't add plot mcmc job for " + ifonames + " @ " + str(trig.gpsTime[ifo])

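# plotmcmcNode reads per-IFO end times off a SimInspiral row by attribute
# name ("h_end_time" for H1/H2, "l_end_time" for L1, ...).  getattr() covers
# this without eval(); a standalone sketch with a stand-in sim object:
def _example_sim_end_time(sim, ifo):
  """
  >>> class _Sim(object):
  ...   h_end_time = 823269333
  ...   h_end_time_ns = 500000000
  >>> _example_sim_end_time(_Sim(), 'H1')
  823269333.5
  """
  sec = getattr(sim, ifo[0:1].lower() + "_end_time")
  ns = getattr(sim, ifo[0:1].lower() + "_end_time_ns")
  return float(sec) + float(ns)/1000000000.
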
class followUpChiaJob(pipeline.CondorDAGJob, webTheJob):
  """
  Generates coherent inspiral data
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "chia":"lalapps_coherent_inspiral"
      }
    }
  def __init__(self, options, cp, tag_base='CHIA'):
    """
    Sets up the lalapps_coherent_inspiral Condor job.
    """
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__prog__ = 'followUpChiaJob'
    self.__executable = string.strip(cp.get('condor','chia'))
    self.__universe = "standard"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.add_condor_cmd('getenv','True')
    self._InspiralAnalysisNode__pad_data = 0
    self.setupJobWeb(self.__prog__,tag_base)

class followUpChiaNode(inspiral.ChiaNode, webTheNode):
  """
  Runs lalapps_coherent_inspiral, a C code that computes the coherent
  inspiral statistic.  An example command line is:

  lalapps_coherent_inspiral --segment-length 1048576 --dynamic-range-exponent 6.900000e+01 --low-frequency-cutoff 4.000000e+01 --bank-file H1H2-COHBANK_COHERENT_H1H2_PLAYGROUND-823269333-600.xml --sample-rate 4096 --cohsnr-threshold 5.500000e+00 --ifo-tag H1H2 --frame-type LSC-STRAIN --H1-framefile H1-INSPIRAL_COHERENT_H1H2_PLAYGROUND-823269286-2048.gwf --H2-framefile H2-INSPIRAL_COHERENT_H1H2_PLAYGROUND-823268952-2048.gwf --gps-end-time 823269933 --gps-start-time 823269333 --write-cohsnr --write-cohnullstat --write-cohphasediff --write-events --verbose
  """
  def __init__(self,job,trig,opts,dag,cp):

    try:
      pipeline.CondorDAGNode.__init__(self,job)
      self.friendlyName = 'Produce coherent inspiral plots of event'
      self.id = job.name + '-CHIA-' + str(trig.statValue) + '_' + str(trig.eventID)

      self.output_file_name = ""
      self.add_var_opt("segment-length",string.strip(cp.get('chia','segment-length')))
      self.add_var_opt("dynamic-range-exponent",string.strip(cp.get('chia','dynamic-range-exponent')))
      self.add_var_opt("low-frequency-cutoff",string.strip(cp.get('chia','low-frequency-cutoff')))
      self.add_var_opt("sample-rate",string.strip(cp.get('chia','sample-rate')))
      self.add_var_opt("cohsnr-threshold",string.strip(cp.get('chia','cohsnr-threshold')))
      self.add_var_opt("ra-step",string.strip(cp.get('chia','ra-step')))
      self.add_var_opt("dec-step",string.strip(cp.get('chia','dec-step')))
      self.add_var_opt("numCohTrigs",string.strip(cp.get('chia','numCohTrigs')))
      self.add_var_opt("user-tag",str(trig.eventID))
      self.add_var_opt("ifo-tag",trig.ifoTag)

      bankFile = 'trigTemplateBank/' + trig.ifoTag + '-COHBANK_FOLLOWUP_' + str(trig.eventID) + '-' + str(int(trig.gpsTime[trig.ifolist_in_coinc[0]])) + '-2048.xml.gz'

      self.add_var_opt("write-events","")
      self.add_var_opt("write-compress","")
      self.add_var_opt("write-cohsnr","")
      self.add_var_opt("write-cohnullstat","")
      self.add_var_opt("write-h1h2nullstat","")
      self.add_var_opt("write-cohh1h2snr","")

      self.set_bank(bankFile)
      self._InspiralAnalysisNode__pad_data = 0

      self.setupNodeWeb(job,False,None,None,None,dag.cache)
      self.add_var_opt("output-path",job.outputPath)

      # analyse one second of data on either side of the trigger
      hLengthAnalyzed = 1
      self.add_var_opt("gps-start-time",int(trig.gpsTime[trig.ifolist_in_coinc[0]]) - int(hLengthAnalyzed) )
      self.add_var_opt("gps-end-time",int(trig.gpsTime[trig.ifolist_in_coinc[0]]) + int(hLengthAnalyzed) )
      skipParams = ['minimal-match', 'bank-file', 'injection-file', 'trig-start-time', 'trig-end-time']

      if opts.plot_chia:
        dag.addNode(self,'chia')
        self.validate()
      else: self.invalidate()

    except:
      print >> sys.stderr, "Didn't find a coherent inspiral job, I'll assume I don't need it"

  def append_insp_node(self,inspNode,ifo):
    fileName = str(inspNode.output_file_name)
    # the inspiral xml name maps onto the frame file written alongside it;
    # re.sub removes only a trailing ".gz" (str.strip(".gz") would also eat
    # leading/trailing ".", "g" and "z" characters)
    self.add_var_arg("--"+ifo+"-framefile "+re.sub(r'\.gz$','',fileName.replace(".xml",".gwf")))
    if inspNode.validNode: self.add_parent(inspNode)

  def add_node_to_dag(self,dag,opts,trig):
    if opts.plot_chia:
      dag.addNode(self,self.friendlyName)
      self.validate()
    else:
      self.invalidate()
      print "couldn't add coherent-inspiral job for " + str(trig.eventID)

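# append_insp_node() derives ".gwf" frame-file names from ".xml"/".xml.gz"
# inspiral output names.  str.strip(".gz") would treat its argument as a set
# of characters to remove from both ends, so a trailing-suffix regex is used
# instead; the behaviour in isolation (file names are illustrative):
def _example_frame_name(xml_name):
  """
  >>> _example_frame_name('H1-INSPIRAL_FOLLOWUP_8001-823269286-2048.xml.gz')
  'H1-INSPIRAL_FOLLOWUP_8001-823269286-2048.gwf'
  >>> _example_frame_name('H1-INSPIRAL_FOLLOWUP_8001-823269286-2048.xml')
  'H1-INSPIRAL_FOLLOWUP_8001-823269286-2048.gwf'
  """
  return re.sub(r'\.gz$', '', xml_name.replace('.xml', '.gwf'))
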
class followUpCohireJob(pipeline.CondorDAGJob, webTheJob):
  """
  A clustering job for coherent inspiral triggers
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "cohire":"lalapps_cohire"
      }
    }
  def __init__(self, options, cp, tag_base='COHIRE'):
    """
    Sets up the lalapps_cohire Condor job.
    """
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__prog__ = 'followUpCohireJob'
    self.__executable = string.strip(cp.get('condor','cohire'))
    self.__universe = "vanilla"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJobWeb(self.__prog__,tag_base)

class followUpCohireNode(pipeline.CondorDAGNode, webTheNode):
  """
  Runs an instance of a cohire (coherent trigger clustering) job
  """
  def __init__(self,job,chiaXmlFilePath,trig,chiaNode,dag,page,opts):
    """
    job = A CondorDAGJob that can run an instance of COHIRE clustering
    """
    self.friendlyName = 'Produce xml file of clustered coherent triggers'
    pipeline.CondorDAGNode.__init__(self,job)
    self.output_file_name = ""
    inputFileName = 'trigTemplateBank/' + trig.ifoTag + '-COHIRE_FOLLOWUP_' + str(trig.eventID) + '-' + str(int(trig.gpsTime[trig.ifolist_in_coinc[0]]-1)) + '-2.txt'
    outputXmlFile = chiaXmlFilePath + trig.ifoTag + '-COHIRE_FOLLOWUP_' + str(trig.eventID) + '-' + str(int(trig.gpsTime[trig.ifolist_in_coinc[0]]-1)) + '-2.xml.gz'
    summaryFileName = chiaXmlFilePath + trig.ifoTag + '-COHIRE_SUMMARY_FOLLOWUP_' + str(trig.eventID) + '-' + str(int(trig.gpsTime[trig.ifolist_in_coinc[0]]-1)) + '-2.txt'
    self.add_var_opt("input",inputFileName)
    self.add_var_opt("data-type","all_data")
    self.add_var_opt("output",outputXmlFile)
    self.add_var_opt("summary-file",summaryFileName)
    self.add_var_opt("cluster-algorithm","snr")
    self.add_var_opt("sort-triggers","")
    self.add_var_opt("snr-threshold",5.0)
    self.add_var_opt("cluster-time",4000)
    self.id = job.name + '-' + str(trig.statValue) + '-' + str(trig.eventID)
    self.setupNodeWeb(job,False,None,None,None,dag.cache)
    skipParams = ['enable-output','output-path']

    if not opts.disable_dag_categories:
      self.set_category(job.name.lower())

    # run only after the coherent inspiral stage has produced its triggers
    if chiaNode.validNode: self.add_parent(chiaNode)

    if opts.plot_chia:
      dag.addNode(self,self.friendlyName)
      self.validate()
    else: self.invalidate()

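# The cohire input/output names above all follow the standard LIGO filename
# convention IFOTAG-DESCRIPTION-GPSSTART-DURATION.ext, here with a 2 s
# segment centred on the trigger.  A sketch of that construction (the helper
# name is illustrative and not used by the pipeline):
def _example_cohire_name(ifo_tag, event_id, trig_gps, ext):
  """
  >>> _example_cohire_name('H1H2', 8001, 823269333.42, 'xml.gz')
  'H1H2-COHIRE_FOLLOWUP_8001-823269332-2.xml.gz'
  """
  return '%s-COHIRE_FOLLOWUP_%s-%d-2.%s' % (ifo_tag, str(event_id), int(trig_gps - 1), ext)
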
class plotChiaJob(pipeline.CondorDAGJob, webTheJob):
  """
  A followup plotting job for coherent inspiral search and null stat timeseries
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "plotchiatimeseries":"plotchiatimeseries"
      }
    }

  def __init__(self, options, cp, tag_base='PLOT_CHIA'):
    """
    Sets up the plotchiatimeseries Condor job.
    """
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__prog__ = 'plotChiaJob'
    self.__executable = string.strip(cp.get('condor','plotchiatimeseries'))
    self.__universe = "vanilla"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJobWeb(self.__prog__,tag_base)

class plotChiaNode(pipeline.CondorDAGNode, webTheNode):
  """
  Runs an instance of a plotChia followup job
  """
  def __init__(self,job,chiaXmlFilePath,trig,cohireNode,dag,page,opts,cp):
    """
    job = A CondorDAGJob that can run an instance of plotChiaJob followup.
    """
    self.friendlyName = 'Plot CHIA time-series'
    try:
      pipeline.CondorDAGNode.__init__(self,job)
      self.output_file_name = ""
      chiaXmlFileName = chiaXmlFilePath + trig.ifoTag + '-COHIRE_FOLLOWUP_' + str(trig.eventID) + '-' + str(int(trig.gpsTime[trig.ifolist_in_coinc[0]]-1)) + '-2.xml.gz'
      self.add_var_opt("chiaXmlFile",chiaXmlFileName)
      self.add_var_opt("gps-start-time",int(trig.gpsTime[trig.ifolist_in_coinc[0]]-1))
      self.add_var_opt("gps-end-time",int(trig.gpsTime[trig.ifolist_in_coinc[0]]+1))
      self.add_var_opt("sample-rate",string.strip(cp.get('chia','sample-rate')))
      self.add_var_opt("user-tag",str(trig.eventID))
      self.add_var_opt("ifo-tag",trig.ifoTag)
      self.add_var_opt("ifo-times",trig.ifoTag)
      self.id = job.name + '-' + str(trig.statValue) + '-' + str(trig.eventID)
      self.setupNodeWeb(job,True,None,None,None,dag.cache)

      if not opts.disable_dag_categories:
        self.set_category(job.name.lower())

      # run only after the cohire clustering stage has written its xml
      if cohireNode.validNode: self.add_parent(cohireNode)

      if opts.plot_chia:
        dag.addNode(self,self.friendlyName)
        self.validate()
      else: self.invalidate()
    except:
      self.invalidate()
      print "couldn't add chia plotting job for event " + str(trig.eventID)

  def append_insp_node(self,inspNode,ifo):
    fileName = str(inspNode.output_file_name)
    # strip only a trailing ".gz" when deriving the frame-file name
    self.add_var_arg("--"+ifo+"-framefile "+re.sub(r'\.gz$','',fileName.replace(".xml",".gwf")))
    if inspNode.validNode: self.add_parent(inspNode)

class makeCheckListJob(pipeline.CondorDAGJob, webTheJob):
  """
  A job to prepare the checklist of a candidate
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "makechecklist":"makeCheckList.py"
      }
    }
  def __init__(self, options, cp):
    """
    Sets up the makeCheckList.py Condor job.
    """
    # NOTE: signature assumed to follow the other *Job constructors.
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__prog__ = 'CHECKLIST'
    self.__executable = string.strip(cp.get('condor','makechecklist'))
    self.__universe = "local"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJobWeb(self.__prog__)

class makeCheckListNode(pipeline.CondorDAGNode, webTheNode):
  """
  A node to prepare the checklist of a candidate
  """
  def __init__(self,job,trig,cp,opts,dag):
    """
    Sets up a makeCheckList.py node for one candidate.
    """
    self.friendlyName = 'Make checklist'
    pipeline.CondorDAGNode.__init__(self,job)

    self.id = job.name + "-" + str(trig.eventID)

    self.add_var_opt("trigger-id",str(trig.eventID))
    gpsList = ""
    ifolist = ""
    for ifo in trig.ifolist_in_coinc:
      ifolist += ifo
      gpsList += repr(trig.gpsTime[ifo]) + ","
    self.add_var_opt("trigger-gps",gpsList.strip(","))
    self.add_var_opt("ifolist-in-coinc",ifolist)
    self.add_var_opt("user-tag",str(trig.eventID)+"_"+ifolist+"_"+str(int(trig.gpsTime[trig.ifolist_in_coinc[0]])))
    self.add_var_opt("ifo-times",trig.ifoTag)
    self.add_var_opt("ifo-tag",trig.ifoTag)
    if cp.has_option("followup-dq","input-sql"):
      self.add_var_opt("data-quality-database",cp.get("followup-dq","input-sql"))
    elif cp.has_option("followup-dq","server-url"):
      self.add_var_opt("segment-url",cp.get("followup-dq","server-url"))
    if cp.has_option("followup-ratiotest","input-pickle"):
      self.add_var_opt("SNR-ratio-test",cp.get("followup-ratiotest","input-pickle"))
    if cp.has_section("followup-analyse-qscan"):
      if cp.has_option("followup-analyse-qscan","hoft-qscan-ref-channel"):
        self.add_var_opt("hoft-channel-ref",cp.get("followup-analyse-qscan","hoft-qscan-ref-channel"))

    if cp.has_option("followup-foreground-qscan","remote-ifo"):
      remote_ifo = cp.get("followup-foreground-qscan","remote-ifo")
      self.add_var_opt("remote-qscan-web",remote_ifo+","+string.strip(cp.get("followup-foreground-qscan",remote_ifo+"web")))
    if cp.has_option("followup-foreground-seismic-qscan","remote-ifo"):
      remote_ifo = cp.get("followup-foreground-seismic-qscan","remote-ifo")
      self.add_var_opt("remote-seismic-qscan-web",remote_ifo+","+string.strip(cp.get("followup-foreground-seismic-qscan",remote_ifo+"web")))

    self.setupNodeWeb(job,True,None,None,None,dag.cache)

    if not opts.disable_dag_categories:
      self.set_category(job.name.lower())

    # the checklist collates every other followup product for this event,
    # so depend on all of them
    for node in dag.get_nodes():
      if isinstance(node,IFOstatus_checkNode) or isinstance(node,FrCheckNode) or isinstance(node,plotSNRCHISQNode) or isinstance(node,pylal_skyPlotNode) or isinstance(node,plotChiaNode) or isinstance(node,plotmcmcNode) or isinstance(node,followupTriggerNode):
        if str(trig.eventID) in node.id and node.validNode:
          self.add_parent(node)

    if opts.make_checklist:
      dag.addNode(self,self.friendlyName)
      self.validate()
    else: self.invalidate()

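# makeCheckListNode serialises the per-IFO trigger times as a comma list;
# repr() keeps the full float precision of the GPS times.  The same result
# with str.join() (stand-in data, illustrative only):
def _example_gps_list(gpsTime, ifolist):
  """
  >>> _example_gps_list({'H1': 800000000.5, 'L1': 800000000.25}, ['H1', 'L1'])
  '800000000.5,800000000.25'
  """
  return ",".join(repr(gpsTime[ifo]) for ifo in ifolist)
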
class followupTriggerJob(pipeline.CondorDAGJob, webTheJob):
  """
  A job to plot the triggers in the chunk
  """
  defaults={
    "section":"condor",
    "options":{
      "universe":"vanilla",
      "fu_triggers":"fup_triggers.py"
      }
    }
  def __init__(self, options, cp):
    """
    Sets up the fup_triggers.py Condor job.
    """
    # NOTE: signature assumed to follow the other *Job constructors.
    if not(verifyCP(cp,self.defaults)):
      modifyCP(cp,self.defaults)
    self.__prog__ = 'followUpTriggers'
    self.__executable = string.strip(cp.get('condor','fu_triggers'))
    self.__universe = "vanilla"
    pipeline.CondorDAGJob.__init__(self,self.__universe,self.__executable)
    self.add_condor_cmd('getenv','True')
    self.setupJobWeb(self.__prog__)

class followupTriggerNode(pipeline.CondorDAGNode, webTheNode):
  """
  A node to plot triggers in the chunk
  """
  def __init__(self,job,trig,cp,opts,dag):
    """
    Sets up a fup_triggers.py node for one candidate.
    """
    self.friendlyName = 'plot triggers'
    pipeline.CondorDAGNode.__init__(self,job)

    self.id = job.name + "-" + str(trig.eventID)

    if opts.convert_eventid:
      self.add_var_opt("old-document",True)

    if opts.generate_fu_cache or not cp.has_option('followup-triggers','hipe-output-cache'):
      cacheString = 'fu_hipe.cache'
    else:
      cacheString = string.strip(cp.get('followup-triggers','hipe-output-cache'))

    followupTag = string.strip(cp.get("followup-triggersInChunk","tag"))

    if cp.has_option("followup-triggersInChunk","exttrig"):
      self.add_var_opt("followup-exttrig",True)

    if cp.has_option("followup-triggersInChunk","sned"):
      self.add_var_opt("followup-sned",string.strip(cp.get("followup-triggersInChunk","sned")))

    self.add_var_opt("gps-time",float(trig.gpsTime[trig.ifolist_in_coinc[0]]))
    self.add_var_opt("ifo-times",trig.ifoTag)
    self.add_var_opt("followup-tag",followupTag)
    self.add_var_opt("windows",string.strip(cp.get('followup-triggersInChunk','windows')))
    self.add_var_opt("event-id",str(trig.eventID))
    self.add_var_opt("cache-file",cacheString)
    self.setupNodeWeb(job,True,None,None,None,dag.cache)

    if not opts.disable_dag_categories:
      self.set_category(job.name.lower())

    if opts.followup_triggers:
      dag.addNode(self,self.friendlyName)
      self.validate()
    else: self.invalidate()

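# Every node class above ends with the same gate: it joins the DAG only when
# its command-line switch is set, and is marked invalid otherwise so that the
# isinstance() scans used for dependency wiring skip it.  The pattern in
# isolation (the _Dag/_Node stand-ins below are illustrative only):
def _example_gate_node(dag, node, enabled, friendly_name):
  """
  >>> class _Node(object):
  ...   valid = None
  ...   def validate(self): self.valid = True
  ...   def invalidate(self): self.valid = False
  >>> class _Dag(object):
  ...   def __init__(self): self.nodes = []
  ...   def addNode(self, node, name): self.nodes.append((name, node))
  >>> dag, node = _Dag(), _Node()
  >>> _example_gate_node(dag, node, True, 'plot triggers')
  >>> (len(dag.nodes), node.valid)
  (1, True)
  >>> _example_gate_node(_Dag(), node, False, 'plot triggers')
  >>> node.valid
  False
  """
  if enabled:
    dag.addNode(node, friendly_name)
    node.validate()
  else:
    node.invalidate()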