1  """ 
  2  This module contains condor jobs / node classes for the followup dag 
  3   
  4  This program creates cache files for the output of inspiral hipe 
  5  """ 
  6   
  7  __author__ = 'Chad Hanna <channa@phys.lsu.edu>' 
  8   
  9   
 10   
import sys, os, copy, math
from subprocess import *
import socket, time
import re, string
from optparse import *
import tempfile
import ConfigParser
import urlparse
from UserDict import UserDict
sys.path.append('@PYTHONLIBDIR@')

from glue import pipeline
from glue import lal
from glue import segments
from glue import segmentsUtils
from pylal.webUtils import *
from lalapps import inspiral


class webTheJob:
  """
  webTheJob is a class intended to be inherited by a class that subclasses
  the condor DAG Job.  It is useful for setting up a standard structure to
  webify the output of a DAG.  You'll want to use webTheNode and webTheDAG
  for your condor subclasses too.
  """
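
  # A minimal usage sketch (hypothetical subclass, not defined in this
  # module): inherit from both the condor DAG Job class and webTheJob,
  # then call setupJobWeb() to create the web tree for the job.
  #
  #   class qscanJob(pipeline.CondorDAGJob, webTheJob):
  #     def __init__(self, cp):
  #       pipeline.CondorDAGJob.__init__(self, 'vanilla', 'lalapps_qscan')
  #       self.setupJobWeb('qscan', cp=cp)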

  def __init__(self):
    pass

  def setupJobWeb(self, name, tag_base=None, cp=None):
    # Give the job a name and create the directories that will hold its
    # log files and output images; these directories become part of the
    # web tree.
    self.name = name
    if not os.path.exists(name):
      os.mkdir(name)
    if not os.path.exists(name+'/logs'):
      os.mkdir(name+'/logs')
    if not os.path.exists(name+'/Images'):
      os.mkdir(name+'/Images')

    self.tag_base = tag_base
    self.add_condor_cmd('environment',"KMP_LIBRARY=serial;MKL_SERIAL=yes")
    self.set_sub_file(name+'.sub')
    self.relPath = name + '/'
    self.outputPath = os.getcwd() + '/' + name + '/'
    self.set_stdout_file(self.outputPath+'/logs/'+name+'-$(macroid).out')
    self.set_stderr_file(self.outputPath+'/logs/'+name+'-$(macroid).err')
    # Optionally attach a condor memory requirement for this job, read from
    # the [condor-memory-requirement] section of the ini file.
    if cp:
      if cp.has_section("condor-memory-requirement") and \
             cp.has_option("condor-memory-requirement",name):
        requirement = cp.getint("condor-memory-requirement",name)
        self.add_condor_cmd("Requirements", \
             "(Memory > " + str(requirement) + ")")
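
  # Example (assumed) ini stanza consumed above; the condor Memory ClassAd
  # is in megabytes, so this yields Requirements = (Memory > 1024):
  #
  #   [condor-memory-requirement]
  #   qscan = 1024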


class webTheNode:
  """
  webTheNode is a class intended to be inherited by a class that subclasses
  the condor DAG Node.  It is useful for setting up a standard structure to
  webify the output of a DAG.  You'll want to use webTheJob and webTheDAG
  for your condor subclasses too.
  """
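
  # A minimal usage sketch (hypothetical subclass, not defined in this
  # module): inherit from the condor DAG Node class and webTheNode, then
  # call setupNodeWeb() with the corresponding webified job.
  #
  #   class qscanNode(pipeline.CondorDAGNode, webTheNode):
  #     def __init__(self, job, cache):
  #       pipeline.CondorDAGNode.__init__(self, job)
  #       self.id = 'qscan-node'
  #       self.setupNodeWeb(job, passItAlong=True, cache=cache)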

  def __init__(self):
    pass

  def setupNodeWeb(self, job, passItAlong=False, content=None, page=None, webOverride=None, cache=None):
    self.add_macro("macroid", self.id)
    self.jobName = job.name

    # If requested, point the executable at the job's output directory so
    # its results land inside the web tree.
    if passItAlong:
      self.add_var_opt("output-path",job.outputPath)
      self.add_var_opt("enable-output","")

    if cache:
      cache.appendCache(job.name,job.outputPath)
      # Nodes that define an outputCache contribute their own entries to
      # the sub-cache; nodes that do not are skipped.
      if getattr(self, 'outputCache', None):
        cache.appendSubCache(job.name,self.outputCache)

  def writeContent(self,content):
    # Append a one-row, two-column table to the page and link this node's
    # output under its human-readable name.
    content.appendTable(1,2,0,700)
    self.webTable = content.lastTable
    content.lastTable.row[0].cell[0].link(self.webLink,self.friendlyName)
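
  # writeContent() assumes the subclass has set self.webLink and
  # self.friendlyName beforehand, e.g. (illustrative):
  #
  #   self.friendlyName = 'qscan of the H1 trigger'
  #   self.webLink = page + '/' + self.jobName + '/' + str(self.id) + '.html'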

  def validate(self):
    self.validNode = True

  def invalidate(self):
    self.validNode = False


class webTheDAG:
  """
  webTheDAG is a class intended to be inherited by a class that subclasses
  the condor DAG.  It is useful for setting up a standard structure to
  webify the output of a DAG.  You'll want to use webTheJob and webTheNode
  for your condor subclasses too.
  """
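
  # A minimal usage sketch (hypothetical driver, not defined in this
  # module): inherit from the condor DAG class and webTheDAG, initialize
  # the job counter, and set up the web output.
  #
  #   class followUpDAG(pipeline.CondorDAG, webTheDAG):
  #     def __init__(self, log_path, cp, opts):
  #       pipeline.CondorDAG.__init__(self, log_path)
  #       self.jobsDict = {}
  #       self.setupDAGWeb('followup', 'index.html', cp, opts)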

  def __init__(self):
    pass

  def setupDAGWeb(self,title,filename,cp,opts):
    # Read the publication path and URL from the ini file, create the web
    # page object, and set up the DAGWeb working directory.
    self.publish_path = string.strip(cp.get('followup-output','page'))
    self.page = string.strip(cp.get('followup-output','url'))
    self.webPage = WebPage(title,filename,self.page)
    self.webDirs = {}
    self.cache = cacheStructure()
    try:
      os.mkdir('DAGWeb')
    except OSError: pass
    if not opts.disable_dag_categories:
      for cp_opt in cp.options('condor-max-jobs'):
        self.add_maxjobs_category(cp_opt,cp.getint('condor-max-jobs',cp_opt))
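
  # Example (assumed) ini stanzas consumed by setupDAGWeb():
  #
  #   [followup-output]
  #   page = /archive/home/user/public_html/followup
  #   url = http://www.example.org/~user/followup
  #
  #   [condor-max-jobs]
  #   qscan = 3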

  def writeDAGWeb(self,type):
    # Write out the accumulated web page and the cache files.
    self.webPage.cleanWrite(type)
    self.cache.writeCache()

  def appendSection(self,name):
    # Start a new section on the web page and touch a matching per-section
    # ini file under DAGWeb/, sanitizing the name for use as a filename.
    self.webPage.appendSection(name)
    inifile = name.replace(" ","_").replace("@","-").replace("=",'-') + '.ini'
    file = open('DAGWeb/'+inifile,'a')
    file.close()
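
  # For example (illustrative): appendSection("H1 qscan @ 855000000")
  # touches DAGWeb/H1_qscan_-_855000000.ini.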

  def appendSubSection(self,name):
    # Same as appendSection(), but for a subsection of the current section.
    self.webPage.lastSection.appendSubSection(name)
    inifile = name.replace(" ","_").replace("@","-").replace("=",'-') + '.ini'
    file = open('DAGWeb/'+inifile,'a')
    file.close()

  def addNode(self,node,jobType):
    # Count this node by job type and remember its output directory so it
    # can be included when the results are published.
    self.jobsDict[jobType] = self.jobsDict.get(jobType, 0) + 1
    try:
      self.webDirs[node.jobName] = node.jobName
    except AttributeError:
      pass
    self.add_node(node)
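
  # Typical call pattern (illustrative): dag.addNode(node, 'qscan'), after
  # which self.jobsDict['qscan'] counts the qscan nodes added so far.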

  def publishToHydra(self):
    # Build a single rsync command that copies every job output directory,
    # the DAGWeb directory, and index.html to the publication path.
    dirStr = ''
    for dir in self.webDirs:
      dirStr += dir + ' '
    dirStr = 'rsync -vrz '+dirStr+' DAGWeb index.html '
    print dirStr
    copying_results = call(dirStr+self.publish_path, shell=True)
    if copying_results != 0:
      print >> sys.stderr, "the followup results could not be copied to "+self.publish_path
      sys.exit(1)
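
  # For a DAG with a single 'qscan' job directory this runs (illustrative
  # publication path):
  #
  #   rsync -vrz qscan  DAGWeb index.html /home/user/public_html/followup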

  def printNodeCounts(self):
    for jobs in self.jobsDict:
      print "\nFound " + str(self.jobsDict[jobs]) + " " + str(jobs) + " Jobs"

  def writeAll(self,type):
    # Print a summary, then write the condor sub files, the DAG, the
    # convenience shell script, and the web page.
    self.printNodeCounts()
    print "\n\n.......Writing DAG"
    self.write_sub_files()
    self.write_dag()
    self.write_script()
    self.writeDAGWeb(type)
    print "\n\n  Created a DAG file which can be submitted by executing"
    print "    condor_submit_dag " + self.get_dag_file()
    print """\n  from a condor submit machine
  Before submitting the dag, you must execute

    export _CONDOR_DAGMAN_LOG_ON_NFS_IS_ERROR=FALSE

  If you are running LSCdataFind jobs, do not forget to initialize your grid
  proxy certificate on the condor submit machine by running the commands

    unset X509_USER_PROXY
    grid-proxy-init -hours 72

  Enter your pass phrase when prompted. The proxy will be valid for 72 hours.
  If you expect the LSCdataFind jobs to take longer to complete, increase the
  time specified in the -hours option to grid-proxy-init. You can check that
  the grid proxy has been successfully created by executing the command:

    grid-cert-info -all -file /tmp/x509up_u`id -u`

  This will also give the expiry time of the proxy."""
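
# End-to-end sketch (hypothetical driver built on these classes; see the
# followUpDAG sketch above for its constructor):
#
#   dag = followUpDAG(log_path, cp, opts)
#   dag.appendSection('qscan')
#   dag.addNode(node, 'qscan')
#   dag.writeAll(type)
#   dag.publishToHydra()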