  1  #!/usr/bin/env python 
  2   
  3  # Copyright (C) 2011 Duncan Macleod 
  4  # 
  5  # This program is free software; you can redistribute it and/or modify it 
  6  # under the terms of the GNU General Public License as published by the 
  7  # Free Software Foundation; either version 3 of the License, or (at your 
  8  # option) any later version. 
  9  # 
 10  # This program is distributed in the hope that it will be useful, but 
 11  # WITHOUT ANY WARRANTY; without even the implied warranty of 
 12  # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General 
 13  # Public License for more details. 
 14  # 
 15  # You should have received a copy of the GNU General Public License along 
 16  # with this program; if not, write to the Free Software Foundation, Inc., 
 17  # 51 Franklin Street, Fifth Floor, Boston, MA  02110-1301, USA. 
 18   
 19  # ============================================================================= 
 20  # Preamble 
 21  # ============================================================================= 
 22   
 23  from __future__ import division 
 24  import re,numpy,math,subprocess,scipy,sys 
 25   
 26  from glue.ligolw import ligolw,table,lsctables,utils 
 27  from glue.ligolw.utils import process as ligolw_process 
 28  from glue import segments 
 29   
 30  from pylal.xlal.datatypes.ligotimegps import LIGOTimeGPS 
 31  import lal as XLALConstants 
 32  from pylal.dq import dqTriggerUtils 
 33   
 34  from matplotlib import use 
 35  use('Agg') 
 36  from pylab import hanning 
 37   
 38  # Hey, scipy, shut up about your nose already. 
 39  import warnings 
 40  warnings.filterwarnings("ignore") 
 41  from scipy import signal as signal 
 42  from scipy import interpolate, integrate 
 43   
 44  from matplotlib import mlab 
 45  from glue import git_version 
 46   
 47  __author__  = "Duncan Macleod <duncan.macleod@astro.cf.ac.uk>" 
 48  __version__ = "git id %s" % git_version.id 
 49  __date__    = git_version.date 
 50   
 51  """ 
 52  This module provides a bank of useful functions for manipulating triggers and trigger files for data quality investigations. 
 53  """ 
 54   
 55  # ============================================================================= 
 56  # Execute shell command and get output 
 57  # ============================================================================= 
 58   
 59 -def make_external_call(command): 
 60   
 61    """ 
 62      Execute shell command and capture standard output and errors.  
 63      Returns tuple "(stdout,stderr)". 
 64    """ 
 65   
 66    p = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.PIPE, 
 67      shell=isinstance(command, str)) 
 68    out, err = p.communicate() 
 69   
 70    return out,err 
 71   
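A minimal usage sketch (editorial example, not part of the original module):
run a shell command and capture its output; the commands shown are arbitrary.

  >>> out, err = make_external_call(['ls', '-l'])
  >>> out, err = make_external_call('ls -l')    # string commands run through the shell
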
 72  # ============================================================================= 
 73  # Read injection files 
 74  # ============================================================================= 
 75   
 76 -def frominjectionfile(file, type, ifo=None, start=None, end=None): 
 77    
 78    """ 
  79      Read a generic injection file object containing injections of the given 
  80      type string. Returns a 'Sim' lsctable of the corresponding type. 
 81   
 82      Arguments: 
 83      
 84        file : file object 
 85        type : [ "inspiral" | "burst" | "ringdown" ] 
 86   
 87      Keyword arguments: 
 88   
 89        ifo : [ "G1" | "H1" | "H2" | "L1" | "V1" ] 
 90    """ 
 91   
 92    # read type 
 93    type = type.lower() 
 94   
 95    # read injection xml 
 96    xml = re.compile('(xml$|xml.gz$)') 
 97    if re.search(xml,file.name): 
 98      xmldoc,digest = utils.load_fileobj(file) 
 99      injtable = table.get_table(xmldoc,'sim_%s:table' % (type)) 
100   
101    # read injection txt 
102    else: 
103      cchar = re.compile('[#%<!()_\[\]{}:;\'\"]+') 
104   
105      #== construct new Sim{Burst,Inspiral,Ringdown}Table 
106      injtable = lsctables.New(lsctables.__dict__['Sim%sTable' % (type.title())]) 
107      if type=='inspiral': 
108        columns = ['geocent_end_time.geocent_end_time_ns',\ 
109                   'h_end_time.h_end_time_ns',\ 
110                   'l_end_time.l_end_time_ns',\ 
111                   'v_end_time.v_end_time_ns',\ 
112                   'distance']  
113        for line in file.readlines(): 
114          if re.match(cchar,line): 
115            continue 
116          # set up siminspiral object 
117          inj = lsctables.SimInspiral() 
118          # split data 
119          sep = re.compile('[\s,=]+') 
120          data = sep.split(line) 
121          # set attributes 
122          inj.geocent_end_time    = int(data[0].split('.')[0]) 
123          inj.geocent_end_time_ns = int(data[0].split('.')[1]) 
124          inj.h_end_time          = int(data[1].split('.')[0]) 
125          inj.h_end_time_ns       = int(data[1].split('.')[1]) 
126          inj.l_end_time          = int(data[2].split('.')[0]) 
127          inj.l_end_time_ns       = int(data[2].split('.')[1]) 
128          inj.v_end_time          = int(data[3].split('.')[0]) 
129          inj.v_end_time_ns       = int(data[3].split('.')[1]) 
130          inj.distance            = float(data[4]) 
131   
132          injtable.append(inj) 
133   
134      if type=='burst': 
135        if file.readlines()[0].startswith('filestart'): 
136          # if given parsed burst file 
137          file.seek(0) 
138   
139          snrcol = { 'G1':23, 'H1':19, 'L1':21, 'V1':25 } 
140   
141          for line in file.readlines(): 
142            inj = lsctables.SimBurst() 
143            # split data 
144            sep = re.compile('[\s,=]+') 
145            data = sep.split(line) 
146            # set attributes 
147   
148            # gps time 
149            if 'burstgps' in data: 
150              idx = data.index('burstgps')+1 
151              geocent = LIGOTimeGPS(data[idx]) 
152   
153              inj.time_geocent_gps    = geocent.seconds 
154              inj.time_geocent_gps_ns = geocent.nanoseconds 
155            else: 
156              continue 
157   
158   
159            #inj.waveform            = data[4] 
160            #inj.waveform_number     = int(data[5]) 
161   
162            # frequency 
163            if 'freq' in data: 
164              idx = data.index('freq')+1 
165              inj.frequency = float(data[idx]) 
166            else: 
167              continue 
168   
169            # SNR a.k.a. amplitude 
170            if ifo and 'snr%s' % ifo in data: 
171              idx = data.index('snr%s' % ifo)+1 
172              inj.amplitude = float(data[idx]) 
173            elif 'rmsSNR' in data: 
174              idx = data.index('rmsSNR')+1 
175              inj.amplitude = float(data[idx]) 
176            else: 
177              continue 
178   
179            if 'phi' in data: 
180              idx = data.index('phi' )+1 
181              inj.ra = float(data[idx])*24/(2*math.pi)        
182   
183            if 'theta' in data: 
184              idx = data.index('theta' )+1  
 185            inj.dec = 90-(float(data[idx])*180/math.pi) 
186   
187            if ifo and 'hrss%s' % ifo in data: 
188              idx = data.index('hrss%s' % ifo)+1 
189              inj.hrss = float(data[idx]) 
190            elif 'hrss' in data: 
191              idx = data.index('hrss')+1 
192              inj.hrss = float(data[idx]) 
193   
194            # extra columns to be added when I know how 
195            #inj.q = 0 
196            #inj.q                   = float(data[11]) 
197            #h_delay = LIGOTimeGPS(data[41]) 
198            #inj.h_peak_time         = inj.time_geocent_gps+h_delay.seconds 
199            #inj.h_peak_time_ns      = inj.time_geocent_gps_ns+h_delay.nanoseconds 
200            #l_delay = LIGOTimeGPS(data[43]) 
201            #inj.l_peak_time         = inj.time_geocent_gps+l_delay.seconds 
202            #inj.l_peak_time_ns      = inj.time_geocent_gps_ns+l_delay.nanoseconds 
203            #v_delay = LIGOTimeGPS(data[43]) 
204            #inj.v_peak_time         = inj.time_geocent_gps+v_delay.seconds 
205            #inj.v_peak_time_ns      = inj.time_geocent_gps_ns+v_delay.nanoseconds 
206   
207            injtable.append(inj) 
208   
209        else: 
 210        # if given raw (unparsed) burst file 
211          file.seek(0) 
212          for line in file.readlines(): 
213            inj = lsctables.SimBurst() 
214            # split data 
215            sep = re.compile('[\s,]+') 
216            data = sep.split(line) 
217            # set attributes 
218            geocent = LIGOTimeGPS(data[0]) 
219            inj.time_geocent_gps    = geocent.seconds 
220            inj.time_geocent_gps_ns = geocent.nanoseconds 
221   
222            injtable.append(inj) 
223   
224    injections = table.new_from_template(injtable) 
225    if not start:  start = 0 
226    if not end:    end   = 9999999999 
227    span = segments.segmentlist([ segments.segment(start, end) ]) 
228    get_time = dqTriggerUtils.def_get_time(injections.tableName) 
229    injections.extend(inj for inj in injtable if get_time(inj) in span) 
230   
231    return injections 
232   
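A usage sketch (editorial example, not from the original module): read an
inspiral injection XML file and keep only injections within a GPS interval;
the file name here is hypothetical.

  >>> injfile = open('HL-INJECTIONS_1234-800000000-1000.xml')
  >>> sims = frominjectionfile(injfile, 'inspiral', start=800000000, end=800001000)
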
233  # ============================================================================= 
234  # Calculate band-limited root-mean-square 
235  # ============================================================================= 
236   
237 -def blrms(data, sampling, average=1, band=None, ripple_db=50, width=2.0,\ 
238            remove_mean=False, return_filter=False, verbose=False): 
239   
240    """ 
241      This function will calculate the band-limited root-mean-square of the given 
242      data, using averages of the given length in the given [fmin,fmax] band 
243      with a kaiser window. 
244   
 245      Options are included to remove the mean of the data before filtering 
 246      and to return the filter coefficients used. 
247   
248      Arguments: 
249   
250        data : numpy.ndarray 
251          array of data points 
252        sampling : int 
253          number of data points per second 
254   
255      Keyword arguments: 
256   
257        average : float 
258          length of rms in seconds 
259        band : tuple 
260          [fmin, fmax] for bandpass 
261        ripple_db : int 
262          Attenuation in the stop band, in dB 
263        width : float 
264          Desired width of the transition from pass to stop, in Hz 
265        remove_mean : boolean 
266        verbose : boolean 
267    """ 
268   
269    nyq = sampling/2 
270   
271    # verify band variables 
 272    if band is None: 
273      band=[0,sampling/2] 
274    fmin = float(band[0]) 
275    fmax = float(band[1]) 
276   
277    if verbose: 
278      sys.stdout.write("Calculating BLRMS in band %s-%s Hz...\n" % (fmin, fmax)) 
279   
280    # 
281    # remove mean 
282    # 
283   
284    if remove_mean: 
285      data = data-data.mean() 
286      if verbose: sys.stdout.write("Data mean removed.\n") 
287   
288    # 
289    # Bandpass data 
290    # 
291    if return_filter: 
292      data, filter = bandpass(data, sampling, fmin, fmax, ripple_db=ripple_db,\ 
293                              width=width, return_filter=True, verbose=verbose) 
294    else: 
295      data = bandpass(data, sampling, fmin, fmax, ripple_db=ripple_db,\ 
296                      width=width, return_filter=False, verbose=verbose) 
297   
298   
299    # 
300    # calculate rms 
301    # 
302   
303    # construct output array 
304    numsamp = int(average*sampling) 
 305    numaverage = int(numpy.ceil(len(data)/sampling/average)) 
306    output  = numpy.empty(numaverage) 
307   
308    # loop over averages 
309    for i in xrange(len(output)): 
310   
311      # get indices 
 312      idxmin = int(i*sampling*average) 
313      idxmax = idxmin + numsamp 
314   
315      # get data chunk 
316      chunk = data[idxmin:idxmax] 
317   
318      # get rms 
319      output[i] = (chunk**2).mean()**(1/2) 
320   
321    if verbose: sys.stdout.write("RMS calculated for %d averages.\n"\ 
322                                 % len(output)) 
323   
324    if return_filter: 
325      return output, filter 
326    else: 
327      return output 
328   
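A usage sketch (editorial example): one-second band-limited RMS of white noise
in the 10-100 Hz band, assuming a 1024 Hz sample rate; the data are toy values.

  >>> import numpy
  >>> noise = numpy.random.normal(size=1024 * 16)
  >>> rms = blrms(noise, 1024, average=1, band=(10, 100))
  >>> len(rms)    # one value per second of data
  16
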
329  # ============================================================================= 
330  # Bandpass 
331  # ============================================================================= 
332   
333 -def bandpass(data, sampling, fmin, fmax, ripple_db=50, width=2.0,\ 
334               return_filter=False, verbose=False): 
335   
336    """ 
337      This function will bandpass filter data in the given [fmin,fmax] band 
338      using a kaiser window. 
339   
340      Arguments: 
341   
342        data : numpy.ndarray 
343          array of data points 
344        sampling : int 
345          number of data points per second 
 346        fmin : float 
 347          lower frequency cutoff of the passband 
 348        fmax : float 
 349          upper frequency cutoff of the passband 
350   
351      Keyword arguments: 
352   
353        ripple_db : int 
354          Attenuation in the stop band, in dB 
355        width : float 
356          Desired width of the transition from pass to stop, in Hz 
357        return_filter: boolean 
358          Return filter 
359        verbose : boolean 
360    """ 
361   
362    # construct filter 
363    order, beta = signal.kaiserord(ripple_db, width*2/sampling) 
364   
365    lowpass = signal.firwin(order, fmin*2/sampling, window=('kaiser', beta)) 
366    highpass = - signal.firwin(order, fmax*2/sampling, window=('kaiser', beta)) 
367    highpass[order//2] = highpass[order//2] + 1 
368   
369    bandpass = -(lowpass + highpass); bandpass[order//2] = bandpass[order//2] + 1 
370   
371    # filter data forward then backward 
372    data = signal.lfilter(bandpass,1.0,data) 
373    data = data[::-1] 
374    data = signal.lfilter(bandpass,1.0,data) 
375    data = data[::-1] 
376   
377    if verbose: sys.stdout.write("Bandpass filter applied to data.\n") 
378   
379    if return_filter: 
380      return data, bandpass 
381    else: 
382      return data 
383   
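A usage sketch (editorial example): band-pass toy data between 40 Hz and
300 Hz at a 4096 Hz sample rate; lowpass() and highpass() below take the same
keyword arguments with a single corner frequency.

  >>> import numpy
  >>> data = numpy.random.normal(size=4096 * 8)
  >>> filtered = bandpass(data, 4096, 40, 300)
  >>> filtered, coeffs = bandpass(data, 4096, 40, 300, return_filter=True)
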
384  # ============================================================================= 
385  # Lowpass 
386  # ============================================================================= 
387   
388 -def lowpass(data, sampling, fmin, ripple_db=50, width=2.0,\ 
389              return_filter=False, verbose=False): 
390   
391    """ 
 392      This function will lowpass filter the data below the given cutoff 
 393      frequency fmin using a kaiser window. 
394   
395      Arguments: 
396   
397        data : numpy.ndarray 
398          array of data points 
399        sampling : int 
400          number of data points per second 
401        fmin : float 
 402          cutoff frequency of the lowpass filter 
403   
404      Keyword arguments: 
405   
406        ripple_db : int 
407          Attenuation in the stop band, in dB 
408        width : float 
409          Desired width of the transition from pass to stop, in Hz 
410        return_filter: boolean 
411          Return filter 
412        verbose : boolean 
413    """ 
414   
415    # construct filter 
416    order, beta = signal.kaiserord(ripple_db, width*2/sampling) 
417   
418    lowpass = signal.firwin(order, fmin*2/sampling, window=('kaiser', beta)) 
419   
420    # filter data forward then backward 
421    data = signal.lfilter(lowpass,1.0,data) 
422    data = data[::-1] 
423    data = signal.lfilter(lowpass,1.0,data) 
424    data = data[::-1] 
425   
426    if verbose: sys.stdout.write("Lowpass filter applied to data.\n") 
427   
428    if return_filter: 
429      return data, lowpass 
430    else: 
431      return data 
432   
433  # ============================================================================= 
434  # Highpass 
435  # ============================================================================= 
436   
437 -def highpass(data, sampling, fmax, ripple_db=50, width=2.0,\ 
438               return_filter=False, verbose=False): 
439   
440    """ 
 441      This function will highpass filter the data above the given cutoff 
 442      frequency fmax using a kaiser window. 
443   
444      Arguments: 
445   
446        data : numpy.ndarray 
447          array of data points 
448        sampling : int 
449          number of data points per second 
450        fmax : float 
 451          cutoff frequency of the highpass filter 
452   
453      Keyword arguments: 
454   
455        ripple_db : int 
456          Attenuation in the stop band, in dB 
457        width : float 
458          Desired width of the transition from pass to stop, in Hz 
459        return_filter: boolean 
460          Return filter 
461        verbose : boolean 
462    """ 
463   
464    # construct filter 
465    order, beta = signal.kaiserord(ripple_db, width*2/sampling) 
466   
467    highpass = - signal.firwin(order, fmax*2/sampling, window=('kaiser', beta)) 
468    highpass[order//2] = highpass[order//2] + 1 
469   
470    # filter data forward then backward 
471    data = signal.lfilter(highpass,1.0,data) 
472    data = data[::-1] 
473    data = signal.lfilter(highpass,1.0,data) 
474    data = data[::-1] 
475   
476    if verbose: sys.stdout.write("Highpass filter applied to data.\n") 
477   
478    if return_filter: 
479      return data, highpass 
480    else: 
481      return data 
482   
483  # ============================================================================= 
484  # Calculate spectrum 
485  # ============================================================================= 
486   
487 -def spectrum(data, sampling, NFFT=256, overlap=0.5,\ 
488               window='hanning', detrender=mlab.detrend_linear,\ 
489               sides='onesided', scale='PSD'): 
490   
491    numpoints  = len(data) 
 492    numoverlap = int(NFFT * (1.0 - overlap)) 
493   
494    if isinstance(window,str): 
495      window=window.lower() 
496   
497    win = signal.get_window(window, NFFT) 
498   
499    # calculate PSD with given parameters 
500    spec,freq = mlab.psd(data, NFFT=NFFT, Fs=sampling, noverlap=numoverlap,\ 
501                         window=win, sides=sides, detrend=detrender) 
502   
503    # rescale data to meet user's request 
504    scale = scale.lower() 
505    if scale == 'asd': 
506      spec = numpy.sqrt(spec) * numpy.sqrt(2 / (sampling*sum(win**2))) 
507    elif scale == 'psd': 
508      spec *= 2/(sampling*sum(win**2)) 
509    elif scale == 'as': 
 510      spec = numpy.sqrt(spec) * numpy.sqrt(2) / sum(win) 
511    elif scale == 'ps': 
512      spec = spec * 2 / (sum(win)**2) 
513   
514    return freq, spec.flatten() 
515   
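A usage sketch (editorial example): amplitude spectral density of toy data
sampled at 1024 Hz, using 1-second FFTs with 50% overlap.

  >>> import numpy
  >>> data = numpy.random.normal(size=1024 * 16)
  >>> freq, asd = spectrum(data, 1024, NFFT=1024, overlap=0.5, scale='ASD')
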
516  # ============================================================================= 
517  # Median Mean Spectrum 
518  # ============================================================================= 
519   
520 -def AverageSpectrumMedianMean(data, fs, NFFT=256, overlap=128,\ 
521                                window=('kaiser',24), sides='onesided',\ 
522                                verbose=False, log=False, warn=True): 
523   
524    """ 
525      Computes power spectral density of a data series using the median-mean 
526      average method. 
527    """ 
528   
529    if sides!='onesided': 
 530      raise NotImplementedError('Only one sided spectrum implemented for the moment') 
531   
532    # cast data series to numpy array 
533    data = numpy.asarray(data) 
534   
535    # number of segments (must be even) 
536    if overlap==0: 
537      numseg = int(len(data)/NFFT) 
538    else: 
539      numseg = 1 + int((len(data)-NFFT)/overlap) 
540    assert (numseg - 1)*overlap + NFFT == len(data),\ 
541           "Data is wrong length to be covered completely, please resize" 
542   
543    # construct window 
544    win = scipy.signal.get_window(window, NFFT) 
545   
546    if verbose: sys.stdout.write("%s window constructed.\nConstructing " 
547                                 "median-mean average spectrum " 
548                                 "with %d segments...\n"\ 
549                                 % (window, numseg)) 
550   
551    # 
552    # construct PSD 
553    # 
554   
555    # fft scaling factor for units of Hz^-1 
556    scaling_factor = 1 / (fs * NFFT) 
557   
558    # construct frequency 
559    f = numpy.arange(NFFT//2 + 1) * (fs / NFFT) 
560   
561    odd  = numpy.arange(0, numseg, 2) 
562    even = numpy.arange(1, numseg, 2) 
563   
 564    # if odd number of segments, ignore the last one (better suggestions welcome) 
565    if numseg == 1: 
566      odd = [0] 
567      even = [] 
568    elif numseg % 2 == 1: 
569      odd = odd[:-1] 
570      numseg -= 1 
571      if warn: 
572        sys.stderr.write("WARNING: odd number of FFT segments, skipping last.\n") 
573   
574    # get bias factor 
575    biasfac = MedianBias(numseg//2) 
576    # construct normalisation factor 
577    normfac = 1/(2*biasfac) 
578   
579    # set data holder 
580    S = numpy.empty((numseg, len(f))) 
581   
582    # loop over segments 
583    for i in xrange(numseg): 
584   
585      # get data 
586      chunk = data[i*overlap:i*overlap+NFFT] 
587      # apply window 
588      wdata = WindowDataSeries(chunk, win) 
589      # FFT 
590      S[i]  = PowerSpectrum(wdata, sides) * scaling_factor 
591   
592    if verbose: sys.stdout.write("Generated spectrum for each chunk.\n") 
593   
594    # compute median-mean average 
595    if numseg > 1: 
596      S_odd = numpy.median([S[i] for i in odd],0) 
597      S_even = numpy.median([S[i] for i in even],0) 
598      S = (S_even  + S_odd) * normfac 
599    else: 
600      S = S.flatten() 
601    if verbose: sys.stdout.write("Calculated median-mean average.\n") 
602   
603    if log: 
 604      f_log = numpy.logspace(numpy.log10(f[1]), numpy.log10(f[-1]), num=len(f)//2,\ 
605                             endpoint=False) 
606      I = interpolate.interp1d(f, S) 
607      S = I(f_log) 
608      return f_log, S 
609   
610    return f, S 
611   
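A usage sketch (editorial example): median-mean PSD of toy data sampled at
1024 Hz; the data length is chosen so that (numseg-1)*overlap + NFFT covers
the series exactly, as the assertion above requires.

  >>> import numpy
  >>> data = numpy.random.normal(size=256 + 128 * 61)   # 62 half-overlapping segments
  >>> f, S = AverageSpectrumMedianMean(data, 1024., NFFT=256, overlap=128)
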
612  # ============================================================================= 
613  # Median bias factor 
614  # ============================================================================= 
615   
616 -def MedianBias(nn): 
617   
618    """ 
619      Returns the median bias factor. 
620    """ 
621   
622    nmax = 1000; 
623    ans  = 1; 
624    n    = (nn - 1)//2; 
625    if nn >= nmax: 
626     return numpy.log(2) 
627   
628    for i in xrange(1, n+1): 
629      ans -= 1.0/(2*i); 
630      ans += 1.0/(2*i + 1); 
631   
632    return ans; 
633   
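As an illustrative check (editorial note): the bias factor is 1 for a single
segment, approaches log(2) ~ 0.693 for many segments (the value returned once
nn reaches 1000), and takes intermediate values in between, e.g.

  >>> MedianBias(1)
  1
  >>> round(MedianBias(15), 3)
  0.725
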
634  # ============================================================================= 
635  # Median average spectrum 
636  # ============================================================================= 
637   
638 -def AverageSpectrumMedian(data, fs, NFFT=256, overlap=128,\ 
639                            window='hanning', sides='onesided',\ 
640                            verbose=False): 
641   
642    """ 
643      Construct power spectral density for given data set using the median 
644      average method.   
645    """ 
646   
647    if sides!='onesided': 
 648      raise NotImplementedError('Only one sided spectrum implemented for the moment') 
649   
650    # cast data series to numpy array 
651    data = numpy.asarray(data) 
652   
653    # number of segments (must be even) 
654    if overlap==0: 
655      numseg = int(len(data)/NFFT) 
656    else: 
657      numseg = 1 + int((len(data)-NFFT)/overlap) 
658    assert (numseg - 1)*overlap + NFFT == len(data),\ 
659           "Data is wrong length to be covered completely, please resize" 
660   
661    # construct window 
662    win = scipy.signal.get_window(window, NFFT) 
663   
664    if verbose: sys.stdout.write("%s window constructed.\nConstructing " 
665                                 "median average spectrum " 
666                                 "with %d segments...\n"\ 
667                                 % (window.title(), numseg)) 
668   
669    # 
670    # construct PSD 
671    # 
672   
673    # fft scaling factor for units of Hz^-1 
674    scaling_factor = 1 / (fs * NFFT) 
675   
676    # construct frequency 
677    f = numpy.arange(NFFT//2 + 1) * (fs / NFFT) 
678   
679    # get bias factor 
680    biasfac = MedianBias(numseg) 
681   
682    # construct normalisation factor 
683    normfac = 1/(biasfac) 
684   
685    # set data holder 
686    S = numpy.empty((numseg, len(f))) 
687   
688    # loop over segments 
689    for i in xrange(numseg): 
690   
691      # get data 
692      chunk = data[i*overlap:i*overlap+NFFT] 
693      # apply window 
694      wdata = WindowDataSeries(chunk, win) 
695      # FFT 
696      S[i]  = PowerSpectrum(wdata, sides) * scaling_factor 
697   
698    if verbose: sys.stdout.write("Generated spectrum for each chunk.\n") 
699   
 700    # compute median average 
 701    if numseg > 1: 
 702      S = numpy.median(S, axis=0)*normfac 
703    else: 
704      S = S.flatten() 
705    if verbose: sys.stdout.write("Calculated median average.\n") 
706   
707    return f, S  
708   
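A usage sketch (editorial example), mirroring the median-mean example above:

  >>> import numpy
  >>> data = numpy.random.normal(size=256 + 128 * 61)
  >>> f, S = AverageSpectrumMedian(data, 1024., NFFT=256, overlap=128)
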
709  # ============================================================================= 
710  # Apply window 
711  # ============================================================================= 
712   
713 -def WindowDataSeries(series, window=None): 
714   
715    """ 
716      Apply window function to data set, defaults to Hanning window. 
717    """ 
718   
719    # generate default window 
 720    if window is None: 
721      window = scipy.signal.hanning(len(series)) 
722   
723    # check dimensions 
724    assert len(series)==len(window), 'Window and data must be same shape' 
725   
726    # get sum of squares 
727    sumofsquares = (window**2).sum() 
728    assert sumofsquares > 0, 'Sum of squares of window non-positive.' 
729   
730    # generate norm 
731    norm = (len(window)/sumofsquares)**(1/2) 
732   
733    # apply window 
734    return series * window * norm 
735   
736  # ============================================================================= 
737  # Power spectrum 
738  # ============================================================================= 
739   
740 -def PowerSpectrum(series, sides='onesided'): 
741   
742    """ 
 743      Calculate power spectrum of the given series. 
744    """ 
745   
746    if sides!='onesided': 
747      raise NotImplementedError('Only one sided spectrum implemented for the moment') 
748   
749    # apply FFT 
750    tmp = numpy.fft.fft(series, n=len(series)) 
751   
752    # construct spectrum 
753    if sides=='onesided': 
754      spec = numpy.empty(len(tmp)//2+1) 
755    elif sides=='twosided': 
756      spec = numpy.empty(len(tmp)) 
757   
758    # DC component 
 759    spec[0] = numpy.abs(tmp[0])**2 
760   
761    # others 
762    s = (len(series)+1)//2 
763    spec[1:s] = 2 * (tmp[1:s].real**2 + tmp[1:s].imag**2) 
764   
765    # Nyquist 
766    if len(series) % 2 == 0: 
 767      spec[len(series)//2] = numpy.abs(tmp[len(series)//2])**2 
768   
769    return spec 
770   
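A usage sketch (editorial example) combining the two helpers above, as the
averaging routines do for each FFT segment: window a chunk of toy data with
the default Hanning window, then take its one-sided power spectrum.

  >>> import numpy
  >>> chunk = numpy.random.normal(size=256)
  >>> wdata = WindowDataSeries(chunk)
  >>> spec = PowerSpectrum(wdata)    # one-sided, length 256//2 + 1 = 129
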
771  # ============================================================================= 
772  # Inspiral range 
773  # ============================================================================= 
774   
775 -def inspiralrange(f, S, rho=8, m1=1.4, m2=1.4, fmin=30, fmax=4096,\ 
776                    horizon=False): 
777   
778    """ 
779      Calculate inspiral range for a given spectrum. 
780    """ 
781   
782    Mpc = 10**6 * XLALConstants.PC_SI 
783   
784    # compute chirp mass and total mass (in units of solar masses) 
785    mtot = m1 + m2; 
786    reducedmass = m1*m2/mtot; 
787    mchirp = reducedmass**(3/5)*mtot**(2/5); 
788   
789    # calculate prefactor in m^2 
790    mchirp *= XLALConstants.MSUN_SI * XLALConstants.G_SI /\ 
791              XLALConstants.C_SI**2 
792    pre = (5 * XLALConstants.C_SI**(1/3) * mchirp**(5/3) * 1.77**2) /\ 
793          (96 * numpy.pi ** (4/3) * rho**2) 
794   
795    # include fisco 
796    fisco = XLALConstants.C_SI**3/XLALConstants.G_SI/XLALConstants.MSUN_SI/\ 
797        6**1.5/numpy.pi/mtot 
798   
799    # restrict to range, include fisco 
800    condition = (f >= fmin) & (f < min(fmax,fisco)) 
801    S         = S[condition] 
802    f         = f[condition] 
803   
804    # calculate integrand 
805    integrand = (f**(-7/3))/S 
806   
807    # integrate 
808    result = scipy.integrate.trapz(integrand, f) 
809   
810    R = (pre*result) ** 0.5 / Mpc 
811   
812    if horizon: R *= 2.26 
813   
814    return R 
815   
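A usage sketch (editorial example): inspiral range for a 1.4-1.4 solar-mass
binary at SNR 8 from a toy, flat strain PSD; the PSD level is arbitrary and
the resulting distance is illustrative only. Passing horizon=True rescales
the result by 2.26 to give the horizon distance.

  >>> import numpy
  >>> f = numpy.arange(1., 2049.)
  >>> S = 1e-46 * numpy.ones(len(f))    # toy PSD in strain^2/Hz
  >>> R = inspiralrange(f, S, rho=8, m1=1.4, m2=1.4)    # range in Mpc
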
816  # ============================================================================= 
817  # Frequency dependent Burst range 
818  # ============================================================================= 
819   
820 -def f_dependent_burst_range(f, S, rho=8, E=1e-2): 
821    """ 
 822      Calculate GRB-like or supernova-like burst range for a given spectrum and background trigger SNR at a given time, as a function of frequency. 
823    """ 
824   
825    Mpc = 10**6 * XLALConstants.PC_SI 
826   
827    # generate frequency dependent range 
828    A = (((XLALConstants.G_SI * (E*XLALConstants.MSUN_SI) * 2/5)/(XLALConstants.PI**2 * XLALConstants.C_SI))**(1/2))/Mpc 
829    R = A/ (rho * S**(1/2) * f) 
830   
831    return R 
832   
833  # ============================================================================= 
834  # Burst range 
835  # ============================================================================= 
836   
837 -def burst_range(f, S, rho=8, E=1e-2, fmin=64, fmax=500): 
838    """ 
839      Calculate GRB-like or supernova-like burst range for a given spectrum 
840      and background trigger SNR. 
841    """ 
842   
843    # restrict spectrum to given frequency range 
844    condition = (f>=fmin) & (f<fmax) 
845    S2 = S[condition] 
846    f2 = f[condition] 
847   
848    # calculate integral 
849    FOM1 = scipy.integrate.trapz(f_dependent_burst_range(f2, S2, rho, E)**3, f2) 
850    FOM2 = FOM1/(fmax-fmin) 
851   
852    return FOM2**(1/3) 
853   
854 -def burst_sg_range(f, S, centralFreq, Q, rho=8, E=1e-2, fmin=64, fmax=500): 
855    """ 
856      Calculate range for sine-Gaussians for a given spectrum 
857      and background trigger SNR, assuming isotropic GW emission (unphysical but simple) 
858    """ 
859   
860    # restrict spectrum to given frequency range 
861    condition = (f>=fmin) & (f<fmax) 
862    S2 = S[condition] 
863    f2 = f[condition] 
864   
865    # generate frequency dependent range 
866    Mpc = 10**6 * XLALConstants.PC_SI 
 867   
868    A = (((XLALConstants.G_SI * (E*XLALConstants.MSUN_SI) )/(XLALConstants.PI**2 * XLALConstants.C_SI))**(1/2))/Mpc/centralFreq 
869    sigmaSq = Q**2 / (4 * XLALConstants.PI**2 * centralFreq**2) 
870    sg = numpy.exp( - (f2 - centralFreq)**2 * sigmaSq / 2)  
871    normSG = scipy.integrate.trapz(sg**2, f2)**(1/2) 
872    sg = sg/normSG 
873    R = A * sg / (rho * S2**(1/2) ) 
874   
875    # calculate integral 
876    FOM1 = scipy.integrate.trapz(R**2, f2) 
877   
878    # factor 0.36, median antenna pattern factor for a single detector 
879    # and linearly polarized GWs 
880    return FOM1**(1/2)*0.36 
881   
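A usage sketch (editorial example) for the three burst-range functions above,
again using a toy, flat strain PSD; the numbers are illustrative only.

  >>> import numpy
  >>> f = numpy.arange(1., 1025.)
  >>> S = 1e-46 * numpy.ones(len(f))                       # toy PSD in strain^2/Hz
  >>> Rf  = f_dependent_burst_range(f, S, rho=8, E=1e-2)   # range vs frequency
  >>> R   = burst_range(f, S, rho=8, E=1e-2)               # band-averaged range
  >>> Rsg = burst_sg_range(f, S, 235., 9.)                 # sine-Gaussian, f0=235 Hz, Q=9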