casa  $Rev:20696$
scantable.py
00001 """This module defines the scantable class."""
00002 
00003 import os
00004 import tempfile
00005 import numpy
00006 try:
00007     from functools import wraps as wraps_dec
00008 except ImportError:
00009     from asap.compatibility import wraps as wraps_dec
00010 
00011 from asap.env import is_casapy
00012 from asap._asap import Scantable
00013 from asap._asap import filler, msfiller
00014 from asap.parameters import rcParams
00015 from asap.logging import asaplog, asaplog_post_dec
00016 from asap.selector import selector
00017 from asap.linecatalog import linecatalog
00018 from asap.coordinate import coordinate
00019 from asap.utils import _n_bools, mask_not, mask_and, mask_or, page
00020 from asap.asapfitter import fitter
00021 
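# Note (descriptive comment): preserve_selection is a decorator that runs the
# wrapped scantable method and then restores the table's original selection
# afterwards, even if the wrapped call raises.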
00022 def preserve_selection(func):
00023     @wraps_dec(func)
00024     def wrap(obj, *args, **kw):
00025         basesel = obj.get_selection()
00026         try:
00027             val = func(obj, *args, **kw)
00028         finally:
00029             obj.set_selection(basesel)
00030         return val
00031     return wrap
00032 
00033 def is_scantable(filename):
00034     """Is the given file a scantable?
00035 
00036     Parameters:
00037 
00038         filename: the name of the file/directory to test
00039 
00040     """
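    # Heuristic (descriptive comment): a casacore table directory whose
    # table.info type line does not read 'Measurement Set' is treated as a
    # scantable here; a stricter 'Scantable' check is kept commented out below.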
00041     if ( os.path.isdir(filename)
00042          and os.path.exists(filename+'/table.info')
00043          and os.path.exists(filename+'/table.dat') ):
00044         f=open(filename+'/table.info')
00045         l=f.readline()
00046         f.close()
00047         #if ( l.find('Scantable') != -1 ):
00048         if ( l.find('Measurement Set') == -1 ):
00049             return True
00050         else:
00051             return False
00052     else:
00053         return False
00054 ##     return (os.path.isdir(filename)
00055 ##             and not os.path.exists(filename+'/table.f1')
00056 ##             and os.path.exists(filename+'/table.info'))
00057 
00058 def is_ms(filename):
00059     """Is the given file a MeasurementSet?
00060 
00061     Parameters:
00062 
00063         filename: the name of the file/directory to test
00064 
00065     """
00066     if ( os.path.isdir(filename)
00067          and os.path.exists(filename+'/table.info')
00068          and os.path.exists(filename+'/table.dat') ):
00069         f=open(filename+'/table.info')
00070         l=f.readline()
00071         f.close()
00072         if ( l.find('Measurement Set') != -1 ):
00073             return True
00074         else:
00075             return False
00076     else:
00077         return False
00078 
00079 def normalise_edge_param(edge):
00080     """\
00081     Convert a given edge value to a one-dimensional array that can be
00082     given to baseline-fitting/subtraction functions.
00083     The length of the output is always even, because it contains values
00084     for both sides of the spectra for each IF. When the length is 2, the
00085     values are applied to all IFs. If the length is larger than 2, it
00086     equals 2*ifnos().
00087     Accepted formats of edge include:
00088             * an integer - will be used for both sides of spectra of all IFs.
00089                   e.g. 10 is converted to [10,10]
00090             * an empty list/tuple [] - converted to [0, 0] and used for all IFs. 
00091             * a list/tuple containing an integer - same as the above case.
00092                   e.g. [10] is converted to [10,10]
00093             * a list/tuple containing two integers - will be used for all IFs. 
00094                   e.g. [5,10] is output as is; no conversion is needed.
00095             * a list/tuple of lists/tuples containing TWO integers -
00096                   each element of edge will be used for each IF.
00097                   e.g. [[5,10],[15,20]] - [5,10] for IF[0] and [15,20] for IF[1].
00098                   
00099                   If an element contains the same integer twice, the input 'edge'
00100                   parameter can be given in a simpler shape in the following cases:
00101                       ** when len(edge)!=2
00102                           any element containing the same values can be replaced
00103                           by a single integer,
00104                           e.g. [[15,15]] can be simplified to [15] (or [15,15] or 15).
00105                           e.g. [[1,1],[2,2],[3,3]] can be simplified to [1,2,3].
00106                       ** when len(edge)==2
00107                           care is needed in this case: ONLY ONE of the
00108                           elements may be a single integer,
00109                           e.g. [[5,5],[10,10]] can be simplified to [5,[10,10]]
00110                                or [[5,5],10], but can NOT be simplified to [5,10].
00111                                If [5,10] is given, it is interpreted as
00112                                [[5,10],[5,10],[5,10],...] instead, as shown before. 
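
    Worked examples (illustrative; the results follow the rules above)::

        normalise_edge_param(10)                   # -> [10, 10]
        normalise_edge_param([])                   # -> [0, 0]
        normalise_edge_param([5, 10])              # -> [5, 10]
        normalise_edge_param([[5, 10], [15, 20]])  # -> [5, 10, 15, 20]
        normalise_edge_param([1, 2, 3])            # -> [1, 1, 2, 2, 3, 3]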
00113     """
00114     from asap import _is_sequence_or_number as _is_valid
00115     if isinstance(edge, list) or isinstance(edge, tuple):
00116         for edgepar in edge:
00117             if not _is_valid(edgepar, int):
00118                 raise ValueError, "Each element of the 'edge' tuple has \
00119                                    to be a pair of integers or an integer."
00120             if isinstance(edgepar, list) or isinstance(edgepar, tuple):
00121                 if len(edgepar) != 2:
00122                     raise ValueError, "Each element of the 'edge' tuple has \
00123                                        to be a pair of integers or an integer."
00124     else:
00125         if not _is_valid(edge, int):
00126             raise ValueError, "Parameter 'edge' has to be an integer or a \
00127                                pair of integers specified as a tuple. \
00128                                Nested tuples are allowed \
00129                                to make individual selection for different IFs."
00130         
00131 
00132     if isinstance(edge, int):
00133         edge = [ edge, edge ]                 # e.g. 3   => [3,3]
00134     elif isinstance(edge, list) or isinstance(edge, tuple):
00135         if len(edge) == 0:
00136             edge = [0, 0]                     # e.g. []  => [0,0]
00137         elif len(edge) == 1:
00138             if isinstance(edge[0], int):
00139                 edge = [ edge[0], edge[0] ]   # e.g. [1] => [1,1]
00140 
00141     commonedge = True
00142     if len(edge) > 2: commonedge = False
00143     else:
00144         for edgepar in edge:
00145             if isinstance(edgepar, list) or isinstance(edgepar, tuple):
00146                 commonedge = False
00147                 break
00148 
00149     if commonedge:
00150         if len(edge) > 1:
00151             norm_edge = edge
00152         else:
00153             norm_edge = edge + edge
00154     else:
00155         norm_edge = []
00156         for edgepar in edge:
00157             if isinstance(edgepar, int):
00158                 norm_edge += [edgepar, edgepar]
00159             else:
00160                 norm_edge += edgepar
00161 
00162     return norm_edge
00163 
00164 def raise_fitting_failure_exception(e):
00165     msg = "The fit failed, possibly because it didn't converge."
00166     if rcParams["verbose"]:
00167         asaplog.push(str(e))
00168         asaplog.push(str(msg))
00169     else:
00170         raise RuntimeError(str(e)+'\n'+msg)
00171 
00172 def pack_progress_params(showprogress, minnrow):
00173     return str(showprogress).lower() + ',' + str(minnrow)
00174 
00175 class scantable(Scantable):
00176     """\
00177         The ASAP container for scans (single-dish data).
00178     """
00179 
00180     @asaplog_post_dec
00181     def __init__(self, filename, average=None, unit=None, parallactify=None,
00182                  **args):
00183         """\
00184         Create a scantable from a saved one or make a reference
00185 
00186         Parameters:
00187 
00188             filename:     the name of an asap table on disk
00189                           or
00190                           the name of a rpfits/sdfits/ms file
00191                           (integrations within scans are auto averaged
00192                           and the whole file is read) or
00193                           [advanced] a reference to an existing scantable
00194 
00195             average:      average all integrations within a scan on read.
00196                           The default (True) is taken from .asaprc.
00197 
00198             unit:         brightness unit; must be consistent with K or Jy.
00199                           Overrides the default selected by the filler
00200                           (input rpfits/sdfits/ms) or replaces the value
00201                           in existing scantables
00202 
00203             antenna:      for MeasurementSet input data only:
00204                           Antenna selection. integer (id) or string 
00205                           (name or id).
00206 
00207             parallactify: Indicate that the data has been parallactified.
00208                           The default (False) is taken from the .asaprc file.
00209 
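        Example (illustrative only; the file names here are hypothetical)::

            s = scantable('2010-09-01_1234.rpf')           # fill from RPFITS
            s = scantable('orion.asap', average=False)     # open a saved table
            s = scantable('obs.ms', antenna='DV01')        # fill from an MS
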
00210         """
00211         if average is None:
00212             average = rcParams['scantable.autoaverage']
00213         parallactify = parallactify or rcParams['scantable.parallactify']
00214         varlist = vars()
00215         from asap._asap import stmath
00216         self._math = stmath( rcParams['insitu'] )
00217         if isinstance(filename, Scantable):
00218             Scantable.__init__(self, filename)
00219         else:
00220             if isinstance(filename, str):
00221                 filename = os.path.expandvars(filename)
00222                 filename = os.path.expanduser(filename)
00223                 if not os.path.exists(filename):
00224                     s = "File '%s' not found." % (filename)
00225                     raise IOError(s)
00226                 if is_scantable(filename):
00227                     ondisk = rcParams['scantable.storage'] == 'disk'
00228                     Scantable.__init__(self, filename, ondisk)
00229                     if unit is not None:
00230                         self.set_fluxunit(unit)
00231                     if average:
00232                         self._assign( self.average_time( scanav=True ) )
00233                     # do not reset to the default freqframe
00234                     #self.set_freqframe(rcParams['scantable.freqframe'])
00235                 elif is_ms(filename):
00236                     # Measurement Set
00237                     opts={'ms': {}}
00238                     mskeys=['getpt','antenna']
00239                     for key in mskeys:
00240                         if key in args.keys():
00241                             opts['ms'][key] = args[key]
00242                     self._fill([filename], unit, average, opts)
00243                 elif os.path.isfile(filename):
00244                     self._fill([filename], unit, average)
00245                     # only apply to new data not "copy constructor"
00246                     self.parallactify(parallactify)
00247                 else:
00248                     msg = "The given file '%s' is not a valid " \
00249                           "asap table." % (filename)
00250                     raise IOError(msg)
00251             elif (isinstance(filename, list) or isinstance(filename, tuple)) \
00252                   and isinstance(filename[-1], str):
00253                 self._fill(filename, unit, average)
00254         self.parallactify(parallactify)
00255         self._add_history("scantable", varlist)
00256 
00257     @asaplog_post_dec
00258     def save(self, name=None, format=None, overwrite=False):
00259         """\
00260         Store the scantable on disk. This can be an asap (aips++) Table,
00261         SDFITS or MS2 format.
00262 
00263         Parameters:
00264 
00265             name:        the name of the outputfile. For format 'ASCII'
00266                          this is the root file name (data in 'name'.txt
00267                          and header in 'name'_header.txt)
00268 
00269             format:      an optional file format. Default is ASAP.
00270                          Allowed are:
00271 
00272                             * 'ASAP' (save as ASAP [aips++] Table),
00273                             * 'SDFITS' (save as SDFITS file)
00274                             * 'ASCII' (saves as ascii text file)
00275                             * 'MS2' (saves as a casacore MeasurementSet V2)
00276                             * 'FITS' (save as image FITS - not readable by
00277                                       this class)
00278                             * 'CLASS' (save as FITS readable by CLASS)
00279 
00280             overwrite:   whether to overwrite the file if it exists.
00281                          The default (False) is to return with a warning
00282                          without writing the output. USE WITH CARE.
00283 
00284         Example::
00285 
00286             scan.save('myscan.asap')
00287             scan.save('myscan.sdfits', 'SDFITS')
00288 
00289         """
00290         from os import path
00291         format = format or rcParams['scantable.save']
00292         suffix = '.'+format.lower()
00293         if name is None or name == "":
00294             name = 'scantable'+suffix
00295             msg = "No filename given. Using default name %s..." % name
00296             asaplog.push(msg)
00297         name = path.expandvars(name)
00298         if path.isfile(name) or path.isdir(name):
00299             if not overwrite:
00300                 msg = "File %s exists." % name
00301                 raise IOError(msg)
00302         format2 = format.upper()
00303         if format2 == 'ASAP':
00304             self._save(name)
00305         elif format2 == 'MS2':
00306             msopt = {'ms': {'overwrite': overwrite } }
00307             from asap._asap import mswriter
00308             writer = mswriter( self )
00309             writer.write( name, msopt ) 
00310         else:
00311             from asap._asap import stwriter as stw
00312             writer = stw(format2)
00313             writer.write(self, name)
00314         return
00315 
00316     def copy(self):
00317         """Return a copy of this scantable.
00318 
00319         *Note*:
00320 
00321             This makes a full (deep) copy. scan2 = scan1 makes a reference.
00322 
00323         Example::
00324 
00325             copiedscan = scan.copy()
00326 
00327         """
00328         sd = scantable(Scantable._copy(self))
00329         return sd
00330 
00331     def drop_scan(self, scanid=None):
00332         """\
00333         Return a new scantable where the specified scan number(s) has(have)
00334         been dropped.
00335 
00336         Parameters:
00337 
00338             scanid:    a (list of) scan number(s)
00339 
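        Example (illustrative; scan numbers as listed by scan.summary())::

            newscan = scan.drop_scan(1)         # drop scan number 1
            newscan = scan.drop_scan([2, 4])    # drop scans 2 and 4
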
00340         """
00341         from asap import _is_sequence_or_number as _is_valid
00342         from asap import _to_list
00343         from asap import unique
00344         if not _is_valid(scanid):
00345             raise RuntimeError( 'Please specify a scanno to drop from the'
00346                                 ' scantable' )
00347         scanid = _to_list(scanid)
00348         allscans = unique([ self.getscan(i) for i in range(self.nrow())])
00349         for sid in scanid: allscans.remove(sid)
00350         if len(allscans) == 0:
00351             raise ValueError("Can't remove all scans")
00352         sel = selector(scans=allscans)
00353         return self._select_copy(sel)
00354 
00355     def _select_copy(self, selection):
00356         orig = self.get_selection()
00357         self.set_selection(orig+selection)
00358         cp = self.copy()
00359         self.set_selection(orig)
00360         return cp
00361 
00362     def get_scan(self, scanid=None):
00363         """\
00364         Return a specific scan (by scanno) or collection of scans (by
00365         source name) in a new scantable.
00366 
00367         *Note*:
00368 
00369             See scantable.drop_scan() for the inverse operation.
00370 
00371         Parameters:
00372 
00373             scanid:    a (list of) scanno or a source name, unix-style
00374                        patterns are accepted for source name matching, e.g.
00375                        '*_R' gets all 'ref' scans
00376 
00377         Example::
00378 
00379             # get all scans containing the source '323p459'
00380             newscan = scan.get_scan('323p459')
00381             # get all 'off' scans
00382             refscans = scan.get_scan('*_R')
00383             # get a subset of scans by scanno (as listed in scan.summary())
00384             newscan = scan.get_scan([0, 2, 7, 10])
00385 
00386         """
00387         if scanid is None:
00388             raise RuntimeError( 'Please specify a scan no or name to '
00389                                 'retrieve from the scantable' )
00390         try:
00391             bsel = self.get_selection()
00392             sel = selector()
00393             if type(scanid) is str:
00394                 sel.set_name(scanid)
00395                 return self._select_copy(sel)
00396             elif type(scanid) is int:
00397                 sel.set_scans([scanid])
00398                 return self._select_copy(sel)
00399             elif type(scanid) is list:
00400                 sel.set_scans(scanid)
00401                 return self._select_copy(sel)
00402             else:
00403                 msg = "Illegal scanid type, use 'int' or 'list' of ints."
00404                 raise TypeError(msg)
00405         except RuntimeError:
00406             raise
00407 
00408     def __str__(self):
00409         tempFile = tempfile.NamedTemporaryFile()
00410         Scantable._summary(self, tempFile.name)
00411         tempFile.seek(0)
00412         asaplog.clear()
00413         return tempFile.file.read()
00414 
00415     @asaplog_post_dec
00416     def summary(self, filename=None):
00417         """\
00418         Print a summary of the contents of this scantable.
00419 
00420         Parameters:
00421 
00422             filename:    the name of a file to write the output to
00423                          Default - no file output
00424 
00425         """
00426         if filename is not None:
00427             if filename == "":
00428                 filename = 'scantable_summary.txt'
00429             from os.path import expandvars, isdir
00430             filename = expandvars(filename)
00431             if isdir(filename):
00432                 msg = "Illegal file name '%s'." % (filename)
00433                 raise IOError(msg)
00434         else:
00435             filename = ""
00436         Scantable._summary(self, filename)
00437 
00438     def get_spectrum(self, rowno):
00439         """Return the spectrum for the current row in the scantable as a list.
00440 
00441         Parameters:
00442 
00443              rowno:   the row number to retrieve the spectrum from
00444 
00445         """
00446         return self._getspectrum(rowno)
00447 
00448     def get_mask(self, rowno):
00449         """Return the mask for the current row in the scantable as a list.
00450 
00451         Parameters:
00452 
00453              rowno:   the row number to retrieve the mask from
00454 
00455         """
00456         return self._getmask(rowno)
00457 
00458     def set_spectrum(self, spec, rowno):
00459         """Set the spectrum for the current row in the scantable.
00460 
00461         Parameters:
00462 
00463              spec:   the new spectrum
00464 
00465              rowno:  the row number to set the spectrum for
00466 
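        Example (illustrative; doubles the spectrum of row 0 in place)::

            spec = scan.get_spectrum(0)
            scan.set_spectrum([2.0 * v for v in spec], 0)
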
00467         """
00468         assert(len(spec) == self.nchan(self.getif(rowno)))
00469         return self._setspectrum(spec, rowno)
00470 
00471     def get_coordinate(self, rowno):
00472         """Return the (spectral) coordinate for a given 'rowno'.
00473 
00474         *Note*:
00475 
00476             * This coordinate is only valid until a scantable method modifies
00477               the frequency axis.
00478             * This coordinate contains the original frequency set-up,
00479               NOT the new frame. The conversions, however, are done using the
00480               user-specified frame (e.g. LSRK/TOPO). To get the 'real' coordinate,
00481               use scantable.freq_align first. Without it there is no closure,
00482               i.e.::
00483 
00484                   c = myscan.get_coordinate(0)
00485                   c.to_frequency(c.get_reference_pixel()) != c.get_reference_value()
00486 
00487         Parameters:
00488 
00489              rowno:    the row number for the spectral coordinate
00490 
00491         """
00492         return coordinate(Scantable.get_coordinate(self, rowno))
00493 
00494     def get_selection(self):
00495         """\
00496         Get the selection object currently set on this scantable.
00497 
00498         Example::
00499 
00500             sel = scan.get_selection()
00501             sel.set_ifs(0)              # select IF 0
00502             scan.set_selection(sel)     # apply modified selection
00503 
00504         """
00505         return selector(self._getselection())
00506 
00507     def set_selection(self, selection=None, **kw):
00508         """\
00509         Select a subset of the data. All following operations on this scantable
00510         are only applied to this selection.
00511 
00512         Parameters:
00513 
00514             selection:    a selector object (default unset the selection), or
00515                           any combination of 'pols', 'ifs', 'beams', 'scans',
00516                           'cycles', 'name', 'query'
00517 
00518         Examples::
00519 
00520             sel = selector()         # create a selection object
00521             sel.set_scans([0, 3])     # select SCANNO 0 and 3
00522             scan.set_selection(sel)  # set the selection
00523             scan.summary()           # will only print summary of scanno 0 and 3
00524             scan.set_selection()     # unset the selection
00525             # or the equivalent
00526             scan.set_selection(scans=[0,3])
00527             scan.summary()           # will only print summary of scanno 0 and 3
00528             scan.set_selection()     # unset the selection
00529 
00530         """
00531         if selection is None:
00532             # reset
00533             if len(kw) == 0:
00534                 selection = selector()
00535             else:
00536                 # try keywords
00537                 for k in kw:
00538                     if k not in selector.fields:
00539                         raise KeyError("Invalid selection key '%s', "
00540                                        "valid keys are %s" % (k, 
00541                                                               selector.fields))
00542                 selection = selector(**kw)
00543         self._setselection(selection)
00544 
00545     def get_row(self, row=0, insitu=None):
00546         """\
00547         Select a row in the scantable.
00548         Return a scantable with single row.
00549 
00550         Parameters:
00551 
00552             row:    row no of integration, default is 0.
00553             insitu: if False a new scantable is returned. Otherwise, the
00554                     selection is applied in-situ. The default is taken
00555                     from .asaprc (False)
00556 
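        Example (illustrative)::

            row3 = scan.get_row(3, insitu=False)   # scantable holding only row 3
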
00557         """
00558         if insitu is None: 
00559             insitu = rcParams['insitu']
00560         if not insitu:
00561             workscan = self.copy()
00562         else:
00563             workscan = self
00564         # Select a row
00565         sel = selector()
00566         sel.set_rows([row])
00567         workscan.set_selection(sel)
00568         if not workscan.nrow() == 1:
00569             msg = "Could not identify single row. %d rows selected." \
00570                 % (workscan.nrow())
00571             raise RuntimeError(msg)
00572         if insitu:
00573             self._assign(workscan)
00574         else:
00575             return workscan
00576 
00577     @asaplog_post_dec
00578     def stats(self, stat='stddev', mask=None, form='3.3f', row=None):
00579         """\
00580         Determine the specified statistic of the current beam/if/pol
00581         Takes a 'mask' as an optional parameter to specify which
00582         channels should be excluded.
00583 
00584         Parameters:
00585 
00586             stat:    'min', 'max', 'min_abc', 'max_abc', 'sumsq', 'sum',
00587                      'mean', 'var', 'stddev', 'avdev', 'rms', 'median'
00588 
00589             mask:    an optional mask specifying where the statistic
00590                      should be determined.
00591 
00592             form:    format string to print statistic values
00593 
00594             row:     row number of spectrum to process.
00595                      (default is None: for all rows)
00596 
00597         Example::

00598             scan.set_unit('channel')
00599             msk = scan.create_mask([100, 200], [500, 600])
00600             scan.stats(stat='mean', mask=msk)
00601 
00602         """
00603         mask = mask or []
00604         if not self._check_ifs():
00605             raise ValueError("Cannot apply mask as the IFs have different "
00606                              "number of channels. Please use setselection() "
00607                              "to select individual IFs")
00608         rtnabc = False
00609         if stat.lower().endswith('_abc'): rtnabc = True
00610         getchan = False
00611         if stat.lower().startswith('min') or stat.lower().startswith('max'):
00612             chan = self._math._minmaxchan(self, mask, stat)
00613             getchan = True
00614             statvals = []
00615         if not rtnabc:
00616             if row == None:
00617                 statvals = self._math._stats(self, mask, stat)
00618             else:
00619                 statvals = self._math._statsrow(self, mask, stat, int(row))
00620 
00621         #def cb(i):
00622         #    return statvals[i]
00623 
00624         #return self._row_callback(cb, stat)
00625 
00626         label=stat
00627         #callback=cb
00628         out = ""
00629         #outvec = []
00630         sep = '-'*50
00631 
00632         if row == None:
00633             rows = xrange(self.nrow())
00634         elif isinstance(row, int):
00635             rows = [ row ]
00636 
00637         for i in rows:
00638             refstr = ''
00639             statunit= ''
00640             if getchan:
00641                 qx, qy = self.chan2data(rowno=i, chan=chan[i])
00642                 if rtnabc:
00643                     statvals.append(qx['value'])
00644                     refstr = ('(value: %'+form) % (qy['value'])+' ['+qy['unit']+'])'
00645                     statunit= '['+qx['unit']+']'
00646                 else:
00647                     refstr = ('(@ %'+form) % (qx['value'])+' ['+qx['unit']+'])'
00648 
00649             tm = self._gettime(i)
00650             src = self._getsourcename(i)
00651             out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
00652             out += 'Time[%s]:\n' % (tm)
00653             if self.nbeam(-1) > 1: out +=  ' Beam[%d] ' % (self.getbeam(i))
00654             if self.nif(-1) > 1:   out +=  ' IF[%d] ' % (self.getif(i))
00655             if self.npol(-1) > 1:  out +=  ' Pol[%d] ' % (self.getpol(i))
00656             #outvec.append(callback(i))
00657             if len(rows) > 1:
00658                 # out += ('= %'+form) % (outvec[i]) +'   '+refstr+'\n'
00659                 out += ('= %'+form) % (statvals[i]) +'   '+refstr+'\n'
00660             else:
00661                 # out += ('= %'+form) % (outvec[0]) +'   '+refstr+'\n'
00662                 out += ('= %'+form) % (statvals[0]) +'   '+refstr+'\n'
00663             out +=  sep+"\n"
00664 
00665         import os
00666         if os.environ.has_key( 'USER' ):
00667             usr = os.environ['USER']
00668         else:
00669             import commands
00670             usr = commands.getoutput( 'whoami' )
00671         tmpfile = '/tmp/tmp_'+usr+'_casapy_asap_scantable_stats'
00672         f = open(tmpfile,'w')
00673         print >> f, sep
00674         print >> f, ' %s %s' % (label, statunit)
00675         print >> f, sep
00676         print >> f, out
00677         f.close()
00678         f = open(tmpfile,'r')
00679         x = f.readlines()
00680         f.close()
00681         asaplog.push(''.join(x), False)
00682 
00683         return statvals
00684 
00685     def chan2data(self, rowno=0, chan=0):
00686         """\
00687         Returns channel/frequency/velocity and spectral value
00688         at an arbitrary row and channel in the scantable.
00689 
00690         Parameters:
00691 
00692             rowno:   a row number in the scantable. Default is the
00693                      first row, i.e. rowno=0
00694 
00695             chan:    a channel in the scantable. Default is the first
00696                      channel, i.e. pos=0
00697 
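        Example (illustrative; the units depend on the current settings)::

            qx, qy = scan.chan2data(rowno=0, chan=100)
            # qx ~ {'unit': 'channel', 'value': 100.0}
            # qy ~ {'unit': 'K', 'value': <spectral value at that channel>}
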
00698         """
00699         if isinstance(rowno, int) and isinstance(chan, int):
00700             qx = {'unit': self.get_unit(),
00701                   'value': self._getabcissa(rowno)[chan]}
00702             qy = {'unit': self.get_fluxunit(),
00703                   'value': self._getspectrum(rowno)[chan]}
00704             return qx, qy
00705 
00706     def stddev(self, mask=None):
00707         """\
00708         Determine the standard deviation of the current beam/if/pol
00709         Takes a 'mask' as an optional parameter to specify which
00710         channels should be excluded.
00711 
00712         Parameters:
00713 
00714             mask:    an optional mask specifying where the standard
00715                      deviation should be determined.
00716 
00717         Example::
00718 
00719             scan.set_unit('channel')
00720             msk = scan.create_mask([100, 200], [500, 600])
00721             scan.stddev(mask=m)
00722 
00723         """
00724         return self.stats(stat='stddev', mask=mask);
00725 
00726 
00727     def get_column_names(self):
00728         """\
00729         Return a list of column names, which can be used for selection.
00730         """
00731         return list(Scantable.get_column_names(self))
00732 
00733     def get_tsys(self, row=-1):
00734         """\
00735         Return the System temperatures.
00736 
00737         Parameters:
00738 
00739             row:    the rowno to get the information for. (default all rows)
00740 
00741         Returns:
00742 
00743             a list of Tsys values for the current selection
00744 
00745         """
00746         if row > -1:
00747             return self._get_column(self._gettsys, row)
00748         return self._row_callback(self._gettsys, "Tsys")
00749 
00750     def get_tsysspectrum(self, row=-1):
00751         """\
00752         Return the channel dependent system temperatures.
00753 
00754         Parameters:
00755 
00756             row:    the rowno to get the information for. (default all rows)
00757 
00758         Returns:
00759 
00760             a list of Tsys spectra (one per row) for the current selection
00761 
00762         """
00763         return self._get_column( self._gettsysspectrum, row )
00764 
00765     def get_weather(self, row=-1):
00766         """\
00767         Return the weather information.
00768 
00769         Parameters:
00770 
00771             row:    the rowno to get the information for. (default all rows)
00772 
00773         Returns:
00774 
00775             a dict or a list of dicts of values for the current selection
00776 
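        Example (illustrative)::

            w = scan.get_weather(row=0)
            # w -> {'temperature': ..., 'pressure': ..., 'humidity': ...,
            #       'windspeed': ..., 'windaz': ...}
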
00777         """
00778 
00779         values = self._get_column(self._get_weather, row)
00780         if row > -1:
00781             return {'temperature': values[0],
00782                     'pressure': values[1], 'humidity' : values[2],
00783                     'windspeed' : values[3], 'windaz' : values[4]
00784                     }
00785         else:
00786             out = []
00787             for r in values:
00788 
00789                 out.append({'temperature': r[0],
00790                             'pressure': r[1], 'humidity' : r[2],
00791                             'windspeed' : r[3], 'windaz' : r[4]
00792                     })
00793             return out
00794 
00795     def _row_callback(self, callback, label):
00796         out = ""
00797         outvec = []
00798         sep = '-'*50
00799         for i in range(self.nrow()):
00800             tm = self._gettime(i)
00801             src = self._getsourcename(i)
00802             out += 'Scan[%d] (%s) ' % (self.getscan(i), src)
00803             out += 'Time[%s]:\n' % (tm)
00804             if self.nbeam(-1) > 1:
00805                 out +=  ' Beam[%d] ' % (self.getbeam(i))
00806             if self.nif(-1) > 1: out +=  ' IF[%d] ' % (self.getif(i))
00807             if self.npol(-1) > 1: out +=  ' Pol[%d] ' % (self.getpol(i))
00808             outvec.append(callback(i))
00809             out += '= %3.3f\n' % (outvec[i])
00810             out +=  sep+'\n'
00811 
00812         asaplog.push(sep)
00813         asaplog.push(" %s" % (label))
00814         asaplog.push(sep)
00815         asaplog.push(out)
00816         asaplog.post()
00817         return outvec
00818 
00819     def _get_column(self, callback, row=-1, *args):
00820         """
00821         """
00822         if row == -1:
00823             return [callback(i, *args) for i in range(self.nrow())]
00824         else:
00825             if  0 <= row < self.nrow():
00826                 return callback(row, *args)
00827 
00828 
00829     def get_time(self, row=-1, asdatetime=False, prec=-1):
00830         """\
00831         Get a list of time stamps for the observations.
00832         Return a datetime object or a string (default) for each
00833         integration time stamp in the scantable.
00834 
00835         Parameters:
00836 
00837             row:          row no of integration. Default -1 return all rows
00838 
00839             asdatetime:   return values as datetime objects rather than strings
00840 
00841             prec:         number of digits shown. Default -1 to automatic 
00842                           calculation.
00843                           Note this number equals the digits of MVTime,
00844                           i.e., 0<prec<3: dates with hh:: only,
00845                           <5: with hh:mm:, <7 or 0: with hh:mm:ss,
00846                           and >6: with hh:mm:ss.tt... (prec-6 t's added)
00847 
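        Example (illustrative; the actual time stamps depend on the data)::

            t  = scan.get_time(0)                    # e.g. '2010/09/01/12:34:56'
            dt = scan.get_time(0, asdatetime=True)   # a datetime.datetime object
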
00848         """
00849         from datetime import datetime
00850         if prec < 0:
00851             # automagically set necessary precision +1
00852             prec = 7 - \
00853                 numpy.floor(numpy.log10(numpy.min(self.get_inttime(row))))
00854             prec = max(6, int(prec))
00855         else:
00856             prec = max(0, prec)
00857         if asdatetime:
00858             #precision can be 1 millisecond at max
00859             prec = min(12, prec)
00860 
00861         times = self._get_column(self._gettime, row, prec)
00862         if not asdatetime:
00863             return times
00864         format = "%Y/%m/%d/%H:%M:%S.%f"
00865         if prec < 7:
00866             nsub = 1 + (((6-prec)/2) % 3)
00867             substr = [".%f","%S","%M"]
00868             for i in range(nsub):
00869                 format = format.replace(substr[i],"")
00870         if isinstance(times, list):
00871             return [datetime.strptime(i, format) for i in times]
00872         else:
00873             return datetime.strptime(times, format)
00874 
00875 
00876     def get_inttime(self, row=-1):
00877         """\
00878         Get a list of integration times for the observations.
00879         Return a time in seconds for each integration in the scantable.
00880 
00881         Parameters:
00882 
00883             row:    row no of integration. Default -1 return all rows.
00884 
00885         """
00886         return self._get_column(self._getinttime, row)
00887 
00888 
00889     def get_sourcename(self, row=-1):
00890         """\
00891         Get a list of source names for the observations.
00892         Return a string for each integration in the scantable.
00893         Parameters:
00894 
00895             row:    row no of integration. Default -1 return all rows.
00896 
00897         """
00898         return self._get_column(self._getsourcename, row)
00899 
00900     def get_elevation(self, row=-1):
00901         """\
00902         Get a list of elevations for the observations.
00903         Return a float for each integration in the scantable.
00904 
00905         Parameters:
00906 
00907             row:    row no of integration. Default -1 return all rows.
00908 
00909         """
00910         return self._get_column(self._getelevation, row)
00911 
00912     def get_azimuth(self, row=-1):
00913         """\
00914         Get a list of azimuths for the observations.
00915         Return a float for each integration in the scantable.
00916 
00917         Parameters:
00918             row:    row no of integration. Default -1 return all rows.
00919 
00920         """
00921         return self._get_column(self._getazimuth, row)
00922 
00923     def get_parangle(self, row=-1):
00924         """\
00925         Get a list of parallactic angles for the observations.
00926         Return a float for each integration in the scantable.
00927 
00928         Parameters:
00929 
00930             row:    row no of integration. Default -1 return all rows.
00931 
00932         """
00933         return self._get_column(self._getparangle, row)
00934 
00935     def get_direction(self, row=-1):
00936         """
00937         Get a list of Positions on the sky (direction) for the observations.
00938         Return a string for each integration in the scantable.
00939 
00940         Parameters:
00941 
00942             row:    row no of integration. Default -1 return all rows
00943 
00944         """
00945         return self._get_column(self._getdirection, row)
00946 
00947     def get_directionval(self, row=-1):
00948         """\
00949         Get a list of Positions on the sky (direction) for the observations.
00950         Return a float for each integration in the scantable.
00951 
00952         Parameters:
00953 
00954             row:    row no of integration. Default -1 return all rows
00955 
00956         """
00957         return self._get_column(self._getdirectionvec, row)
00958 
00959     @asaplog_post_dec
00960     def set_unit(self, unit='channel'):
00961         """\
00962         Set the unit for all following operations on this scantable
00963 
00964         Parameters:
00965 
00966             unit:    optional unit, default is 'channel'. Use one of '*Hz',
00967                      'km/s', 'channel' or equivalent ''
00968 
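        Example (illustrative)::

            scan.set_unit('km/s')   # subsequent masks/abcissa values in velocity
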
00969         """
00970         varlist = vars()
00971         if unit in ['', 'pixel', 'channel']:
00972             unit = ''
00973         inf = list(self._getcoordinfo())
00974         inf[0] = unit
00975         self._setcoordinfo(inf)
00976         self._add_history("set_unit", varlist)
00977 
00978     @asaplog_post_dec
00979     def set_instrument(self, instr):
00980         """\
00981         Set the instrument for subsequent processing.
00982 
00983         Parameters:
00984 
00985             instr:    Select from 'ATPKSMB', 'ATPKSHOH', 'ATMOPRA',
00986                       'DSS-43' (Tid), 'CEDUNA', and 'HOBART'
00987 
00988         """
00989         self._setInstrument(instr)
00990         self._add_history("set_instrument", vars())
00991 
00992     @asaplog_post_dec
00993     def set_feedtype(self, feedtype):
00994         """\
00995         Overwrite the feed type, which might not be set correctly.
00996 
00997         Parameters:
00998 
00999             feedtype:     'linear' or 'circular'
01000 
01001         """
01002         self._setfeedtype(feedtype)
01003         self._add_history("set_feedtype", vars())
01004 
01005     @asaplog_post_dec
01006     def set_doppler(self, doppler='RADIO'):
01007         """\
01008         Set the doppler for all following operations on this scantable.
01009 
01010         Parameters:
01011 
01012             doppler:    One of 'RADIO', 'OPTICAL', 'Z', 'BETA', 'GAMMA'
01013 
01014         """
01015         varlist = vars()
01016         inf = list(self._getcoordinfo())
01017         inf[2] = doppler
01018         self._setcoordinfo(inf)
01019         self._add_history("set_doppler", vars())
01020 
01021     @asaplog_post_dec
01022     def set_freqframe(self, frame=None):
01023         """\
01024         Set the frame type of the Spectral Axis.
01025 
01026         Parameters:
01027 
01028             frame:   an optional frame type, default 'LSRK'. Valid frames are:
01029                      'TOPO', 'LSRD', 'LSRK', 'BARY',
01030                      'GEO', 'GALACTO', 'LGROUP', 'CMB'
01031 
01032         Example::
01033 
01034             scan.set_freqframe('BARY')
01035 
01036         """
01037         frame = frame or rcParams['scantable.freqframe']
01038         varlist = vars()
01039         # "REST" is not implemented in casacore
01040         #valid = ['REST', 'TOPO', 'LSRD', 'LSRK', 'BARY', \
01041         #           'GEO', 'GALACTO', 'LGROUP', 'CMB']
01042         valid = ['TOPO', 'LSRD', 'LSRK', 'BARY', \
01043                    'GEO', 'GALACTO', 'LGROUP', 'CMB']
01044 
01045         if frame in valid:
01046             inf = list(self._getcoordinfo())
01047             inf[1] = frame
01048             self._setcoordinfo(inf)
01049             self._add_history("set_freqframe", varlist)
01050         else:
01051             msg = "Please specify a valid freq type. Valid types are:\n%s" % valid
01052             raise TypeError(msg)
01053 
01054     @asaplog_post_dec
01055     def set_dirframe(self, frame=""):
01056         """\
01057         Set the frame type of the Direction on the sky.
01058 
01059         Parameters:
01060 
01061             frame:   an optional frame type, default ''. Valid frames are:
01062                      'J2000', 'B1950', 'GALACTIC'
01063 
01064         Example::
01065 
01066             scan.set_dirframe('GALACTIC')
01067 
01068         """
01069         varlist = vars()
01070         Scantable.set_dirframe(self, frame)
01071         self._add_history("set_dirframe", varlist)
01072 
01073     def get_unit(self):
01074         """\
01075         Get the default unit set in this scantable
01076 
01077         Returns:
01078 
01079             A unit string
01080 
01081         """
01082         inf = self._getcoordinfo()
01083         unit = inf[0]
01084         if unit == '': unit = 'channel'
01085         return unit
01086 
01087     @asaplog_post_dec
01088     def get_abcissa(self, rowno=0):
01089         """\
01090         Get the abcissa in the current coordinate setup for the currently
01091         selected Beam/IF/Pol
01092 
01093         Parameters:
01094 
01095             rowno:    an optional row number in the scantable. Default is the
01096                       first row, i.e. rowno=0
01097 
01098         Returns:
01099 
01100             The abcissa values and the format string (as a dictionary)
01101 
01102         """
01103         abc = self._getabcissa(rowno)
01104         lbl = self._getabcissalabel(rowno)
01105         return abc, lbl
01106 
01107     @asaplog_post_dec
01108     def flag(self, mask=None, unflag=False, row=-1):
01109         """\
01110         Flag the selected data using an optional channel mask.
01111 
01112         Parameters:
01113 
01114             mask:   an optional channel mask, created with create_mask. Default
01115                     (no mask) is all channels.
01116 
01117             unflag:    if True, unflag the data
01118 
01119             row:    an optional row number in the scantable.
01120                       Default -1 flags all rows
01121                       
01122         """
01123         varlist = vars()
01124         mask = mask or []
01125         self._flag(row, mask, unflag)
01126         self._add_history("flag", varlist)
01127 
01128     @asaplog_post_dec
01129     def flag_row(self, rows=None, unflag=False):
01130         """\
01131         Flag the selected data in row-based manner.
01132 
01133         Parameters:
01134 
01135             rows:   list of row numbers to be flagged. Default is no row
01136                     (must be explicitly specified to execute row-based 
01137                     flagging).
01138 
01139             unflag: if True, unflag the data.
01140 
01141         """
01142         varlist = vars()
01143         if rows is None:
01144             rows = []
01145         self._flag_row(rows, unflag)
01146         self._add_history("flag_row", varlist)
01147 
01148     @asaplog_post_dec
01149     def clip(self, uthres=None, dthres=None, clipoutside=True, unflag=False):
01150         """\
01151         Flag the selected data outside a specified range (in channel-base)
01152 
01153         Parameters:
01154 
01155             uthres:      upper threshold.
01156 
01157             dthres:      lower threshold
01158 
01159             clipoutside: True for flagging data outside the range 
01160                          [dthres:uthres].
01161                          False for flagging data inside the range.
01162 
01163             unflag:      if True, unflag the data.
01164 
01165         """
01166         varlist = vars()
01167         self._clip(uthres, dthres, clipoutside, unflag)
01168         self._add_history("clip", varlist)
01169 
01170     @asaplog_post_dec
01171     def lag_flag(self, start, end, unit="MHz", insitu=None):
01172         """\
01173         Flag the data in 'lag' space by providing a frequency to remove.
01174         Flagged data in the scantable get interpolated over the region.
01175         No taper is applied.
01176 
01177         Parameters:
01178 
01179             start:    the start frequency (really a period within the
01180                       bandwidth)  or period to remove
01181 
01182             end:      the end frequency or period to remove
01183 
01184             unit:     the frequency unit (default 'MHz') or '' for
01185                       explicit lag channels
01186 
01187         *Notes*:
01188 
01189             It is recommended to flag edges of the band or strong
01190             signals beforehand.
01191 
01192         """
01193         if insitu is None: insitu = rcParams['insitu']
01194         self._math._setinsitu(insitu)
01195         varlist = vars()
01196         base = { "GHz": 1000000000., "MHz": 1000000., "kHz": 1000., "Hz": 1.}
01197         if not (unit == "" or base.has_key(unit)):
01198             raise ValueError("%s is not a valid unit." % unit)
01199         if unit == "":
01200             s = scantable(self._math._lag_flag(self, start, end, "lags"))
01201         else:
01202             s = scantable(self._math._lag_flag(self, start*base[unit],
01203                                                end*base[unit], "frequency"))
01204         s._add_history("lag_flag", varlist)
01205         if insitu:
01206             self._assign(s)
01207         else:
01208             return s
01209 
01210     @asaplog_post_dec
01211     def fft(self, rowno=None, mask=None, getrealimag=False):
01212         """\
01213         Apply FFT to the spectra.
01214         Flagged data in the scantable get interpolated over the region.
01215 
01216         Parameters:
01217 
01218             rowno:          The row number(s) to be processed. int, list 
01219                             and tuple are accepted. By default (None), FFT 
01220                             is applied to the whole data.
01221 
01222             mask:           Auxiliary channel mask(s). Given as a boolean
01223                             list, it is applied to all specified rows.
01224                             A list of boolean lists can also be used to 
01225                             apply different masks. In the latter case, the
01226                             length of 'mask' must be the same as that of
01227                             'rowno'. The default is None. 
01228         
01229             getrealimag:    If True, returns the real and imaginary part 
01230                             values of the complex results.
01231                             If False (the default), returns the amplitude
01232                             (absolute value) normalised with Ndata/2 and
01233                             phase (argument, in unit of radian).
01234 
01235         Returns:
01236 
01237             A list of dictionaries containing the results for each spectrum.
01238             Each dictionary contains two values, the real and the imaginary 
01239             parts when getrealimag = True, or the amplitude(absolute value)
01240             and the phase(argument) when getrealimag = False. The key for
01241             these values are 'real' and 'imag', or 'ampl' and 'phase',
01242             respectively.
01243         """
01244         if rowno is None:
01245             rowno = []
01246         if isinstance(rowno, int):
01247             rowno = [rowno]
01248         elif not (isinstance(rowno, list) or isinstance(rowno, tuple)):
01249             raise TypeError("The row number(s) must be int, list or tuple.")
01250         if len(rowno) == 0: rowno = [i for i in xrange(self.nrow())]
01251 
01252         usecommonmask = True
01253         
01254         if mask is None:
01255             mask = []
01256         if isinstance(mask, list) or isinstance(mask, tuple):
01257             if len(mask) == 0:
01258                 mask = [[]]
01259             else:
01260                 if isinstance(mask[0], bool):
01261                     if len(mask) != self.nchan(self.getif(rowno[0])):
01262                         raise ValueError("The spectra and the mask have "
01263                                          "different length.")
01264                     mask = [mask]
01265                 elif isinstance(mask[0], list) or isinstance(mask[0], tuple):
01266                     usecommonmask = False
01267                     if len(mask) != len(rowno):
01268                         raise ValueError("When specifying masks for each "
01269                                          "spectrum, the numbers of them "
01270                                          "must be identical.")
01271                     for i in xrange(len(mask)):
01272                         if len(mask[i]) != self.nchan(self.getif(rowno[i])):
01273                             raise ValueError("The spectra and the mask have "
01274                                              "different length.")
01275                 else:
01276                     raise TypeError("The mask must be a boolean list or "
01277                                     "a list of boolean list.")
01278         else:
01279             raise TypeError("The mask must be a boolean list or a list of "
01280                             "boolean list.")
01281 
01282         res = []
01283 
01284         imask = 0
01285         for whichrow in rowno:
01286             fspec = self._fft(whichrow, mask[imask], getrealimag)
01287             nspec = len(fspec)
01288             
01289             i = 0
01290             v1 = []
01291             v2 = []
01292             reselem = {"real":[],"imag":[]} if getrealimag \
01293                                             else {"ampl":[],"phase":[]}
01294             
01295             while (i < nspec):
01296                 v1.append(fspec[i])
01297                 v2.append(fspec[i+1])
01298                 i += 2
01299             
01300             if getrealimag:
01301                 reselem["real"]  += v1
01302                 reselem["imag"]  += v2
01303             else:
01304                 reselem["ampl"]  += v1
01305                 reselem["phase"] += v2
01306             
01307             res.append(reselem)
01308             
01309             if not usecommonmask: 
01310                 imask += 1
01311         
01312         return res
01313 
01314     @asaplog_post_dec
01315     def create_mask(self, *args, **kwargs):
01316         """\
01317         Compute and return a mask based on [min, max] windows.
01318         The specified windows are to be INCLUDED, when the mask is
01319         applied.
01320 
01321         Parameters:
01322 
01323             [min, max], [min2, max2], ...
01324                 Pairs of start/end points (inclusive) specifying the regions
01325                 to be masked
01326 
01327             invert:     optional argument. If specified as True,
01328                         return an inverted mask, i.e. the regions
01329                         specified are EXCLUDED
01330 
01331             row:        create the mask using the specified row for
01332                         unit conversions, default is row=0;
01333                         only necessary if frequency varies over rows.
01334 
01335         Examples::
01336 
01337             scan.set_unit('channel')
01338             # a)
01339             msk = scan.create_mask([400, 500], [800, 900])
01340             # masks everything outside 400 and 500
01341             # and 800 and 900 in the unit 'channel'
01342 
01343             # b)
01344             msk = scan.create_mask([400, 500], [800, 900], invert=True)
01345             # masks the regions between 400 and 500
01346             # and 800 and 900 in the unit 'channel'
01347 
01348             # c)
01349             #mask only channel 400
01350             msk =  scan.create_mask([400])
01351 
01352         """
01353         row = kwargs.get("row", 0)
01354         data = self._getabcissa(row)
01355         u = self._getcoordinfo()[0]
01356         if u == "":
01357             u = "channel"
01358         msg = "The current mask window unit is %s" % u
01359         i = self._check_ifs()
01360         if not i:
01361             msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
01362         asaplog.push(msg)
01363         n = len(data)
01364         msk = _n_bools(n, False)
01365         # test if args is a 'list' or a 'normal *args - UGLY!!!
01366 
01367         ws = (isinstance(args[-1][-1], int) 
01368               or isinstance(args[-1][-1], float)) and args or args[0]
01369         for window in ws:
01370             if len(window) == 1:
01371                 window = [window[0], window[0]]
01372             if len(window) == 0 or  len(window) > 2:
01373                 raise ValueError("A window needs to be defined as "
01374                                  "[start(, end)]")
01375             if window[0] > window[1]:
01376                 tmp = window[0]
01377                 window[0] = window[1]
01378                 window[1] = tmp
01379             for i in range(n):
01380                 if data[i] >= window[0] and data[i] <= window[1]:
01381                     msk[i] = True
01382         if kwargs.has_key('invert'):
01383             if kwargs.get('invert'):
01384                 msk = mask_not(msk)
01385         return msk
01386 
01387     def get_masklist(self, mask=None, row=0, silent=False):
01388         """\
01389         Compute and return a list of mask windows, [min, max].
01390 
01391         Parameters:
01392 
01393             mask:       channel mask, created with create_mask.
01394 
01395             row:        calculate the masklist using the specified row
01396                         for unit conversions, default is row=0;
01397                         only necessary if frequency varies over rows.
01398 
01399         Returns:
01400 
01401             [min, max], [min2, max2], ...
01402                 Pairs of start/end points (inclusive) specifying
01403                 the masked regions
01404 
01405         """
01406         if not (isinstance(mask,list) or isinstance(mask, tuple)):
01407             raise TypeError("The mask should be list or tuple.")
01408         if len(mask) <= 0:
01409             raise TypeError("The mask must contain at least one element.")
01410         data = self._getabcissa(row)
01411         if len(data) != len(mask):
01412             msg = "Number of channels in scantable != number of mask elements"
01413             raise TypeError(msg)
01414         u = self._getcoordinfo()[0]
01415         if u == "":
01416             u = "channel"
01417         msg = "The current mask window unit is %s" % u
01418         i = self._check_ifs()
01419         if not i:
01420             msg += "\nThis mask is only valid for IF=%d" % (self.getif(i))
01421         if not silent:
01422             asaplog.push(msg)
01423         masklist = []
01424         ist, ien = self.get_mask_indices(mask)
01426         if ist is not None and ien is not None:
01427             for i in xrange(len(ist)):
01428                 limits = [data[ist[i]], data[ien[i]]]
01429                 limits.sort()
01430                 masklist.append([limits[0], limits[1]])
01431         return masklist
01432 
01433     def get_mask_indices(self, mask=None):
01434         """\
01435         Compute and return lists of mask start indices and mask end indices.
01436 
01437         Parameters:
01438 
01439             mask:       channel mask, created with create_mask.
01440 
01441         Returns:
01442 
01443             List of mask start indices and that of mask end indices,
01444             i.e., [istart1,istart2,....], [iend1,iend2,....].
01445 
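        Example (a sketch; 'scan' and the boolean channel mask 'msk' created
        with create_mask are assumed)::

            istart, iend = scan.get_mask_indices(msk)
            # e.g. istart = [400, 800] and iend = [500, 900] for a mask
            # covering channels 400-500 and 800-900
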
01446         """
01447         if not (isinstance(mask,list) or isinstance(mask, tuple)):
01448             raise TypeError("The mask should be list or tuple.")
01449         if len(mask) <= 0:
01450             raise TypeError("The mask should contain at least one element.")
01451         istart = []
01452         iend = []
01453         if mask[0]: 
01454             istart.append(0)
01455         for i in range(len(mask)-1):
01456             if not mask[i] and mask[i+1]:
01457                 istart.append(i+1)
01458             elif mask[i] and not mask[i+1]:
01459                 iend.append(i)
01460         if mask[len(mask)-1]: 
01461             iend.append(len(mask)-1)
01462         if len(istart) != len(iend):
01463             raise RuntimeError("Number of mask start indices != number of mask end indices.")
01464         for i in range(len(istart)):
01465             if istart[i] > iend[i]:
01466                 raise RuntimeError("Mask start index > mask end index")
01468         return istart,iend
01469 
01470     @asaplog_post_dec
01471     def parse_maskexpr(self, maskstring):
01472         """
01473         Parse CASA type mask selection syntax (IF dependent).
01474 
01475         Parameters:
01476             maskstring : A string mask selection expression.
01477                          Comma-separated selections specify different
01478                          IF/channel combinations. IFs and channel selections
01479                          are separated by a colon, ':'.
01480                      examples:
01481                          ''          = all IFs (all channels)
01482                          '<2,4~6,9'  = IFs 0,1,4,5,6,9 (all channels)
01483                          '3:3~45;60' = channels 3 to 45 and 60 in IF 3
01484                          '0~1:2~6,8' = channels 2 to 6 in IFs 0,1, and
01485                                        all channels in IF8
01486         Returns:
01487         A dictionary of selected (valid) IF and masklist pairs,
01488         e.g. {'0': [[50,250],[350,462]], '2': [[100,400],[550,974]]}
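
        Example (a hypothetical selection; the IF numbers, channel ranges
        and returned values depend entirely on the data)::

            seldict = scan.parse_maskexpr('0~1:300~450,3')
            # e.g. {'0': [[300.0, 450.0]], '1': [[300.0, 450.0]],
            #       '3': [[0, 4096]]}  (IF 3 gets its full channel range)
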
01489         """
01490         if not isinstance(maskstring,str):
01491             asaplog.post()
01492             asaplog.push("Mask expression should be a string.")
01493             asaplog.post("ERROR")
01494         
01495         valid_ifs = self.getifnos()
01496         frequnit = self.get_unit()
01497         seldict = {}
01498         if maskstring == "":
01499             maskstring = str(valid_ifs)[1:-1]
01500         ## split each selection "IF range[:CHAN range]"
01501         sellist = maskstring.split(',')
01502         for currselstr in sellist:
01503             selset = currselstr.split(':')
01504             # spw and mask string (may include ~, < or >)
01505             spwmasklist = self._parse_selection(selset[0], typestr='integer',
01506                                                 minval=min(valid_ifs),
01507                                                 maxval=max(valid_ifs))
01508             for spwlist in spwmasklist:
01509                 selspws = []
01510                 for ispw in range(spwlist[0],spwlist[1]+1):
01511                     # Put into the list only if ispw exists
01512                     if valid_ifs.count(ispw):
01513                         selspws.append(ispw)
01514             del spwmasklist, spwlist
01515 
01516             # parse frequency mask list
01517             if len(selset) > 1:
01518                 freqmasklist = self._parse_selection(selset[1], typestr='float',
01519                                                      offset=0.)
01520             else:
01521                 # want to select the whole spectrum
01522                 freqmasklist = [None]
01523 
01524             ## define a dictionary of spw - masklist combination
01525             for ispw in selspws:
01526                 #print "working on", ispw
01527                 spwstr = str(ispw)
01528                 if len(selspws) == 0:
01529                     # empty spw
01530                     continue
01531                 else:
01532                     ## want to get min and max of the spw and
01533                     ## offset to set for '<' and '>'
01534                     if frequnit == 'channel':
01535                         minfreq = 0
01536                         maxfreq = self.nchan(ifno=ispw)
01537                         offset = 0.5
01538                     else:
01539                         ## This is ugly part. need improvement
01540                         for ifrow in xrange(self.nrow()):
01541                             if self.getif(ifrow) == ispw:
01542                                 #print "IF",ispw,"found in row =",ifrow
01543                                 break
01544                         freqcoord = self.get_coordinate(ifrow)
01545                         freqs = self._getabcissa(ifrow)
01546                         minfreq = min(freqs)
01547                         maxfreq = max(freqs)
01548                         if len(freqs) == 1:
01549                             offset = 0.5
01550                         elif frequnit.find('Hz') > 0:
01551                             offset = abs(freqcoord.to_frequency(1,
01552                                                                 unit=frequnit)
01553                                          -freqcoord.to_frequency(0,
01554                                                                  unit=frequnit)
01555                                          )*0.5
01556                         elif frequnit.find('m/s') > 0:
01557                             offset = abs(freqcoord.to_velocity(1,
01558                                                                unit=frequnit)
01559                                          -freqcoord.to_velocity(0,
01560                                                                 unit=frequnit)
01561                                          )*0.5
01562                         else:
01563                             asaplog.post()
01564                             asaplog.push("Invalid frequency unit")
01565                             asaplog.post("ERROR")
01566                         del freqs, freqcoord, ifrow
01567                     for freq in freqmasklist:
01568                         selmask = freq or [minfreq, maxfreq]
01569                         if selmask[0] == None:
01570                             ## selection was "<freq[1]".
01571                             if selmask[1] < minfreq:
01572                                 ## avoid adding region selection
01573                                 selmask = None
01574                             else:
01575                                 selmask = [minfreq,selmask[1]-offset]
01576                         elif selmask[1] == None:
01577                             ## selection was ">freq[0]"
01578                             if selmask[0] > maxfreq:
01579                                 ## avoid adding region selection
01580                                 selmask = None
01581                             else:
01582                                 selmask = [selmask[0]+offset,maxfreq]
01583                         if selmask:
01584                             if not seldict.has_key(spwstr):
01585                                 # new spw selection
01586                                 seldict[spwstr] = []
01587                             seldict[spwstr] += [selmask]
01588                     del minfreq,maxfreq,offset,freq,selmask
01589                 del spwstr
01590             del freqmasklist
01591         del valid_ifs
01592         if len(seldict) == 0:
01593             asaplog.post()
01594             asaplog.push("No valid selection in the mask expression: "
01595                          +maskstring)
01596             asaplog.post("WARN")
01597             return None
01598         msg = "Selected masklist:\n"
01599         for sif, lmask in seldict.iteritems():
01600             msg += "   IF"+sif+" - "+str(lmask)+"\n"
01601         asaplog.push(msg)
01602         return seldict
01603 
01604     @asaplog_post_dec
01605     def parse_idx_selection(self, mode, selexpr):
01606         """
01607         Parse CASA type mask selection syntax of SCANNO, IFNO, POLNO,
01608         BEAMNO, and row number
01609 
01610         Parameters:
01611             mode       : which column to select.
01612                          ['scan'|'if'|'pol'|'beam'|'row']
01613             selexpr    : A comma separated selection expression.
01614                      examples:
01615                          ''          = all (returns [])
01616                          '<2,4~6,9'  = indices less than 2, 4 to 6 and 9
01617                                        (returns [0,1,4,5,6,9])
01618         Returns:
01619         A List of selected indices
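
        Example (a sketch; assumes IFNOs 0-3 exist in the data)::

            ifnos = scan.parse_idx_selection('if', '0~1,3')
            # --> [0, 1, 3]
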
01620         """
01621         if selexpr == "":
01622             return []
01623         valid_modes = {'s': 'scan', 'i': 'if', 'p': 'pol',
01624                        'b': 'beam', 'r': 'row'}
01625         smode =  mode.lower()[0]
01626         if not (smode in valid_modes.keys()):
01627             msg = "Invalid mode '%s'. Valid modes are %s" %\
01628                   (mode, str(valid_modes.values()))
01629             asaplog.post()
01630             asaplog.push(msg)
01631             asaplog.post("ERROR")
01632         mode = valid_modes[smode]
01633         minidx = None
01634         maxidx = None
01635         if smode == 'r':
01636             minidx = 0
01637             maxidx = self.nrow()
01638         else:
01639             idx = getattr(self,"get"+mode+"nos")()
01640             minidx = min(idx)
01641             maxidx = max(idx)
01642             del idx
01643         sellist = selexpr.split(',')
01644         idxlist = []
01645         for currselstr in sellist:
01646             # single range (may include ~, < or >)
01647             currlist = self._parse_selection(currselstr, typestr='integer',
01648                                              minval=minidx,maxval=maxidx)
01649             for thelist in currlist:
01650                 idxlist += range(thelist[0],thelist[1]+1)
01651         msg = "Selected %s: %s" % (mode.upper()+"NO", str(idxlist))
01652         asaplog.push(msg)
01653         return idxlist
01654 
01655     def _parse_selection(self, selstr, typestr='float', offset=0.,
01656                          minval=None, maxval=None):
01657         """
01658         Parameters:
01659             selstr :    The Selection string, e.g., '<3;5~7;100~103;9'
01660             typestr :   The type of the values in returned list
01661                         ('integer' or 'float')
01662             offset :    The offset value to subtract from or add to
01663                         the boundary value if the selection string
01664                         includes '<' or '>' [Valid only for typestr='float']
01665             minval, maxval :  The minimum/maximum values to set if the
01666                               selection string includes '<' or '>'.
01667                               The list element is filled with None by default.
01668         Returns:
01669             A list of min/max pair of selections.
01670         Example:
01671             _parse_selection('<3;5~7;9',typestr='int',minval=0)
01672             --> returns [[0,2],[5,7],[9,9]]
01673             _parse_selection('<3;5~7;9',typestr='float',offset=0.5,minval=0)
01674             --> returns [[0.,2.5],[5.0,7.0],[9.,9.]]
01675         """
01676         selgroups = selstr.split(';')
01677         sellists = []
01678         if typestr.lower().startswith('int'):
01679             formatfunc = int
01680             offset = 1
01681         else:
01682             formatfunc = float
01683         
01684         for currsel in  selgroups:
01685             if currsel.find('~') > 0:
01686                 # val0 <= x <= val1
01687                 minsel = formatfunc(currsel.split('~')[0].strip())
01688                 maxsel = formatfunc(currsel.split('~')[1].strip()) 
01689             elif currsel.strip().find('<=') > -1:
01690                 bound = currsel.split('<=')
01691                 try: # try "x <= val"
01692                     minsel = minval
01693                     maxsel = formatfunc(bound[1].strip())
01694                 except ValueError: # now "val <= x"
01695                     minsel = formatfunc(bound[0].strip())
01696                     maxsel = maxval
01697             elif currsel.strip().find('>=') > -1:
01698                 bound = currsel.split('>=')
01699                 try: # try "x >= val"
01700                     minsel = formatfunc(bound[1].strip())
01701                     maxsel = maxval
01702                 except ValueError: # now "val >= x"
01703                     minsel = minval
01704                     maxsel = formatfunc(bound[0].strip())
01705             elif currsel.strip().find('<') > -1:
01706                 bound = currsel.split('<')
01707                 try: # try "x < val"
01708                     minsel = minval
01709                     maxsel = formatfunc(bound[1].strip()) \
01710                              - formatfunc(offset)
01711                 except ValueError: # now "val < x"
01712                     minsel = formatfunc(bound[0].strip()) \
01713                          + formatfunc(offset)
01714                     maxsel = maxval
01715             elif currsel.strip().find('>') > -1:
01716                 bound = currsel.split('>')
01717                 try: # try "x > val"
01718                     minsel = formatfunc(bound[1].strip()) \
01719                              + formatfunc(offset)
01720                     maxsel = maxval
01721                 except ValueError: # now "val > x"
01722                     minsel = minval
01723                     maxsel = formatfunc(bound[0].strip()) \
01724                              - formatfunc(offset)
01725             else:
01726                 minsel = formatfunc(currsel)
01727                 maxsel = formatfunc(currsel)
01728             sellists.append([minsel,maxsel])
01729         return sellists
01730 
01731 #    def get_restfreqs(self):
01732 #        """
01733 #        Get the restfrequency(s) stored in this scantable.
01734 #        The return value(s) are always of unit 'Hz'
01735 #        Parameters:
01736 #            none
01737 #        Returns:
01738 #            a list of doubles
01739 #        """
01740 #        return list(self._getrestfreqs())
01741 
01742     def get_restfreqs(self, ids=None):
01743         """\
01744         Get the restfrequency(s) stored in this scantable.
01745         The return value(s) are always of unit 'Hz'
01746 
01747         Parameters:
01748 
01749             ids: (optional) a list of MOLECULE_IDs for which the
01750                  restfrequency(s) are to be retrieved
01751 
01752         Returns:
01753 
01754             dictionary containing ids and a list of doubles for each id
01755 
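        Example (a sketch; the molecule IDs and values shown are
        hypothetical)::

            allfreqs = scan.get_restfreqs()
            # e.g. {0: [1.420405752e9], 1: [1.6654018e9]}
            freqs_id0 = scan.get_restfreqs(ids=0)   # list of doubles for ID 0
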
01756         """
01757         if ids is None:
01758             rfreqs = {}
01759             idlist = self.getmolnos()
01760             for i in idlist:
01761                 rfreqs[i] = list(self._getrestfreqs(i))
01762             return rfreqs
01763         else:
01764             if type(ids) == list or type(ids) == tuple:
01765                 rfreqs = {}
01766                 for i in ids:
01767                     rfreqs[i] = list(self._getrestfreqs(i))
01768                 return rfreqs
01769             else:
01770                 return list(self._getrestfreqs(ids))
01771 
01772     @asaplog_post_dec
01773     def set_restfreqs(self, freqs=None, unit='Hz'):
01774         """\
01775         Set or replace the rest frequency.
01776         If the 'freqs' argument holds a scalar,
01777         then that rest frequency will be applied to all the selected
01778         data.  If the 'freqs' argument holds
01779         a vector, then it MUST be of equal or smaller length than
01780         the number of IFs (and the available restfrequencies will be
01781         replaced by this vector).  In this case, *all* data have
01782         the restfrequency set per IF according
01783         to the corresponding value you give in the 'freqs' vector.
01784         E.g. 'freqs=[1e9, 2e9]'  would mean IF 0 gets restfreq 1e9 and
01785         IF 1 gets restfreq 2e9.
01786 
01787         You can also specify the frequencies via a linecatalog.
01788 
01789         Parameters:
01790 
01791             freqs:   list of rest frequency values or string identifiers
01792 
01793             unit:    unit for rest frequency (default 'Hz')
01794 
01795 
01796         Example::
01797 
01798             # set the given restfrequency for all the currently selected IFs
01799             scan.set_restfreqs(freqs=1.4e9)
01800             # set restfrequencies for the n IFs  (n > 1) in the order of the
01801             # list, i.e
01802             # IF0 -> 1.4e9, IF1 -> 1.41e9, IF2 -> 1.42e9
01803             # len(list_of_restfreqs) == nIF
01804             # for nIF == 1 the following will set multiple restfrequencies for
01805             # that IF
01806             scan.set_restfreqs(freqs=[1.4e9, 1.41e9, 1.42e9])
01807             # set multiple restfrequencies per IF, as a list of lists where
01808             # the outer list has nIF elements and the inner lists are of arbitrary length
01809             scan.set_restfreqs(freqs=[[1.4e9, 1.41e9], [1.67e9]])
01810 
01811         *Note*:
01812 
01813             To do more sophisticated rest-frequency setting, e.g. on a
01814             source and IF basis, use scantable.set_selection() before using
01815             this function::
01816 
01817                 # provided your scantable is called scan
01818                 selection = selector()
01819                 selection.set_name('ORION*')
01820                 selection.set_ifs([1])
01821                 scan.set_selection(selection)
01822                 scan.set_restfreqs(freqs=86.6e9)
01823 
01824         """
01825         varlist = vars()
01826         from asap import linecatalog
01827         # simple  value
01828         if isinstance(freqs, int) or isinstance(freqs, float):
01829             self._setrestfreqs([freqs], [""], unit)
01830         # list of values
01831         elif isinstance(freqs, list) or isinstance(freqs, tuple):
01832             # list values are scalars
01833             if isinstance(freqs[-1], int) or isinstance(freqs[-1], float):
01834                 if len(freqs) == 1:
01835                     self._setrestfreqs(freqs, [""], unit)
01836                 else:
01837                     # allow the 'old' mode of setting multiple IFs
01838                     savesel = self._getselection()
01839                     sel = self.get_selection()
01840                     iflist = self.getifnos()
01841                     if len(freqs)>len(iflist):
01842                         raise ValueError("number of elements in list of list "
01843                                          "exeeds the current IF selections")
01844                     iflist = self.getifnos()
01845                     for i, fval in enumerate(freqs):
01846                         sel.set_ifs(iflist[i])
01847                         self._setselection(sel)
01848                         self._setrestfreqs([fval], [""], unit)
01849                     self._setselection(savesel)
01850 
01851             # list values are dicts: {'value': ..., 'name': ...}
01852             elif isinstance(freqs[-1], dict):
01853                 values = []
01854                 names = []
01855                 for d in freqs:
01856                     values.append(d["value"])
01857                     names.append(d["name"])
01858                 self._setrestfreqs(values, names, unit)
01859             elif isinstance(freqs[-1], list) or isinstance(freqs[-1], tuple):
01860                 savesel = self._getselection()
01861                 sel = self.get_selection()
01862                 iflist = self.getifnos()
01863                 if len(freqs)>len(iflist):
01864                     raise ValueError("number of elements in list of list exeeds"
01865                                      " the current IF selections")
01866                 for i, fval in enumerate(freqs):
01867                     sel.set_ifs(iflist[i])
01868                     self._setselection(sel)
01869                     self._setrestfreqs(fval, [""], unit)
01870                 self._setselection(savesel)
01871         # freqs are to be taken from a linecatalog
01872         elif isinstance(freqs, linecatalog):
01873             savesel = self._getselection()
01874             sel = self.get_selection()
01875             for i in xrange(freqs.nrow()):
01876                 sel.set_ifs(self.getifnos()[i])
01877                 self._setselection(sel)
01878                 self._setrestfreqs([freqs.get_frequency(i)],
01879                                    [freqs.get_name(i)], "MHz")
01880                 # ensure that we are not iterating past nIF
01881                 if i == self.nif()-1: break
01882             self._setselection(savesel)
01883         else:
01884             return
01885         self._add_history("set_restfreqs", varlist)
01886 
01887     @asaplog_post_dec
01888     def shift_refpix(self, delta):
01889         """\
01890         Shift the reference pixel of the Spectra Coordinate by an
01891         integer amount.
01892 
01893         Parameters:
01894 
01895             delta:   the amount to shift by
01896 
01897         *Note*:
01898 
01899             Be careful using this with broadband data.
01900 
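        Example (a minimal sketch; the shift value is arbitrary)::

            # shift the reference pixel by 10 channels
            scan.shift_refpix(10)
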
01901         """
01902         varlist = vars()
01903         Scantable.shift_refpix(self, delta)
01904         s._add_history("shift_refpix", varlist)
01905 
01906     @asaplog_post_dec
01907     def history(self, filename=None):
01908         """\
01909         Print the history. Optionally to a file.
01910 
01911         Parameters:
01912 
01913             filename:    The name of the file to save the history to.
01914 
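        Example (a sketch; the file name is hypothetical)::

            scan.history()                          # page the history
            scan.history('scan_history.txt')        # also write it to a file
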
01915         """
01916         hist = list(self._gethistory())
01917         out = "-"*80
01918         for h in hist:
01919             if h.startswith("---"):
01920                 out = "\n".join([out, h])
01921             else:
01922                 items = h.split("##")
01923                 date = items[0]
01924                 func = items[1]
01925                 items = items[2:]
01926                 out += "\n"+date+"\n"
01927                 out += "Function: %s\n  Parameters:" % (func)
01928                 for i in items:
01929                     if i == '':
01930                         continue
01931                     s = i.split("=")
01932                     out += "\n   %s = %s" % (s[0], s[1])
01933                 out = "\n".join([out, "-"*80])
01934         if filename is not None:
01935             if filename is "":
01936                 filename = 'scantable_history.txt'
01937             import os
01938             filename = os.path.expandvars(os.path.expanduser(filename))
01939             if not os.path.isdir(filename):
01940                 data = open(filename, 'w')
01941                 data.write(out)
01942                 data.close()
01943             else:
01944                 msg = "Illegal file name '%s'." % (filename)
01945                 raise IOError(msg)
01946         return page(out)
01947 
01948     #
01949     # Maths business
01950     #
01951     @asaplog_post_dec
01952     def average_time(self, mask=None, scanav=False, weight='tint', align=False):
01953         """\
01954         Return the (time) weighted average of a scan. Scans will be averaged
01955         only if the source direction (RA/DEC) agrees to within 1'; otherwise they are kept separate.
01956 
01957         *Note*:
01958 
01959             Averaging is done in channels only - align if necessary.
01960 
01961         Parameters:
01962 
01963             mask:     an optional mask (only used for 'var' and 'tsys'
01964                       weighting)
01965 
01966             scanav:   True averages each scan separately
01967                       False (default) averages all scans together,
01968 
01969             weight:   Weighting scheme.
01970                       'none'     (mean, no weighting)
01971                       'var'      (1/var(spec) weighted)
01972                       'tsys'     (1/Tsys**2 weighted)
01973                       'tint'     (integration time weighted)
01974                       'tintsys'  (Tint/Tsys**2)
01975                       'median'   (median averaging)
01976                       The default is 'tint'
01977 
01978             align:    align the spectra in velocity before averaging. It takes
01979                       the time of the first spectrum as reference time.
01980 
01981         Example::
01982 
01983             # time average the scantable without using a mask
01984             newscan = scan.average_time()
01985 
01986         """
01987         varlist = vars()
01988         weight = weight or 'TINT'
01989         mask = mask or ()
01990         scanav = (scanav and 'SCAN') or 'NONE'
01991         scan = (self, )
01992 
01993         if align:
01994             scan = (self.freq_align(insitu=False), )
01995         s = None
01996         if weight.upper() == 'MEDIAN':
01997             s = scantable(self._math._averagechannel(scan[0], 'MEDIAN',
01998                                                      scanav))
01999         else:
02000             s = scantable(self._math._average(scan, mask, weight.upper(),
02001                           scanav))
02002         s._add_history("average_time", varlist)
02003         return s
02004 
02005     @asaplog_post_dec
02006     def convert_flux(self, jyperk=None, eta=None, d=None, insitu=None):
02007         """\
02008         Return a scan where all spectra are converted to either
02009         Jansky or Kelvin depending upon the flux units of the scan table.
02010         By default the function tries to look the values up internally.
02011         If it can't find them (or if you want to over-ride), you must
02012         specify EITHER jyperk OR eta (and D which it will try to look up
02013         also if you don't set it). jyperk takes precedence if you set both.
02014 
02015         Parameters:
02016 
02017             jyperk:      the Jy / K conversion factor
02018 
02019             eta:         the aperture efficiency
02020 
02021             d:           the geometric diameter (metres)
02022 
02023             insitu:      if False a new scantable is returned.
02024                          Otherwise, the scaling is done in-situ
02025                          The default is taken from .asaprc (False)
02026 
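        Example (a sketch; the Jy/K factor is an assumed value)::

            # convert using an explicit Jy/K factor, returning a new scantable
            jyscan = scan.convert_flux(jyperk=24.0, insitu=False)
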
02027         """
02028         if insitu is None: insitu = rcParams['insitu']
02029         self._math._setinsitu(insitu)
02030         varlist = vars()
02031         jyperk = jyperk or -1.0
02032         d = d or -1.0
02033         eta = eta or -1.0
02034         s = scantable(self._math._convertflux(self, d, eta, jyperk))
02035         s._add_history("convert_flux", varlist)
02036         if insitu: self._assign(s)
02037         else: return s
02038 
02039     @asaplog_post_dec
02040     def gain_el(self, poly=None, filename="", method="linear", insitu=None):
02041         """\
02042         Return a scan after applying a gain-elevation correction.
02043         The correction can be made via either a polynomial or a
02044         table-based interpolation (and extrapolation if necessary).
02045         You specify polynomial coefficients, an ascii table or neither.
02046         If you specify neither, then a polynomial correction will be made
02047         with built in coefficients known for certain telescopes (an error
02048         will occur if the instrument is not known).
02049         The data and Tsys are *divided* by the scaling factors.
02050 
02051         Parameters:
02052 
02053             poly:        Polynomial coefficients (default None) to compute a
02054                          gain-elevation correction as a function of
02055                          elevation (in degrees).
02056 
02057             filename:    The name of an ascii file holding correction factors.
02058                          The first row of the ascii file must give the column
02059                          names and these MUST include columns
02060                          'ELEVATION' (degrees) and 'FACTOR' (multiply data
02061                          by this) somewhere.
02062                          The second row must give the data type of the
02063                          column. Use 'R' for Real and 'I' for Integer.
02064                          An example file would be
02065                          (actual factors are arbitrary) :
02066 
02067                          TIME ELEVATION FACTOR
02068                          R R R
02069                          0.1 0 0.8
02070                          0.2 20 0.85
02071                          0.3 40 0.9
02072                          0.4 60 0.85
02073                          0.5 80 0.8
02074                          0.6 90 0.75
02075 
02076             method:      Interpolation method when correcting from a table.
02077                          Values are  'nearest', 'linear' (default), 'cubic'
02078                          and 'spline'
02079 
02080             insitu:      if False a new scantable is returned.
02081                          Otherwise, the scaling is done in-situ
02082                          The default is taken from .asaprc (False)
02083 
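        Example (a sketch; the polynomial coefficients and file name are
        hypothetical)::

            # polynomial correction as a function of elevation (degrees)
            corr = scan.gain_el(poly=[0.5, 0.0125, -0.0001], insitu=False)
            # or a table-based correction
            corr = scan.gain_el(filename='gainel_table.txt', method='linear',
                                insitu=False)
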
02084         """
02085 
02086         if insitu is None: insitu = rcParams['insitu']
02087         self._math._setinsitu(insitu)
02088         varlist = vars()
02089         poly = poly or ()
02090         from os.path import expandvars
02091         filename = expandvars(filename)
02092         s = scantable(self._math._gainel(self, poly, filename, method))
02093         s._add_history("gain_el", varlist)
02094         if insitu:
02095             self._assign(s)
02096         else:
02097             return s
02098 
02099     @asaplog_post_dec
02100     def freq_align(self, reftime=None, method='cubic', insitu=None):
02101         """\
02102         Return a scan where all rows have been aligned in frequency/velocity.
02103         The alignment frequency frame (e.g. LSRK) is that set by function
02104         set_freqframe.
02105 
02106         Parameters:
02107 
02108             reftime:     reference time to align at. By default, the time of
02109                          the first row of data is used.
02110 
02111             method:      Interpolation method for regridding the spectra.
02112                          Choose from 'nearest', 'linear', 'cubic' (default)
02113                          and 'spline'
02114 
02115             insitu:      if False a new scantable is returned.
02116                          Otherwise, the scaling is done in-situ
02117                          The default is taken from .asaprc (False)
02118 
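        Example (a minimal sketch)::

            # align all rows at the time of the first integration
            aligned = scan.freq_align(insitu=False)
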
02119         """
02120         if insitu is None: insitu = rcParams["insitu"]
02121         oldInsitu = self._math._insitu()
02122         self._math._setinsitu(insitu)
02123         varlist = vars()
02124         reftime = reftime or ""
02125         s = scantable(self._math._freq_align(self, reftime, method))
02126         s._add_history("freq_align", varlist)
02127         self._math._setinsitu(oldInsitu)
02128         if insitu: 
02129             self._assign(s)
02130         else: 
02131             return s
02132 
02133     @asaplog_post_dec
02134     def opacity(self, tau=None, insitu=None):
02135         """\
02136         Apply an opacity correction. The data
02137         and Tsys are multiplied by the correction factor.
02138 
02139         Parameters:
02140 
02141             tau:         (list of) opacity from which the correction factor is
02142                          exp(tau*ZD)
02143                          where ZD is the zenith-distance.
02144                          If a list is provided, it has to be of length nIF,
02145                          nIF*nPol or 1 and in order of IF/POL, e.g.
02146                          [opif0pol0, opif0pol1, opif1pol0 ...]
02147                          if tau is `None` the opacities are determined from a
02148                          model.
02149 
02150             insitu:      if False a new scantable is returned.
02151                          Otherwise, the scaling is done in-situ
02152                          The default is taken from .asaprc (False)
02153 
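        Example (a sketch; the zenith opacity is an assumed value)::

            # apply a single zenith opacity to all IFs/polarisations
            scan.opacity(tau=0.09, insitu=True)
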
02154         """
02155         if insitu is None: 
02156             insitu = rcParams['insitu']
02157         self._math._setinsitu(insitu)
02158         varlist = vars()
02159         if not hasattr(tau, "__len__"):
02160             tau = [tau]
02161         s = scantable(self._math._opacity(self, tau))
02162         s._add_history("opacity", varlist)
02163         if insitu: 
02164             self._assign(s)
02165         else: 
02166             return s
02167 
02168     @asaplog_post_dec
02169     def bin(self, width=5, insitu=None):
02170         """\
02171         Return a scan where all spectra have been binned up.
02172 
02173         Parameters:
02174 
02175             width:       The bin width (default=5) in pixels
02176 
02177             insitu:      if False a new scantable is returned.
02178                          Otherwise, the scaling is done in-situ
02179                          The default is taken from .asaprc (False)
02180 
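        Example (a minimal sketch)::

            # bin up by 10 channels, returning a new scantable
            binned = scan.bin(width=10, insitu=False)
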
02181         """
02182         if insitu is None: 
02183             insitu = rcParams['insitu']
02184         self._math._setinsitu(insitu)
02185         varlist = vars()
02186         s = scantable(self._math._bin(self, width))
02187         s._add_history("bin", varlist)
02188         if insitu:
02189             self._assign(s)
02190         else:
02191             return s
02192 
02193     @asaplog_post_dec
02194     def reshape(self, first, last, insitu=None):
02195         """Resize the band by providing first and last channel. 
02196         This will cut off all channels outside [first, last].
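
        Example (a minimal sketch; the channel values are arbitrary)::

            # keep only channels 100..900 (inclusive)
            sub = scan.reshape(100, 900, insitu=False)
            # negative 'last' counts back from the end of the band
            sub = scan.reshape(10, -10, insitu=False)
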
02197         """
02198         if insitu is None: 
02199             insitu = rcParams['insitu']
02200         varlist = vars()
02201         if last < 0:
02202             last = self.nchan()-1 + last
02203         s = None
02204         if insitu:
02205             s = self
02206         else:
02207             s = self.copy()
02208         s._reshape(first,last)
02209         s._add_history("reshape", varlist)
02210         if not insitu:
02211             return s        
02212 
02213     @asaplog_post_dec
02214     def resample(self, width=5, method='cubic', insitu=None):
02215         """\
02216         Return a scan where all spectra have been binned up.
02217 
02218         Parameters:
02219 
02220             width:       The bin width (default=5) in pixels
02221 
02222             method:      Interpolation method for the resampling.
02223                          Values are 'nearest', 'linear', 'cubic' (default)
02224                          and 'spline'
02225 
02226             insitu:      if False a new scantable is returned.
02227                          Otherwise, the scaling is done in-situ
02228                          The default is taken from .asaprc (False)
02229 
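        Example (a minimal sketch)::

            # resample onto a grid twice as coarse using cubic interpolation
            rs = scan.resample(width=2, method='cubic', insitu=False)
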
02230         """
02231         if insitu is None: 
02232             insitu = rcParams['insitu']
02233         self._math._setinsitu(insitu)
02234         varlist = vars()
02235         s = scantable(self._math._resample(self, method, width))
02236         s._add_history("resample", varlist)
02237         if insitu: 
02238             self._assign(s)
02239         else: 
02240             return s
02241 
02242     @asaplog_post_dec
02243     def average_pol(self, mask=None, weight='none'):
02244         """\
02245         Average the Polarisations together.
02246 
02247         Parameters:
02248 
02249             mask:        An optional mask defining the region, where the
02250                          averaging will be applied. The output will have all
02251                          specified points masked.
02252 
02253             weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
02254                          weighted), or 'tsys' (1/Tsys**2 weighted)
02255 
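        Example (a minimal sketch)::

            # average the polarisations together with Tsys weighting
            apol = scan.average_pol(weight='tsys')
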
02256         """
02257         varlist = vars()
02258         mask = mask or ()
02259         s = scantable(self._math._averagepol(self, mask, weight.upper()))
02260         s._add_history("average_pol", varlist)
02261         return s
02262 
02263     @asaplog_post_dec
02264     def average_beam(self, mask=None, weight='none'):
02265         """\
02266         Average the Beams together.
02267 
02268         Parameters:
02269             mask:        An optional mask defining the region, where the
02270                          averaging will be applied. The output will have all
02271                          specified points masked.
02272 
02273             weight:      Weighting scheme. 'none' (default), 'var' (1/var(spec)
02274                          weighted), or 'tsys' (1/Tsys**2 weighted)
02275 
02276         """
02277         varlist = vars()
02278         mask = mask or ()
02279         s = scantable(self._math._averagebeams(self, mask, weight.upper()))
02280         s._add_history("average_beam", varlist)
02281         return s
02282 
02283     def parallactify(self, pflag):
02284         """\
02285         Set a flag to indicate whether this data should be treated as having
02286         been 'parallactified' (total phase == 0.0)
02287 
02288         Parameters:
02289 
02290             pflag:  Bool indicating whether to turn this on (True) or
02291                     off (False)
02292 
02293         """
02294         varlist = vars()
02295         self._parallactify(pflag)
02296         self._add_history("parallactify", varlist)
02297 
02298     @asaplog_post_dec
02299     def convert_pol(self, poltype=None):
02300         """\
02301         Convert the data to a different polarisation type.
02302         Note that you will need cross-polarisation terms for most conversions.
02303 
02304         Parameters:
02305 
02306             poltype:    The new polarisation type. Valid types are:
02307                         'linear', 'circular', 'stokes' and 'linpol'
02308 
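        Example (a sketch; assumes the data contain linear polarisations
        including the cross-polarisation terms)::

            stokes = scan.convert_pol('stokes')
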
02309         """
02310         varlist = vars()
02311         s = scantable(self._math._convertpol(self, poltype))
02312         s._add_history("convert_pol", varlist)
02313         return s
02314 
02315     @asaplog_post_dec
02316     def smooth(self, kernel="hanning", width=5.0, order=2, plot=False, 
02317                insitu=None):
02318         """\
02319         Smooth the spectrum by the specified kernel (conserving flux).
02320 
02321         Parameters:
02322 
02323             kernel:     The type of smoothing kernel. Select from
02324                         'hanning' (default), 'gaussian', 'boxcar', 'rmedian'
02325                         or 'poly'
02326 
02327             width:      The width of the kernel in pixels. For 'hanning' this
02328                         is ignored; otherwise it defaults to 5 pixels.
02329                         For 'gaussian' it is the Full Width Half
02330                         Maximum. For 'boxcar' it is the full width.
02331                         For 'rmedian' and 'poly' it is the half width.
02332 
02333             order:      Optional parameter for 'poly' kernel (default is 2), to
02334                         specify the order of the polynomial. Ignored by all other
02335                         kernels.
02336 
02337             plot:       plot the original and the smoothed spectra.
02338                         Each individual fit then has to be approved
02339                         by typing 'y' or 'n'.
02340 
02341             insitu:     if False a new scantable is returned.
02342                         Otherwise, the scaling is done in-situ
02343                         The default is taken from .asaprc (False)
02344 
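        Example (a minimal sketch)::

            # 5-channel boxcar smooth, returning a new scantable
            sm = scan.smooth(kernel='boxcar', width=5, insitu=False)
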
02345         """
02346         if insitu is None: insitu = rcParams['insitu']
02347         self._math._setinsitu(insitu)
02348         varlist = vars()
02349 
02350         if plot: orgscan = self.copy()
02351 
02352         s = scantable(self._math._smooth(self, kernel.lower(), width, order))
02353         s._add_history("smooth", varlist)
02354 
02355         action = 'H'
02356         if plot:
02357             from asap.asapplotter import new_asaplot
02358             theplot = new_asaplot(rcParams['plotter.gui'])
02359             from matplotlib import rc as rcp
02360             rcp('lines', linewidth=1)
02361             theplot.set_panels()
02362             ylab=s._get_ordinate_label()
02363             #theplot.palette(0,["#777777","red"])
02364             for r in xrange(s.nrow()):
02365                 xsm=s._getabcissa(r)
02366                 ysm=s._getspectrum(r)
02367                 xorg=orgscan._getabcissa(r)
02368                 yorg=orgscan._getspectrum(r)
02369                 if action != "N": #skip plotting if rejecting all
02370                     theplot.clear()
02371                     theplot.hold()
02372                     theplot.set_axes('ylabel',ylab)
02373                     theplot.set_axes('xlabel',s._getabcissalabel(r))
02374                     theplot.set_axes('title',s._getsourcename(r))
02375                     theplot.set_line(label='Original',color="#777777")
02376                     theplot.plot(xorg,yorg)
02377                     theplot.set_line(label='Smoothed',color="red")
02378                     theplot.plot(xsm,ysm)
02379                     ### Ugly part for legend
02380                     for i in [0,1]:
02381                         theplot.subplots[0]['lines'].append(
02382                             [theplot.subplots[0]['axes'].lines[i]]
02383                             )
02384                     theplot.release()
02385                     ### Ugly part for legend
02386                     theplot.subplots[0]['lines']=[]
02387                 res = self._get_verify_action("Accept smoothing?",action)
02388                 #print "IF%d, POL%d: got result = %s" %(s.getif(r),s.getpol(r),res)
02389                 if r == 0: action = None
02390                 #res = raw_input("Accept smoothing ([y]/n): ")
02391                 if res.upper() == 'N':
02392                     # reject for the current rows
02393                     s._setspectrum(yorg, r)
02394                 elif res.upper() == 'R':
02395                     # reject all the following rows
02396                     action = "N"
02397                     s._setspectrum(yorg, r)
02398                 elif res.upper() == 'A':
02399                     # accept all the following rows
02400                     break
02401             theplot.quit()
02402             del theplot
02403             del orgscan
02404 
02405         if insitu: self._assign(s)
02406         else: return s
02407 
02408     @asaplog_post_dec
02409     def regrid_channel(self, width=5, plot=False, insitu=None):
02410         """\
02411         Regrid the spectra by the specified channel width
02412 
02413         Parameters:
02414 
02415             width:      The channel width (float) of regridded spectra
02416                         in the current spectral unit.
02417 
02418             plot:       [NOT IMPLEMENTED YET]
02419                         plot the original and the regridded spectra.
02420                         Each individual fit then has to be approved
02421                         by typing 'y' or 'n'.
02422 
02423             insitu:     if False a new scantable is returned.
02424                         Otherwise, the scaling is done in-situ
02425                         The default is taken from .asaprc (False)
02426 
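        Example (a sketch; assumes the current spectral unit is 'channel')::

            # regrid to a channel width of 4 original channels
            rg = scan.regrid_channel(width=4, insitu=False)
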
02427         """
02428         if insitu is None: insitu = rcParams['insitu']
02429         varlist = vars()
02430 
02431         if plot:
02432            asaplog.post()
02433            asaplog.push("Verification plot is not implemtnetd yet.")
02434            asaplog.post("WARN")
02435 
02436         s = self.copy()
02437         s._regrid_specchan(width)
02438 
02439         s._add_history("regrid_channel", varlist)
02440 
02441 #         if plot:
02442 #             from asap.asapplotter import new_asaplot
02443 #             theplot = new_asaplot(rcParams['plotter.gui'])
02444 #             from matplotlib import rc as rcp
02445 #             rcp('lines', linewidth=1)
02446 #             theplot.set_panels()
02447 #             ylab=s._get_ordinate_label()
02448 #             #theplot.palette(0,["#777777","red"])
02449 #             for r in xrange(s.nrow()):
02450 #                 xsm=s._getabcissa(r)
02451 #                 ysm=s._getspectrum(r)
02452 #                 xorg=orgscan._getabcissa(r)
02453 #                 yorg=orgscan._getspectrum(r)
02454 #                 theplot.clear()
02455 #                 theplot.hold()
02456 #                 theplot.set_axes('ylabel',ylab)
02457 #                 theplot.set_axes('xlabel',s._getabcissalabel(r))
02458 #                 theplot.set_axes('title',s._getsourcename(r))
02459 #                 theplot.set_line(label='Original',color="#777777")
02460 #                 theplot.plot(xorg,yorg)
02461 #                 theplot.set_line(label='Smoothed',color="red")
02462 #                 theplot.plot(xsm,ysm)
02463 #                 ### Ugly part for legend
02464 #                 for i in [0,1]:
02465 #                     theplot.subplots[0]['lines'].append(
02466 #                         [theplot.subplots[0]['axes'].lines[i]]
02467 #                         )
02468 #                 theplot.release()
02469 #                 ### Ugly part for legend
02470 #                 theplot.subplots[0]['lines']=[]
02471 #                 res = raw_input("Accept smoothing ([y]/n): ")
02472 #                 if res.upper() == 'N':
02473 #                     s._setspectrum(yorg, r)
02474 #             theplot.quit()
02475 #             del theplot
02476 #             del orgscan
02477 
02478         if insitu: self._assign(s)
02479         else: return s
02480 
02481     @asaplog_post_dec
02482     def _parse_wn(self, wn):
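        # Convert a wave-number specification (int, list/tuple, or a string
        # such as '5-8', '<=3' or '>4') into the list form used by the
        # sinusoid baseline fitters, e.g. '<=3' --> [0, 1, 2, 3] and
        # '>4' --> [5, -999] (the -999 is expanded on the C++ side, CAS-3759).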
02483         if isinstance(wn, list) or isinstance(wn, tuple):
02484             return wn
02485         elif isinstance(wn, int):
02486             return [ wn ]
02487         elif isinstance(wn, str):
02488             if '-' in wn:                            # case 'a-b' : return [a,a+1,...,b-1,b]
02489                 val = wn.split('-')
02490                 val = [int(val[0]), int(val[1])]
02491                 val.sort()
02492                 res = [i for i in xrange(val[0], val[1]+1)]
02493             elif wn[:2] == '<=' or wn[:2] == '=<':   # cases '<=a','=<a' : return [0,1,...,a-1,a]
02494                 val = int(wn[2:])+1
02495                 res = [i for i in xrange(val)]
02496             elif wn[-2:] == '>=' or wn[-2:] == '=>': # cases 'a>=','a=>' : return [0,1,...,a-1,a]
02497                 val = int(wn[:-2])+1
02498                 res = [i for i in xrange(val)]
02499             elif wn[0] == '<':                       # case '<a' :         return [0,1,...,a-2,a-1]
02500                 val = int(wn[1:])
02501                 res = [i for i in xrange(val)]
02502             elif wn[-1] == '>':                      # case 'a>' :         return [0,1,...,a-2,a-1]
02503                 val = int(wn[:-1])
02504                 res = [i for i in xrange(val)]
02505             elif wn[:2] == '>=' or wn[:2] == '=>':   # cases '>=a','=>a' : return [a,-999], which is
02506                                                      #                     then interpreted in C++
02507                                                      #                     side as [a,a+1,...,a_nyq]
02508                                                      #                     (CAS-3759)
02509                 val = int(wn[2:])
02510                 res = [val, -999]
02511                 #res = [i for i in xrange(val, self.nchan()/2+1)]
02512             elif wn[-2:] == '<=' or wn[-2:] == '=<': # cases 'a<=','a=<' : return [a,-999], which is
02513                                                      #                     then interpreted in C++
02514                                                      #                     side as [a,a+1,...,a_nyq]
02515                                                      #                     (CAS-3759)
02516                 val = int(wn[:-2])
02517                 res = [val, -999]
02518                 #res = [i for i in xrange(val, self.nchan()/2+1)]
02519             elif wn[0] == '>':                       # case '>a' :         return [a+1,-999], which is
02520                                                      #                     then interpreted in C++
02521                                                      #                     side as [a+1,a+2,...,a_nyq]
02522                                                      #                     (CAS-3759)
02523                 val = int(wn[1:])+1
02524                 res = [val, -999]
02525                 #res = [i for i in xrange(val, self.nchan()/2+1)]
02526             elif wn[-1] == '<':                      # case 'a<' :         return [a+1,-999], which is
02527                                                      #                     then interpreted in C++
02528                                                      #                     side as [a+1,a+2,...,a_nyq]
02529                                                      #                     (CAS-3759)
02530                 val = int(wn[:-1])+1
02531                 res = [val, -999]
02532                 #res = [i for i in xrange(val, self.nchan()/2+1)]
02533 
02534             return res
02535         else:
02536             msg = 'wrong value given for addwn/rejwn'
02537             raise RuntimeError(msg)
02538 
02539 
02540     @asaplog_post_dec
02541     def sinusoid_baseline(self, insitu=None, mask=None, applyfft=None, 
02542                           fftmethod=None, fftthresh=None,
02543                           addwn=None, rejwn=None, clipthresh=None,
02544                           clipniter=None, plot=None,
02545                           getresidual=None, showprogress=None, 
02546                           minnrow=None, outlog=None, blfile=None, csvformat=None):
02547         """\
02548         Return a scan which has been baselined (all rows) with sinusoidal 
02549         functions.
02550 
02551         Parameters:
02552             insitu:        if False a new scantable is returned.
02553                            Otherwise, the scaling is done in-situ
02554                            The default is taken from .asaprc (False)
02555             mask:          an optional mask
02556             applyfft:      if True use some method, such as FFT, to find
02557                            strongest sinusoidal components in the wavenumber
02558                            domain to be used for baseline fitting.
02559                            default is True.
02560             fftmethod:     method to find the strong sinusoidal components.
02561                            now only 'fft' is available and it is the default.
02562             fftthresh:     the threshold to select wave numbers to be used for
02563                            fitting from the distribution of amplitudes in the
02564                            wavenumber domain. 
02565                            both float and string values accepted. 
02566                            given a float value, the unit is set to sigma.
02567                            for string values, allowed formats include:
02568                                'xsigma' or 'x' (= x-sigma level. e.g., 
02569                                '3sigma'), or
02570                                'topx' (= the x strongest ones, e.g. 'top5'). 
02571                            default is 3.0 (unit: sigma). 
02572             addwn:         the additional wave numbers to be used for fitting.
02573                            a list or integer value is accepted to specify
02574                            the wave numbers. a string value can also be used in case
02575                            you need to specify wave numbers in a certain range, 
02576                            e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
02577                                  '<a'  (= 0,1,...,a-2,a-1),
02578                                  '>=a' (= a, a+1, ... up to the maximum wave
02579                                         number corresponding to the Nyquist
02580                                         frequency for the case of FFT).
02581                            default is [0]. 
02582             rejwn:         the wave numbers NOT to be used for fitting. 
02583                            can be set just as addwn but has higher priority: 
02584                            wave numbers which are specified both in addwn
02585                            and rejwn will NOT be used. default is []. 
02586             clipthresh:    Clipping threshold. (default is 3.0, unit: sigma)
02587             clipniter:     maximum number of iteration of 'clipthresh'-sigma 
02588                            clipping (default is 0)
02589             plot:      *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
02590                            plot the fit and the residual. Each individual
02591                            fit then has to be approved by typing 'y'
02592                            or 'n'.
02593             getresidual:   if False, returns best-fit values instead of
02594                            residual. (default is True)
02595             showprogress:  show progress status for large data.
02596                            default is True.
02597             minnrow:       minimum number of input spectra to show.
02598                            default is 1000.
02599             outlog:        Output the coefficients of the best-fit
02600                            function to logger (default is False)
02601             blfile:        Name of a text file in which the best-fit
02602                            parameter values to be written
02603                            (default is '': no file/logger output)
02604             csvformat:     if True blfile is csv-formatted, default is False.
02605 
02606         Example:
02607             # return a scan baselined by a combination of sinusoidal curves 
02608             # having wave numbers in spectral window up to 10, 
02609             # also with 3-sigma clipping, iteration up to 4 times
02610             bscan = scan.sinusoid_baseline(addwn='<=10',clipthresh=3.0,clipniter=4)
02611 
02612         Note:
02613             The best-fit parameter values output in logger and/or blfile are now
02614             based on specunit of 'channel'. 
02615         """
02616         
02617         try:
02618             varlist = vars()
02619         
02620             if insitu is None: insitu = rcParams['insitu']
02621             if insitu:
02622                 workscan = self
02623             else:
02624                 workscan = self.copy()
02625             
02626             #if mask          is None: mask          = [True for i in xrange(workscan.nchan())]
02627             if mask          is None: mask          = []
02628             if applyfft      is None: applyfft      = True
02629             if fftmethod     is None: fftmethod     = 'fft'
02630             if fftthresh     is None: fftthresh     = 3.0
02631             if addwn         is None: addwn         = [0]
02632             if rejwn         is None: rejwn         = []
02633             if clipthresh    is None: clipthresh    = 3.0
02634             if clipniter     is None: clipniter     = 0
02635             if plot          is None: plot          = False
02636             if getresidual   is None: getresidual   = True
02637             if showprogress  is None: showprogress  = True
02638             if minnrow       is None: minnrow       = 1000
02639             if outlog        is None: outlog        = False
02640             if blfile        is None: blfile        = ''
02641             if csvformat     is None: csvformat     = False
02642 
02643             if csvformat:
02644                 scsvformat = "T"
02645             else:
02646                 scsvformat = "F"
02647 
02648             #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method. 
02649             workscan._sinusoid_baseline(mask, applyfft, fftmethod.lower(), 
02650                                         str(fftthresh).lower(), 
02651                                         workscan._parse_wn(addwn), 
02652                                         workscan._parse_wn(rejwn),
02653                                         clipthresh, clipniter,
02654                                         getresidual, 
02655                                         pack_progress_params(showprogress, 
02656                                                              minnrow),
02657                                         outlog, scsvformat+blfile)
02658             workscan._add_history('sinusoid_baseline', varlist)
02659             
02660             if insitu:
02661                 self._assign(workscan)
02662             else:
02663                 return workscan
02664             
02665         except RuntimeError, e:
02666             raise_fitting_failure_exception(e)
02667 
02668 
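    # A minimal usage sketch, assuming 'scan' is an existing scantable instance
    # and the blfile name is illustrative only:
    #
    #   # use wave numbers up to 10 (plus any strong FFT components), but
    #   # exclude wave number 0 via rejwn (rejwn overrides addwn), clip at
    #   # 3 sigma for up to 2 iterations, and log the fits to a CSV file
    #   bl = scan.sinusoid_baseline(addwn='<=10', rejwn=[0],
    #                               clipthresh=3.0, clipniter=2,
    #                               blfile='sinusoid_bl.csv', csvformat=True,
    #                               insitu=False)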
02669     @asaplog_post_dec
02670     def auto_sinusoid_baseline(self, insitu=None, mask=None, applyfft=None, 
02671                                fftmethod=None, fftthresh=None,
02672                                addwn=None, rejwn=None, clipthresh=None,
02673                                clipniter=None, edge=None, threshold=None,
02674                                chan_avg_limit=None, plot=None,
02675                                getresidual=None, showprogress=None, 
02676                                minnrow=None, outlog=None, blfile=None, csvformat=None):
02677         """\
02678         Return a scan which has been baselined (all rows) with sinusoidal 
02679         functions.
02680         Spectral lines are detected first using linefinder and masked out
02681         to avoid them affecting the baseline solution.
02682 
02683         Parameters:
02684             insitu:         if False a new scantable is returned.
02685                             Otherwise, the scaling is done in-situ
02686                             The default is taken from .asaprc (False)
02687             mask:           an optional mask retrieved from scantable
02688             applyfft:       if True use some method, such as FFT, to find
02689                             strongest sinusoidal components in the wavenumber
02690                             domain to be used for baseline fitting.
02691                             default is True.
02692             fftmethod:      method to find the strong sinusoidal components.
02693                             now only 'fft' is available and it is the default.
02694             fftthresh:      the threshold to select wave numbers to be used for
02695                             fitting from the distribution of amplitudes in the
02696                             wavenumber domain. 
02697                             both float and string values accepted. 
02698                             given a float value, the unit is set to sigma.
02699                             for string values, allowed formats include:
02700                                 'xsigma' or 'x' (= x-sigma level. e.g., 
02701                                 '3sigma'), or
02702                                 'topx' (= the x strongest ones, e.g. 'top5'). 
02703                             default is 3.0 (unit: sigma). 
02704             addwn:          the additional wave numbers to be used for fitting.
02705                             a list or an integer value is accepted to specify
02706                             wave numbers explicitly. a string value can also
02707                             be used to specify wave numbers in a certain range,
02708                             e.g., 'a-b' (= a, a+1, a+2, ..., b-1, b),
02709                                   '<a'  (= 0, 1, ..., a-2, a-1),
02710                                   '>=a' (= a, a+1, ... up to the maximum wave
02711                                          number corresponding to the Nyquist
02712                                          frequency for the case of FFT).
02713                             default is [0].
02714             rejwn:          the wave numbers NOT to be used for fitting.
02715                             can be set just as addwn but has higher priority:
02716                             wave numbers specified in both addwn and rejwn
02717                             will NOT be used. default is [].
02718             clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
02719             clipniter:      maximum number of iterations of 'clipthresh'-sigma
02720                             clipping (default is 0)
02721             edge:           an optional number of channels to drop at
02722                             the edge of the spectrum. If only one value is
02723                             specified, the same number will be dropped
02724                             from both sides of the spectrum. Default
02725                             is to keep all channels. Nested tuples
02726                             represent individual edge selection for
02727                             different IFs (the number of spectral channels
02728                             can be different)
02729             threshold:      the threshold used by line finder. It is
02730                             better to keep it large as only strong lines
02731                             affect the baseline solution.
02732             chan_avg_limit: the maximum number of consecutive spectral
02733                             channels to average during the search for
02734                             weak and broad lines. The default is no
02735                             averaging (and no search for weak lines).
02736                             If such lines can affect the fitted baseline
02737                             (e.g. a high order polynomial is fitted),
02738                             increase this parameter (usually values up
02739                             to 8 are reasonable). Most users of this
02740                             method should find the default value sufficient.
02741             plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
02742                             plot the fit and the residual. In this mode each
02743                             individual fit has to be approved by typing 'y'
02744                             or 'n'
02745             getresidual:    if False, returns best-fit values instead of
02746                             residual. (default is True)
02747             showprogress:   show progress status for large data.
02748                             default is True.
02749             minnrow:        minimum number of input spectra to show.
02750                             default is 1000.
02751             outlog:         Output the coefficients of the best-fit
02752                             function to logger (default is False)
02753             blfile:         Name of a text file in which the best-fit
02754                             parameter values are to be written
02755                             (default is "": no file/logger output)
02756             csvformat:      if True, blfile is csv-formatted; default is False.
02757 
02758         Example:
02759             bscan = scan.auto_sinusoid_baseline(addwn='<=10', insitu=False)
02760         
02761         Note:
02762             The best-fit parameter values output in logger and/or blfile are now
02763             based on specunit of 'channel'. 
02764         """
02765 
02766         try:
02767             varlist = vars()
02768 
02769             if insitu is None: insitu = rcParams['insitu']
02770             if insitu:
02771                 workscan = self
02772             else:
02773                 workscan = self.copy()
02774             
02775             #if mask           is None: mask           = [True for i in xrange(workscan.nchan())]
02776             if mask           is None: mask           = []
02777             if applyfft       is None: applyfft       = True
02778             if fftmethod      is None: fftmethod      = 'fft'
02779             if fftthresh      is None: fftthresh      = 3.0
02780             if addwn          is None: addwn          = [0]
02781             if rejwn          is None: rejwn          = []
02782             if clipthresh     is None: clipthresh     = 3.0
02783             if clipniter      is None: clipniter      = 0
02784             if edge           is None: edge           = (0,0)
02785             if threshold      is None: threshold      = 3
02786             if chan_avg_limit is None: chan_avg_limit = 1
02787             if plot           is None: plot           = False
02788             if getresidual    is None: getresidual    = True
02789             if showprogress   is None: showprogress   = True
02790             if minnrow        is None: minnrow        = 1000
02791             if outlog         is None: outlog         = False
02792             if blfile         is None: blfile         = ''
02793             if csvformat      is None: csvformat      = False
02794 
02795             if csvformat:
02796                 scsvformat = "T"
02797             else:
02798                 scsvformat = "F"
02799 
02800             #CURRENTLY, PLOT=true is UNAVAILABLE UNTIL sinusoidal fitting is implemented as a fitter method. 
02801             workscan._auto_sinusoid_baseline(mask, applyfft, 
02802                                              fftmethod.lower(), 
02803                                              str(fftthresh).lower(), 
02804                                              workscan._parse_wn(addwn), 
02805                                              workscan._parse_wn(rejwn), 
02806                                              clipthresh, clipniter, 
02807                                              normalise_edge_param(edge), 
02808                                              threshold, chan_avg_limit, 
02809                                              getresidual, 
02810                                              pack_progress_params(showprogress,
02811                                                                   minnrow), 
02812                                              outlog, scsvformat+blfile)
02813             workscan._add_history("auto_sinusoid_baseline", varlist)
02814             
02815             if insitu:
02816                 self._assign(workscan)
02817             else:
02818                 return workscan
02819             
02820         except RuntimeError, e:
02821             raise_fitting_failure_exception(e)
02822 
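    # A minimal usage sketch, assuming 'scan' is an existing scantable instance:
    #
    #   # let linefinder mask strong lines (threshold=5), drop 100 channels
    #   # at both spectrum edges, and keep the 3 strongest FFT components
    #   bl = scan.auto_sinusoid_baseline(fftthresh='top3', edge=(100,),
    #                                    threshold=5, insitu=False)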
02823     @asaplog_post_dec
02824     def cspline_baseline(self, insitu=None, mask=None, npiece=None, 
02825                          clipthresh=None, clipniter=None, plot=None, 
02826                          getresidual=None, showprogress=None, minnrow=None, 
02827                          outlog=None, blfile=None, csvformat=None):
02828         """\
02829         Return a scan which has been baselined (all rows) by cubic spline 
02830         function (piecewise cubic polynomial).
02831 
02832         Parameters:
02833             insitu:       If False a new scantable is returned.
02834                           Otherwise, the scaling is done in-situ
02835                           The default is taken from .asaprc (False)
02836             mask:         An optional mask
02837             npiece:       Number of pieces. (default is 2)
02838             clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
02839             clipniter:    maximum number of iterations of 'clipthresh'-sigma
02840                           clipping (default is 0)
02841             plot:     *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
02842                           plot the fit and the residual. In this mode each
02843                           individual fit has to be approved by typing 'y'
02844                           or 'n'
02845             getresidual:  if False, returns best-fit values instead of
02846                           residual. (default is True)
02847             showprogress: show progress status for large data.
02848                           default is True.
02849             minnrow:      minimum number of input spectra to show.
02850                           default is 1000.
02851             outlog:       Output the coefficients of the best-fit
02852                           function to logger (default is False)
02853             blfile:       Name of a text file in which the best-fit
02854                           parameter values are to be written
02855                           (default is "": no file/logger output)
02856             csvformat:    if True, blfile is csv-formatted; default is False.
02857 
02858         Example:
02859             # return a scan baselined by a cubic spline consisting of 2 pieces 
02860             # (i.e., 1 internal knot),
02861             # also with 3-sigma clipping, iteration up to 4 times
02862             bscan = scan.cspline_baseline(npiece=2,clipthresh=3.0,clipniter=4)
02863         
02864         Note:
02865             The best-fit parameter values output in logger and/or blfile are now
02866             based on specunit of 'channel'. 
02867         """
02868         
02869         try:
02870             varlist = vars()
02871             
02872             if insitu is None: insitu = rcParams['insitu']
02873             if insitu:
02874                 workscan = self
02875             else:
02876                 workscan = self.copy()
02877 
02878             #if mask         is None: mask         = [True for i in xrange(workscan.nchan())]
02879             if mask         is None: mask         = []
02880             if npiece       is None: npiece       = 2
02881             if clipthresh   is None: clipthresh   = 3.0
02882             if clipniter    is None: clipniter    = 0
02883             if plot         is None: plot         = False
02884             if getresidual  is None: getresidual  = True
02885             if showprogress is None: showprogress = True
02886             if minnrow      is None: minnrow      = 1000
02887             if outlog       is None: outlog       = False
02888             if blfile       is None: blfile       = ''
02889             if csvformat    is None: csvformat    = False
02890 
02891             if csvformat:
02892                 scsvformat = "T"
02893             else:
02894                 scsvformat = "F"
02895 
02896             #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method. 
02897             workscan._cspline_baseline(mask, npiece, clipthresh, clipniter, 
02898                                        getresidual, 
02899                                        pack_progress_params(showprogress, 
02900                                                             minnrow),
02901                                        outlog, scsvformat+blfile)
02902             workscan._add_history("cspline_baseline", varlist)
02903             
02904             if insitu:
02905                 self._assign(workscan)
02906             else:
02907                 return workscan
02908             
02909         except RuntimeError, e:
02910             raise_fitting_failure_exception(e)
02911 
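    # A minimal usage sketch, assuming 'scan' is an existing scantable instance:
    #
    #   # fit a 4-piece cubic spline with 3-sigma clipping (2 iterations);
    #   # getresidual=False returns the best-fit curves instead of residuals
    #   fit = scan.cspline_baseline(npiece=4, clipthresh=3.0, clipniter=2,
    #                               getresidual=False, insitu=False)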
02912     @asaplog_post_dec
02913     def auto_cspline_baseline(self, insitu=None, mask=None, npiece=None, 
02914                               clipthresh=None, clipniter=None,
02915                               edge=None, threshold=None, chan_avg_limit=None, 
02916                               getresidual=None, plot=None,
02917                               showprogress=None, minnrow=None, outlog=None,
02918                               blfile=None, csvformat=None):
02919         """\
02920         Return a scan which has been baselined (all rows) by cubic spline
02921         function (piecewise cubic polynomial).
02922         Spectral lines are detected first using linefinder and masked out
02923         to avoid them affecting the baseline solution.
02924 
02925         Parameters:
02926             insitu:         if False a new scantable is returned.
02927                             Otherwise, the scaling is done in-situ
02928                             The default is taken from .asaprc (False)
02929             mask:           an optional mask retrieved from scantable
02930             npiece:         Number of pieces. (default is 2)
02931             clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
02932             clipniter:      maximum number of iterations of 'clipthresh'-sigma
02933                             clipping (default is 0)
02934             edge:           an optional number of channels to drop at
02935                             the edge of the spectrum. If only one value is
02936                             specified, the same number will be dropped
02937                             from both sides of the spectrum. Default
02938                             is to keep all channels. Nested tuples
02939                             represent individual edge selection for
02940                             different IFs (the number of spectral channels
02941                             can be different)
02942             threshold:      the threshold used by line finder. It is
02943                             better to keep it large as only strong lines
02944                             affect the baseline solution.
02945             chan_avg_limit: the maximum number of consecutive spectral
02946                             channels to average during the search for
02947                             weak and broad lines. The default is no
02948                             averaging (and no search for weak lines).
02949                             If such lines can affect the fitted baseline
02950                             (e.g. a high order polynomial is fitted),
02951                             increase this parameter (usually values up
02952                             to 8 are reasonable). Most users of this
02953                             method should find the default value sufficient.
02954             plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
02955                             plot the fit and the residual. In this mode each
02956                             individual fit has to be approved by typing 'y'
02957                             or 'n'
02958             getresidual:    if False, returns best-fit values instead of
02959                             residual. (default is True)
02960             showprogress:   show progress status for large data.
02961                             default is True.
02962             minnrow:        minimum number of input spectra to show.
02963                             default is 1000.
02964             outlog:         Output the coefficients of the best-fit
02965                             function to logger (default is False)
02966             blfile:         Name of a text file in which the best-fit
02967                             parameter values are to be written
02968                             (default is "": no file/logger output)
02969             csvformat:      if True, blfile is csv-formatted; default is False.
02970 
02971         Example:
02972             bscan = scan.auto_cspline_baseline(npiece=3, insitu=False)
02973         
02974         Note:
02975             The best-fit parameter values output in logger and/or blfile are now
02976             based on specunit of 'channel'. 
02977         """
02978 
02979         try:
02980             varlist = vars()
02981 
02982             if insitu is None: insitu = rcParams['insitu']
02983             if insitu:
02984                 workscan = self
02985             else:
02986                 workscan = self.copy()
02987             
02988             #if mask           is None: mask           = [True for i in xrange(workscan.nchan())]
02989             if mask           is None: mask           = []
02990             if npiece         is None: npiece         = 2
02991             if clipthresh     is None: clipthresh     = 3.0
02992             if clipniter      is None: clipniter      = 0
02993             if edge           is None: edge           = (0, 0)
02994             if threshold      is None: threshold      = 3
02995             if chan_avg_limit is None: chan_avg_limit = 1
02996             if plot           is None: plot           = False
02997             if getresidual    is None: getresidual    = True
02998             if showprogress   is None: showprogress   = True
02999             if minnrow        is None: minnrow        = 1000
03000             if outlog         is None: outlog         = False
03001             if blfile         is None: blfile         = ''
03002             if csvformat      is None: csvformat      = False
03003 
03004             if csvformat:
03005                 scsvformat = "T"
03006             else:
03007                 scsvformat = "F"
03008 
03009             #CURRENTLY, PLOT=true UNAVAILABLE UNTIL cubic spline fitting is implemented as a fitter method.
03010             workscan._auto_cspline_baseline(mask, npiece, clipthresh, 
03011                                             clipniter, 
03012                                             normalise_edge_param(edge), 
03013                                             threshold, 
03014                                             chan_avg_limit, getresidual, 
03015                                             pack_progress_params(showprogress, 
03016                                                                  minnrow), 
03017                                             outlog, scsvformat+blfile)
03018             workscan._add_history("auto_cspline_baseline", varlist)
03019             
03020             if insitu:
03021                 self._assign(workscan)
03022             else:
03023                 return workscan
03024             
03025         except RuntimeError, e:
03026             raise_fitting_failure_exception(e)
03027 
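    # A minimal usage sketch, assuming 'scan' is an existing scantable instance
    # with two IFs:
    #
    #   # nested tuples give a per-IF edge selection: drop 50 channels per
    #   # side in the first IF and 100 per side in the second
    #   bl = scan.auto_cspline_baseline(npiece=3, edge=((50, 50), (100, 100)),
    #                                   threshold=5, insitu=False)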
03028     @asaplog_post_dec
03029     def chebyshev_baseline(self, insitu=None, mask=None, order=None, 
03030                            clipthresh=None, clipniter=None, plot=None, 
03031                            getresidual=None, showprogress=None, minnrow=None, 
03032                            outlog=None, blfile=None, csvformat=None):
03033         """\
03034         Return a scan which has been baselined (all rows) by Chebyshev polynomials.
03035 
03036         Parameters:
03037             insitu:       If False a new scantable is returned.
03038                           Otherwise, the scaling is done in-situ
03039                           The default is taken from .asaprc (False)
03040             mask:         An optional mask
03041             order:        the maximum order of Chebyshev polynomial (default is 5)
03042             clipthresh:   Clipping threshold. (default is 3.0, unit: sigma)
03043             clipniter:    maximum number of iterations of 'clipthresh'-sigma
03044                           clipping (default is 0)
03045             plot:     *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
03046                           plot the fit and the residual. In this mode each
03047                           individual fit has to be approved by typing 'y'
03048                           or 'n'
03049             getresidual:  if False, returns best-fit values instead of
03050                           residual. (default is True)
03051             showprogress: show progress status for large data.
03052                           default is True.
03053             minnrow:      minimum number of input spectra to show.
03054                           default is 1000.
03055             outlog:       Output the coefficients of the best-fit
03056                           function to logger (default is False)
03057             blfile:       Name of a text file in which the best-fit
03058                           parameter values are to be written
03059                           (default is "": no file/logger output)
03060             csvformat:    if True, blfile is csv-formatted; default is False.
03061 
03062         Example:
03063             # return a scan baselined by Chebyshev polynomials
03064             # of order up to 5,
03065             # also with 3-sigma clipping, iteration up to 4 times
03066             bscan = scan.chebyshev_baseline(order=5,clipthresh=3.0,clipniter=4)
03067         
03068         Note:
03069             The best-fit parameter values output in logger and/or blfile are now
03070             based on specunit of 'channel'. 
03071         """
03072         
03073         try:
03074             varlist = vars()
03075             
03076             if insitu is None: insitu = rcParams['insitu']
03077             if insitu:
03078                 workscan = self
03079             else:
03080                 workscan = self.copy()
03081 
03082             #if mask         is None: mask         = [True for i in xrange(workscan.nchan())]
03083             if mask         is None: mask         = []
03084             if order        is None: order        = 5
03085             if clipthresh   is None: clipthresh   = 3.0
03086             if clipniter    is None: clipniter    = 0
03087             if plot         is None: plot         = False
03088             if getresidual  is None: getresidual  = True
03089             if showprogress is None: showprogress = True
03090             if minnrow      is None: minnrow      = 1000
03091             if outlog       is None: outlog       = False
03092             if blfile       is None: blfile       = ''
03093             if csvformat    is None: csvformat    = False
03094 
03095             if csvformat:
03096                 scsvformat = "T"
03097             else:
03098                 scsvformat = "F"
03099 
03100             #CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is implemented as a fitter method.
03101             workscan._chebyshev_baseline(mask, order, clipthresh, clipniter, 
03102                                          getresidual, 
03103                                          pack_progress_params(showprogress, 
03104                                                               minnrow),
03105                                          outlog, scsvformat+blfile)
03106             workscan._add_history("chebyshev_baseline", varlist)
03107             
03108             if insitu:
03109                 self._assign(workscan)
03110             else:
03111                 return workscan
03112             
03113         except RuntimeError, e:
03114             raise_fitting_failure_exception(e)
03115 
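    # A minimal usage sketch, assuming 'scan' is an existing scantable instance:
    #
    #   # fit Chebyshev polynomials up to order 10 and send the best-fit
    #   # coefficients to the logger
    #   bl = scan.chebyshev_baseline(order=10, clipthresh=3.0, clipniter=1,
    #                                outlog=True, insitu=False)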
03116     @asaplog_post_dec
03117     def auto_chebyshev_baseline(self, insitu=None, mask=None, order=None, 
03118                               clipthresh=None, clipniter=None,
03119                               edge=None, threshold=None, chan_avg_limit=None, 
03120                               getresidual=None, plot=None,
03121                               showprogress=None, minnrow=None, outlog=None,
03122                               blfile=None, csvformat=None):
03123         """\
03124         Return a scan which has been baselined (all rows) by Chebyshev polynomials. 
03125         Spectral lines are detected first using linefinder and masked out
03126         to avoid them affecting the baseline solution.
03127 
03128         Parameters:
03129             insitu:         if False a new scantable is returned.
03130                             Otherwise, the scaling is done in-situ
03131                             The default is taken from .asaprc (False)
03132             mask:           an optional mask retrieved from scantable
03133             order:          the maximum order of Chebyshev polynomial (default is 5)
03134             clipthresh:     Clipping threshold. (default is 3.0, unit: sigma)
03135             clipniter:      maximum number of iterations of 'clipthresh'-sigma
03136                             clipping (default is 0)
03137             edge:           an optional number of channels to drop at
03138                             the edge of the spectrum. If only one value is
03139                             specified, the same number will be dropped
03140                             from both sides of the spectrum. Default
03141                             is to keep all channels. Nested tuples
03142                             represent individual edge selection for
03143                             different IFs (the number of spectral channels
03144                             can be different)
03145             threshold:      the threshold used by line finder. It is
03146                             better to keep it large as only strong lines
03147                             affect the baseline solution.
03148             chan_avg_limit: the maximum number of consecutive spectral
03149                             channels to average during the search for
03150                             weak and broad lines. The default is no
03151                             averaging (and no search for weak lines).
03152                             If such lines can affect the fitted baseline
03153                             (e.g. a high order polynomial is fitted),
03154                             increase this parameter (usually values up
03155                             to 8 are reasonable). Most users of this
03156                             method should find the default value sufficient.
03157             plot:       *** CURRENTLY UNAVAILABLE, ALWAYS FALSE ***
03158                             plot the fit and the residual. In this mode each
03159                             individual fit has to be approved by typing 'y'
03160                             or 'n'
03161             getresidual:    if False, returns best-fit values instead of
03162                             residual. (default is True)
03163             showprogress:   show progress status for large data.
03164                             default is True.
03165             minnrow:        minimum number of input spectra to show.
03166                             default is 1000.
03167             outlog:         Output the coefficients of the best-fit
03168                             function to logger (default is False)
03169             blfile:         Name of a text file in which the best-fit
03170                             parameter values are to be written
03171                             (default is "": no file/logger output)
03172             csvformat:      if True, blfile is csv-formatted; default is False.
03173 
03174         Example:
03175             bscan = scan.auto_chebyshev_baseline(order=5, insitu=False)
03176         
03177         Note:
03178             The best-fit parameter values output in logger and/or blfile are now
03179             based on specunit of 'channel'. 
03180         """
03181 
03182         try:
03183             varlist = vars()
03184 
03185             if insitu is None: insitu = rcParams['insitu']
03186             if insitu:
03187                 workscan = self
03188             else:
03189                 workscan = self.copy()
03190             
03191             #if mask           is None: mask           = [True for i in xrange(workscan.nchan())]
03192             if mask           is None: mask           = []
03193             if order          is None: order          = 5
03194             if clipthresh     is None: clipthresh     = 3.0
03195             if clipniter      is None: clipniter      = 0
03196             if edge           is None: edge           = (0, 0)
03197             if threshold      is None: threshold      = 3
03198             if chan_avg_limit is None: chan_avg_limit = 1
03199             if plot           is None: plot           = False
03200             if getresidual    is None: getresidual    = True
03201             if showprogress   is None: showprogress   = True
03202             if minnrow        is None: minnrow        = 1000
03203             if outlog         is None: outlog         = False
03204             if blfile         is None: blfile         = ''
03205             if csvformat      is None: csvformat      = False
03206 
03207             if csvformat:
03208                 scsvformat = "T"
03209             else:
03210                 scsvformat = "F"
03211 
03212             #CURRENTLY, PLOT=true UNAVAILABLE UNTIL Chebyshev fitting is implemented as a fitter method.
03213             workscan._auto_chebyshev_baseline(mask, order, clipthresh, 
03214                                               clipniter, 
03215                                               normalise_edge_param(edge), 
03216                                               threshold, 
03217                                               chan_avg_limit, getresidual, 
03218                                               pack_progress_params(showprogress, 
03219                                                                    minnrow), 
03220                                               outlog, scsvformat+blfile)
03221             workscan._add_history("auto_chebyshev_baseline", varlist)
03222             
03223             if insitu:
03224                 self._assign(workscan)
03225             else:
03226                 return workscan
03227             
03228         except RuntimeError, e:
03229             raise_fitting_failure_exception(e)
03230 
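    # A minimal usage sketch, assuming 'scan' is an existing scantable instance:
    #
    #   # average up to 4 consecutive channels while searching for weak,
    #   # broad lines before the Chebyshev fit
    #   bl = scan.auto_chebyshev_baseline(order=10, threshold=5,
    #                                     chan_avg_limit=4, insitu=False)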
03231     @asaplog_post_dec
03232     def poly_baseline(self, mask=None, order=None, insitu=None, plot=None, 
03233                       getresidual=None, showprogress=None, minnrow=None, 
03234                       outlog=None, blfile=None, csvformat=None):
03235         """\
03236         Return a scan which has been baselined (all rows) by a polynomial.
03237         Parameters:
03238             insitu:       if False a new scantable is returned.
03239                           Otherwise, the scaling is done in-situ
03240                           The default is taken from .asaprc (False)
03241             mask:         an optional mask
03242             order:        the order of the polynomial (default is 0)
03243             plot:         plot the fit and the residual. In this mode each
03244                           individual fit has to be approved by typing 'y'
03245                           or 'n'
03246             getresidual:  if False, returns best-fit values instead of
03247                           residual. (default is True)
03248             showprogress: show progress status for large data.
03249                           default is True.
03250             minnrow:      minimum number of input spectra to show.
03251                           default is 1000.
03252             outlog:       Output the coefficients of the best-fit
03253                           function to logger (default is False)
03254             blfile:       Name of a text file in which the best-fit
03255                           parameter values are to be written
03256                           (default is "": no file/logger output)
03257             csvformat:    if True, blfile is csv-formatted; default is False.
03258 
03259         Example:
03260             # return a scan baselined by a third order polynomial,
03261             # not using a mask
03262             bscan = scan.poly_baseline(order=3)
03263         """
03264         
03265         try:
03266             varlist = vars()
03267         
03268             if insitu is None: 
03269                 insitu = rcParams["insitu"]
03270             if insitu:
03271                 workscan = self
03272             else:
03273                 workscan = self.copy()
03274 
03275             #if mask         is None: mask         = [True for i in \
03276             #                                           xrange(workscan.nchan())]
03277             if mask         is None: mask         = []
03278             if order        is None: order        = 0
03279             if plot         is None: plot         = False
03280             if getresidual  is None: getresidual  = True
03281             if showprogress is None: showprogress = True
03282             if minnrow      is None: minnrow      = 1000
03283             if outlog       is None: outlog       = False
03284             if blfile       is None: blfile       = ""
03285             if csvformat    is None: csvformat    = False
03286 
03287             if csvformat:
03288                 scsvformat = "T"
03289             else:
03290                 scsvformat = "F"
03291 
03292             if plot:
03293                 outblfile = (blfile != "") and \
03294                     os.path.exists(os.path.expanduser(
03295                                     os.path.expandvars(blfile))
03296                                    )
03297                 if outblfile: 
03298                     blf = open(blfile, "a")
03299                 
03300                 f = fitter()
03301                 f.set_function(lpoly=order)
03302                 
03303                 rows = xrange(workscan.nrow())
03304                 #if len(rows) > 0: workscan._init_blinfo()
03305 
03306                 action = "H"
03307                 for r in rows:
03308                     f.x = workscan._getabcissa(r)
03309                     f.y = workscan._getspectrum(r)
03310                     if mask:
03311                         f.mask = mask_and(mask, workscan._getmask(r))    # (CAS-1434)
03312                     else: # mask=None
03313                         f.mask = workscan._getmask(r)
03314                     
03315                     f.data = None
03316                     f.fit()
03317 
03318                     if action != "Y": # skip plotting when accepting all
03319                         f.plot(residual=True)
03320                     #accept_fit = raw_input("Accept fit ( [y]/n ): ")
03321                     #if accept_fit.upper() == "N":
03322                     #    #workscan._append_blinfo(None, None, None)
03323                     #    continue
03324                     accept_fit = self._get_verify_action("Accept fit?",action)
03325                     if r == 0: action = None
03326                     if accept_fit.upper() == "N":
03327                         continue
03328                     elif accept_fit.upper() == "R":
03329                         break
03330                     elif accept_fit.upper() == "A":
03331                         action = "Y"
03332                     
03333                     blpars = f.get_parameters()
03334                     masklist = workscan.get_masklist(f.mask, row=r, silent=True)
03335                     #workscan._append_blinfo(blpars, masklist, f.mask)
03336                     workscan._setspectrum((f.fitter.getresidual() 
03337                                            if getresidual else f.fitter.getfit()), r)
03338                     
03339                     if outblfile:
03340                         rms = workscan.get_rms(f.mask, r)
03341                         dataout = \
03342                             workscan.format_blparams_row(blpars["params"], 
03343                                                          blpars["fixed"], 
03344                                                          rms, str(masklist), 
03345                                                          r, True, csvformat)
03346                         blf.write(dataout)
03347 
03348                 f._p.unmap()
03349                 f._p = None
03350 
03351                 if outblfile: 
03352                     blf.close()
03353             else:
03354                 workscan._poly_baseline(mask, order, getresidual, 
03355                                         pack_progress_params(showprogress, 
03356                                                              minnrow), 
03357                                         outlog, scsvformat+blfile)
03358             
03359             workscan._add_history("poly_baseline", varlist)
03360             
03361             if insitu:
03362                 self._assign(workscan)
03363             else:
03364                 return workscan
03365             
03366         except RuntimeError, e:
03367             raise_fitting_failure_exception(e)
03368 
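    # A minimal usage sketch, assuming 'scan' is an existing scantable instance
    # and the blfile name is illustrative only:
    #
    #   # interactive fitting: each row's fit is plotted and can be accepted
    #   # ('y'), rejected ('n'), accepted for all remaining rows ('a') or
    #   # stopped ('r'); note that in plot mode parameters are appended to
    #   # blfile only if that file already exists
    #   bl = scan.poly_baseline(order=2, plot=True,
    #                           blfile='poly_bl.txt', insitu=False)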
03369     @asaplog_post_dec
03370     def auto_poly_baseline(self, mask=None, order=None, edge=None, 
03371                            threshold=None, chan_avg_limit=None,
03372                            plot=None, insitu=None,
03373                            getresidual=None, showprogress=None, 
03374                            minnrow=None, outlog=None, blfile=None, csvformat=None):
03375         """\
03376         Return a scan which has been baselined (all rows) by a polynomial.
03377         Spectral lines are detected first using linefinder and masked out
03378         to avoid them affecting the baseline solution.
03379 
03380         Parameters:
03381             insitu:         if False a new scantable is returned.
03382                             Otherwise, the scaling is done in-situ
03383                             The default is taken from .asaprc (False)
03384             mask:           an optional mask retrieved from scantable
03385             order:          the order of the polynomial (default is 0)
03386             edge:           an optional number of channels to drop at
03387                             the edge of the spectrum. If only one value is
03388                             specified, the same number will be dropped
03389                             from both sides of the spectrum. Default
03390                             is to keep all channels. Nested tuples
03391                             represent individual edge selection for
03392                             different IFs (the number of spectral channels
03393                             can be different)
03394             threshold:      the threshold used by line finder. It is
03395                             better to keep it large as only strong lines
03396                             affect the baseline solution.
03397             chan_avg_limit: the maximum number of consecutive spectral
03398                             channels to average during the search for
03399                             weak and broad lines. The default is no
03400                             averaging (and no search for weak lines).
03401                             If such lines can affect the fitted baseline
03402                             (e.g. a high order polynomial is fitted),
03403                             increase this parameter (usually values up
03404                             to 8 are reasonable). Most users of this
03405                             method should find the default value sufficient.
03406             plot:           plot the fit and the residual. In this mode each
03407                             individual fit has to be approved by typing 'y'
03408                             or 'n'
03409             getresidual:    if False, returns best-fit values instead of
03410                             residual. (default is True)
03411             showprogress:   show progress status for large data.
03412                             default is True.
03413             minnrow:        minimum number of input spectra to show.
03414                             default is 1000.
03415             outlog:         Output the coefficients of the best-fit
03416                             function to logger (default is False)
03417             blfile:         Name of a text file in which the best-fit
03418                             parameter values are to be written
03419                             (default is "": no file/logger output)
03420             csvformat:      if True, blfile is csv-formatted; default is False.
03421 
03422         Example:
03423             bscan = scan.auto_poly_baseline(order=7, insitu=False)
03424         """
03425 
03426         try:
03427             varlist = vars()
03428 
03429             if insitu is None: 
03430                 insitu = rcParams['insitu']
03431             if insitu:
03432                 workscan = self
03433             else:
03434                 workscan = self.copy()
03435 
03436             #if mask           is None: mask           = [True for i in xrange(workscan.nchan())]
03437             if mask           is None: mask           = []
03438             if order          is None: order          = 0
03439             if edge           is None: edge           = (0, 0)
03440             if threshold      is None: threshold      = 3
03441             if chan_avg_limit is None: chan_avg_limit = 1
03442             if plot           is None: plot           = False
03443             if getresidual    is None: getresidual    = True
03444             if showprogress   is None: showprogress   = True
03445             if minnrow        is None: minnrow        = 1000
03446             if outlog         is None: outlog         = False
03447             if blfile         is None: blfile         = ''
03448             if csvformat      is None: csvformat      = False
03449 
03450             if csvformat:
03451                 scsvformat = "T"
03452             else:
03453                 scsvformat = "F"
03454 
03455             edge = normalise_edge_param(edge)
03456 
03457             if plot:
03458                 outblfile = (blfile != "") and \
03459                     os.path.exists(os.path.expanduser(os.path.expandvars(blfile)))
03460                 if outblfile: blf = open(blfile, "a")
03461                 
03462                 from asap.asaplinefind import linefinder
03463                 fl = linefinder()
03464                 fl.set_options(threshold=threshold, avg_limit=chan_avg_limit)
03465                 fl.set_scan(workscan)
03466                 
03467                 f = fitter()
03468                 f.set_function(lpoly=order)
03469 
03470                 rows = xrange(workscan.nrow())
03471                 #if len(rows) > 0: workscan._init_blinfo()
03472 
03473                 action = "H"
03474                 for r in rows:
03475                     idx = 2*workscan.getif(r)
03476                     if mask:
03477                         msk = mask_and(mask, workscan._getmask(r)) # (CAS-1434)
03478                     else: # mask=None
03479                         msk = workscan._getmask(r)
03480                     fl.find_lines(r, msk, edge[idx:idx+2])  
03481 
03482                     f.x = workscan._getabcissa(r)
03483                     f.y = workscan._getspectrum(r)
03484                     f.mask = fl.get_mask()
03485                     f.data = None
03486                     f.fit()
03487 
03488                     if action != "Y": # skip plotting when accepting all
03489                         f.plot(residual=True)
03490                     #accept_fit = raw_input("Accept fit ( [y]/n ): ")
03491                     accept_fit = self._get_verify_action("Accept fit?",action)
03492                     if r == 0: action = None
03493                     if accept_fit.upper() == "N":
03494                         #workscan._append_blinfo(None, None, None)
03495                         continue
03496                     elif accept_fit.upper() == "R":
03497                         break
03498                     elif accept_fit.upper() == "A":
03499                         action = "Y"
03500 
03501                     blpars = f.get_parameters()
03502                     masklist = workscan.get_masklist(f.mask, row=r, silent=True)
03503                     #workscan._append_blinfo(blpars, masklist, f.mask)
03504                     workscan._setspectrum(
03505                         (f.fitter.getresidual() if getresidual 
03506                                                 else f.fitter.getfit()), r
03507                         )
03508 
03509                     if outblfile:
03510                         rms = workscan.get_rms(f.mask, r)
03511                         dataout = \
03512                             workscan.format_blparams_row(blpars["params"], 
03513                                                          blpars["fixed"], 
03514                                                          rms, str(masklist), 
03515                                                          r, True, csvformat)
03516                         blf.write(dataout)
03517                     
03518                 f._p.unmap()
03519                 f._p = None
03520 
03521                 if outblfile: blf.close()
03522             else:
03523                 workscan._auto_poly_baseline(mask, order, edge, threshold, 
03524                                              chan_avg_limit, getresidual, 
03525                                              pack_progress_params(showprogress,
03526                                                                   minnrow), 
03527                                              outlog, scsvformat+blfile)
03528 
03529             workscan._add_history("auto_poly_baseline", varlist)
03530             
03531             if insitu:
03532                 self._assign(workscan)
03533             else:
03534                 return workscan
03535             
03536         except RuntimeError, e:
03537             raise_fitting_failure_exception(e)
03538 
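    # A minimal usage sketch, assuming 'scan' is an existing scantable instance:
    #
    #   # 5th-order polynomial with automatic line masking and 30 channels
    #   # dropped at both spectrum edges
    #   bl = scan.auto_poly_baseline(order=5, edge=(30,), threshold=3,
    #                                insitu=False)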
03539     def _init_blinfo(self):
03540         """\
03541         Initialise the following three auxiliary members:
03542            blpars : parameters of the best-fit baseline, 
03543            masklists : mask data (edge positions of masked channels) and 
03544            actualmask : mask data (in boolean list), 
03545         to keep for use later (including output to logger/text files). 
03546         Used by poly_baseline() and auto_poly_baseline() in case of
03547         'plot=True'. 
03548         """
03549         self.blpars = []
03550         self.masklists = []
03551         self.actualmask = []
03552         return
03553 
03554     def _append_blinfo(self, data_blpars, data_masklists, data_actualmask):
03555         """\
03556         Append baseline-fitting related info to blpars, masklist and
03557         actualmask. 
03558         """
03559         self.blpars.append(data_blpars)
03560         self.masklists.append(data_masklists)
03561         self.actualmask.append(data_actualmask)
03562         return
03563         
03564     @asaplog_post_dec
03565     def rotate_linpolphase(self, angle):
03566         """\
03567         Rotate the phase of the complex polarization O=Q+iU correlation.
03568         This is always done in situ in the raw data.  So if you call this
03569         function more than once then each call rotates the phase further.
03570 
03571         Parameters:
03572 
03573             angle:   The angle (degrees) to rotate (add) by.
03574 
03575         Example::
03576 
03577             scan.rotate_linpolphase(2.3)
03578 
03579         """
03580         varlist = vars()
03581         self._math._rotate_linpolphase(self, angle)
03582         self._add_history("rotate_linpolphase", varlist)
03583         return
03584 
03585     @asaplog_post_dec
03586     def rotate_xyphase(self, angle):
03587         """\
03588         Rotate the phase of the XY correlation.  This is always done in situ
03589         in the data.  So if you call this function more than once
03590         then each call rotates the phase further.
03591 
03592         Parameters:
03593 
03594             angle:   The angle (degrees) to rotate (add) by.
03595 
03596         Example::
03597 
03598             scan.rotate_xyphase(2.3)
03599 
03600         """
03601         varlist = vars()
03602         self._math._rotate_xyphase(self, angle)
03603         self._add_history("rotate_xyphase", varlist)
03604         return
03605 
03606     @asaplog_post_dec
03607     def swap_linears(self):
03608         """\
03609         Swap the linear polarisations XX and YY, or rather the first two
03610         polarisations, as this also works for circulars.
03611         """
03612         varlist = vars()
03613         self._math._swap_linears(self)
03614         self._add_history("swap_linears", varlist)
03615         return
03616 
03617     @asaplog_post_dec
03618     def invert_phase(self):
03619         """\
03620         Invert the phase of the complex polarisation
03621         """
03622         varlist = vars()
03623         self._math._invert_phase(self)
03624         self._add_history("invert_phase", varlist)
03625         return
03626 
03627     @asaplog_post_dec
03628     def add(self, offset, insitu=None):
03629         """\
03630         Return a scan where all spectra have the offset added
03631 
03632         Parameters:
03633 
03634             offset:      the offset
03635 
03636             insitu:      if False a new scantable is returned.
03637                          Otherwise, the scaling is done in-situ
03638                          The default is taken from .asaprc (False)
03639 
03640         """
03641         if insitu is None: insitu = rcParams['insitu']
03642         self._math._setinsitu(insitu)
03643         varlist = vars()
03644         s = scantable(self._math._unaryop(self, offset, "ADD", False))
03645         s._add_history("add", varlist)
03646         if insitu:
03647             self._assign(s)
03648         else:
03649             return s
03650 
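    # A minimal usage sketch, assuming 'scan' is an existing scantable instance:
    #
    #   shifted = scan.add(1.5, insitu=False)   # new scantable, offset by +1.5
    #   scan.add(-1.5, insitu=True)             # modify the scantable in place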
03651     @asaplog_post_dec
03652     def scale(self, factor, tsys=True, insitu=None):
03653         """\
03654 
03655         Return a scan where all spectra are scaled by the given 'factor'
03656 
03657         Parameters:
03658 
03659             factor:      the scaling factor (float or 1D float list)
03660 
03661             insitu:      if False a new scantable is returned.
03662                          Otherwise, the scaling is done in-situ
03663                          The default is taken from .asaprc (False)
03664 
03665             tsys:        if True (default) then apply the operation to Tsys
03666                          as well as the data
03667 
03668         """
03669         if insitu is None: insitu = rcParams['insitu']
03670         self._math._setinsitu(insitu)
03671         varlist = vars()
03672         s = None
03673         import numpy
03674         if isinstance(factor, list) or isinstance(factor, numpy.ndarray):
03675             if isinstance(factor[0], list) or isinstance(factor[0], 
03676                                                          numpy.ndarray):
03677                 from asapmath import _array2dOp
03678                 s = _array2dOp( self, factor, "MUL", tsys, insitu )
03679             else:
03680                 s = scantable( self._math._arrayop( self, factor, 
03681                                                     "MUL", tsys ) )
03682         else:
03683             s = scantable(self._math._unaryop(self, factor, "MUL", tsys))
03684         s._add_history("scale", varlist)
03685         if insitu:
03686             self._assign(s)
03687         else:
03688             return s
03689 
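    # A minimal usage sketch, assuming 'scan' is an existing scantable instance;
    # the per-channel case assumes the list length matches scan.nchan():
    #
    #   cal = scan.scale(1.23, tsys=True, insitu=False)   # one factor for all
    #   # a 1D float list is applied element-wise along the spectra;
    #   # tsys=False leaves the system temperatures untouched
    #   cal = scan.scale([1.0] * scan.nchan(), tsys=False, insitu=False)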
03690     @preserve_selection
03691     def set_sourcetype(self, match, matchtype="pattern", 
03692                        sourcetype="reference"):
03693         """\
03694         Set the type of the source to be a source or reference scan
03695         using the provided pattern.
03696 
03697         Parameters:
03698 
03699             match:          a Unix style pattern, regular expression or selector
03700 
03701             matchtype:      'pattern' (default) UNIX style pattern or
03702                             'regex' regular expression
03703 
03704             sourcetype:     the type of the source to use (source/reference)
03705 
03706         """
03707         varlist = vars()
03708         stype = -1
03709         if sourcetype.lower().startswith("r") or sourcetype.lower() == "off":
03710             stype = 1
03711         elif sourcetype.lower().startswith("s") or sourcetype.lower() == "on":
03712             stype = 0
03713         else:
03714             raise ValueError("Illegal sourcetype, use s(ource)/on or r(eference)/off")
03715         if matchtype.lower().startswith("p"):
03716             matchtype = "pattern"
03717         elif matchtype.lower().startswith("r"):
03718             matchtype = "regex"
03719         else:
03720             raise ValueError("Illegal matchtype, use p(attern) or r(egex)")
03721         sel = selector()
03722         if isinstance(match, selector):
03723             sel = match
03724         else:
03725             sel.set_query("SRCNAME=%s('%s')" % (matchtype, match))
03726         self.set_selection(sel)
03727         self._setsourcetype(stype)
03728         self._add_history("set_sourcetype", varlist)
03729 
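    # A minimal usage sketch for set_sourcetype(), assuming a hypothetical
    # position-switched data set in which reference scans carry a trailing '_R'
    # in their source names:
    #
    #     from asap import scantable
    #     s = scantable('obs.asap', average=False)
    #     s.set_sourcetype('*_R', matchtype='pattern', sourcetype='reference')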
03730     @asaplog_post_dec
03731     @preserve_selection
03732     def auto_quotient(self, preserve=True, mode='paired', verify=False):
03733         """\
03734         This function builds quotients automatically.
03735         It assumes the observation has the same number of
03736         "ons" and "offs".
03737 
03738         Parameters:
03739 
03740             preserve:       you can preserve (default) the continuum or
03741                             remove it.  The equations used are
03742 
03743                             preserve: Output = Toff * (on/off) - Toff
03744 
03745                             remove:   Output = Toff * (on/off) - Ton
03746 
03747             mode:           the on/off detection mode
03748                             'paired' (default)
03749                             identifies 'off' scans by the
03750                             trailing '_R' (Mopra/Parkes) or
03751                             '_e'/'_w' (Tid) and matches
03752                             on/off pairs from the observing pattern
03753                             'time'
03754                             finds the closest off in time
03755 
03756         .. todo:: verify argument is not implemented
03757 
03758         """
03759         varlist = vars()
03760         modes = ["time", "paired"]
03761         if mode not in modes:
03762             msg = "Please provide a valid mode. Valid modes are %s" % (modes)
03763             raise ValueError(msg)
03764         s = None
03765         if mode.lower() == "paired":
03766             sel = self.get_selection()
03767             sel.set_query("SRCTYPE==psoff")
03768             self.set_selection(sel)
03769             offs = self.copy()
03770             sel.set_query("SRCTYPE==pson")
03771             self.set_selection(sel)
03772             ons = self.copy()
03773             s = scantable(self._math._quotient(ons, offs, preserve))
03774         elif mode.lower() == "time":
03775             s = scantable(self._math._auto_quotient(self, mode, preserve))
03776         s._add_history("auto_quotient", varlist)
03777         return s
03778 
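    # A minimal usage sketch for auto_quotient(), assuming a hypothetical
    # position-switched observation 'posswitch.asap' with matching on/off scans:
    #
    #     from asap import scantable
    #     s = scantable('posswitch.asap', average=False)
    #     q = s.auto_quotient(preserve=True, mode='paired')   # Toff*(on/off) - Toff
    #     q2 = s.auto_quotient(preserve=False, mode='time')   # continuum removed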
03779     @asaplog_post_dec
03780     def mx_quotient(self, mask = None, weight='median', preserve=True):
03781         """\
03782         Form a quotient using "off" beams when observing in "MX" mode.
03783 
03784         Parameters:
03785 
03786             mask:           an optional mask to be used when weight == 'stddev'
03787 
03788             weight:         How to average the off beams.  Default is 'median'.
03789 
03790             preserve:       you can preserve (default) the continuum or
03791                             remove it.  The equations used are:
03792 
03793                                 preserve: Output = Toff * (on/off) - Toff
03794 
03795                                 remove:   Output = Toff * (on/off) - Ton
03796 
03797         """
03798         mask = mask or ()
03799         varlist = vars()
03800         on = scantable(self._math._mx_extract(self, 'on'))
03801         preoff = scantable(self._math._mx_extract(self, 'off'))
03802         off = preoff.average_time(mask=mask, weight=weight, scanav=False)
03803         from asapmath  import quotient
03804         q = quotient(on, off, preserve)
03805         q._add_history("mx_quotient", varlist)
03806         return q
03807 
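    # A minimal usage sketch for mx_quotient(), assuming a hypothetical MX-mode
    # observation 'mx_obs.asap':
    #
    #     from asap import scantable
    #     s = scantable('mx_obs.asap', average=False)
    #     q = s.mx_quotient(weight='median', preserve=True)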
03808     @asaplog_post_dec
03809     def freq_switch(self, insitu=None):
03810         """\
03811         Apply frequency switching to the data.
03812 
03813         Parameters:
03814 
03815             insitu:      if False a new scantable is returned.
03816                          Otherwise, the switching is done in-situ.
03817                          The default is taken from .asaprc (False)
03818 
03819         """
03820         if insitu is None: insitu = rcParams['insitu']
03821         self._math._setinsitu(insitu)
03822         varlist = vars()
03823         s = scantable(self._math._freqswitch(self))
03824         s._add_history("freq_switch", varlist)
03825         if insitu:
03826             self._assign(s)
03827         else:
03828             return s
03829 
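    # A minimal usage sketch for freq_switch(), assuming a hypothetical
    # frequency-switched data set 'fsw.asap':
    #
    #     from asap import scantable
    #     s = scantable('fsw.asap', average=False)
    #     fs = s.freq_switch(insitu=False)   # returns a new, switched scantable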
03830     @asaplog_post_dec
03831     def recalc_azel(self):
03832         """Recalculate the azimuth and elevation for each position."""
03833         varlist = vars()
03834         self._recalcazel()
03835         self._add_history("recalc_azel", varlist)
03836         return
03837 
03838     @asaplog_post_dec
03839     def __add__(self, other):
03840         """
03841         implicit on all axes and on Tsys
03842         """
03843         varlist = vars()
03844         s = self.__op( other, "ADD" ) 
03845         s._add_history("operator +", varlist)
03846         return s
03847 
03848     @asaplog_post_dec
03849     def __sub__(self, other):
03850         """
03851         implicit on all axes and on Tsys
03852         """
03853         varlist = vars()
03854         s = self.__op( other, "SUB" ) 
03855         s._add_history("operator -", varlist)
03856         return s
03857 
03858     @asaplog_post_dec
03859     def __mul__(self, other):
03860         """
03861         implicit on all axes and on Tsys
03862         """
03863         varlist = vars()
03864         s = self.__op( other, "MUL" )
03865         s._add_history("operator *", varlist)
03866         return s
03867 
03868 
03869     @asaplog_post_dec
03870     def __div__(self, other):
03871         """
03872         implicit on all axes and on Tsys
03873         """
03874         varlist = vars()
03875         s = self.__op( other, "DIV" ) 
03876         s._add_history("operator /", varlist)
03877         return s
03878 
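    # A minimal usage sketch for the arithmetic operators above, assuming two
    # hypothetical scantables of matching shape, 'on.asap' and 'off.asap':
    #
    #     from asap import scantable
    #     on = scantable('on.asap', average=False)
    #     off = scantable('off.asap', average=False)
    #     quot = on / off                      # scantable-by-scantable division
    #     shifted = on + 3.0                   # scalar offset on every channel
    #     factors = [1.0] * on.nchan(on.getif(0))
    #     scaled = on * factors                # per-channel factors via a list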
03879     @asaplog_post_dec
03880     def __op( self, other, mode ):
03881         s = None
03882         if isinstance(other, scantable):
03883             s = scantable(self._math._binaryop(self, other, mode))
03884         elif isinstance(other, float):
03885             if mode == "DIV" and other == 0.0:
03886                 raise ZeroDivisionError("Dividing by zero is not recommended")
03887             s = scantable(self._math._unaryop(self, other, mode, False))
03888         elif isinstance(other, list) or isinstance(other, numpy.ndarray):
03889             if isinstance(other[0], list) \
03890                     or isinstance(other[0], numpy.ndarray):
03891                 from asapmath import _array2dOp
03892                 s = _array2dOp( self, other, mode, False )
03893             else:
03894                 s = scantable( self._math._arrayop( self, other, 
03895                                                     mode, False ) )
03896         else:
03897             raise TypeError("Other input is not a scantable, float, or array")
03898         return s
03899 
03900     @asaplog_post_dec
03901     def get_fit(self, row=0):
03902         """\
03903         Print or return the stored fits for a row in the scantable
03904 
03905         Parameters:
03906 
03907             row:    the row which the fit has been applied to.
03908 
03909         """
03910         if row >= self.nrow():
03911             return
03912         from asap.asapfit import asapfit
03913         fit = asapfit(self._getfit(row))
03914         asaplog.push( '%s' %(fit) )
03915         return fit.as_dict()
03916 
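    # A minimal usage sketch for get_fit(), assuming a scantable 's' for which a
    # fit has previously been stored (for example via the asap fitter):
    #
    #     info = s.get_fit(row=0)   # logs the stored fit and returns it as a dict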
03917     @preserve_selection
03918     def flag_nans(self):
03919         """\
03920         Utility function to flag NaN values in the scantable.
03921         """
03922         import numpy
03923         basesel = self.get_selection()
03924         for i in range(self.nrow()):
03925             sel = self.get_row_selector(i)
03926             self.set_selection(basesel+sel)
03927             nans = numpy.isnan(self._getspectrum(0))
03928             if numpy.any(nans):
03929                 bnans = [ bool(v) for v in nans]
03930                 self.flag(bnans)
03931 
03932     def get_row_selector(self, rowno):
03933         return selector(rows=[rowno])
03934 
03935     def _add_history(self, funcname, parameters):
03936         if not rcParams['scantable.history']:
03937             return
03938         # create date
03939         sep = "##"
03940         from datetime import datetime
03941         dstr = datetime.now().strftime('%Y/%m/%d %H:%M:%S')
03942         hist = dstr+sep
03943         hist += funcname+sep
03944         if parameters.has_key('self'): 
03945             del parameters['self']
03946         for k, v in parameters.iteritems():
03947             if type(v) is dict:
03948                 for k2, v2 in v.iteritems():
03949                     hist += k2
03950                     hist += "="
03951                     if isinstance(v2, scantable):
03952                         hist += 'scantable'
03953                     elif k2 == 'mask':
03954                         if isinstance(v2, list) or isinstance(v2, tuple):
03955                             hist += str(self._zip_mask(v2))
03956                         else:
03957                             hist += str(v2)
03958                     else:
03959                         hist += str(v2)
03960             else:
03961                 hist += k
03962                 hist += "="
03963                 if isinstance(v, scantable):
03964                     hist += 'scantable'
03965                 elif k == 'mask':
03966                     if isinstance(v, list) or isinstance(v, tuple):
03967                         hist += str(self._zip_mask(v))
03968                     else:
03969                         hist += str(v)
03970                 else:
03971                     hist += str(v)
03972             hist += sep
03973         hist = hist[:-2] # remove trailing '##'
03974         self._addhistory(hist)
03975 
03976 
03977     def _zip_mask(self, mask):
03978         mask = list(mask)
03979         i = 0
03980         segments = []
03981         while mask[i:].count(1):
03982             i += mask[i:].index(1)
03983             if mask[i:].count(0):
03984                 j = i + mask[i:].index(0)
03985             else:
03986                 j = len(mask)
03987             segments.append([i, j])
03988             i = j
03989         return segments
03990 
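    # _zip_mask() compresses a boolean channel mask into [start, stop) index
    # segments for the history record. A standalone sketch of the same logic,
    # using a made-up mask:
    #
    #     mask = [0, 1, 1, 1, 0, 0, 1, 1]
    #     segments, i = [], 0
    #     while mask[i:].count(1):
    #         i += mask[i:].index(1)
    #         j = i + mask[i:].index(0) if mask[i:].count(0) else len(mask)
    #         segments.append([i, j])
    #         i = j
    #     print(segments)   # -> [[1, 4], [6, 8]]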
03991     def _get_ordinate_label(self):
03992         fu = "("+self.get_fluxunit()+")"
03993         import re
03994         lbl = "Intensity"
03995         if re.match(".K.", fu):
03996             lbl = "Brightness Temperature "+ fu
03997         elif re.match(".Jy.", fu):
03998             lbl = "Flux density "+ fu
03999         return lbl
04000 
04001     def _check_ifs(self):
04002 #        return len(set([self.nchan(i) for i in self.getifnos()])) == 1
04003         nchans = [self.nchan(i) for i in self.getifnos()]
04004         nchans = filter(lambda t: t > 0, nchans)
04005         return (sum(nchans)/len(nchans) == nchans[0])
04006 
04007     @asaplog_post_dec
04008     def _fill(self, names, unit, average, opts={}):
04009         first = True
04010         fullnames = []
04011         for name in names:
04012             name = os.path.expandvars(name)
04013             name = os.path.expanduser(name)
04014             if not os.path.exists(name):
04015                 msg = "File '%s' does not exist" % (name)
04016                 raise IOError(msg)
04017             fullnames.append(name)
04018         if average:
04019             asaplog.push('Auto averaging integrations')
04020         stype = int(rcParams['scantable.storage'].lower() == 'disk')
04021         for name in fullnames:
04022             tbl = Scantable(stype)
04023             if is_ms( name ):
04024                 r = msfiller( tbl )
04025             else:
04026                 r = filler( tbl )
04027             msg = "Importing %s..." % (name)
04028             asaplog.push(msg, False)
04029             r.open(name, opts)
04030             rx = rcParams['scantable.reference']
04031             r.setreferenceexpr(rx)
04032             r.fill()
04033             if average:
04034                 tbl = self._math._average((tbl, ), (), 'NONE', 'SCAN')
04035             if not first:
04036                 tbl = self._math._merge([self, tbl])
04037             Scantable.__init__(self, tbl)
04038             r.close()
04039             del r, tbl
04040             first = False
04041             #flush log
04042         asaplog.post()
04043         if unit is not None:
04044             self.set_fluxunit(unit)
04045         if not is_casapy():
04046             self.set_freqframe(rcParams['scantable.freqframe'])
04047 
04048     def _get_verify_action( self, msg, action=None ):
04049         valid_act = ['Y', 'N', 'A', 'R']
04050         if not action or not isinstance(action, str):
04051             action = raw_input("%s [Y/n/a/r] (h for help): " % msg)
04052         if action == '':
04053             return "Y"
04054         elif (action.upper()[0] in valid_act):
04055             return action.upper()[0]
04056         elif (action.upper()[0] in ['H','?']):
04057             print "Available actions for verification [Y/n/a/r]"
04058             print " Y : Yes for current data (default)"
04059             print " N : No for current data"
04060             print " A : Accept all in the following and exit from verification"
04061             print " R : Reject all in the following and exit from verification"
04062             print " H or ?: help (show this message)"
04063             return self._get_verify_action(msg)
04064         else:
04065             return 'Y'
04066 
04067     def __getitem__(self, key):
04068         if key < 0:
04069             key += self.nrow()
04070         if key >= self.nrow():
04071             raise IndexError("Row index out of range.")
04072         return self._getspectrum(key)
04073 
04074     def __setitem__(self, key, value):
04075         if key < 0:
04076             key += self.nrow()
04077         if key >= self.nrow():
04078             raise IndexError("Row index out of range.")
04079         if not hasattr(value, "__len__") or \
04080                 len(value) > self.nchan(self.getif(key)):
04081             raise ValueError("Spectrum length doesn't match.")
04082         return self._setspectrum(value, key)
04083 
04084     def __len__(self):
04085         return self.nrow()
04086 
04087     def __iter__(self):
04088         for i in range(len(self)):
04089             yield self[i]
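    # A minimal usage sketch for the container protocol above, assuming a
    # hypothetical input file 'obs.asap':
    #
    #     from asap import scantable
    #     s = scantable('obs.asap', average=False)
    #     nrows = len(s)               # number of rows, via __len__
    #     first = s[0]                 # spectrum of row 0, via __getitem__
    #     s[0] = [0.0] * len(first)    # replace that spectrum, via __setitem__
    #     for spec in s:               # __iter__ yields one spectrum per row
    #         pass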