#!/usr/bin/python
#
# data_logger - classes for consistently logging data in an organized
-# fasion. See the test functions for some usage examples
+# fashion. See the doctests for some usage examples.
#
# Copyright (C) 2008-2010 William Trevor King
#
# write to Trevor King, Drexel University, Physics Dept., 3141 Chestnut St.,
# Philadelphia PA 19104, USA.
+from __future__ import with_statement
+
import os, os.path
import stat
import cPickle as pickle
import time
import string
+
import numpy
__version__ = "0.3"
DEFAULT_PATH = "~/rsrch/data"
-DEFAULT_PATH_REPLACE_STRING = "$DEFAULT$/"
+DEFAULT_PATH_REPLACE_STRING = "${DEFAULT}/"
-class Error (Exception) :
+class Error (Exception):
"Basic module error class"
pass
-class ErrorDirExists (Error) :
+class ErrorDirExists (Error):
"The specified directory already exists"
+ pass
-def normalize_logdir(log_dir):
- length = len(DEFAULT_PATH_REPLACE_STRING)
- if log_dir[:length] == DEFAULT_PATH_REPLACE_STRING:
- log_dir = os.path.join(DEFAULT_PATH, log_dir[length:])
- log_dir = os.path.expanduser(log_dir)
- return log_dir
+def normalize_log_dir(log_dir):
+ """Normalize a log directory.
-class data_log :
- """
- Data logging class.
- Creates consistent, timestamped log files.
+ Expands the user symbol `~`, as well as
+ `DEFAULT_PATH_REPLACE_STRING`.
+
+ Parameters
+ ----------
+ log_dir : path
+ Raw `log_dir` passed into `.__init__()`.
+
+ Returns
+ -------
+ log_dir : path
+ Normalized version of the input `log_dir`.
- Initialized with log_dir and log_name.
- log_dir specifies the base data directory.
- If it doesn't exist, log_dir is created.
-
- If log_dir begins with '$DEFAULT$/', that portion of the path is replaced
- with the then-current contents of the DEFAULT_PATH module global.
+ Examples
+ --------
+ >>> normalize_log_dir('~/.log') # doctest: +ELLIPSIS
+ '/.../.log'
+ >>> normalize_log_dir('${DEFAULT}/hi/there') # doctest: +ELLIPSIS
+ '/.../rsrch/data/hi/there'
+ """
+ if log_dir.startswith(DEFAULT_PATH_REPLACE_STRING):
+ length = len(DEFAULT_PATH_REPLACE_STRING)
+ log_dir = os.path.join(DEFAULT_PATH, log_dir[length:])
+ log_dir = os.path.expanduser(log_dir)
+ return log_dir
- A subdir of log_dir is created (if necessary) named YYYYMMDD,
- where YYYYMMDD is the current day in localtime.
- If noclobber_logsubdir == True, this dir must not exist yet.
- log_name specifies the base name for the created log files (in the log subdir).
- The created log filenames are prefixed with a YYYYMMDDHHMMSS timestamp.
- If the target filename already exists, the filename is postfixed with
- '_N', where N is the lowest integer that doesn't clobber an existing file.
+class DataLog (object):
+ """Create consistent, timestamped log files.
- General data is saved to the log files with the write(obj) method.
- By default, write() cPickles the object passed.
- You can save in other formats by overriding write()
+ General data is saved to the log files with the `write(obj)`
+ method. By default, `write()` `cPickles` the object passed. You
+ can save in other formats by overriding `write()`.
Binary data can be saved directly to the log files with the
- write_binary(binary_string) method.
+ `write_binary(binary_string)` method.
All file names are stripped of possibly troublesome characters.
+
+ Parameters
+ ----------
+ log_dir : path
+ `log_dir` sets the base data directory. If it doesn't exist,
+ `log_dir` is created.
+
+        If log_dir begins with '${DEFAULT}/', that portion of the path
+ is replaced with the then-current contents of the
+ `DEFAULT_PATH` module global.
+
+ A subdir of log_dir is created (if necessary) named
+ `YYYYMMDD`, where `YYYYMMDD` is the current day in local time.
+ log_name : string
+ `log_name` specifies the base name for the created log files
+ (in the log subdir). The created log filenames are prefixed
+ with a `YYYYMMDDHHMMSS` timestamp. If the target filename
+ already exists, the filename is postfixed with `_N`, where
+ `N` is the lowest integer that doesn't clobber an existing
+ file.
+ noclobber_log_subdir : bool
+        If `noclobber_log_subdir == True`, the `YYYYMMDD` subdir of
+ `log_dir` must not exist yet.
+ timestamp : string
+        Override default subdir `timestamp` (%Y%m%d).
+
+ Examples
+ --------
+
+ >>> import shutil
+ >>> dl = DataLog('test_data_log', 'temperature', timestamp='20101103',
+ ... )
+ >>> data = {'test':True, 'data':[1, 2, 3, 4]}
+ >>> files = [None]*10
+ >>> for i in range(10):
+ ... files[i],ts = dl.write(data, timestamp='20101103235959')
+ >>> print '\\n'.join(files)
+ test_data_log/20101103/20101103235959_log
+ test_data_log/20101103/20101103235959_log_1
+ test_data_log/20101103/20101103235959_log_2
+ test_data_log/20101103/20101103235959_log_3
+ test_data_log/20101103/20101103235959_log_4
+ test_data_log/20101103/20101103235959_log_5
+ test_data_log/20101103/20101103235959_log_6
+ test_data_log/20101103/20101103235959_log_7
+ test_data_log/20101103/20101103235959_log_8
+ test_data_log/20101103/20101103235959_log_9
+ >>> shutil.rmtree(dl._log_dir)
"""
- def __init__(self, log_dir=".", noclobber_logsubdir=False,
- log_name="log",
- timestamp=None) :
+ def __init__(self, log_dir=".", noclobber_log_subdir=False,
+ log_name="log", timestamp=None):
+ self._setup_character_translation()
+ self._log_name = self._clean_filename(log_name) # last check.
+ self._log_dir = self._create_log_dir(log_dir) # will not clobber.
+ self._subdir,self._timestamp = self._create_log_subdir(
+ self._log_dir, noclobber_log_subdir, timestamp)
+
+ def _setup_character_translation(self):
+ """Setup `._delete_chars` and `._trans_table` for `._clean_filename()`.
+ """
# generate lists of not-allowed characters
- unaltered_chars = "-._" + string.digits + string.letters
+ unaltered_chars = '-._' + string.digits + string.letters
mapped_pairs = {' ':'_'}
- allowed_chars = unaltered_chars + "".join(mapped_pairs.keys())
+ allowed_chars = unaltered_chars + ''.join(mapped_pairs.keys())
all_chars = string.maketrans('','')
- self.delete_chars = all_chars.translate(all_chars, allowed_chars)
- trans_from = "".join(mapped_pairs.keys())
- trans_to = "".join(mapped_pairs.values()) # same order as keys, since no modifications to mapped_pairs were made in between the two calls
- self.transtable = string.maketrans(trans_from, trans_to)
-
- self._log_name = self._clean_filename(log_name) # never checked after this...
- self._log_dir = self._create_logdir(log_dir) # will not clobber.
- subdir, timestamp = self._create_logsubdir(self._log_dir,
- noclobber_logsubdir,
- timestamp)
- self.subdir = subdir
- self.timestamp = timestamp
- def _clean_filename(self, filename) :
- """
- Currently only works on filenames, since it deletes '/'.
- If you need it to work on full paths, use os.path.split(your_path)[1]
- to strip of the filename portion...
+ self._delete_chars = all_chars.translate(all_chars, allowed_chars)
+ trans_from = ''.join(mapped_pairs.keys())
+ trans_to = ''.join(mapped_pairs.values())
+ # values in trans_to are in the same order as the keys in
+ # trans_from, since no modifications to mapped_pairs were made
+ # in between the two calls.
+ self._trans_table = string.maketrans(trans_from, trans_to)
+
+ def _clean_filename(self, filename):
+ """Remove troublesome characters from filenames.
+
+ This method only works on filenames, since it deletes '/'. If
+ you need it to work on full paths, use
+ `os.path.split(your_path)` and clean the portions separately.
+
+ Parameters
+ ----------
+ filename : string
+
+ Examples
+ --------
+ >>> import shutil
+ >>> dl = DataLog(log_dir="test_clean_filename")
+ >>> dl._clean_filename('hi there')
+ 'hi_there'
+ >>> dl._clean_filename('hello\\tthe/castle')
+ 'hellothecastle'
+ >>> shutil.rmtree(dl._log_dir)
"""
- cleanname = filename.translate(self.transtable, self.delete_chars)
+ cleanname = filename.translate(self._trans_table, self._delete_chars)
return cleanname
- def _create_logdir(self, log_dir) :
- log_dir = normalize_logdir(log_dir)
- if not os.path.exists(log_dir) :
+
+ def _create_log_dir(self, log_dir):
+ """Create a clean base log dir (if necessary).
+
+ Parameters
+ ----------
+ log_dir : path
+ Raw `log_dir` passed into `.__init__()`.
+
+ Returns
+ -------
+ log_dir : path
+ Normalized version of the input `log_dir`.
+
+ Examples
+ --------
+ >>> import shutil
+ >>> dl = DataLog(log_dir='test_create_log_dir')
+ >>> shutil.rmtree(dl._log_dir)
+ """
+ log_dir = normalize_log_dir(log_dir)
+ if not os.path.exists(log_dir):
os.mkdir(log_dir, 0755)
return log_dir
- def _create_logsubdir(self, log_dir, noclobber_logsubdir,
- timestamp=None) :
- if timestamp == None :
+
+ def _create_log_subdir(self, log_dir, noclobber_log_subdir=False,
+ timestamp=None):
+ """Create a clean log dir for logging.
+
+ Parameters
+ ----------
+ log_dir : path
+ Normalized version of the input `log_dir`.
+ noclobber_log_subdir : bool
+ `noclobber_log_subdir` passed into `.__init__()`.
+ timestamp : string
+            Override default `timestamp` (%Y%m%d).
+
+ Returns
+ -------
+ subdir : path
+ Path to the timestamped subdir of `log_dir`.
+ timestamp : string
+ The timestamp used to generate `subdir`.
+
+ Examples
+ --------
+ >>> import os
+ >>> import shutil
+ >>> dl = DataLog(log_dir='test_create_log_subdir',
+ ... timestamp='20101103')
+ >>> os.listdir(dl._log_dir)
+ ['20101103']
+ >>> dl._create_log_subdir(dl._log_dir, noclobber_log_subdir=True,
+ ... timestamp=dl._timestamp)
+ Traceback (most recent call last):
+ ...
+ ErrorDirExists: test_create_log_subdir/20101103 exists
+ >>> dl._create_log_subdir(dl._log_dir, noclobber_log_subdir=False,
+ ... timestamp=dl._timestamp)
+ ('test_create_log_subdir/20101103', '20101103')
+ >>> dl._create_log_subdir(dl._log_dir) # doctest: +ELLIPSIS
+ ('test_create_log_subdir/...', '...')
+ >>> shutil.rmtree(dl._log_dir)
+ """
+ if timestamp == None:
timestamp = time.strftime("%Y%m%d") # %H%M%S
subdir = os.path.join(log_dir, timestamp)
- if os.path.exists(subdir) :
- if noclobber_logsubdir:
+ if os.path.exists(subdir):
+ if noclobber_log_subdir:
raise ErrorDirExists, "%s exists" % subdir
- else :
+ else:
os.mkdir(subdir, 0755)
return (subdir, timestamp)
- def get_filename(self, timestamp=None) :
- """
- Get a filename (using localtime if timestamp==None),
- appending integers as necessary to avoid clobbering.
- For use in write() routines.
- Returns (filepath, timestamp)
+
+ def _get_filename(self, timestamp=None):
+ """Get a filename for a new data log for `.write()`.
+
+ Append integers as necessary to avoid clobbering. Note that
+ the appended integers are *not* thread-safe. You need to
+ actually create the file to reserve the name.
+
+ Parameters
+ ----------
+        timestamp : string
+            Override default `timestamp` (%Y%m%d%H%M%S).
+
+ Returns
+ -------
+ filepath : path
+ Path to the timestamped log file.
+ timestamp : string
+            The timestamp used to generate `filepath`.
+
+ Examples
+ --------
+ >>> import shutil
+ >>> dl = DataLog(log_dir='test_get_filename',
+ ... log_name='my-log', timestamp='20101103')
+ >>> f,t = dl._get_filename('20100103235959')
+ >>> f
+ 'test_get_filename/20101103/20100103235959_my-log'
+ >>> t
+ '20100103235959'
+ >>> open(f, 'w').write('dummy content')
+ >>> f,t = dl._get_filename('20100103235959')
+ >>> f
+ 'test_get_filename/20101103/20100103235959_my-log_1'
+ >>> t
+ '20100103235959'
+ >>> open(f, 'w').write('dummy content')
+ >>> f,t = dl._get_filename('20100103235959')
+ >>> f
+ 'test_get_filename/20101103/20100103235959_my-log_2'
+ >>> t
+ '20100103235959'
+ >>> dl._get_filename() # doctest: +ELLIPSIS
+ ('test_get_filename/20101103/..._my-log', '...')
+ >>> shutil.rmtree(dl._log_dir)
"""
- if timestamp == None :
+ if timestamp == None:
timestamp = time.strftime("%Y%m%d%H%M%S")
filename = "%s_%s" % (timestamp, self._log_name)
- fullname = os.path.join(self.subdir, filename)
+ fullname = os.path.join(self._subdir, filename)
filepath = fullname
i = 1
- while os.path.exists(filepath) :
+ while os.path.exists(filepath):
filepath = "%s_%d" % (fullname, i)
i+=1
return (filepath, timestamp)
- def write(self, obj, timestamp=None) :
- """
- Save object to a timestamped file with pickle.
- If timestamp == None, use the current localtime.
- Returns (filepath, timestamp)
+
+ def write(self, obj, timestamp=None):
+ """Save object to a timestamped file with `cPickle`.
+
+ Parameters
+ ----------
+ obj : object
+ Object to save.
+ timestamp : string
+ Passed on to `._get_filename()`.
+
+ Returns
+ -------
+ filepath : path
+ Path to the timestamped log file.
+ timestamp : string
+ The timestamp used to generate the log file.
+
+ Examples
+ --------
+ >>> import shutil
+ >>> dl = DataLog(log_dir='test_write',
+ ... log_name='my-log', timestamp='20101103')
+ >>> f,t = dl.write([1, 2, 3])
+ >>> a = pickle.load(open(f, 'rb'))
+ >>> a
+ [1, 2, 3]
+ >>> shutil.rmtree(dl._log_dir)
"""
- filepath, timestamp = self.get_filename(timestamp)
- fd = open(filepath, 'wb')
- os.chmod(filepath, 0644)
- pickle.dump(obj, fd)
- fd.close()
+ filepath, timestamp = self._get_filename(timestamp)
+ with open(filepath, 'wb') as fd:
+ os.chmod(filepath, 0644)
+ pickle.dump(obj, fd)
return (filepath, timestamp)
- def write_binary(self, binary_string, timestamp=None) :
- """
- Save binary_string to a timestamped file.
- If timestamp == None, use the current localtime.
- Returns (filepath, timestamp)
+
+ def write_binary(self, binary_string, timestamp=None):
+ """Save a binary string to a timestamped file.
+
+ Parameters
+ ----------
+ binary_string : buffer
+ Binary string to save.
+ timestamp : string
+ Passed on to `._get_filename()`.
+
+ Returns
+ -------
+ filepath : path
+ Path to the timestamped log file.
+ timestamp : string
+ The timestamp used to generate the log file(s).
+
+ Examples
+ --------
+ >>> import shutil
+ >>> import numpy
+ >>> dl = DataLog(log_dir='test_write_binary',
+ ... log_name='my-log', timestamp='20101103')
+ >>> data = numpy.arange(5, dtype=numpy.uint16)
+ >>> filepath,ts = dl.write_binary(data.tostring())
+ >>> data_in = numpy.fromfile(filepath, dtype=numpy.uint16, count=-1)
+ >>> data_in
+ array([0, 1, 2, 3, 4], dtype=uint16)
+ >>> (data == data_in).all()
+ True
+ >>> shutil.rmtree(dl._log_dir)
"""
- filepath, timestamp = self.get_filename(timestamp)
+ filepath, timestamp = self._get_filename(timestamp)
# open a new file in readonly mode, don't clobber.
fd = os.open(filepath, os.O_WRONLY | os.O_CREAT | os.O_EXCL, 0644)
bytes_written = 0
bytes_remaining = len(binary_string)
- while bytes_remaining > 0 :
+ while bytes_remaining > 0:
bw = os.write(fd, binary_string[bytes_written:])
bytes_written += bw
bytes_remaining -= bw
os.close(fd)
return (filepath, timestamp)
- def _write_dict_of_arrays(self, d, base_filepath) :
+
+ def _write_dict_of_arrays(self, d, base_filepath):
+ """Save dict of (string, numpy_array) pairs under `base_filepath`.
+
+ Parameters
+ ----------
+ d : dict
+ Dictionary to save.
+ base_filepath : path
+ Path for table of contents and from which per-pair paths
+ are constructed.
+ """
# open a new file in readonly mode, don't clobber.
bfd = open(base_filepath, 'w', 0644)
bfd.write("Contents (key : file-extension : format):\n")
- for key in d.keys() :
+ for key in d.keys():
clean_key = self._clean_filename(key)
bfd.write("%s : %s : %s\n" % (key, clean_key, str(d[key].dtype)))
# write the keyed array to it's own file
filepath = "%s_%s" % (base_filepath, clean_key)
d[key].tofile(filepath)
bfd.close()
- def write_dict_of_arrays(self, d, timestamp=None) :
- """
- Save dict of (string, numpy_array) pairs to timestamped files.
- If timestamp == None, use the current localtime.
- Returns (base_filepath, timestamp)
+
+ def write_dict_of_arrays(self, d, timestamp=None):
+ """Save dict of (string, numpy_array) pairs to timestamped files.
+
+ Parameters
+ ----------
+ d : dict
+ Dictionary to save.
+ timestamp : string
+ Passed on to `._get_filename()`.
+
+ Returns
+ -------
+ filepath : path
+ Path to the timestamped log file.
+ timestamp : string
+ The timestamp used to generate the log file(s).
+
+ Examples
+ --------
+ >>> import os
+ >>> import shutil
+ >>> import numpy
+ >>> dl = DataLog(log_dir='test_write_dict_of_arrays',
+ ... log_name='my-log', timestamp='20101103')
+ >>> d = {'data1':numpy.arange(5, dtype=numpy.int16),
+ ... 'd\/at:$a 2':numpy.arange(3, dtype=numpy.float64)}
+ >>> filepath,ts = dl.write_dict_of_arrays(
+ ... d, timestamp='20101103235959')
+ >>> filepath
+ 'test_write_dict_of_arrays/20101103/20101103235959_my-log'
+ >>> print '\\n'.join(sorted(os.listdir(dl._subdir)))
+ 20101103235959_my-log
+ 20101103235959_my-log_data1
+ 20101103235959_my-log_data_2
+ >>> contents = open(filepath, 'r').read()
+ >>> print contents
+ Contents (key : file-extension : format):
+ data1 : data1 : int16
+ d\/at:$a 2 : data_2 : float64
+ <BLANKLINE>
+ >>> data1_in = numpy.fromfile(
+ ... filepath+'_data1', dtype=numpy.int16, count=-1)
+ >>> data1_in
+ array([0, 1, 2, 3, 4], dtype=int16)
+ >>> data2_in = numpy.fromfile(
+ ... filepath+'_data_2', dtype=numpy.float64, count=-1)
+ >>> data2_in
+ array([ 0., 1., 2.])
+ >>> shutil.rmtree(dl._log_dir)
"""
- base_filepath, timestamp = self.get_filename(timestamp)
+ base_filepath,timestamp = self._get_filename(timestamp)
self._write_dict_of_arrays(d, base_filepath)
return (base_filepath, timestamp)
-class data_load :
- """
- Loads data logged by data_log.
+
+class DataLoad (object):
+ """Load data logged by `DataLog`.
"""
- def read(self, file) :
- """
- Load an object saved with data_log.write()
- """
- return pickle.load(open(file, 'rb'))
- def read_binary(self, file) :
+ def read(self, filename):
+ """Load an object saved with `DataLog.write()`.
+
+ Parameters
+ ----------
+ filename : path
+ `filename` returned by `DataLog.write()`.
+
+ Returns
+ -------
+ obj : object
+ The saved object.
+
+ Examples
+ --------
+ >>> import shutil
+ >>> dl = DataLog(log_dir='test_read',
+ ... log_name='my-log', timestamp='20101103')
+ >>> f,t = dl.write([1, 2, 3])
+ >>> load = DataLoad()
+ >>> d = load.read(f)
+ >>> d
+ [1, 2, 3]
+ >>> shutil.rmtree(dl._log_dir)
"""
- Load an object saved with data_log.write_binary()
- The file-name must not have been altered.
+ return pickle.load(open(filename, 'rb'))
+
+ def read_binary(self, filename):
+ """Load an object saved with `DataLog.write_binary()`.
+
+ Warning: this method *requires* `filename` to end with
+ `_float` and *assumes* that the file contains `numpy.float`
+ data. That is terrible. Use `h5py` instead of this module!
+
+ Parameters
+ ----------
+ filename : path
+ `filename` returned by `DataLog.write_binary()`.
+
+ Returns
+ -------
+ obj : object
+ The saved object.
+
+ Examples
+ --------
+ >>> import shutil
+ >>> import numpy
+ >>> dl = DataLog(log_dir='test_read_binary',
+ ... log_name='my-log_float', timestamp='20101103')
+ >>> f,t = dl.write_binary(numpy.array([1, 2, 3], dtype=numpy.float))
+ >>> load = DataLoad()
+ >>> d = load.read_binary(f)
+ >>> d
+ array([ 1., 2., 3.])
+ >>> shutil.rmtree(dl._log_dir)
"""
- type = file.split("_")[-1]
- if type == "float" :
+ type_ = filename.split("_")[-1]
+ if type_ == "float":
t = numpy.float
- else :
- raise Exception, "read_binary() not implemented for type %s" % (type)
- return numpy.fromfile(file, dtype=t)
- def read_dict_of_arrays(self, basefile) :
- """
- Load an object saved with data_log.write_binary()
- The file-names must not have been altered.
+ else:
+ raise Exception(
+ "read_binary() not implemented for type %s" % (type_))
+ return numpy.fromfile(filename, dtype=t)
+
+ def read_dict_of_arrays(self, basefile):
+ """Load an object saved with `DataLog.write_dict_of_arrays()`.
+
+ The filenames must not have been altered.
+
+ Parameters
+ ----------
+        basefile : path
+            `basefile` returned by `DataLog.write_dict_of_arrays()`.
+
+ Returns
+ -------
+ obj : object
+ The saved object.
+
+ Examples
+ --------
+ >>> import pprint
+ >>> import shutil
+ >>> import numpy
+ >>> dl = DataLog(log_dir='test_read_dict_of_arrays',
+ ... log_name='my-log', timestamp='20101103')
+ >>> d = {'data1':numpy.arange(5, dtype=numpy.int16),
+ ... 'd\/at:$a 2':numpy.arange(3, dtype=numpy.float64)}
+ >>> f,t = dl.write_dict_of_arrays(d, timestamp='20101103235959')
+ >>> load = DataLoad()
+ >>> d = load.read_dict_of_arrays(f)
+ >>> pprint.pprint(d)
+ {'d\\\\/at:$a 2': array([ 0., 1., 2.]),
+ 'data1': array([0, 1, 2, 3, 4], dtype=int16)}
+ >>> shutil.rmtree(dl._log_dir)
"""
obj = {}
i=0
realbasefile = os.path.realpath(basefile)
- for line in file(realbasefile) :
+ for line in file(realbasefile):
if i > 0 : # ignore first line
ldata = line.split(' : ')
name = ldata[0]
fpath = "%s_%s" % (realbasefile, ldata[1])
- exec 'typ = numpy.%s' % ldata[2]
- obj[name] = numpy.fromfile(fpath, dtype=typ)
+ type_ = getattr(numpy, ldata[2].strip())
+ obj[name] = numpy.fromfile(fpath, dtype=type_)
i += 1
return obj
-_test_dir = "."
-
-def _check_data_logsubdir_clobber() :
- log1 = data_log(_test_dir, noclobber_logsubdir=True)
- try :
- log2 = data_log(_test_dir, noclobber_logsubdir=True)
- raise Error, "Didn't detect old log"
- except ErrorDirExists :
- pass # everything as it should be
- os.rmdir(log1.subdir)
-
-def _check_data_log_filenames() :
- data = {"Test":True, "Data":[1,2,3,4]}
- log = data_log(_test_dir, noclobber_logsubdir=True)
- files = [None]*10
- for i in range(10):
- files[i], ts = log.write(data)
- print "Contents of log directory (should be 10 identical logs)"
- os.system('ls -l %s' % log.subdir)
- for file in files :
- os.remove( file )
- os.rmdir(log.subdir)
-
-def _check_data_log_pickle_integrity() :
- data = {"Test":True, "Data":[1,2,3,4]}
- # save the data
- log = data_log(_test_dir, noclobber_logsubdir=True)
- filepath, ts = log.write(data)
- # read it back in
- fd = open(filepath, 'rb')
- data_in = pickle.load(fd)
- fd.close()
- # compare
- if data != data_in :
- print "Saved : ", data
- print "Read back: ", data_in
- raise Error, "Poorly pickled"
- os.remove(filepath)
- os.rmdir(log.subdir)
-
-def _check_data_log_binary_integrity() :
- from numpy import zeros, uint16, fromfile
- npts = 100
- data = zeros((npts,), dtype=uint16)
- for i in range(npts) :
- data[i] = i
- # save the data
- log = data_log(_test_dir, noclobber_logsubdir=True)
- filepath, ts = log.write_binary(data.tostring())
- # read it back in
- data_in = fromfile(filepath, dtype=uint16, count=-1)
- # compare
- if npts != len(data_in) :
- raise Error, "Saved %d uint16s, read %d" % (npts, len(data_in))
- for i in range(npts) :
- if data_in[i] != data[i] :
- print "Disagreement in element %d" % i
- print "Saved %d, read back %d" % (data[i], data_in[i])
- raise Error, "Poorly saved"
- os.remove(filepath)
- os.rmdir(log.subdir)
-
-def _check_data_loc_dict_of_arrays() :
- from numpy import zeros, uint16, fromfile
- npts = 100
- data1 = zeros((npts,), dtype=uint16)
- for i in range(npts) :
- data1[i] = i
- data2 = zeros((npts,), dtype=uint16)
- for i in range(npts) :
- data2[i] = npts-i
- data={"data1":data1, "d\/at:$a 2":data2}
- # save the data
- log = data_log(_test_dir, noclobber_logsubdir=True)
- filepath, ts = log.write_dict_of_arrays(data)
- # checking
- print "Contents of log directory (should be 3 logs)"
- os.system('ls -l %s' % log.subdir)
- print "The table of contents file:"
- os.system('cat %s' % (filepath))
- data1_in = fromfile(filepath+"_data1", dtype=uint16)
- data2_in = fromfile(filepath+"_data_2", dtype=uint16)
- for i in range(npts) :
- if data1_in[i] != data1[i] :
- print "Disagreement in element %d of data1" % i
- print "Saved %d, read back %d" % (data1[i], data1_in[i])
- raise Error, "Poorly saved"
- if data2_in[i] != data2[i] :
- print "Disagreement in element %d of data2" % i
- print "Saved %d, read back %d" % (data2[i], data2_in[i])
- raise Error, "Poorly saved"
- os.remove(filepath)
- os.remove(filepath+"_data1")
- os.remove(filepath+"_data_2")
- os.rmdir(log.subdir)
-
-def test() :
- _check_data_logsubdir_clobber()
- _check_data_log_filenames()
- _check_data_log_pickle_integrity()
- _check_data_log_binary_integrity()
- _check_data_loc_dict_of_arrays()
-
-if __name__ == "__main__" :
+
+def test():
+ import doctest
+ import sys
+
+ result = doctest.testmod()
+ sys.exit(min(result.failed, 127))
+
+if __name__ == "__main__":
test()