[ 20., 21.]])
>>> d.info
{'columns': ['distance (m)', 'force (N)']}
+
+ The information gets passed on to slices.
+
>>> col_a = d[:,0]
>>> col_a
Data([ 0., 10., 20.])
>>> col_a.info
{'columns': ['distance (m)', 'force (N)']}
+
+ The data-type is also pickleable, to ensure we can move it between
+ processes with :class:`multiprocessing.Queue`\s.
+
+ >>> import pickle
+ >>> s = pickle.dumps(d)
+ >>> z = pickle.loads(s)
+ >>> z
+ Data([[ 0., 1.],
+ [ 10., 11.],
+ [ 20., 21.]])
+ >>> z.info
+ {'columns': ['distance (m)', 'force (N)']}
"""
def __new__(subtype, shape, dtype=numpy.float, buffer=None, offset=0,
strides=None, order=None, info=None):
# We do not need to return anything
def __reduce__(self):
- base_class_state = list(numpy.ndarray.__reduce__(self))
- own_state = (self.info,)
- return (base_class_state, own_state)
+ """Collapse an instance for pickling.
+
+ Returns
+ -------
+ reconstruct : callable
+ Called to create the initial version of the object.
+ args : tuple
+ A tuple of arguments for `reconstruct`
+ state : (optional)
+ The state to be passed to __setstate__, if present.
+ iter : iterator (optional)
+ Yielded items will be appended to the reconstructed
+ object.
+ dict : iterator (optional)
+ Yielded (key,value) tuples pushed back onto the
+ reconstructed object.
+ """
+ base_reduce = list(numpy.ndarray.__reduce__(self))
+ # tack our stuff onto ndarray's setstate portion.
+ base_reduce[2] = (base_reduce[2], (self.info,))
+ return tuple(base_reduce)
- def __setstate__(self,state):
+ def __setstate__(self, state):
base_class_state,own_state = state
numpy.ndarray.__setstate__(self, base_class_state)
self.info, = own_state