From: W. Trevor King
Date: Thu, 4 Nov 2010 12:41:49 +0000 (-0400)
Subject: Fix 'Creating an HDF5 file' headline and convert tabs to spaces in HDF5 post.
X-Git-Url: http://git.tremily.us/?a=commitdiff_plain;h=3db1c0330e4e69e8d584305310fc890ac396ef6f;p=mw2txt.git

Fix 'Creating an HDF5 file' headline and convert tabs to spaces in HDF5 post.
---

diff --git a/posts/HDF5.mdwn b/posts/HDF5.mdwn
index 068394a..8ee60fa 100644
--- a/posts/HDF5.mdwn
+++ b/posts/HDF5.mdwn
@@ -27,7 +27,7 @@ filesystem concept map:
 
 Creating an HDF5 file
----------------------"
+---------------------
 
     >>> import h5py
     >>> f = h5py.File('file.h5', 'w')
@@ -171,7 +171,7 @@ Creating groups using absolute and relative names
     >>> f = h5py.File('groups.h5', 'w')
     >>> g1 = f.create_group('/MyGroup')
     >>> g2 = f.create_group('/MyGroup/Group_A')
-	>>> g3 = g1.create_group('Group_B')
+    >>> g3 = g1.create_group('Group_B')
     >>> f.keys()
     ['MyGroup']
     >>> f['MyGroup'].keys()
@@ -241,8 +241,8 @@ Just use the [Numpy slice indexing][slice] you're used to.
     >>> import numpy
     >>> f = h5py.File('hype.h5', 'w')
     >>> f['IntArray'] = numpy.ones((8, 10))
-	>>> dset = f['IntArray']
-	>>> dset.value
+    >>> dset = f['IntArray']
+    >>> dset.value
     array([[ 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],
            [ 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],
            [ 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],
@@ -251,7 +251,7 @@ Just use the [Numpy slice indexing][slice] you're used to.
            [ 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],
            [ 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.],
            [ 1., 1., 1., 1., 1., 1., 1., 1., 1., 1.]])
-	>>> f['IntArray'][:,5:] = 2
+    >>> f['IntArray'][:,5:] = 2
     >>> dset.value
     array([[ 1., 1., 1., 1., 1., 2., 2., 2., 2., 2.],
            [ 1., 1., 1., 1., 1., 2., 2., 2., 2., 2.],
@@ -262,8 +262,8 @@ Just use the [Numpy slice indexing][slice] you're used to.
            [ 1., 1., 1., 1., 1., 2., 2., 2., 2., 2.],
            [ 1., 1., 1., 1., 1., 2., 2., 2., 2., 2.]])
     >>> dset[1:4,2:6] = 5
-	>>> f['IntArray'].value
-	array([[ 1., 1., 1., 1., 1., 2., 2., 2., 2., 2.],
+    >>> f['IntArray'].value
+    array([[ 1., 1., 1., 1., 1., 2., 2., 2., 2., 2.],
            [ 1., 1., 5., 5., 5., 5., 2., 2., 2., 2.],
            [ 1., 1., 5., 5., 5., 5., 2., 2., 2., 2.],
            [ 1., 1., 5., 5., 5., 5., 2., 2., 2., 2.],
@@ -283,16 +283,16 @@ Your array's `numpy.dtype` will be preserved.
 
     >>> import h5py
     >>> f = h5py.File('dtype.h5', 'w')
     >>> f['complex'] = 2 + 3j
-	>>> f['complex'].dtype
+    >>> f['complex'].dtype
     dtype('complex128')
-	>>> type(f['complex'].value)
+    >>> type(f['complex'].value)
-	>>> f['complex array'] = [1 + 2j, 3 + 4j]
-	>>> f['complex array'].dtype
+    >>> f['complex array'] = [1 + 2j, 3 + 4j]
+    >>> f['complex array'].dtype
     dtype('complex128')
-	>>> type(f['complex array'].value)
+    >>> type(f['complex array'].value)
-	>>> f.close()
+    >>> f.close()
 
 Which creates
@@ -343,20 +343,20 @@ Chunking and extendible datasets
 Extendible datasets must be chunked.
 
     >>> import h5py
-	>>> import numpy
+    >>> import numpy
     >>> f = h5py.File('ext.h5', 'w')
     >>> f['simple'] = [1, 2, 3] # not chunked
-	>>> s = f['simple']
-	>>> s.chunks == None
-	True
-	>>> s.resize((6,))
+    >>> s = f['simple']
+    >>> s.chunks == None
+    True
+    >>> s.resize((6,))
     Traceback (most recent call last):
     ...
     TypeError: Only chunked datasets can be resized
-	>>> c = f.create_dataset('chunked', (3,), numpy.int32, chunks=(2,))
-	>>> c.chunks
+    >>> c = f.create_dataset('chunked', (3,), numpy.int32, chunks=(2,))
+    >>> c.chunks
    (2,)
-	>>> c[:] = [9, 8, 7]
+    >>> c[:] = [9, 8, 7]
     >>> c.resize((6,))
     >>> c.value
     array([1, 2, 3, 0, 0, 0])
@@ -364,7 +364,7 @@ Extendible datasets must be chunked.
     Traceback (most recent call last):
     ...
     TypeError: New shape length (2) must match dataset rank (1)
-	>>> f.close()
+    >>> f.close()
 
 The "chunkiness" of data is not listed by `h5dump`,