Library for interpreting Picoforce force spectroscopy files. Alternate version.

Copyright (C) 2006 Massimo Sandal (University of Bologna, Italy).
Copyright (C) 2008 Alberto Gomez-Casado (University of Twente, Netherlands).

This program is released under the GNU General Public License version 2.
import re
import struct

from scipy import arange

from .. import libhookecurve as lhc
17 __version__='0.0.0.20081706'
class DataChunk(list):
    """Dummy list subclass providing ext/ret views of a curve data chunk.

    A chunk holds the extension (approach) half followed by the retraction
    half; ``ext`` and ``ret`` return the respective halves.
    """

    def ext(self):
        """Return the first half of the chunk (extension segment)."""
        # Explicit floor division so odd lengths behave the same on py2/py3.
        halflen = len(self) // 2
        return self[0:halflen]

    def ret(self):
        """Return the second half of the chunk (retraction segment)."""
        halflen = len(self) // 2
        return self[halflen:]
class picoforcealtDriver(lhc.Driver):
    """Hooke driver for Picoforce force-spectroscopy files (alternate version)."""

    # Construction and other special methods

    def __init__(self, filename):
        """Open *filename* and initialise driver metadata.

        Two handles are kept on the same file: a text one for parsing the
        ASCII header, and a binary one for reading the raw data chunks.
        """
        # open() instead of the deprecated py2-only file() builtin.
        self.textfile = open(filename)
        self.binfile = open(filename, 'rb')

        # The 0,1,2 data chunks are:
        #   0: deflection vs time
        #   1: Z position vs time
        #   2: deflection vs Z
        # NOTE(review): chunk-role indices reconstructed from usage
        # (self.forcechunk is read by _deflection) -- confirm.
        self.forcechunk = 0
        self.distancechunk = 1
        # TODO eliminate the need to set chunk numbers

        self.filepath = filename

        self.filetype = 'picoforce'
        self.experiment = 'smfs'
61 def _get_samples_line(self):
63 Gets the samples per line parameters in the file, to understand trigger behaviour.
67 samps_expr=re.compile(".*Samps")
70 for line in self.textfile.readlines():
71 if samps_expr.match(line):
73 samps=int(line.split()[2]) #the third word splitted is the offset (in bytes)
74 samps_values.append(samps)
78 #We raise a flag for the fact we meet an offset, otherwise we would take spurious data length arguments.
80 return int(samps_values[0])
82 def _get_chunk_coordinates(self):
84 This method gets the coordinates (offset and length) of a data chunk in our
87 It returns a list containing two tuples:
88 the first element of each tuple is the data_offset, the second is the corresponding
91 In near future probably each chunk will get its own data structure, with
92 offset, size, type, etc.
96 offset_expr=re.compile(".*Data offset")
97 length_expr=re.compile(".*Data length")
103 for line in self.textfile.readlines():
105 if offset_expr.match(line):
106 offset=int(line.split()[2]) #the third word splitted is the offset (in bytes)
107 data_offsets.append(offset)
108 #We raise a flag for the fact we meet an offset, otherwise we would take spurious data length arguments.
111 #same for the data length
112 if length_expr.match(line) and flag_offset:
113 size=int(line.split()[2])
114 data_sizes.append(size)
115 #Put down the offset flag until the next offset is met.
118 return zip(data_offsets,data_sizes)
120 def _get_data_chunk(self,whichchunk):
122 reads a data chunk and converts it in 16bit signed int.
124 offset,size=self._get_chunk_coordinates()[whichchunk]
127 self.binfile.seek(offset)
128 raw_chunk=self.binfile.read(size)
131 for data_position in range(0,len(raw_chunk),2):
132 data_unit_bytes=raw_chunk[data_position:data_position+2]
133 #The unpack function converts 2-bytes in a signed int ('h').
134 #we use output[0] because unpack returns a 1-value tuple, and we want the number only
135 data_unit=struct.unpack('h',data_unit_bytes)[0]
136 my_chunk.append(data_unit)
138 return DataChunk(my_chunk)
141 #returns force vector
142 Kspring=self.get_spring_constant()
143 return DataChunk([(meter*Kspring) for meter in self._deflection()])
145 def _deflection(self):
146 #for internal use (feeds _force)
148 z_scale=self._get_Z_scale()
149 deflsensitivity=self.get_deflection_sensitivity()
150 volts=[((float(lsb))*voltrange*z_scale) for lsb in self.data_chunks[self.forcechunk]]
151 deflect=[volt*deflsensitivity for volt in volts]
157 #returns distance vector (calculated instead than from data chunk)
158 rampsize=self._get_rampsize()
159 sampsline=self._get_samples_line()
160 senszscan=self._get_Z_scan_sens()
162 xstep=senszscan*rampsize/sampsline*10**(-9)
164 xext=arange(sampsline*xstep,0,-xstep)
165 xret=arange(sampsline*xstep,0,-xstep)
167 return DataChunk(xext.tolist()+xret.tolist())
169 def _get_Z_scale(self):
170 self.textfile.seek(0)
171 expr=re.compile(".*@4:Z scale")
173 for line in self.textfile.readlines():
175 zscale=float((line.split()[5]).strip("() []"))
179 def _get_rampsize(self):
180 self.textfile.seek(0)
181 expr=re.compile(".*@4:Ramp size:")
183 for line in self.textfile.readlines():
185 zsens=float((line.split()[7]).strip("() []"))
189 def _get_Z_scan_sens(self):
190 self.textfile.seek(0)
191 expr=re.compile(".*@Sens. Zsens")
193 for line in self.textfile.readlines():
195 zsens=float((line.split()[3]).strip("() []"))
201 def get_deflection_sensitivity(self):
203 gets deflection sensitivity
205 self.textfile.seek(0)
207 def_sensitivity_expr=re.compile(".*@Sens. DeflSens")
209 for line in self.textfile.readlines():
210 if def_sensitivity_expr.match(line):
211 def_sensitivity=float(line.split()[3])
213 #return it in SI units (that is: m/V, not nm/V)
214 return def_sensitivity*(10**(-9))
216 def get_spring_constant(self):
218 gets spring constant.
219 We actually find *three* spring constant values, one for each data chunk (F/t, Z/t, F/z).
220 They are normally all equal, but we retain all three for future...
222 self.textfile.seek(0)
224 springconstant_expr=re.compile(".*Spring Constant")
228 for line in self.textfile.readlines():
229 if springconstant_expr.match(line):
230 constants.append(float(line.split()[2]))
236 self-identification of file type magic
238 curve_file=file(self.filepath)
239 header=curve_file.read(30)
242 if header[2:17] == 'Force file list': #header of a picoforce file
243 #here DONT translate chunk
244 self.data_chunks=[self._get_data_chunk(num) for num in [0,1,2]]
251 Explicitly closes all files
253 self.textfile.close()
256 def default_plots(self):
258 creates the default PlotObject
262 samples=self._get_samples_line()
263 main_plot=lhc.PlotObject()
264 main_plot.vectors=[[zdomain.ext()[0:samples], force.ext()[0:samples]],[zdomain.ret()[0:samples], force.ret()[0:samples]]]
265 main_plot.normalize_vectors()
266 main_plot.units=['meters','newton']
267 main_plot.destination=0
268 main_plot.title=self.filepath
273 def deflection(self):
274 #interface for correct plotmanip and others
275 deflectionchunk=DataChunk(self._deflection())
276 return deflectionchunk.ext(),deflectionchunk.ret()