'''
Library for interpreting Picoforce force spectroscopy files. Alternate version.

Copyright (C) 2006 Massimo Sandal (University of Bologna, Italy).
Copyright (C) 2008 Alberto Gomez-Casado (University of Twente, Netherlands).

This program is released under the GNU General Public License version 2.
'''
import re
import struct

from scipy import arange

import libhookecurve as lhc

__version__='0.0.0.20081706'
class DataChunk(list):
    '''Dummy class to provide ext and ret methods to the data list.

    A chunk holds one curve's worth of samples: the first half is the
    extension (approach) segment, the second half the retraction segment.
    '''

    def ext(self):
        '''Return the extension (first) half of the data.'''
        # Integer division keeps the split point a valid index for odd lengths.
        halflen = len(self) // 2
        return self[0:halflen]

    def ret(self):
        '''Return the retraction (second) half of the data.

        NOTE(review): body reconstructed from the class comment and from the
        callers (deflection(), default_plots()); the original lines were lost
        in this fragment.
        '''
        halflen = len(self) // 2
        return self[halflen:]
class picoforcealtDriver(lhc.Driver):
    '''Driver for interpreting Picoforce force spectroscopy files
    (alternate version).'''

    #Construction and other special methods

    def __init__(self, filename):
        '''Open *filename* twice: as text, to parse the ASCII header, and as
        binary, to read the raw data chunks.'''
        # open() instead of the Python-2-only builtin file().
        self.textfile = open(filename)
        self.binfile = open(filename, 'rb')

        #The 0,1,2 data chunks are:
        # NOTE(review): the chunk-index attributes were lost in this fragment;
        # _deflection() reads self.forcechunk, so it must be set here --
        # confirm the values against a known-good copy of this driver.
        self.forcechunk = 0
        self.distancechunk = 1
        #TODO eliminate the need to set chunk numbers

        self.filepath = filename
        self.filetype = 'picoforce'
        self.experiment = 'smfs'
63 def _get_samples_line(self):
65 Gets the samples per line parameters in the file, to understand trigger behaviour.
69 samps_expr=re.compile(".*Samps")
72 for line in self.textfile.readlines():
73 if samps_expr.match(line):
75 samps=int(line.split()[2]) #the third word splitted is the offset (in bytes)
76 samps_values.append(samps)
80 #We raise a flag for the fact we meet an offset, otherwise we would take spurious data length arguments.
82 return int(samps_values[0])
84 def _get_chunk_coordinates(self):
86 This method gets the coordinates (offset and length) of a data chunk in our
89 It returns a list containing two tuples:
90 the first element of each tuple is the data_offset, the second is the corresponding
93 In near future probably each chunk will get its own data structure, with
94 offset, size, type, etc.
98 offset_expr=re.compile(".*Data offset")
99 length_expr=re.compile(".*Data length")
105 for line in self.textfile.readlines():
107 if offset_expr.match(line):
108 offset=int(line.split()[2]) #the third word splitted is the offset (in bytes)
109 data_offsets.append(offset)
110 #We raise a flag for the fact we meet an offset, otherwise we would take spurious data length arguments.
113 #same for the data length
114 if length_expr.match(line) and flag_offset:
115 size=int(line.split()[2])
116 data_sizes.append(size)
117 #Put down the offset flag until the next offset is met.
120 return zip(data_offsets,data_sizes)
122 def _get_data_chunk(self,whichchunk):
124 reads a data chunk and converts it in 16bit signed int.
126 offset,size=self._get_chunk_coordinates()[whichchunk]
129 self.binfile.seek(offset)
130 raw_chunk=self.binfile.read(size)
133 for data_position in range(0,len(raw_chunk),2):
134 data_unit_bytes=raw_chunk[data_position:data_position+2]
135 #The unpack function converts 2-bytes in a signed int ('h').
136 #we use output[0] because unpack returns a 1-value tuple, and we want the number only
137 data_unit=struct.unpack('h',data_unit_bytes)[0]
138 my_chunk.append(data_unit)
140 return DataChunk(my_chunk)
143 #returns force vector
144 Kspring=self.get_spring_constant()
145 return DataChunk([(meter*Kspring) for meter in self._deflection()])
147 def _deflection(self):
148 #for internal use (feeds _force)
150 z_scale=self._get_Z_scale()
151 deflsensitivity=self.get_deflection_sensitivity()
152 volts=[((float(lsb))*voltrange*z_scale) for lsb in self.data_chunks[self.forcechunk]]
153 deflect=[volt*deflsensitivity for volt in volts]
159 #returns distance vector (calculated instead than from data chunk)
160 rampsize=self._get_rampsize()
161 sampsline=self._get_samples_line()
162 senszscan=self._get_Z_scan_sens()
164 xstep=senszscan*rampsize/sampsline*10**(-9)
166 xext=arange(sampsline*xstep,0,-xstep)
167 xret=arange(sampsline*xstep,0,-xstep)
169 return DataChunk(xext.tolist()+xret.tolist())
171 def _get_Z_scale(self):
172 self.textfile.seek(0)
173 expr=re.compile(".*@4:Z scale")
175 for line in self.textfile.readlines():
177 zscale=float((line.split()[5]).strip("() []"))
181 def _get_rampsize(self):
182 self.textfile.seek(0)
183 expr=re.compile(".*@4:Ramp size:")
185 for line in self.textfile.readlines():
187 zsens=float((line.split()[7]).strip("() []"))
191 def _get_Z_scan_sens(self):
192 self.textfile.seek(0)
193 expr=re.compile(".*@Sens. Zsens")
195 for line in self.textfile.readlines():
197 zsens=float((line.split()[3]).strip("() []"))
203 def get_deflection_sensitivity(self):
205 gets deflection sensitivity
207 self.textfile.seek(0)
209 def_sensitivity_expr=re.compile(".*@Sens. DeflSens")
211 for line in self.textfile.readlines():
212 if def_sensitivity_expr.match(line):
213 def_sensitivity=float(line.split()[3])
215 #return it in SI units (that is: m/V, not nm/V)
216 return def_sensitivity*(10**(-9))
218 def get_spring_constant(self):
220 gets spring constant.
221 We actually find *three* spring constant values, one for each data chunk (F/t, Z/t, F/z).
222 They are normally all equal, but we retain all three for future...
224 self.textfile.seek(0)
226 springconstant_expr=re.compile(".*Spring Constant")
230 for line in self.textfile.readlines():
231 if springconstant_expr.match(line):
232 constants.append(float(line.split()[2]))
238 self-identification of file type magic
240 curve_file=file(self.filepath)
241 header=curve_file.read(30)
244 if header[2:17] == 'Force file list': #header of a picoforce file
245 #here DONT translate chunk
246 self.data_chunks=[self._get_data_chunk(num) for num in [0,1,2]]
253 Explicitly closes all files
255 self.textfile.close()
258 def default_plots(self):
260 creates the default PlotObject
264 samples=self._get_samples_line()
265 main_plot=lhc.PlotObject()
266 main_plot.vectors=[[zdomain.ext()[0:samples], force.ext()[0:samples]],[zdomain.ret()[0:samples], force.ret()[0:samples]]]
267 main_plot.normalize_vectors()
268 main_plot.units=['meters','newton']
269 main_plot.destination=0
270 main_plot.title=self.filepath
275 def deflection(self):
276 #interface for correct plotmanip and others
277 deflectionchunk=DataChunk(self._deflection())
278 return deflectionchunk.ext(),deflectionchunk.ret()