3 """Library for interpreting Picoforce force spectroscopy files.
An alternative implementation of :mod:`hooke.driver.picoforce`.
9 from scipy import arange
11 from .. import curve as lhc
13 __version__='0.0.0.20081706'
class DataChunk(list):
    # Dummy class to provide ext and ret methods to the data list:
    # a force curve stores extension and retraction back to back, so
    # ext/ret return the first and second half of the samples.
    # NOTE(review): the method header (presumably 'def ext(self):') and
    # the computation of 'halflen' (presumably len(self)/2) are missing
    # from this view — confirm against the full file.
        # First half of the data: the extension (approach) segment.
        return self[0:halflen]
class picoforcealtDriver(lhc.Driver):
    """Hooke driver for Veeco Picoforce force-spectroscopy files
    (alternative implementation)."""

    #Construction and other special methods

    def __init__(self,filename):
        """Open *filename* twice: once as text for parsing the ASCII
        header, once in binary mode for reading the raw data chunks."""
        self.textfile=file(filename)
        self.binfile=file(filename,'rb')

        #The 0,1,2 data chunks are:
        #TODO eliminate the need to set chunk numbers
        # NOTE(review): the assignments describing which chunk index is
        # force/distance (e.g. self.forcechunk, used by _deflection)
        # are missing from this view — confirm against the full file.

        self.filepath=filename
        self.filetype='picoforce'
        self.experiment='smfs'
    def _get_samples_line(self):
        """
        Gets the samples per line parameters in the file, to understand trigger behaviour.
        """
        samps_expr=re.compile(".*Samps")

        # NOTE(review): initialization of 'samps_values' (presumably
        # 'samps_values=[]') is missing from this view — confirm.
        for line in self.textfile.readlines():
            if samps_expr.match(line):
                samps=int(line.split()[2]) #the third whitespace-separated word is the samples-per-line count
                samps_values.append(samps)

        # Only the first matching value is used; later matches are collected but ignored.
        return int(samps_values[0])
    def _get_chunk_coordinates(self):
        """
        This method gets the coordinates (offset and length) of a data chunk in our
        file.

        It returns a list containing two tuples:
        the first element of each tuple is the data_offset, the second is the corresponding
        data size.

        In near future probably each chunk will get its own data structure, with
        offset, size, type, etc.
        """
        offset_expr=re.compile(".*Data offset")
        length_expr=re.compile(".*Data length")

        # NOTE(review): initialization of 'data_offsets', 'data_sizes'
        # and 'flag_offset' is missing from this view — confirm against
        # the full file.
        for line in self.textfile.readlines():

            if offset_expr.match(line):
                offset=int(line.split()[2]) #the third whitespace-separated word is the offset (in bytes)
                data_offsets.append(offset)
                #We raise a flag for the fact we meet an offset, otherwise we would take spurious data length arguments.
                # (the line setting flag_offset is missing from this view)

            #same for the data length
            if length_expr.match(line) and flag_offset:
                size=int(line.split()[2])
                data_sizes.append(size)
                #Put down the offset flag until the next offset is met.
                # (the line clearing flag_offset is missing from this view)

        return zip(data_offsets,data_sizes)
    def _get_data_chunk(self,whichchunk):
        """
        Reads a data chunk and converts it in 16bit signed int.
        """
        offset,size=self._get_chunk_coordinates()[whichchunk]

        self.binfile.seek(offset)
        raw_chunk=self.binfile.read(size)

        # NOTE(review): initialization of 'my_chunk' (presumably
        # 'my_chunk=[]') is missing from this view — confirm.
        # Walk the raw bytes two at a time: each sample is a 16-bit
        # little-endian-as-native signed integer.
        for data_position in range(0,len(raw_chunk),2):
            data_unit_bytes=raw_chunk[data_position:data_position+2]
            #The unpack function converts 2-bytes in a signed int ('h').
            #we use output[0] because unpack returns a 1-value tuple, and we want the number only
            data_unit=struct.unpack('h',data_unit_bytes)[0]
            my_chunk.append(data_unit)

        return DataChunk(my_chunk)
        # NOTE(review): the method header (presumably 'def _force(self):')
        # is missing from this view — confirm against the full file.
        #returns force vector: force = deflection (metres) * spring constant (N/m)
        Kspring=self.get_spring_constant()
        return DataChunk([(meter*Kspring) for meter in self._deflection()])
    def _deflection(self):
        #for internal use (feeds _force)
        z_scale=self._get_Z_scale()
        deflsensitivity=self.get_deflection_sensitivity()
        # NOTE(review): the assignment of 'voltrange' is missing from
        # this view — confirm against the full file.
        # LSB -> volts -> metres: scale raw counts by the voltage range
        # and Z scale, then apply the deflection sensitivity (m/V).
        volts=[((float(lsb))*voltrange*z_scale) for lsb in self.data_chunks[self.forcechunk]]
        deflect=[volt*deflsensitivity for volt in volts]
        # NOTE(review): the return statement (presumably 'return deflect')
        # is missing from this view.
        # NOTE(review): the method header (presumably 'def _Z(self):')
        # is missing from this view — confirm against the full file.
        #returns distance vector (calculated instead than from data chunk)
        rampsize=self._get_rampsize()
        sampsline=self._get_samples_line()
        senszscan=self._get_Z_scan_sens()

        # Per-sample step in metres: sensitivity (nm/V) * ramp size / samples, scaled by 1e-9.
        xstep=senszscan*rampsize/sampsline*10**(-9)

        # Both halves ramp down from the full extension to zero; the
        # extension and retraction domains are identical by construction.
        xext=arange(sampsline*xstep,0,-xstep)
        xret=arange(sampsline*xstep,0,-xstep)

        return DataChunk(xext.tolist()+xret.tolist())
    def _get_Z_scale(self):
        # Scan the text header for the '@4:Z scale' parameter.
        self.textfile.seek(0)
        expr=re.compile(".*@4:Z scale")

        for line in self.textfile.readlines():
            # NOTE(review): the 'if expr.match(line):' guard and the
            # final return statement are missing from this view —
            # confirm against the full file.
            zscale=float((line.split()[5]).strip("() []"))
    def _get_rampsize(self):
        # Scan the text header for the '@4:Ramp size:' parameter.
        self.textfile.seek(0)
        expr=re.compile(".*@4:Ramp size:")

        for line in self.textfile.readlines():
            # NOTE(review): the 'if expr.match(line):' guard and the
            # final return statement are missing from this view —
            # confirm against the full file.
            zsens=float((line.split()[7]).strip("() []"))
    def _get_Z_scan_sens(self):
        # Scan the text header for the '@Sens. Zsens' parameter.
        self.textfile.seek(0)
        expr=re.compile(".*@Sens. Zsens")

        for line in self.textfile.readlines():
            # NOTE(review): the 'if expr.match(line):' guard and the
            # final return statement are missing from this view —
            # confirm against the full file.
            zsens=float((line.split()[3]).strip("() []"))
    def get_deflection_sensitivity(self):
        """
        gets deflection sensitivity
        """
        self.textfile.seek(0)

        def_sensitivity_expr=re.compile(".*@Sens. DeflSens")

        for line in self.textfile.readlines():
            if def_sensitivity_expr.match(line):
                def_sensitivity=float(line.split()[3])
                # NOTE(review): one line is missing here in this view
                # (presumably 'break') — confirm against the full file.
        #return it in SI units (that is: m/V, not nm/V)
        return def_sensitivity*(10**(-9))
    def get_spring_constant(self):
        """
        gets spring constant.
        We actually find *three* spring constant values, one for each data chunk (F/t, Z/t, F/z).
        They are normally all equal, but we retain all three for future...
        """
        self.textfile.seek(0)

        springconstant_expr=re.compile(".*Spring Constant")

        # NOTE(review): initialization of 'constants' (presumably
        # 'constants=[]') and the final return statement are missing
        # from this view — confirm against the full file.
        for line in self.textfile.readlines():
            if springconstant_expr.match(line):
                constants.append(float(line.split()[2]))
        # NOTE(review): the method header (presumably 'def is_me(self):'),
        # the docstring quotes, and the return statement(s) are missing
        # from this view — confirm against the full file.
        """
        self-identification of file type magic
        """
        curve_file=file(self.filepath)
        header=curve_file.read(30)

        # The Picoforce ASCII header carries its magic string starting
        # at byte offset 2.
        if header[2:17] == 'Force file list': #header of a picoforce file
            #here DONT translate chunk
            self.data_chunks=[self._get_data_chunk(num) for num in [0,1,2]]
        # NOTE(review): the method header (presumably 'def close_all(self):'),
        # the docstring quotes, and the matching 'self.binfile.close()'
        # are missing from this view — confirm against the full file.
        """
        Explicitly closes all files
        """
        self.textfile.close()
    def default_plots(self):
        """
        creates the default PlotObject
        """
        # NOTE(review): the assignments of 'force' and 'zdomain'
        # (presumably self._force() and the distance-vector method)
        # and the final 'return main_plot' are missing from this view —
        # confirm against the full file.
        samples=self._get_samples_line()
        main_plot=lhc.PlotObject()
        # One [x, y] pair per curve half: extension first, retraction
        # second, each truncated to the samples-per-line count.
        main_plot.vectors=[[zdomain.ext()[0:samples], force.ext()[0:samples]],[zdomain.ret()[0:samples], force.ret()[0:samples]]]
        main_plot.normalize_vectors()
        main_plot.units=['meters','newton']
        main_plot.destination=0
        main_plot.title=self.filepath
269 def deflection(self):
270 #interface for correct plotmanip and others
271 deflectionchunk=DataChunk(self._deflection())
272 return deflectionchunk.ext(),deflectionchunk.ret()