Library for interpreting Picoforce force spectroscopy files.

Copyright (C) 2006 Massimo Sandal (University of Bologna, Italy).

This program is released under the GNU General Public License version 2.
14 from scipy import arange
16 import libhookecurve as lhc
18 __version__='0.0.0.20080404'
class DataChunk(list):
    '''Dummy class to provide ext and ret methods to the data list.

    A Picoforce data chunk stores the extension curve followed by the
    retraction curve; each half is accessed with ext() / ret().
    '''

    def ext(self):
        '''Return the extension (first) half of the data.'''
        #floor division so odd-length chunks still split with integer indices
        halflen=len(self)//2
        return self[0:halflen]

    def ret(self):
        '''Return the retraction (second) half of the data.'''
        halflen=len(self)//2
        return self[halflen:]
class picoforceDriver(lhc.Driver):
    #Construction and other special methods

    def __init__(self,filename):
        '''
        Open the Picoforce file and store handles and identification data.

        filename -- path of the Picoforce file to interpret.

        NOTE(review): uses the Python 2 file() builtin; two handles on the
        same file are kept open, one text-mode for header parsing and one
        binary for the raw data chunks.
        '''
        self.textfile=file(filename)
        self.binfile=file(filename,'rb')

        #The 0,1,2 data chunks are:
        # presumably 0: F/t, 1: Z/t, 2: F/z -- TODO confirm against the
        # chunk indices used by Z_data() (chunk 1) and LSB_to_force() (chunk 2)

        self.filepath=filename

        #identification fields read by the surrounding application
        self.filetype='picoforce'
        self.experiment='smfs'
#Hidden methods. These are meant to be used only by API functions.
#If needed, however, they can be called just like API methods.
def _get_samples_line(self):
    '''
    Gets the samples-per-line parameter in the file, to understand trigger behaviour.

    Returns the first "Samps" value found in the text header, as an int.
    '''
    #rewind: other parsers may already have consumed the text handle
    self.textfile.seek(0)

    samps_expr=re.compile(".*Samps")

    samps_values=[]
    for line in self.textfile.readlines():
        if samps_expr.match(line):
            samps=int(line.split()[2]) #the third word of the line is the value
            samps_values.append(samps)

    #only the first occurrence is relevant
    return int(samps_values[0])
def _get_chunk_coordinates(self):
    '''
    This method gets the coordinates (offset and length) of the data chunks in our
    Picoforce file.

    It returns a list of tuples:
    the first element of each tuple is the data_offset, the second is the corresponding
    data size.

    In near future probably each chunk will get its own data structure, with
    offset, size, type, etc.
    '''
    self.textfile.seek(0)

    offset_expr=re.compile(".*Data offset")
    length_expr=re.compile(".*Data length")

    data_offsets=[]
    data_sizes=[]
    #flag raised when an offset has been met, so that only the matching
    #"Data length" line is taken (the header contains spurious ones)
    flag_offset=0

    for line in self.textfile.readlines():

        if offset_expr.match(line):
            offset=int(line.split()[2]) #the third word of the line is the offset (in bytes)
            data_offsets.append(offset)
            #We raise a flag for the fact we met an offset, otherwise we would take spurious data length arguments.
            flag_offset=1

        #same for the data length
        if length_expr.match(line) and flag_offset:
            size=int(line.split()[2])
            data_sizes.append(size)
            #Put down the offset flag until the next offset is met.
            flag_offset=0

    #wrap in list() so callers can index the result (zip is lazy on Python 3)
    return list(zip(data_offsets,data_sizes))
def _get_data_chunk(self,whichchunk):
    '''
    reads a data chunk and converts it in 16bit signed int.

    whichchunk -- index (0,1,2) of the chunk to read.
    Returns a DataChunk of signed 16-bit integers.
    '''
    offset,size=self._get_chunk_coordinates()[whichchunk]

    self.binfile.seek(offset)
    raw_chunk=self.binfile.read(size)

    my_chunk=[]
    for data_position in range(0,len(raw_chunk),2):
        data_unit_bytes=raw_chunk[data_position:data_position+2]
        #The unpack function converts 2-bytes in a signed int ('h').
        #we use output[0] because unpack returns a 1-value tuple, and we want the number only
        data_unit=struct.unpack('h',data_unit_bytes)[0]
        my_chunk.append(data_unit)

    return DataChunk(my_chunk)
def _get_Zscan_info(self,index):
    '''
    gets the Z scan informations needed to interpret the data chunk.
    These info come from the general section, BEFORE individual chunk headers.

    By itself, the function parses three parameters; the argument
    (index) tells the function what to return when called:

    index=0 : returns Zscan_V_LSB
    index=1 : returns Zscan_V_start
    index=2 : returns Zscan_V_size
    '''
    self.textfile.seek(0)

    ciaoforcelist_expr=re.compile(".*Ciao force")
    zscanstart_expr=re.compile(".*@Z scan start")
    zscansize_expr=re.compile(".*@Z scan size")

    ciaoforce_flag=0
    for line in self.textfile.readlines():
        if ciaoforcelist_expr.match(line):
            ciaoforce_flag=1 #raise a flag: zscanstart and zscansize params to read are later

        if ciaoforce_flag and zscanstart_expr.match(line):
            raw_Zscanstart_line=line.split()

        if ciaoforce_flag and zscansize_expr.match(line):
            raw_Zscansize_line=line.split()

    Zscanstart_line=[]
    Zscansize_line=[]
    #strip brackets/parentheses so the numeric fields can be parsed as floats
    for itemscanstart,itemscansize in zip(raw_Zscanstart_line,raw_Zscansize_line):
        Zscanstart_line.append(itemscanstart.strip('[]()'))
        Zscansize_line.append(itemscansize.strip('[]()'))

    Zscan_V_LSB=float(Zscanstart_line[6])
    Zscan_V_start=float(Zscanstart_line[8])
    Zscan_V_size=float(Zscansize_line[8])

    return (Zscan_V_LSB,Zscan_V_start,Zscan_V_size)[index]
def _get_Z_magnify_scale(self,whichchunk):
    '''
    gets Z scale and Z magnify
    Here we get Z scale/magnify from the 'whichchunk' only.

    TODO: make it coherent with data_chunks syntaxis (0,1,2)

    In future, should we divide the *file* itself into chunk descriptions and gain
    true chunk data structures?

    Returns (Z magnify, Z scale, ramp size, ramp offset, Z scale LSB factor).
    '''
    self.textfile.seek(0)

    z_scale_expr=re.compile(".*@4:Z scale")
    z_magnify_expr=re.compile(".*@Z magnify")

    ramp_size_expr=re.compile(".*@4:Ramp size")
    ramp_offset_expr=re.compile(".*@4:Ramp offset")

    #count "@Z magnify" occurrences so we only read the parameters that
    #belong to the requested chunk section
    occurrences=0
    found_right=0

    for line in self.textfile.readlines():
        if z_magnify_expr.match(line):
            occurrences+=1
            if occurrences==whichchunk:
                found_right=1
                raw_z_magnify_expression=line.split()
            else:
                found_right=0

        if found_right and z_scale_expr.match(line):
            raw_z_scale_expression=line.split()
        if found_right and ramp_size_expr.match(line):
            raw_ramp_size_expression=line.split()
        if found_right and ramp_offset_expr.match(line):
            raw_ramp_offset_expression=line.split()

    return float(raw_z_magnify_expression[5]),float(raw_z_scale_expression[7]), float(raw_ramp_size_expression[7]), float(raw_ramp_offset_expression[7]), float(raw_z_scale_expression[5][1:])
223 #These are the methods that are meant to be called from external apps.
def LSB_to_volt(self,chunknum,voltrange=20):
    '''
    Converts the LSB data of a given chunk (chunknum=0,1,2) in volts.
    First step to get the deflection and the force.

    Syntax: item.LSB_to_volt(chunknum, [voltrange])

    The voltrange is by default set to 20 V.
    '''
    #16-bit LSB values span 65535 counts over the full voltage range
    return DataChunk([((float(lsb)/65535)*voltrange) for lsb in self.data_chunks[chunknum]])
def LSB_to_deflection(self,chunknum,deflsensitivity=None,voltrange=20):
    '''
    Converts the LSB data in deflection (meters).

    Syntax: item.LSB_to_deflection(chunknum, [deflection sensitivity], [voltrange])

    chunknum is the chunk you want to parse (0,1,2)

    The deflection sensitivity by default is the one parsed from the file.
    The voltrange is by default set to 20 V.
    '''
    if deflsensitivity is None:
        #fall back to the sensitivity stored in the file header (m/V)
        deflsensitivity=self.get_deflection_sensitivity()

    #forward voltrange: previously it was silently ignored here
    lsbvolt=self.LSB_to_volt(chunknum,voltrange)
    return DataChunk([volt*deflsensitivity for volt in lsbvolt])
def deflection(self):
    '''
    Get the actual force curve deflection.

    Returns (extension, retraction) deflection data, in meters.
    '''
    #chunk 2 is the deflection data (same default chunk as LSB_to_force)
    deflchunk= self.LSB_to_deflection(2)
    return deflchunk.ext(),deflchunk.ret()
def LSB_to_force(self,chunknum=2,Kspring=None,voltrange=20):
    '''
    Converts the LSB data (of deflection) in force (newtons).

    Syntax: item.LSB_to_force([chunknum], [spring constant], [voltrange])

    chunknum is the chunk you want to parse (0,1,2). The chunk used is by default 2.
    The spring constant by default is the one parsed from the file.
    The voltrange is by default set to 20 V.
    '''
    #only read the spring constant from the file when the caller did not
    #supply one; previously the argument was unconditionally overwritten
    if Kspring is None:
        Kspring=self.get_spring_constant()

    #Hooke's law: F = k * x
    lsbdefl=self.LSB_to_deflection(chunknum,voltrange=voltrange)
    return DataChunk([(meter*Kspring) for meter in lsbdefl])
def get_Zscan_V_start(self):
    '''Returns the Z scan start value, in volts (Zscan_V_start).'''
    return self._get_Zscan_info(1)
def get_Zscan_V_size(self):
    '''Returns the Z scan size, in volts (Zscan_V_size).'''
    return self._get_Zscan_info(2)
def get_Z_scan_sensitivity(self):
    '''
    Gets the Z scan sensitivity ("@Sens. Zsens" header parameter), in m/V.
    '''
    self.textfile.seek(0)

    z_sensitivity_expr=re.compile(".*@Sens. Zsens")

    #if the parameter occurs more than once, the last occurrence wins
    for line in self.textfile.readlines():
        if z_sensitivity_expr.match(line):
            z_sensitivity=float(line.split()[3])
    #return it in SI units (that is: m/V, not nm/V)
    return z_sensitivity*(10**(-9))
def get_Z_magnify(self,whichchunk):
    '''
    Gets the Z magnify factor. Normally it is 1, unknown exact use as of 2006-01-13
    '''
    return self._get_Z_magnify_scale(whichchunk)[0]
def get_Z_scale(self,whichchunk):
    '''
    Gets the Z scale of the given chunk.
    '''
    return self._get_Z_magnify_scale(whichchunk)[1]
def get_ramp_size(self,whichchunk):
    '''
    Gets the -user defined- ramp size
    '''
    return self._get_Z_magnify_scale(whichchunk)[2]
def get_ramp_offset(self,whichchunk):
    '''
    Gets the -user defined- ramp offset
    '''
    return self._get_Z_magnify_scale(whichchunk)[3]
def get_Z_scale_LSB(self,whichchunk):
    '''
    Gets the LSB-to-volt conversion factor of the Z data.
    (so called hard-scale in the Nanoscope documentation)
    '''
    return self._get_Z_magnify_scale(whichchunk)[4]
def get_deflection_sensitivity(self):
    '''
    gets deflection sensitivity ("@Sens. DeflSens" header parameter), in m/V.
    '''
    self.textfile.seek(0)

    def_sensitivity_expr=re.compile(".*@Sens. DeflSens")

    #if the parameter occurs more than once, the last occurrence wins
    for line in self.textfile.readlines():
        if def_sensitivity_expr.match(line):
            def_sensitivity=float(line.split()[3])

    #return it in SI units (that is: m/V, not nm/V)
    return def_sensitivity*(10**(-9))
def get_spring_constant(self):
    '''
    gets spring constant.
    We actually find *three* spring constant values, one for each data chunk (F/t, Z/t, F/z).
    They are normally all equal, but we retain all three for future use;
    only the first one is returned.
    '''
    self.textfile.seek(0)

    springconstant_expr=re.compile(".*Spring Constant")

    constants=[]
    for line in self.textfile.readlines():
        if springconstant_expr.match(line):
            constants.append(float(line.split()[2]))

    return constants[0]
def get_Zsensorsens(self):
    '''
    gets Zsensorsens for Z data.

    This is the sensitivity needed to convert the LSB data in nanometers for the Z-vs-T data chunk.
    Returned in SI units (m/LSB).
    '''
    self.textfile.seek(0)

    zsensorsens_expr=re.compile(".*Sens. ZSensorSens")

    for line in self.textfile.readlines():
        if zsensorsens_expr.match(line):
            zsensorsens_raw_expression=line.split()
            #we must take only the first occurrence, so we exit from the cycle immediately
            break

    return (float(zsensorsens_raw_expression[3]))*(10**(-9))
def Z_data(self):
    '''
    returns converted ext and ret Z curves.
    They're on the second chunk (Z vs t).
    '''
    Zlsb_zt=self.get_Z_scale_LSB(2)
    zsensorsens=self.get_Zsensorsens()

    #The magic formula that converts the Z data is:
    #
    #meters = LSB * V_lsb_conversion_factor * ZSensorSens
    #
    z_curves=[item*Zlsb_zt*zsensorsens for item in self.data_chunks[1].ext()],[item*Zlsb_zt*zsensorsens for item in self.data_chunks[1].ret()]
    z_curves=[DataChunk(item) for item in z_curves]
    return z_curves
def Z_extremes(self):
    '''
    returns the extremes of the Z values.

    Returns a dict with 'ext' and 'ret' keys, each holding a
    (first value, last value) tuple of the corresponding curve.
    '''
    zcurves=self.Z_data()
    z_extremes={}
    z_extremes['ext']=zcurves[0][0],zcurves[0][-1]
    z_extremes['ret']=zcurves[1][0],zcurves[1][-1]
    return z_extremes
def Z_step(self):
    '''
    returns the calculated step between the Z values.

    Returns an (extension step, retraction step) tuple.
    '''
    zrange={}
    zpoints={}

    z_extremes=self.Z_extremes()

    zrange['ext']=abs(z_extremes['ext'][0]-z_extremes['ext'][1])
    zrange['ret']=abs(z_extremes['ret'][0]-z_extremes['ret'][1])

    #We must take 1 from the calculated zpoints, or when I use the arange function it gives me a point more
    #with the step. That is, if I have 1000 points, and I use arange(start,stop,step), I have 1001 points...
    #For cleanness, solution should really be when using arange, but oh well...
    zpoints['ext']=len(self.Z_data()[0])-1
    zpoints['ret']=len(self.Z_data()[1])-1
    #this syntax must become coherent!!
    return (zrange['ext']/zpoints['ext']),(zrange['ret']/zpoints['ret'])
438 returns the Z domains on which to plot the force data.
440 The Z domains are returned as a single long DataChunk() extended list. The extension and retraction part
441 can be extracted using ext() and ret() methods.
443 x1step=self.Z_step()[0]
444 x2step=self.Z_step()[1]
447 xext=arange(self.Z_extremes()['ext'][0],self.Z_extremes()['ext'][1],-x1step)
448 xret=arange(self.Z_extremes()['ret'][0],self.Z_extremes()['ret'][1],-x2step)
452 print 'picoforce.py: Warning. xext, xret domains cannot be extracted.'
454 if not (len(xext)==len(xret)):
457 print "picoforce.py: Warning. Extension and retraction domains have different sizes."
458 print "length extension: ", len(xext)
459 print "length retraction: ", len(xret)
460 print "You cannot trust the resulting curve."
461 print "Until a solution is found, I substitute the ext domain with the ret domain. Sorry."
464 return DataChunk(xext.tolist()+xret.tolist())
def Z_scan_size(self):
    '''Returns the Z scan size in meters (scan voltage size * Z sensitivity).'''
    return self.get_Zscan_V_size()*self.get_Z_scan_sensitivity()
470 return self.get_Zscan_V_start()*self.get_Z_scan_sensitivity()
def ramp_size(self,whichchunk):
    '''
    to be implemented if needed

    Raises NotImplementedError unconditionally.
    '''
    #raising a bare string is illegal in modern Python (TypeError);
    #use the proper exception type for an unimplemented feature
    raise NotImplementedError("Not implemented yet.")
def ramp_offset(self,whichchunk):
    '''
    to be implemented if needed

    Raises NotImplementedError unconditionally.
    '''
    #raising a bare string is illegal in modern Python (TypeError);
    #use the proper exception type for an unimplemented feature
    raise NotImplementedError("Not implemented yet.")
def detriggerize(self, forcext):
    '''
    Cuts away the trigger-induced s**t on the extension curve.

    Scans the extension data backwards (every other point) while the values
    stay above the starting value, and returns the index at which the curve
    can be cut. Returns 2 when no trigger artifact is found.
    '''
    cutindex=2
    startvalue=forcext[0]

    for index in range(len(forcext)-1,2,-2):
        if forcext[index]>startvalue:
            cutindex=index
        else:
            #artifact region over: stop scanning
            break
    return cutindex
def is_me(self):
    '''
    self-identification of file type magic

    Returns True (and loads the three data chunks) if the file header
    matches a Picoforce file, False otherwise.
    '''
    curve_file=file(self.filepath)
    header=curve_file.read(30)
    curve_file.close()

    if header[2:17] == 'Force file list': #header of a picoforce file
        #the file is ours: eagerly load the three data chunks
        self.data_chunks=[self._get_data_chunk(num) for num in [0,1,2]]
        return True
    else:
        return False
def close_all(self):
    '''
    Explicitly closes all files
    '''
    self.textfile.close()
    #the binary handle opened in __init__ must be closed too,
    #otherwise it leaks
    self.binfile.close()
def default_plots(self):
    '''
    creates the default PlotObject

    Builds the force-vs-Z plot (extension and retraction) from the
    converted data, trimmed to the samples-per-line count.
    '''
    #force curve in newtons and its Z domain in meters
    force=self.LSB_to_force()
    zdomain=self.Z_domains()

    #trim both curves to the trigger-defined samples-per-line count
    samples=self._get_samples_line()

    #cutindex=self.detriggerize(force.ext())

    main_plot=lhc.PlotObject()

    main_plot.vectors=[[zdomain.ext()[0:samples], force.ext()[0:samples]],[zdomain.ret()[0:samples], force.ret()[0:samples]]]
    main_plot.normalize_vectors()
    main_plot.units=['meters','newton']
    main_plot.destination=0
    main_plot.title=self.filepath