4 Library for interpreting Picoforce force spectroscopy files.
6 Copyright (C) 2006 Massimo Sandal (University of Bologna, Italy).
8 This program is released under the GNU General Public License version 2.
import re
import struct

from scipy import arange

#from .. import libhooke as lh
from .. import libhooke as lh  # re-enabled: lh.get_file_path() is used in __init__
from .. import curve as lhc
19 __version__='0.0.0.20090923'
class DataChunk(list):
    '''List subclass giving ext() and ret() views of a Picoforce data chunk.

    A chunk stores the extension ramp followed by the retraction ramp, so
    each half is simply one half of the list.
    NOTE(review): the ret() method fell outside this copy and was
    reconstructed symmetrically to ext() -- confirm against upstream.
    '''

    def ext(self):
        '''Return the extension (first) half of the data.'''
        # // keeps the index an int on Python 3 (plain / would make a float)
        halflen = len(self) // 2
        return self[0:halflen]

    def ret(self):
        '''Return the retraction (second) half of the data.'''
        halflen = len(self) // 2
        return self[halflen:]
33 class picoforceDriver(lhc.Driver):
35 #Construction and other special methods
def __init__(self, filename):
    '''
    Open *filename* both as text (for the ASCII header) and as binary
    (for the raw 16-bit data chunks).
    '''
    filename = lh.get_file_path(filename)
    # Two handles on the same file: headers are parsed line-by-line from
    # self.textfile, raw samples are read from self.binfile.
    # The Python 2-only file() builtin was replaced by the equivalent open().
    self.textfile = open(filename)
    self.binfile = open(filename, 'rb')

    # The 0,1,2 data chunks are:
    # (description truncated in this copy -- presumably 0: F vs t,
    #  1: Z vs t, 2: F vs Z; TODO confirm against upstream)

    self.filepath = filename
    self.filetype = 'picoforce'
    self.experiment = 'smfs'
58 #Hidden methods. These are meant to be used only by API functions. If needed, however,
59 #they can be called just like API methods.
61 def _get_samples_line(self):
63 Gets the samples per line parameters in the file, to understand trigger behaviour.
67 samps_expr=re.compile(".*Samps")
70 for line in self.textfile.readlines():
71 if samps_expr.match(line):
73 samps=int(line.split()[2]) #the third word splitted is the offset (in bytes)
74 samps_values.append(samps)
78 #We raise a flag for the fact we meet an offset, otherwise we would take spurious data length arguments.
80 return int(samps_values[0])
82 def _get_chunk_coordinates(self):
84 This method gets the coordinates (offset and length) of a data chunk in our
87 It returns a list containing two tuples:
88 the first element of each tuple is the data_offset, the second is the corresponding
91 In near future probably each chunk will get its own data structure, with
92 offset, size, type, etc.
96 offset_expr=re.compile(".*Data offset")
97 length_expr=re.compile(".*Data length")
103 for line in self.textfile.readlines():
105 if offset_expr.match(line):
106 offset=int(line.split()[2]) #the third word splitted is the offset (in bytes)
107 data_offsets.append(offset)
108 #We raise a flag for the fact we meet an offset, otherwise we would take spurious data length arguments.
111 #same for the data length
112 if length_expr.match(line) and flag_offset:
113 size=int(line.split()[2])
114 data_sizes.append(size)
115 #Put down the offset flag until the next offset is met.
118 return zip(data_offsets,data_sizes)
def _get_data_chunk(self, whichchunk):
    '''
    Read data chunk *whichchunk* (0,1,2) and convert it to 16-bit signed
    ints, returned as a DataChunk.
    '''
    offset, size = self._get_chunk_coordinates()[whichchunk]

    self.binfile.seek(offset)
    raw_chunk = self.binfile.read(size)

    my_chunk = []
    for data_position in range(0, len(raw_chunk), 2):
        data_unit_bytes = raw_chunk[data_position:data_position + 2]
        # 'h' unpacks 2 bytes as a signed 16-bit int; unpack returns a
        # 1-tuple, so take element [0]
        data_unit = struct.unpack('h', data_unit_bytes)[0]
        my_chunk.append(data_unit)

    return DataChunk(my_chunk)
140 def _get_Zscan_info(self,index):
142 gets the Z scan informations needed to interpret the data chunk.
143 These info come from the general section, BEFORE individual chunk headers.
145 By itself, the function will parse for three parameters.
146 (index) that tells the function what to return when called by
148 index=0 : returns Zscan_V_LSB
149 index=1 : returns Zscan_V_start
150 index=2 : returns Zscan_V_size
152 self.textfile.seek(0)
154 ciaoforcelist_expr=re.compile(".*Ciao force")
155 zscanstart_expr=re.compile(".*@Z scan start")
156 zscansize_expr=re.compile(".*@Z scan size")
160 for line in self.textfile.readlines():
161 if ciaoforcelist_expr.match(line):
162 ciaoforce_flag=1 #raise a flag: zscanstart and zscansize params to read are later
164 if ciaoforce_flag and zscanstart_expr.match(line):
165 raw_Zscanstart_line=line.split()
167 if ciaoforce_flag and zscansize_expr.match(line):
168 raw_Zscansize_line=line.split()
172 for itemscanstart,itemscansize in zip(raw_Zscanstart_line,raw_Zscansize_line):
173 Zscanstart_line.append(itemscanstart.strip('[]()'))
174 Zscansize_line.append(itemscansize.strip('[]()'))
176 Zscan_V_LSB=float(Zscanstart_line[6])
177 Zscan_V_start=float(Zscanstart_line[8])
178 Zscan_V_size=float(Zscansize_line[8])
180 return (Zscan_V_LSB,Zscan_V_start,Zscan_V_size)[index]
182 def _get_Z_magnify_scale(self,whichchunk):
184 gets Z scale and Z magnify
185 Here we get Z scale/magnify from the 'whichchunk' only.
187 TODO: make it coherent with data_chunks syntaxis (0,1,2)
189 In future, should we divide the *file* itself into chunk descriptions and gain
190 true chunk data structures?
192 self.textfile.seek(0)
194 z_scale_expr=re.compile(".*@4:Z scale")
195 z_magnify_expr=re.compile(".*@Z magnify")
197 ramp_size_expr=re.compile(".*@4:Ramp size")
198 ramp_offset_expr=re.compile(".*@4:Ramp offset")
204 for line in self.textfile.readlines():
205 if z_magnify_expr.match(line):
207 if occurrences==whichchunk:
209 raw_z_magnify_expression=line.split()
213 if found_right and z_scale_expr.match(line):
214 raw_z_scale_expression=line.split()
215 if found_right and ramp_size_expr.match(line):
216 raw_ramp_size_expression=line.split()
217 if found_right and ramp_offset_expr.match(line):
218 raw_ramp_offset_expression=line.split()
220 return float(raw_z_magnify_expression[5]),float(raw_z_scale_expression[7]), float(raw_ramp_size_expression[7]), float(raw_ramp_offset_expression[7]), float(raw_z_scale_expression[5][1:])
224 #These are the methods that are meant to be called from external apps.
def LSB_to_volt(self, chunknum, voltrange=20):
    '''
    Convert the LSB data of a given chunk (chunknum=0,1,2) into volts.
    First step to get the deflection and the force.

    Syntax: item.LSB_to_volt(chunknum, [voltrange])

    The voltrange is by default set to 20 V.
    '''
    # 16-bit samples: full scale (65535 LSB) maps onto the voltage range
    return DataChunk([((float(lsb) / 65535) * voltrange) for lsb in self.data_chunks[chunknum]])
def LSB_to_deflection(self, chunknum, deflsensitivity=None, voltrange=20):
    '''
    Convert the LSB data into deflection (meters).

    Syntax: item.LSB_to_deflection(chunknum, [deflection sensitivity], [voltrange])

    chunknum is the chunk to parse (0,1,2).
    The deflection sensitivity defaults to the one parsed from the file;
    voltrange defaults to 20 V.
    '''
    if deflsensitivity is None:
        deflsensitivity = self.get_deflection_sensitivity()

    lsbvolt = self.LSB_to_volt(chunknum)
    return DataChunk([volt * deflsensitivity for volt in lsbvolt])
def deflection(self):
    '''
    Get the actual force curve deflection, as (extension, retraction).
    '''
    deflchunk = self.LSB_to_deflection(2)
    return deflchunk.ext(), deflchunk.ret()
def LSB_to_force(self, chunknum=2, Kspring=None, voltrange=20):
    '''
    Convert the LSB data (of deflection) into force (newtons).

    Syntax: item.LSB_to_force([chunknum], [spring constant], [voltrange])

    chunknum is the chunk to parse (0,1,2), by default 2.
    The spring constant defaults to the one parsed from the file;
    voltrange defaults to 20 V.
    '''
    # only look the constant up when the caller did not supply one
    # (as transcribed, the parameter was unconditionally overwritten --
    # the guard line was lost in this copy)
    if Kspring is None:
        Kspring = self.get_spring_constant()

    lsbdefl = self.LSB_to_deflection(chunknum)
    return DataChunk([(meter * Kspring) for meter in lsbdefl])
def get_Zscan_V_start(self):
    '''Return the Z scan start value (volts) parsed from the header.'''
    return self._get_Zscan_info(1)
def get_Zscan_V_size(self):
    '''Return the Z scan size value (volts) parsed from the header.'''
    return self._get_Zscan_info(2)
286 def get_Z_scan_sensitivity(self):
290 self.textfile.seek(0)
292 z_sensitivity_expr=re.compile(".*@Sens. Zsens")
294 for line in self.textfile.readlines():
295 if z_sensitivity_expr.match(line):
296 z_sensitivity=float(line.split()[3])
297 #return it in SI units (that is: m/V, not nm/V)
298 return z_sensitivity*(10**(-9))
def get_Z_magnify(self, whichchunk):
    '''
    Get the Z magnify factor. Normally it is 1; exact use unknown as of
    2006-01-13.
    '''
    return self._get_Z_magnify_scale(whichchunk)[0]
def get_Z_scale(self, whichchunk):
    '''Get the Z scale for *whichchunk*. (Original docstring truncated in this copy.)'''
    return self._get_Z_magnify_scale(whichchunk)[1]
def get_ramp_size(self, whichchunk):
    '''
    Get the -user defined- ramp size.
    '''
    return self._get_Z_magnify_scale(whichchunk)[2]
def get_ramp_offset(self, whichchunk):
    '''Get the ramp offset. (Original docstring truncated in this copy.)'''
    return self._get_Z_magnify_scale(whichchunk)[3]
def get_Z_scale_LSB(self, whichchunk):
    '''
    Get the LSB-to-volt conversion factor of the Z data
    (the so-called hard-scale in the Nanoscope documentation).
    '''
    return self._get_Z_magnify_scale(whichchunk)[4]
332 def get_deflection_sensitivity(self):
334 gets deflection sensitivity
336 self.textfile.seek(0)
338 def_sensitivity_expr=re.compile(".*@Sens. DeflSens")
340 for line in self.textfile.readlines():
341 if def_sensitivity_expr.match(line):
342 def_sensitivity=float(line.split()[3])
344 #return it in SI units (that is: m/V, not nm/V)
345 return def_sensitivity*(10**(-9))
347 def get_spring_constant(self):
349 gets spring constant.
350 We actually find *three* spring constant values, one for each data chunk (F/t, Z/t, F/z).
351 They are normally all equal, but we retain all three for future...
353 self.textfile.seek(0)
355 springconstant_expr=re.compile(".*Spring Constant")
359 for line in self.textfile.readlines():
360 if springconstant_expr.match(line):
361 constants.append(float(line.split()[2]))
365 def get_Zsensorsens(self):
367 gets Zsensorsens for Z data.
369 This is the sensitivity needed to convert the LSB data in nanometers for the Z-vs-T data chunk.
371 self.textfile.seek(0)
373 zsensorsens_expr=re.compile(".*Sens. ZSensorSens")
375 for line in self.textfile.readlines():
376 if zsensorsens_expr.match(line):
377 zsensorsens_raw_expression=line.split()
378 #we must take only first occurrence, so we exit from the cycle immediately
381 return (float(zsensorsens_raw_expression[3]))*(10**(-9))
def Z_data(self):
    '''
    Return the converted ext and ret Z curves.
    They are on the second chunk (Z vs t).

    NOTE(review): the def line and trailing return fell outside this
    copy; reconstructed from the visible body -- confirm upstream.
    '''
    #Zmagnify_zt=self.get_Z_magnify(2)
    #Zscale_zt=self.get_Z_scale(2)
    Zlsb_zt = self.get_Z_scale_LSB(2)
    #rampsize_zt=self.get_ramp_size(2)
    #rampoffset_zt=self.get_ramp_offset(2)
    zsensorsens = self.get_Zsensorsens()

    # The magic formula that converts the Z data:
    #   meters = LSB * V_lsb_conversion_factor * ZSensorSens
    z_curves = ([item * Zlsb_zt * zsensorsens for item in self.data_chunks[1].ext()],
                [item * Zlsb_zt * zsensorsens for item in self.data_chunks[1].ret()])
    z_curves = [DataChunk(item) for item in z_curves]
    return z_curves
def Z_extremes(self):
    '''
    Return the extremes of the Z values, as a dict:
    {'ext': (first, last), 'ret': (first, last)}.
    '''
    zcurves = self.Z_data()
    z_extremes = {}
    z_extremes['ext'] = zcurves[0][0], zcurves[0][-1]
    z_extremes['ret'] = zcurves[1][0], zcurves[1][-1]
    return z_extremes
def Z_step(self):
    '''
    Return the calculated step between the Z values, as
    (ext_step, ret_step).

    NOTE(review): the def line and dict initialisations fell outside
    this copy; reconstructed -- confirm upstream.
    '''
    zrange = {}
    zpoints = {}

    z_extremes = self.Z_extremes()

    zrange['ext'] = abs(z_extremes['ext'][0] - z_extremes['ext'][1])
    zrange['ret'] = abs(z_extremes['ret'][0] - z_extremes['ret'][1])

    # We must take 1 off the calculated zpoints, or arange(start,stop,step)
    # gives one point more: with 1000 points we would get 1001.
    # For cleanness the fix should really be where arange is used, but oh well...
    zpoints['ext'] = len(self.Z_data()[0]) - 1
    zpoints['ret'] = len(self.Z_data()[1]) - 1
    #this syntax must become coherent!!
    return (zrange['ext'] / zpoints['ext']), (zrange['ret'] / zpoints['ret'])
def Z_domains(self):
    '''
    Return the Z domains on which to plot the force data.

    The Z domains are returned as a single long DataChunk extended list:
    the extension and retraction parts can be extracted with the ext()
    and ret() methods.

    NOTE(review): the def line and the original except clause fell
    outside this copy; the warning is kept and the error re-raised --
    confirm against upstream.
    '''
    x1step = self.Z_step()[0]
    x2step = self.Z_step()[1]

    try:
        xext = arange(self.Z_extremes()['ext'][0], self.Z_extremes()['ext'][1], -x1step)
        xret = arange(self.Z_extremes()['ret'][0], self.Z_extremes()['ret'][1], -x2step)
    except Exception:
        print('picoforce.py: Warning. xext, xret domains cannot be extracted.')
        raise

    if not (len(xext) == len(xret)):
        # Mismatched domains make the plot unreliable; warn loudly and fall
        # back to the retraction domain for both halves.
        print("picoforce.py: Warning. Extension and retraction domains have different sizes.")
        print("length extension: ", len(xext))
        print("length retraction: ", len(xret))
        print("You cannot trust the resulting curve.")
        print("Until a solution is found, I substitute the ext domain with the ret domain. Sorry.")
        xext = xret

    return DataChunk(xext.tolist() + xret.tolist())
def Z_scan_size(self):
    '''Return the Z scan size in meters (header volts x sensitivity).'''
    return self.get_Zscan_V_size() * self.get_Z_scan_sensitivity()
def Z_scan_start(self):
    '''
    Return the Z scan start position in meters (header volts x sensitivity).

    NOTE(review): the def line fell outside this copy; the name mirrors
    the Z_scan_size() sibling -- confirm against upstream.
    '''
    return self.get_Zscan_V_start() * self.get_Z_scan_sensitivity()
def ramp_size(self, whichchunk):
    '''
    To be implemented if needed.

    Raises:
        NotImplementedError: always.
    '''
    # raising a plain string is a TypeError in modern Python; use the
    # dedicated exception type instead
    raise NotImplementedError("Not implemented yet.")
def ramp_offset(self, whichchunk):
    '''
    To be implemented if needed.

    Raises:
        NotImplementedError: always.
    '''
    # raising a plain string is a TypeError in modern Python; use the
    # dedicated exception type instead
    raise NotImplementedError("Not implemented yet.")
def detriggerize(self, forcext):
    '''
    Cut away the trigger-induced s**t on the extension curve.

    Walks backwards (every other sample) from the end of *forcext* while
    samples stay above the first value, and returns the index where the
    genuine data ends.

    NOTE(review): the loop tail and return fell outside this copy; the
    cutindex/break reconstruction follows the visible condition --
    confirm against upstream.
    '''
    cutindex = 2
    startvalue = forcext[0]

    for index in range(len(forcext) - 1, 2, -2):
        if forcext[index] > startvalue:
            cutindex = index
        else:
            break

    return cutindex
def is_me(self):
    '''
    Self-identification of file type magic.

    Reads the first 30 bytes of the file; on a positive match the three
    data chunks are read and cached in self.data_chunks.
    NOTE(review): the def line and return statements fell outside this
    copy; reconstructed (True/False) -- confirm against upstream.
    '''
    # open() replaces the Python 2-only file() builtin
    curve_file = open(self.filepath)
    header = curve_file.read(30)
    curve_file.close()

    if header[2:17] == 'Force file list':  # header of a picoforce file
        self.data_chunks = [self._get_data_chunk(num) for num in [0, 1, 2]]
        return True
    return False
def close_all(self):
    '''
    Explicitly close all open file handles.

    NOTE(review): the def line fell outside this copy; the name follows
    the hooke driver convention (close_all) -- confirm against upstream.
    '''
    self.textfile.close()
    # the binary handle opened in __init__ must be released as well
    self.binfile.close()
def default_plots(self):
    '''
    Create the default PlotObject for this curve: (Z, force) vectors for
    the extension and retraction halves, trimmed to the samples-per-line
    count from the header.
    '''
    force = self.LSB_to_force()
    zdomain = self.Z_domains()

    samples = self._get_samples_line()
    #cutindex=self.detriggerize(force.ext())

    main_plot = lhc.PlotObject()

    main_plot.vectors = [[zdomain.ext()[0:samples], force.ext()[0:samples]],
                         [zdomain.ret()[0:samples], force.ret()[0:samples]]]
    main_plot.normalize_vectors()
    main_plot.units = ['meters', 'newton']
    main_plot.destination = 0
    # BUG FIX: the original wrote filename and styles to a misspelled
    # 'main_plit', leaving the real plot object without them.
    main_plot.filename = self.filepath
    main_plot.title = self.filepath
    main_plot.colors = ['red', 'blue']
    main_plot.styles = ['plot', 'plot']

    # NOTE(review): the tail of this method fell outside this copy; hooke
    # drivers return a list of plots -- confirm against upstream.
    return [main_plot]