6 Library for interpreting Picoforce force spectroscopy files.
8 Copyright 2006 by Massimo Sandal (University of Bologna, Italy)
9 with modifications by Dr. Rolf Schmidt (Concordia University, Canada)
11 This program is released under the GNU General Public License version 2.
17 from scipy import arange
19 import lib.libhooke as lh
24 __version__='0.0.0.20090923'
27 class DataChunk(list):
28 #Dummy class to provide ext and ret methods to the data list.
#NOTE(review): the ext()/ret() method headers and the computation of
#'halflen' are missing from this view of the file. The line below returns
#the first half of the list (presumably the extension half of the curve) --
#confirm against the full source.
32 return self[0:halflen]
38 class picoforceDriver(lib.driver.Driver):
#Driver for Nanoscope/Picoforce force-spectroscopy files: parses calibration
#parameters from the ASCII header and raw curve data from the binary chunks.
40 #Construction and other special methods
42 def __init__(self, filename):
#Resolve and store the path of the file to parse; the heavy parsing is done
#lazily by the _get_* helpers.
47 filename = lh.get_file_path(filename)
48 self.filename = filename
50 #The 0,1,2 data chunks are:
#NOTE(review): the description of the three chunks and the initialization of
#several other attributes are missing from this view of the file.
55 self.retract_velocity = None
#Identify the driver and the experiment type to the host application.
59 self.filetype = 'picoforce'
60 self.experiment = 'smfs'
62 #Hidden methods. These are meant to be used only by API functions. If needed, however,
63 #they can be called just like API methods.
65 def _get_samples_line(self):
#Parse the ASCII header for the "Samps" (samples per line) parameter, used
#to understand trigger behaviour. Returns the first value found, as an int.
67 Gets the samples per line parameters in the file, to understand trigger behaviour.
#NOTE(review): the initialization of samps_values (and, presumably, the
#closing of textfile) are missing from this view of the file.
69 textfile = file(self.filename)
71 samps_expr=re.compile(".*Samps")
74 for line in textfile.readlines():
75 if samps_expr.match(line):
77 samps=int(line.split()[2]) #the third word splitted is the offset (in bytes)
78 samps_values.append(samps)
82 #We raise a flag for the fact we meet an offset, otherwise we would take spurious data length arguments.
#Only the first occurrence is meaningful here.
86 return int(samps_values[0])
88 def _get_chunk_coordinates(self):
#Scan the ASCII header for "Data offset"/"Data length" pairs and return
#them zipped as (offset, size) tuples, one per binary data chunk.
90 This method gets the coordinates (offset and length) of a data chunk in our
93 It returns a list containing two tuples:
94 the first element of each tuple is the data_offset, the second is the corresponding
97 In near future probably each chunk will get its own data structure, with
98 offset, size, type, etc.
#NOTE(review): the initialization of data_offsets, data_sizes and
#flag_offset is missing from this view of the file.
100 textfile = file(self.filename)
102 offset_expr=re.compile(".*Data offset")
103 length_expr=re.compile(".*Data length")
109 for line in textfile.readlines():
111 if offset_expr.match(line):
112 offset=int(line.split()[2]) #the third word splitted is the offset (in bytes)
113 data_offsets.append(offset)
114 #We raise a flag for the fact we meet an offset, otherwise we would take spurious data length arguments.
117 #same for the data length
#A "Data length" line is only accepted right after an offset was seen,
#so spurious length lines elsewhere in the header are ignored.
118 if length_expr.match(line) and flag_offset:
119 size=int(line.split()[2])
120 data_sizes.append(size)
121 #Put down the offset flag until the next offset is met.
126 return zip(data_offsets,data_sizes)
128 def _get_data_chunk(self, whichchunk):
#Read binary chunk number `whichchunk` and decode it as a sequence of
#little 16-bit signed integers, returned wrapped in a DataChunk.
130 reads a data chunk and converts it in 16bit signed int.
#NOTE(review): the seek() to `offset` and the initialization of my_chunk
#are missing from this view of the file.
132 binfile = file(self.filename,'rb')
134 offset,size=self._get_chunk_coordinates()[whichchunk]
137 raw_chunk = binfile.read(size)
#Walk the raw bytes two at a time: each pair is one sample.
140 for data_position in range(0,len(raw_chunk),2):
141 data_unit_bytes=raw_chunk[data_position:data_position+2]
142 #The unpack function converts 2-bytes in a signed int ('h').
143 #we use output[0] because unpack returns a 1-value tuple, and we want the number only
144 data_unit=struct.unpack('h',data_unit_bytes)[0]
145 my_chunk.append(data_unit)
149 return DataChunk(my_chunk)
151 def _get_Zscan_info(self,index):
#Parse the general header section for the "@Z scan start"/"@Z scan size"
#parameters and return one of the three derived values, selected by index.
153 gets the Z scan informations needed to interpret the data chunk.
154 These info come from the general section, BEFORE individual chunk headers.
156 By itself, the function will parse for three parameters.
157 (index) that tells the function what to return when called by
159 index=0 : returns Zscan_V_LSB
160 index=1 : returns Zscan_V_start
161 index=2 : returns Zscan_V_size
#NOTE(review): the initializations of ciaoforce_flag, Zscanstart_line and
#Zscansize_line are missing from this view of the file.
163 textfile = file(self.filename)
165 ciaoforcelist_expr=re.compile(".*Ciao force")
166 zscanstart_expr=re.compile(".*@Z scan start")
167 zscansize_expr=re.compile(".*@Z scan size")
171 for line in textfile.readlines():
172 if ciaoforcelist_expr.match(line):
173 ciaoforce_flag=1 #raise a flag: zscanstart and zscansize params to read are later
175 if ciaoforce_flag and zscanstart_expr.match(line):
176 raw_Zscanstart_line=line.split()
178 if ciaoforce_flag and zscansize_expr.match(line):
179 raw_Zscansize_line=line.split()
#Strip bracket/parenthesis decoration from each token before converting.
183 for itemscanstart,itemscansize in zip(raw_Zscanstart_line,raw_Zscansize_line):
184 Zscanstart_line.append(itemscanstart.strip('[]()'))
185 Zscansize_line.append(itemscansize.strip('[]()'))
#Fixed token positions within the header lines hold the wanted numbers.
187 Zscan_V_LSB=float(Zscanstart_line[6])
188 Zscan_V_start=float(Zscanstart_line[8])
189 Zscan_V_size=float(Zscansize_line[8])
193 return (Zscan_V_LSB,Zscan_V_start,Zscan_V_size)[index]
195 def _get_Z_magnify_scale(self,whichchunk):
#Parse the per-chunk header section selected by `whichchunk` and return the
#tuple (Z magnify, Z scale, ramp size, ramp offset, Z scale LSB factor).
197 gets Z scale and Z magnify
198 Here we get Z scale/magnify from the 'whichchunk' only.
200 TODO: make it coherent with data_chunks syntax (0,1,2)
202 In future, should we divide the *file* itself into chunk descriptions and gain
203 true chunk data structures?
#NOTE(review): the initializations of `occurrences` and `found_right`, and
#the increment of `occurrences`, are missing from this view of the file.
205 textfile = file(self.filename)
207 z_scale_expr=re.compile(".*@4:Z scale")
208 z_magnify_expr=re.compile(".*@Z magnify")
210 ramp_size_expr=re.compile(".*@4:Ramp size")
211 ramp_offset_expr=re.compile(".*@4:Ramp offset")
216 for line in textfile.readlines():
217 if z_magnify_expr.match(line):
#Each "@Z magnify" line marks a new chunk section; only the section whose
#ordinal matches `whichchunk` is read.
219 if occurrences==whichchunk:
221 raw_z_magnify_expression=line.split()
225 if found_right and z_scale_expr.match(line):
226 raw_z_scale_expression=line.split()
227 if found_right and ramp_size_expr.match(line):
228 raw_ramp_size_expression=line.split()
229 if found_right and ramp_offset_expr.match(line):
230 raw_ramp_offset_expression=line.split()
#Fixed token positions hold the values; the last item strips a leading
#character (presumably an opening parenthesis) before conversion.
234 return float(raw_z_magnify_expression[5]),float(raw_z_scale_expression[7]), float(raw_ramp_size_expression[7]), float(raw_ramp_offset_expression[7]), float(raw_z_scale_expression[5][1:])
238 #These are the methods that are meant to be called from external apps.
def LSB_to_volt(self, chunknum, voltrange=20):
    '''
    Convert the raw LSB samples of chunk *chunknum* (0, 1 or 2) to volts.

    First conversion step towards deflection and force.

    Syntax:
    item.LSB_to_volt(chunknum, [voltrange])

    The voltrange is by default set to 20 V.
    '''
    volts = [(float(raw) / 65535) * voltrange for raw in self.data_chunks[chunknum]]
    return DataChunk(volts)
def LSB_to_deflection(self, chunknum, deflsensitivity=None, voltrange=20):
    '''
    Convert the LSB data of chunk *chunknum* (0, 1, 2) into deflection (meters).

    Syntax:
    item.LSB_to_deflection(chunknum, [deflection sensitivity], [voltrange])

    When no deflection sensitivity is given, the one parsed from the file
    is used. The voltrange is by default set to 20 V.

    NOTE(review): voltrange is accepted but not forwarded to LSB_to_volt,
    which therefore always uses its own default.
    '''
    if deflsensitivity is None:
        deflsensitivity = self.get_deflection_sensitivity()
    deflections = [volt * deflsensitivity for volt in self.LSB_to_volt(chunknum)]
    return DataChunk(deflections)
def deflection(self):
    '''
    Return the force-curve deflection as an (extension, retraction) pair.

    The deflection data live in chunk 2.
    '''
    converted = self.LSB_to_deflection(2)
    return converted.ext(), converted.ret()
277 def LSB_to_force(self,chunknum=2,Kspring=None,voltrange=20):
#Convert LSB deflection data to force (newtons) via F = deflection * K.
279 Converts the LSB data (of deflection) in force (newtons).
282 item.LSB_to_force([chunknum], [spring constant], [voltrange])
284 chunknum is the chunk you want to parse (0,1,2). The chunk used is by default 2.
285 The spring constant by default is the one parsed from the file.
286 The voltrange is by default set to 20 V.
#NOTE(review): the 'if Kspring is None:' guard that should precede the next
#line (implied by the Kspring=None default) is missing from this view of the
#file -- confirm against the full source.
289 Kspring=self.get_spring_constant()
291 lsbdefl=self.LSB_to_deflection(chunknum)
292 return DataChunk([(meter*Kspring) for meter in lsbdefl])
def get_Zscan_V_start(self):
    '''Return the Z scan start value (in volts) parsed from the header.'''
    return self._get_Zscan_info(1)
def get_Zscan_V_size(self):
    '''Return the Z scan size (in volts) parsed from the header.'''
    return self._get_Zscan_info(2)
300 def get_Z_scan_sensitivity(self):
#Parse the "@Sens. Zsens" header line and return the Z scan sensitivity.
#NOTE(review): the loop-exit (break) and the file close are missing from
#this view of the file; presumably only the first match is kept.
304 textfile = file(self.filename)
306 z_sensitivity_expr=re.compile(".*@Sens. Zsens")
308 for line in textfile.readlines():
309 if z_sensitivity_expr.match(line):
310 z_sensitivity=float(line.split()[3])
314 #return it in SI units (that is: m/V, not nm/V)
315 return z_sensitivity*(10**(-9))
def get_Z_magnify(self, whichchunk):
    '''
    Return the Z magnify factor for *whichchunk*.

    Normally it is 1; exact use unknown as of 2006-01-13.
    '''
    return self._get_Z_magnify_scale(whichchunk)[0]
def get_Z_scale(self, whichchunk):
    '''Return the Z scale value for *whichchunk*.'''
    return self._get_Z_magnify_scale(whichchunk)[1]
def get_ramp_size(self, whichchunk):
    '''Return the user-defined ramp size for *whichchunk*.'''
    return self._get_Z_magnify_scale(whichchunk)[2]
def get_ramp_offset(self, whichchunk):
    '''Return the ramp offset for *whichchunk*.'''
    return self._get_Z_magnify_scale(whichchunk)[3]
def get_Z_scale_LSB(self, whichchunk):
    '''
    Return the LSB-to-volt conversion factor of the Z data for *whichchunk*
    (the so-called hard-scale in the Nanoscope documentation).
    '''
    return self._get_Z_magnify_scale(whichchunk)[4]
349 def get_deflection_sensitivity(self):
#Parse the "@Sens. DeflSens" header line for the deflection sensitivity.
351 gets deflection sensitivity
#NOTE(review): the loop-exit (break) and the file close are missing from
#this view of the file; presumably only the first match is kept.
353 textfile = file(self.filename)
355 def_sensitivity_expr=re.compile(".*@Sens. DeflSens")
357 for line in textfile.readlines():
358 if def_sensitivity_expr.match(line):
359 def_sensitivity=float(line.split()[3])
364 #return it in SI units (that is: m/V, not nm/V)
365 return def_sensitivity*(10**(-9))
367 def get_spring_constant(self):
#Collect every "Spring Constant" header value; the file lists one per data
#chunk (F/t, Z/t, F/z), normally all equal.
369 gets spring constant.
370 We actually find *three* spring constant values, one for each data chunk (F/t, Z/t, F/z).
371 They are normally all equal, but we retain all three for future...
#NOTE(review): the initialization of `constants` and the method's return
#statement are missing from this view of the file.
373 textfile = file(self.filename)
375 springconstant_expr=re.compile(".*Spring Constant")
379 for line in textfile.readlines():
380 if springconstant_expr.match(line):
381 constants.append(float(line.split()[2]))
387 def get_Zsensorsens(self):
#Parse "Sens. ZSensorSens" for the Z-sensor sensitivity (first occurrence
#only), returned in SI units (m, not nm).
389 gets Zsensorsens for Z data.
391 This is the sensitivity needed to convert the LSB data in nanometers for the Z-vs-T data chunk.
393 textfile = file(self.filename)
395 zsensorsens_expr=re.compile(".*Sens. ZSensorSens")
397 for line in textfile.readlines():
398 if zsensorsens_expr.match(line):
399 zsensorsens_raw_expression=line.split()
400 #we must take only first occurrence, so we exit from the cycle immediately
#NOTE(review): the break that exits the loop is missing from this view.
405 return (float(zsensorsens_raw_expression[3]))*(10**(-9))
#NOTE(review): this is the body of a method (Z_data, judging by callers
#below) whose 'def' line is missing from this view of the file, as is its
#final 'return z_curves'.
409 returns converted ext and ret Z curves.
410 They're on the second chunk (Z vs t).
412 #Zmagnify_zt=self.get_Z_magnify(2)
413 #Zscale_zt=self.get_Z_scale(2)
414 Zlsb_zt=self.get_Z_scale_LSB(2)
415 #rampsize_zt=self.get_ramp_size(2)
416 #rampoffset_zt=self.get_ramp_offset(2)
417 zsensorsens=self.get_Zsensorsens()
420 The magic formula that converts the Z data is:
422 meters = LSB * V_lsb_conversion_factor * ZSensorSens
425 #z_curves=[item*Zlsb_zt*zsensorsens for item in self.data_chunks[1].pair['ext']],[item*Zlsb_zt*zsensorsens for item in self.data_chunks[1].pair['ret']]
426 z_curves=[item*Zlsb_zt*zsensorsens for item in self.data_chunks[1].ext()],[item*Zlsb_zt*zsensorsens for item in self.data_chunks[1].ret()]
427 z_curves=[DataChunk(item) for item in z_curves]
430 def Z_extremes(self):
#Return the first/last Z values of the extension and retraction curves.
432 returns the extremes of the Z values
#NOTE(review): the initialization of the z_extremes dict and the return
#statement are missing from this view of the file.
434 zcurves=self.Z_data()
436 z_extremes['ext']=zcurves[0][0],zcurves[0][-1]
437 z_extremes['ret']=zcurves[1][0],zcurves[1][-1]
#NOTE(review): this is the body of a method (Z_step, judging by callers
#below) whose 'def' line and the initialization of the zrange/zpoints dicts
#are missing from this view of the file.
443 returns the calculated step between the Z values
448 z_extremes=self.Z_extremes()
450 zrange['ext']=abs(z_extremes['ext'][0]-z_extremes['ext'][1])
451 zrange['ret']=abs(z_extremes['ret'][0]-z_extremes['ret'][1])
453 #We must take 1 from the calculated zpoints, or when I use the arange function gives me a point more
454 #with the step. That is, if I have 1000 points, and I use arange(start,stop,step), I have 1001 points...
455 #For cleanness, solution should really be when using arange, but oh well...
456 zpoints['ext']=len(self.Z_data()[0])-1
457 zpoints['ret']=len(self.Z_data()[1])-1
458 #this syntax must become coherent!!
459 return (zrange['ext']/zpoints['ext']),(zrange['ret']/zpoints['ret'])
#NOTE(review): this is the body of a method (Z_domains, judging by the
#caller in default_plots) whose 'def' line is missing from this view, along
#with the branch structure around the warnings and the 'xext=xret'
#substitution mentioned below.
463 returns the Z domains on which to plot the force data.
465 The Z domains are returned as a single long DataChunk() extended list. The extension and retraction part
466 can be extracted using ext() and ret() methods.
468 x1step=self.Z_step()[0]
469 x2step=self.Z_step()[1]
472 xext=arange(self.Z_extremes()['ext'][0],self.Z_extremes()['ext'][1],-x1step)
473 xret=arange(self.Z_extremes()['ret'][0],self.Z_extremes()['ret'][1],-x2step)
477 print 'picoforce.py: Warning. xext, xret domains cannot be extracted.'
479 if not (len(xext)==len(xret)):
482 print "picoforce.py: Warning. Extension and retraction domains have different sizes."
483 print "length extension: ", len(xext)
484 print "length retraction: ", len(xret)
485 print "You cannot trust the resulting curve."
486 print "Until a solution is found, I substitute the ext domain with the ret domain. Sorry."
489 return DataChunk(xext.tolist()+xret.tolist())
def Z_scan_size(self):
    '''Return the Z scan size in meters (header volts times Z sensitivity).'''
    return self.get_Zscan_V_size() * self.get_Z_scan_sensitivity()
#NOTE(review): tail of a method (presumably Z_scan_start, by symmetry with
#Z_scan_size above) whose 'def' line is missing from this view of the file.
495 return self.get_Zscan_V_start()*self.get_Z_scan_sensitivity()
def ramp_size(self, whichchunk):
    '''
    To be implemented if needed.

    Raises:
        NotImplementedError: always, until implemented.
    '''
    #Raising a bare string ("raise 'msg'") is a string exception, which is
    #illegal since Python 2.6 (it raises TypeError instead of the intended
    #message); use the standard exception type for unimplemented APIs.
    raise NotImplementedError('Not implemented yet.')
def ramp_offset(self, whichchunk):
    '''
    To be implemented if needed.

    Raises:
        NotImplementedError: always, until implemented.
    '''
    #Raising a bare string ("raise 'msg'") is a string exception, which is
    #illegal since Python 2.6 (it raises TypeError instead of the intended
    #message); use the standard exception type for unimplemented APIs.
    raise NotImplementedError('Not implemented yet.')
510 def detriggerize(self, forcext):
#Scan the extension curve backwards (every other sample) looking for the
#trigger-induced artifact at the start of the curve.
512 Cuts away the trigger-induced s**t on the extension curve.
515 startvalue=forcext[0]
517 for index in range(len(forcext)-1,2,-2):
518 if forcext[index]>startvalue:
#NOTE(review): the loop body and the method's return value are missing from
#this view of the file -- presumably it returns a cut index.
#NOTE(review): fragments of two methods whose 'def' lines are missing from
#this view: a file-type self-identification check (magic-string match) and,
#at the end, the docstring of a close-all-files method.
529 self-identification of file type magic
531 curve_file=file(self.filename)
532 header=curve_file.read(30)
#Picoforce files start with an ASCII "Force file list" marker at offset 2.
535 if header[2:17] == 'Force file list': #header of a picoforce file
536 self.data_chunks=[self._get_data_chunk(num) for num in [0,1,2]]
543 Explicitly closes all files
547 def default_plots(self):
#Build the default extension/retraction force-vs-Z plot for this curve.
#NOTE(review): this method runs past the end of this view -- the final
#'return plot' (or equivalent) is not visible here.
551 force = self.LSB_to_force()
552 zdomain = self.Z_domains()
#Samples-per-line bounds how many points of each half-curve are plotted.
554 samples = self._get_samples_line()
556 #cutindex=self.detriggerize(force.ext())
557 extension = lib.curve.Curve()
558 retraction = lib.curve.Curve()
560 extension.color = 'red'
561 extension.label = 'extension'
562 extension.style = 'plot'
563 extension.title = 'Force curve'
564 extension.units.x = 'm'
565 extension.units.y = 'N'
566 extension.x = zdomain.ext()[0:samples]
567 extension.y = force.ext()[0:samples]
568 retraction.color = 'blue'
569 retraction.label = 'retraction'
570 retraction.style = 'plot'
571 retraction.title = 'Force curve'
572 retraction.units.x = 'm'
573 retraction.units.y = 'N'
574 retraction.x = zdomain.ret()[0:samples]
575 retraction.y = force.ret()[0:samples]
577 plot = lib.plot.Plot()
578 plot.title = os.path.basename(self.filename)
579 plot.curves.append(extension)
580 plot.curves.append(retraction)