4 Library for interpreting Picoforce force spectroscopy files.
6 Copyright (C) 2006 Massimo Sandal (University of Bologna, Italy).
8 This program is released under the GNU General Public License version 2.
12 from scipy import arange
14 import libhookecurve as lhc
#Driver version string, encoded as <major>.<minor>.<patch>.<YYYYMMDD>
__version__='0.0.0.20080404'
class DataChunk(list):
    '''
    Dummy class to provide ext and ret methods to the data list.

    A picoforce data chunk stores the extension and the retraction
    half-curves back to back in a single list: ext() returns the first
    half, ret() the second half (the extra point, when the length is
    odd, goes to the retraction part).
    '''

    def ext(self):
        #floor division keeps the index an int under both Python 2 and 3
        halflen=(len(self)//2)
        return self[0:halflen]

    def ret(self):
        halflen=(len(self)//2)
        return self[halflen:]
30 class picoforceDriver(lhc.Driver):
32 #Construction and other special methods
34 def __init__(self,filename):
39 self.textfile=file(filename)
40 self.binfile=file(filename,'rb')
42 #The 0,1,2 data chunks are:
48 self.filepath=filename
51 self.filetype='picoforce'
52 self.experiment='smfs'
55 #Hidden methods. These are meant to be used only by API functions. If needed, however,
56 #they can be called just like API methods.
58 def _get_samples_line(self):
60 Gets the samples per line parameters in the file, to understand trigger behaviour.
64 samps_expr=re.compile(".*Samps")
67 for line in self.textfile.readlines():
68 if samps_expr.match(line):
70 samps=int(line.split()[2]) #the third word splitted is the offset (in bytes)
71 samps_values.append(samps)
75 #We raise a flag for the fact we meet an offset, otherwise we would take spurious data length arguments.
77 return int(samps_values[0])
79 def _get_chunk_coordinates(self):
81 This method gets the coordinates (offset and length) of a data chunk in our
84 It returns a list containing two tuples:
85 the first element of each tuple is the data_offset, the second is the corresponding
88 In near future probably each chunk will get its own data structure, with
89 offset, size, type, etc.
93 offset_expr=re.compile(".*Data offset")
94 length_expr=re.compile(".*Data length")
100 for line in self.textfile.readlines():
102 if offset_expr.match(line):
103 offset=int(line.split()[2]) #the third word splitted is the offset (in bytes)
104 data_offsets.append(offset)
105 #We raise a flag for the fact we meet an offset, otherwise we would take spurious data length arguments.
108 #same for the data length
109 if length_expr.match(line) and flag_offset:
110 size=int(line.split()[2])
111 data_sizes.append(size)
112 #Put down the offset flag until the next offset is met.
115 return zip(data_offsets,data_sizes)
117 def _get_data_chunk(self,whichchunk):
119 reads a data chunk and converts it in 16bit signed int.
121 offset,size=self._get_chunk_coordinates()[whichchunk]
124 self.binfile.seek(offset)
125 raw_chunk=self.binfile.read(size)
128 for data_position in range(0,len(raw_chunk),2):
129 data_unit_bytes=raw_chunk[data_position:data_position+2]
130 #The unpack function converts 2-bytes in a signed int ('h').
131 #we use output[0] because unpack returns a 1-value tuple, and we want the number only
132 data_unit=struct.unpack('h',data_unit_bytes)[0]
133 my_chunk.append(data_unit)
135 return DataChunk(my_chunk)
137 def _get_Zscan_info(self,index):
139 gets the Z scan informations needed to interpret the data chunk.
140 These info come from the general section, BEFORE individual chunk headers.
142 By itself, the function will parse for three parameters.
143 (index) that tells the function what to return when called by
145 index=0 : returns Zscan_V_LSB
146 index=1 : returns Zscan_V_start
147 index=2 : returns Zscan_V_size
149 self.textfile.seek(0)
151 ciaoforcelist_expr=re.compile(".*Ciao force")
152 zscanstart_expr=re.compile(".*@Z scan start")
153 zscansize_expr=re.compile(".*@Z scan size")
157 for line in self.textfile.readlines():
158 if ciaoforcelist_expr.match(line):
159 ciaoforce_flag=1 #raise a flag: zscanstart and zscansize params to read are later
161 if ciaoforce_flag and zscanstart_expr.match(line):
162 raw_Zscanstart_line=line.split()
164 if ciaoforce_flag and zscansize_expr.match(line):
165 raw_Zscansize_line=line.split()
169 for itemscanstart,itemscansize in zip(raw_Zscanstart_line,raw_Zscansize_line):
170 Zscanstart_line.append(itemscanstart.strip('[]()'))
171 Zscansize_line.append(itemscansize.strip('[]()'))
173 Zscan_V_LSB=float(Zscanstart_line[6])
174 Zscan_V_start=float(Zscanstart_line[8])
175 Zscan_V_size=float(Zscansize_line[8])
177 return (Zscan_V_LSB,Zscan_V_start,Zscan_V_size)[index]
179 def _get_Z_magnify_scale(self,whichchunk):
181 gets Z scale and Z magnify
182 Here we get Z scale/magnify from the 'whichchunk' only.
184 TODO: make it coherent with data_chunks syntaxis (0,1,2)
186 In future, should we divide the *file* itself into chunk descriptions and gain
187 true chunk data structures?
189 self.textfile.seek(0)
191 z_scale_expr=re.compile(".*@4:Z scale")
192 z_magnify_expr=re.compile(".*@Z magnify")
194 ramp_size_expr=re.compile(".*@4:Ramp size")
195 ramp_offset_expr=re.compile(".*@4:Ramp offset")
201 for line in self.textfile.readlines():
202 if z_magnify_expr.match(line):
204 if occurrences==whichchunk:
206 raw_z_magnify_expression=line.split()
210 if found_right and z_scale_expr.match(line):
211 raw_z_scale_expression=line.split()
212 if found_right and ramp_size_expr.match(line):
213 raw_ramp_size_expression=line.split()
214 if found_right and ramp_offset_expr.match(line):
215 raw_ramp_offset_expression=line.split()
217 return float(raw_z_magnify_expression[5]),float(raw_z_scale_expression[7]), float(raw_ramp_size_expression[7]), float(raw_ramp_offset_expression[7]), float(raw_z_scale_expression[5][1:])
221 #These are the methods that are meant to be called from external apps.
223 def LSB_to_volt(self,chunknum,voltrange=20):
225 Converts the LSB data of a given chunk (chunknum=0,1,2) in volts.
226 First step to get the deflection and the force.
229 item.LSB_to_volt(chunknum, [voltrange])
231 The voltrange is by default set to 20 V.
233 return DataChunk([((float(lsb)/65535)*voltrange) for lsb in self.data_chunks[chunknum]])
235 def LSB_to_deflection(self,chunknum,deflsensitivity=None,voltrange=20):
237 Converts the LSB data in deflection (meters).
240 item.LSB_to_deflection(chunknum, [deflection sensitivity], [voltrange])
242 chunknum is the chunk you want to parse (0,1,2)
244 The deflection sensitivity by default is the one parsed from the file.
245 The voltrange is by default set to 20 V.
247 if deflsensitivity is None:
248 deflsensitivity=self.get_deflection_sensitivity()
250 lsbvolt=self.LSB_to_volt(chunknum)
251 return DataChunk([volt*deflsensitivity for volt in lsbvolt])
253 def deflection(self):
255 Get the actual force curve deflection.
257 deflchunk= self.LSB_to_deflection(2)
258 return deflchunk.ext(),deflchunk.ret()
260 def LSB_to_force(self,chunknum=2,Kspring=None,voltrange=20):
262 Converts the LSB data (of deflection) in force (newtons).
265 item.LSB_to_force([chunknum], [spring constant], [voltrange])
267 chunknum is the chunk you want to parse (0,1,2). The chunk used is by default 2.
268 The spring constant by default is the one parsed from the file.
269 The voltrange is by default set to 20 V.
272 Kspring=self.get_spring_constant()
274 lsbdefl=self.LSB_to_deflection(chunknum)
275 return DataChunk([(meter*Kspring) for meter in lsbdefl])
    def get_Zscan_V_start(self):
        '''
        Returns the Z scan start value (in volts) parsed from the file header.
        '''
        return self._get_Zscan_info(1)
    def get_Zscan_V_size(self):
        '''
        Returns the Z scan size (in volts) parsed from the file header.
        '''
        return self._get_Zscan_info(2)
    def get_Z_scan_sensitivity(self):
        '''
        Gets the Z scan sensitivity from the "@Sens. Zsens" header line.
        '''
        self.textfile.seek(0)

        z_sensitivity_expr=re.compile(".*@Sens. Zsens")

        for line in self.textfile.readlines():
            if z_sensitivity_expr.match(line):
                #fourth token of the header line is the numeric value
                z_sensitivity=float(line.split()[3])
        #return it in SI units (that is: m/V, not nm/V)
        #NOTE(review): if several lines match, the last one wins; if none
        #matches, this raises NameError — confirm the header always has it.
        return z_sensitivity*(10**(-9))
    def get_Z_magnify(self,whichchunk):
        '''
        Gets the Z magnify factor. Normally it is 1, unknown exact use as of 2006-01-13
        '''
        return self._get_Z_magnify_scale(whichchunk)[0]
    def get_Z_scale(self,whichchunk):
        '''
        Gets the Z scale factor for the given chunk.
        '''
        return self._get_Z_magnify_scale(whichchunk)[1]
    def get_ramp_size(self,whichchunk):
        '''
        Gets the -user defined- ramp size
        '''
        return self._get_Z_magnify_scale(whichchunk)[2]
    def get_ramp_offset(self,whichchunk):
        '''
        Gets the -user defined- ramp offset
        '''
        return self._get_Z_magnify_scale(whichchunk)[3]
    def get_Z_scale_LSB(self,whichchunk):
        '''
        Gets the LSB-to-volt conversion factor of the Z data.
        (so called hard-scale in the Nanoscope documentation)
        '''
        return self._get_Z_magnify_scale(whichchunk)[4]
    def get_deflection_sensitivity(self):
        '''
        gets deflection sensitivity from the "@Sens. DeflSens" header line.
        '''
        self.textfile.seek(0)

        def_sensitivity_expr=re.compile(".*@Sens. DeflSens")

        for line in self.textfile.readlines():
            if def_sensitivity_expr.match(line):
                #fourth token of the header line is the numeric value
                def_sensitivity=float(line.split()[3])
        #return it in SI units (that is: m/V, not nm/V)
        #NOTE(review): last matching line wins — confirm single occurrence.
        return def_sensitivity*(10**(-9))
344 def get_spring_constant(self):
346 gets spring constant.
347 We actually find *three* spring constant values, one for each data chunk (F/t, Z/t, F/z).
348 They are normally all equal, but we retain all three for future...
350 self.textfile.seek(0)
352 springconstant_expr=re.compile(".*Spring Constant")
356 for line in self.textfile.readlines():
357 if springconstant_expr.match(line):
358 constants.append(float(line.split()[2]))
362 def get_Zsensorsens(self):
364 gets Zsensorsens for Z data.
366 This is the sensitivity needed to convert the LSB data in nanometers for the Z-vs-T data chunk.
368 self.textfile.seek(0)
370 zsensorsens_expr=re.compile(".*Sens. ZSensorSens")
372 for line in self.textfile.readlines():
373 if zsensorsens_expr.match(line):
374 zsensorsens_raw_expression=line.split()
375 #we must take only first occurrence, so we exit from the cycle immediately
378 return (float(zsensorsens_raw_expression[3]))*(10**(-9))
382 returns converted ext and ret Z curves.
383 They're on the second chunk (Z vs t).
385 #Zmagnify_zt=self.get_Z_magnify(2)
386 #Zscale_zt=self.get_Z_scale(2)
387 Zlsb_zt=self.get_Z_scale_LSB(2)
388 #rampsize_zt=self.get_ramp_size(2)
389 #rampoffset_zt=self.get_ramp_offset(2)
390 zsensorsens=self.get_Zsensorsens()
393 The magic formula that converts the Z data is:
395 meters = LSB * V_lsb_conversion_factor * ZSensorSens
398 #z_curves=[item*Zlsb_zt*zsensorsens for item in self.data_chunks[1].pair['ext']],[item*Zlsb_zt*zsensorsens for item in self.data_chunks[1].pair['ret']]
399 z_curves=[item*Zlsb_zt*zsensorsens for item in self.data_chunks[1].ext()],[item*Zlsb_zt*zsensorsens for item in self.data_chunks[1].ret()]
400 z_curves=[DataChunk(item) for item in z_curves]
403 def Z_extremes(self):
405 returns the extremes of the Z values
407 zcurves=self.Z_data()
409 z_extremes['ext']=zcurves[0][0],zcurves[0][-1]
410 z_extremes['ret']=zcurves[1][0],zcurves[1][-1]
416 returns the calculated step between the Z values
421 z_extremes=self.Z_extremes()
423 zrange['ext']=abs(z_extremes['ext'][0]-z_extremes['ext'][1])
424 zrange['ret']=abs(z_extremes['ret'][0]-z_extremes['ret'][1])
426 #We must take 1 from the calculated zpoints, or when I use the arange function gives me a point more
427 #with the step. That is, if I have 1000 points, and I use arange(start,stop,step), I have 1001 points...
428 #For cleanness, solution should really be when using arange, but oh well...
429 zpoints['ext']=len(self.Z_data()[0])-1
430 zpoints['ret']=len(self.Z_data()[1])-1
431 #this syntax must become coherent!!
432 return (zrange['ext']/zpoints['ext']),(zrange['ret']/zpoints['ret'])
436 returns the Z domains on which to plot the force data.
438 The Z domains are returned as a single long DataChunk() extended list. The extension and retraction part
439 can be extracted using ext() and ret() methods.
441 x1step=self.Z_step()[0]
442 x2step=self.Z_step()[1]
445 xext=arange(self.Z_extremes()['ext'][0],self.Z_extremes()['ext'][1],-x1step)
446 xret=arange(self.Z_extremes()['ret'][0],self.Z_extremes()['ret'][1],-x2step)
450 print 'picoforce.py: Warning. xext, xret domains cannot be extracted.'
452 if not (len(xext)==len(xret)):
455 print "picoforce.py: Warning. Extension and retraction domains have different sizes."
456 print "length extension: ", len(xext)
457 print "length retraction: ", len(xret)
458 print "You cannot trust the resulting curve."
459 print "Until a solution is found, I substitute the ext domain with the ret domain. Sorry."
462 return DataChunk(xext.tolist()+xret.tolist())
    def Z_scan_size(self):
        '''
        Returns the Z scan size converted in meters (volts times m/V sensitivity).
        '''
        return self.get_Zscan_V_size()*self.get_Z_scan_sensitivity()
468 return self.get_Zscan_V_start()*self.get_Z_scan_sensitivity()
470 def ramp_size(self,whichchunk):
472 to be implemented if needed
474 raise "Not implemented yet."
477 def ramp_offset(self,whichchunk):
479 to be implemented if needed
481 raise "Not implemented yet."
483 def detriggerize(self, forcext):
485 Cuts away the trigger-induced s**t on the extension curve.
488 startvalue=forcext[0]
490 for index in range(len(forcext)-1,2,-2):
491 if forcext[index]>startvalue:
502 self-identification of file type magic
504 curve_file=file(self.filepath)
505 header=curve_file.read(30)
508 if header[2:17] == 'Force file list': #header of a picoforce file
509 self.data_chunks=[self._get_data_chunk(num) for num in [0,1,2]]
516 Explicitly closes all files
518 self.textfile.close()
521 def default_plots(self):
523 creates the default PlotObject
527 force=self.LSB_to_force()
528 zdomain=self.Z_domains()
530 samples=self._get_samples_line()
532 #cutindex=self.detriggerize(force.ext())
534 main_plot=lhc.PlotObject()
536 main_plot.vectors=[[zdomain.ext()[0:samples], force.ext()[0:samples]],[zdomain.ret()[0:samples], force.ret()[0:samples]]]
537 main_plot.normalize_vectors()
538 main_plot.units=['meters','newton']
539 main_plot.destination=0
540 main_plot.title=self.filepath