3 """Library for interpreting Picoforce force spectroscopy files.
8 from scipy import arange
10 #from .. import libhooke as lh
11 from .. import curve as lhc
14 __version__='0.0.0.20090923'
class DataChunk(list):
    """List subclass that splits a Picoforce data chunk into its halves.

    Picoforce stores the extension and the retraction data of one chunk
    back-to-back in a single array; ext() and ret() return the two halves.
    (The original comment described this as a dummy class providing ext and
    ret methods to the data list; the method bodies were lost in this
    excerpt and are reconstructed here from the surviving
    `self[0:halflen]` slice and from the call sites elsewhere in the file.)
    """

    def ext(self):
        """Return the extension half (first half) of the chunk as a list."""
        halflen=(len(self)//2)
        return self[0:halflen]

    def ret(self):
        """Return the retraction half (second half) of the chunk as a list."""
        halflen=(len(self)//2)
        return self[halflen:]
class picoforceDriver(lhc.Driver):
    #Construction and other special methods

    def __init__(self,filename):
        """
        Open *filename* and initialize the driver's file handles and
        identification metadata.

        NOTE(review): several constructor lines are missing from this
        excerpt (e.g. whatever loads the 0,1,2 data chunks referenced by
        the comment below and by self.data_chunks elsewhere in the file).
        """
        # NOTE(review): `lh` is only imported in a commented-out line at the
        # top of the file — confirm the libhooke import before relying on
        # this call.
        filename = lh.get_file_path(filename)
        # Two handles on the same file: text mode for header parsing,
        # binary mode for reading the raw data chunks.
        self.textfile=file(filename)
        self.binfile=file(filename,'rb')

        #The 0,1,2 data chunks are:
        self.filepath=filename

        # Identification used by the generic Hooke machinery.
        self.filetype='picoforce'
        self.experiment='smfs'
53 #Hidden methods. These are meant to be used only by API functions. If needed, however,
54 #they can be called just like API methods.
    def _get_samples_line(self):
        """
        Gets the samples per line parameters in the file, to understand trigger behaviour.
        """
        samps_expr=re.compile(".*Samps")
        # NOTE(review): the initialization of samps_values (presumably
        # samps_values=[]) and a textfile.seek(0) rewind appear to be
        # missing from this excerpt.
        for line in self.textfile.readlines():
            if samps_expr.match(line):
                samps=int(line.split()[2]) #third whitespace-separated field is the samples count (the original comment, copied from the offset parser, wrongly said "offset (in bytes)")
                samps_values.append(samps)
        #We raise a flag for the fact we meet an offset, otherwise we would take spurious data length arguments.
        # Only the first "Samps" value found is used.
        return int(samps_values[0])
    def _get_chunk_coordinates(self):
        """
        This method gets the coordinates (offset and length) of a data chunk in our
        file.

        It returns a list containing two tuples:
        the first element of each tuple is the data_offset, the second is the corresponding
        data size.

        In near future probably each chunk will get its own data structure, with
        offset, size, type, etc.
        """
        offset_expr=re.compile(".*Data offset")
        length_expr=re.compile(".*Data length")

        # NOTE(review): the initialization of data_offsets/data_sizes and of
        # the flag_offset bookkeeping is missing from this excerpt —
        # flag_offset is tested below but never visibly set or cleared.
        for line in self.textfile.readlines():

            if offset_expr.match(line):
                offset=int(line.split()[2]) #the third word splitted is the offset (in bytes)
                data_offsets.append(offset)
                #We raise a flag for the fact we meet an offset, otherwise we would take spurious data length arguments.

            #same for the data length
            if length_expr.match(line) and flag_offset:
                size=int(line.split()[2])
                data_sizes.append(size)
                #Put down the offset flag until the next offset is met.

        return zip(data_offsets,data_sizes)
    def _get_data_chunk(self,whichchunk):
        """
        reads a data chunk and converts it in 16bit signed int.
        """
        offset,size=self._get_chunk_coordinates()[whichchunk]

        self.binfile.seek(offset)
        raw_chunk=self.binfile.read(size)

        # NOTE(review): the initialization of my_chunk (presumably
        # my_chunk=[]) is missing from this excerpt; `struct` must also be
        # imported in the elided file header.
        for data_position in range(0,len(raw_chunk),2):
            data_unit_bytes=raw_chunk[data_position:data_position+2]
            #The unpack function converts 2-bytes in a signed int ('h').
            #we use output[0] because unpack returns a 1-value tuple, and we want the number only
            data_unit=struct.unpack('h',data_unit_bytes)[0]
            my_chunk.append(data_unit)

        return DataChunk(my_chunk)
    def _get_Zscan_info(self,index):
        """
        gets the Z scan informations needed to interpret the data chunk.
        These info come from the general section, BEFORE individual chunk headers.

        By itself, the function will parse for three parameters.
        (index) tells the function what to return when called:
        index=0 : returns Zscan_V_LSB
        index=1 : returns Zscan_V_start
        index=2 : returns Zscan_V_size
        """
        self.textfile.seek(0)

        ciaoforcelist_expr=re.compile(".*Ciao force")
        zscanstart_expr=re.compile(".*@Z scan start")
        zscansize_expr=re.compile(".*@Z scan size")

        # NOTE(review): the initialization of ciaoforce_flag (=0) and of the
        # Zscanstart_line/Zscansize_line lists is missing from this excerpt.
        for line in self.textfile.readlines():
            if ciaoforcelist_expr.match(line):
                ciaoforce_flag=1 #raise a flag: zscanstart and zscansize params to read are later
            if ciaoforce_flag and zscanstart_expr.match(line):
                raw_Zscanstart_line=line.split()
            if ciaoforce_flag and zscansize_expr.match(line):
                raw_Zscansize_line=line.split()

        # Strip bracket characters from every token of both raw header lines.
        for itemscanstart,itemscansize in zip(raw_Zscanstart_line,raw_Zscansize_line):
            Zscanstart_line.append(itemscanstart.strip('[]()'))
            Zscansize_line.append(itemscansize.strip('[]()'))

        # Fixed field positions within the stripped header lines.
        Zscan_V_LSB=float(Zscanstart_line[6])
        Zscan_V_start=float(Zscanstart_line[8])
        Zscan_V_size=float(Zscansize_line[8])

        return (Zscan_V_LSB,Zscan_V_start,Zscan_V_size)[index]
    def _get_Z_magnify_scale(self,whichchunk):
        """
        gets Z scale and Z magnify
        Here we get Z scale/magnify from the 'whichchunk' only.

        TODO: make it coherent with data_chunks syntaxis (0,1,2)

        In future, should we divide the *file* itself into chunk descriptions and gain
        true chunk data structures?
        """
        self.textfile.seek(0)

        z_scale_expr=re.compile(".*@4:Z scale")
        z_magnify_expr=re.compile(".*@Z magnify")

        ramp_size_expr=re.compile(".*@4:Ramp size")
        ramp_offset_expr=re.compile(".*@4:Ramp offset")

        # NOTE(review): the initialization and updates of the `occurrences`
        # counter and of the `found_right` flag are missing from this
        # excerpt; found_right is presumably raised once the whichchunk-th
        # '@Z magnify' line is reached — confirm against the full source.
        for line in self.textfile.readlines():
            if z_magnify_expr.match(line):
                if occurrences==whichchunk:
                    raw_z_magnify_expression=line.split()

            if found_right and z_scale_expr.match(line):
                raw_z_scale_expression=line.split()
            if found_right and ramp_size_expr.match(line):
                raw_ramp_size_expression=line.split()
            if found_right and ramp_offset_expr.match(line):
                raw_ramp_offset_expression=line.split()

        # Fixed field positions; the last element drops the leading character
        # of field 5 (presumably a bracket) before the float conversion.
        return float(raw_z_magnify_expression[5]),float(raw_z_scale_expression[7]), float(raw_ramp_size_expression[7]), float(raw_ramp_offset_expression[7]), float(raw_z_scale_expression[5][1:])
219 #These are the methods that are meant to be called from external apps.
221 def LSB_to_volt(self,chunknum,voltrange=20):
223 Converts the LSB data of a given chunk (chunknum=0,1,2) in volts.
224 First step to get the deflection and the force.
227 item.LSB_to_volt(chunknum, [voltrange])
229 The voltrange is by default set to 20 V.
231 return DataChunk([((float(lsb)/65535)*voltrange) for lsb in self.data_chunks[chunknum]])
233 def LSB_to_deflection(self,chunknum,deflsensitivity=None,voltrange=20):
235 Converts the LSB data in deflection (meters).
238 item.LSB_to_deflection(chunknum, [deflection sensitivity], [voltrange])
240 chunknum is the chunk you want to parse (0,1,2)
242 The deflection sensitivity by default is the one parsed from the file.
243 The voltrange is by default set to 20 V.
245 if deflsensitivity is None:
246 deflsensitivity=self.get_deflection_sensitivity()
248 lsbvolt=self.LSB_to_volt(chunknum)
249 return DataChunk([volt*deflsensitivity for volt in lsbvolt])
251 def deflection(self):
253 Get the actual force curve deflection.
255 deflchunk= self.LSB_to_deflection(2)
256 return deflchunk.ext(),deflchunk.ret()
258 def LSB_to_force(self,chunknum=2,Kspring=None,voltrange=20):
260 Converts the LSB data (of deflection) in force (newtons).
263 item.LSB_to_force([chunknum], [spring constant], [voltrange])
265 chunknum is the chunk you want to parse (0,1,2). The chunk used is by default 2.
266 The spring constant by default is the one parsed from the file.
267 The voltrange is by default set to 20 V.
270 Kspring=self.get_spring_constant()
272 lsbdefl=self.LSB_to_deflection(chunknum)
273 return DataChunk([(meter*Kspring) for meter in lsbdefl])
    def get_Zscan_V_start(self):
        """Return the Z scan start value (in volts) parsed from the header."""
        return self._get_Zscan_info(1)
    def get_Zscan_V_size(self):
        """Return the Z scan size (in volts) parsed from the header."""
        return self._get_Zscan_info(2)
281 def get_Z_scan_sensitivity(self):
285 self.textfile.seek(0)
287 z_sensitivity_expr=re.compile(".*@Sens. Zsens")
289 for line in self.textfile.readlines():
290 if z_sensitivity_expr.match(line):
291 z_sensitivity=float(line.split()[3])
292 #return it in SI units (that is: m/V, not nm/V)
293 return z_sensitivity*(10**(-9))
    def get_Z_magnify(self,whichchunk):
        """
        Gets the Z magnify factor. Normally it is 1, unknown exact use as of 2006-01-13
        """
        return self._get_Z_magnify_scale(whichchunk)[0]
    def get_Z_scale(self,whichchunk):
        """Gets the Z scale factor for the given chunk header."""
        return self._get_Z_magnify_scale(whichchunk)[1]
    def get_ramp_size(self,whichchunk):
        """
        Gets the -user defined- ramp size
        """
        return self._get_Z_magnify_scale(whichchunk)[2]
    def get_ramp_offset(self,whichchunk):
        """Gets the ramp offset (companion of get_ramp_size)."""
        return self._get_Z_magnify_scale(whichchunk)[3]
    def get_Z_scale_LSB(self,whichchunk):
        """
        Gets the LSB-to-volt conversion factor of the Z data.
        (so called hard-scale in the Nanoscope documentation)
        """
        return self._get_Z_magnify_scale(whichchunk)[4]
327 def get_deflection_sensitivity(self):
329 gets deflection sensitivity
331 self.textfile.seek(0)
333 def_sensitivity_expr=re.compile(".*@Sens. DeflSens")
335 for line in self.textfile.readlines():
336 if def_sensitivity_expr.match(line):
337 def_sensitivity=float(line.split()[3])
339 #return it in SI units (that is: m/V, not nm/V)
340 return def_sensitivity*(10**(-9))
    def get_spring_constant(self):
        """
        gets spring constant.
        We actually find *three* spring constant values, one for each data chunk (F/t, Z/t, F/z).
        They are normally all equal, but we retain all three for future...
        """
        self.textfile.seek(0)

        springconstant_expr=re.compile(".*Spring Constant")

        # NOTE(review): the initialization of `constants` (presumably
        # constants=[]) and the final return statement are missing from
        # this excerpt.
        for line in self.textfile.readlines():
            if springconstant_expr.match(line):
                constants.append(float(line.split()[2]))
    def get_Zsensorsens(self):
        """
        gets Zsensorsens for Z data.

        This is the sensitivity needed to convert the LSB data in nanometers for the Z-vs-T data chunk.
        """
        self.textfile.seek(0)

        zsensorsens_expr=re.compile(".*Sens. ZSensorSens")

        for line in self.textfile.readlines():
            if zsensorsens_expr.match(line):
                zsensorsens_raw_expression=line.split()
                #we must take only first occurrence, so we exit from the cycle immediately
                # NOTE(review): the `break` implied by the comment above is
                # missing from this excerpt; as shown, the last occurrence
                # would win instead.

        # Field 3 is the numeric value; convert nm/V to SI units (m/V).
        return (float(zsensorsens_raw_expression[3]))*(10**(-9))
    # NOTE(review): the enclosing `def` line (presumably `Z_data(self)`) and
    # the final return of z_curves are missing from this excerpt.
    """
    returns converted ext and ret Z curves.
    They're on the second chunk (Z vs t).
    """
    #Zmagnify_zt=self.get_Z_magnify(2)
    #Zscale_zt=self.get_Z_scale(2)
    Zlsb_zt=self.get_Z_scale_LSB(2)
    #rampsize_zt=self.get_ramp_size(2)
    #rampoffset_zt=self.get_ramp_offset(2)
    zsensorsens=self.get_Zsensorsens()

    """
    The magic formula that converts the Z data is:

    meters = LSB * V_lsb_conversion_factor * ZSensorSens
    """

    #z_curves=[item*Zlsb_zt*zsensorsens for item in self.data_chunks[1].pair['ext']],[item*Zlsb_zt*zsensorsens for item in self.data_chunks[1].pair['ret']]
    z_curves=[item*Zlsb_zt*zsensorsens for item in self.data_chunks[1].ext()],[item*Zlsb_zt*zsensorsens for item in self.data_chunks[1].ret()]
    z_curves=[DataChunk(item) for item in z_curves]
    def Z_extremes(self):
        """
        returns the extremes of the Z values
        """
        zcurves=self.Z_data()
        # NOTE(review): the initialization `z_extremes={}` and the final
        # return statement are missing from this excerpt.
        # First and last sample of the extension (index 0) and retraction
        # (index 1) curves.
        z_extremes['ext']=zcurves[0][0],zcurves[0][-1]
        z_extremes['ret']=zcurves[1][0],zcurves[1][-1]
    # NOTE(review): the enclosing `def` line (presumably `Z_step(self)`) and
    # the `zrange={}` / `zpoints={}` initializations are missing from this
    # excerpt.
    """
    returns the calculated step between the Z values
    """
    z_extremes=self.Z_extremes()

    zrange['ext']=abs(z_extremes['ext'][0]-z_extremes['ext'][1])
    zrange['ret']=abs(z_extremes['ret'][0]-z_extremes['ret'][1])

    #We must take 1 from the calculated zpoints, or when I use the arange function gives me a point more
    #with the step. That is, if I have 1000 points, and I use arange(start,stop,step), I have 1001 points...
    #For cleanness, solution should really be when using arange, but oh well...
    zpoints['ext']=len(self.Z_data()[0])-1
    zpoints['ret']=len(self.Z_data()[1])-1
    #this syntax must become coherent!!
    return (zrange['ext']/zpoints['ext']),(zrange['ret']/zpoints['ret'])
    # NOTE(review): the enclosing `def` line (presumably `Z_domains(self)`)
    # is missing from this excerpt, together with several statements — e.g.
    # the try/except that would make the first warning below reachable, and
    # the xext=xret substitution announced by the last warning.
    """
    returns the Z domains on which to plot the force data.

    The Z domains are returned as a single long DataChunk() extended list. The extension and retraction part
    can be extracted using ext() and ret() methods.
    """
    x1step=self.Z_step()[0]
    x2step=self.Z_step()[1]

    # Negative steps: the domains run from the start extreme down to the end
    # extreme.
    xext=arange(self.Z_extremes()['ext'][0],self.Z_extremes()['ext'][1],-x1step)
    xret=arange(self.Z_extremes()['ret'][0],self.Z_extremes()['ret'][1],-x2step)

    print 'picoforce.py: Warning. xext, xret domains cannot be extracted.'

    if not (len(xext)==len(xret)):

        print "picoforce.py: Warning. Extension and retraction domains have different sizes."
        print "length extension: ", len(xext)
        print "length retraction: ", len(xret)
        print "You cannot trust the resulting curve."
        print "Until a solution is found, I substitute the ext domain with the ret domain. Sorry."

    return DataChunk(xext.tolist()+xret.tolist())
    def Z_scan_size(self):
        """Return the Z scan size in meters (header volts * m/V sensitivity)."""
        return self.get_Zscan_V_size()*self.get_Z_scan_sensitivity()

    # NOTE(review): the `def` line for the return below (presumably
    # `Z_scan_start(self)`) is missing from this excerpt.
    return self.get_Zscan_V_start()*self.get_Z_scan_sensitivity()
468 def ramp_size(self,whichchunk):
470 to be implemented if needed
472 raise "Not implemented yet."
475 def ramp_offset(self,whichchunk):
477 to be implemented if needed
479 raise "Not implemented yet."
    def detriggerize(self, forcext):
        """
        Cuts away the trigger-induced s**t on the extension curve.
        """
        # Reference level: the first sample of the extension curve.
        startvalue=forcext[0]

        # Walk backwards from the end in steps of two, looking for where the
        # curve rises back above the starting value.
        # NOTE(review): the loop body and the return of the cut index are
        # truncated in this excerpt.
        for index in range(len(forcext)-1,2,-2):
            if forcext[index]>startvalue:
    # NOTE(review): the enclosing `def` line (presumably the driver's
    # `is_me(self)` file-type hook) is missing from this excerpt, as is
    # whatever follows the header check (e.g. a boolean return).
    """
    self-identification of file type magic
    """
    curve_file=file(self.filepath)
    # The magic string sits at bytes 2..16 of the header.
    header=curve_file.read(30)

    if header[2:17] == 'Force file list': #header of a picoforce file
        # Eagerly load all three data chunks once the file is recognized.
        self.data_chunks=[self._get_data_chunk(num) for num in [0,1,2]]
    # NOTE(review): the enclosing `def` line (presumably `close_all(self)`)
    # is missing from this excerpt; self.binfile is likely closed in the
    # elided lines as well.
    """
    Explicitly closes all files
    """
    self.textfile.close()
519 def default_plots(self):
521 creates the default PlotObject
525 force=self.LSB_to_force()
526 zdomain=self.Z_domains()
528 samples=self._get_samples_line()
530 #cutindex=self.detriggerize(force.ext())
532 main_plot=lhc.PlotObject()
534 main_plot.vectors = [[zdomain.ext()[0:samples], force.ext()[0:samples]],[zdomain.ret()[0:samples], force.ret()[0:samples]]]
535 main_plot.normalize_vectors()
536 main_plot.units = ['meters','newton']
537 main_plot.destination = 0
538 main_plit.filename = self.filepath
539 main_plot.title = self.filepath
540 main_plot.colors = ['red', 'blue']
541 main_plit.styles = ['plot', 'plot']