-#!/usr/bin/python
+#!/usr/bin/env python
import hooke.hooke
fc_showimposed="0"\r
fc_interesting="0"\r
tccd_threshold="0"\r
- tccd_coincident="0"/>\r
+ tccd_coincident="0"\r
+ centerzero="1"\r
+/>\r
\r
<!--\r
The following section defines the default playlist to load at startup.\r
-->\r
<plugins>\r
<fit/>\r
+ <curvetools/>\r
<procplots/>\r
<flatfilts/>\r
<generalclamp/>\r
<pcluster/>\r
<generaltccd/>\r
<multidistance/>\r
+ <jumpstat/>\r
+ <review/>\r
+ <multifit/>\r
</plugins>\r
\r
<!--\r
This section defines which drivers have to be loaded by Hooke.\r
-->\r
<drivers>\r
- <picoforce/>\r
- <!-- picoforcealt/ -->\r
+ <!--picoforce/-->\r
+ <picoforcealt/>\r
<hemingclamp/>\r
<csvdriver/>\r
<!-- tutorialdriver/ -->\r
<jpk/>\r
<mfp1dexport/>\r
<mcs/>\r
+ <!-- hdf5/ -->\r
+ <mfp3d/>\r
</drivers>\r
\r
<!--\r
<multiplier/>\r
<clamp/>\r
<threshold/>\r
- <coincident/>\r
+ <coincident/>
+ <centerzero/>\r
</plotmanips>\r
\r
</config>\r
--- /dev/null
+#!/usr/bin/env python
+
+'''
+hdf5.py
+
+Driver for text-exported HDF5 files from Igor pro
+
+Alberto Gomez-Casado (c) 2010
+Massimo Sandal (c) 2009
+'''
+
+from .. import libhookecurve as lhc
+from .. import libhooke as lh
+
+class hdf5Driver(lhc.Driver):
+    # Hooke driver for text-exported HDF5 files from Igor Pro.
+    # The whole file is slurped into memory in __init__; columns are
+    # parsed lazily by _read_columns().
+
+    def __init__(self, filename):
+        # Read all lines eagerly and close the file straight away.
+        self.filename=filename
+        self.filedata=open(filename,'rU')
+        self.lines=list(self.filedata.readlines())
+        self.filedata.close()
+
+        self.filetype='hdf5'
+        self.experiment='smfs'
+
+    def close_all(self):
+        # NOTE(review): the file object was already closed in __init__,
+        # so this second close() is redundant (but harmless).
+        self.filedata.close()
+
+    def is_me(self):
+        # A file belongs to this driver when its first line carries the
+        # 'IGP-HDF5-Hooke' magic marker.
+        self.raw_header=self.lines[0]
+
+        if 'IGP-HDF5-Hooke' in self.raw_header:
+            return True
+        else:
+            return False
+
+    def _read_columns(self):
+        # Parse the spring constant from the 'SpringC...' header line and
+        # the four whitespace-separated data columns:
+        # ext Z, ext deflection, ret Z, ret deflection.
+
+        # Data is assumed to start after a 4-line header -- TODO confirm
+        # this offset holds for every exporter version.
+        self.raw_columns=self.lines[4:]
+
+        # Find the header line starting with 'SpringC' (e.g. 'SpringConstant: x').
+        kline=None
+        for line in self.lines:
+            if line[:7]=='SpringC':
+                kline=line
+                break
+
+        # NOTE(review): if no 'SpringC' line exists, kline stays None and
+        # the split() below raises AttributeError.
+        kline=kline.split(':')
+
+        self.k=float(kline[1])
+
+
+        xext=[]
+        xret=[]
+        yext=[]
+        yret=[]
+        for line in self.raw_columns:
+            spline=line.split()
+            xext.append(float(spline[0]))
+            yext.append(float(spline[1]))
+            xret.append(float(spline[2]))
+            yret.append(float(spline[3]))
+
+        # Shape: [[extension x, extension y], [retraction x, retraction y]]
+        return [[xext,yext],[xret,yret]]
+
+    def deflection(self):
+        # Return the (extension, retraction) deflection vectors; also
+        # caches the full column data on self.data for default_plots().
+        self.data=self._read_columns()
+        return self.data[0][1],self.data[1][1]
+
+
+    def default_plots(self):
+        # Build the default force-vs-Z plot; deflection is converted to
+        # force by multiplying with the spring constant self.k.
+        main_plot=lhc.PlotObject()
+        defl_ext,defl_ret=self.deflection()
+        yextforce=[i*self.k for i in defl_ext]
+        yretforce=[i*self.k for i in defl_ret]
+        main_plot.add_set(self.data[0][0],yextforce)
+        main_plot.add_set(self.data[1][0],yretforce)
+        main_plot.normalize_vectors()
+        main_plot.units=['Z','force'] #FIXME: if there's an header saying something about the time count, should be used
+        main_plot.destination=0
+        main_plot.title=self.filename
+        #main_plot.colors=['red','blue']
+        return [main_plot]
--- /dev/null
+#!/usr/bin/env python
+
+'''
+mfp3d.py
+
+Driver for MFP-3D files.
+
+Copyright 2010 by Dr. Rolf Schmidt (Concordia University, Canada)
+This driver is based on the work of R. Naud and A. Seeholzer (see below)
+to read Igor binary waves. Code used with permission.
+
+Modified for usage with Hooke CLI by Alberto Gomez-Casado (University of Twente, The Netherlands)
+
+This program is released under the GNU General Public License version 2.
+'''
+
+# DEFINITION:
+# Reads Igor's (Wavemetric) binary wave format, .ibw, files.
+#
+# ALGORITHM:
+# Parsing proper to version 2, 3, or version 5 (see Technical notes TN003.ifn:
+# http://mirror.optus.net.au/pub/wavemetrics/IgorPro/Technical_Notes/) and data
+# type 2 or 4 (non complex, single or double precision vector, real values).
+#
+# AUTHORS:
+# Matlab version: R. Naud August 2008 (http://lcn.epfl.ch/~naud/Home.html)
+# Python port: A. Seeholzer October 2008
+#
+# VERSION: 0.1
+#
+# COMMENTS:
+# Only tested for version 2 Igor files for now, testing for 3 and 5 remains to be done.
+# More header data could be passed back if wished. For significance of ignored bytes see
+# the technical notes linked above.
+
+import numpy
+import os.path
+import struct
+
+from .. import libhookecurve as lhc
+
+
+__version__='0.0.0.20100310'
+
+
+class DataChunk(list):
+    #Dummy class to provide ext and ret methods to the data list.
+    # Convention: the first half of the list is the extension (approach)
+    # segment, the second half the retraction segment -- presumably both
+    # halves have equal length; TODO confirm for odd-length chunks
+    # (Python 2 integer division silently drops the middle point).
+
+    def ext(self):
+        # First half of the chunk: extension data.
+        halflen=(len(self)/2)
+        return self[0:halflen]
+
+    def ret(self):
+        # Second half of the chunk: retraction data.
+        halflen=(len(self)/2)
+        return self[halflen:]
+
+class mfp3dDriver(lhc.Driver):
+    # Hooke driver for Asylum Research MFP-3D force curves stored as Igor
+    # binary waves (.ibw).  Header parsing follows Igor Technical Note
+    # TN003 (wave format versions 2, 3 and 5).
+
+    #Construction and other special methods
+
+    def __init__(self,filename):
+        '''
+        constructor method
+        '''
+
+        self.textfile =file(filename)
+        self.binfile=file(filename,'rb')
+        #unnecesary, but some other part of the program expects these to be open
+        # NOTE(review): file() is Python 2 only; these handles are never
+        # read from, only closed in close_all().
+
+        # Chunk indices used by _get_data_chunk()/is_me().
+        self.forcechunk=0
+        self.distancechunk=1
+        #TODO eliminate the need to set chunk numbers
+
+        self.filepath=filename
+        self.debug=True
+
+        self.data = []
+        self.note = []
+        self.retract_velocity = None
+        self.spring_constant = None
+        self.filename = filename
+
+        # Text view of the (binary) file, used only by is_me() to look
+        # for the 'ForceNote:' marker.
+        self.filedata = open(filename,'rU')
+        self.lines = list(self.filedata.readlines())
+        self.filedata.close()
+
+        self.filetype = 'mfp3d'
+        self.experiment = 'smfs'
+
+
+    def _get_data_chunk(self,whichchunk):
+        # Parse the .ibw file and return the requested column:
+        # whichchunk==self.forcechunk -> force (deflection * spring k),
+        # whichchunk==self.distancechunk -> LVDT distance.
+        # Header layouts per Igor TN003; only versions 2/3/5 are handled.
+
+        data = None
+        f = open(self.filename, 'rb')
+        ####################### ORDERING
+        # machine format for IEEE floating point with big-endian
+        # byte ordering
+        # MacIgor use the Motorola big-endian 'b'
+        # WinIgor use Intel little-endian 'l'
+        # If the first byte in the file is non-zero, then the file is a WinIgor
+        firstbyte = struct.unpack('b', f.read(1))[0]
+        if firstbyte == 0:
+            format = '>'
+        else:
+            format = '<'
+        ####################### CHECK VERSION
+        f.seek(0)
+        version = struct.unpack(format+'h', f.read(2))[0]
+        ####################### READ DATA AND ACCOMPANYING INFO
+        if version == 2 or version == 3:
+            # pre header
+            wfmSize = struct.unpack(format+'i', f.read(4))[0] # The size of the WaveHeader2 data structure plus the wave data plus 16 bytes of padding.
+            noteSize = struct.unpack(format+'i', f.read(4))[0] # The size of the note text.
+            if version==3:
+                formulaSize = struct.unpack(format+'i', f.read(4))[0]
+            pictSize = struct.unpack(format+'i', f.read(4))[0] # Reserved. Write zero. Ignore on read.
+            checksum = struct.unpack(format+'H', f.read(2))[0] # Checksum over this header and the wave header.
+            # wave header
+            dtype = struct.unpack(format+'h', f.read(2))[0]
+            if dtype == 2:
+                dtype = numpy.float32(.0).dtype
+            elif dtype == 4:
+                dtype = numpy.double(.0).dtype
+            else:
+                assert False, "Wave is of type '%i', not supported" % dtype
+            dtype = dtype.newbyteorder(format)
+
+            ignore = f.read(4) # 1 uint32
+            bname = self._flatten(struct.unpack(format+'20c', f.read(20)))
+            ignore = f.read(4) # 2 int16
+            ignore = f.read(4) # 1 uint32
+            dUnits = self._flatten(struct.unpack(format+'4c', f.read(4)))
+            xUnits = self._flatten(struct.unpack(format+'4c', f.read(4)))
+            npnts = struct.unpack(format+'i', f.read(4))[0]
+            amod = struct.unpack(format+'h', f.read(2))[0]
+            dx = struct.unpack(format+'d', f.read(8))[0]
+            x0 = struct.unpack(format+'d', f.read(8))[0]
+            ignore = f.read(4) # 2 int16
+            fsValid = struct.unpack(format+'h', f.read(2))[0]
+            topFullScale = struct.unpack(format+'d', f.read(8))[0]
+            botFullScale = struct.unpack(format+'d', f.read(8))[0]
+            ignore = f.read(16) # 16 int8
+            modDate = struct.unpack(format+'I', f.read(4))[0]
+            ignore = f.read(4) # 1 uint32
+            # Numpy algorithm works a lot faster than struct.unpack
+            data = numpy.fromfile(f, dtype, npnts)
+            # NOTE(review): for versions 2/3 the wave note is never read,
+            # so self.spring_constant stays None and the multiplication
+            # below would raise TypeError -- confirm v2/3 files occur.
+
+        elif version == 5:
+            # pre header
+            checksum = struct.unpack(format+'H', f.read(2))[0] # Checksum over this header and the wave header.
+            wfmSize = struct.unpack(format+'i', f.read(4))[0] # The size of the WaveHeader2 data structure plus the wave data plus 16 bytes of padding.
+            formulaSize = struct.unpack(format+'i', f.read(4))[0]
+            noteSize = struct.unpack(format+'i', f.read(4))[0] # The size of the note text.
+            dataEUnitsSize = struct.unpack(format+'i', f.read(4))[0]
+            dimEUnitsSize = struct.unpack(format+'4i', f.read(16))
+            dimLabelsSize = struct.unpack(format+'4i', f.read(16))
+            sIndicesSize = struct.unpack(format+'i', f.read(4))[0]
+            optionSize1 = struct.unpack(format+'i', f.read(4))[0]
+            optionSize2 = struct.unpack(format+'i', f.read(4))[0]
+
+            # header
+            ignore = f.read(4)
+            CreationDate = struct.unpack(format+'I',f.read(4))[0]
+            modData = struct.unpack(format+'I',f.read(4))[0]
+            npnts = struct.unpack(format+'i',f.read(4))[0]
+            # wave header
+            dtype = struct.unpack(format+'h',f.read(2))[0]
+            if dtype == 2:
+                dtype = numpy.float32(.0).dtype
+            elif dtype == 4:
+                dtype = numpy.double(.0).dtype
+            else:
+                assert False, "Wave is of type '%i', not supported" % dtype
+            dtype = dtype.newbyteorder(format)
+
+            ignore = f.read(2) # 1 int16
+            ignore = f.read(6) # 6 schar, SCHAR = SIGNED CHAR? ignore = fread(fid,6,'schar'); #
+            ignore = f.read(2) # 1 int16
+            bname = self._flatten(struct.unpack(format+'32c',f.read(32)))
+            ignore = f.read(4) # 1 int32
+            ignore = f.read(4) # 1 int32
+            ndims = struct.unpack(format+'4i',f.read(16)) # Number of of items in a dimension -- 0 means no data.
+            sfA = struct.unpack(format+'4d',f.read(32))
+            sfB = struct.unpack(format+'4d',f.read(32))
+            dUnits = self._flatten(struct.unpack(format+'4c',f.read(4)))
+            xUnits = self._flatten(struct.unpack(format+'16c',f.read(16)))
+            fsValid = struct.unpack(format+'h',f.read(2))
+            whpad3 = struct.unpack(format+'h',f.read(2))
+            ignore = f.read(16) # 2 double
+            ignore = f.read(40) # 10 int32
+            ignore = f.read(64) # 16 int32
+            ignore = f.read(6) # 3 int16
+            ignore = f.read(2) # 2 char
+            ignore = f.read(4) # 1 int32
+            ignore = f.read(4) # 2 int16
+            ignore = f.read(4) # 1 int32
+            ignore = f.read(8) # 2 int32
+
+            data = numpy.fromfile(f, dtype, npnts)
+            # Parse the wave note ('key:value' lines separated by CR) to
+            # recover acquisition parameters.
+            note_str = f.read(noteSize)
+            note_lines = note_str.split('\r')
+            self.note = {}
+            for line in note_lines:
+                if ':' in line:
+                    key, value = line.split(':', 1)
+                    self.note[key] = value
+            # NOTE(review): raises KeyError if the note lacks 'Velocity'
+            # or 'SpringConstant' -- confirm these are always present.
+            self.retract_velocity = float(self.note['Velocity'])
+            self.spring_constant = float(self.note['SpringConstant'])
+        else:
+            # NOTE(review): dtype is unbound in this branch, so the '%'
+            # formatting raises NameError before the assert message is
+            # built; 'version' was probably intended here.
+            assert False, "Fileversion is of type '%i', not supported" % dtype
+            data = []
+
+        f.close()
+        if len(data) > 0:
+            #we have 3 columns: deflection, LVDT, raw
+            #TODO detect which is each one
+            count = npnts / 3
+            lvdt = data[:count]
+            deflection = data[count:2 * count]
+            #every column contains data for extension and retraction
+            #we assume the same number of points for each
+            #we could possibly extract this info from the note
+            count = npnts / 6
+
+            forcechunk=deflection*self.spring_constant
+            distancechunk=lvdt
+
+            if whichchunk==self.forcechunk:
+                return forcechunk
+            if whichchunk==self.distancechunk:
+                return distancechunk
+        else:
+            return None
+
+    def _force(self):
+        #returns force vector
+        Kspring=self.spring_constant
+        return DataChunk([(meter*Kspring) for meter in self._deflection()])
+
+    def _deflection(self):
+        #for internal use (feeds _force)
+        # Recover deflection by dividing the cached force chunk by k.
+        deflect=self.data_chunks[self.forcechunk]/self.spring_constant
+        return deflect
+
+    def _flatten(self, tup):
+        # Join a tuple of single characters (struct.unpack 'c' output)
+        # into one string.
+        out = ''
+        for ch in tup:
+            out += ch
+        return out
+
+    def _Z(self):
+        # Z (distance) vector as a DataChunk with ext()/ret() accessors.
+        return DataChunk(self.data_chunks[self.distancechunk])
+
+    def is_me(self):
+        # Accept .ibw files that contain a 'ForceNote:' line; as a side
+        # effect, parse and cache the data chunks for later use.
+        if len(self.lines) < 34:
+            return False
+
+        name, extension = os.path.splitext(self.filename)
+        if extension == '.ibw':
+            for line in self.lines:
+                if line.startswith('ForceNote:'):
+                    self.data_chunks=[self._get_data_chunk(num) for num in [0,1,2]]
+                    return True
+                else:
+                    # NOTE(review): this else belongs to the if, so the
+                    # loop returns False on the FIRST non-matching line --
+                    # effectively only line 1 is ever tested; confirm
+                    # whether the whole file was meant to be scanned.
+                    return False
+        else:
+            return False
+
+    def close_all(self):
+        '''
+        Explicitly closes all files
+        '''
+        self.textfile.close()
+        self.binfile.close()
+
+    def default_plots(self):
+        '''
+        creates the default PlotObject
+        '''
+        # Force vs Z, split into extension/retraction halves.
+        force=self._force()
+        zdomain=self._Z()
+        main_plot=lhc.PlotObject()
+        main_plot.vectors=[[zdomain.ext(), force.ext()],[zdomain.ret(), force.ret()]]
+        main_plot.normalize_vectors()
+        main_plot.units=['meters','newton']
+        main_plot.destination=0
+        main_plot.title=self.filepath
+
+
+        return [main_plot]
+
+    def deflection(self):
+        #interface for correct plotmanip and others
+        # Returns (extension, retraction) deflection vectors.
+        deflectionchunk=DataChunk(self._deflection())
+        return deflectionchunk.ext(),deflectionchunk.ret()
#!/usr/bin/env python\r
+# -*- coding: utf-8 -*-\r
\r
'''\r
HOOKE - A force spectroscopy review & analysis tool\r
self.figures=[control.get_figure() for control in self.controls]\r
self.axes=[figure.gca() for figure in self.figures]\r
\r
+ for i in range(len(self.axes)):\r
+ self.axes[i].xaxis.set_major_formatter(EngrFormatter())\r
+ self.axes[i].yaxis.set_major_formatter(EngrFormatter(2))\r
+\r
+\r
self.cpanels[1].Hide()\r
self.mainpanel.splitter.Initialize(self.cpanels[0])\r
\r
ylim=self.axes[dest].get_ylim() \r
self.axes[dest].set_ylim((ylim[1],ylim[0])) \r
\r
+ for i in range(len(self.axes)):\r
+ self.axes[i].xaxis.set_major_formatter(EngrFormatter())\r
+ self.axes[i].yaxis.set_major_formatter(EngrFormatter(2))\r
+\r
+\r
self.controls[dest].draw()\r
\r
\r
'''\r
if dest==None:\r
dest=self.current_plot_dest\r
-\r
- plot=None\r
- for aplot in self.plots:\r
- if aplot.destination == dest:\r
- plot=aplot\r
- return plot\r
+ try:\r
+ plot=None\r
+ for aplot in self.plots:\r
+ if aplot.destination == dest:\r
+ plot=aplot\r
+ return plot\r
+ except:\r
+ print "No curve available"\r
+ return None\r
\r
def _replot(self):\r
'''\r
#!/usr/bin/env python
+# -*- coding: utf-8 -*-
'''
hooke_cli.py
else:
SLASH="/"
if list_path[-1] == SLASH:
- list_path=list_path+'*.*'
- else:
- list_path=list_path+SLASH+'*.*'
-
- #expanding correctly the input list with the glob module :)
+ list_path=list_path+'*'
+ else:
+ list_path=list_path+SLASH+'*'
+
+ #expanding correctly the input list with the glob module :)
list_files=glob.glob(list_path)
list_files.sort()
except:
pass
- columns=[]
+ try:
+ outofplot=self.plots[whichplot].vectors
+ except:
+ print "Plot index out of range."
+ return 0
+
+ columns=[]
for dataset in self.plots[whichplot].vectors:
for i in range(0,len(dataset)):
columns.append([])
print self.current_list[self.pointer].notes
else:
if self.notes_filename == None:
+ if not os.path.exists(os.path.realpath('output')):
+ os.mkdir('output')
self.notes_filename=raw_input('Notebook filename? ')
+ self.notes_filename=os.path.join(os.path.realpath('output'),self.notes_filename)
title_line='Notes taken at '+time.asctime()+'\n'
- f=open(self.notes_filename,'w')
+ f=open(self.notes_filename,'a')
f.write(title_line)
f.close()
#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
'''
libhooke.py
import os.path
import string
import csv
+from matplotlib.ticker import ScalarFormatter
+
from . import libhookecurve as lhc
pass
-class ClickedPoint(object):
+class EngrFormatter(ScalarFormatter):
+ """A variation of the standard ScalarFormatter, using only multiples of
+three
+in the mantissa. A fixed number of decimals can be displayed with the optional
+parameter `ndec` . If `ndec` is None (default), the number of decimals is
+defined
+from the current ticks.
+ """
+ def __init__(self, ndec=None, useOffset=True, useMathText=False):
+ ScalarFormatter.__init__(self, useOffset, useMathText)
+ if ndec is None or ndec < 0:
+ self.format = None
+ elif ndec == 0:
+ self.format = "%d"
+ else:
+ self.format = "%%1.%if" % ndec
+ #........................
+
+ def _set_orderOfMagnitude(self, mrange):
+ """Sets the order of magnitude."""
+ locs = numpy.absolute(self.locs)
+ if self.offset:
+ oom = numpy.floor(numpy.log10(mrange))
+ else:
+ if locs[0] > locs[-1]:
+ val = locs[0]
+ else:
+ val = locs[-1]
+ if val == 0:
+ oom = 0
+ else:
+ oom = numpy.floor(numpy.log10(val))
+ if oom <= -3:
+ self.orderOfMagnitude = 3*(oom//3)
+ elif oom <= -1:
+ self.orderOfMagnitude = -3
+ elif oom >= 4:
+ self.orderOfMagnitude = 3*(oom//3)
+ else:
+ self.orderOfMagnitude = 0
+
+
+ #........................
+ def _set_format(self):
+ """Sets the format string to format all ticklabels."""
+ # set the format string to format all the ticklabels
+ locs = (numpy.array(self.locs)-self.offset) / 10**self.orderOfMagnitude+1e-15
+ sigfigs = [len(str('%1.3f'% loc).split('.')[1].rstrip('0')) \
+ for loc in locs]
+ sigfigs.sort()
+ if self.format is None:
+ self.format = '%1.' + str(sigfigs[-1]) + 'f'
+ if self._usetex or self._useMathText: self.format = '$%s$'%self.format
+
+
+
+class ClickedPoint:
'''
this class defines what a clicked point on the curve plot is
'''
'''
#FIXME: a general algorithm using min() is needed!
- print '---DEPRECATED FIND_GRAPH_COORDS_OLD---'
+ #print '---DEPRECATED FIND_GRAPH_COORDS_OLD---'
best_index=0
best_dist=10**9 #should be more than enough given the scale
outside of which the peak is automatically discarded (in nm)
'''
- #MACROS.
- #FIXME: to move outside function
- def fit_interval_nm(start_index,plot,nm,backwards):
- '''
- Calculates the number of points to fit, given a fit interval in nm
- start_index: index of point
- plot: plot to use
- backwards: if true, finds a point backwards.
- '''
- whatset=1 #FIXME: should be decidable
- x_vect=plot.vectors[1][0]
-
- c=0
- i=start_index
- start=x_vect[start_index]
- maxlen=len(x_vect)
- while abs(x_vect[i]-x_vect[start_index])*(10**9) < nm:
- if i==0 or i==maxlen-1: #we reached boundaries of vector!
- return c
-
- if backwards:
- i-=1
- else:
- i+=1
- c+=1
- return c
-
- def pickup_contact_point():
- '''macro to pick up the contact point by clicking'''
- contact_point=self._measure_N_points(N=1, whatset=1)[0]
- contact_point_index=contact_point.index
- self.wlccontact_point=contact_point
- self.wlccontact_index=contact_point.index
- self.wlccurrent=self.current.path
- return contact_point, contact_point_index
-
- def find_current_peaks(noflatten):
- #Find peaks.
- defplot=self.current.curve.default_plots()[0]
- if not noflatten:
- flatten=self._find_plotmanip('flatten') #Extract flatten plotmanip
- defplot=flatten(defplot, self.current, customvalue=1) #Flatten curve before feeding it to has_peaks
- peak_location,peak_size=self.has_peaks(defplot, self.convfilt_config['mindeviation'])
- return peak_location, peak_size
-
#default fit etc. variables
pl_value=None
T=self.config['temperature']
#--Contact point arguments
if 'reclick' in args.split():
print 'Click contact point'
- contact_point, contact_point_index = pickup_contact_point()
+ contact_point, contact_point_index = self.pickup_contact_point()
elif 'noauto' in args.split():
if self.wlccontact_index==None or self.wlccurrent != self.current.path:
print 'Click contact point'
- contact_point , contact_point_index = pickup_contact_point()
+ contact_point , contact_point_index = self.pickup_contact_point()
else:
contact_point=self.wlccontact_point
contact_point_index=self.wlccontact_index
cindex=self.find_contact_point()
contact_point=self._clickize(displayed_plot.vectors[1][0], displayed_plot.vectors[1][1], cindex)
#--END COMMAND LINE PARSING--
-
-
- peak_location, peak_size = find_current_peaks(noflatten)
-
+
+
+ peak_location, peak_size = self.find_current_peaks(noflatten)
+
if len(peak_location) == 0:
print 'No peaks to fit.'
return
#Pick up force baseline
if rebase:
- clicks=self.config['baseline_clicks']
- if clicks==0:
- self.basepoints=[]
- base_index_0=peak_location[-1]+fit_interval_nm(peak_location[-1], displayed_plot, self.config['auto_right_baseline'],False)
- self.basepoints.append(self._clickize(displayed_plot.vectors[1][0],displayed_plot.vectors[1][1],base_index_0))
- base_index_1=self.basepoints[0].index+fit_interval_nm(self.basepoints[0].index, displayed_plot, self.config['auto_left_baseline'],False)
- self.basepoints.append(self._clickize(displayed_plot.vectors[1][0],displayed_plot.vectors[1][1],base_index_1))
- elif clicks>0:
- print 'Select baseline'
- if clicks==1:
- self.basepoints=self._measure_N_points(N=1, whatset=whatset)
- base_index_1=self.basepoints[0].index+fit_interval_nm(self.basepoints[0].index, displayed_plot, self.config['auto_left_baseline'], False)
- self.basepoints.append(self._clickize(displayed_plot.vectors[1][0],displayed_plot.vectors[1][1],base_index_1))
- else:
- self.basepoints=self._measure_N_points(N=2, whatset=whatset)
-
- self.basecurrent=self.current.path
-
+ self.basepoints=self.baseline_points(peak_location, displayed_plot)
+
boundaries=[self.basepoints[0].index, self.basepoints[1].index]
boundaries.sort()
to_average=displayed_plot.vectors[1][1][boundaries[0]:boundaries[1]] #y points to average
#WLC FITTING
#define fit interval
if not usepoints:
- fit_points=fit_interval_nm(peak, displayed_plot, self.config['auto_fit_nm'], True)
+ fit_points=self.fit_interval_nm(peak, displayed_plot, self.config['auto_fit_nm'], True)
peak_point=self._clickize(displayed_plot.vectors[1][0],displayed_plot.vectors[1][1],peak)
other_fit_point=self._clickize(displayed_plot.vectors[1][0],displayed_plot.vectors[1][1],peak-fit_points)
#Show wlc fits and peak locations
self._send_plot([fitplot])
- #self.do_peaks('')
-
+
print 'Using fit function: ',self.config['fit_function']
print 'Measurements for all peaks detected:'
print 'contour (nm)', c_lengths
--- /dev/null
+# -*- coding: utf-8 -*-
+from ..libhooke import WX_GOOD, ClickedPoint
+
+import wxversion
+wxversion.select(WX_GOOD)
+from wx import PostEvent
+import numpy as np
+import scipy as sp
+import copy
+import os.path
+import time
+
+
+class curvetoolsCommands:
+    # Mixin of shared curve-analysis helpers (fit interval sizing, peak
+    # finding, contact-point and baseline picking).  Expects the host
+    # class to provide self.config, self.convfilt_config, self.current,
+    # self.has_peaks, self._measure_N_points, self._clickize and
+    # self._find_plotmanip.
+
+    def fit_interval_nm(self,start_index,plot,nm,backwards):
+        '''
+        Calculates the number of points to fit, given a fit interval in nm
+        start_index: index of point
+        plot: plot to use
+        backwards: if true, finds a point backwards.
+        '''
+        whatset=1 #FIXME: should be decidable
+        x_vect=plot.vectors[1][0]
+
+        c=0
+        i=start_index
+        start=x_vect[start_index]
+        maxlen=len(x_vect)
+        # Walk along x until the distance from the start point exceeds
+        # `nm` nanometers (x presumably in meters, hence 10**9 -- TODO
+        # confirm units).
+        while abs(x_vect[i]-x_vect[start_index])*(10**9) < nm:
+            if i==0 or i==maxlen-1: #we reached boundaries of vector!
+                return c
+
+            if backwards:
+                i-=1
+            else:
+                i+=1
+            c+=1
+        return c
+
+
+
+    def find_current_peaks(self,noflatten, a=True, maxpeak=True):
+        #Find peaks.
+        # a: minimum-deviation threshold; the default True means "use the
+        # configured convfilt 'mindeviation'".
+        if a==True:
+            a=self.convfilt_config['mindeviation']
+        try:
+            abs_devs=float(a)
+        except:
+            # Broad except: any unparsable threshold falls back to the
+            # configured default rather than aborting.
+            print "Bad input, using default."
+            abs_devs=self.convfilt_config['mindeviation']
+
+        defplot=self.current.curve.default_plots()[0]
+        if not noflatten:
+            flatten=self._find_plotmanip('flatten') #Extract flatten plotmanip
+            defplot=flatten(defplot, self.current, customvalue=1) #Flatten curve before feeding it to has_peaks
+        pk_location,peak_size=self.has_peaks(defplot, abs_devs, maxpeak)
+        return pk_location, peak_size
+
+
+    def pickup_contact_point(self,N=1,whatset=1):
+        '''macro to pick up the contact point by clicking'''
+        # NOTE(review): the N and whatset parameters are ignored -- the
+        # call below hard-codes N=1, whatset=1.
+        contact_point=self._measure_N_points(N=1, whatset=1)[0]
+        contact_point_index=contact_point.index
+        # Cache the selection so later fits can reuse it for this curve.
+        self.wlccontact_point=contact_point
+        self.wlccontact_index=contact_point.index
+        self.wlccurrent=self.current.path
+        return contact_point, contact_point_index
+
+
+
+    def baseline_points(self,peak_location, displayed_plot):
+        # Determine the two baseline points, depending on the configured
+        # 'baseline_clicks':
+        #   0 -> fully automatic, to the right of the last peak;
+        #   1 -> user clicks one point, second derived automatically;
+        #   2 -> user clicks both points.
+        clicks=self.config['baseline_clicks']
+        if clicks==0:
+            self.basepoints=[]
+            base_index_0=peak_location[-1]+self.fit_interval_nm(peak_location[-1], displayed_plot, self.config['auto_right_baseline'],False)
+            self.basepoints.append(self._clickize(displayed_plot.vectors[1][0],displayed_plot.vectors[1][1],base_index_0))
+            base_index_1=self.basepoints[0].index+self.fit_interval_nm(self.basepoints[0].index, displayed_plot, self.config['auto_left_baseline'],False)
+            self.basepoints.append(self._clickize(displayed_plot.vectors[1][0],displayed_plot.vectors[1][1],base_index_1))
+        elif clicks>0:
+            print 'Select baseline'
+            if clicks==1:
+                self.basepoints=self._measure_N_points(N=1, whatset=1)
+                base_index_1=self.basepoints[0].index+self.fit_interval_nm(self.basepoints[0].index, displayed_plot, self.config['auto_left_baseline'], False)
+                self.basepoints.append(self._clickize(displayed_plot.vectors[1][0],displayed_plot.vectors[1][1],base_index_1))
+            else:
+                self.basepoints=self._measure_N_points(N=2, whatset=1)
+
+        # Remember which curve this baseline belongs to.
+        self.basecurrent=self.current.path
+        return self.basepoints
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
'''
FIT
Non-standard Dependencies:
procplots.py (plot processing plugin)
'''
-from hooke.libhooke import WX_GOOD, ClickedPoint
+from ..libhooke import WX_GOOD, ClickedPoint
import wxversion
wxversion.select(WX_GOOD)
self.wlccurrent=None
self.wlccontact_point=None
self.wlccontact_index=None
-
+
+ def dist2fit(self):
+ '''Calculates the average distance from data to fit, scaled by the standard deviation
+ of the free cantilever area (thermal noise)
+ '''
+
+
+
def wlc_fit(self,clicked_points,xvector,yvector, pl_value, T=293, return_errors=False):
'''
Worm-like chain model fitting.
'''
#STEP 1: Prepare the vectors to apply the fit.
-
-
if pl_value is not None:
pl_value=pl_value/(10**9)
ODR STUFF
fixme: remove these comments after testing
'''
-
-
+ def dist(px,py,linex,liney):
+ distancesx=scipy.array([(px-x)**2 for x in linex])
+ minindex=np.argmin(distancesx)
+ print px, linex[0], linex[-1]
+ return (py-liney[minindex])**2
+
def f_wlc(params,x,T=T):
'''
wlc function for ODR fitting
therm=Kb*T #so we have thermal energy
return ( (therm*pii/4.0) * (((1-(x*lambd))**-2.0) - 1 + (4.0*x*lambd)) )
-
+
+
#STEP 3: plotting the fit
#obtain domain to plot the fit - from contact point to last_index plus 20 points
yfit=wlc_eval(xfit_chunk_corr_up, out.beta, pl_value,T)
yfit_down=[-y for y in yfit]
yfit_corr_down=[y+clicked_points[0].graph_coords[1] for y in yfit_down]
-
+
+
+ #calculate fit quality
+ qsum=0
+ yqeval=wlc_eval(xchunk_corr_up,out.beta,pl_value,T)
+ #we need to cut the extra from thuleindex
+ for qindex in np.arange(0,len(yqeval)):
+ qsum+=(yqeval[qindex]-ychunk_corr_up[qindex])**2
+ qstd=np.sqrt(qsum/len(ychunk_corr_up))
+
+
if return_errors:
- return fit_out, yfit_corr_down, xfit_chunk, fit_errors
+ return fit_out, yfit_corr_down, xfit_chunk, fit_errors, qstd
else:
- return fit_out, yfit_corr_down, xfit_chunk, None
-
+ return fit_out, yfit_corr_down, xfit_chunk, None, qstd
+
def fjc_fit(self,clicked_points,xvector,yvector, pl_value, T=293, return_errors=False):
'''
Freely-jointed chain function
ODR STUFF
fixme: remove these comments after testing
'''
+ def dist(px,py,linex,liney):
+ distancesx=scipy.array([(px-x)**2 for x in linex])
+ minindex=np.argmin(distancesx)
+ print minindex, px, linex[0], linex[-1]
+ return (py-liney[minindex])**2
+
def coth(z):
'''
hyperbolic cotangent
return (1/lambd)*(coth(y*(1/pii)/therm) - (therm*pii)/y)
+
#STEP 3: plotting the fit
#obtain domain to plot the fit - from contact point to last_index plus 20 points
thule_index=last_index+10
deltay=yfit_down[normalize_index]-clicked_points[0].graph_coords[1]
yfit_corr_down=[y-deltay for y in yfit_down]
-
+
+
+ #calculate fit quality
+ #creates dense y vector
+ yqeval=np.linspace(np.min(ychunk_corr_up)/2,np.max(ychunk_corr_up)*2,10*len(ychunk_corr_up))
+ #corresponding fitted x
+ xqeval=fjc_eval(yqeval,out.beta,pl_value,T)
+
+ qsum=0
+ for qindex in np.arange(0,len(ychunk_corr_up)):
+ qsum+=dist(xchunk_corr_up[qindex],ychunk_corr_up[qindex],xqeval,yqeval)
+ qstd=np.sqrt(qsum/len(ychunk_corr_up))
+
+
if return_errors:
- return fit_out, yfit_corr_down[normalize_index+1:], xfit_chunk_corr_up[normalize_index+1:], fit_errors
+ return fit_out, yfit_corr_down[normalize_index+1:], xfit_chunk_corr_up[normalize_index+1:], fit_errors, qstd
else:
- return fit_out, yfit_corr_down[normalize_index+1:], xfit_chunk_corr_up[normalize_index+1:], None
+ return fit_out, yfit_corr_down[normalize_index+1:], xfit_chunk_corr_up[normalize_index+1:], None, qstd
+
+    def efjc_fit(self,clicked_points,xvector,yvector, pl_value, T=293.0, return_errors=False):
+        '''
+        Extended Freely-jointed chain function
+        ref: F Oesterhelt, M Rief and H E Gaub, New Journal of Physics 1 (1999) 6.1–6.11
+        Please note that 2-parameter fitting (length and kl) usually does not converge, use fixed kl
+        '''
+        '''
+        clicked_points[0] is the contact point (calculated or hand-clicked)
+        clicked_points[1] and [2] are edges of chunk
+
+        '''
+        #Fixed parameters from reference
+        Kb=(1.38065e-2) #in pN.nm
+        Lp=0.358 #planar, nm
+        Lh=0.280 #helical, nm
+        Ks=150e3 #pN/nm
+
+
+        #STEP 1: Prepare the vectors to apply the fit.
+
+        #indexes of the selected chunk
+        first_index=min(clicked_points[1].index, clicked_points[2].index)
+        last_index=max(clicked_points[1].index, clicked_points[2].index)
+
+        #getting the chunk and reverting it
+        xchunk,ychunk=xvector[first_index:last_index],yvector[first_index:last_index]
+        xchunk.reverse()
+        ychunk.reverse()
+        #put contact point at zero and flip around the contact point (the fit wants a positive growth for extension and force)
+        xchunk_corr_up=[-(x-clicked_points[0].graph_coords[0]) for x in xchunk]
+        ychunk_corr_up=[-(y-clicked_points[0].graph_coords[1]) for y in ychunk]
+
+
+        #make them arrays
+        xchunk_corr_up=scipy.array(xchunk_corr_up)
+        ychunk_corr_up=scipy.array(ychunk_corr_up)
+
+        # Work in nm/pN internally (input vectors presumably in m/N --
+        # TODO confirm against the drivers).
+        xchunk_corr_up_nm=xchunk_corr_up*1e9
+        ychunk_corr_up_pn=ychunk_corr_up*1e12
+
+
+        #STEP 2: actually do the fit
+
+        #Find furthest point of chunk and add it a bit; the fit must converge
+        #from an excess!
+        xchunk_high=max(xchunk_corr_up_nm)
+        xchunk_high+=(xchunk_high/10.0)
+
+        #Here are the linearized start parameters for the WLC.
+        #[Ns , pii=1/P]
+        #max number of monomers (all helical)for a contour length xchunk_high
+        excessNs=xchunk_high/(Lp)
+        p0=[excessNs,(1.0/(0.7))]
+        p0_plfix=[(excessNs)]
+
+        def dist(px,py,linex,liney):
+            # Squared y-distance from point (px,py) to the curve sample
+            # whose x is closest to px; used for the fit-quality metric.
+            distancesx=scipy.array([(px-x)**2 for x in linex])
+            minindex=np.argmin(distancesx)
+            return (py-liney[minindex])**2
+
+        def deltaG(f):
+            # Free-energy difference between planar and helical state as
+            # a function of force (Oesterhelt et al., section 4.2).
+            dG0=12.4242 #3kt in pN.nm
+            dL=0.078 #planar-helical
+            return dG0-f*dL
+
+        def Lfactor(f,T=T):
+            # Effective monomer length: Boltzmann-weighted mix of the
+            # planar and helical lengths at force f.
+            Lp=0.358 #planar, nm
+            Lh=0.280 #helical, nm
+            Kb=(1.38065e-2)
+            therm=Kb*T
+            dG=deltaG(f)
+
+            return Lp/(np.exp(dG/therm)+1)+Lh/(np.exp(-dG/therm)+1)
+
+        def coth(z):
+            '''
+            hyperbolic cotangent
+            '''
+            return 1.0/np.tanh(z)
+
+        def x_efjc(params,f,T=T,Ks=Ks):
+            '''
+            efjc function for ODR fitting
+            '''
+            # Two-parameter variant: params = [Ns, 1/kl].
+            Ns=params[0]
+            invkl=params[1]
+            Kb=(1.38065e-2)
+            therm=Kb*T
+            beta=(f/therm)/invkl
+
+            x=Ns*Lfactor(f)*(coth(beta)-1.0/beta)+Ns*f/Ks
+            return x
+
+        def x_efjc_plfix(params,f,kl_value=pl_value,T=T,Ks=Ks):
+            '''
+            efjc function for ODR fitting
+            '''
+            # One-parameter variant with the Kuhn length fixed to pl_value.
+            Ns=params
+            invkl=1.0/kl_value
+            Kb=(1.38065e-2)
+            therm=Kb*T
+            beta=(f/therm)/invkl
+
+            x=Ns*Lfactor(f)*(coth(beta)-1.0/beta)+Ns*f/Ks
+            return x
+
+        #make the ODR fit
+        # Note the model is inverted: x (extension) is fitted as a
+        # function of y (force).
+        realdata=scipy.odr.RealData(ychunk_corr_up_pn,xchunk_corr_up_nm)
+        if pl_value:
+            model=scipy.odr.Model(x_efjc_plfix)
+            o = scipy.odr.ODR(realdata, model, p0_plfix)
+        else:
+            print 'WARNING eFJC fit with free pl sometimes does not converge'
+            model=scipy.odr.Model(x_efjc)
+            o = scipy.odr.ODR(realdata, model, p0)
+
+        o.set_job(fit_type=2)
+        out=o.run()
+
+
+        # Convert fitted parameters back to SI: contour length in m and
+        # Kuhn length in m (kfitnm keeps the nm value for convenience).
+        Ns=out.beta[0]
+        Lc=Ns*Lp*1e-9
+        if len(out.beta)>1:
+            kfit=1e-9/out.beta[1]
+            kfitnm=1/out.beta[1]
+        else:
+            kfit=1e-9*pl_value
+            kfitnm=pl_value
+
+        fit_out=[Lc, kfit]
+
+        #Calculate fit errors from output standard deviations.
+        fit_errors=[]
+        fit_errors.append(out.sd_beta[0]*Lp*1e-9)
+        if len(out.beta)>1:
+            fit_errors.append(1e9*out.sd_beta[1]*kfit**2)
+
+
+
+        def efjc_eval(y,params,pl_value,T=T,Lfactor=Lfactor,Ks=Ks):
+            '''
+            Evaluates the eFJC function
+            '''
+            if not pl_value:
+                Ns, invkl = params
+            else:
+                Ns = params
+
+            if pl_value:
+                invkl=1.0/pl_value
+
+            Kb=(1.38065e-2) #boltzmann constant
+            therm=Kb*T #so we have thermal energy
+            beta=(y/therm)/invkl
+            x= Ns*Lfactor(y)*(coth(beta)-1.0/beta)+Ns*y/Ks
+
+            return x
+
+        #STEP 3: plotting the fit
+        #obtain domain to plot the fit - from contact point to last_index plus 20 points
+        thule_index=last_index+10
+        if thule_index > len(xvector): #for rare cases in which we fit something at the END of whole curve.
+            thule_index = len(xvector)
+        #reverse etc. the domain
+        ychunk=yvector[clicked_points[0].index:thule_index]
+        if len(ychunk)>0:
+            y_evalchunk=np.linspace(min(ychunk),max(ychunk),100)
+        else:
+            #Empty y-chunk. It happens whenever we set the contact point after a recognized peak,
+            #or other buggy situations. Kludge to live with it now...
+            ychunk=yvector[:thule_index]
+            y_evalchunk=np.linspace(min(ychunk),max(ychunk),100)
+
+        yfit_down=[-y for y in y_evalchunk]
+        yfit_corr_down=[y+clicked_points[0].graph_coords[1] for y in yfit_down]
+        yfit_corr_down=scipy.array(yfit_corr_down)
+
+        #the fitted curve: reflip, re-uncorrect
+        xfit=efjc_eval(1e12*yfit_corr_down, out.beta, pl_value,T)*1e-9
+        xfit=list(xfit)
+        xfit.reverse()
+        xfit_chunk_corr_up=[-(x-clicked_points[0].graph_coords[0]) for x in xfit]
+
+        #xfit_chunk_corr_up=scipy.array(xfit_chunk_corr_up)
+        #deltay=yfit_down[0]-yvector[clicked_points[0].index]
+
+        #This is a terrible, terrible kludge to find the point where it should normalize (and from where it should plot)
+        xxxdists=[]
+        for index in scipy.arange(1,len(xfit_chunk_corr_up),1):
+            xxxdists.append((clicked_points[0].graph_coords[0]-xfit_chunk_corr_up[index])**2)
+        normalize_index=xxxdists.index(min(xxxdists))
+        #End of kludge
+
+        deltay=yfit_down[normalize_index]-clicked_points[0].graph_coords[1]
+        yfit_corr_down=[y-deltay for y in yfit_down]
+
+        #calculate fit quality
+        #creates dense y vector
+        yqeval=np.linspace(np.min(ychunk_corr_up_pn)/2,np.max(ychunk_corr_up_pn)*2,10*len(ychunk_corr_up_pn))
+        #corresponding fitted x
+        xqeval=efjc_eval(yqeval,out.beta,pl_value)
+
+        # qstd: RMS distance from data to fit, converted back to N.
+        qsum=0
+        for qindex in np.arange(0,len(ychunk_corr_up_pn)):
+            qsum+=dist(xchunk_corr_up_nm[qindex],ychunk_corr_up_pn[qindex],xqeval,yqeval)
+        qstd=1e-12*np.sqrt(qsum/len(ychunk_corr_up_pn))
+
+        # Same 5-tuple shape as wlc_fit/fjc_fit: params, y-fit, x-fit,
+        # errors (or None), fit quality.
+        if return_errors:
+            return fit_out, yfit_corr_down[normalize_index+1:], xfit_chunk_corr_up[normalize_index+1:], fit_errors, qstd
+        else:
+            return fit_out, yfit_corr_down[normalize_index+1:], xfit_chunk_corr_up[normalize_index+1:], None, qstd
+
+
+
+
def do_wlc(self,args):
'''
WLC
First you have to click a contact point.
Then you have to click the two edges of the data you want to fit.
-
+
+ Fit quality compares the distance to the fit with the thermal noise (a good fit should be close to 1)
+
The fit function depends on the fit_function variable. You can set it with the command
"set fit_function wlc" or "set fit_function fjc" depending on the function you prefer.
For FJC, ref:
C.Ray and B.B. Akhremitchev; http://www.chem.duke.edu/~boris/research/force_spectroscopy/fit_efjc.pdf
+
+ For eFJC, ref:
+ F Oesterhelt, M Rief and H E Gaub, New Journal of Physics 1 (1999) 6.1–6.11 (section 4.2)
+ NOTE: use fixed pl for better results.
Arguments:
pl=[value] : Use a fixed persistent length (WLC) or Kuhn length (FJC) for the fit. If pl is not given,
print 'Click edges of chunk'
points=self._measure_N_points(N=2, whatset=1)
points=[contact_point]+points
+
try:
if self.config['fit_function']=='wlc':
- params, yfit, xfit, fit_errors = self.wlc_fit(points, displayed_plot.vectors[1][0], displayed_plot.vectors[1][1],pl_value,T, return_errors=True )
+ params, yfit, xfit, fit_errors,qstd = self.wlc_fit(points, displayed_plot.vectors[1][0], displayed_plot.vectors[1][1],pl_value,T, return_errors=True )
name_of_charlength='Persistent length'
elif self.config['fit_function']=='fjc':
- params, yfit, xfit, fit_errors = self.fjc_fit(points, displayed_plot.vectors[1][0], displayed_plot.vectors[1][1],pl_value,T, return_errors=True )
+ params, yfit, xfit, fit_errors,qstd = self.fjc_fit(points, displayed_plot.vectors[1][0], displayed_plot.vectors[1][1],pl_value,T, return_errors=True )
name_of_charlength='Kuhn length'
+ elif self.config['fit_function']=='efjc':
+ params, yfit, xfit, fit_errors,qstd = self.efjc_fit(points, displayed_plot.vectors[1][0], displayed_plot.vectors[1][1],pl_value,T, return_errors=True )
+ name_of_charlength='Kuhn length (e)'
else:
print 'No recognized fit function defined!'
- print 'Set your fit function to wlc or fjc.'
+ print 'Set your fit function to wlc, fjc or efjc.'
return
except:
print 'Standard deviation (contour length)', fit_nm[0]
if len(fit_nm)>1:
print 'Standard deviation ('+name_of_charlength+')', fit_nm[1]
-
-
+
+ print 'Fit quality: '+str(qstd/np.std(displayed_plot.vectors[1][1][-20:-1]))
+
+
#add the clicked points in the final PlotObject
clickvector_x, clickvector_y=[], []
for item in points:
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
+
'''
FLATFILTS
#-----CONVFILT-------------------------------------------------
#-----Convolution-based peak recognition and filtering.
#Requires the libpeakspot.py library
-
- def has_peaks(self, plot, abs_devs=None):
+
+ def has_peaks(self, plot, abs_devs=None, maxpeak=True, window=10):
'''
Finds peak position in a force curve.
FIXME: should be moved in libpeakspot.py
noise_level=lps.noise_absdev(convoluted[cut_index:], self.convfilt_config['positive'], self.convfilt_config['maxcut'], self.convfilt_config['stable'])
above=lps.abovenoise(convoluted,noise_level,cut_index,abs_devs)
peak_location,peak_size=lps.find_peaks(above,seedouble=self.convfilt_config['seedouble'])
-
- #take the maximum
+
+ #take the minimum or the maximum of a peak
for i in range(len(peak_location)):
peak=peak_location[i]
- maxpk=min(yret[peak-10:peak+10])
- index_maxpk=yret[peak-10:peak+10].index(maxpk)+(peak-10)
- peak_location[i]=index_maxpk
-
+ valpk=min(yret[peak-window:peak+window]) #maximum in force (near the unfolding point)
+ index_pk=yret[peak-window:peak+window].index(valpk)+(peak-window)
+
+ if maxpeak==False:
+ valpk=max(yret[peak:peak+window]) #minimum in force, near the baseline
+ index_pk=yret[peak:peak+window].index(valpk)+(peak)
+
+# Let's explain that for the minimum. Imagine that we know there is a peak at position/region 100 and you have found its y-value.
+# Now you look in the array, from 100-10 to 100+10 (if the window is 10).
+# This "100-10 to 100+10" is substantially a new array with its own index. In this array you have 20
+# elements, so the index of your y-value will be 10.
+# Now to find the index in the TOTAL array you have to add the "position" of the "region" (which in this case
+# corresponds to 100) and also subtract the window size ---> (+100-10)
+
+ peak_location[i]=index_pk
+
return peak_location,peak_size
item.curve=None #empty the item object, to further avoid memory leak
notflat_list.append(item)
+ #NOTE(review): this loop only rebinds a local k 1000 times and has no other
+ #effect — looks like leftover debug/timing code; confirm with the author and remove.
+ for i in range(1000):
+ k=0
+
#Warn that no flattening had been done.
if not ('flatten' in self.config['plotmanips']):
print 'Flatten manipulator was not found. Processing was done without flattening.'
+#!/usr/bin/env python
+# -*- coding: iso-8859-1 -*-
+
'''
generalvclamp.py
print str(forcebase*(10**12))+' pN'
to_dump='forcebase '+self.current.path+' '+str(forcebase*(10**12))+' pN'
self.outlet.push(to_dump)
-\r
+
def plotmanip_multiplier(self, plot, current):
'''
- Multiplies all the Y values of an SMFS curve by a value stored in the 'force_multiplier'\r
+ Multiplies all the Y values of an SMFS curve by a value stored in the 'force_multiplier'
configuration variable. Useful for calibrations and other stuff.
'''
#multiplier is 1...
if (self.config['force_multiplier']==1):
- return plot\r
-\r
+ return plot
+
for i in range(len(plot.vectors[0][1])):
- plot.vectors[0][1][i]=plot.vectors[0][1][i]*self.config['force_multiplier'] \r
-\r
- for i in range(len(plot.vectors[1][1])):
- plot.vectors[1][1][i]=plot.vectors[1][1][i]*self.config['force_multiplier']\r
-\r
- return plot \r
+ plot.vectors[0][1][i]=plot.vectors[0][1][i]*self.config['force_multiplier']
+ for i in range(len(plot.vectors[1][1])):
+ plot.vectors[1][1][i]=plot.vectors[1][1][i]*self.config['force_multiplier']
+ return plot
+
+
def plotmanip_flatten(self, plot, current, customvalue=False):
'''
Subtracts a polynomial fit to the non-contact part of the curve, as to flatten it.
valn=[[] for item in range(max_exponent)]
yrn=[0.0 for item in range(max_exponent)]
errn=[0.0 for item in range(max_exponent)]
-
+
+ #Check if we have a proper numerical value
+ try:
+ zzz=int(max_cycles)
+ except:
+ #Loudly and annoyingly complain if it's not a number, then fallback to zero
+ print '''Warning: flatten value is not a number!
+ Use "set flatten" or edit hooke.conf to set it properly
+ Using zero.'''
+ max_cycles=0
+
for i in range(int(max_cycles)):
x_ext=plot.vectors[0][0][contact_index+delta_contact:]
if fitspan == 0:
# Gets the Xs of two clicked points as indexes on the current curve vector
print 'Click twice to delimit chunk'
- clickedpoints=[]
points=self._measure_N_points(N=2,whatset=1)
- clickedpoints=[points[0].index,points[1].index]
- clickedpoints.sort()
else:
print 'Click once on the leftmost point of the chunk (i.e.usually the peak)'
- clickedpoints=[]
points=self._measure_N_points(N=1,whatset=1)
- clickedpoints=[points[0].index-fitspan,points[0].index]
+
+ slope=self._slope(points,fitspan)
- # Calls the function linefit_between
+ # Outputs the relevant slope parameter
+ print 'Slope:'
+ print str(slope)
+ to_dump='slope '+self.current.path+' '+str(slope)
+ self.outlet.push(to_dump)
+
+ def _slope(self,points,fitspan):
+ # Calls the function linefit_between
parameters=[0,0,[],[]]
+ try:
+ clickedpoints=[points[0].index,points[1].index]
+ clickedpoints.sort()
+ except:
+ clickedpoints=[points[0].index-fitspan,points[0].index]
+
try:
parameters=self.linefit_between(clickedpoints[0],clickedpoints[1])
except:
print 'Cannot fit. Did you click twice the same point?'
return
-
- # Outputs the relevant slope parameter
- print 'Slope:'
- print str(parameters[0])
- to_dump='slope '+self.current.path+' '+str(parameters[0])
- self.outlet.push(to_dump)
-
+
# Makes a vector with the fitted parameters and sends it to the GUI
xtoplot=parameters[2]
ytoplot=[]
else:
lineplot.styles+=[None,'scatter']
if lineplot.colors==[]:
- lineplot.styles=[None,None,None,None]
+ lineplot.colors=[None,None,'black',None]
else:
- lineplot.colors+=[None,None]
+ lineplot.colors+=['black',None]
+
+
+ self._send_plot([lineplot])
+ return parameters[0]
- self._send_plot([lineplot])
def linefit_between(self,index1,index2,whatset=1):
'''
--- /dev/null
+# -*- coding: utf-8 -*-
+from ..libhooke import WX_GOOD, ClickedPoint
+import wxversion
+wxversion.select(WX_GOOD)
+from wx import PostEvent
+import numpy as np
+import scipy as sp
+import copy
+import os.path
+import time
+import sys
+import warnings
+warnings.simplefilter('ignore',np.RankWarning)
+
+
+class jumpstatCommands():
+
+ def do_jumpstat(self,args):
+ '''
+ JUMPSTAT
+ jumpstat.py
+ Based on the convolution recognition automatically give:
+ - the delta distance between the peaks,
+ - the delta-force from the top of the peaks and subsequent relaxation,
+ - the delta-force from the top of the peaks and the baseline
+ The command also allows removing unwanted peaks that can be due to interference.
+ When you first issue the command, it will ask for the filename. If you are giving the filename
+ of an existing file, autopeak will resume it and append measurements to it. If you are giving
+ a new filename, it will create the file and append to it until you close Hooke.
+ You can also define a minimum deviation of the peaks.
+
+ Syntax:
+ jumpstat [deviation]
+ deviation = number of times the convolution signal is above the noise absolute deviation.
+ '''
+
+
+ #finding the max and the minimum positions for all the peaks
+ noflatten=False
+ #we use if else only to avoid a "bad input" message from find_current_peaks
+ if (len(args)==0):
+ max_peaks_location, peak_size=self.find_current_peaks(noflatten)
+ min_peaks_location, pks2=self.find_current_peaks(noflatten, True, False)
+ else:
+ max_peaks_location, peak_size=self.find_current_peaks(noflatten, args)
+ min_peaks_location, pks2=self.find_current_peaks(noflatten, args, False)
+
+
+ #print "max_peaks_location: "+str(len(max_peaks_location))
+ #print "min_peaks_location: "+str(len(min_peaks_location))
+
+ #if no peaks, we have nothing to plot. exit.
+ if len(max_peaks_location)==0:
+ print "No peaks on this curve."
+ return
+
+ #maxima and minima must come in pairs; otherwise the later index-by-index pairing below is meaningless
+ if len(max_peaks_location)!=len(min_peaks_location):
+ print "Something went wrong in peaks recognition, number of minima is different from number of maxima. Exiting."
+ return
+
+ #otherwise, we plot the peak locations.
+ xplotted_ret=self.plots[0].vectors[1][0]
+ yplotted_ret=self.plots[0].vectors[1][1]
+ xgood=[xplotted_ret[index] for index in max_peaks_location]
+ ygood=[yplotted_ret[index] for index in max_peaks_location]
+
+ xafter=[xplotted_ret[index] for index in min_peaks_location]
+ yafter=[yplotted_ret[index] for index in min_peaks_location]
+
+ recplot=self._get_displayed_plot()
+ recplot2=self._get_displayed_plot()
+ recplot.vectors.append([xgood,ygood])
+ recplot2.vectors.append([xafter,yafter])
+
+ if recplot.styles==[]:
+ recplot.styles=[None,None,'scatter']
+ recplot.colors=[None,None,None]
+ else:
+ recplot.styles+=['scatter']
+ recplot.colors+=[None]
+
+ #NOTE(review): unlike the recplot branch above, the empty-styles branch here sets the
+ #appended set's style to None instead of 'scatter' — possibly a copy-paste slip; confirm.
+ if recplot2.styles==[]:
+ recplot2.styles=[None,None,None]
+ recplot2.colors=[None,'1.0',None]
+ else:
+ recplot2.styles+=['scatter']
+ recplot2.colors+=['0.5']
+
+ self._send_plot([recplot])
+ self._send_plot([recplot2])
+
+
+ #finding the baseline
+ self.basepoints=self.baseline_points(max_peaks_location, recplot)
+ boundaries=[self.basepoints[0].index, self.basepoints[1].index]
+ boundaries.sort()
+ to_average=recplot.vectors[1][1][boundaries[0]:boundaries[1]] #y points to average
+ avg=np.mean(to_average)
+
+
+ dist=[]
+ jumpforce=[]
+ force=[]
+
+ #we calculate the distance vector (scaled to nm)
+ for g in range(len(max_peaks_location)-1):
+ dist.append((10**9)*(xplotted_ret[max_peaks_location[g]]-xplotted_ret[max_peaks_location[g+1]]))
+ print "Distance values for the peaks in nm:"
+ print dist
+
+ #the jump-force vector (scaled to pN)
+ for g in range(len(max_peaks_location)):
+ jumpforce.append((10**12) *(yplotted_ret[min_peaks_location[g]] -yplotted_ret[max_peaks_location[g]]) )
+ print "Force values for the jumps of the peaks in pN:"
+ print jumpforce
+
+ #the force from baseline vector (scaled to pN)
+ for g in range(len(max_peaks_location)):
+ force.append((10**12)*(avg-yplotted_ret[max_peaks_location[g]]))
+ print "Force values for the peaks in pN:"
+ print force
+
+
+
+ #Now ask for the peaks that we don't want
+ print 'Peaks to ignore (0,1...n from contact point,return to take all)'
+ print 'N to discard measurement'
+ exclude_raw=raw_input('Input:')
+ if exclude_raw=='N':
+ print 'Discarded.'
+ return
+
+ if not exclude_raw=='':
+ exclude=exclude_raw.split(',')
+ #we convert in numbers the input
+ try:
+ exclude=[int(item) for item in exclude]
+ except:
+ print 'Bad input, taking nothing.'
+ return
+
+# we remove the peaks that we don't want from the list, we need a counter because if we remove
+# a peak the other peaks in the list are shifted by one at each step
+ count=0
+ for a in exclude:
+ if (a==0):
+ #NOTE(review): removing index 0 also shifts the remaining peaks, but count is
+ #not incremented here and peak_size is not trimmed (unlike the else branch) —
+ #looks inconsistent; confirm intended behavior with the author.
+ max_peaks_location=max_peaks_location[1:]
+ min_peaks_location=min_peaks_location[1:]
+ else:
+ new_a=a-count
+ max_peaks_location= max_peaks_location[0:new_a]+max_peaks_location[new_a+1:]
+ min_peaks_location= min_peaks_location[0:new_a]+min_peaks_location[new_a+1:]
+ peak_size= peak_size[0:new_a]+peak_size[new_a+1:]
+ count+=1
+
+
+ #print "max_peaks_location: "+str(len(max_peaks_location))
+ #print "min_peaks_location: "+str(len(min_peaks_location))
+
+
+ dist=[]
+ jumpforce=[]
+ force=[]
+ #we recalculate the distances and the forces after the removing of the unwanted peaks
+ #NOTE(review): unlike the first pass above, these values are NOT scaled to nm/pN — the
+ #saved file therefore holds SI units (the header below says "(m)" and "pN"); verify.
+ for g in range(len(max_peaks_location)-1):
+ dist.append(xplotted_ret[max_peaks_location[g]]-xplotted_ret[max_peaks_location[g+1]])
+ for g in range(len(max_peaks_location)):
+ jumpforce.append( yplotted_ret[min_peaks_location[g]] - yplotted_ret[max_peaks_location[g]] )
+ for g in range(len(max_peaks_location)):
+ force.append(avg - yplotted_ret[max_peaks_location[g]])
+
+
+
+
+
+ #Save file info
+ if self.autofile=='':
+ self.autofile=raw_input('Jumpstat filename? (return to ignore) ')
+ if self.autofile=='':
+ print 'Not saved.'
+ return
+
+ #new file: write header before the first record; existing file: just append
+ if not os.path.exists(self.autofile):
+ f=open(self.autofile,'w+')
+ f.write('Analysis started '+time.asctime()+'\n')
+ f.write('----------------------------------------\n')
+ f.write('; Delta Distance length (m); Jump Force pN; Standard Force pN\n')
+ f.write(self.current.path+'\n')
+ for k in range(len(dist)):
+ f.write(";")
+ f.write(str(dist[k])+";"+str(jumpforce[k])+";"+str(force[k])+"\n" )
+ f.write("\n")
+ f.close()
+
+ else:
+ f=open(self.autofile,'a+')
+ f.write(self.current.path+'\n')
+ for k in range(len(dist)):
+ f.write(";")
+ f.write(str(dist[k])+";"+str(jumpforce[k])+";"+str(force[k])+"\n" )
+ f.write("\n")
+ f.close()
+
+ print 'Saving...'
deviation = number of times the convolution signal is above the noise absolute deviation.
'''
- def find_current_peaks(noflatten, a):
- #Find peaks.
- if len(a)==0:
- a=self.convfilt_config['mindeviation']
- try:
- abs_devs=float(a)
- except:
- print "Bad input, using default."
- abs_devs=self.convfilt_config['mindeviation']
-
- defplot=self.current.curve.default_plots()[0]
- if not noflatten:
- flatten=self._find_plotmanip('flatten') #Extract flatten plotmanip
- defplot=flatten(defplot, self.current, customvalue=1) #Flatten curve before feeding it to has_peaks
- pk_loc,peak_size=self.has_peaks(defplot, abs_devs)
- return pk_loc, peak_size
-
-
+
noflatten=False
- peaks_location, peak_size=find_current_peaks(noflatten, args)
-
+ peaks_location, peak_size=self.find_current_peaks(noflatten)
+
#if no peaks, we have nothing to plot. exit.
if len(peaks_location)==0:
return
--- /dev/null
+#!/usr/bin/env python
+
+'''
+multifit.py
+
+Alberto Gomez-Casado, (c) 2010, University of Twente (The Netherlands)
+Licensed under GNU GPL v2
+'''
+
+#FIXME clean this, probably some dependencies are not needed
+
+from ..libhooke import WX_GOOD, ClickedPoint
+import wxversion
+wxversion.select(WX_GOOD)
+from wx import PostEvent
+import numpy as np
+import scipy as sp
+import copy
+import os.path
+import time
+import tempfile
+import warnings
+warnings.simplefilter('ignore',np.RankWarning)
+import Queue
+
+global measure_wlc
+global EVT_MEASURE_WLC
+
+global events_from_fit
+events_from_fit=Queue.Queue() #GUI ---> CLI COMMUNICATION
+
+
+class multifitCommands:
+
+ def do_multifit(self,args):
+ '''
+ MULTIFIT
+ (multifit.py)
+ Presents curves for manual analysis in a comfortable mouse-only fashion.
+ Obtains contour length, persistence length, rupture force and
+ slope - loading rate.
+ WLC is shown in red, eFJC in blue.
+ -------------
+ Syntax:
+ multifit [pl=value] [kl=value] [t=value] [slopew=value] [basew=value]
+ [slope2p] [justone]
+
+ pl=[value] and kl=[value]: Use a fixed persistent length (WLC) or Kuhn
+ length (eFJC) for the fit. If pl is not given, the fit will be
+ a 2-variable fit.
+ DO NOT put spaces between 'pl', '=' and the value.
+ eFJC fit works better with a fixed kl
+ The value must be in nanometers.
+
+ t=[value] : Use a user-defined temperature. The value must be in
+ kelvins; by default it is 293 K.
+ DO NOT put spaces between 't', '=' and the value.
+
+ slopew and basew : width in points for slope fitting (points to the
+ right of clicked rupture) and base level fitting (points to
+ the left of clicked top of rupture).
+ If slopew is not provided, the routine will ask for a 5th
+ point to fit the slope.
+ DO NOT put spaces between 'slopew' or 'basew', '=' value.
+
+ slope2p : calculates the slope from the two clicked points,
+ instead of fitting data between them
+
+ justone : performs the fits over current curve instead of iterating
+
+ See fit command help for more information on the options and fit
+ procedures.
+ NOTE: centerzero plot modifier should be activated (set centerzero 1).
+ '''
+
+ #NOTE duplicates a lot of code from do_fit in fit.py, a call to it could be used directly
+ #but it is easier to control the program flow bypassing it
+
+ #defaults; T comes from the config, basew defaults to 15 points
+ pl_value=None
+ kl_value=None
+ T=self.config['temperature']
+ slopew=None
+ basew=15
+ justone=False
+ twops=False
+
+ #FIXME spaces are not allowed between pl or t and value
+ for arg in args.split():
+ #look for a persistent length argument.
+ if 'pl=' in arg:
+ pl_expression=arg.split('=')
+ pl_value=float(pl_expression[1]) #actual value
+ if 'kl=' in arg:
+ kl_expression=arg.split('=')
+ kl_value=float(kl_expression[1]) #actual value
+ #look for a T argument.
+ if ('t=' in arg) or ('T=' in arg):
+ t_expression=arg.split('=')
+ T=float(t_expression[1])
+ #look for a basew argument.
+ if ('basew=' in arg):
+ basew_expression=arg.split('=')
+ basew=int(basew_expression[1])
+ #look for a slopew argument.
+ if ('slopew=' in arg):
+ slopew_expression=arg.split('=')
+ slopew=int(slopew_expression[1])
+ if('justone' in arg):
+ justone=True
+ if('slope2p' in arg):
+ twops=True
+
+ counter=0
+ savecounter=0
+ curveindex=0
+
+ #optionally start part-way through the playlist (1-based user input)
+ if not justone:
+ print 'What curve no. you would like to start? (enter for ignoring)'
+ skip=raw_input()
+
+ if skip.isdigit()==False:
+ skip=0
+ else:
+ skip=int(skip)-1
+ print 'Skipping '+str(skip)+ ' curves'
+ else:
+ skip=0
+ #begin presenting curves for analysis
+ while curveindex <len(self.current_list):
+ if not justone:
+ counter+=1
+ curve=self.current_list[curveindex]
+ if skip>curveindex:
+ curveindex+=1
+ continue
+
+ #give periodically the opportunity to stop the analysis
+ if counter%20==0 and counter>0:
+ print '\nYou checked '+str(counter)+' curves. Do you want to continue?'
+ self.current=curve
+ self.do_plot(0)
+ if self.YNclick():
+ print 'Going on...'
+ else:
+ break
+ else:
+ self.current=curve
+ self.do_plot(0)
+ else:
+ curve=self.current
+ self.do_plot(0)
+ if not justone:
+ print '\nCurve '+str(curveindex+1)+' of '+str(len(self.current_list))
+ print 'Click contact point or left end (red area)of the curve to skip'
+ #highlights an area dedicated to reject current curve
+ #NOTE(review): -sizeret/6 relies on Python 2 integer division; under Python 3
+ #this becomes a float index and would raise — confirm before porting.
+ sizeret=len(self.plots[0].vectors[1][0])
+ noarea=self.plots[0].vectors[1][0][-sizeret/6:-1]
+ noareaplot=copy.deepcopy(self._get_displayed_plot())
+ #noise dependent slight shift to improve visibility
+ noareaplot.add_set(noarea,np.ones_like(noarea)*5.0*np.std(self.plots[0].vectors[1][1][-sizeret/6:-1]))
+ noareaplot.styles+=['scatter']
+ noareaplot.colors+=["red"]
+ self._send_plot([noareaplot])
+ contact_point=self._measure_N_points(N=1, whatset=1)[0]
+ contact_point_index=contact_point.index
+ noareaplot.remove_set(-1)
+ #remove set leaves some styles disturbing the next graph, fixed by doing do_plot
+ self.do_plot(0)
+ #a click inside the red area (rightmost sixth) means "skip this curve"
+ if contact_point_index>5.0/6.0*sizeret:
+ if justone:
+ break
+ curveindex+=1
+ continue
+
+ self.wlccontact_point=contact_point
+ self.wlccontact_index=contact_point.index
+ self.wlccurrent=self.current.path
+
+ print 'Now click two points for the fitting area (one should be the rupture point)'
+ wlcpoints=self._measure_N_points(N=2,whatset=1)
+ print 'And one point of the top of the jump'
+ toppoint=self._measure_N_points(N=1,whatset=1)
+ slopepoint=ClickedPoint()
+ if slopew==None:
+ print 'Click a point to calculate slope'
+ slopepoint=self._measure_N_points(N=1,whatset=1)
+
+ fitpoints=[contact_point]+wlcpoints
+ #use the currently displayed plot for the fit
+ displayed_plot=self._get_displayed_plot()
+
+ #use both fit functions
+ #NOTE(review): if a fit raises here, the corresponding *params/*yfit/*fit_errors
+ #names stay undefined, yet they are used unconditionally below — the wlcerror/
+ #fjcerror flags are never checked, so a failed fit leads to a NameError; confirm.
+ try:
+ wlcparams, wlcyfit, wlcxfit, wlcfit_errors,wlc_qstd = self.wlc_fit(fitpoints, displayed_plot.vectors[1][0], displayed_plot.vectors[1][1],pl_value,T, return_errors=True )
+ wlcerror=False
+ except:
+ print 'WLC fit not possible'
+ wlcerror=True
+
+ try:
+ fjcparams, fjcyfit, fjcxfit, fjcfit_errors,fjc_qstd = self.efjc_fit(fitpoints, displayed_plot.vectors[1][0], displayed_plot.vectors[1][1],kl_value,T, return_errors=True )
+ fjcerror=False
+ except:
+ print 'eFJC fit not possible'
+ fjcerror=True
+
+ #Measure rupture force: the rupture point is the lower of the two clicked points
+ ruptpoint=ClickedPoint()
+ if wlcpoints[0].graph_coords[1]<wlcpoints[1].graph_coords[1]:
+ ruptpoint=wlcpoints[0]
+ else:
+ ruptpoint=wlcpoints[1]
+ tpindex=toppoint[0].index
+ toplevel=np.average(displayed_plot.vectors[1][1][tpindex:tpindex+basew])
+ force=toplevel-ruptpoint.graph_coords[1]
+
+ #Measure the slope - loading rate
+
+ if slopew==None:
+ if twops:
+ #two-point slope: straight rise/run between rupture and the clicked slope point
+ xint=ruptpoint.graph_coords[0]-slopepoint[0].graph_coords[0]
+ yint=ruptpoint.graph_coords[1]-slopepoint[0].graph_coords[1]
+ slope=yint/xint
+ slopeplot=self._get_displayed_plot(0) #get topmost displayed plot
+ slopeplot.add_set([ruptpoint.graph_coords[0],slopepoint[0].graph_coords[0]],[ruptpoint.graph_coords[1],slopepoint[0].graph_coords[1]])
+ if slopeplot.styles==[]:
+ slopeplot.styles=[None,None,'scatter']
+ slopeplot.colors=[None,None,'red']
+ else:
+ slopeplot.styles+=['scatter']
+ slopeplot.colors+=['red']
+ else:
+ slope=self._slope([ruptpoint]+slopepoint,slopew)
+ else:
+ slope=self._slope([ruptpoint],slopew)
+
+ #plot results (_slope already did)
+
+ #now we have the fit, we can plot it
+ #add the clicked points in the final PlotObject
+ clickvector_x, clickvector_y=[], []
+ for item in wlcpoints:
+ clickvector_x.append(item.graph_coords[0])
+ clickvector_y.append(item.graph_coords[1])
+
+ #create a custom PlotObject to gracefully plot the fit along the curves
+
+ #first fix those irritating zoom-destroying fits
+ lowestpoint=min(displayed_plot.vectors[1][1])
+ for i in np.arange(0,len(wlcyfit)):
+ if wlcyfit[i] < 1.2*lowestpoint:
+ wlcyfit[i]=toplevel
+ for i in np.arange(0,len(fjcyfit)):
+ if fjcyfit[i] < 1.2*lowestpoint:
+ fjcyfit[i]=toplevel
+
+ fitplot=copy.deepcopy(displayed_plot)
+ fitplot.add_set(wlcxfit,wlcyfit)
+ fitplot.add_set(fjcxfit,fjcyfit)
+ fitplot.add_set(clickvector_x,clickvector_y)
+
+ fitplot.styles+=[None,None,'scatter']
+ fitplot.colors+=["red","blue",None]
+
+ self._send_plot([fitplot])
+
+
+ #present results of measurement
+ #a 1-element params list means the fixed pl/kl was used; append it for printing
+ if len(wlcparams)==1:
+ wlcparams.append(pl_value*1e-9)
+ if len(fjcparams)==1:
+ fjcparams.append(kl_value*1e-9)
+
+ #NOTE(review): in the falsy branches below only *fit_errors is reset — fjcfit_nm/
+ #wlcfit_nm stay undefined and are printed later, which would raise; confirm.
+ if fjcfit_errors:
+ fjcfit_nm=[i*(10**9) for i in fjcfit_errors]
+ if len(fjcfit_nm)==1:
+ fjcfit_nm.append(0)
+ else:
+ fjcfit_errors=[0,0]
+ if wlcfit_errors:
+ wlcfit_nm=[i*(10**9) for i in wlcfit_errors]
+ if len(wlcfit_nm)==1:
+ wlcfit_nm.append(0)
+ else:
+ wlcfit_errors=[0,0]
+
+ #fit quality: residual std relative to the noise of the last points of the curve
+ wlc_fitq=wlc_qstd/np.std(displayed_plot.vectors[1][1][-20:-1])
+ fjc_fitq=fjc_qstd/np.std(displayed_plot.vectors[1][1][-20:-1])
+
+ print '\nRESULTS'
+ print 'WLC contour : '+str(1e9*wlcparams[0])+u' \u00b1 '+str(wlcfit_nm[0])+' nm'
+ print 'Per. length : '+str(1e9*wlcparams[1])+u' \u00b1 '+str(wlcfit_nm[1])+' nm'
+ print 'Quality :'+str(wlc_fitq)
+ print '---'
+ print 'eFJC contour : '+str(1e9*fjcparams[0])+u' \u00b1 '+str(fjcfit_nm[0])+' nm'
+ print 'Kuhn length (e): '+str(1e9*fjcparams[1])+u' \u00b1 '+str(fjcfit_nm[1])+' nm'
+ print 'Quality :'+str(fjc_fitq)
+ print '---'
+ print 'Force : '+str(1e12*force)+' pN'
+ print 'Slope : '+str(slope)+' N/m'
+
+ try:
+ #FIXME all drivers should provide retract velocity, in SI or homogeneous units
+ lrate=slope*self.current.curve.retract_velocity
+ print 'Loading rate (UNTESTED):'+str(lrate)
+ except:
+ print 'Loading rate : n/a'
+ lrate='n/a'
+
+ if justone:
+ return
+
+ #save accept/discard/repeat
+ print '\n\nDo you want to save these?'
+ if self.YNclick():
+
+ #Save file info
+ if self.autofile=='':
+ self.autofile=raw_input('Results filename? (press enter for a random generated name)')
+ if self.autofile=='':
+ [osf,name]=tempfile.mkstemp(prefix='analysis-',suffix='.txt',text=True,dir=self.config['hookedir'])
+ print 'saving in '+name
+ self.autofile=name
+ os.close(osf)
+ f=open(self.autofile,'a+')
+ f.write('Analysis started '+time.asctime()+'\n')
+ f.write('----------------------------------------\n')
+ f.write(' File ; WLC Cont. length (nm) ; Sigma WLC cl; Per. Length (nm) ; Sigma pl ; WLC-Q ; eFJC Cont. length (nm) ; Sigma eFJC cl ; Kuhn length (nm); Sigma kl ; FJC-Q ;Force (pN) ; Slope (N/m) ; (untested) Loading rate (pN/s)\n')
+ f.close()
+
+ if not os.path.exists(self.autofile):
+ f=open(self.autofile,'a+')
+ f.write('Analysis started '+time.asctime()+'\n')
+ f.write('----------------------------------------\n')
+ f.write(' File ; WLC Cont. length (nm) ; Sigma WLC cl; Per. Length (nm) ; Sigma pl; WLC-Q ;eFJC Cont. length (nm) ; Sigma eFJC cl ; Kuhn length (nm); Sigma kl ; FJC-Q ;Force (pN) ; Slope (N/m) ; (untested) Loading rate (pN/s)\n')
+ f.close()
+
+ print 'Saving...'
+ savecounter+=1
+ f=open(self.autofile,'a+')
+ f.write(self.current.path)
+ f.write(' ; '+str(1e9*wlcparams[0])+' ; '+str(wlcfit_nm[0])+' ; '+str(1e9*wlcparams[1])+' ; '+str(wlcfit_nm[1])+' ; '+ str(wlc_fitq)+' ; '+str(1e9*fjcparams[0])+' ; '+str(fjcfit_nm[0])+' ; '+str(1e9*fjcparams[1])+' ; '+str(fjcfit_nm[1])+' ; '+str(fjc_fitq)+' ; '+str(1e12*force)+' ; '+ str(slope)+' ; '+str(lrate)+'\n')
+ f.close()
+ else:
+ print '\nWould you like to try again on this same curve?'
+ if self.YNclick():
+ continue
+ curveindex+=1
+
+ if not justone:
+ print 'You saved '+str(savecounter)+' out of '+str(len(self.current_list))+' total curves.'
+
+
+
+ def YNclick(self):
+ #Asks a yes/no question through the plot: draws a marker on the curve, then
+ #interprets a click above y=0 as Yes (True) and below as No (False).
+ #shows something in the graph to indicate we expect YNclick
+ #FIXME is there an easy way to write in the graph?
+ askplot=self._get_displayed_plot()
+ #marker position/size derived from the retraction data: x near the left end,
+ #vertical extent scaled by the noise of the last ~50 points
+ center=np.min(askplot.vectors[1][0])+15e-9
+ scale=np.std(askplot.vectors[1][1][-50:-1])
+ hscale=np.mean(np.diff(askplot.vectors[1][0][-10:-1]))
+ xhline=[center-20*hscale,center-10*hscale,center+10*hscale,center+20*hscale]
+ ytopline=[15*scale,20*scale,20*scale,15*scale]
+ ybotline=[-15*scale,-20*scale,-20*scale,-15*scale]
+ yvline=np.arange(-25*scale,26*scale,scale/2)
+ xvline=np.ones_like(yvline)*center
+ xshow=np.concatenate((xvline,xhline,xhline))
+ yshow=np.concatenate((yvline,ytopline,ybotline))
+ askplot.add_set(xshow,yshow)
+ askplot.styles+=['scatter']
+ askplot.colors+=["black"]
+ self._send_plot([askplot])
+ print 'Click any point over y=0 for Yes, under it for No'
+ point=self._measure_N_points(N=1,whatset=1)[0]
+ value=point.absolute_coords[1]
+ #clean up the marker before returning
+ askplot.remove_set(-1)
+ self._send_plot([askplot])
+ if value<0:
+ return False
+ else:
+ return True
+
+
+
+
+#!/usr/bin/env python
+# -*- coding: utf-8 -*-
from ..libhooke import WX_GOOD, ClickedPoint, config_file_path
from mdp import pca
f.write('----------------------------------------\n')
f.write('; Peak number ; 1 peak Length (nm) ; 1 peak Force (pN) ; 2 peak Length (nm) ; 2 peak Force (pN) ; 3 peak Length (nm) ; 3 peak Force (pN) ; 4 peak Length (nm) ; 4 peak Force (pN) ; 5 peak Length (nm) ; 5 peak Force (pN) ; 6 peak Length (nm) ; 6 peak Force (pN) ; 7 peak Length (nm) ; 7 peak Force (pN) ; 8 peak Length (nm) ; 8 peak Force (pN)\n')
f.close()
-
- # ------ FUNCTION ------
- def fit_interval_nm(start_index,plot,nm,backwards):
- '''
- Calculates the number of points to fit, given a fit interval in nm
- start_index: index of point
- plot: plot to use
- backwards: if true, finds a point backwards.
- '''
- whatset=1 #FIXME: should be decidable
- x_vect=plot.vectors[1][0]
-
- c=0
- i=start_index
- start=x_vect[start_index]
- maxlen=len(x_vect)
- while abs(x_vect[i]-x_vect[start_index])*(10**9) < nm:
- if i==0 or i==maxlen-1: #we reached boundaries of vector!
- return c
- if backwards:
- i-=1
- else:
- i+=1
- c+=1
- return c
+
def plot_informations(itplot,pl_value):
'''
cindex=self.find_contact_point(itplot[0]) #Automatically find contact point <158, libhooke.ClickedPoint>
contact_point=self._clickize(itplot[0].vectors[1][0], itplot[0].vectors[1][1], cindex)
self.basepoints=[]
- base_index_0=peak_location[-1]+fit_interval_nm(peak_location[-1], itplot[0], self.config['auto_right_baseline'],False)
+ base_index_0=peak_location[-1]+self.fit_interval_nm(peak_location[-1], itplot[0], self.config['auto_right_baseline'],False)
self.basepoints.append(self._clickize(itplot[0].vectors[1][0],itplot[0].vectors[1][1],base_index_0))
- base_index_1=self.basepoints[0].index+fit_interval_nm(self.basepoints[0].index, itplot[0], self.config['auto_left_baseline'],False)
+ base_index_1=self.basepoints[0].index+self.fit_interval_nm(self.basepoints[0].index, itplot[0], self.config['auto_left_baseline'],False)
self.basepoints.append(self._clickize(itplot[0].vectors[1][0],itplot[0].vectors[1][1],base_index_1))
self.basecurrent=self.current.path
boundaries=[self.basepoints[0].index, self.basepoints[1].index]
return plot
+
+ def plotmanip_centerzero(self, plot, current, customvalue=None):
+ '''
+ Centers the force curve so the median (the free level) corresponds to 0 N
+ Assumes that:
+ - plot.vectors[0][1] is the Y of extension curve
+ - plot.vectors[1][1] is the Y of retraction curve
+
+ customvalue overrides the 'centerzero' config switch when not None.
+ '''
+ #use only for force spectroscopy experiments!
+ if current.curve.experiment != 'smfs':
+ return plot
+
+ #decide whether to run: explicit override wins over the config flag
+ if customvalue != None:
+ execute_me=customvalue
+ else:
+ execute_me=self.config['centerzero']
+ if not execute_me:
+ return plot
+
+
+
+ #free level estimated as the median of the last ~300 retraction points
+ #levelapp=float(np.median(plot.vectors[0][1]))
+ levelret=float(np.median(plot.vectors[1][1][-300:-1]))
+
+ level=levelret
+
+ #shift both extension and retraction Y by the same offset
+ approach=[i-level for i in plot.vectors[0][1]]
+ retract=[i-level for i in plot.vectors[1][1]]
+
+ plot.vectors[0][1]=approach
+ plot.vectors[1][1]=retract
+ return plot
+
'''
def plotmanip_detriggerize(self, plot, current, customvalue=None):
#DEPRECATED
--- /dev/null
+#!/usr/bin/env python
+
+'''review.py
+Alberto Gomez-Casado (c) 2010 University of Twente
+'''
+
+from ..libhooke import WX_GOOD
+import wxversion
+wxversion.select(WX_GOOD)
+from wx import PostEvent
+import numpy as np
+import shutil
+import libinput as linp
+import copy
+import os.path
+
+import warnings
+warnings.simplefilter('ignore',np.RankWarning)
+
+
+class reviewCommands:
+    '''
+    Mixin providing the interactive 'review' command.
+    NOTE(review): relies on attributes/methods supplied by the host Hooke
+    command-line class (self.current_list, self.current, self.do_plot,
+    self._get_displayed_plot, self._send_plot, self.drivers,
+    self.plotmanip_correct) -- confirm against the cli class.
+    '''
+
+    def do_review(self,args):
+        '''
+        REVIEW
+        (review.py)
+        Presents curves (in current playlist) in groups of ten. User can indicate which curves will be selected to be saved in a separate directory for further analysis.
+        By default curves are presented separated -30 nm in x and -100 pN in y.
+        Curve number one of each set is the one showing the approach.
+        ------------
+        Syntax:
+        review [x spacing (nm)] [y spacing (pN)]
+
+        '''
+
+        args=args.split()
+
+        #Optional spacing overrides; fall back to the defaults on bad input.
+        if len(args)==2:
+            try:
+                xgap=int(args[0])*1e-9 #scale to SI units
+                ygap=int(args[1])*1e-12
+            except:
+                print 'Spacings must be numeric! Using defaults'
+                xgap=-30*1e-9
+                ygap=-100*1e-12
+        else:
+            xgap=-30*1e-9
+            ygap=-100*1e-12
+
+        print 'Processing playlist...'
+        print '(Please wait)'
+        keep_list=[]
+
+        c=0
+        print 'You can stop the review at any moment by entering \'q\' you can go back ten curves entering \'b\''
+        print 'What curve no. you would like to start? (Enter for starting from the first)'
+        skip=raw_input()
+
+        #Non-numeric (including empty) input means start from the first curve.
+        if skip.isdigit()==False:
+            skip=0
+        else:
+            skip=int(skip)
+            print 'Skipping '+str(skip)+ ' curves'
+            c=skip
+
+        while c < len(self.current_list):
+
+
+            #take a group of ten curves and plot them with some spacing
+
+            curveset=self.current_list[c:c+10]
+
+            #The first curve of the set is plotted normally (it also shows the
+            #approach); the other nine are overlaid onto the same plot below.
+            base=curveset[0]
+            self.current=base
+            self.do_plot(0)
+            multiplot=copy.deepcopy(self._get_displayed_plot(0))
+            self.current.curve.close_all()
+
+            for i in range(1,10):
+                if i >= len(curveset):
+                    print 'End of the list'
+                    print 'WARNING: maybe you want to finish!'
+                    break
+                nextitem=curveset[i]
+                if not nextitem.identify(self.drivers):
+                    continue
+                nextplot=self.plotmanip_correct(nextitem.curve.default_plots()[0],nextitem)
+                nextvect=nextplot.vectors
+                nextitem.curve.close_all()
+
+                nextx=nextvect[1][0]
+                nexty=nextvect[1][1]
+                #center y around 0
+                ymedian=np.median(nexty)
+                pos=0
+                #Offset the i-th retraction by i*xgap / i*ygap so curves in the
+                #set do not overlap on screen.
+                for j in range(0,len(nextx)):
+                    nextx[j]=nextx[j]+i*xgap
+                    nexty[j]=nexty[j]+i*ygap-ymedian
+                multiplot.add_set(nextx,nexty)
+                multiplot.styles.append('lines')
+                multiplot.colors.append(None)
+
+            self._send_plot([multiplot])
+
+
+            print 'Which ones you want to keep?'
+            keep=raw_input()
+            #NOTE(review): any alphabetic answer other than 'b'/'q' is silently
+            #ignored and the review just advances to the next set of ten.
+            if keep.isalpha():
+                if keep=='b':
+                    print 'Going back ten curves'
+                    c-=10
+                    if c<0:
+                        print 'We are already at the start'
+                        c=0
+                    continue
+                if keep=='q':
+                    break
+            else:
+                for i in keep.split():
+                    if i.isdigit() and int(i)>0 and int(i)<11: #if it is not digit the int() call is never made, so no exception should be happening
+                        keep_item=curveset[int(i)-1].path
+                        if keep_item in keep_list:
+                            print 'This curve ('+keep_item+') was already selected, skipping'
+                        else:
+                            keep_list.append(keep_item)
+                    else:
+                        print 'You entered an invalid value: '+i
+
+            c+=10
+
+            #FIXME I don't know why the print below gives errors sometimes
+            #NOTE(review): 'i' may have been rebound to a *string* by the
+            #'for i in keep.split()' loop above, making c+i+1 raise -- confirm.
+            try:
+                print 'Kept '+str(len(keep_list))+' curves from '+str(min(c+i+1,len(self.current_list)))
+            except:
+                print 'Display error, never mind, we continue. Below the amount of kept and total curves:'
+                print str(len(keep_list))
+                print str(len(self.current_list))
+
+        allok=0 #flag to keep from losing all the work in a slight mistake
+        while allok==0:
+            if len(keep_list) == 0:
+                return
+            save=linp.safeinput('Do you want to save the selected curves?',['y','n'])
+            if save=='y':
+                savedir=linp.safeinput('Destination directory?')
+                savedir=os.path.abspath(savedir)
+                if not os.path.isdir(savedir):
+                    print 'Destination is not a directory. Try again'
+                    continue
+            if save=='n':
+                allok=1
+                return
+
+            #Copy every selected curve file; any copy failure restarts the loop
+            #so the user can supply another directory without losing the selection.
+            for item in keep_list:
+                try:
+                    shutil.copy(item, savedir)
+                    allok=1
+                except (OSError, IOError):
+                    print 'Cannot copy file. '+item+' Perhaps you gave me a wrong directory?'
+                    allok=0
+                    break
+
+        return
--- /dev/null
+#!/usr/bin/env python
+
+'''
+FMjoin.py
+Copies all .ibw files contained in a folder and its subfolders into a single folder. Useful for force maps.
+
+Usage:
+python FMjoin.py origindir destdir
+
+
+Alberto Gomez-Casado (c) 2010, University of Twente (The Netherlands)
+This program is released under the GNU General Public License version 2.
+'''
+
+import os
+import shutil
+import sys
+
+def main(*args):
+    #Validate the two folder arguments, then walk the origin tree copying
+    #every Igor .ibw file into the (flat) destination folder.
+    #NOTE(review): *args is ignored; arguments are read from sys.argv directly.
+    if len(sys.argv) < 2:
+        print 'You must at least specify origin and destination folders.'
+        return 0
+    origin=sys.argv[1]
+    dest=sys.argv[2]
+
+    if os.path.exists(origin):
+        if os.path.exists(dest):
+            if os.listdir(dest)!=[]:
+                print 'Destination folder is not empty! Use another folder.'
+                return 0
+        else:
+            print 'Destination folder does not exist, will create it'
+            os.mkdir(dest)
+    else:
+        print 'You provided a wrong origin folder name, try again.'
+        #NOTE(review): execution falls through after this message; os.walk on a
+        #missing folder yields nothing, but an explicit return would be clearer.
+
+    origin=os.path.abspath(origin)
+    dest=os.path.abspath(dest)
+
+    for root, dirs, files in os.walk(origin):
+        for filename in files:
+            #NOTE(review): split('.')[1] raises IndexError for extension-less
+            #files and misreads names containing extra dots -- confirm inputs.
+            if filename.split('.')[1]!="ibw":
+                continue
+            filepath=os.path.join(root,filename)
+            #to avoid overwriting, we collapse unique paths into filenames
+            rawdest=filepath.split(os.path.commonprefix([origin, filepath]))[1]
+            rawdest=rawdest.replace('/','') #for linux
+            rawdest=rawdest.replace('\\','') #for windows
+            destfile=os.path.join(dest,rawdest)
+            print 'Copying '+rawdest
+            shutil.copy(filepath,destfile)
+
+    return 0
+
+if __name__ == '__main__':
+    sys.exit(main(*sys.argv))
+
+
--- /dev/null
+'''
+Exports every *Defl/*LVDT dataset pair found in an Igor Pro HDF5 file
+(path given as the last command-line argument) to per-curve text files
+in an 'exported' subdirectory next to the input file. The output format
+(IGP-HDF5-Hooke header + SpringConstant line + four tab-separated
+columns) is the one read back by Hooke's hdf5 text driver.
+'''
+import h5py
+import numpy
+import os
+import sys
+
+h5file=os.path.realpath(sys.argv[-1])
+h5dir=os.path.dirname(h5file)
+
+#NOTE(review): the file handle is never closed explicitly; it is released
+#only when the process exits.
+f=h5py.File(h5file)
+
+exportdir=os.path.join(h5dir,'exported')
+try:
+    os.mkdir(exportdir)
+except:
+    print 'mkdir error, maybe the export directory already exists?'
+
+def h5exportfunc(name):
+    #Visitor callback for f.visit(): invoked once per object name in the file.
+    Deflname=name
+    if Deflname.endswith('Defl'): #search for _Defl dataset
+        LVDTname=str.replace(Deflname,'Defl','LVDT') #and corresponding LVDT dataset
+        Defldata=f[Deflname][:] #store the data in local var
+        LVDTdata=f[LVDTname][:]
+        #find useful attr (springc)
+        try:
+            notes=f[Deflname].attrs['IGORWaveNote']
+            springmatch=notes.index("SpringConstant: ")+len("SpringConstant: ")
+            springc=notes[springmatch:].split("\r",1)[0] #probably extracting the leading numbers can be way more elegant than this
+            print Deflname
+        except:
+            print 'Something bad happened with '+Deflname+', ignoring it'
+            return None
+            #returning anything but None halts the visit procedure
+
+        fp=open(os.path.join(exportdir,name.replace('/',''))+'.txt','w')
+        #uses the full HDF5 path (slashes out) to avoid potential overwriting
+        #write attr
+        fp.writelines("IGP-HDF5-Hooke\n")
+        fp.writelines('SpringConstant: '+springc+'\n\n')
+        fp.writelines('App x\tApp y\tRet x\tRet y\n')
+        #write LVDT and Defl data
+        #First half of each dataset is written as the approach columns, the
+        #second half as the retraction columns, side by side row per row.
+        half=Defldata.size/2
+        for i in numpy.arange(0,half):
+            fp.writelines(str(LVDTdata[i])+'\t'+str(Defldata[i])+'\t'+str(LVDTdata[i+half])+'\t'+str(Defldata[i+half])+'\n')
+        #close the file
+        fp.close()
+    return None
+
+
+f.visit(h5exportfunc)