--- /dev/null
+To answer "How much does the X stretch?"
--- /dev/null
+Author: W. Trevor King <wking@drexel.edu>
+
+
+Content-type: text/plain
+
+
+Date: Sat, 15 Aug 2009 11:36:32 +0000
+
--- /dev/null
+creator: W. Trevor King <wking@drexel.edu>
+
+
+reporter: W. Trevor King <wking@drexel.edu>
+
+
+severity: minor
+
+
+status: open
+
+
+summary: Allow selectable tension-group length output in -V runs.
+
+
+time: Sat, 15 Aug 2009 11:36:03 +0000
+
--- /dev/null
+all : bell_rate.png
+
+view : all
+ qiv bell_rate.png
+
+clean :
+ rm -f bell_rate_*.gp bell_rate_*.hist bell_rate_*.d bell_rate_*.png bell_rate_*.compare
+
+bell_rate.png : run_test.sh
+ ./$<
--- /dev/null
+#!/bin/bash
+#
+# Use a single hooke domain as the cantilever, so unfolding force is
+# proportional to time, and connect multiple identical domains with Bell
+# unfolding rates.  We expect a peaked death histogram.
+#
+# e.g.
+# spring constant k=df/dx \__
+# velocity v=dx/dt / `--> df/dt = kv --> f = kvt (+ f(t=0))
+# unfolding rate K= a e^(bf) (frac/s)
+# let f(t=0) = 0
+# let P(f) be the population existing at force f (normed so P(t=f=0)=1).
+# the probability of dying (normed # deaths) between f and f+df is given by
+# dD = K P(f) * dt = K P(f) df/kv since dt = df/kv
+# this is what we plot in the normalized histogram with dD in a bin of width
+# df. The continuous analog is the function dD/df = K/kv P(f).
+#
+# To find P(f), we need to solve
+# dP/dt = -KP
+# dP/P = -K dt = -a e^{bf} df/kv = -z e^{bf} df where z ≡ a/kv
+#   ln(P) = -z/b e^{bf} + c
+# P = C e^{-z/b e^{bf}}
+# Applying our f(t=0)=0 boundary condition we see that C = P(t=f=0) e^{z/b}.
+# Plugging P(f) into our formula for dD/df yields
+# dD/df = K/kv P(f) = z e^{bf} P(t=f=0) e^{z/b} e^{-z/b e^{bf}}
+# dD/df = P(t=f=0) z e^{bf + z/b(1-e^{bf})}
+#
+# The histogram scaling isn't dD/df, though, it's dD/d(bin)
+# dD/d(bin) = dD/df df/d(bin)
+# = binwidth P(t=f=0) z e^{bf + z/b(1-e^{bf})}
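+#
+# As a quick check of the shape: the exponent's derivative is
+#   d/df [bf + z/b (1 - e^{bf})] = b - z e^{bf},
+# so the predicted histogram peaks at f* = ln(b/z)/b when b > z, and it
+# decays monotonically from f = 0 when b <= z.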
+#
+# Reminders:
+# spring constant k=df/dx (N/m)
+# velocity v=dx/dt (m/s)
+# unfolding rate K= a e^(bf) = K0 e^(fdx/kBT) (frac/s)
+# usage: bell_rate.sh <num_domains> <v> <k> <K0> <dx> <T>
+
+if [ $# -ne 6 ]
+then
+ echo "usage: $0 <num_domains> <v> <k> <K0> <dx> <T>"
+ echo ""
+ echo "where"
+ echo "spring constant k=df/dx (N/m)"
+ echo "velocity v=dx/dt (m/s)"
+ echo "unfolding rate K= K0 e^(fdx/kBT) (frac/s)"
+ exit 1
+fi
+
+NDOM=$1
+V=$2
+SPRING=$3
+K0=$4
+DX=$5
+T=$6
+DEBUG=0
+
+# since we're on the cluster, we'll go crazy and get really good statistics ;)
+N=10000
+
+SAWSIM=../../bin/sawsim
+DATA=bell_rate_${NDOM}_${V}_${SPRING}_${K0}_${DX}_${T}.d
+HIST=bell_rate_${NDOM}_${V}_${SPRING}_${K0}_${DX}_${T}.hist
+COMPFILE=bell_rate_${NDOM}_${V}_${SPRING}_${K0}_${DX}_${T}.compare
+GPFILE=bell_rate_${NDOM}_${V}_${SPRING}_${K0}_${DX}_${T}.gp
+PNGFILE=bell_rate_${NDOM}_${V}_${SPRING}_${K0}_${DX}_${T}.png
+
+# set up the theory
+# our predicted histogram (see notes above) is
+# dD/d(bin) = binwidth P0 z e^{bf + z/b(1-e^{bf})}
+# where z ≡ a/kv, and b = Δx/kBT
+KB=1.3806503e-23 # J/K
+B=`python -c "print ($DX/($KB*$T))"`
+Z=`python -c "print (float($K0)/($SPRING*$V))"`
+# find a good df (histogram bin width).
+# currently, scale based only on e^(bf) term in exponent.
+df=`python -c "print (0.01/$B)"`
+
+THEORY_FN="$df * $N * $NDOM * $Z * exp($B*x + $Z/$B *(1 - exp($B*x)))"
+if [ $DEBUG -eq 1 ]; then
+ echo "b = dx/kBT = $B"
+ echo "z = K0/kv = $Z"
+ echo "Theory --> P(x=f) = $THEORY_FN"
+fi
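+
+# Optional extra sanity check (an added sketch, using the peak location
+# noted above): echo where the predicted histogram should peak, so wildly
+# wrong parameters or bin widths are easy to spot.
+if [ $DEBUG -eq 1 ]; then
+    FPEAK=`python -c "from math import log; print log(float($B)/float($Z))/float($B)"`
+    echo "predicted histogram peak near f = $FPEAK N (negative => monotonic decay)"
+fi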
+
+# run the experiments
+> $DATA
+if [ "$ISACLUSTER" -eq 1 ]
+then
+ # Sawsim >= 0.6
+ qcmd -w14:00:00 xtimes $N $SAWSIM -T$T -v$V -s cantilever,hooke,$SPRING -N1 \
+ -s folded,null -N$NDOM -s unfolded,null \
+ -k "'folded,unfolded,bell,{$K0,$DX}'" -q folded -P 1e-5 \
+ | grep -v '^#' | cut -f1 >> $DATA
+else
+ # Sawsim >= 0.6
+ xtimes $N $SAWSIM -T$T -v$V -s cantilever,hooke,$SPRING -N1 \
+ -s folded,null -N$NDOM -s unfolded,null \
+ -k "folded,unfolded,bell,{$K0,$DX}" -q folded \
+ | grep -v '^#' | cut -f1 >> $DATA
+fi
+
+# histogram the data
+if [ $DEBUG -eq 1 ]; then
+ echo "cat $DATA | stem_leaf -v-1 -b$df -c | sed 's/://' > $HIST"
+fi
+cat $DATA | stem_leaf -v-1 -b$df -c | sed 's/://' > $HIST
+wc -l "$HIST"
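+# $HIST should now hold one "<bin start> <count>" pair per line (assuming
+# stem_leaf's "bin: count" output, with the colon stripped by the sed above),
+# which is the two-column format the gnuplot plot below expects.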
+# fit the data to an exponential decay
+if [ $DEBUG -eq 1 ]; then
+ echo "cat $HIST | ./fit_bell"
+fi
+FIT=`cat $HIST | ./fit_bell` || exit 1
+FITZ=`echo "$FIT" | sed -n 's/a:[[:space:]]*//p'`
+FITB=`echo "$FIT" | sed -n 's/b:[[:space:]]*//p'`
+echo -e "z = K0/kv:\t$FITZ\t(expected $Z)" > $COMPFILE
+echo -e "b = dx/kBT:\t$FITB\t(expected $B)" >> $COMPFILE
+cat $COMPFILE
+
+# generate gnuplot script
+if [ $DEBUG -eq 1 ]; then
+ echo "generate Gnuplot script"
+fi
+echo "set terminal png" > $GPFILE
+echo "set output '$PNGFILE'" >> $GPFILE
+echo "set logscale y" >> $GPFILE
+echo "set xlabel 'Force'" >> $GPFILE
+echo "set ylabel 'Deaths'" >> $GPFILE
+echo "theory(x) = $THEORY_FN" >> $GPFILE
+echo "fit(x) = $df * $N * $NDOM * $FITZ * exp($FITB*x + $FITZ/$FITB*(1-exp($FITB*x)))" >> $GPFILE
+echo "plot theory(x), '$HIST' with points, fit(x)" >> $GPFILE
+
+if [ $DEBUG -eq 1 ]; then
+ echo "Generate PNG"
+fi
+gnuplot $GPFILE
--- /dev/null
+# num domains velocity spring constant unfolding rate unfolding distance temperature
+# 7.24296e22 = 1/kB (in SI units)
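+# With T = 1/kB, kB*T = 1 J, so b = dx/(kB*T) = dx and z = K0/(k*v); the six
+# rows below vary one parameter at a time about a b = z = 1 baseline.
+# Quick check:  python -c "print 1.3806503e-23 * 7.24296e22"  -->  ~1.0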
+1 1 1 1 1 7.24296e22
+5 1 1 1 1 7.24296e22
+1 5 1 1 1 7.24296e22
+1 1 5 1 1 7.24296e22
+1 1 1 5 1 7.24296e22
+1 1 1 1 5 7.24296e22
+# Now use reasonable protein parameters
+1 1e-6 0.05 1e-3 1e-9 300
+50 1e-6 0.05 1e-3 1e-9 300
#!/bin/bash
#
-# Use a single hooke domain with a bell unfolding rate, so unfolding force
-# is proportional to time, and we expect a peaked death histogram.
+# run bell_rate.sh with a variety of parameters
#
-# e.g.
-# spring constant k=df/dx=1 \__
-# velocity v=dx/dt=1 / `--> df/dt = kv = 1 --> f = t (+ f0)
-# unfolding rate K= a e^(bf) (frac/s)
-# let f0 = 0
-# let P(f) be the population existing at force f (normed so P(0)=0).
-# the probability of dying (normed # deaths) between f and f+df is given by
-# dD = K P(f) * dt = K P(f) df/kv since dt = df/kv
-# this is what whe plot in the normalized histogram with dD in a bin of width
-# df. The continuous analog is the function dD/df = K/kv P(f).
-#
-# dP/dt = - KP
-# dP/P = -K dt = -a e^{bf} df/kv = -z e^{bf} df where z ≡ a/kv
-# ln(P) = -z/b e^{bf) + c
-# P = C e^{-z/b e^{bf}}
-# Applying our f0=0 boundary condition we see that C = P0 e^{z/b}.
-# Plugging P(f) into our formula for dD/df yields
-# dD/df = K/kv P(f) = z e^{bf} P0 e^{z/b} e^{-z/b e^{bf}}
-# dD/df = P0 z e^{bf + z/b(1-e^{bf})}
-#
-# The histogram scaling isn't dD/df, though, it's dD/d(bin)
-# dD/d(bin) = dD/df df/d(bin) = binwidth P0 z e^{bf + z/b(1-e^{bf})}
-#
-# test with lots of runs of
-# v- T=1/kB v- v=1 v- k=1
-# sawsim -T7.24296e22 -v1 -s folded,hooke,1 -N1 -s unfolded,null \
-# -k "folded,unfolded,bell,{1,1}" -q folded
-# ^- {a,dx}={1,1}
-# (kB = 1.3806503 10-23 J/K, we use T=1/kB to get the bell K=e^{-f})
-#
-# 60 runs of that takes ~1 second:
-# time xtimes 60 sawsim -T7.24296e22 -v1 -s folded,hooke,1 -N1 \
-# -s unfolded,null -k "folded,unfolded,bell,{1,1}" -q folded \
-# > /dev/null
-#
-# usage: bell_rate.sh
-
-# since we're on the cluster, we'll go crazy and get really good statistics ;)
-N=10000
-
-SAWSIM=../../bin/sawsim
-DATA=bell_rate.d
-HIST=bell_rate.hist
-GPFILE=bell_rate.gp
-PNGFILE=bell_rate.png
-
-# set up the theory
-# dD/d(bin) = binwidth P0 z e^{bf + z/b(1-e^{bf})}
-# where z ≡ a/kv, and b = Δx/kBT
-KB=1.3806503e-23 # J/K
-#STYLE="ones"
-STYLE="protein"
-if [ "$STYLE" == "ones" ]
-then
- df=1e-1 # histogram bin width
- DX=1
- T=7.24296e22 # 1/kB
- A=1
- K=1
- V=1
-else
- df=1e-12
- DX=1e-9
- T=300
- A=1e-3
- K=0.05
- V=1e-6
-fi
-B=`echo "print ($DX/($KB*$T))" | python`
-Z=`echo "print (float($A)/($K*$V))" | python`
-THEORY_FN="$df * $N * $Z * exp($B*x + $Z/$B *(1 - exp($B*x)))"
-
-# run the experiments
-> $DATA
-if [ "$ISACLUSTER" -eq 1 ]
-then
- # Sawsim <= 0.5
- #qcmd "xtimes $N $SAWSIM -T$T -v$V -mhooke -a$K -kbell -K$A,$DX -F1 | grep -v '^#' >> $DATA"
- # Sawsim >= 0.6
- qcmd "xtimes $N $SAWSIM -T$T -v$V -s folded,hooke,$K -N1 -s unfolded,null -k 'folded,unfolded,bell,{$A,$DX}' -q folded | grep -v '^#' | cut -f1 >> $DATA"
-else
- # Sawsim <= 0.5
- #xtimes $N $SAWSIM -T$T -v$V -mhooke -a$K -kbell -K$A,$DX -F1 | grep -v '^#' >> $DATA
- # Sawsim >= 0.6
- xtimes $N $SAWSIM -T$T -v$V -s folded,hooke,$K -N1 -s unfolded,null -k "folded,unfolded,bell,{$A,$DX}" -q folded | grep -v '^#' | cut -f1 >> $DATA
-fi
-
-# histogram the data
-cat $DATA | stem_leaf -v-1 -b$df | sed 's/://' > $HIST
-
-# fit the data
-FIT=`cat $HIST | ./fit_bell`
-#echo "$FIT"
-FITZ=`echo "$FIT" | sed -n 's/a:[[:space:]]*//p'`
-FITB=`echo "$FIT" | sed -n 's/b:[[:space:]]*//p'`
-echo -e "z:\t$FITZ\t(expected $Z)"
-echo -e "b:\t$FITB\t(expected $B)"
+# usage: run_test.sh
-# generate gnuplot script
-echo "set terminal png" > $GPFILE
-echo "set output '$PNGFILE'" >> $GPFILE
-echo "set logscale y" >> $GPFILE
-echo "set xlabel 'Time'" >> $GPFILE
-echo "set ylabel 'Deaths'" >> $GPFILE
-echo "theory(x) = $THEORY_FN" >> $GPFILE
-echo "fit(x) = $df * $N * $FITZ * exp($FITB*x + $FITZ/$FITB*(1-exp($FITB*x)))" >> $GPFILE
-echo "plot theory(x), '$HIST' with points, fit(x)" >> $GPFILE
+while read LINE
+do
+ echo ./bell_rate.sh $LINE
+ ./bell_rate.sh $LINE
+done < <(cat params | grep -v '^#')
-gnuplot $GPFILE
+exit 0
--- /dev/null
+all : const_rate.png
+
+view : all
+ qiv const_rate.png
+
+clean :
+ rm -f const_rate_*.gp const_rate_*.hist const_rate_*.d const_rate_*.png const_rate_*.compare
+
+const_rate.png : run_test.sh
+ ./$<
#!/bin/bash
#
-# Use a single hooke domain with a constant unfolding rate, so unfolding force
-# is proportional to time, and we expect an exponential population decay.
+# Use a single hooke domain as the cantilever, so unfolding force is
+# proportional to time, and connect multiple identical domains with a
+# constant unfolding rate.  We expect an exponential population decay.
#
# e.g.
# spring constant k=df/dx \__
# so histogram of unfolding vs. force p(F) normalized to p(0)=1 should follow
# -binwidth N0 dp/dF = binwidth N0 pK/kv = binwidth N0 K/kv exp(-FK/kv)
#
-# test with lots of runs of
-# sawsim -v1 -mhooke -a1 -kconst -K1 -F1
-# which is not a problem, because 200 runs of that takes ~1 second:
-# time xtimes 200 sawsim -v1 -mhooke -a1 -kconst -K1 -F1 > /dev/null
-#
-# usage: const_rate.sh <velocity> <spring_constant> <unfolding_rate>
+# usage: const_rate.sh <num_domains> <velocity> <spring_constant> <unfolding_rate>
-if [ $# -ne 3 ]
+if [ $# -ne 4 ]
then
- echo "usage: const_rate.sh <velocity> <spring_constant> <unfolding_rate>"
+ echo "usage: $0 <num_domains> <velocity> <spring_constant> <unfolding_rate>"
exit 1
fi
-V=$1
-K=$2
-RATE=$3
+NDOM=$1
+V=$2
+K=$3
+RATE=$4
DEBUG=0
# since we're on the cluster, we'll go crazy and get really good statistics ;)
df=`python -c "print 0.1/$RATE"`
SAWSIM=../../bin/sawsim
-DATA=const_rate_${V}_${K}_$RATE.d
-HIST=const_rate_${V}_${K}_$RATE.hist
-COMPFILE=const_rate_${V}_${K}_$RATE.compare
-GPFILE=const_rate_${V}_${K}_$RATE.gp
-PNGFILE=const_rate_${V}_${K}_$RATE.png
+DATA=const_rate_${NDOM}_${V}_${K}_${RATE}.d
+HIST=const_rate_${NDOM}_${V}_${K}_${RATE}.hist
+COMPFILE=const_rate_${NDOM}_${V}_${K}_${RATE}.compare
+GPFILE=const_rate_${NDOM}_${V}_${K}_${RATE}.gp
+PNGFILE=const_rate_${NDOM}_${V}_${K}_${RATE}.png
# set up the theory
# our predicted histogram (see notes above) is
# hist(F) = binwidth N0 K/kv exp(-FK/kv) = A exp(F/B)
-theoryA=`python -c "print $df*$N*$RATE/float($K*$V)"`
+theoryA=`python -c "print $df*$N*$NDOM*$RATE/float($K*$V)"`
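+# (With NDOM identical constant-rate domains each run yields NDOM unfolding
+# events, so only the histogram amplitude scales by NDOM; the decay constant
+# -kv/K, theoryB below, is unchanged.)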
theoryB=`python -c "print float(-$K*$V)/$RATE"`
THEORY_FN="$theoryA * exp(x / ($theoryB))"
> $DATA
if [ "$ISACLUSTER" -eq 1 ]
then
- # Sawsim <= 0.5
- #qcmd "xtimes $N $SAWSIM -v$V -mhooke -a$K -kconst -K$RATE -F1 | grep -v '^#' >> $DATA"
# Sawsim >= 0.6
- qcmd xtimes $N $SAWSIM -v$V -s folded,hooke,$K -N1 -s unfolded,null \
+ qcmd xtimes $N $SAWSIM -v$V -s cantilever,hooke,$K -N1 \
+ -s folded,null -N$NDOM -s unfolded,null \
-k folded,unfolded,const,$RATE -q folded \
| grep -v '^#' | cut -f1 >> $DATA
else
- # Sawsim <= 0.5
- #xtimes $N $SAWSIM -v$V -mhooke -a$K -kconst -K$RATE -F1 | grep -v '^#' >> $DATA
# Sawsim >= 0.6
- xtimes $N $SAWSIM -v$V -s folded,hooke,$K -N1 -s unfolded,null \
+ xtimes $N $SAWSIM -v$V -s cantilever,hooke,$K -N1 \
+ -s folded,null -N$NDOM -s unfolded,null \
-k folded,unfolded,const,$RATE -q folded \
| grep -v '^#' | cut -f1 >> $DATA
fi
# histogram the data
if [ $DEBUG -eq 1 ]; then
- echo "cat $DATA | stem_leaf -v-1 -b$df | sed 's/://' > $HIST"
+ echo "cat $DATA | stem_leaf -v-1 -b$df -c | sed 's/://' > $HIST"
fi
-cat $DATA | stem_leaf -v-1 -b$df | sed 's/://' > $HIST
+cat $DATA | stem_leaf -v-1 -b$df -c | sed 's/://' > $HIST
# fit the data to an exponential decay
if [ $DEBUG -eq 1 ]; then
-# velocity spring constant unfolding rate
-1 1 1
-2 1 1
-1 2 1
-1 1 2
+# num domains velocity spring constant unfolding rate
+1 1 1 1
+1 2 1 1
+1 1 2 1
+1 1 1 2
+50 1 1 1
+50 2 1 1
+50 1 2 1
+50 1 1 2