changeset 451:0a4362d1f138

finishing up chapter 3.
author Robert McIntyre <rlm@mit.edu>
date Wed, 26 Mar 2014 20:38:17 -0400
parents 432f2c4646cb
children f339e3d5cc8c
files assets/Models/worm/touch-profile.png org/util.org org/worm_learn.clj thesis/cortex.org thesis/images/aurellem-gray.png thesis/images/empathy-1.png thesis/rlm-cortex-meng.tex
diffstat 7 files changed, 345 insertions(+), 137 deletions(-) [+]
line wrap: on
line diff
     1.1 Binary file assets/Models/worm/touch-profile.png has changed
     2.1 --- a/org/util.org	Wed Mar 26 03:18:57 2014 -0400
     2.2 +++ b/org/util.org	Wed Mar 26 20:38:17 2014 -0400
     2.3 @@ -148,13 +148,13 @@
     2.4  
     2.5  (defn speed-up 
     2.6    "Increase the dismally slow speed of the world's camera."
     2.7 -  [world]
     2.8 -  (.setMoveSpeed (.getFlyByCamera world)
     2.9 -                 (float 60))
    2.10 -  (.setRotationSpeed (.getFlyByCamera world)
    2.11 -                     (float 3))
    2.12 -  world)
    2.13 -
    2.14 +  ([world] (speed-up world 1))
    2.15 +  ([world amount]
    2.16 +     (.setMoveSpeed (.getFlyByCamera world)
    2.17 +                    (float (* amount 60)))
    2.18 +     (.setRotationSpeed (.getFlyByCamera world)
    2.19 +                        (float (* amount 3)))
    2.20 +     world))
    2.21  
    2.22  (defn no-logging 
    2.23    "Disable all of jMonkeyEngine's logging."
     3.1 --- a/org/worm_learn.clj	Wed Mar 26 03:18:57 2014 -0400
     3.2 +++ b/org/worm_learn.clj	Wed Mar 26 20:38:17 2014 -0400
     3.3 @@ -34,6 +34,14 @@
     3.4       :proprioception (proprioception! model)
     3.5       :muscles (movement! model)}))
     3.6  
     3.7 +(defn worm* []
     3.8 +  (let [model (load-blender-model "Models/worm/worm-of-the-imagination.blend")]
     3.9 +    {:body (doto model (body!))
    3.10 +     :touch (touch! model)
    3.11 +     :proprioception (proprioception! model)
    3.12 +     :muscles (movement! model)}))
    3.13 +
    3.14 +
    3.15  (def output-base (File. "/home/r/proj/cortex/render/worm-learn/curl"))
    3.16  
    3.17  
    3.18 @@ -198,19 +206,22 @@
    3.19  (defn wiggling?
    3.20    "Is the worm wiggling?"
    3.21    [experiences]
    3.22 -  (let [analysis-interval 0x40]
    3.23 +  (let [analysis-interval 96]
    3.24      (when (> (count experiences) analysis-interval)
    3.25        (let [a-flex 3
    3.26              a-ex   2
    3.27              muscle-activity
    3.28              (map :muscle (vector:last-n experiences analysis-interval))
    3.29              base-activity
    3.30 -            (map #(- (% a-flex) (% a-ex)) muscle-activity)]
    3.31 -        (= 2
    3.32 -           (first
    3.33 -            (max-indexed
    3.34 -             (map #(Math/abs %)
    3.35 -                  (take 20 (fft base-activity))))))))))
    3.36 +            (map #(- (% a-flex) (% a-ex)) muscle-activity)
    3.37 +            accept?
    3.38 +            (fn [activity]
    3.39 +              (->> activity (fft) (take 20) (map #(Math/abs %))
    3.40 +                   (max-indexed) (first) (<= 2)))]
    3.41 +        (or (accept? (take 64 base-activity))
    3.42 +            (accept? (take 64 (drop 20 base-activity))))))))
    3.43 +
    3.44 +
    3.45  
    3.46  (def worm-segment-bottom-tip (rect-region [15 15] [22 22]))
    3.47  
    3.48 @@ -223,8 +234,8 @@
    3.49         (let [worm-touch (:touch (peek experiences))
    3.50               tail-touch (worm-touch 0)
    3.51               head-touch (worm-touch 4)]
    3.52 -         (and (< 0.55 (contact worm-segment-bottom-tip tail-touch))
    3.53 -              (< 0.55 (contact worm-segment-top-tip    head-touch))))))
    3.54 +         (and (< 0.1 (contact worm-segment-bottom-tip tail-touch))
    3.55 +              (< 0.1 (contact worm-segment-top-tip    head-touch))))))
    3.56  
    3.57  
    3.58  (declare phi-space phi-scan debug-experience) 
    3.59 @@ -250,7 +261,7 @@
    3.60              :record nil
    3.61              :experiences (atom [])
    3.62              :experience-watch debug-experience
    3.63 -            :worm-model worm-model
    3.64 +            :worm worm
    3.65              :end-frame nil})))
    3.66  
    3.67  (defn dir! [file]
    3.68 @@ -283,14 +294,15 @@
    3.69     (grand-circle? experiences) (.setText text "Grand Circle")
    3.70     (curled? experiences)       (.setText text "Curled")
    3.71     (wiggling? experiences)     (.setText text "Wiggling")
    3.72 -   (resting? experiences)      (.setText text "Resting")))
    3.73 +   (resting? experiences)      (.setText text "Resting")
    3.74 +   :else                       (.setText text "Unknown")))
    3.75  
    3.76  
    3.77  (defn worm-world
    3.78 -  [& {:keys [record motor-control keybindings view experiences
    3.79 -             worm-model end-frame experience-watch] :as settings}]
    3.80 +  [& {:keys    [record motor-control keybindings view experiences
    3.81 +                worm end-frame experience-watch] :as settings}]
    3.82    (let [{:keys [record motor-control keybindings view experiences
    3.83 -                worm-model end-frame experience-watch]}
    3.84 +                worm end-frame experience-watch]}
    3.85          (merge (worm-world-defaults) settings)
    3.86         
    3.87          touch-display  (view-touch)
    3.88 @@ -333,7 +345,7 @@
    3.89             (Capture/captureVideo
    3.90              world
    3.91              (dir! (File. record "main-view"))))
    3.92 -         (speed-up world)
    3.93 +         (speed-up world 0.5)
    3.94           ;;(light-up-everything world)
    3.95           )
    3.96         (fn [world tpf]
    3.97 @@ -475,22 +487,21 @@
    3.98  ;;(infer-nils [nil 2 1 1]) [2 2 1 1]       
    3.99    
   3.100  
   3.101 -(defn debug-experience-phi []
   3.102 +(defn empathy-demonstration []
   3.103    (let [proprio (atom ())]
   3.104      (fn
   3.105 -      [experiences]
   3.106 +      [experiences text]
   3.107        (let [phi-indices (phi-scan (:proprioception (peek experiences)))]
   3.108          (swap! proprio (partial cons phi-indices))
   3.109          (let [exp-thread (longest-thread (take 300 @proprio))
   3.110 -              phi-loop (mapv phi-space (infer-nils exp-thread))]
   3.111 +              empathy (mapv phi-space (infer-nils exp-thread))]
   3.112            (println-repl (vector:last-n exp-thread 22))
   3.113            (cond
   3.114 -           (grand-circle? phi-loop) (println "Grand Circle")
   3.115 -           (curled? phi-loop)       (println "Curled")
   3.116 -           (wiggling? phi-loop)     (println "Wiggling")
   3.117 -           (resting? phi-loop)      (println "Resting")
   3.118 -           :else                    (println "Unknown")))))))
   3.119 -
   3.120 +           (grand-circle? empathy) (.setText text "Grand Circle")
   3.121 +           (curled? empathy)       (.setText text "Curled")
   3.122 +           (wiggling? empathy)     (.setText text "Wiggling")
   3.123 +           (resting? empathy)      (.setText text "Resting")
   3.124 +           :else                   (.setText text "Unknown")))))))
   3.125  
   3.126  (defn init-interactive []
   3.127    (def phi-space
   3.128 @@ -503,7 +514,49 @@
   3.129           {:experiences experiences})))
   3.130        @experiences))
   3.131    (def phi-scan (gen-phi-scan phi-space)))
   3.132 -    
   3.133  
   3.134 -(defn run-experiment-1 []
   3.135 -  (.start (worm-world :experience-watch (debug-experience-phi))))
   3.136 \ No newline at end of file
   3.137 +(defn empathy-experiment-1 [record]
   3.138 +  (.start (worm-world :experience-watch (empathy-demonstration)
   3.139 +                      :record record :worm worm*)))
   3.140 +
   3.141 +
   3.142 +(def worm-action-label
   3.143 +  (juxt grand-circle? curled? wiggling?))
   3.144 +
   3.145 +(defn compare-empathy-with-baseline [accuracy]
   3.146 +  (let [proprio (atom ())]
   3.147 +    (fn
   3.148 +      [experiences text]
   3.149 +      (let [phi-indices (phi-scan (:proprioception (peek experiences)))]
   3.150 +        (swap! proprio (partial cons phi-indices))
   3.151 +        (let [exp-thread (longest-thread (take 300 @proprio))
   3.152 +              empathy (mapv phi-space (infer-nils exp-thread))
   3.153 +              experience-matches-empathy
   3.154 +              (= (worm-action-label experiences)
   3.155 +                 (worm-action-label empathy))]
   3.156 +          (cond
   3.157 +           (grand-circle? empathy) (.setText text "Grand Circle")
   3.158 +           (curled? empathy)       (.setText text "Curled")
   3.159 +           (wiggling? empathy)     (.setText text "Wiggling")
   3.160 +           (resting? empathy)      (.setText text "Resting")
   3.161 +           :else                   (.setText text "Unknown"))
   3.162 +
   3.163 +          (println-repl experience-matches-empathy)
   3.164 +          (swap! accuracy #(conj % experience-matches-empathy)))))))
   3.165 +              
   3.166 +(defn accuracy [v]
   3.167 +  (float (/ (count (filter true? v)) (count v))))
   3.168 +
   3.169 +(defn test-empathy-accuracy []
   3.170 +  (let [res (atom [])]
   3.171 +    (run-world
   3.172 +     (worm-world :experience-watch
   3.173 +                 (compare-empathy-with-baseline res)
   3.174 +                 :worm worm*))
   3.175 +    (accuracy @res)))
   3.176 +
   3.177 +
   3.178 +
   3.179 +
   3.180 +
   3.181 +
     4.1 --- a/thesis/cortex.org	Wed Mar 26 03:18:57 2014 -0400
     4.2 +++ b/thesis/cortex.org	Wed Mar 26 20:38:17 2014 -0400
     4.3 @@ -3,9 +3,9 @@
     4.4  #+email: rlm@mit.edu
     4.5  #+description: Using embodied AI to facilitate Artificial Imagination.
     4.6  #+keywords: AI, clojure, embodiment
     4.7 +#+LaTeX_CLASS_OPTIONS: [nofloat]
     4.8  
     4.9 -
    4.10 -* Empathy and Embodiment as problem solving strategies
     4.11 +* Empathy and Embodiment as problem solving strategies
    4.12    
    4.13    By the end of this thesis, you will have seen a novel approach to
    4.14    interpreting video using embodiment and empathy. You will have also
    4.15 @@ -297,35 +297,36 @@
     4.16      in the real world (such as telekinesis or a semantic sense)
    4.17     - imagination using subworlds
    4.18  
    4.19 -   During one test with =CORTEX=, I created 3,000 entities each with
    4.20 +   During one test with =CORTEX=, I created 3,000 creatures each with
    4.21     their own independent senses and ran them all at only 1/80 real
    4.22     time. In another test, I created a detailed model of my own hand,
    4.23     equipped with a realistic distribution of touch (more sensitive at
    4.24     the fingertips), as well as eyes and ears, and it ran at around 1/4
    4.25 -   real time. 
    4.26 +   real time.
    4.27  
    4.28 -   #+BEGIN_LaTeX
    4.29 +#+BEGIN_LaTeX
    4.30     \begin{sidewaysfigure}
    4.31     \includegraphics[width=9.5in]{images/full-hand.png}
    4.32 -   \caption{Here is the worm from above modeled in Blender, 
    4.33 -   a free 3D-modeling program. Senses and joints are described
    4.34 -   using special nodes in Blender. The senses are displayed on 
    4.35 -   the right, and the simulation is displayed on the left. Notice
    4.36 -   that the hand is curling its fingers, that it can see its own 
    4.37 -   finger from the eye in its palm, and thta it can feel its own 
    4.38 -   thumb touching its palm.}
    4.39 +   \caption{
    4.40 +   I modeled my own right hand in Blender and rigged it with all the
    4.41 +   senses that {\tt CORTEX} supports. My simulated hand has a
    4.42 +   biologically inspired distribution of touch sensors. The senses are
    4.43 +   displayed on the right, and the simulation is displayed on the
    4.44 +   left. Notice that my hand is curling its fingers, that it can see
    4.45 +   its own finger from the eye in its palm, and that it can feel its
    4.46 +   own thumb touching its palm.}
    4.47     \end{sidewaysfigure}
    4.48 -   #+END_LaTeX
    4.49 +#+END_LaTeX
    4.50  
    4.51  ** Contributions
    4.52  
    4.53 -   I built =CORTEX=, a comprehensive platform for embodied AI
    4.54 -   experiments. =CORTEX= many new features lacking in other systems,
    4.55 -   such as sound. It is easy to create new creatures using Blender, a
    4.56 -   free 3D modeling program.
    4.57 +   - I built =CORTEX=, a comprehensive platform for embodied AI
    4.58 +     experiments. =CORTEX= supports many features lacking in other
    4.59 +     systems, such as proper simulation of hearing. It is easy to create
    4.60 +     new =CORTEX= creatures using Blender, a free 3D modeling program.
    4.61  
    4.62 -   I built =EMPATH=, which uses =CORTEX= to identify the actions of a
    4.63 -   worm-like creature using a computational model of empathy.
    4.64 +   - I built =EMPATH=, which uses =CORTEX= to identify the actions of
    4.65 +     a worm-like creature using a computational model of empathy.
    4.66     
    4.67  * Building =CORTEX=
    4.68  
    4.69 @@ -372,7 +373,7 @@
    4.70    #+caption: It is composed of 5 segments. Each segment has a 
    4.71    #+caption: pair of extensor and flexor muscles. Each of the 
    4.72    #+caption: worm's four joints is a hinge joint which allows 
    4.73 -  #+caption: 30 degrees of rotation to either side. Each segment
    4.74 +  #+caption: about 30 degrees of rotation to either side. Each segment
    4.75    #+caption: of the worm is touch-capable and has a uniform 
    4.76    #+caption: distribution of touch sensors on each of its faces.
    4.77    #+caption: Each joint has a proprioceptive sense to detect 
    4.78 @@ -418,7 +419,7 @@
    4.79     efficient at describing body-centered actions. It is the ``right
    4.80     language for the job''. For example, it takes only around 5 lines
    4.81     of LISP code to describe the action of ``curling'' using embodied
    4.82 -   primitives. It takes about 8 lines to describe the seemingly
    4.83 +   primitives. It takes about 10 lines to describe the seemingly
    4.84     complicated action of wiggling.
    4.85  
    4.86     The following action predicates each take a stream of sensory
    4.87 @@ -578,19 +579,20 @@
    4.88  
    4.89     #+caption: Using =debug-experience=, the body-centered predicates
    4.90     #+caption: work together to classify the behaviour of the worm. 
    4.91 -   #+caption: while under manual motor control.
    4.92 +   #+caption: The predicates are operating with access to the worm's
    4.93 +   #+caption: full sensory data.
    4.94     #+name: basic-worm-view
    4.95     #+ATTR_LaTeX: :width 10cm
    4.96     [[./images/worm-identify-init.png]]
    4.97  
    4.98     These action predicates satisfy the recognition requirement of an
    4.99 -   empathic recognition system. There is a lot of power in the
   4.100 -   simplicity of the action predicates. They describe their actions
   4.101 -   without getting confused in visual details of the worm. Each one is
   4.102 -   frame independent, but more than that, they are each indepent of
   4.103 +   empathic recognition system. There is power in the simplicity of
   4.104 +   the action predicates. They describe their actions without getting
   4.105 +   confused by visual details of the worm. Each one is frame
   4.106 +   independent, but more than that, they are each independent of
   4.107     irrelevant visual details of the worm and the environment. They
   4.108     will work regardless of whether the worm is a different color or
   4.109 -   hevaily textured, or of the environment has strange lighting.
   4.110 +   heavily textured, or if the environment has strange lighting.
   4.111  
   4.112     The trick now is to make the action predicates work even when the
   4.113     sensory data on which they depend is absent. If I can do that, then
   4.114 @@ -601,61 +603,42 @@
   4.115     As a first step towards building empathy, I need to gather all of
   4.116     the worm's experiences during free play. I use a simple vector to
   4.117     store all the experiences. 
   4.118 -   
   4.119 -   #+caption: Program to gather the worm's experiences into a vector for 
   4.120 -   #+caption: further processing. The =motor-control-program= line uses
   4.121 -   #+caption: a motor control script that causes the worm to execute a series
   4.122 -   #+caption: of ``exercices'' that include all the action predicates.
   4.123 -   #+name: generate-phi-space
   4.124 -   #+begin_listing clojure
   4.125 -   #+begin_src clojure
   4.126 -(defn generate-phi-space []
   4.127 -  (let [experiences (atom [])]
   4.128 -    (run-world
   4.129 -     (apply-map 
   4.130 -      worm-world
   4.131 -      (merge
   4.132 -       (worm-world-defaults)
   4.133 -       {:end-frame 700
   4.134 -        :motor-control
   4.135 -        (motor-control-program worm-muscle-labels do-all-the-things)
   4.136 -        :experiences experiences})))
   4.137 -    @experiences))
   4.138 -   #+end_src
   4.139 -   #+end_listing
   4.140  
   4.141     Each element of the experience vector exists in the vast space of
   4.142     all possible worm-experiences. Most of this vast space is actually
   4.143     unreachable due to physical constraints of the worm's body. For
   4.144     example, the worm's segments are connected by hinge joints that put
   4.145 -   a practical limit on the worm's degrees of freedom. Also, the worm
   4.146 -   can not be bent into a circle so that its ends are touching and at
   4.147 -   the same time not also experience the sensation of touching itself.
   4.148 +   a practical limit on the worm's range of motion without limiting
   4.149 +   its degrees of freedom. Some combinations of senses are impossible;
   4.150 +   the worm cannot be bent into a circle so that its ends are
   4.151 +   touching without also experiencing the sensation of touching
   4.152 +   itself.
   4.153  
   4.154 -   As the worm moves around during free play and the vector grows
   4.155 -   larger, the vector begins to define a subspace which is all the
   4.156 -   practical experiences the worm can experience during normal
   4.157 -   operation, which I call \Phi-space, short for physical-space. The
   4.158 -   vector defines a path through \Phi-space. This path has interesting
   4.159 -   properties that all derive from embodiment. The proprioceptive
   4.160 -   components are completely smooth, because in order for the worm to
   4.161 -   move from one position to another, it must pass through the
   4.162 -   intermediate positions. The path invariably forms loops as actions
   4.163 -   are repeated. Finally and most importantly, proprioception actually
   4.164 -   gives very strong inference about the other senses. For example,
   4.165 -   when the worm is flat, you can infer that it is touching the ground
   4.166 -   and that its muscles are not active, because if the muscles were
   4.167 -   active, the worm would be moving and would not be perfectly flat.
   4.168 -   In order to stay flat, the worm has to be touching the ground, or
   4.169 -   it would again be moving out of the flat position due to gravity.
   4.170 -   If the worm is positioned in such a way that it interacts with
   4.171 -   itself, then it is very likely to be feeling the same tactile
   4.172 -   feelings as the last time it was in that position, because it has
   4.173 -   the same body as then. If you observe multiple frames of
   4.174 -   proprioceptive data, then you can become increasingly confident
   4.175 -   about the exact activations of the worm's muscles, because it
   4.176 -   generally takes a unique combination of muscle contractions to
   4.177 -   transform the worm's body along a specific path through \Phi-space.
   4.178 +   As the worm moves around during free play and its experience vector
   4.179 +   grows larger, the vector begins to define a subspace which is all
   4.180 +   the sensations the worm can practically experience during normal
   4.181 +   operation. I call this subspace \Phi-space, short for
   4.182 +   physical-space. The experience vector defines a path through
   4.183 +   \Phi-space. This path has interesting properties that all derive
   4.184 +   from physical embodiment. The proprioceptive components are
   4.185 +   completely smooth, because in order for the worm to move from one
   4.186 +   position to another, it must pass through the intermediate
   4.187 +   positions. The path invariably forms loops as actions are repeated.
   4.188 +   Finally and most importantly, proprioception actually gives very
   4.189 +   strong inference about the other senses. For example, when the worm
   4.190 +   is flat, you can infer that it is touching the ground and that its
   4.191 +   muscles are not active, because if the muscles were active, the
   4.192 +   worm would be moving and would not be perfectly flat. In order to
   4.193 +   stay flat, the worm has to be touching the ground, or it would
   4.194 +   again be moving out of the flat position due to gravity. If the
   4.195 +   worm is positioned in such a way that it interacts with itself,
   4.196 +   then it is very likely to be feeling the same tactile feelings as
   4.197 +   the last time it was in that position, because it has the same body
   4.198 +   as then. If you observe multiple frames of proprioceptive data,
   4.199 +   then you can become increasingly confident about the exact
   4.200 +   activations of the worm's muscles, because it generally takes a
   4.201 +   unique combination of muscle contractions to transform the worm's
   4.202 +   body along a specific path through \Phi-space.
   4.203  
   4.204     There is a simple way of taking \Phi-space and the total ordering
   4.205     provided by an experience vector and reliably inferring the rest of
   4.206 @@ -664,34 +647,38 @@
   4.207  ** Empathy is the process of tracing through \Phi-space
   4.208  
   4.209     Here is the core of a basic empathy algorithm, starting with an
   4.210 -   experience vector: First, group the experiences into tiered
   4.211 -   proprioceptive bins. I use powers of 10 and 3 bins, and the
   4.212 -   smallest bin has and approximate size of 0.001 radians in all
   4.213 -   proprioceptive dimensions.
   4.214 +   experience vector:
   4.215 +
   4.216 +   First, group the experiences into tiered proprioceptive bins. I use
   4.217 +   three tiers of bins, sized in powers of ten; the smallest bin has an
   4.218 +   approximate size of 0.001 radians in all proprioceptive dimensions.
   4.219     
   4.220     Then, given a sequence of proprioceptive input, generate a set of
   4.221 -   matching experience records for each input. 
   4.222 +   matching experience records for each input, using the tiered
   4.223 +   proprioceptive bins. 
   4.224  
   4.225     Finally, to infer sensory data, select the longest consecutive chain
   4.226 -   of experiences as determined by the indexes into the experience
   4.227 -   vector. 
   4.228 +   of experiences. Consecutive experiences are those that appear next
   4.229 +   to each other in the experience vector.
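
   As a rough sketch of the binning step (illustrative only, not the
   actual =CORTEX= code), the tiered bins might be built like this,
   assuming proprioception is a flat sequence of joint angles in
   radians:

   #+begin_src clojure
;; Hypothetical sketch: index experiences by binned proprioception.
(defn bin
  "Round each joint angle to the nearest multiple of `resolution`."
  [resolution angles]
  (mapv #(* resolution (Math/round (/ % resolution))) angles))

(defn tiered-bins
  "Group experience-vector indices by binned proprioception, once per
   resolution tier (powers of ten, finest tier 0.001 radians)."
  [experiences]
  (vec (for [r [0.1 0.01 0.001]]
         (group-by #(bin r (:proprioception (nth experiences %)))
                   (range (count experiences))))))
   #+end_src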
   4.230  
   4.231     This algorithm has three advantages: 
   4.232  
   4.233     1. It's simple
   4.234  
   4.235 -   3. It's very fast -- both tracing through possibilites and
   4.236 -      retrieving possible interpretations take essentially constant
   4.237 -      time. 
   4.238 +   2. It's very fast -- retrieving possible interpretations takes
   4.239 +      constant time. Tracing through chains of interpretations takes
   4.240 +      time proportional to the average number of experiences in a
   4.241 +      proprioceptive bin. Redundant experiences in \Phi-space can be
   4.242 +      merged to save computation.
   4.243  
   4.244     3. It protects from wrong interpretations of transient ambiguous
   4.245 -      proprioceptive data : for example, if the worm is flat for just
   4.246 +      proprioceptive data. For example, if the worm is flat for just
   4.247        an instant, this flatness will not be interpreted as implying
   4.248        that the worm has its muscles relaxed, since the flatness is
   4.249        part of a longer chain which includes a distinct pattern of
   4.250 -      muscle activation. A memoryless statistical model such as a
   4.251 -      markov model that operates on individual frames may very well
   4.252 -      make this mistake.
   4.253 +      muscle activation. Markov chains or other memoryless statistical
   4.254 +      models that operate on individual frames may very well make this
   4.255 +      mistake.
   4.256  
   4.257     #+caption: Program to convert an experience vector into a 
   4.258     #+caption: proprioceptively binned lookup function.
   4.259 @@ -725,6 +712,30 @@
   4.260     #+end_src
   4.261     #+end_listing
   4.262  
   4.263 +   #+caption: =longest-thread= finds the longest path of consecutive 
   4.264 +   #+caption: experiences to explain proprioceptive worm data.
   4.265 +   #+name: phi-space-history-scan
   4.266 +   #+ATTR_LaTeX: :width 10cm
   4.267 +   [[./images/aurellem-gray.png]]
   4.268 +
   4.269 +   =longest-thread= infers sensory data by stitching together pieces
   4.270 +   from previous experience. It prefers longer chains of previous
   4.271 +   experience to shorter ones. For example, during training the worm
   4.272 +   might rest on the ground for one second before it performs its
   4.273 +   exercises. If during recognition the worm rests on the ground for
   4.274 +   five seconds, =longest-thread= will accommodate this five-second
   4.275 +   rest period by looping the one-second rest chain five times.
   4.276 +
   4.277 +   =longest-thread= takes time proportional to the average number of
   4.278 +   entries in a proprioceptive bin, because for each element in the
   4.279 +   starting bin it performs a series of set lookups in the preceding
   4.280 +   bins. If the total history is limited, then this is only a constant
   4.281 +   multiple times the number of entries in the starting bin. This
   4.282 +   analysis also applies even if the action requires multiple longest
   4.283 +   chains -- it's still the average number of entries in a
   4.284 +   proprioceptive bin times the desired chain length. Because
   4.285 +   =longest-thread= is so efficient and simple, I can interpret
   4.286 +   worm-actions in real time.
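
   To make the chain selection concrete, here is a toy version of the
   idea (hypothetical names; it is not the =longest-thread= listing
   that follows): given candidate index sets ordered newest first,
   walk backwards from each starting index and count how many
   consecutive predecessors appear in the successive sets.

   #+begin_src clojure
;; Toy illustration of selecting the longest consecutive chain.
;; candidate-sets: a seq of sets of phi-space indices, newest first.
(defn chain-length
  "How many consecutive predecessors of `start` appear in the
   successive candidate sets?"
  [start candidate-sets]
  (count (take-while identity
                     (map contains? candidate-sets (iterate dec start)))))

(defn best-start
  "The index in the newest candidate set that begins the longest
   backwards-consecutive chain."
  [candidate-sets]
  (apply max-key #(chain-length % candidate-sets)
         (first candidate-sets)))
   #+end_src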
   4.287  
   4.288     #+caption: Program to calculate empathy by tracing through \Phi-space
   4.289     #+caption: and finding the longest (i.e. most coherent) interpretation
   4.290 @@ -761,14 +772,13 @@
   4.291     #+end_src
   4.292     #+end_listing
   4.293  
   4.294 -
   4.295 -There is one final piece, which is to replace missing sensory data
   4.296 -with a best-guess estimate. While I could fill in missing data by
   4.297 -using a gradient over the closest known sensory data points, averages
   4.298 -can be misleading. It is certainly possible to create an impossible
   4.299 -sensory state by averaging two possible sensory states. Therefore, I
   4.300 -simply replicate the most recent sensory experience to fill in the
   4.301 -gaps. 
   4.302 +   There is one final piece, which is to replace missing sensory data
   4.303 +   with a best-guess estimate. While I could fill in missing data by
   4.304 +   using a gradient over the closest known sensory data points,
   4.305 +   averages can be misleading. It is certainly possible to create an
   4.306 +   impossible sensory state by averaging two possible sensory states.
   4.307 +   Therefore, I simply replicate the most recent sensory experience to
   4.308 +   fill in the gaps.
   4.309  
   4.310     #+caption: Fill in blanks in sensory experience by replicating the most 
   4.311     #+caption: recent experience.
   4.312 @@ -789,14 +799,158 @@
   4.313            (recur i (assoc! v i 0))))))
   4.314     #+end_src
   4.315     #+end_listing
   4.316 -
   4.317    
   4.318  ** Efficient action recognition with =EMPATH=
   4.319 +   
   4.320 +   To use =EMPATH= with the worm, I first need to gather a set of
   4.321 +   experiences from the worm that includes the actions I want to
   4.322 +   recognize. The =generate-phi-space= program (listing
   4.323 +   \ref{generate-phi-space}) runs the worm through a series of
   4.324 +   exercises and gathers those experiences into a vector. The
   4.325 +   =do-all-the-things= program is a routine expressed in a simple
   4.326 +   muscle contraction script language for automated worm control.
   4.327  
   4.328 -   In my exploration with the worm, I can generally infer actions from
   4.329 -   proprioceptive data exactly as well as when I have the complete
   4.330 -   sensory data. To reach this level, I have to train the worm with
   4.331 -   verious exercices for about 1 minute.
   4.332 +   #+caption: Program to gather the worm's experiences into a vector for 
   4.333 +   #+caption: further processing. The =motor-control-program= line uses
   4.334 +   #+caption: a motor control script that causes the worm to execute a series
   4.335 +   #+caption: of ``exercises'' that include all the action predicates.
   4.336 +   #+name: generate-phi-space
   4.337 +   #+attr_latex: [!H]
   4.338 +   #+begin_listing clojure 
   4.339 +   #+begin_src clojure
   4.340 +(def do-all-the-things 
   4.341 +  (concat
   4.342 +   curl-script
   4.343 +   [[300 :d-ex 40]
   4.344 +    [320 :d-ex 0]]
   4.345 +   (shift-script 280 (take 16 wiggle-script))))
   4.346 +
   4.347 +(defn generate-phi-space []
   4.348 +  (let [experiences (atom [])]
   4.349 +    (run-world
   4.350 +     (apply-map 
   4.351 +      worm-world
   4.352 +      (merge
   4.353 +       (worm-world-defaults)
   4.354 +       {:end-frame 700
   4.355 +        :motor-control
   4.356 +        (motor-control-program worm-muscle-labels do-all-the-things)
   4.357 +        :experiences experiences})))
   4.358 +    @experiences))
   4.359 +   #+end_src
   4.360 +   #+end_listing
   4.361 +
   4.362 +   #+caption: Use =longest-thread= and a \Phi-space generated from a short
   4.363 +   #+caption: exercise routine to interpret actions during free play.
   4.364 +   #+name: empathy-debug
   4.365 +   #+begin_listing clojure
   4.366 +   #+begin_src clojure
   4.367 +(defn init []
   4.368 +  (def phi-space (generate-phi-space))
   4.369 +  (def phi-scan (gen-phi-scan phi-space)))
   4.370 +
   4.371 +(defn empathy-demonstration []
   4.372 +  (let [proprio (atom ())]
   4.373 +    (fn
   4.374 +      [experiences text]
   4.375 +      (let [phi-indices (phi-scan (:proprioception (peek experiences)))]
   4.376 +        (swap! proprio (partial cons phi-indices))
   4.377 +        (let [exp-thread (longest-thread (take 300 @proprio))
   4.378 +              empathy (mapv phi-space (infer-nils exp-thread))]
   4.379 +          (println-repl (vector:last-n exp-thread 22))
   4.380 +          (cond
   4.381 +           (grand-circle? empathy) (.setText text "Grand Circle")
   4.382 +           (curled? empathy)       (.setText text "Curled")
   4.383 +           (wiggling? empathy)     (.setText text "Wiggling")
   4.384 +           (resting? empathy)      (.setText text "Resting")
   4.385 +           :else                   (.setText text "Unknown")))))))
   4.386 +
   4.387 +(defn empathy-experiment [record]
   4.388 +  (.start (worm-world :experience-watch (empathy-demonstration)
   4.389 +                      :record record :worm worm*)))
   4.390 +   #+end_src
   4.391 +   #+end_listing
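
   A hypothetical REPL session for this experiment looks like the
   following (passing =nil= for =record= is assumed here to simply
   skip video capture):

   #+begin_src clojure
(init)                    ;; build phi-space from the exercise routine
(empathy-experiment nil)  ;; watch the worm and display inferred actions
   #+end_src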
   4.392 +   
   4.393 +   The result of running =empathy-experiment= is that the system is
   4.394 +   generally able to interpret worm actions using the action-predicates
   4.395 +   on simulated sensory data just as well as with actual data. Figure
   4.396 +   \ref{empathy-debug-image} was generated using =empathy-experiment=:
   4.397 +
   4.398 +  #+caption: From only proprioceptive data, =EMPATH= was able to infer 
   4.399 +  #+caption: the complete sensory experience and classify four poses.
   4.400 +  #+caption: (The last panel shows a composite image of \emph{wiggling},
   4.401 +  #+caption: a dynamic pose.)
   4.402 +  #+name: empathy-debug-image
   4.403 +  #+ATTR_LaTeX: :width 10cm :placement [H]
   4.404 +  [[./images/empathy-1.png]]
   4.405 +
   4.406 +  One way to measure the performance of =EMPATH= is to compare the
   4.407 +  suitability of the imagined sense experience to trigger the same
   4.408 +  action predicates as the real sensory experience. 
   4.409 +  
   4.410 +   #+caption: Determine how closely empathy approximates actual 
   4.411 +   #+caption: sensory data.
   4.412 +   #+name: test-empathy-accuracy
   4.413 +   #+begin_listing clojure
   4.414 +   #+begin_src clojure
   4.415 +(def worm-action-label
   4.416 +  (juxt grand-circle? curled? wiggling?))
   4.417 +
   4.418 +(defn compare-empathy-with-baseline [matches]
   4.419 +  (let [proprio (atom ())]
   4.420 +    (fn
   4.421 +      [experiences text]
   4.422 +      (let [phi-indices (phi-scan (:proprioception (peek experiences)))]
   4.423 +        (swap! proprio (partial cons phi-indices))
   4.424 +        (let [exp-thread (longest-thread (take 300 @proprio))
   4.425 +              empathy (mapv phi-space (infer-nils exp-thread))
   4.426 +              experience-matches-empathy
   4.427 +              (= (worm-action-label experiences)
   4.428 +                 (worm-action-label empathy))]
   4.429 +          (println-repl experience-matches-empathy)
   4.430 +          (swap! matches #(conj % experience-matches-empathy)))))))
   4.431 +              
   4.432 +(defn accuracy [v]
   4.433 +  (float (/ (count (filter true? v)) (count v))))
   4.434 +
   4.435 +(defn test-empathy-accuracy []
   4.436 +  (let [res (atom [])]
   4.437 +    (run-world
   4.438 +     (worm-world :experience-watch
   4.439 +                 (compare-empathy-with-baseline res)
   4.440 +                 :worm worm*))
   4.441 +    (accuracy @res)))
   4.442 +   #+end_src
   4.443 +   #+end_listing
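
   Called from the REPL (a hypothetical session), the measurement
   reduces to a single number: the fraction of frames on which the
   empathic labels agree with the labels computed from the real
   sensory data.

   #+begin_src clojure
(init)                   ;; phi-space from the short exercise routine
(test-empathy-accuracy)  ;; => fraction of frames where labels agree
   #+end_src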
   4.444 +
   4.445 +  Running =test-empathy-accuracy= using the very short exercise
   4.446 +  program defined in listing \ref{generate-phi-space}, and then manually
   4.447 +  performing a similar pattern of activity, yields an accuracy of around
   4.448 +  73%. This is based on very limited worm experience. By training the
   4.449 +  worm for longer, the accuracy dramatically improves.
   4.450 +
   4.451 +   #+caption: Program to generate \Phi-space using manual training.
   4.452 +   #+name: manual-phi-space
   4.453 +   #+begin_listing clojure
   4.454 +   #+begin_src clojure
   4.455 +(defn init-interactive []
   4.456 +  (def phi-space
   4.457 +    (let [experiences (atom [])]
   4.458 +      (run-world
   4.459 +       (apply-map 
   4.460 +        worm-world
   4.461 +        (merge
   4.462 +         (worm-world-defaults)
   4.463 +         {:experiences experiences})))
   4.464 +      @experiences))
   4.465 +  (def phi-scan (gen-phi-scan phi-space)))
   4.466 +   #+end_src
   4.467 +   #+end_listing
   4.468 +
   4.469 +  After about 1 minute of manual training, I was able to achieve 95%
   4.470 +  accuracy on manual testing of the worm using =init-interactive= and
   4.471 +  =test-empathy-accuracy=. The ability of the system to infer sensory
   4.472 +  states is truly impressive.
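
   Concretely, the manual-training measurement is a session like the
   following sketch (the 0.95 figure is the result reported above, not
   a guaranteed output):

   #+begin_src clojure
(init-interactive)       ;; drive the worm by hand to build phi-space
(test-empathy-accuracy)  ;; => ~0.95 after about one minute of training
   #+end_src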
   4.473  
   4.474  ** Digression: bootstrapping touch using free exploration
   4.475  
     5.1 Binary file thesis/images/aurellem-gray.png has changed
     6.1 Binary file thesis/images/empathy-1.png has changed
     7.1 --- a/thesis/rlm-cortex-meng.tex	Wed Mar 26 03:18:57 2014 -0400
     7.2 +++ b/thesis/rlm-cortex-meng.tex	Wed Mar 26 20:38:17 2014 -0400
     7.3 @@ -26,6 +26,7 @@
     7.4  %% above. 
     7.5  
     7.6  \documentclass[12pt,twoside,singlespace,vi]{mitthesis}
     7.7 +%\documentclass[12pt,twoside,vi]{mitthesis}
     7.8  \usepackage[utf8]{inputenc}
     7.9  \usepackage[T1]{fontenc}
    7.10  \usepackage{fixltx2e}