changeset 449:09b7c8dd4365

first chapter done, half of last chapter done.
author Robert McIntyre <rlm@mit.edu>
date Wed, 26 Mar 2014 02:42:01 -0400
parents af13fc73e851
children 432f2c4646cb
files org/worm_learn.clj thesis/cortex.org thesis/images/basic-worm-view.png thesis/images/full-hand.png thesis/images/worm-identify-init.png thesis/rlm-cortex-meng.tex
diffstat 6 files changed, 455 insertions(+), 81 deletions(-)
line diff
     1.1 --- a/org/worm_learn.clj	Tue Mar 25 22:54:41 2014 -0400
     1.2 +++ b/org/worm_learn.clj	Wed Mar 26 02:42:01 2014 -0400
     1.3 @@ -27,6 +27,13 @@
     1.4  (defn worm-model []
     1.5    (load-blender-model "Models/worm/worm.blend"))
     1.6  
     1.7 +(defn worm []
     1.8 +  (let [model (load-blender-model "Models/worm/worm.blend")]
     1.9 +    {:body (doto model (body!))
    1.10 +     :touch (touch! model)
    1.11 +     :proprioception (proprioception! model)
    1.12 +     :muscles (movement! model)}))
    1.13 +
    1.14  (def output-base (File. "/home/r/proj/cortex/render/worm-learn/curl"))
    1.15  
    1.16  
    1.17 @@ -220,15 +227,8 @@
    1.18                (< 0.55 (contact worm-segment-top-tip    head-touch))))))
    1.19  
    1.20  
    1.21 -(declare phi-space phi-scan)
    1.22 +(declare phi-space phi-scan debug-experience) 
    1.23  
    1.24 -(defn debug-experience
    1.25 -  [experiences text]
    1.26 -  (cond
    1.27 -   (grand-circle? experiences) (.setText text "Grand Circle")
    1.28 -   (curled? experiences)       (.setText text "Curled")
    1.29 -   (wiggling? experiences)     (.setText text "Wiggling")
    1.30 -   (resting? experiences)      (.setText text "Resting")))
    1.31  
    1.32  
    1.33  (def standard-world-view
    1.34 @@ -277,16 +277,14 @@
    1.35            (.setFilterMode  PssmShadowRenderer$FilterMode/Bilinear))]
    1.36      (.addProcessor (.getViewPort world) pssm)))
    1.37          
    1.38 +(defn debug-experience
    1.39 +  [experiences text]
    1.40 +  (cond
    1.41 +   (grand-circle? experiences) (.setText text "Grand Circle")
    1.42 +   (curled? experiences)       (.setText text "Curled")
    1.43 +   (wiggling? experiences)     (.setText text "Wiggling")
    1.44 +   (resting? experiences)      (.setText text "Resting")))
    1.45  
    1.46 -(defn display-text [[x y :as location]]
    1.47 -  (let []
    1.48 -    (.setLocalTranslation text 300 (.getLineHeight text) 0)
    1.49 -   (fn [world]
    1.50 -     
    1.51 -  
    1.52 -  
    1.53 -  
    1.54 -  (fn [new-text]
    1.55  
    1.56  (defn worm-world
    1.57    [& {:keys [record motor-control keybindings view experiences
    1.58 @@ -294,14 +292,11 @@
    1.59    (let [{:keys [record motor-control keybindings view experiences
    1.60                  worm-model end-frame experience-watch]}
    1.61          (merge (worm-world-defaults) settings)
    1.62 -        worm (doto (worm-model) (body!))
    1.63 -        touch   (touch! worm)
    1.64 -        prop    (proprioception! worm)
    1.65 -        muscles (movement! worm)
    1.66 -        
    1.67 +       
    1.68          touch-display  (view-touch)
    1.69          prop-display   (view-proprioception)
    1.70          muscle-display (view-movement)
    1.71 +        {:keys [proprioception touch muscles body]} (worm)
    1.72          
    1.73          floor
    1.74          (box 5 1 5 :position (Vector3f. 0 -10 0)
    1.75 @@ -316,7 +311,7 @@
    1.76                        (.setColor (ColorRGBA/Black)))]
    1.77  
    1.78      (world
    1.79 -     (nodify [worm floor])
    1.80 +     (nodify [body floor])
    1.81         (merge standard-debug-controls keybindings)
    1.82         (fn [world]
    1.83           (.setLocalTranslation
    1.84 @@ -324,7 +319,7 @@
    1.85           (.attachChild (.getGuiNode world) worm-action)
    1.86           
    1.87           (enable-good-shadows world)
    1.88 -         (.setShadowMode worm RenderQueue$ShadowMode/CastAndReceive)
    1.89 +         (.setShadowMode body RenderQueue$ShadowMode/CastAndReceive)
    1.90           (.setShadowMode floor RenderQueue$ShadowMode/Receive)
    1.91                    
    1.92           (.setBackgroundColor (.getViewPort world) (ColorRGBA/White))
    1.93 @@ -332,7 +327,7 @@
    1.94           (.setDisplayFps world false)
    1.95           (position-camera world view)
    1.96           (.setTimer world timer)
    1.97 -         (display-dilated-time world timer)
    1.98 +         ;;(display-dilated-time world timer)
    1.99           (when record
   1.100             (dir! record)
   1.101             (Capture/captureVideo
   1.102 @@ -345,7 +340,7 @@
   1.103           (if (and end-frame (> (.getTime timer) end-frame))
   1.104             (.stop world))
   1.105           (let [muscle-data (vec (motor-control muscles))
   1.106 -               proprioception-data (prop)
   1.107 +               proprioception-data (proprioception)
   1.108                 touch-data (mapv #(% (.getRootNode world)) touch)]
   1.109             (when experiences
   1.110               (record-experience!
     2.1 --- a/thesis/cortex.org	Tue Mar 25 22:54:41 2014 -0400
     2.2 +++ b/thesis/cortex.org	Wed Mar 26 02:42:01 2014 -0400
     2.3 @@ -226,7 +226,7 @@
     2.4     #+end_listing
     2.5  
     2.6  
     2.7 -** =CORTEX= is a toolkit for building sensate creatures
     2.8 +**  =CORTEX= is a toolkit for building sensate creatures
     2.9  
    2.10     I built =CORTEX= to be a general AI research platform for doing
    2.11     experiments involving multiple rich senses and a wide variety and
    2.12 @@ -269,14 +269,16 @@
    2.13     engine designed to create cross-platform 3D desktop games. =CORTEX=
    2.14     is mainly written in clojure, a dialect of =LISP= that runs on the
    2.15     java virtual machine (JVM). The API for creating and simulating
    2.16 -   creatures is entirely expressed in clojure. Hearing is implemented
    2.17 -   as a layer of clojure code on top of a layer of java code on top of
    2.18 -   a layer of =C++= code which implements a modified version of
    2.19 -   =OpenAL= to support multiple listeners. =CORTEX= is the only
    2.20 -   simulation environment that I know of that can support multiple
    2.21 -   entities that can each hear the world from their own perspective.
    2.22 -   Other senses also require a small layer of Java code. =CORTEX= also
    2.23 -   uses =bullet=, a physics simulator written in =C=.
    2.24 +   creatures and senses is entirely expressed in clojure, though many
    2.25 +   senses are implemented at the layer of jMonkeyEngine or below. For
    2.26 +   example, for the sense of hearing I use a layer of clojure code on
    2.27 +   top of a layer of java JNI bindings that drive a layer of =C++=
    2.28 +   code which implements a modified version of =OpenAL= to support
    2.29 +   multiple listeners. =CORTEX= is the only simulation environment
    2.30 +   that I know of that can support multiple entities that can each
    2.31 +   hear the world from their own perspective. Other senses also
    2.32 +   require a small layer of Java code. =CORTEX= also uses =bullet=, a
    2.33 +   physics simulator written in =C=.
    2.34  
    2.35     #+caption: Here is the worm from above modeled in Blender, a free 
    2.36     #+caption: 3D-modeling program. Senses and joints are described
    2.37 @@ -285,26 +287,46 @@
    2.38     #+ATTR_LaTeX: :width 12cm
    2.39     [[./images/blender-worm.png]]
    2.40  
    2.41 +   Here are some things I anticipate that =CORTEX= might be used for:
    2.42 +
    2.43 +   - exploring new ideas about sensory integration
    2.44 +   - distributed communication among swarm creatures
    2.45 +   - self-learning using free exploration
    2.46 +   - evolutionary algorithms involving creature construction
    2.47 +   - exploration of exotic senses and effectors that are not possible
    2.48 +     in the real world (such as telekinesis or a semantic sense)
    2.49 +   - imagination using subworlds
    2.50 +
    2.51     During one test with =CORTEX=, I created 3,000 entities each with
    2.52     their own independent senses and ran them all at only 1/80 real
    2.53     time. In another test, I created a detailed model of my own hand,
    2.54     equipped with a realistic distribution of touch (more sensitive at
    2.55     the fingertips), as well as eyes and ears, and it ran at around 1/4
    2.56 -   real time.
    2.57 +   real time. 
    2.58  
    2.59 -   #+caption: Here is the worm from above modeled in Blender, a free 
    2.60 -   #+caption: 3D-modeling program. Senses and joints are described
    2.61 -   #+caption: using special nodes in Blender.
    2.62 -   #+name: worm-recognition-intro
    2.63 -   #+ATTR_LaTeX: :width 15cm
    2.64 -   [[./images/full-hand.png]]
    2.65 -   
    2.66 -   
    2.67 -   
    2.68 +   #+BEGIN_LaTeX
    2.69 +   \begin{sidewaysfigure}
    2.70 +   \includegraphics[width=9.5in]{images/full-hand.png}
    2.71 +   \caption{Here is the model of my own hand, modeled in Blender,
    2.72 +   a free 3D-modeling program. Senses and joints are described
    2.73 +   using special nodes in Blender. The senses are displayed on 
    2.74 +   the right, and the simulation is displayed on the left. Notice
    2.75 +   that the hand is curling its fingers, that it can see its own 
    2.76 +   finger from the eye in its palm, and that it can feel its own 
    2.77 +   thumb touching its palm.}
    2.78 +   \end{sidewaysfigure}
    2.79 +   #+END_LaTeX
    2.80  
    2.81 -   
    2.82  ** Contributions
    2.83  
    2.84 +   I built =CORTEX=, a comprehensive platform for embodied AI
    2.85 +   experiments. =CORTEX= has many new features lacking in other systems,
    2.86 +   such as sound. It is easy to create new creatures using Blender, a
    2.87 +   free 3D modeling program.
    2.88 +
    2.89 +   I built =EMPATH=, which uses =CORTEX= to identify the actions of a
    2.90 +   worm-like creature using a computational model of empathy.
    2.91 +   
    2.92  * Building =CORTEX=
    2.93  
    2.94  ** To explore embodiment, we need a world, body, and senses
    2.95 @@ -331,52 +353,409 @@
    2.96  
    2.97  * Empathy in a simulated worm
    2.98  
    2.99 +  Here I develop a computational model of empathy, using =CORTEX= as a
   2.100 +  base. Empathy in this context is the ability to observe another
   2.101 +  creature and infer what sorts of sensations that creature is
   2.102 +  feeling. My empathy algorithm involves multiple phases. First is
   2.103 +  free-play, where the creature moves around and gains sensory
   2.104 +  experience. From this experience I construct a representation of the
   2.105 +  creature's sensory state space, which I call \Phi-space. Using
   2.106 +  \Phi-space, I construct an efficient function which takes the
   2.107 +  limited data that comes from observing another creature and enriches
   2.108 +  it with a full complement of imagined sensory data. I can then use the
   2.109 +  imagined sensory data to recognize what the observed creature is
   2.110 +  doing and feeling, using straightforward embodied action predicates.
   2.111 +  This is all demonstrated with a simple worm-like creature, by
   2.112 +  recognizing worm-actions based on limited data.
   2.113 +
   2.114 +  #+caption: Here is the worm with which we will be working. 
   2.115 +  #+caption: It is composed of 5 segments. Each segment has a 
   2.116 +  #+caption: pair of extensor and flexor muscles. Each of the 
   2.117 +  #+caption: worm's four joints is a hinge joint which allows 
   2.118 +  #+caption: 30 degrees of rotation to either side. Each segment
   2.119 +  #+caption: of the worm is touch-capable and has a uniform 
   2.120 +  #+caption: distribution of touch sensors on each of its faces.
   2.121 +  #+caption: Each joint has a proprioceptive sense to detect 
   2.122 +  #+caption: relative positions. The worm segments are all the 
   2.123 +  #+caption: same except for the first one, which has a much
   2.124 +  #+caption: higher weight than the others to allow for easy 
   2.125 +  #+caption: manual motor control.
   2.126 +  #+name: basic-worm-view
   2.127 +  #+ATTR_LaTeX: :width 10cm
   2.128 +  [[./images/basic-worm-view.png]]
   2.129 +
   2.130 +  #+caption: Program for reading a worm from a blender file and 
   2.131 +  #+caption: outfitting it with the senses of proprioception, 
   2.132 +  #+caption: touch, and the ability to move, as specified in the 
   2.133 +  #+caption: blender file.
   2.134 +  #+name: get-worm
   2.135 +  #+begin_listing clojure
   2.136 +  #+begin_src clojure
   2.137 +(defn worm []
   2.138 +  (let [model (load-blender-model "Models/worm/worm.blend")]
   2.139 +    {:body (doto model (body!))
   2.140 +     :touch (touch! model)
   2.141 +     :proprioception (proprioception! model)
   2.142 +     :muscles (movement! model)}))
   2.143 +  #+end_src
   2.144 +  #+end_listing
   2.145 +  
   2.146  ** Embodiment factors action recognition into manageable parts
   2.147  
   2.148 +   Using empathy, I divide the problem of action recognition into a
   2.149 +   recognition process expressed in the language of a full complement
   2.150 +   of senses, and an imaginative process that generates full sensory
   2.151 +   data from partial sensory data. Splitting the action recognition
   2.152 +   problem in this manner greatly reduces the total amount of work to
   2.153 +   recognize actions: the imaginative process is mostly just matching
   2.154 +   previous experience, and the recognition process gets to use all
   2.155 +   the senses to directly describe any action.
   2.156 +
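
   A minimal sketch of this factoring as a higher-order function (the
   names =empathic-predicate= and =infer-full-senses= are
   hypothetical, not part of =CORTEX=): an inference function
   imagines the missing senses, and an ordinary action predicate then
   runs on the enriched experience.

   #+begin_src clojure
;; Hypothetical sketch: lift an action predicate that needs full
;; sensory experience into one that works on partial observations,
;; given a function that imagines the missing sensory data.
(defn empathic-predicate
  [infer-full-senses action-predicate]
  (fn [partial-observations]
    (action-predicate (infer-full-senses partial-observations))))
   #+end_src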
   2.157  ** Action recognition is easy with a full gamut of senses
   2.158  
   2.159 -** Digression: bootstrapping touch using free exploration
   2.160 +   Embodied representations using multiple senses such as touch,
   2.161 +   proprioception, and muscle tension turn out to be exceedingly
   2.162 +   efficient at describing body-centered actions. They are the ``right
   2.163 +   language for the job''. For example, it takes only around 5 lines
   2.164 +   of LISP code to describe the action of ``curling'' using embodied
   2.165 +   primitives. It takes about 8 lines to describe the seemingly
   2.166 +   complicated action of wiggling.
   2.167 +
   2.168 +   The following action predicates each take a stream of sensory
   2.169 +   experience, observe however much of it they desire, and decide
   2.170 +   whether the worm is doing the action they describe. =curled?=
   2.171 +   relies on proprioception, =resting?= relies on touch, =wiggling?=
   2.172 +   relies on a Fourier analysis of muscle contraction, and
   2.173 +   =grand-circle?= relies on touch and reuses =curled?= as a guard.
   2.174 +   
   2.175 +   #+caption: Program for detecting whether the worm is curled. This is the 
   2.176 +   #+caption: simplest action predicate, because it only uses the last frame 
   2.177 +   #+caption: of sensory experience, and only uses proprioceptive data. Even 
   2.178 +   #+caption: this simple predicate, however, is automatically frame 
   2.179 +   #+caption: independent and ignores vermopomorphic differences such as 
   2.180 +   #+caption: worm textures and colors.
   2.181 +   #+name: curled
   2.182 +   #+begin_listing clojure
   2.183 +   #+begin_src clojure
   2.184 +(defn curled?
   2.185 +  "Is the worm curled up?"
   2.186 +  [experiences]
   2.187 +  (every?
   2.188 +   (fn [[_ _ bend]]
   2.189 +     (> (Math/sin bend) 0.64))
   2.190 +   (:proprioception (peek experiences))))
   2.191 +   #+end_src
   2.192 +   #+end_listing
   2.193 +
   2.194 +   #+caption: Program for summarizing the touch information in a patch 
   2.195 +   #+caption: of skin.
   2.196 +   #+name: touch-summary
   2.197 +   #+begin_listing clojure
   2.198 +   #+begin_src clojure
   2.199 +(defn contact
   2.200 +  "Determine how much contact a particular worm segment has with
   2.201 +   other objects. Returns a value between 0 and 1, where 1 is full
   2.202 +   contact and 0 is no contact."
   2.203 +  [touch-region [coords contact :as touch]]
   2.204 +  (-> (zipmap coords contact)
   2.205 +      (select-keys touch-region)
   2.206 +      (vals)
   2.207 +      (#(map first %))
   2.208 +      (average)
   2.209 +      (* 10)
   2.210 +      (- 1)
   2.211 +      (Math/abs)))
   2.212 +   #+end_src
   2.213 +   #+end_listing
   2.214 +
   2.215 +
   2.216 +   #+caption: Program for detecting whether the worm is at rest. This program
   2.217 +   #+caption: uses a summary of the tactile information from the underbelly 
   2.218 +   #+caption: of the worm, and is only true if every segment is touching the 
   2.219 +   #+caption: floor. Note that this function contains no references to 
   2.220 +   #+caption: proprioception at all.
   2.221 +   #+name: resting
   2.222 +   #+begin_listing clojure
   2.223 +   #+begin_src clojure
   2.224 +(def worm-segment-bottom (rect-region [8 15] [14 22]))
   2.225 +
   2.226 +(defn resting?
   2.227 +  "Is the worm resting on the ground?"
   2.228 +  [experiences]
   2.229 +  (every?
   2.230 +   (fn [touch-data]
   2.231 +     (< 0.9 (contact worm-segment-bottom touch-data)))
   2.232 +   (:touch (peek experiences))))
   2.233 +   #+end_src
   2.234 +   #+end_listing
   2.235 +
   2.236 +   #+caption: Program for detecting whether the worm is curled up into a 
   2.237 +   #+caption: full circle. Here the embodied approach begins to shine, as
   2.238 +   #+caption: I am able to both use a previous action predicate (=curled?=)
   2.239 +   #+caption: as well as the direct tactile experience of the head and tail.
   2.240 +   #+name: grand-circle
   2.241 +   #+begin_listing clojure
   2.242 +   #+begin_src clojure
   2.243 +(def worm-segment-bottom-tip (rect-region [15 15] [22 22]))
   2.244 +
   2.245 +(def worm-segment-top-tip (rect-region [0 15] [7 22]))
   2.246 +
   2.247 +(defn grand-circle?
   2.248 +  "Does the worm form a majestic circle (one end touching the other)?"
   2.249 +  [experiences]
   2.250 +  (and (curled? experiences)
   2.251 +       (let [worm-touch (:touch (peek experiences))
   2.252 +             tail-touch (worm-touch 0)
   2.253 +             head-touch (worm-touch 4)]
   2.254 +         (and (< 0.55 (contact worm-segment-bottom-tip tail-touch))
   2.255 +              (< 0.55 (contact worm-segment-top-tip    head-touch))))))
   2.256 +   #+end_src
   2.257 +   #+end_listing
   2.258 +
   2.259 +
   2.260 +   #+caption: Program for detecting whether the worm has been wiggling for 
   2.261 +   #+caption: the last few frames. It uses a Fourier analysis of the muscle 
   2.262 +   #+caption: contractions of the worm's tail to determine wiggling. This is 
   2.263 +   #+caption: significant because there is no particular frame that clearly 
   2.264 +   #+caption: indicates that the worm is wiggling --- only when multiple frames 
   2.265 +   #+caption: are analyzed together is the wiggling revealed. Defining 
   2.266 +   #+caption: wiggling this way also gives the worm an opportunity to learn 
   2.267 +   #+caption: and recognize ``frustrated wiggling'', where the worm tries to 
   2.268 +   #+caption: wiggle but can't. Frustrated wiggling is very visually different 
   2.269 +   #+caption: from actual wiggling, but this definition gives it to us for free.
   2.270 +   #+name: wiggling
   2.271 +   #+begin_listing clojure
   2.272 +   #+begin_src clojure
   2.273 +(defn fft [nums]
   2.274 +  (map
   2.275 +   #(.getReal %)
   2.276 +   (.transform
   2.277 +    (FastFourierTransformer. DftNormalization/STANDARD)
   2.278 +    (double-array nums) TransformType/FORWARD)))
   2.279 +
   2.280 +(def indexed (partial map-indexed vector))
   2.281 +
   2.282 +(defn max-indexed [s]
   2.283 +  (first (sort-by (comp - second) (indexed s))))
   2.284 +
   2.285 +(defn wiggling?
   2.286 +  "Is the worm wiggling?"
   2.287 +  [experiences]
   2.288 +  (let [analysis-interval 0x40]
   2.289 +    (when (> (count experiences) analysis-interval)
   2.290 +      (let [a-flex 3
   2.291 +            a-ex   2
   2.292 +            muscle-activity
   2.293 +            (map :muscle (vector:last-n experiences analysis-interval))
   2.294 +            base-activity
   2.295 +            (map #(- (% a-flex) (% a-ex)) muscle-activity)]
   2.296 +        (= 2
   2.297 +           (first
   2.298 +            (max-indexed
   2.299 +             (map #(Math/abs %)
   2.300 +                  (take 20 (fft base-activity))))))))))
   2.301 +   #+end_src
   2.302 +   #+end_listing
   2.303 +
   2.304 +   With these action predicates, I can now recognize the actions of
   2.305 +   the worm while it is moving under my control and I have access to
   2.306 +   all the worm's senses.
   2.307 +
   2.308 +   #+caption: Use the action predicates defined earlier to report on 
   2.309 +   #+caption: what the worm is doing while in simulation.
   2.310 +   #+name: report-worm-activity
   2.311 +   #+begin_listing clojure
   2.312 +   #+begin_src clojure
   2.313 +(defn debug-experience
   2.314 +  [experiences text]
   2.315 +  (cond
   2.316 +   (grand-circle? experiences) (.setText text "Grand Circle")
   2.317 +   (curled? experiences)       (.setText text "Curled")
   2.318 +   (wiggling? experiences)     (.setText text "Wiggling")
   2.319 +   (resting? experiences)      (.setText text "Resting")))
   2.320 +   #+end_src
   2.321 +   #+end_listing
   2.322 +
   2.323 +   #+caption: Using =debug-experience=, the body-centered predicates
   2.324 +   #+caption: work together to classify the behavior of the worm
   2.325 +   #+caption: while under manual motor control.
   2.326 +   #+name: worm-identify-init
   2.327 +   #+ATTR_LaTeX: :width 10cm
   2.328 +   [[./images/worm-identify-init.png]]
   2.329 +
   2.330 +   These action predicates satisfy the recognition requirement of an
   2.331 +   empathic recognition system. There is a lot of power in the
   2.332 +   simplicity of the action predicates. They describe their actions
   2.333 +   without getting confused by visual details of the worm. Each one is
   2.334 +   frame independent, but more than that, each is independent of
   2.335 +   irrelevant visual details of the worm and the environment. They
   2.336 +   will work regardless of whether the worm is a different color or
   2.337 +   heavily textured, or if the environment has strange lighting.
   2.338 +
   2.339 +   The trick now is to make the action predicates work even when the
   2.340 +   sensory data on which they depend is absent. If I can do that, then
   2.341 +   I will have gained much.
   2.342  
   2.343  ** \Phi-space describes the worm's experiences
   2.344 +   
   2.345 +   As a first step towards building empathy, I need to gather all of
   2.346 +   the worm's experiences during free play. I use a simple vector to
   2.347 +   store all the experiences. 
   2.348 +   
   2.349 +   #+caption: Program to gather the worm's experiences into a vector for 
   2.350 +   #+caption: further processing. The =motor-control-program= line uses
   2.351 +   #+caption: a motor control script that causes the worm to execute a series
   2.352 +   #+caption: of ``exercises'' that include all the action predicates.
   2.353 +   #+name: generate-phi-space
   2.354 +   #+begin_listing clojure
   2.355 +   #+begin_src clojure
   2.356 +(defn generate-phi-space []
   2.357 +  (let [experiences (atom [])]
   2.358 +    (run-world
   2.359 +     (apply-map 
   2.360 +      worm-world
   2.361 +      (merge
   2.362 +       (worm-world-defaults)
   2.363 +       {:end-frame 700
   2.364 +        :motor-control
   2.365 +        (motor-control-program worm-muscle-labels do-all-the-things)
   2.366 +        :experiences experiences})))
   2.367 +    @experiences))
   2.368 +   #+end_src
   2.369 +   #+end_listing
   2.370 +
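
   As a usage sketch, the experiences can be gathered once and the
   \Phi-space lookup built from them. (=phi-space= and =phi-scan= are
   the vars forward-declared in =worm_learn.clj=; =gen-phi-scan= is
   defined later in this chapter.)

   #+begin_src clojure
;; Gather experiences during one session of free play, then build
;; the proprioceptive nearest-neighbor lookup over them.
(def phi-space (generate-phi-space))
(def phi-scan (gen-phi-scan phi-space))
   #+end_src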
   2.371 +   Each element of the experience vector exists in the vast space of
   2.372 +   all possible worm-experiences. Most of this vast space is actually
   2.373 +   unreachable due to physical constraints of the worm's body. For
   2.374 +   example, the worm's segments are connected by hinge joints that put
   2.375 +   a practical limit on the worm's degrees of freedom. Also, the worm
   2.376 +   cannot be bent into a circle with its ends touching without also
   2.377 +   experiencing the sensation of touching itself.
   2.378 +
   2.379 +   As the worm moves around during free play and the vector grows
   2.380 +   larger, the vector begins to define a subspace of all the
   2.381 +   practical experiences the worm can have during normal
   2.382 +   operation, which I call \Phi-space, short for physical-space. The
   2.383 +   vector defines a path through \Phi-space. This path has interesting
   2.384 +   properties that all derive from embodiment. The proprioceptive
   2.385 +   components are completely smooth, because in order for the worm to
   2.386 +   move from one position to another, it must pass through the
   2.387 +   intermediate positions. The path invariably forms loops as actions
   2.388 +   are repeated. Finally and most importantly, proprioception actually
   2.389 +   gives very strong inference about the other senses. For example,
   2.390 +   when the worm is flat, you can infer that it is touching the ground
   2.391 +   and that its muscles are not active, because if the muscles were
   2.392 +   active, the worm would be moving and would not be perfectly flat.
   2.393 +   In order to stay flat, the worm has to be touching the ground, or
   2.394 +   it would again be moving out of the flat position due to gravity.
   2.395 +   If the worm is positioned in such a way that it interacts with
   2.396 +   itself, then it is very likely to be feeling the same tactile
   2.397 +   feelings as the last time it was in that position, because it has
   2.398 +   the same body as then. If you observe multiple frames of
   2.399 +   proprioceptive data, then you can become increasingly confident
   2.400 +   about the exact activations of the worm's muscles, because it
   2.401 +   generally takes a unique combination of muscle contractions to
   2.402 +   transform the worm's body along a specific path through \Phi-space.
   2.403 +
   2.404 +   There is a simple way of taking \Phi-space and the total ordering
   2.405 +   provided by an experience vector and reliably inferring the rest of
   2.406 +   the senses.
   2.407  
   2.408  ** Empathy is the process of tracing through \Phi-space
   2.409 +
   2.410 +   The programs below match observed proprioceptive data against
   2.411 +   \Phi-space, then chain the matches into threads of past experience.
   2.412 +(defn bin [digits]
   2.413 +  (fn [angles]
   2.414 +    (->> angles
   2.415 +         (flatten)
   2.416 +         (map (juxt #(Math/sin %) #(Math/cos %)))
   2.417 +         (flatten)
   2.418 +         (mapv #(Math/round (* % (Math/pow 10 (dec digits))))))))
   2.419 +
   2.420 +(defn gen-phi-scan 
   2.421 +"Nearest-neighbors with spatial binning. Only returns a result if
   2.422 + the propriceptive data is within 10% of a previously recorded
   2.423 + result in all dimensions."
   2.424 +
   2.425 +[phi-space]
   2.426 +  (let [bin-keys (map bin [3 2 1])
   2.427 +        bin-maps
   2.428 +        (map (fn [bin-key]
   2.429 +               (group-by
   2.430 +                (comp bin-key :proprioception phi-space)
   2.431 +                (range (count phi-space)))) bin-keys)
   2.432 +        lookups (map (fn [bin-key bin-map]
   2.433 +                       (fn [proprio] (bin-map (bin-key proprio))))
   2.434 +                     bin-keys bin-maps)]
   2.435 +    (fn lookup [proprio-data]
   2.436 +      (set (some #(% proprio-data) lookups)))))
   2.437 +
   2.438 +
   2.439 +(defn longest-thread
   2.440 +  "Find the longest thread from phi-index-sets. The index sets should
   2.441 +   be ordered from most recent to least recent."
   2.442 +  [phi-index-sets]
   2.443 +  (loop [result '()
   2.444 +         [thread-bases & remaining :as phi-index-sets] phi-index-sets]
   2.445 +    (if (empty? phi-index-sets)
   2.446 +      (vec result)
   2.447 +      (let [threads
   2.448 +            (for [thread-base thread-bases]
   2.449 +              (loop [thread (list thread-base)
   2.450 +                     remaining remaining]
   2.451 +                (let [next-index (dec (first thread))]
   2.452 +                  (cond (empty? remaining) thread
   2.453 +                        (contains? (first remaining) next-index)
   2.454 +                        (recur
   2.455 +                         (cons next-index thread) (rest remaining))
   2.456 +                        :else thread))))
   2.457 +            longest-thread
   2.458 +            (reduce (fn [thread-a thread-b]
   2.459 +                      (if (> (count thread-a) (count thread-b))
   2.460 +                        thread-a thread-b))
   2.461 +                    '(nil)
   2.462 +                    threads)]
   2.463 +        (recur (concat longest-thread result)
   2.464 +               (drop (count longest-thread) phi-index-sets))))))
   2.465 +
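
   Here is a sketch (the name =imagined-experience= is hypothetical)
   of how these pieces combine: scan each frame of observed
   proprioceptive data into a set of \Phi-space indices, ordered most
   recent first, find the longest consistent thread through past
   experience, and read the imagined sensory experience back out of
   \Phi-space.

   #+begin_src clojure
;; Hypothetical combining sketch. phi-scan returns the set of
;; Phi-space indices matching one frame of proprioceptive data;
;; longest-thread may leave nil where no match was found.
(defn imagined-experience
  [phi-space phi-scan recent-proprio-data]
  (let [thread (longest-thread (map phi-scan recent-proprio-data))]
    (mapv #(when % (phi-space %)) thread)))
   #+end_src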
   2.466 +   There is one final piece, which is to replace missing sensory data
   2.467 +   with a best-guess estimate. While I could fill in missing data by
   2.468 +   using a gradient over the closest known sensory data points,
   2.469 +   averages can be misleading. It is certainly possible to create an
   2.470 +   impossible sensory state by averaging two possible sensory states.
   2.471 +   Therefore, I simply replicate the most recent sensory experience to
   2.472 +   fill in the gaps.
   2.473 +
   2.474 +   #+caption: Fill in blanks in sensory experience by replicating the most 
   2.475 +   #+caption: recent experience.
   2.476 +   #+name: infer-nils
   2.477 +   #+begin_listing clojure
   2.478 +   #+begin_src clojure
   2.479 +(defn infer-nils
   2.480 +  "Replace nils with the next available non-nil element in the
   2.481 +   sequence, or barring that, 0."
   2.482 +  [s]
   2.483 +  (loop [i (dec (count s))
   2.484 +         v (transient s)]
   2.485 +    (if (zero? i) (persistent! v)
   2.486 +        (if-let [cur (v i)]
   2.487 +          (if (get v (dec i) 0)
   2.488 +            (recur (dec i) v)
   2.489 +            (recur (dec i) (assoc! v (dec i) cur)))
   2.490 +          (recur i (assoc! v i 0))))))
   2.491 +   #+end_src
   2.492 +   #+end_listing
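
   As a quick check of =infer-nils= (a hypothetical REPL interaction,
   consistent with the definition above):

   #+begin_src clojure
;; The nil is back-filled from the next non-nil element to its right.
(infer-nils [1 nil 1 1]) ;; => [1 1 1 1]
   #+end_src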
   2.493 +
   2.494 +
   2.495 +
   2.496 +
   2.497    
   2.498  ** Efficient action recognition with =EMPATH=
   2.499  
   2.500 +** Digression: bootstrapping touch using free exploration
   2.501 +
   2.502  * Contributions
   2.503 -  - Built =CORTEX=, a comprehensive platform for embodied AI
   2.504 -    experiments. Has many new features lacking in other systems, such
   2.505 -    as sound. Easy to model/create new creatures.
   2.506 -  - created a novel concept for action recognition by using artificial
   2.507 -    imagination. 
   2.508 -
   2.509 -In the second half of the thesis I develop a computational model of
   2.510 -empathy, using =CORTEX= as a base. Empathy in this context is the
   2.511 -ability to observe another creature and infer what sorts of sensations
   2.512 -that creature is feeling. My empathy algorithm involves multiple
   2.513 -phases. First is free-play, where the creature moves around and gains
   2.514 -sensory experience. From this experience I construct a representation
   2.515 -of the creature's sensory state space, which I call \Phi-space. Using
   2.516 -\Phi-space, I construct an efficient function for enriching the
   2.517 -limited data that comes from observing another creature with a full
   2.518 -compliment of imagined sensory data based on previous experience. I
   2.519 -can then use the imagined sensory data to recognize what the observed
   2.520 -creature is doing and feeling, using straightforward embodied action
   2.521 -predicates. This is all demonstrated with using a simple worm-like
   2.522 -creature, and recognizing worm-actions based on limited data.
   2.523 -
   2.524 -Embodied representation using multiple senses such as touch,
   2.525 -proprioception, and muscle tension turns out be be exceedingly
   2.526 -efficient at describing body-centered actions. It is the ``right
   2.527 -language for the job''. For example, it takes only around 5 lines of
   2.528 -LISP code to describe the action of ``curling'' using embodied
   2.529 -primitives. It takes about 8 lines to describe the seemingly
   2.530 -complicated action of wiggling.
   2.531 -
   2.532 -
   2.533 -
   2.534 -* COMMENT names for cortex
   2.535 - - bioland
   2.536  
   2.537  
   2.538  
     3.1 Binary file thesis/images/basic-worm-view.png has changed
     4.1 Binary file thesis/images/full-hand.png has changed
     5.1 Binary file thesis/images/worm-identify-init.png has changed
     6.1 --- a/thesis/rlm-cortex-meng.tex	Tue Mar 25 22:54:41 2014 -0400
     6.2 +++ b/thesis/rlm-cortex-meng.tex	Wed Mar 26 02:42:01 2014 -0400
     6.3 @@ -43,7 +43,7 @@
     6.4  \usepackage{hyperref}
     6.5  \usepackage{libertine}
     6.6  \usepackage{inconsolata}
     6.7 -
     6.8 +\usepackage{rotating}
     6.9  
    6.10  \usepackage[backend=bibtex,style=alphabetic]{biblatex}
    6.11  \addbibresource{cortex.bib}