changeset 460:763d13f77e03

merge in laptop changes.
author Robert McIntyre <rlm@mit.edu>
date Thu, 27 Mar 2014 17:57:01 -0400
parents a86555b02916 (current diff) 26c13c42481f (diff)
children b345650a0baa
files java/build.xml java/src/com/aurellem/opencv/OpenCV.java org/ideas.org org/touch.org org/util.org
diffstat 89 files changed, 5410 insertions(+), 81 deletions(-) [+]
line wrap: on
line diff
     1.1 --- a/.hgignore	Thu Mar 27 17:56:26 2014 -0400
     1.2 +++ b/.hgignore	Thu Mar 27 17:57:01 2014 -0400
     1.3 @@ -15,6 +15,7 @@
     1.4  libbulletjme.so
     1.5  java/build/*
     1.6  java/dist/*
     1.7 +thesis/*.pdf
     1.8  
     1.9  syntax: regexp
    1.10  ^.*blend\d$
     2.1 Binary file assets/Models/test-touch/touch-cube.blend.orig has changed
     3.1 Binary file assets/Models/worm/basic-muscle.png has changed
     4.1 Binary file assets/Models/worm/touch-profile-imag.png has changed
     5.1 Binary file assets/Models/worm/touch-profile-imag.xcf has changed
     6.1 Binary file assets/Models/worm/touch-profile.png has changed
     7.1 Binary file assets/Models/worm/touch-profile.xcf has changed
     8.1 Binary file assets/Models/worm/worm-of-the-imagination.blend has changed
     9.1 Binary file assets/Models/worm/worm-segment.blend has changed
    10.1 Binary file assets/Models/worm/worm-single-segment.blend has changed
    11.1 Binary file assets/Models/worm/worm.blend has changed
    12.1 Binary file assets/Textures/aurellem.png has changed
    13.1 Binary file assets/Textures/greenGrid.png has changed
    14.1 Binary file assets/Textures/greenHexGrid.png has changed
    15.1 Binary file assets/Textures/squareGrid.png has changed
    16.1 Binary file gimp-patterns/film-grain-vertical.pat has changed
    17.1 Binary file gimp-patterns/gb-grid.pat has changed
    18.1 Binary file gimp-patterns/tactile-1.pat has changed
    19.1 Binary file gimp-patterns/tactile-128.pat has changed
    20.1 Binary file gimp-patterns/tactile-16.pat has changed
    21.1 Binary file gimp-patterns/tactile-2.pat has changed
    22.1 Binary file gimp-patterns/tactile-3.pat has changed
    23.1 Binary file gimp-patterns/tactile-32.pat has changed
    24.1 Binary file gimp-patterns/tactile-4.pat has changed
    25.1 Binary file gimp-patterns/tactile-5.pat has changed
    26.1 Binary file gimp-patterns/tactile-6.pat has changed
    27.1 Binary file gimp-patterns/tactile-64.pat has changed
    28.1 Binary file gimp-patterns/tactile-7.pat has changed
    29.1 Binary file gimp-patterns/tactile-8.pat has changed
    30.1 Binary file images/aurellem.xcf has changed
    31.1 --- a/java/build.xml	Thu Mar 27 17:56:26 2014 -0400
    31.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
    31.3 @@ -1,32 +0,0 @@
    31.4 -<project name="cortex-opencv" default="dist" basedir="."> 
    31.5 -
    31.6 -  <property name="build" value="build"/>
    31.7 -  <property name="src" value="src"/>
    31.8 -  <property name="dist" value="dist"/>
    31.9 -
   31.10 -  <target name="prepare">
   31.11 -    <mkdir dir="${build}"/>
   31.12 -    <mkdir dir="${dist}"/> 
   31.13 -  </target>
   31.14 -
   31.15 -  <target name="compile" depends="prepare" >
   31.16 -    <javac srcdir="${src}" destdir="${build}"
   31.17 -	   includeantruntime="false"/>
   31.18 -  </target>
   31.19 -
   31.20 -  <target name="dist" depends="compile">
   31.21 -    <jar jarfile="${dist}/cortex-opencv.jar">
   31.22 -      <fileset dir="${build}"/>
   31.23 -      <fileset dir="${src}"/>
   31.24 -    </jar>
   31.25 -  </target>
   31.26 -
   31.27 -  <target name="all" depends="dist"/>
   31.28 -
   31.29 -  <target name="clean" > 
   31.30 -    <delete dir="${build}"/> 
   31.31 -    <delete dir="${dist}"/> 
   31.32 -  </target> 
   31.33 -
   31.34 -</project>
   31.35 -
    32.1 --- a/java/src/com/aurellem/opencv/OpenCV.java	Thu Mar 27 17:56:26 2014 -0400
    32.2 +++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
    32.3 @@ -1,15 +0,0 @@
    32.4 -package com.aurellem.opencv;
    32.5 -
    32.6 -public class OpenCV {
    32.7 -
    32.8 -    public OpenCV(){}
    32.9 -    
   32.10 -    public static void loadVBA(){
   32.11 -	System.loadLibrary("opencv_java249");
   32.12 -    }
   32.13 -
   32.14 -    public static void absoluteLoadVBA(){
   32.15 -	System.load("/usr/share/OpenCV/java/libopencv_java249.so");
   32.16 -    }
   32.17 -
   32.18 -}
    33.1 --- a/org/ideas.org	Thu Mar 27 17:56:26 2014 -0400
    33.2 +++ b/org/ideas.org	Thu Mar 27 17:57:01 2014 -0400
    33.3 @@ -27,12 +27,12 @@
    33.4  - control of gravity within a certain radius
    33.5  - speed up/slow time
    33.6  - object creation/destruction
    33.7 -- future-sight -- step the simulation forward a few ticks, gather
    33.8 +- prescience -- step the simulation forward a few ticks, gather
    33.9    sensory data, then supply this data for the creature as one of its
   33.10    actual senses.
   33.11  
   33.12  - Symbol Sense
   33.13 -   Where objects in the world can be queried for description /
   33.14 +   Objects in the world can be queried for description /
   33.15     symbols.
   33.16  
   33.17  - Symbol Marking
   33.18 @@ -66,10 +66,6 @@
   33.19   - make a joint that figures out what type of joint it is (range of
   33.20     motion)
   33.21  
   33.22 -   
   33.23 -
   33.24 -
   33.25 -
   33.26  * goals
   33.27  
   33.28  ** have to get done before Winston
   33.29 @@ -104,8 +100,6 @@
   33.30  ** don't have to get done before winston
   33.31   - [X] write tests for integration -- 3 days
   33.32   - [X] usertime/gametime clock HUD display -- day 
   33.33 - - [ ] find papers for each of the senses justifying my own
   33.34 -       representation -- week
   33.35   - [X] show sensor maps in HUD display? -- 4 days
   33.36   - [X] show sensor maps in AWT display? -- 2 days
   33.37   - [X] upgrade to clojure 1.3, replace all defvars with new def
   33.38 @@ -122,10 +116,9 @@
   33.39  ;;Each minute and unseen part;
   33.40  ;;For the Gods see everywhere.
   33.41  
   33.42 -
   33.43  * misc
   33.44    - use object tracking on moving objects to derive good static
   33.45      detectors and achieve background separation
   33.46    - temporal scale pyramids.  this can help in verb recognition by
   33.47      making verb identification time-scale independent (up to a certian
   33.48 -    factor)
   33.49 \ No newline at end of file
   33.50 +    factor)
    34.1 --- a/org/movement.org	Thu Mar 27 17:56:26 2014 -0400
    34.2 +++ b/org/movement.org	Thu Mar 27 17:57:01 2014 -0400
    34.3 @@ -283,7 +283,7 @@
    34.4          muscles (pics "muscles/0")
    34.5          targets (map
    34.6                   #(File. (str base "out/" (format "%07d.png" %)))
    34.7 -                 (range 0 (count main-view)))]
    34.8 +                 (range (count main-view)))]
    34.9      (dorun
   34.10       (pmap
   34.11        (comp
    35.1 --- a/org/proprioception.org	Thu Mar 27 17:56:26 2014 -0400
    35.2 +++ b/org/proprioception.org	Thu Mar 27 17:57:01 2014 -0400
    35.3 @@ -52,7 +52,7 @@
    35.4     system. The three vectors do not have to be normalized or
    35.5     orthogonal."
    35.6    [vec1 vec2 vec3]
    35.7 -  (< 0 (.dot (.cross vec1 vec2) vec3)))
    35.8 +  (pos? (.dot (.cross vec1 vec2) vec3)))
    35.9  
   35.10  (defn absolute-angle
   35.11    "The angle between 'vec1 and 'vec2 around 'axis. In the range 
   35.12 @@ -328,7 +328,7 @@
   35.13          proprioception (pics "proprio/0")
   35.14          targets (map
   35.15                   #(File. (str base "out/" (format "%07d.png" %)))
   35.16 -                 (range 0 (count main-view)))]
   35.17 +                 (range (count main-view)))]
   35.18      (dorun
   35.19       (pmap
   35.20        (comp
   35.21 @@ -385,7 +385,7 @@
   35.22  
   35.23  * Next 
   35.24  
   35.25 -Next time, I'll give the Worm the power to [[./movement.org][move on it's own]].
   35.26 +Next time, I'll give the Worm the power to [[./movement.org][move on its own]].
   35.27  
   35.28  
   35.29  * COMMENT generate source
    36.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    36.2 +++ b/org/self_organizing_touch.clj	Thu Mar 27 17:57:01 2014 -0400
    36.3 @@ -0,0 +1,169 @@
    36.4 +(ns org.aurellem.self-organizing-touch
    36.5 +  "Using free play to automatically organize touch perception into regions."
    36.6 +  {:author "Robert McIntyre"}
    36.7 +  (:use (cortex world util import body sense
    36.8 +                hearing touch vision proprioception movement
    36.9 +                test))
   36.10 +  (:use [clojure set pprint])
   36.11 +  (:import (com.jme3.math ColorRGBA Vector3f))
   36.12 +  (:import java.io.File)
   36.13 +  (:import com.jme3.audio.AudioNode)
   36.14 +  (:import com.aurellem.capture.RatchetTimer)
   36.15 +  (:import (com.aurellem.capture Capture IsoTimer))
   36.16 +  (:import (com.jme3.math Vector3f ColorRGBA)))
   36.17 +
   36.18 +(use 'org.aurellem.worm-learn)
   36.19 +(dorun (cortex.import/mega-import-jme3))
   36.20 +
   36.21 +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
   36.22 +;; A demonstration of self organiging touch maps through experience. ;
   36.23 +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
   36.24 +
   36.25 +(def single-worm-segment-view
   36.26 +  [(Vector3f. 2.0681207, -6.1406755, 1.6106138)
   36.27 +   (Quaternion. -0.15558705, 0.843615, -0.3428654, -0.38281822)])
   36.28 +
   36.29 +(def worm-single-segment-muscle-labels
   36.30 +  [:lift-1 :lift-2 :roll-1 :roll-2])
   36.31 +
   36.32 +(defn touch-kinesthetics []
   36.33 +  [[170 :lift-1 40]
   36.34 +   [190 :lift-1 19]
   36.35 +   [206 :lift-1  0]
   36.36 +
   36.37 +   [400 :lift-2 40]
   36.38 +   [410 :lift-2  0]
   36.39 +
   36.40 +   [570 :lift-2 40]
   36.41 +   [590 :lift-2 21]
   36.42 +   [606 :lift-2  0]
   36.43 +
   36.44 +   [800 :lift-1 30]
   36.45 +   [809 :lift-1 0]
   36.46 +
   36.47 +   [900 :roll-2 40]
   36.48 +   [905 :roll-2 20]
   36.49 +   [910 :roll-2  0]
   36.50 +
   36.51 +   [1000 :roll-2 40]
   36.52 +   [1005 :roll-2 20]
   36.53 +   [1010 :roll-2  0]
   36.54 +   
   36.55 +   [1100 :roll-2 40]
   36.56 +   [1105 :roll-2 20]
   36.57 +   [1110 :roll-2  0]
   36.58 +   ])
   36.59 +
   36.60 +(defn single-worm-segment []
   36.61 +  (load-blender-model "Models/worm/worm-single-segment.blend"))
   36.62 +
   36.63 +(defn worm-segment []
   36.64 +  (let [model (single-worm-segment)]
   36.65 +    {:body (doto model (body!))
   36.66 +     :touch (touch! model)
   36.67 +     :proprioception (proprioception! model)
   36.68 +     :muscles (movement! model)}))
   36.69 +
   36.70 +
   36.71 +(defn worm-segment-defaults []
   36.72 +  (let [direct-control (worm-direct-control worm-muscle-labels 40)]
   36.73 +    (merge (worm-world-defaults)
   36.74 +           {:worm worm-segment
   36.75 +            :view single-worm-segment-view
   36.76 +            :experience-watch nil
   36.77 +            :motor-control
   36.78 +            (motor-control-program
   36.79 +             worm-single-segment-muscle-labels
   36.80 +             (touch-kinesthetics))
   36.81 +            :end-frame 1200})))
   36.82 +
   36.83 +(def full-contact [(float 0.0) (float 0.1)])
   36.84 +
   36.85 +(defn pure-touch?
   36.86 +  "This is worm specific code to determine if a large region of touch
   36.87 +   sensors is either all on or all off."
   36.88 +  [[coords touch :as touch-data]]
   36.89 +  (= (set (map first touch)) (set full-contact)))
   36.90 +
   36.91 +(defn remove-similar
   36.92 +  [coll]
   36.93 +  (loop [result () coll (sort-by (comp - count) coll)]
   36.94 +    (if (empty? coll) result
   36.95 +        (let  [[x & xs] coll
   36.96 +               c (count x)]
   36.97 +          (if (some
   36.98 +               (fn [other-set]
   36.99 +                 (let [oc (count other-set)]
  36.100 +                   (< (- (count (union other-set x)) c) (* oc 0.1))))
  36.101 +               xs)
  36.102 +            (recur result xs)
  36.103 +            (recur (cons x result) xs))))))
  36.104 +
  36.105 +(def all-touch-coordinates
  36.106 +  (concat
  36.107 +   (rect-region [0  15] [7  22])
  36.108 +   (rect-region [8   0] [14 29])
  36.109 +   (rect-region [15 15] [22 22])))
  36.110 +
  36.111 +(defn view-touch-region
  36.112 +  ([coords out]
  36.113 +     (let [touched-region
  36.114 +           (reduce
  36.115 +            (fn [m k]
  36.116 +              (assoc m k [0.0 0.1]))
  36.117 +            (zipmap all-touch-coordinates (repeat [0.1 0.1])) coords)
  36.118 +           data
  36.119 +           [[(vec (keys touched-region)) (vec (vals touched-region))]]
  36.120 +           touch-display (view-touch)]
  36.121 +       (touch-display data out)))
  36.122 +  ([coords] (view-touch-region nil)))
  36.123 +
  36.124 +
  36.125 +(defn learn-touch-regions []
  36.126 +  (let [experiences (atom [])
  36.127 +        world (apply-map
  36.128 +               worm-world
  36.129 +               (assoc (worm-segment-defaults)
  36.130 +                 :experiences experiences
  36.131 +                 :record (File. "/home/r/proj/cortex/thesis/video/touch-learn-2/")))]
  36.132 +    (run-world world)
  36.133 +    (->>
  36.134 +     @experiences
  36.135 +     (drop 175)
  36.136 +     ;; access the single segment's touch data
  36.137 +     (map (comp first :touch))
  36.138 +     ;; only deal with "pure" touch data to determine surfaces
  36.139 +     (filter pure-touch?)
  36.140 +     ;; associate coordinates with touch values
  36.141 +     (map (partial apply zipmap))
  36.142 +     ;; select those regions where contact is being made
  36.143 +     (map (partial group-by second))
  36.144 +     (map #(get % full-contact))
  36.145 +     (map (partial map first))
  36.146 +     ;; remove redundant/subset regions
  36.147 +     (map set)
  36.148 +     remove-similar)))
  36.149 +
  36.150 +
  36.151 +(def all-touch-coordinates
  36.152 +  (concat
  36.153 +   (rect-region [0  15] [7  22])
  36.154 +   (rect-region [8   0] [14 29])
  36.155 +   (rect-region [15 15] [22 22])))
  36.156 +
  36.157 +(defn view-touch-region [coords]
  36.158 +  (let [touched-region
  36.159 +        (reduce
  36.160 +         (fn [m k]
  36.161 +           (assoc m k [0.0 0.1]))
  36.162 +         (zipmap all-touch-coordinates (repeat [0.1 0.1])) coords)
  36.163 +        data
  36.164 +        [[(vec (keys touched-region)) (vec (vals touched-region))]]
  36.165 +        touch-display (view-touch)]
  36.166 +    (dorun (repeatedly 5 #(touch-display data)))))
  36.167 +
  36.168 +(defn learn-and-view-touch-regions []
  36.169 +  (map view-touch-region
  36.170 +       (learn-touch-regions)))
  36.171 +
  36.172 +
    37.1 --- a/org/sense.org	Thu Mar 27 17:56:26 2014 -0400
    37.2 +++ b/org/sense.org	Thu Mar 27 17:57:01 2014 -0400
    37.3 @@ -257,7 +257,7 @@
    37.4     Returns a function that accepts a BufferedImage and draws it to the
    37.5     JPanel. If given a directory it will save the images as png files
    37.6     starting at 0000000.png and incrementing from there."
    37.7 -  ([#^File save]
    37.8 +  ([#^File save title]
    37.9       (let [idx (atom -1)
   37.10             image
   37.11             (atom
   37.12 @@ -268,7 +268,7 @@
   37.13                 [graphics]
   37.14                 (proxy-super paintComponent graphics)
   37.15                 (.drawImage graphics @image 0 0 nil)))
   37.16 -           frame (JFrame. "Display Image")]
   37.17 +           frame (JFrame. title)]
   37.18         (SwingUtilities/invokeLater
   37.19          (fn []
   37.20            (doto frame
   37.21 @@ -285,6 +285,8 @@
   37.22             (ImageIO/write
   37.23              i "png"
   37.24              (File. save (format "%07d.png" (swap! idx inc))))))))
   37.25 +  ([#^File save]
   37.26 +     (view-image save "Display Image"))
   37.27    ([] (view-image nil)))
   37.28  
   37.29  (defn view-sense 
    38.1 --- a/org/touch.org	Thu Mar 27 17:56:26 2014 -0400
    38.2 +++ b/org/touch.org	Thu Mar 27 17:57:01 2014 -0400
    38.3 @@ -78,7 +78,7 @@
    38.4  To simulate touch there are three conceptual steps. For each solid
    38.5  object in the creature, you first have to get UV image and scale
    38.6  parameter which define the position and length of the feelers. Then,
    38.7 -you use the triangles which compose the mesh and the UV data stored in
    38.8 +you use the triangles which comprise the mesh and the UV data stored in
    38.9  the mesh to determine the world-space position and orientation of each
   38.10  feeler. Then once every frame, update these positions and orientations
   38.11  to match the current position and orientation of the object, and use
   38.12 @@ -136,7 +136,7 @@
   38.13  A =Mesh= is composed of =Triangles=, and each =Triangle= has three
   38.14  vertices which have coordinates in world space and UV space.
   38.15   
   38.16 -Here, =triangles= gets all the world-space triangles which compose a
   38.17 +Here, =triangles= gets all the world-space triangles which comprise a
   38.18  mesh, while =pixel-triangles= gets those same triangles expressed in
   38.19  pixel coordinates (which are UV coordinates scaled to fit the height
   38.20  and width of the UV image).
   38.21 @@ -152,7 +152,7 @@
   38.22       (.getTriangle (.getMesh geo) triangle-index scratch) scratch)))
   38.23  
   38.24  (defn triangles
   38.25 -  "Return a sequence of all the Triangles which compose a given
   38.26 +  "Return a sequence of all the Triangles which comprise a given
   38.27     Geometry." 
   38.28    [#^Geometry geo]
   38.29    (map (partial triangle geo) (range (.getTriangleCount (.getMesh geo)))))
   38.30 @@ -240,7 +240,7 @@
   38.31    [#^Triangle t]
   38.32    (let [mat (Matrix4f.)
   38.33          [vert-1 vert-2 vert-3]
   38.34 -        ((comp vec map) #(.get t %) (range 3))
   38.35 +        (mapv #(.get t %) (range 3))
   38.36          unit-normal (do (.calculateNormal t)(.getNormal t))
   38.37          vertices [vert-1 vert-2 vert-3 unit-normal]]
   38.38      (dorun 
    39.1 --- a/org/util.org	Thu Mar 27 17:56:26 2014 -0400
    39.2 +++ b/org/util.org	Thu Mar 27 17:57:01 2014 -0400
    39.3 @@ -129,10 +129,13 @@
    39.4  
    39.5  (defn position-camera
    39.6    "Change the position of the in-world camera."
    39.7 -  [world #^Vector3f position #^Quaternion rotation]
    39.8 +  ([world #^Vector3f position #^Quaternion rotation]
    39.9       (doto (.getCamera world)
   39.10         (.setLocation position)
   39.11         (.setRotation rotation)))
   39.12 +  ([world [position rotation]]
   39.13 +     (position-camera world position rotation)))
   39.14 +     
   39.15  
   39.16  (defn enable-debug 
   39.17    "Turn on debug wireframes for every object in this simulation."
   39.18 @@ -146,13 +149,13 @@
   39.19  
   39.20  (defn speed-up 
   39.21    "Increase the dismally slow speed of the world's camera."
   39.22 -  [world]
   39.23 -  (.setMoveSpeed (.getFlyByCamera world)
   39.24 -                 (float 60))
   39.25 -  (.setRotationSpeed (.getFlyByCamera world)
   39.26 -                     (float 3))
   39.27 -  world)
   39.28 -
   39.29 +  ([world] (speed-up world 1))
   39.30 +  ([world amount]
   39.31 +     (.setMoveSpeed (.getFlyByCamera world)
   39.32 +                    (float (* amount 60)))
   39.33 +     (.setRotationSpeed (.getFlyByCamera world)
   39.34 +                        (float (* amount 3)))
   39.35 +     world))
   39.36  
   39.37  (defn no-logging 
   39.38    "Disable all of jMonkeyEngine's logging."
   39.39 @@ -682,7 +685,7 @@
   39.40  #+end_src
   39.41  
   39.42  
   39.43 -* COMMENT code generation
   39.44 +* code generation
   39.45  #+begin_src clojure :tangle ../src/cortex/import.clj
   39.46  <<import>>
   39.47  #+end_src
    40.1 --- a/org/world.org	Thu Mar 27 17:56:26 2014 -0400
    40.2 +++ b/org/world.org	Thu Mar 27 17:57:01 2014 -0400
    40.3 @@ -177,7 +177,7 @@
    40.4  
    40.5  (defn initialize-inputs
    40.6    "Establish key-bindings for a particular virtual world."
    40.7 -  [game  input-manager key-map]
    40.8 +  [game input-manager key-map]
    40.9    (doall
   40.10     (map (fn [[name trigger]]
   40.11            (.addMapping
   40.12 @@ -192,9 +192,6 @@
   40.13  
   40.14  #+end_src
   40.15  
   40.16 -#+results: input
   40.17 -: #'cortex.world/initialize-inputs
   40.18 -
   40.19  These functions are for controlling the world through the keyboard and
   40.20  mouse.
   40.21  
   40.22 @@ -247,7 +244,7 @@
   40.23    "the =world= function takes care of the details of initializing a
   40.24    SimpleApplication.
   40.25  
   40.26 -   ***** Arguments:
   40.27 +   ,***** Arguments:
   40.28  
   40.29     - root-node : a com.jme3.scene.Node object which contains all of
   40.30         the objects that should be in the simulation.
    41.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    41.2 +++ b/org/worm_learn.clj	Thu Mar 27 17:57:01 2014 -0400
    41.3 @@ -0,0 +1,562 @@
    41.4 +(ns org.aurellem.worm-learn
    41.5 +  "General worm creation framework."
    41.6 +  {:author "Robert McIntyre"}
    41.7 +  (:use (cortex world util import body sense
    41.8 +                hearing touch vision proprioception movement
    41.9 +                test))
   41.10 +  (:import (com.jme3.math ColorRGBA Vector3f))
   41.11 +  (:import java.io.File)
   41.12 +  (:import com.jme3.audio.AudioNode)
   41.13 +  (:import com.aurellem.capture.RatchetTimer)
   41.14 +  (:import (com.aurellem.capture Capture IsoTimer))
   41.15 +  (:import (com.jme3.math Vector3f ColorRGBA)))
   41.16 +
   41.17 +(import org.apache.commons.math3.transform.TransformType)
   41.18 +(import org.apache.commons.math3.transform.FastFourierTransformer)
   41.19 +(import org.apache.commons.math3.transform.DftNormalization)
   41.20 +
   41.21 +(use 'clojure.pprint)
   41.22 +(use 'clojure.set)
   41.23 +(dorun (cortex.import/mega-import-jme3))
   41.24 +(rlm.rlm-commands/help)
   41.25 +
   41.26 +(load-bullet)
   41.27 +
   41.28 +(def hand "Models/test-creature/hand.blend")
   41.29 +
   41.30 +(defn worm-model []
   41.31 +  (load-blender-model "Models/worm/worm.blend"))
   41.32 +
   41.33 +(defn worm []
   41.34 +  (let [model (load-blender-model "Models/worm/worm.blend")]
   41.35 +    {:body (doto model (body!))
   41.36 +     :touch (touch! model)
   41.37 +     :proprioception (proprioception! model)
   41.38 +     :muscles (movement! model)}))
   41.39 +
   41.40 +(defn worm* []
   41.41 +  (let [model (load-blender-model "Models/worm/worm-of-the-imagination.blend")]
   41.42 +    {:body (doto model (body!))
   41.43 +     :touch (touch! model)
   41.44 +     :proprioception (proprioception! model)
   41.45 +     :muscles (movement! model)}))
   41.46 +
   41.47 +
   41.48 +(def output-base (File. "/home/r/proj/cortex/render/worm-learn/curl"))
   41.49 +
   41.50 +
   41.51 +(defn motor-control-program
   41.52 +  "Create a function which will execute the motor script"
   41.53 +  [muscle-labels
   41.54 +   script]
   41.55 +  (let [current-frame (atom -1)
   41.56 +        keyed-script (group-by first script)
   41.57 +        current-forces (atom {}) ]
   41.58 +    (fn [effectors]
   41.59 +      (let [indexed-effectors (vec effectors)]
   41.60 +        (dorun 
   41.61 +         (for [[_ part force] (keyed-script (swap! current-frame inc))]
   41.62 +           (swap! current-forces (fn [m] (assoc m part force)))))
   41.63 +        (doall (map (fn [effector power]
   41.64 +                      (effector (int power)))
   41.65 +                    effectors
   41.66 +                    (map #(@current-forces % 0) muscle-labels)))))))
   41.67 +
   41.68 +(defn worm-direct-control
   41.69 +  "Create keybindings and a muscle control program that will enable
   41.70 +   the user to control the worm via the keyboard."
   41.71 +  [muscle-labels activation-strength]
   41.72 +  (let [strengths (mapv (fn [_] (atom 0)) muscle-labels)
   41.73 +        activator
   41.74 +        (fn [n]
   41.75 +          (fn [world pressed?]
   41.76 +            (let [strength (if pressed? activation-strength 0)]
   41.77 +              (swap! (nth strengths n) (constantly strength)))))
   41.78 +        activators
   41.79 +        (map activator (range (count muscle-labels)))
   41.80 +        worm-keys
   41.81 +        ["key-f" "key-r"
   41.82 +         "key-g" "key-t"
   41.83 +         "key-h" "key-y"
   41.84 +         "key-j" "key-u"
   41.85 +         "key-k" "key-i"
   41.86 +         "key-l" "key-o"]]
   41.87 +    {:motor-control
   41.88 +     (fn [effectors]
   41.89 +      (doall
   41.90 +       (map (fn [strength effector]
   41.91 +              (effector (deref strength)))
   41.92 +            strengths effectors)))
   41.93 +     :keybindings
   41.94 +     ;; assume muscles are listed in pairs and map them to keys.
   41.95 +     (zipmap worm-keys activators)}))
   41.96 +
   41.97 +;; These are scripts that direct the worm to move in two radically
   41.98 +;; different patterns -- a sinusoidal wiggling motion, and a curling
   41.99 +;; motions that causes the worm to form a circle.
  41.100 +
  41.101 +(def curl-script
  41.102 +  [[150 :d-flex 40]
  41.103 +   [250 :d-flex 0]])
  41.104 +
  41.105 +(def period 18)
  41.106 +
  41.107 +(def worm-muscle-labels
  41.108 +  [:base-ex :base-flex
  41.109 +   :a-ex :a-flex
  41.110 +   :b-ex :b-flex
  41.111 +   :c-ex :c-flex
  41.112 +   :d-ex :d-flex])
  41.113 +
  41.114 +(defn gen-wiggle [[flexor extensor :as muscle-pair] time-base]
  41.115 +  (let [period period
  41.116 +        power 45]
  41.117 +    [[time-base flexor power]
  41.118 +     [(+ time-base period) flexor 0]
  41.119 +     [(+ time-base period 1) extensor power]
  41.120 +     [(+ time-base (+ (* 2 period) 2))  extensor 0]]))
  41.121 +  
  41.122 +(def wiggle-script
  41.123 +  (mapcat gen-wiggle (repeat 4000 [:a-ex :a-flex])
  41.124 +                     (range 100 1000000 (+ 3 (* period 2)))))
  41.125 +
  41.126 +
  41.127 +(defn shift-script [shift script]
  41.128 +  (map (fn [[time label power]] [(+ time shift) label power])
  41.129 +       script))
  41.130 +
  41.131 +(def do-all-the-things 
  41.132 +  (concat
  41.133 +   curl-script
  41.134 +   [[300 :d-ex 40]
  41.135 +    [320 :d-ex 0]]
  41.136 +   (shift-script 280 (take 16 wiggle-script))))
  41.137 +
  41.138 +;; Normally, we'd use unsupervised/supervised machine learning to pick
  41.139 +;; out the defining features of the different actions available to the
  41.140 +;; worm. For this project, I am going to explicitely define functions
  41.141 +;; that recognize curling and wiggling respectively. These functions
  41.142 +;; are defined using all the information available from an embodied
  41.143 +;; simulation of the action. Note how much easier they are to define
  41.144 +;; than if I only had vision to work with. Things like scale/position
  41.145 +;; invariance are complete non-issues here. This is the advantage of
  41.146 +;; body-centered action recognition and what I hope to show with this
  41.147 +;; thesis.
  41.148 +
  41.149 +
  41.150 +;; curled? relies on proprioception, resting? relies on touch,
  41.151 +;; wiggling? relies on a fourier analysis of muscle contraction, and
  41.152 +;; grand-circle? relies on touch and reuses curled? as a gaurd.
  41.153 +
  41.154 +(defn curled?
  41.155 +  "Is the worm curled up?"
  41.156 +  [experiences]
  41.157 +  (every?
  41.158 +   (fn [[_ _ bend]]
  41.159 +     (> (Math/sin bend) 0.64))
  41.160 +   (:proprioception (peek experiences))))
  41.161 +
  41.162 +(defn rect-region [[x0 y0] [x1 y1]]
  41.163 +  (vec
  41.164 +   (for [x (range x0 (inc x1))
  41.165 +         y (range y0 (inc y1))]
  41.166 +     [x y])))
  41.167 +
  41.168 +(def worm-segment-bottom (rect-region [8 15] [14 22]))
  41.169 +
  41.170 +(defn contact
  41.171 +  "Determine how much contact a particular worm segment has with
  41.172 +   other objects. Returns a value between 0 and 1, where 1 is full
  41.173 +   contact and 0 is no contact."
  41.174 +  [touch-region [coords contact :as touch]]
  41.175 +  (-> (zipmap coords contact)
  41.176 +      (select-keys touch-region)
  41.177 +      (vals)
  41.178 +      (#(map first %))
  41.179 +      (average)
  41.180 +      (* 10)
  41.181 +      (- 1)
  41.182 +      (Math/abs)))
  41.183 +
  41.184 +(defn resting?
  41.185 +  "Is the worm resting on the ground?"
  41.186 +  [experiences]
  41.187 +  (every?
  41.188 +   (fn [touch-data]
  41.189 +     (< 0.9 (contact worm-segment-bottom touch-data)))
  41.190 +   (:touch (peek experiences))))
  41.191 +
  41.192 +(defn vector:last-n [v n]
  41.193 +  (let [c (count v)]
  41.194 +    (if (< c n) v
  41.195 +        (subvec v (- c n) c))))
  41.196 +
  41.197 +(defn fft [nums]
  41.198 +  (map
  41.199 +   #(.getReal %)
  41.200 +   (.transform
  41.201 +    (FastFourierTransformer. DftNormalization/STANDARD)
  41.202 +    (double-array nums) TransformType/FORWARD)))
  41.203 +
  41.204 +(def indexed (partial map-indexed vector))
  41.205 +
  41.206 +(defn max-indexed [s]
  41.207 +  (first (sort-by (comp - second) (indexed s))))
  41.208 +
  41.209 +(defn wiggling?
  41.210 +  "Is the worm wiggling?"
  41.211 +  [experiences]
  41.212 +  (let [analysis-interval 96]
  41.213 +    (when (> (count experiences) analysis-interval)
  41.214 +      (let [a-flex 3
  41.215 +            a-ex   2
  41.216 +            muscle-activity
  41.217 +            (map :muscle (vector:last-n experiences analysis-interval))
  41.218 +            base-activity
  41.219 +            (map #(- (% a-flex) (% a-ex)) muscle-activity)
  41.220 +            accept?
  41.221 +            (fn [activity]
  41.222 +              (->> activity (fft) (take 20) (map #(Math/abs %))
  41.223 +                   (max-indexed) (first) (<= 2)))]
  41.224 +        (or (accept? (take 64 base-activity))
  41.225 +            (accept? (take 64 (drop 20 base-activity))))))))
  41.226 +
  41.227 +
  41.228 +
  41.229 +(def worm-segment-bottom-tip (rect-region [15 15] [22 22]))
  41.230 +
  41.231 +(def worm-segment-top-tip (rect-region [0 15] [7 22]))
  41.232 +
  41.233 +(defn grand-circle?
  41.234 +  "Does the worm form a majestic circle (one end touching the other)?"
  41.235 +  [experiences]
  41.236 +  (and (curled? experiences)
  41.237 +       (let [worm-touch (:touch (peek experiences))
  41.238 +             tail-touch (worm-touch 0)
  41.239 +             head-touch (worm-touch 4)]
  41.240 +         (and (< 0.1 (contact worm-segment-bottom-tip tail-touch))
  41.241 +              (< 0.1 (contact worm-segment-top-tip    head-touch))))))
  41.242 +
  41.243 +
  41.244 +(declare phi-space phi-scan debug-experience) 
  41.245 +
  41.246 +
  41.247 +
  41.248 +(def standard-world-view
  41.249 +  [(Vector3f. 4.207176, -3.7366982, 3.0816958)
  41.250 +   (Quaternion. 0.11118768, 0.87678415, 0.24434438, -0.3989771)])
  41.251 +
  41.252 +(def worm-side-view
  41.253 +  [(Vector3f. 4.207176, -3.7366982, 3.0816958)
  41.254 +   (Quaternion. -0.11555642, 0.88188726, -0.2854942, -0.3569518)])
  41.255 +
  41.256 +(def degenerate-worm-view
  41.257 +  [(Vector3f. -0.0708936, -8.570261, 2.6487997)
  41.258 +   (Quaternion. -2.318909E-4, 0.9985348, 0.053941682, 0.004291452)])
  41.259 +
  41.260 +(defn worm-world-defaults []
  41.261 +  (let [direct-control (worm-direct-control worm-muscle-labels 40)]
  41.262 +    (merge direct-control     
  41.263 +           {:view worm-side-view
  41.264 +            :record nil
  41.265 +            :experiences (atom [])
  41.266 +            :experience-watch debug-experience
  41.267 +            :worm worm
  41.268 +            :end-frame nil})))
  41.269 +
  41.270 +(defn dir! [file]
  41.271 +  (if-not (.exists file)
  41.272 +    (.mkdir file))
  41.273 +  file)
  41.274 +
  41.275 +(defn record-experience! [experiences data]
  41.276 +  (swap! experiences #(conj % data)))
  41.277 +
  41.278 +(defn enable-shadows [world]
  41.279 +  (let [bsr (doto
  41.280 +                (BasicShadowRenderer. (asset-manager) 512)
  41.281 +              (.setDirection (.normalizeLocal (Vector3f. 1 -1 -1))))]
  41.282 +    (.addProcessor (.getViewPort world) bsr)))
  41.283 +
  41.284 +(defn enable-good-shadows [world]
  41.285 +  (let [pssm
  41.286 +        (doto (PssmShadowRenderer. (asset-manager) 1024 3)
  41.287 +          (.setDirection  (.normalizeLocal (Vector3f. -1 -3 -1)))
  41.288 +          (.setLambda (float 0.55))
  41.289 +          (.setShadowIntensity (float 0.6))
  41.290 +          (.setCompareMode PssmShadowRenderer$CompareMode/Software)
  41.291 +          (.setFilterMode  PssmShadowRenderer$FilterMode/Bilinear))]
  41.292 +    (.addProcessor (.getViewPort world) pssm)))
  41.293 +        
  41.294 +(defn debug-experience
  41.295 +  [experiences text]
  41.296 +  (cond
  41.297 +   (grand-circle? experiences) (.setText text "Grand Circle")
  41.298 +   (curled? experiences)       (.setText text "Curled")
  41.299 +   (wiggling? experiences)     (.setText text "Wiggling")
  41.300 +   (resting? experiences)      (.setText text "Resting")
  41.301 +   :else                       (.setText text "Unknown")))
  41.302 +
  41.303 +
  41.304 +(defn worm-world
  41.305 +  [& {:keys    [record motor-control keybindings view experiences
  41.306 +                worm end-frame experience-watch] :as settings}]
  41.307 +  (let [{:keys [record motor-control keybindings view experiences
  41.308 +                worm end-frame experience-watch]}
  41.309 +        (merge (worm-world-defaults) settings)
  41.310 +       
  41.311 +        touch-display  (view-touch)
  41.312 +        prop-display   (view-proprioception)
  41.313 +        muscle-display (view-movement)
  41.314 +        {:keys [proprioception touch muscles body]} (worm)
  41.315 +        
  41.316 +        floor
  41.317 +        (box 5 1 5 :position (Vector3f. 0 -10 0)
  41.318 +             :mass 0
  41.319 +             :texture "Textures/aurellem.png"
  41.320 +             :material "Common/MatDefs/Misc/Unshaded.j3md")
  41.321 +        timer (IsoTimer. 60)
  41.322 +
  41.323 +        font (.loadFont (asset-manager) "Interface/Fonts/Console.fnt")
  41.324 +        worm-action (doto (BitmapText. font false)
  41.325 +                      (.setSize 35)
  41.326 +                      (.setColor (ColorRGBA/Black)))]
  41.327 +
  41.328 +    (world
  41.329 +     (nodify [body floor])
  41.330 +       (merge standard-debug-controls keybindings)
  41.331 +       (fn [world]
  41.332 +         (.setLocalTranslation
  41.333 +          worm-action 20 470 0)
  41.334 +         (.attachChild (.getGuiNode world) worm-action)
  41.335 +         
  41.336 +         (enable-good-shadows world)
  41.337 +         (.setShadowMode body RenderQueue$ShadowMode/CastAndReceive)
  41.338 +         (.setShadowMode floor RenderQueue$ShadowMode/Receive)
  41.339 +                  
  41.340 +         (.setBackgroundColor (.getViewPort world) (ColorRGBA/White))
  41.341 +         (.setDisplayStatView world false)
  41.342 +         (.setDisplayFps world false)
  41.343 +         (position-camera world view)
  41.344 +         (.setTimer world timer)
  41.345 +         ;;(display-dilated-time world timer)
  41.346 +         (when record
  41.347 +           (dir! record)
  41.348 +           (Capture/captureVideo
  41.349 +            world
  41.350 +            (dir! (File. record "main-view"))))
  41.351 +         (speed-up world 0.5)
  41.352 +         ;;(light-up-everything world)
  41.353 +         )
  41.354 +       (fn [world tpf]
  41.355 +         (if (and end-frame (> (.getTime timer) end-frame))
  41.356 +           (.stop world))
  41.357 +         (let [muscle-data (vec (motor-control muscles))
  41.358 +               proprioception-data (proprioception)
  41.359 +               touch-data (mapv #(% (.getRootNode world)) touch)]
  41.360 +           (when experiences
  41.361 +             (record-experience!
  41.362 +              experiences {:touch touch-data
  41.363 +                           :proprioception proprioception-data
  41.364 +                           :muscle muscle-data}))
  41.365 +           (when experience-watch
  41.366 +             (experience-watch @experiences worm-action))
  41.367 +           (muscle-display
  41.368 +            muscle-data
  41.369 +            (when record (dir! (File. record "muscle"))))
  41.370 +           (prop-display
  41.371 +            proprioception-data
  41.372 +            (when record (dir! (File. record "proprio"))))
  41.373 +           (touch-display 
  41.374 +            touch-data
  41.375 +            (when record (dir! (File. record "touch")))))))))
  41.376 +
  41.377 +
  41.378 +
  41.379 +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
  41.380 +;;;;;;;;   Phi-Space   ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
  41.381 +;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
  41.382 +
  41.383 +(defn generate-phi-space []
  41.384 +  (let [experiences (atom [])]
  41.385 +    (run-world
  41.386 +     (apply-map 
  41.387 +      worm-world
  41.388 +      (merge
  41.389 +       (worm-world-defaults)
  41.390 +       {:end-frame 700
  41.391 +        :motor-control
  41.392 +        (motor-control-program worm-muscle-labels do-all-the-things)
  41.393 +        :experiences experiences})))
  41.394 +    @experiences))
  41.395 +
  41.396 +(defn bin [digits]
  41.397 +  (fn [angles]
  41.398 +    (->> angles
  41.399 +         (flatten)
  41.400 +         (map (juxt #(Math/sin %) #(Math/cos %)))
  41.401 +         (flatten)
  41.402 +         (mapv #(Math/round (* % (Math/pow 10 (dec digits))))))))
  41.403 +
  41.404 +;; k-nearest neighbors with spatial binning. Only returns a result if
  41.405 +;; the proprioceptive data is within 10% of a previously recorded
  41.406 +;; result in all dimensions.
  41.407 +(defn gen-phi-scan [phi-space]
  41.408 +  (let [bin-keys (map bin [3 2 1])
  41.409 +        bin-maps
  41.410 +        (map (fn [bin-key]
  41.411 +               (group-by
  41.412 +                (comp bin-key :proprioception phi-space)
  41.413 +                (range (count phi-space)))) bin-keys)
  41.414 +        lookups (map (fn [bin-key bin-map]
  41.415 +                      (fn [proprio] (bin-map (bin-key proprio))))
  41.416 +                    bin-keys bin-maps)]
  41.417 +    (fn lookup [proprio-data]
  41.418 +      (set (some #(% proprio-data) lookups)))))
  41.419 +
  41.420 +
  41.421 +(defn longest-thread
  41.422 +  "Find the longest thread from phi-index-sets. The index sets should
  41.423 +   be ordered from most recent to least recent."
  41.424 +  [phi-index-sets]
  41.425 +  (loop [result '()
  41.426 +         [thread-bases & remaining :as phi-index-sets] phi-index-sets]
  41.427 +    (if (empty? phi-index-sets)
  41.428 +      (vec result)
  41.429 +      (let [threads
  41.430 +            (for [thread-base thread-bases]
  41.431 +              (loop [thread (list thread-base)
  41.432 +                     remaining remaining]
  41.433 +                (let [next-index (dec (first thread))]
  41.434 +                  (cond (empty? remaining) thread
  41.435 +                        (contains? (first remaining) next-index)
  41.436 +                        (recur
  41.437 +                         (cons next-index thread) (rest remaining))
  41.438 +                        :else thread))))
  41.439 +            longest-thread
  41.440 +            (reduce (fn [thread-a thread-b]
  41.441 +                      (if (> (count thread-a) (count thread-b))
  41.442 +                        thread-a thread-b))
  41.443 +                    '(nil)
  41.444 +                    threads)]
  41.445 +        (recur (concat longest-thread result)
  41.446 +               (drop (count longest-thread) phi-index-sets))))))
  41.447 +
  41.448 +
  41.449 +(defn init []
  41.450 +  (def phi-space (generate-phi-space))
  41.451 +  (def phi-scan (gen-phi-scan phi-space))
  41.452 +  )
  41.453 +
  41.454 +;; (defn infer-nils-dyl [s]
  41.455 +;;   (loop [closed ()
  41.456 +;;          open s
  41.457 +;;          anchor 0]
  41.458 +;;     (if-not (empty? open)
  41.459 +;;       (recur (conj closed
  41.460 +;;                    (or (peek open)
  41.461 +;;                        anchor))
  41.462 +;;              (pop open)
  41.463 +;;              (or (peek open) anchor))
  41.464 +;;        closed)))
  41.465 +      
  41.466 +;; (defn infer-nils [s]
  41.467 +;;   (for [i (range (count s))]
  41.468 +;;     (or (get s i)
  41.469 +;;         (some (comp not nil?) (vector:last-n (- (count s) i)))
  41.470 +;;         0)))
  41.471 +
  41.472 +
  41.473 +(defn infer-nils
  41.474 +  "Replace nils with the next available non-nil element in the
  41.475 +   sequence, or barring that, 0."
  41.476 +  [s]
  41.477 +  (loop [i (dec (count s))
  41.478 +         v (transient s)]
  41.479 +    (if (zero? i) (persistent! v)
  41.480 +        (if-let [cur (v i)]
  41.481 +          (if (get v (dec i) 0)
  41.482 +            (recur (dec i) v)
  41.483 +            (recur (dec i) (assoc! v (dec i) cur)))
  41.484 +          (recur i (assoc! v i 0))))))
  41.485 +
  41.486 +;; tests
  41.487 +
  41.488 +;;(infer-nils [1 nil 1 1]) [1 1 1 1]
  41.489 +;;(infer-nils [1 1 1 nil]) [1 1 1 0]
  41.490 +;;(infer-nils [nil 2 1 1]) [2 2 1 1]       
  41.491 +  
  41.492 +
  41.493 +(defn empathy-demonstration []
  41.494 +  (let [proprio (atom ())]
  41.495 +    (fn
  41.496 +      [experiences text]
  41.497 +      (let [phi-indices (phi-scan (:proprioception (peek experiences)))]
  41.498 +        (swap! proprio (partial cons phi-indices))
  41.499 +        (let [exp-thread (longest-thread (take 300 @proprio))
  41.500 +              empathy (mapv phi-space (infer-nils exp-thread))]
  41.501 +          (println-repl (vector:last-n exp-thread 22))
  41.502 +          (cond
  41.503 +           (grand-circle? empathy) (.setText text "Grand Circle")
  41.504 +           (curled? empathy)       (.setText text "Curled")
  41.505 +           (wiggling? empathy)     (.setText text "Wiggling")
  41.506 +           (resting? empathy)      (.setText text "Resting")
  41.507 +           :else                   (.setText text "Unknown")))))))
  41.508 +
  41.509 +(defn init-interactive []
  41.510 +  (def phi-space
  41.511 +    (let [experiences (atom [])]
  41.512 +      (run-world
  41.513 +       (apply-map 
  41.514 +        worm-world
  41.515 +        (merge
  41.516 +         (worm-world-defaults)
  41.517 +         {:experiences experiences})))
  41.518 +      @experiences))
  41.519 +  (def phi-scan (gen-phi-scan phi-space)))
  41.520 +
  41.521 +(defn empathy-experiment-1 [record]
  41.522 +  (.start (worm-world :experience-watch (empathy-demonstration)
  41.523 +                      :record record :worm worm*)))
  41.524 +
  41.525 +
  41.526 +(def worm-action-label
  41.527 +  (juxt grand-circle? curled? wiggling?))
  41.528 +
  41.529 +(defn compare-empathy-with-baseline [accuracy]
  41.530 +  (let [proprio (atom ())]
  41.531 +    (fn
  41.532 +      [experiences text]
  41.533 +      (let [phi-indices (phi-scan (:proprioception (peek experiences)))]
  41.534 +        (swap! proprio (partial cons phi-indices))
  41.535 +        (let [exp-thread (longest-thread (take 300 @proprio))
  41.536 +              empathy (mapv phi-space (infer-nils exp-thread))
  41.537 +              experience-matches-empathy
  41.538 +              (= (worm-action-label experiences)
  41.539 +                 (worm-action-label empathy))]
  41.540 +          (cond
  41.541 +           (grand-circle? empathy) (.setText text "Grand Circle")
  41.542 +           (curled? empathy)       (.setText text "Curled")
  41.543 +           (wiggling? empathy)     (.setText text "Wiggling")
  41.544 +           (resting? empathy)      (.setText text "Resting")
  41.545 +           :else                   (.setText text "Unknown"))
  41.546 +
  41.547 +          (println-repl experience-matches-empathy)
  41.548 +          (swap! accuracy #(conj % experience-matches-empathy)))))))
  41.549 +              
  41.550 +(defn accuracy [v]
  41.551 +  (float (/ (count (filter true? v)) (count v))))
  41.552 +
  41.553 +(defn test-empathy-accuracy []
  41.554 +  (let [res (atom [])]
  41.555 +    (run-world
  41.556 +     (worm-world :experience-watch
  41.557 +                 (compare-empathy-with-baseline res)
  41.558 +                 :worm worm*))
  41.559 +    (accuracy @res)))
  41.560 +
  41.561 +
  41.562 +
  41.563 +
  41.564 +
  41.565 +
    42.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    42.2 +++ b/thesis/Makefile	Thu Mar 27 17:57:01 2014 -0400
    42.3 @@ -0,0 +1,16 @@
    42.4 +#INVOKE_LATEX = pdflatex -shell-escape thesis.tex;
    42.5 +THESIS_NAME  = rlm-cortex-meng
    42.6 +INVOKE_LATEX = texi2dvi --shell-escape --pdf -V --batch $(THESIS_NAME).tex;
    42.7 +#INVOKE_LATEX = texi2dvi --shell-escape --pdf -V  $(THESIS_NAME).tex;
    42.8 +
    42.9 +all:
   42.10 +	./weave-thesis.sh cortex
   42.11 +	rsync -avz --delete --exclude "video" \
   42.12 +		/home/r/proj/cortex/thesis "r@aurellem.org:~"
   42.13 +	ssh r@aurellem.org "cd ~/thesis; $(INVOKE_LATEX)"
   42.14 +	scp "r@aurellem.org:/home/r/thesis/$(THESIS_NAME).pdf" .
   42.15 +	rm cortex.tex abstract.tex user-guide.tex
   42.16 +
   42.17 +
   42.18 +
   42.19 +
    43.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    43.2 +++ b/thesis/abstract.org	Thu Mar 27 17:57:01 2014 -0400
    43.3 @@ -0,0 +1,23 @@
    43.4 +Here I demonstrate the power of using embodied artificial intelligence
    43.5 +to attack the /action recognition/ problem, which is the challenge of
    43.6 +recognizing actions performed by a creature given limited data about
    43.7 +the creature's actions, such as a video recording. I solve this
    43.8 +problem in the case of a worm-like creature performing actions such as
    43.9 +curling and wiggling.
   43.10 +
   43.11 +To attack the action recognition problem, I developed a computational
   43.12 +model of empathy (=EMPATH=) which allows me to recognize actions using
   43.13 +simple, embodied representations of actions (which require rich
   43.14 +sensory data), even when that sensory data is not actually
   43.15 +available. The missing sense data is ``imagined'' by the system by
   43.16 +combining previous experiences gained from unsupervised free play.
   43.17 +
   43.18 +In order to build this empathic, action-recognizing system, I created
   43.19 +a program called =CORTEX=, which is a complete platform for embodied
   43.20 +AI research. It provides multiple senses for simulated creatures,
   43.21 +including vision, touch, proprioception, muscle tension, and
   43.22 +hearing. Each of these senses provides a wealth of parameters that are
   43.23 +biologically inspired. =CORTEX= is able to simulate any number of
   43.24 +creatures and senses, and provides facilities for easily modeling and
   43.25 +creating new creatures. As a research platform it is more complete
   43.26 +than any other system currently available.
    44.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    44.2 +++ b/thesis/aux/mitthesis/README.bibliography	Thu Mar 27 17:57:01 2014 -0400
    44.3 @@ -0,0 +1,28 @@
    44.4 +This file gives an overview on what you will need to do to create a
    44.5 +bibliographic database for references, as well as create the actual
    44.6 +bibliography for your thesis.
    44.7 +
    44.8 +You should not need to touch the file biblio.tex.  This merely tells
    44.9 +latex to look for the file with the bibliographic info.  The file you
   44.10 +want to edit is main.bib.  For each entry, use the appropriate style
   44.11 +as designated in the file.
   44.12 +
   44.13 +Citing your references:
   44.14 +
   44.15 +When you cite a reference, you need to use the ``key'' you declare in
   44.16 +main.bib for the entry.  No one ever sees the keys, but you'll want to
   44.17 +use something you will easily remember.  For instance, if you had an
   44.18 +entry with:
   44.19 +
   44.20 +	key=geer1
   44.21 +
   44.22 +Then you would use \cite{geer1} to reference it within your thesis.
   44.23 +
   44.24 +NOTE: You cannot include references in your bibliography that are
   44.25 +never cited in your paper by default. If you need to do this, create a
   44.26 +key for the entry and at the end of your thesis include the line:
   44.27 +
   44.28 +\nocite{key}
   44.29 +
   44.30 +This should be done for every entry which is not explicitly cited.
   44.31 +
    45.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    45.2 +++ b/thesis/aux/mitthesis/README.main	Thu Mar 27 17:57:01 2014 -0400
    45.3 @@ -0,0 +1,54 @@
    45.4 +This file explains the options available to you for editing the file
    45.5 +main.tex.
    45.6 +
    45.7 +The commands in this file allow you to specify options such as
    45.8 +spacing, double-sided printing, a draft copy, etc.   By default, 12pt
    45.9 +and lgrind are included; lgrind is the 2e style for including code in
   45.10 +your thesis.
   45.11 +
   45.12 +\documentclass[12pt]{mitthesis}
   45.13 +\usepackage{lgrind}
   45.14 +\pagestyle{plain}
   45.15 +
   45.16 +You can add options in the documentclass line as follows:
   45.17 +
   45.18 +	o  singlespace
   45.19 +
   45.20 +	\documentclass[12pt,singlespace]{mitthesis}
   45.21 +	
   45.22 +	o  twoside
   45.23 +
   45.24 +	\documentclass[12pt,twoside]{mitthesis}
   45.25 +
   45.26 +	o  draft   (make sure to change the pagestyle to drafthead as
   45.27 +			well)
   45.28 +
   45.29 +	\documentclass[12pt,draft]{mitthesis}
   45.30 +	\usepackage{lgrind}
   45.31 +	\pagestyle{drafthead}
   45.32 +
   45.33 +	o vi   (for course vi and course viii theses)
   45.34 +
   45.35 +	\documentclass[12pt,vi]{mitthesis}
   45.36 +
   45.37 +Any options you would use for report.sty will work here as well.
   45.38 +
   45.39 +
   45.40 +You should not need to change the first three lines and last two lines
   45.41 +below.  Be sure to include an \include command for each file you are
   45.42 +including in your thesis.
   45.43 +  
   45.44 +\include{cover}
   45.45 +\pagestyle{plain}
   45.46 +\include{contents}
   45.47 +\include{chap1}
   45.48 +\include{chap2}
   45.49 +\appendix
   45.50 +\include{appa}
   45.51 +\include{appb}
   45.52 +\include{biblio}
   45.53 +\end{document}
   45.54 +
   45.55 +Comment: to include appendices use a single \appendix command followed by
   45.56 +a number of \include{} commands as many files as needed, each of which
   45.57 +should contain a \chapter{} command for the appendix title.
    46.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    46.2 +++ b/thesis/aux/mitthesis/README.text	Thu Mar 27 17:57:01 2014 -0400
    46.3 @@ -0,0 +1,34 @@
    46.4 +This file includes the basic commands you will need to use within each
    46.5 +chapter of your thesis.
    46.6 +
    46.7 +The file chap1.tex is a sample first chapter.  To get started, you may
    46.8 +just want to replace the text and commands in that file as needed.  In
    46.9 +general though, for each new chapter you want to do the following:
   46.10 +
   46.11 +	o Make sure the name has the extension .tex .  Otherwise, you
   46.12 +	can call it anything you want.  For ease of use, all the
   46.13 +	examples use chap1, chap2, etc.
   46.14 +
   46.15 +	o Add a line to the file main.tex that reads:
   46.16 +
   46.17 +	\include{yourfilename}
   46.18 +
   46.19 +	This should not include the .tex extension, because latex
   46.20 +	assumes that is there.
   46.21 +
   46.22 +Basic syntax:
   46.23 +
   46.24 +	o The first line of each chapter should be:
   46.25 +
   46.26 +	\chapter{Chapter Title}
   46.27 +
   46.28 +	o To start a new section (labeled chap#.sec# -- as in 1.1,
   46.29 +	1.2, etc):
   46.30 +
   46.31 +	\section{Section Heading}
   46.32 +
   46.33 +	You can also include subsections:
   46.34 +
   46.35 +	\subsection{Heading}
   46.36 +
   46.37 +
    47.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    47.2 +++ b/thesis/aux/mitthesis/abstract.tex	Thu Mar 27 17:57:01 2014 -0400
    47.3 @@ -0,0 +1,22 @@
    47.4 +% $Log: abstract.tex,v $
    47.5 +% Revision 1.1  93/05/14  14:56:25  starflt
    47.6 +% Initial revision
    47.7 +% 
    47.8 +% Revision 1.1  90/05/04  10:41:01  lwvanels
    47.9 +% Initial revision
   47.10 +% 
   47.11 +%
   47.12 +%% The text of your abstract and nothing else (other than comments) goes here.
   47.13 +%% It will be single-spaced and the rest of the text that is supposed to go on
   47.14 +%% the abstract page will be generated by the abstractpage environment.  This
   47.15 +%% file should be \input (not \include 'd) from cover.tex.
   47.16 +In this thesis, I designed and implemented a compiler which performs
   47.17 +optimizations that reduce the number of low-level floating point operations
   47.18 +necessary for a specific task; this involves the optimization of chains of
   47.19 +floating point operations as well as the implementation of a ``fixed'' point
   47.20 +data type that allows some floating point operations to be simulated with integer
   47.21 +arithmetic.  The source language of the compiler is a subset of C, and the
   47.22 +destination language is assembly language for a micro-floating point CPU.  An
   47.23 +instruction-level simulator of the CPU was written to allow testing of the
   47.24 +code.  A series of test pieces of codes was compiled, both with and without
   47.25 +optimization, to determine how effective these optimizations were.
    48.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    48.2 +++ b/thesis/aux/mitthesis/biblio.tex	Thu Mar 27 17:57:01 2014 -0400
    48.3 @@ -0,0 +1,8 @@
    48.4 +%% This defines the bibliography file (main.bib) and the bibliography style.
    48.5 +%% If you want to create a bibliography file by hand, change the contents of
    48.6 +%% this file to a `thebibliography' environment.  For more information 
    48.7 +%% see section 4.3 of the LaTeX manual.
    48.8 +\begin{singlespace}
    48.9 +\bibliography{main}
   48.10 +\bibliographystyle{plain}
   48.11 +\end{singlespace}
    49.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    49.2 +++ b/thesis/aux/mitthesis/chap1.tex	Thu Mar 27 17:57:01 2014 -0400
    49.3 @@ -0,0 +1,250 @@
    49.4 +%% This is an example first chapter.  You should put chapter/appendix that you
    49.5 +%% write into a separate file, and add a line \include{yourfilename} to
    49.6 +%% main.tex, where `yourfilename.tex' is the name of the chapter/appendix file.
    49.7 +%% You can process specific files by typing their names in at the 
    49.8 +%% \files=
    49.9 +%% prompt when you run the file main.tex through LaTeX.
   49.10 +\chapter{Introduction}
   49.11 +
   49.12 +Micro-optimization is a technique to reduce the overall operation count of
   49.13 +floating point operations.  In a standard floating point unit, floating
   49.14 +point operations are fairly high level, such as ``multiply'' and ``add'';
   49.15 +in a micro floating point unit ($\mu$FPU), these have been broken down into
   49.16 +their constituent low-level floating point operations on the mantissas and
   49.17 +exponents of the floating point numbers.
   49.18 +
   49.19 +Chapter two describes the architecture of the $\mu$FPU unit, and the
   49.20 +motivations for the design decisions made.
   49.21 +
   49.22 +Chapter three describes the design of the compiler, as well as how the
   49.23 +optimizations discussed in section~\ref{ch1:opts} were implemented.
   49.24 +
   49.25 +Chapter four describes the purpose of test code that was compiled, and which
   49.26 +statistics were gathered by running it through the simulator.  The purpose
   49.27 +is to measure what effect the micro-optimizations had, compared to
   49.28 +unoptimized code.  Possible future expansions to the project are also
   49.29 +discussed.
   49.30 +
   49.31 +\section{Motivations for micro-optimization}
   49.32 +
   49.33 +The idea of micro-optimization is motivated by the recent trends in computer
   49.34 +architecture towards low-level parallelism and small, pipelineable
   49.35 +instruction sets \cite{patterson:risc,rad83}.  By getting rid of more
   49.36 +complex instructions and concentrating on optimizing frequently used
   49.37 +instructions, substantial increases in performance were realized.
   49.38 +
   49.39 +Another important motivation was the trend towards placing more of the
   49.40 +burden of performance on the compiler.  Many of the new architectures depend
   49.41 +on an intelligent, optimizing compiler in order to realize anywhere near
   49.42 +their peak performance
   49.43 +\cite{ellis:bulldog,pet87,coutant:precision-compilers}.  In these cases, the
   49.44 +compiler not only is responsible for faithfully generating native code to
   49.45 +match the source language, but also must be aware of instruction latencies,
   49.46 +delayed branches, pipeline stages, and a multitude of other factors in order
   49.47 +to generate fast code \cite{gib86}.
   49.48 +
   49.49 +Taking these ideas one step further, it seems that the floating point
   49.50 +operations that are normally single, large instructions can be further broken
   49.51 +down into smaller, simpler, faster instructions, with more control in the
   49.52 +compiler and less in the hardware.  This is the idea behind a
   49.53 +micro-optimizing FPU; break the floating point instructions down into their
   49.54 +basic components and use a small, fast implementation, with a large part of
   49.55 +the burden of hardware allocation and optimization shifted towards
   49.56 +compile-time.
   49.57 +
   49.58 +Along with the hardware speedups possible by using a $\mu$FPU, there are
   49.59 +also optimizations that the compiler can perform on the code that is
   49.60 +generated.  In a normal sequence of floating point operations, there are
   49.61 +many hidden redundancies that can be eliminated by allowing the compiler to
   49.62 +control the floating point operations down to their lowest level.  These
   49.63 +optimizations are described in detail in section~\ref{ch1:opts}.
   49.64 +
   49.65 +\section{Description of micro-optimization}\label{ch1:opts}
   49.66 +
   49.67 +In order to perform a sequence of floating point operations, a normal FPU
   49.68 +performs many redundant internal shifts and normalizations in the process of
   49.69 +performing a sequence of operations.  However, if a compiler can
   49.70 +decompose the floating point operations it needs down to the lowest level,
   49.71 +it then can optimize away many of these redundant operations.  
   49.72 +
   49.73 +If there is some additional hardware support specifically for
   49.74 +micro-optimization, there are additional optimizations that can be
   49.75 +performed.  This hardware support entails extra ``guard bits'' on the
   49.76 +standard floating point formats, to allow several unnormalized operations to
   49.77 +be performed in a row without the loss of information\footnote{A description of
   49.78 +the floating point format used is shown in figures~\ref{exponent-format}
   49.79 +and~\ref{mantissa-format}.}.  A discussion of the mathematics behind
   49.80 +unnormalized arithmetic is in appendix~\ref{unnorm-math}.
   49.81 +
   49.82 +The optimizations that the compiler can perform fall into several categories:
   49.83 +
   49.84 +\subsection{Post Multiply Normalization}
   49.85 +
   49.86 +When more than two multiplications are performed in a row, the intermediate
   49.87 +normalization of the results between multiplications can be eliminated.
   49.88 +This is because with each multiplication, the mantissa can become
   49.89 +denormalized by at most one bit.  If there are guard bits on the mantissas
   49.90 +to prevent bits from ``falling off'' the end during multiplications, the
   49.91 +normalization can be postponed until after a sequence of several
   49.92 +multiplies\footnote{Using unnormalized numbers for math is not a new idea; a
   49.93 +good example of it is the Control Data CDC 6600, designed by Seymour Cray.
   49.94 +\cite{thornton:cdc6600} The CDC 6600 had all of its instructions performing
   49.95 +unnormalized arithmetic, with a separate {\tt NORMALIZE} instruction.}.
   49.96 +
   49.97 +% This is an example of how you would use tgrind to include an example
   49.98 +% of source code; it is commented out in this template since the code
   49.99 +% example file does not exist.  To use it, you need to remove the '%' on the
  49.100 +% beginning of the line, and insert your own information in the call.
  49.101 +%
  49.102 +%\tagrind[htbp]{code/pmn.s.tex}{Post Multiply Normalization}{opt:pmn}
  49.103 +
  49.104 +As you can see, the intermediate results can be multiplied together, with no
  49.105 +need for intermediate normalizations due to the guard bit.  It is only at
  49.106 +the end of the operation that the normalization must be performed, in order
  49.107 +to get it into a format suitable for storing in memory\footnote{Note that
  49.108 +for purposes of clarity, the pipeline delays were considered to be 0, and
  49.109 +the branches were not delayed.}.
  49.110 +
  49.111 +\subsection{Block Exponent}
  49.112 +
  49.113 +In an unoptimized sequence of additions, the sequence of operations is as
  49.114 +follows for each pair of numbers ($m_1$,$e_1$) and ($m_2$,$e_2$).
  49.115 +\begin{enumerate}
  49.116 +  \item Compare $e_1$ and $e_2$.
  49.117 +  \item Shift the mantissa associated with the smaller exponent $|e_1-e_2|$
  49.118 +        places to the right.
  49.119 +  \item Add $m_1$ and $m_2$.
  49.120 +  \item Find the first one in the resulting mantissa.
  49.121 +  \item Shift the resulting mantissa so that it is normalized.
  49.122 +  \item Adjust the exponent accordingly.
  49.123 +\end{enumerate}
  49.124 +
  49.125 +Out of 6 steps, only one is the actual addition, and the rest are involved
  49.126 +in aligning the mantissas prior to the add, and then normalizing the result
  49.127 +afterward.  In the block exponent optimization, the largest mantissa is
  49.128 +found to start with, and all the mantissas are shifted before any additions
  49.129 +take place.  Once the mantissas have been shifted, the additions can take
  49.130 +place one after another\footnote{This requires that for n consecutive
  49.131 +additions, there are $\log_{2}n$ high guard bits to prevent overflow.  In
  49.132 +the $\mu$FPU, there are 3 guard bits, making up to 8 consecutive additions
  49.133 +possible.}.  An example of the Block Exponent optimization on the expression
  49.134 +X = A + B + C is given in figure~\ref{opt:be}.
  49.135 +
  49.136 +% This is an example of how you would use tgrind to include an example
  49.137 +% of source code; it is commented out in this template since the code
  49.138 +% example file does not exist.  To use it, you need to remove the '%' on the
  49.139 +% beginning of the line, and insert your own information in the call.
  49.140 +%
  49.141 +%\tgrind[htbp]{code/be.s.tex}{Block Exponent}{opt:be}
  49.142 +
  49.143 +\section{Integer optimizations}
  49.144 +
  49.145 +As well as the floating point optimizations described above, there are
  49.146 +also integer optimizations that can be used in the $\mu$FPU.  In concert
  49.147 +with the floating point optimizations, these can provide a significant
  49.148 +speedup.  
  49.149 +
  49.150 +\subsection{Conversion to fixed point}
  49.151 +
  49.152 +Integer operations are much faster than floating point operations; if it is
  49.153 +possible to replace floating point operations with fixed point operations,
  49.154 +this would provide a significant increase in speed.
  49.155 +
  49.156 +This conversion can either take place automatically or be based on a
  49.157 +specific request from the programmer.  To do this automatically, the
  49.158 +compiler must either be very smart, or play fast and loose with the accuracy
  49.159 +and precision of the programmer's variables.  To be ``smart'', the computer
  49.160 +must track the ranges of all the floating point variables through the
  49.161 +program, and then see if there are any potential candidates for conversion
  49.162 +to fixed point.  This technique is discussed further in
  49.163 +section~\ref{range-tracking}, where it was implemented.
  49.164 +
  49.165 +The other way to do this is to rely on specific hints from the programmer
  49.166 +that a certain value will only assume a specific range, and that only a
  49.167 +specific precision is desired.  This is somewhat more taxing on the
  49.168 +programmer, in that he has to know the ranges that his values will take at
  49.169 +declaration time (something normally abstracted away), but it does provide
  49.170 +the opportunity for fine-tuning already working code.
  49.171 +
  49.172 +Potential applications of this would be simulation programs, where the
  49.173 +variable represents some physical quantity; the constraints of the physical
  49.174 +system may provide bounds on the range the variable can take.
  49.175 +\subsection{Small Constant Multiplications}
  49.176 +
  49.177 +One other class of optimizations that can be done is to replace
  49.178 +multiplications by small integer constants into some combination of
  49.179 +additions and shifts.  Addition and shifting can be significantly faster
  49.180 +than multiplication.  This is done by using some combination of
  49.181 +\begin{eqnarray*}
  49.182 +a_i & = & a_j + a_k \\
  49.183 +a_i & = & 2a_j + a_k \\
  49.184 +a_i & = & 4a_j + a_k \\
  49.185 +a_i & = & 8a_j + a_k \\
  49.186 +a_i & = & a_j - a_k \\
  49.187 +a_i & = & a_j \ll m \quad \mbox{(shift)}
  49.188 +\end{eqnarray*}
  49.189 +instead of the multiplication.  For example, to multiply $s$ by 10 and store
  49.190 +the result in $r$, you could use:
  49.191 +\begin{eqnarray*}
  49.192 +r & = & 4s + s\\
  49.193 +r & = & r + r
  49.194 +\end{eqnarray*}
  49.195 +Or by 59:
  49.196 +\begin{eqnarray*}
  49.197 +t & = & 2s + s \\
  49.198 +r & = & 2t + s \\
  49.199 +r & = & 8r + t
  49.200 +\end{eqnarray*}
  49.201 +Similar combinations can be found for almost all of the smaller
  49.202 +integers\footnote{This optimization is only an ``optimization'', of course,
  49.203 +when the amount of time spent on the shifts and adds is less than the time
  49.204 +that would be spent doing the multiplication.  Since the time costs of these
  49.205 +operations are known to the compiler in order for it to do scheduling, it is
  49.206 +easy for the compiler to determine when this optimization is worth using.}.
  49.207 +\cite{magenheimer:precision}
  49.208 +
  49.209 +\section{Other optimizations}
  49.210 +
  49.211 +\subsection{Low-level parallelism}
  49.212 +
  49.213 +The current trend is towards duplicating hardware at the lowest level to
  49.214 +provide parallelism\footnote{This can be seen in the i860; floating point
  49.215 +additions and multiplications can proceed at the same time, and the RISC
  49.216 +core can be moving data in and out of the floating point registers and providing
  49.217 +flow control at the same time the floating point units are active. \cite{byte:i860}}
  49.218 +
  49.219 +Conceptually, it is easy to take advantage of low-level parallelism in the
  49.220 +instruction stream by simply adding more functional units to the $\mu$FPU,
  49.221 +widening the instruction word to control them, and then scheduling as many
  49.222 +operations to take place at one time as possible.
  49.223 +
  49.224 +However, simply adding more functional units can only be done so many times;
  49.225 +there is only a limited amount of parallelism directly available in the
  49.226 +instruction stream, and without it, much of the extra resources will go to
  49.227 +waste.  One process used to make more instructions potentially schedulable
  49.228 +at any given time is ``trace scheduling''.  This technique originated in the
  49.229 +Bulldog compiler for the original VLIW machine, the ELI-512.
  49.230 +\cite{ellis:bulldog,colwell:vliw}  In trace scheduling, code can be
  49.231 +scheduled through many basic blocks at one time, following a single
  49.232 +potential ``trace'' of program execution.  In this way, instructions that
  49.233 +{\em might\/} be executed depending on a conditional branch further down in
  49.234 +the instruction stream are scheduled, allowing an increase in the potential
  49.235 +parallelism.  To account for the cases where the expected branch wasn't
  49.236 +taken, correction code is inserted after the branches to undo the effects of
  49.237 +any prematurely executed instructions.
  49.238 +
  49.239 +\subsection{Pipeline optimizations}
  49.240 +
  49.241 +In addition to having operations going on in parallel across functional
  49.242 +units, it is also typical to have several operations in various stages of
  49.243 +completion in each unit.  This pipelining allows the throughput of the
  49.244 +functional units to be increased, with no increase in latency.
  49.245 +
  49.246 +There are several ways pipelined operations can be optimized.  On the
  49.247 +hardware side, support can be added to allow data to be recirculated back
  49.248 +into the beginning of the pipeline from the end, saving a trip through the
  49.249 +registers.  On the software side, the compiler can utilize several tricks to
  49.250 +try to fill up as many of the pipeline delay slots as possible, as
  49.251 +described by Gibbons. \cite{gib86}
  49.252 +
  49.253 +
    50.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    50.2 +++ b/thesis/aux/mitthesis/contents.tex	Thu Mar 27 17:57:01 2014 -0400
    50.3 @@ -0,0 +1,11 @@
    50.4 +  % -*- Mode:TeX -*-
    50.5 +%% This file simply contains the commands that actually generate the table of
    50.6 +%% contents and lists of figures and tables.  You can omit any or all of
    50.7 +%% these files by simply taking out the appropriate command.  For more
    50.8 +%% information on these files, see appendix C.3.3 of the LaTeX manual. 
    50.9 +\tableofcontents
   50.10 +\newpage
   50.11 +\listoffigures
   50.12 +\newpage
   50.13 +\listoftables
   50.14 +
    51.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    51.2 +++ b/thesis/aux/mitthesis/cover.tex	Thu Mar 27 17:57:01 2014 -0400
    51.3 @@ -0,0 +1,132 @@
    51.4 +% -*-latex-*-
    51.5 +% 
    51.6 +% For questions, comments, concerns or complaints:
    51.7 +% thesis@mit.edu
    51.8 +% 
    51.9 +%
   51.10 +% $Log: cover.tex,v $
   51.11 +% Revision 1.8  2008/05/13 15:02:15  jdreed
   51.12 +% Degree month is June, not May.  Added note about prevdegrees.
   51.13 +% Arthur Smith's title updated
   51.14 +%
   51.15 +% Revision 1.7  2001/02/08 18:53:16  boojum
   51.16 +% changed some \newpages to \cleardoublepages
   51.17 +%
   51.18 +% Revision 1.6  1999/10/21 14:49:31  boojum
   51.19 +% changed comment referring to documentstyle
   51.20 +%
   51.21 +% Revision 1.5  1999/10/21 14:39:04  boojum
   51.22 +% *** empty log message ***
   51.23 +%
   51.24 +% Revision 1.4  1997/04/18  17:54:10  othomas
   51.25 +% added page numbers on abstract and cover, and made 1 abstract
   51.26 +% page the default rather than 2.  (anne hunter tells me this
   51.27 +% is the new institute standard.)
   51.28 +%
   51.29 +% Revision 1.4  1997/04/18  17:54:10  othomas
   51.30 +% added page numbers on abstract and cover, and made 1 abstract
   51.31 +% page the default rather than 2.  (anne hunter tells me this
   51.32 +% is the new institute standard.)
   51.33 +%
   51.34 +% Revision 1.3  93/05/17  17:06:29  starflt
   51.35 +% Added acknowledgements section (suggested by tompalka)
   51.36 +% 
   51.37 +% Revision 1.2  92/04/22  13:13:13  epeisach
   51.38 +% Fixes for 1991 course 6 requirements
   51.39 +% Phrase "and to grant others the right to do so" has been added to 
   51.40 +% permission clause
   51.41 +% Second copy of abstract is not counted as separate pages so numbering works
   51.42 +% out
   51.43 +% 
   51.44 +% Revision 1.1  92/04/22  13:08:20  epeisach
   51.45 +
   51.46 +% NOTE:
   51.47 +% These templates make an effort to conform to the MIT Thesis specifications,
   51.48 +% however the specifications can change.  We recommend that you verify the
   51.49 +% layout of your title page with your thesis advisor and/or the MIT 
   51.50 +% Libraries before printing your final copy.
   51.51 +\title{An Optimizing Compiler for Low-Level Floating Point Operations}
   51.52 +
   51.53 +\author{Robert McIntyre}
   51.54 +% If you wish to list your previous degrees on the cover page, use the 
   51.55 +% previous degrees command:
   51.56 +%       \prevdegrees{A.A., Harvard University (1985)}
   51.57 +% You can use the \\ command to list multiple previous degrees
   51.58 +%       \prevdegrees{B.S., University of California (1978) \\
   51.59 +%                    S.M., Massachusetts Institute of Technology (1981)}
   51.60 +\department{Department of Electrical Engineering and Computer Science}
   51.61 +
   51.62 +% If the thesis is for two degrees simultaneously, list them both
   51.63 +% separated by \and like this:
   51.64 +% \degree{Doctor of Philosophy \and Master of Science}
   51.65 +\degree{Bachelor of Science in Computer Science and Engineering}
   51.66 +
   51.67 +% As of the 2007-08 academic year, valid degree months are September, 
   51.68 +% February, or June.  The default is June.
   51.69 +\degreemonth{June}
   51.70 +\degreeyear{1990}
   51.71 +\thesisdate{May 18, 1990}
   51.72 +
   51.73 +%% By default, the thesis will be copyrighted to MIT.  If you need to copyright
   51.74 +%% the thesis to yourself, just specify the `vi' documentclass option.  If for
   51.75 +%% some reason you want to exactly specify the copyright notice text, you can
   51.76 +%% use the \copyrightnoticetext command.  
   51.77 +%\copyrightnoticetext{\copyright IBM, 1990.  Do not open till Xmas.}
   51.78 +
   51.79 +% If there is more than one supervisor, use the \supervisor command
   51.80 +% once for each.
   51.81 +\supervisor{William J. Dally}{Associate Professor}
   51.82 +
   51.83 +% This is the department committee chairman, not the thesis committee
   51.84 +% chairman.  You should replace this with your Department's Committee
   51.85 +% Chairman.
   51.86 +\chairman{Arthur C. Smith}{Chairman, Department Committee on Graduate Theses}
   51.87 +
   51.88 +% Make the titlepage based on the above information.  If you need
   51.89 +% something special and can't use the standard form, you can specify
   51.90 +% the exact text of the titlepage yourself.  Put it in a titlepage
   51.91 +% environment and leave blank lines where you want vertical space.
   51.92 +% The spaces will be adjusted to fill the entire page.  The dotted
   51.93 +% lines for the signatures are made with the \signature command.
   51.94 +\maketitle
   51.95 +
   51.96 +% The abstractpage environment sets up everything on the page except
   51.97 +% the text itself.  The title and other header material are put at the
   51.98 +% top of the page, and the supervisors are listed at the bottom.  A
   51.99 +% new page is begun both before and after.  Of course, an abstract may
  51.100 +% be more than one page itself.  If you need more control over the
  51.101 +% format of the page, you can use the abstract environment, which puts
  51.102 +% the word "Abstract" at the beginning and single spaces its text.
  51.103 +
  51.104 +%% You can either \input (*not* \include) your abstract file, or you can put
  51.105 +%% the text of the abstract directly between the \begin{abstractpage} and
  51.106 +%% \end{abstractpage} commands.
  51.107 +
  51.108 +% First copy: start a new page, and save the page number.
  51.109 +\cleardoublepage
  51.110 +% Uncomment the next line if you do NOT want a page number on your
  51.111 +% abstract and acknowledgments pages.
  51.112 +\pagestyle{empty}
  51.113 +\setcounter{savepage}{\thepage}
  51.114 +\begin{abstractpage}
  51.115 +\input{abstract}
  51.116 +\end{abstractpage}
  51.117 +
  51.118 +% Additional copy: start a new page, and reset the page number.  This way,
  51.119 +% the second copy of the abstract is not counted as separate pages.
  51.120 +% Uncomment the next 6 lines if you need two copies of the abstract
  51.121 +% page.
  51.122 +% \setcounter{page}{\thesavepage}
  51.123 +% \begin{abstractpage}
  51.124 +% \input{abstract}
  51.125 +% \end{abstractpage}
  51.126 +
  51.127 +\cleardoublepage
  51.128 +
  51.129 +\section*{Acknowledgments}
  51.130 +
  51.131 +This is the acknowledgements section.  You should replace this with your
  51.132 +own acknowledgements.
  51.133 +
  51.134 +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
  51.135 +% -*-latex-*-
    52.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    52.2 +++ b/thesis/aux/mitthesis/main.bib	Thu Mar 27 17:57:01 2014 -0400
    52.3 @@ -0,0 +1,356 @@
    52.4 +@preamble{ "\newcommand{\noopsort}[1]{} "
    52.5 +	# "\newcommand{\printfirst}[2]{#1} "
    52.6 +	# "\newcommand{\singleletter}[1]{#1} "
    52.7 +	# "\newcommand{\switchargs}[2]{#2#1} " }
    52.8 +
    52.9 +@ARTICLE{article-minimal,
   52.10 +   author = {L[eslie] A. Aamport},
   52.11 +   title = {The Gnats and Gnus Document Preparation System},
   52.12 +   journal = {\mbox{G-Animal's} Journal},
   52.13 +   year = 1986,
   52.14 +}
   52.15 +
   52.16 +@ARTICLE{article-full,
   52.17 +   author = {L[eslie] A. Aamport},
   52.18 +   title = {The Gnats and Gnus Document Preparation System},
   52.19 +   journal = {\mbox{G-Animal's} Journal},
   52.20 +   year = 1986,
   52.21 +   volume = 41,
   52.22 +   number = 7,
   52.23 +   pages = "73+",
   52.24 +   month = jul,
   52.25 +   note = "This is a full ARTICLE entry",
   52.26 +}
   52.27 +
   52.28 +The KEY field is here to override the KEY field in the journal being
   52.29 +cross referenced (so is the NOTE field, in addition to its imparting
   52.30 +information).
   52.31 +
   52.32 +@ARTICLE{article-crossref,
   52.33 +   crossref = {WHOLE-JOURNAL},
   52.34 +   key = "",
   52.35 +   author = {L[eslie] A. Aamport},
   52.36 +   title = {The Gnats and Gnus Document Preparation System},
   52.37 +   pages = "73+",
   52.38 +   note = "This is a cross-referencing ARTICLE entry",
   52.39 +}
   52.40 +
   52.41 +@ARTICLE{whole-journal,
   52.42 +   key = "GAJ",
   52.43 +   journal = {\mbox{G-Animal's} Journal},
   52.44 +   year = 1986,
   52.45 +   volume = 41,
   52.46 +   number = 7,
   52.47 +   month = jul,
   52.48 +   note = {The entire issue is devoted to gnats and gnus
   52.49 +		(this entry is a cross-referenced ARTICLE (journal))},
   52.50 +}
   52.51 +
   52.52 +@INBOOK{inbook-minimal,
   52.53 +   author = "Donald E. Knuth",
   52.54 +   title = "Fundamental Algorithms",
   52.55 +   publisher = "Addison-Wesley",
   52.56 +   year = "{\noopsort{1973b}}1973",
   52.57 +   chapter = "1.2",
   52.58 +}
   52.59 +
   52.60 +@INBOOK{inbook-full,
   52.61 +   author = "Donald E. Knuth",
   52.62 +   title = "Fundamental Algorithms",
   52.63 +   volume = 1,
   52.64 +   series = "The Art of Computer Programming",
   52.65 +   publisher = "Addison-Wesley",
   52.66 +   address = "Reading, Massachusetts",
   52.67 +   edition = "Second",
   52.68 +   month = "10~" # jan,
   52.69 +   year = "{\noopsort{1973b}}1973",
   52.70 +   type = "Section",
   52.71 +   chapter = "1.2",
   52.72 +   pages = "10--119",
   52.73 +   note = "This is a full INBOOK entry",
   52.74 +}
   52.75 +
   52.76 +@INBOOK{inbook-crossref,
   52.77 +   crossref = "whole-set",
   52.78 +   title = "Fundamental Algorithms",
   52.79 +   volume = 1,
   52.80 +   series = "The Art of Computer Programming",
   52.81 +   edition = "Second",
   52.82 +   year = "{\noopsort{1973b}}1973",
   52.83 +   type = "Section",
   52.84 +   chapter = "1.2",
   52.85 +   note = "This is a cross-referencing INBOOK entry",
   52.86 +}
   52.87 +
   52.88 +@BOOK{book-minimal,
   52.89 +   author = "Donald E. Knuth",
   52.90 +   title = "Seminumerical Algorithms",
   52.91 +   publisher = "Addison-Wesley",
   52.92 +   year = "{\noopsort{1973c}}1981",
   52.93 +}
   52.94 +
   52.95 +@BOOK{book-full,
   52.96 +   author = "Donald E. Knuth",
   52.97 +   title = "Seminumerical Algorithms",
   52.98 +   volume = 2,
   52.99 +   series = "The Art of Computer Programming",
  52.100 +   publisher = "Addison-Wesley",
  52.101 +   address = "Reading, Massachusetts",
  52.102 +   edition = "Second",
  52.103 +   month = "10~" # jan,
  52.104 +   year = "{\noopsort{1973c}}1981",
  52.105 +   note = "This is a full BOOK entry",
  52.106 +}
  52.107 +
  52.108 +@BOOK{book-crossref,
  52.109 +   crossref = "whole-set",
  52.110 +   title = "Seminumerical Algorithms",
  52.111 +   volume = 2,
  52.112 +   series = "The Art of Computer Programming",
  52.113 +   edition = "Second",
  52.114 +   year = "{\noopsort{1973c}}1981",
  52.115 +   note = "This is a cross-referencing BOOK entry",
  52.116 +}
  52.117 +
  52.118 +@BOOK{whole-set,
  52.119 +   author = "Donald E. Knuth",
  52.120 +   publisher = "Addison-Wesley",
  52.121 +   title = "The Art of Computer Programming",
  52.122 +   series = "Four volumes",
  52.123 +   year = "{\noopsort{1973a}}{\switchargs{--90}{1968}}",
  52.124 +   note = "Seven volumes planned (this is a cross-referenced set of BOOKs)",
  52.125 +}
  52.126 +
  52.127 +@BOOKLET{booklet-minimal,
  52.128 +   key = "Kn{\printfirst{v}{1987}}",
  52.129 +   title = "The Programming of Computer Art",
  52.130 +}
  52.131 +
  52.132 +@BOOKLET{booklet-full,
  52.133 +   author = "Jill C. Knvth",
  52.134 +   title = "The Programming of Computer Art",
  52.135 +   howpublished = "Vernier Art Center",
  52.136 +   address = "Stanford, California",
  52.137 +   month = feb,
  52.138 +   year = 1988,
  52.139 +   note = "This is a full BOOKLET entry",
  52.140 +}
  52.141 +
  52.142 +@INCOLLECTION{incollection-minimal,
  52.143 +   author = "Daniel D. Lincoll",
  52.144 +   title = "Semigroups of Recurrences",
  52.145 +   booktitle = "High Speed Computer and Algorithm Organization",
  52.146 +   publisher = "Academic Press",
  52.147 +   year = 1977,
  52.148 +}
  52.149 +
  52.150 +@INCOLLECTION{incollection-full,
  52.151 +   author = "Daniel D. Lincoll",
  52.152 +   title = "Semigroups of Recurrences",
  52.153 +   editor = "David J. Lipcoll and D. H. Lawrie and A. H. Sameh",
  52.154 +   booktitle = "High Speed Computer and Algorithm Organization",
  52.155 +   number = 23,
  52.156 +   series = "Fast Computers",
  52.157 +   chapter = 3,
  52.158 +   type = "Part",
  52.159 +   pages = "179--183",
  52.160 +   publisher = "Academic Press",
  52.161 +   address = "New York",
  52.162 +   edition = "Third",
  52.163 +   month = sep,
  52.164 +   year = 1977,
  52.165 +   note = "This is a full INCOLLECTION entry",
  52.166 +}
  52.167 +
  52.168 +@INCOLLECTION{incollection-crossref,
  52.169 +   crossref = "whole-collection",
  52.170 +   author = "Daniel D. Lincoll",
  52.171 +   title = "Semigroups of Recurrences",
  52.172 +   pages = "179--183",
  52.173 +   note = "This is a cross-referencing INCOLLECTION entry",
  52.174 +}
  52.175 +
  52.176 +@BOOK{whole-collection,
  52.177 +   editor = "David J. Lipcoll and D. H. Lawrie and A. H. Sameh",
  52.178 +   title = "High Speed Computer and Algorithm Organization",
  52.179 +   booktitle = "High Speed Computer and Algorithm Organization",
  52.180 +   number = 23,
  52.181 +   series = "Fast Computers",
  52.182 +   publisher = "Academic Press",
  52.183 +   address = "New York",
  52.184 +   edition = "Third",
  52.185 +   month = sep,
  52.186 +   year = 1977,
  52.187 +   note = "This is a cross-referenced BOOK (collection) entry",
  52.188 +}
  52.189 +
  52.190 +@MANUAL{manual-minimal,
  52.191 +   key = "Manmaker",
  52.192 +   title = "The Definitive Computer Manual",
  52.193 +}
  52.194 +
  52.195 +@MANUAL{manual-full,
  52.196 +   author = "Larry Manmaker",
  52.197 +   title = "The Definitive Computer Manual",
  52.198 +   organization = "Chips-R-Us",
  52.199 +   address = "Silicon Valley",
  52.200 +   edition = "Silver",
  52.201 +   month = apr # "-" # may,
  52.202 +   year = 1986,
  52.203 +   note = "This is a full MANUAL entry",
  52.204 +}
  52.205 +
  52.206 +@MASTERSTHESIS{mastersthesis-minimal,
  52.207 +   author = "{\'{E}}douard Masterly",
  52.208 +   title = "Mastering Thesis Writing",
  52.209 +   school = "Stanford University",
  52.210 +   year = 1988,
  52.211 +}
  52.212 +
  52.213 +@MASTERSTHESIS{mastersthesis-full,
  52.214 +   author = "{\'{E}}douard Masterly",
  52.215 +   title = "Mastering Thesis Writing",
  52.216 +   school = "Stanford University",
  52.217 +   type = "Master's project",
  52.218 +   address = "English Department",
  52.219 +   month = jun # "-" # aug,
  52.220 +   year = 1988,
  52.221 +   note = "This is a full MASTERSTHESIS entry",
  52.222 +}
  52.223 +
  52.224 +@MISC{misc-minimal,
  52.225 +   key = "Missilany",
  52.226 +   note = "This is a minimal MISC entry",
  52.227 +}
  52.228 +
  52.229 +@MISC{misc-full,
  52.230 +   author = "Joe-Bob Missilany",
  52.231 +   title = "Handing out random pamphlets in airports",
  52.232 +   howpublished = "Handed out at O'Hare",
  52.233 +   month = oct,
  52.234 +   year = 1984,
  52.235 +   note = "This is a full MISC entry",
  52.236 +}
  52.237 +
  52.238 +@STRING{STOC-key = "OX{\singleletter{stoc}}"}
  52.239 +
  52.240 +@STRING{ACM = "The OX Association for Computing Machinery"}
  52.241 +
  52.242 +@STRING{STOC = " Symposium on the Theory of Computing"}
  52.243 +
  52.244 +@INPROCEEDINGS{inproceedings-minimal,
  52.245 +   author = "Alfred V. Oaho and Jeffrey D. Ullman and Mihalis Yannakakis",
  52.246 +   title = "On Notions of Information Transfer in {VLSI} Circuits",
  52.247 +   booktitle = "Proc. Fifteenth Annual ACM" # STOC,
  52.248 +   year = 1983,
  52.249 +}
  52.250 +
  52.251 +@INPROCEEDINGS{inproceedings-full,
  52.252 +   author = "Alfred V. Oaho and Jeffrey D. Ullman and Mihalis Yannakakis",
  52.253 +   title = "On Notions of Information Transfer in {VLSI} Circuits",
  52.254 +   editor = "Wizard V. Oz and Mihalis Yannakakis",
  52.255 +   booktitle = "Proc. Fifteenth Annual ACM" # STOC,
  52.256 +   number = 17,
  52.257 +   series = "All ACM Conferences",
  52.258 +   pages = "133--139",
  52.259 +   month = mar,
  52.260 +   year = 1983,
  52.261 +   address = "Boston",
  52.262 +   organization = ACM,
  52.263 +   publisher = "Academic Press",
  52.264 +   note = "This is a full INPROCEEDINGS entry",
  52.265 +}
  52.266 +
  52.267 +@INPROCEEDINGS{inproceedings-crossref,
  52.268 +   crossref = "whole-proceedings",
  52.269 +   author = "Alfred V. Oaho and Jeffrey D. Ullman and Mihalis Yannakakis",
  52.270 +   title = "On Notions of Information Transfer in {VLSI} Circuits",
  52.271 +   organization = "",
  52.272 +   pages = "133--139",
  52.273 +   note = "This is a cross-referencing INPROCEEDINGS entry",
  52.274 +}
  52.275 +
  52.276 +@PROCEEDINGS{proceedings-minimal,
  52.277 +   key = STOC-key,
  52.278 +   title = "Proc. Fifteenth Annual" # STOC,
  52.279 +   year = 1983,
  52.280 +}
  52.281 +
  52.282 +@PROCEEDINGS{proceedings-full,
  52.283 +   editor = "Wizard V. Oz and Mihalis Yannakakis",
  52.284 +   title = "Proc. Fifteenth Annual" # STOC,
  52.285 +   number = 17,
  52.286 +   series = "All ACM Conferences",
  52.287 +   month = mar,
  52.288 +   year = 1983,
  52.289 +   address = "Boston",
  52.290 +   organization = ACM,
  52.291 +   publisher = "Academic Press",
  52.292 +   note = "This is a full PROCEEDINGS entry",
  52.293 +}
  52.294 +
  52.295 +@PROCEEDINGS{whole-proceedings,
  52.296 +   key = STOC-key,
  52.297 +   organization = ACM,
  52.298 +   title = "Proc. Fifteenth Annual" # STOC,
  52.299 +   address = "Boston",
  52.300 +   year = 1983,
  52.301 +   booktitle = "Proc. Fifteenth Annual ACM" # STOC,
  52.302 +   note = "This is a cross-referenced PROCEEDINGS",
  52.303 +}
  52.304 +
  52.305 +@PHDTHESIS{phdthesis-minimal,
  52.306 +   author = "F. Phidias Phony-Baloney",
  52.307 +   title = "Fighting Fire with Fire: Festooning {F}rench Phrases",
  52.308 +   school = "Fanstord University",
  52.309 +   year = 1988,
  52.310 +}
  52.311 +
  52.312 +@PHDTHESIS{phdthesis-full,
  52.313 +   author = "F. Phidias Phony-Baloney",
  52.314 +   title = "Fighting Fire with Fire: Festooning {F}rench Phrases",
  52.315 +   school = "Fanstord University",
  52.316 +   type = "{PhD} Dissertation",
  52.317 +   address = "Department of French",
  52.318 +   month = jun # "-" # aug,
  52.319 +   year = 1988,
  52.320 +   note = "This is a full PHDTHESIS entry",
  52.321 +}
  52.322 +
  52.323 +@TECHREPORT{techreport-minimal,
  52.324 +   author = "Tom Terrific",
  52.325 +   title = "An {$O(n \log n / \! \log\log n)$} Sorting Algorithm",
  52.326 +   institution = "Fanstord University",
  52.327 +   year = 1988,
  52.328 +}
  52.329 +
  52.330 +@TECHREPORT{techreport-full,
  52.331 +   author = "Tom T{\'{e}}rrific",
  52.332 +   title = "An {$O(n \log n / \! \log\log n)$} Sorting Algorithm",
  52.333 +   institution = "Fanstord University",
  52.334 +   type = "Wishful Research Result",
  52.335 +   number = "7",
  52.336 +   address = "Computer Science Department, Fanstord, California",
  52.337 +   month = oct,
  52.338 +   year = 1988,
  52.339 +   note = "This is a full TECHREPORT entry",
  52.340 +}
  52.341 +
  52.342 +@UNPUBLISHED{unpublished-minimal,
  52.343 +   author = "Ulrich {\"{U}}nderwood and Ned {\~N}et and Paul {\={P}}ot",
  52.344 +   title = "Lower Bounds for Wishful Research Results",
  52.345 +   note = "Talk at Fanstord University (this is a minimal UNPUBLISHED entry)",
  52.346 +}
  52.347 +
  52.348 +@UNPUBLISHED{unpublished-full,
  52.349 +   author = "Ulrich {\"{U}}nderwood and Ned {\~N}et and Paul {\={P}}ot",
  52.350 +   title = "Lower Bounds for Wishful Research Results",
  52.351 +   month = nov # ", " # dec,
  52.352 +   year = 1988,
  52.353 +   note = "Talk at Fanstord University (this is a full UNPUBLISHED entry)",
  52.354 +}
  52.355 +
  52.356 +@MISC{random-note-crossref,
  52.357 +   key = {Volume-2},
  52.358 +   note = "Volume~2 is listed under Knuth \cite{book-full}"
  52.359 +}
    53.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    53.2 +++ b/thesis/aux/mitthesis/main.tex	Thu Mar 27 17:57:01 2014 -0400
    53.3 @@ -0,0 +1,46 @@
    53.4 +% -*- Mode:TeX -*-
    53.5 +
    53.6 +%% IMPORTANT: The official thesis specifications are available at:
    53.7 +%%            http://libraries.mit.edu/archives/thesis-specs/
    53.8 +%%
    53.9 +%%            Please verify your thesis' formatting and copyright
   53.10 +%%            assignment before submission.  If you notice any
   53.11 +%%            discrepancies between these templates and the 
   53.12 +%%            MIT Libraries' specs, please let us know
   53.13 +%%            by e-mailing thesis@mit.edu
   53.14 +
   53.15 +%% The documentclass options along with the pagestyle can be used to generate
   53.16 +%% a technical report, a draft copy, or a regular thesis.  You may need to
   53.17 +%% re-specify the pagestyle after you \include  cover.tex.  For more
   53.18 +%% information, see the first few lines of mitthesis.cls. 
   53.19 +
   53.20 +%\documentclass[12pt,vi,twoside]{mitthesis}
   53.21 +%%
   53.22 +%%  If you want your thesis copyright to you instead of MIT, use the
   53.23 +%%  ``vi'' option, as above.
   53.24 +%%
   53.25 +%\documentclass[12pt,twoside,leftblank]{mitthesis}
   53.26 +%%
   53.27 +%% If you want blank pages before new chapters to be labelled ``This
   53.28 +%% Page Intentionally Left Blank'', use the ``leftblank'' option, as
   53.29 +%% above. 
   53.30 +
   53.31 +\documentclass[12pt,twoside]{mitthesis}
   53.32 +%\usepackage{lgrind}
   53.33 +\pagestyle{plain}
   53.34 +
   53.35 +\begin{document}
   53.36 +
   53.37 +\include{cover}
   53.38 +% Some departments (e.g. 5) require an additional signature page.  See
   53.39 +% signature.tex for more information and uncomment the following line if
   53.40 +% applicable.
   53.41 +% \include{signature}
   53.42 +\pagestyle{plain}
   53.43 +\include{contents}
   53.44 +\include{chap1}
   53.45 +\include{chap2}
   53.46 +\appendix
   53.47 +\include{biblio}
   53.48 +\end{document}
   53.49 +
    54.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    54.2 +++ b/thesis/aux/mitthesis/mitthesis.cls	Thu Mar 27 17:57:01 2014 -0400
    54.3 @@ -0,0 +1,535 @@
    54.4 +% $Log: mitthesis.cls,v $
    54.5 +% Revision 1.9  2012/02/15 15:48:16  jdreed
    54.6 +% Tweak the "permission" statement per the Libraries' specs
    54.7 +% retrieved 15-Feb-2012
    54.8 +%
    54.9 +% Revision 1.8  2009/07/27 14:23:50  boojum
   54.10 +% added changing draft mark
   54.11 +%
   54.12 +% Revision 1.6  2005/05/13 19:57:40  boojum
   54.13 +% added leftblank option
   54.14 +%
   54.15 +% Revision 1.5  2002/04/18 14:10:08  boojum
   54.16 +% changed doublespace into setspace for 2e compliance
   54.17 +%
   54.18 +% Revision 1.4  2001/02/08 18:57:13  boojum
   54.19 +% turned two newpages into cleardoublepages
   54.20 +%
   54.21 +% Revision 1.3  2000/08/14 14:53:29  boojum
   54.22 +% commented out footheight, which is relevant for drafthead
   54.23 +%
   54.24 +% Revision 1.2  1999/10/21 14:51:33  boojum
   54.25 +% changed references to documentstyle to documentclass in comments
   54.26 +%
   54.27 +% Revision 1.1  1999/10/21 14:39:31  boojum
   54.28 +% Initial revision
   54.29 +%
   54.30 +%Revision 1.7  1998/04/01  20:45:34  othomas
   54.31 +%removed offending phrase ", and to grant others the right to do so" from copyright notice.
   54.32 +%
   54.33 +%Revision 1.6  96/06/26  15:07:29  othomas
   54.34 +%safety checkin.
   54.35 +%
   54.36 +%Revision 1.5  93/06/07  15:38:50  starflt
   54.37 +%Altered 'vi' option copyright wording to comply with new Institute
   54.38 +%Archives requirements and MIT lawyers.
   54.39 +%
   54.40 +%Revision 1.4  92/08/19  16:51:06  lwvanels
   54.41 +%Updated Course 6 title page for new permissions.
   54.42 +%
   54.43 +%Revision 1.3  92/04/23  10:16:15  epeisach
   54.44 +%Fixed comment character in rcs file
   54.45 +%
   54.46 +%Revision 1.2  92/04/22  13:12:02  epeisach
   54.47 +%Fixes for 1991 course 6 requirements
   54.48 +%Phrase "and to grant others the right to do so" has been added to 
   54.49 +%permission clause
   54.50 +%Second copy of abstract is not counted as separate pages so numbering works
   54.51 +%out
   54.52 +%
   54.53 +%Revision 1.1  90/05/04  11:45:53  lwvanels
   54.54 +%Initial revision
   54.55 +
   54.56 +%
   54.57 +% LaTeX format for theses at MIT
   54.58 +% Based on "Specifications for Thesis Preparation" 
   54.59 +
   54.60 +% `vi' and `upcase' options by Krishna Sethuraman - krishna@athena.mit.edu
   54.61 +% Margins and heading types by Peter Nuth  - nuth@ai.mit.edu
   54.62 +% Title and abstract page by Stephen Gildea - gildea@erl.mit.edu
   54.63 +% Look in this directory for example file mitthesis.doc
   54.64 +% Also for propcover.tex - Boilerplate for PHD proposal.
   54.65 +
   54.66 +% To use this style - say something like:
   54.67 +%  for dull, boring thesis format:
   54.68 +%	\documentclass[12pt]{mitthesis}
   54.69 +%       \pagestyle{plain}
   54.70 +% OR for fast drafts: 
   54.71 +%	\documentclass[11pt,singlespace,draft]{mitthesis}
   54.72 +%	\pagestyle{drafthead}
   54.73 +% OR for Tech Reports:
   54.74 +%	\documentclass[12pt,twoside]{mitthesis}	
   54.75 +%	\pagestyle{headings}
   54.76 +% OR
   54.77 +%  some other combination...
   54.78 +%
   54.79 +%%%% New options:
   54.80 +% 
   54.81 +% Option `twoside':
   54.82 +%   Good for producing Tech Reports.
   54.83 +%   The default is single-sided printing, which is what M.I.T. wants on the
   54.84 +%   thesis document itself.
   54.85 +%
   54.86 +% Option `singlespace':
   54.87 +%   Good for drafts.
   54.88 +%   Double-spaced theses are the default.
   54.89 +%   That is what M.I.T. asks for in the formal specifications.
   54.90 +%
   54.91 +% 	Note that MIT does not REQUIRE all theses to be double-spaced anymore.
   54.92 +% 	Someone in the library system said that it's OK to be single-spaced.
   54.93 +% 	(Regardless of what the specs. say...)
   54.94 +%   To get singlespacing in an area - Use  the 'singlespace' environment. 
   54.95 +%
   54.96 +% Option `draft':
   54.97 +%   Puts `overfull' boxes at the end of lines that are too long. 
   54.98 +%
   54.99 +% Pagestyle `drafthead':
  54.100 +%   Puts the date and the label ``*DRAFT*'' in the footer.
  54.101 +%
  54.102 +%%%%%%%%%%
  54.103 +%
  54.104 +%%%% Parameters to initialize for boilerplate page:
  54.105 +%
  54.106 +%	\title{Mixed Circular Cylindrical Shells}
  54.107 +% 	\author{J. Casey Salas}
  54.108 +% 	\prevdegrees{B.S., University of California (1978) \\
  54.109 +%		     S.M., Massachusetts Institute of Technology (1981)}
  54.110 +% 	\department{Department of Electrical Engineering and Computer Science}
  54.111 +% 	\degree{Doctor of Philosophy}
  54.112 +%% If the thesis is for two degrees simultaneously, list them both
  54.113 +%% separated by \and like this:
  54.114 +% 	\degree{Doctor of Philosophy \and Master of Science}
  54.115 +% 	\degreemonth{February}
  54.116 +% 	\degreeyear{1987}
  54.117 +% 	\thesisdate{December 10, 1986}
  54.118 +%% If the thesis is copyright by the Institute, leave this line out and
  54.119 +%% the standard copyright line will be used instead.
  54.120 +% 	\copyrightnotice{J. Casey Salas, 1986}
  54.121 +%% If there is more than one supervisor, use the \supervisor command
  54.122 +%% once for each.
  54.123 +% 	\supervisor{John D. Galli}{Director, Sound Instrument Laboratory}
  54.124 +%% This is the department committee chairman, not the thesis committee chairman
  54.125 +% 	\chairman{Arthur C. Smith}
  54.126 +%		 {Chairman, Departmental Committee on Graduate Students}
  54.127 +%% Make the titlepage based on the above information.  If you need
  54.128 +%% something special and can't use the standard form, you can specify
  54.129 +%% the exact text of the titlepage yourself.  Put it in a titlepage
  54.130 +%% environment and leave blank lines where you want vertical space.
  54.131 +%% The spaces will be adjusted to fill the entire page.  The dotted
  54.132 +%% lines for the signatures are made with the \signature command.
  54.133 +%
  54.134 +%% The abstractpage environment sets up everything on the page except
  54.135 +%% the text itself.  The title and other header material are put at the
  54.136 +%% top of the page, and the supervisors are listed at the bottom.  A
  54.137 +%% new page is begun both before and after.  Of course, an abstract may
  54.138 +%% be more than one page itself.  If you need more control over the
  54.139 +%% format of the page, you can use the abstract environment, which puts
  54.140 +%% the word "Abstract" at the beginning and single spaces its text.
  54.141 +%
  54.142 +% 	\begin{abstractpage}
  54.143 +%	    Abstract goes here.
  54.144 +%	\end{abstractpage}
  54.145 +%
  54.146 +%%%%%%%% Newer additions 
  54.147 +%
  54.148 +% documentclass options - 
  54.149 +% vi		For MIT course VI or VIII thesis - will copyright the thesis to
  54.150 +% 		you while giving MIT permission to copy and distribute it.
  54.151 +% upcase	Will put much of the cover page in uppercase, as per the
  54.152 +% 		example on page 17 of the *Specifications for Thesis
  54.153 +% 		Preparation*, (revised 1989)
  54.154 +% Also added ``All Rights Reserved'' to default copyright notice.
  54.155 +%
  54.156 +%%%%%%%%%%%
  54.157 +% 
  54.158 +% Documentclass options (vi and upcase) and changes to copyright notice
  54.159 +%	Copyright (c) 1990, by Krishna Sethuraman.
  54.160 +%
  54.161 +% Pagestyle and header generation
  54.162 +%	Copyright (c) 1987, 1988 by Peter Nuth
  54.163 +%
  54.164 +% Original version
  54.165 +%	 Copyright (c) 1987 by Stephen Gildea
  54.166 +% Permission to copy all or part of this work is granted, provided
  54.167 +% that the copies are not made or distributed for resale, and that
  54.168 +% the copyright notice and this notice are retained.
  54.169 +% 
  54.170 +% THIS WORK IS PROVIDED ON AN "AS IS" BASIS.  THE AUTHOR PROVIDES NO
  54.171 +% WARRANTY WHATSOEVER, EITHER EXPRESS OR IMPLIED, REGARDING THE WORK,
  54.172 +% INCLUDING WARRANTIES WITH RESPECT TO ITS MERCHANTABILITY OR FITNESS
  54.173 +% FOR ANY PARTICULAR PURPOSE.
  54.174 +%%%%%%%%
  54.175 +
  54.176 +\NeedsTeXFormat{LaTeX2e}
  54.177 +\ProvidesClass{mitthesis}[1999/10/20]
  54.178 +
  54.179 +\def\mystretch{1.5}		% Double spacing hack
  54.180 +\DeclareOption{doublespace}{}	% This is default
  54.181 +				% So we do not read this style twice
  54.182 +\DeclareOption{singlespace}{		% If he explicitly wants single spacing
  54.183 +    \typeout{Single spaced}
  54.184 +    \def\mystretch{1}}	
  54.185 +
  54.186 +%% `vi' and `upcase' document style options.  Krishna Sethuraman (1990)
  54.187 +\newcount\vithesis
  54.188 +\DeclareOption{vi}{\typeout{Course VI/VIII thesis style.}\advance\vithesis by1}
  54.189 +\vithesis=0
  54.190 +
  54.191 +\DeclareOption{upcase}{\typeout{Uppercase cover page.}
  54.192 +	\gdef\choosecase#1{\uppercase\expandafter{#1}}}
  54.193 +\def\choosecase#1{#1}
  54.194 +
  54.195 +%% leftblank option by Kevin Fu
  54.196 +\newif\if@leftblank \@leftblankfalse
  54.197 +
  54.198 +\DeclareOption{leftblank}{\typeout{Intentionally Leaving Pages Blank}
  54.199 +\@leftblanktrue}
  54.200 +
  54.201 +%  Thesis looks much like report
  54.202 +\DeclareOption*{\PassOptionsToClass{\CurrentOption}{report}}
  54.203 +\ProcessOptions
  54.204 +\LoadClass{report}
  54.205 +
  54.206 +% If the user wants single spacing, set baselinestretch=1.
  54.207 +
  54.208 +\usepackage{setspace}
  54.209 +
  54.210 +% Note - doublespace.sty has some float-related troubles in
  54.211 +% combination with graphics or color, and is not officially compliant
  54.212 +% with 2e.  setspace is a replacement which is 2e-compliant.
  54.213 +
  54.214 +% Read the doublespace style that we got from Rochester:
  54.215 +%\input setdoublespace.sty 		
  54.216 +
  54.217 +\def\baselinestretch{\mystretch}	% Double spacing hack
  54.218 +
  54.219 +%%%%%%%  Set up margins and formatting params %%%
  54.220 +
  54.221 +% Margins.
  54.222 +%  Note we want 1in top margin assuming no header line, so push header
  54.223 +%	into 1in margin.
  54.224 +%  Draft mode brings the header back down.
  54.225 +
  54.226 +\setlength{\oddsidemargin}{0.25in}	% 1.25in left margin 
  54.227 +\setlength{\evensidemargin}{0.25in}	% 1.25in left margin (even pages)
  54.228 +\setlength{\topmargin}{0.0in}		% 1in top margin
  54.229 +\setlength{\textwidth}{6.0in}		% 6.0in text - 1.25in rt margin
  54.230 +\setlength{\textheight}{9in}		% Body ht for 1in margins
  54.231 +\addtolength{\topmargin}{-\headheight}	% No header, so compensate
  54.232 +\addtolength{\topmargin}{-\headsep}	% for header height and separation
  54.233 +
  54.234 +% The next two macros compensate page style for headers and footers
  54.235 +% We only need them in page styles that USE headers and footers.
  54.236 +    % If we have a header, it must be 1in from top of page.
  54.237 +\def\pulldownheader{			% Shift header down 1in from top
  54.238 +    \addtolength{\topmargin}{\headheight}	
  54.239 +    \addtolength{\topmargin}{\headsep}	
  54.240 +    \addtolength{\textheight}{-\headheight}
  54.241 +    \addtolength{\textheight}{-\headsep}
  54.242 +}
  54.243 +    % If we have a footer, put it 1in up from bottom
  54.244 +\def\pullupfooter{				% Shift footer up
  54.245 +    \addtolength{\textheight}{-\footskip}
  54.246 +%    \addtolength{\textheight}{-\footheight}  %footheight doesn't
  54.247 +%    						exist in 2e
  54.248 +}
  54.249 +
  54.250 +%%%%%%%  End of margins and formatting params %%%
  54.251 +
  54.252 +%%%%%%%  Fix various header and footer problems %%%
  54.253 +
  54.254 +% Draft mark on the right side of left pages (outside)
  54.255 +% this mark is also the only one visible on single sided.
  54.256 +\newcommand{\draftrmark}{**DRAFT**} 
  54.257 +% Draft mark on the left side of right pages (outside)
  54.258 +\newcommand{\draftlmark}{**DRAFT**} % 
  54.259 +
  54.260 +% Macros to make changing the Draft easier
  54.261 +\newcommand{\drmark}[1]{\renewcommand{\draftrmark}{#1}}
  54.262 +\newcommand{\dlmark}[1]{\renewcommand{\draftlmark}{#1}}
  54.263 +\newcommand{\dmark}[1]{\drmark{#1}\dlmark{#1}}
  54.264 +
  54.265 +% Format for draft of thesis.  Define our own PageStyle -
  54.266 +% Just like headings, but has foot lines with the date and warning
  54.267 +
  54.268 +\if@twoside         % If two-sided printing.
  54.269 +\def\ps@drafthead{
  54.270 +    \let\@mkboth\markboth
  54.271 +    \def\@oddfoot{\rm \today \hfil \sc \draftrmark}
  54.272 +    \def\@evenfoot{\sc \draftlmark \hfil \rm \today }
  54.273 +    \def\@evenhead{\rm \thepage\hfil \sl \leftmark}
  54.274 +    \def\@oddhead{\hbox{}\sl \rightmark \hfil \rm\thepage}
  54.275 +    \def\chaptermark##1{\markboth {\uppercase{\ifnum \c@secnumdepth >\m@ne
  54.276 +	\@chapapp\ \thechapter. \ \fi ##1}}{}}
  54.277 +    \def\sectionmark##1{\markright {\uppercase{\ifnum \c@secnumdepth >\z@
  54.278 +	\thesection. \ \fi ##1}}}
  54.279 +    \pulldownheader				% Bring header down from edge
  54.280 +    \pullupfooter				% Bring footer up
  54.281 +}
  54.282 +\else               % If one-sided printing.
  54.283 +\def\ps@drafthead{
  54.284 +    \let\@mkboth\markboth
  54.285 +    \def\@oddfoot{\rm \today \hfil \sc \draftrmark}
  54.286 +    \def\@oddhead{\hbox{}\sl \rightmark \hfil \rm\thepage}
  54.287 +    \def\chaptermark##1{\markright {\uppercase{\ifnum \c@secnumdepth >\m@ne
  54.288 +	\@chapapp\ \thechapter. \ \fi ##1}}}
  54.289 +    \pulldownheader				% Bring header down from edge
  54.290 +    \pullupfooter				% Bring footer up
  54.291 +}
  54.292 +\fi
  54.293 +
  54.294 +% I redefine these formats that were defined in report.sty
  54.295 +% Definition of 'headings' page style 
  54.296 +%  Note the use of ##1 for parameter of \def\chaptermark inside the
  54.297 +%  \def\ps@headings.
  54.298 +%
  54.299 +
  54.300 +\if@twoside					% If two-sided printing.
  54.301 +\def\ps@headings{\let\@mkboth\markboth
  54.302 +    \def\@oddfoot{}
  54.303 +    \def\@evenfoot{}		% No feet.
  54.304 +    \def\@evenhead{\rm \thepage\hfil \sl \leftmark}	% Left heading.
  54.305 +    \def\@oddhead{\hbox{}\sl \rightmark \hfil \rm\thepage}	% Right heading.
  54.306 +    \def\chaptermark##1{\markboth {\uppercase{\ifnum \c@secnumdepth >\m@ne
  54.307 +	\@chapapp\ \thechapter. \ \fi ##1}}{}}	
  54.308 +    \def\sectionmark##1{\markright {\uppercase{\ifnum \c@secnumdepth >\z@
  54.309 +	\thesection. \ \fi ##1}}}
  54.310 +    \pulldownheader				% Bring header down from edge
  54.311 +}
  54.312 +\else						% If one-sided printing.
  54.313 +\def\ps@headings{\let\@mkboth\markboth
  54.314 +    \def\@oddfoot{}
  54.315 +    \def\@evenfoot{}		%     No feet.
  54.316 +    \def\@oddhead{\hbox {}\sl \rightmark \hfil \rm\thepage}	% Heading.
  54.317 +    \def\chaptermark##1{\markright {\uppercase{\ifnum \c@secnumdepth >\m@ne
  54.318 +	\@chapapp\ \thechapter. \ \fi ##1}}}
  54.319 +    \pulldownheader				% Bring header down from edge
  54.320 +}
  54.321 +\fi
  54.322 +
  54.323 +% Redefinition of 'myheadings' page style.  
  54.324 +%
  54.325 +\def\ps@myheadings{\let\@mkboth\@gobbletwo
  54.326 +    \def\@oddfoot{}
  54.327 +    \def\@evenfoot{}
  54.328 +    \def\sectionmark##1{}
  54.329 +    \def\subsectionmark##1{}
  54.330 +    \def\@evenhead{\rm \thepage\hfil\sl\leftmark\hbox {}}	%
  54.331 +    \def\@oddhead{\hbox{}\sl\rightmark \hfil \rm\thepage}	%
  54.332 +    \pulldownheader				% Bring header down from edge
  54.333 +}						
  54.334 +
  54.335 +% Redefine '/chapter' to always start on an odd page.
  54.336 +% Should make no difference in singleside mode.
  54.337 +%
  54.338 +\if@leftblank
  54.339 +% Prints "THIS PAGE INTENTIONALLY LEFT BLANK" on blank pages.
  54.340 +\def\chapter{\clearpage\ifodd\c@page\else
  54.341 +   \hbox{}\par\vfill\centerline%
  54.342 +   {THIS PAGE INTENTIONALLY LEFT BLANK}%
  54.343 +   \vfill\newpage\fi
  54.344 +   \thispagestyle{plain}	% Page style of chapter page is 'plain'
  54.345 +   \global\@topnum\z@		% Prevents figures from going at top of page.
  54.346 +   \@afterindentfalse		% Suppresses indent in first paragraph.  Change
  54.347 +   \secdef\@chapter\@schapter}	% to \@afterindenttrue to have indent.
  54.348 +\else
  54.349 +\def\chapter{\cleardoublepage	% Starts new page.
  54.350 +   \thispagestyle{plain}	% Page style of chapter page is 'plain'
  54.351 +   \global\@topnum\z@		% Prevents figures from going at top of page.
  54.352 +   \@afterindentfalse		% Suppresses indent in first paragraph.  Change
  54.353 +   \secdef\@chapter\@schapter}	% to \@afterindenttrue to have indent.
  54.354 +\fi
  54.355 +% If using the report style, use - instead of . in the figure number.
  54.356 +\@ifundefined{thechapter}{}{\def\thefigure{\thechapter-\arabic{figure}}}
  54.357 +
  54.358 +
  54.359 +%%%%%%%%%  End of Style parameters %%%%
  54.360 +
  54.361 +% Here's Gildea's Boilerplate Stuff.
  54.362 +% Copyright (c) 1987 by Stephen Gildea
  54.363 +% Permission to copy all or part of this work is granted, provided
  54.364 +% that the copies are not made or distributed for resale, and that
  54.365 +% the copyright notice and this notice are retained.
  54.366 +
  54.367 +%% Define all the pieces that go on the title page and the abstract.
  54.368 +
  54.369 +% \title and \author already exist
  54.370 +
  54.371 +\def\prevdegrees#1{\gdef\@prevdegrees{#1}}
  54.372 +\def\@prevdegrees{}
  54.373 +
  54.374 +\def\department#1{\gdef\@department{#1}}
  54.375 +
  54.376 +% If you are getting two degrees, use \and between the names.
  54.377 +\def\degree#1{\setbox0\hbox{#1}	 %for side effect of setting \@degreeword
  54.378 +  \gdef\@degree{#1}}
  54.379 +
  54.380 +% \and is used inside the \degree argument to separate two degrees
  54.381 +\def\and{\gdef\@degreeword{degrees} \par and \par}
  54.382 +\def\@degreeword{degree}
  54.383 +
  54.384 +% The copyright notice stuff is a tremendous mess.
  54.385 +%
  54.386 +% \@copyrightnotice is used by \maketitle to actually put text on the
  54.387 +% page; it defaults to ``Copyright MIT 19xx.  All rights reserved.''
  54.388 +% \copyrightnoticetext takes an argument and defined \@copyrightnotice
  54.389 +% to that argument.  \copyrightnotice takes an argument, and calls
  54.390 +% \copyrightnoticetext with that argument, preceeded by a copyright
  54.391 +% symbol and followed by ``All rights reserved.'' and the standard
  54.392 +% permission notice.
  54.393 +% 
  54.394 +% If you use the 'vi' option, \copyrightnoticetext is used to set the
  54.395 +% copyright to ``(C) Your Name, Current Year in Roman Numerals.''
  54.396 +% followed by the permission notice.
  54.397 +
  54.398 +% If there is no \copyrightnotice command, it is asssumed that MIT
  54.399 +% holds the copyright.  This commands adds the copyright symbol to the
  54.400 +% beginning, and puts the standard permission notice below.
  54.401 +%% ``All rights reserved'' added.  Krishna Sethuraman (1990)
  54.402 +\def\copyrightnotice#1{\copyrightnoticetext{\copyright\ #1.  All rights
  54.403 +reserved.\par\permission}}
  54.404 +
  54.405 +% Occacionally you will need to exactly specify the text of the 
  54.406 +% copyright notice.  The \copyrightnoticetext command is then useful.
  54.407 +\long\def\copyrightnoticetext#1{\gdef\@copyrightnotice{#1}}
  54.408 +\def\@copyrightnotice{\copyright\ \Mit\ \@degreeyear.  All rights reserved.}
  54.409 +
  54.410 +%% `vi' documentclass option: Specifying this option automatically
  54.411 +%% copyrights the thesis to the author and gives MIT permission to copy and
  54.412 +%% distribute the document.  If you want, you can still specify
  54.413 +%% \copyrightnotice{stuff} to copyright to someone else, or
  54.414 +%% \copyrightnoticetext{stuff} to specify the exact text of the copyright
  54.415 +%% notice.
  54.416 +\ifodd\vithesis \copyrightnoticetext{\copyright\ \@author,
  54.417 +\uppercase\expandafter{\romannumeral\@degreeyear}.  All rights reserved.\par\permission}
  54.418 +%% or just
  54.419 +%%\@degreeyear}}
  54.420 +\typeout{Copyright given to author,
  54.421 +	permission to copy/distribute given to MIT.}
  54.422 +\else \typeout{Thesis document copyright MIT unless otherwise (manually) specified}
  54.423 +\fi
  54.424 +
  54.425 +\def\thesisdate#1{\gdef\@thesisdate{#1}}
  54.426 +
  54.427 +% typically just a month and year
  54.428 +\def\degreemonth#1{\gdef\@degreemonth{#1}}
  54.429 +\def\degreeyear#1{\gdef\@degreeyear{#1}}
  54.430 +
  54.431 +% Usage: \supervisor{name}{title}
  54.432 +%        \chairman{name}{title}
  54.433 +
  54.434 +% since there can be more than one supervisor,
  54.435 +% we build the appropriate boxes for the titlepage and
  54.436 +% the abstractpage as the user makes multiple calls
  54.437 +% to \supervisor
  54.438 +\newbox\@titlesupervisor 	\newbox\@abstractsupervisor
  54.439 +
  54.440 +\def\supervisor#1#2{\setbox\@titlesupervisor\vbox
  54.441 +  {\unvbox\@titlesupervisor \vskip 10pt% plus 1fil minus 1fil
  54.442 +  \def\baselinestretch{1}\large
  54.443 +  \signature{Certified by}{#1 \\ #2 \\ Thesis Supervisor}}
  54.444 +  \setbox\@abstractsupervisor\vbox{\unvbox\@abstractsupervisor
  54.445 +  \vskip\baselineskip \def\baselinestretch{1}\@normalsize 
  54.446 +  \par\noindent Thesis Supervisor: #1 \\ Title: #2}}
  54.447 +
  54.448 +% department chairman, not thesis committee chairman
  54.449 +\def\chairman#1#2{\gdef\@chairmanname{#1}\gdef\@chairmantitle{#2}}
  54.450 +
  54.451 +%% `upcase' documentclass option: \choosecase is defined either as a dummy or
  54.452 +%% a macro to change the (expanded) argument to uppercase.
  54.453 +\def\maketitle{\begin{titlepage}
  54.454 +\large
  54.455 +{\def\baselinestretch{1.2}\Large\bf \choosecase{\@title} \par}
  54.456 +by\par
  54.457 +{\Large  \choosecase{\@author}}
  54.458 +\par
  54.459 +\@prevdegrees
  54.460 +\par
  54.461 +\choosecase{Submitted to the} \choosecase{\@department} \\
  54.462 +\choosecase{in partial fulfillment of the requirements for the}
  54.463 +\choosecase{\@degreeword} 
  54.464 +\choosecase{of}
  54.465 +\par
  54.466 +\choosecase{\@degree}
  54.467 +\par
  54.468 +at the
  54.469 +\par\MIT\par
  54.470 +\@degreemonth\ \@degreeyear
  54.471 +\par
  54.472 +\@copyrightnotice
  54.473 +\par
  54.474 +\vskip 3\baselineskip
  54.475 +\signature{Author}{\@department \\ \@thesisdate}
  54.476 +\par
  54.477 +\vfill
  54.478 +\unvbox\@titlesupervisor
  54.479 +\par
  54.480 +\vfill
  54.481 +\signature{Accepted by}{\@chairmanname \\ \@chairmantitle}
  54.482 +\vfill
  54.483 +\end{titlepage}}
  54.484 +
  54.485 +% this environment should probably be called abstract,
  54.486 +% but we want people to also be able to get at the more
  54.487 +% basic abstract environment
  54.488 +\def\abstractpage{\cleardoublepage
  54.489 +\begin{center}{\large{\bf \@title} \\
  54.490 +by \\
  54.491 +\@author \\[\baselineskip]}
  54.492 +\par
  54.493 +\def\baselinestretch{1}\@normalsize
  54.494 +Submitted to the \@department \\
  54.495 +on \@thesisdate, in partial fulfillment of the \\
  54.496 +requirements for the \@degreeword\ of \\
  54.497 +\@degree
  54.498 +\end{center}
  54.499 +\par
  54.500 +\begin{abstract}}
  54.501 +
  54.502 +%% Changed from \unvbox to \unvcopy for use with multiple copies of abstract
  54.503 +%% page.
  54.504 +%% Krishna Sethuraman (1990)
  54.505 +\def\endabstractpage{\end{abstract}\noindent
  54.506 + \unvcopy\@abstractsupervisor \newpage}
  54.507 +
  54.508 +%% This counter is used to save the page number for the second copy of
  54.509 +%% the abstract.
  54.510 +\newcounter{savepage}
  54.511 +
  54.512 +% You can use the titlepage environment to do it all yourself if you
  54.513 +% don't want to use \maketitle.  If the titlepage environment, the
  54.514 +% paragraph skip is infinitely stretchable, so if you leave a blank line
  54.515 +% between lines that you want space between, the space will stretch so
  54.516 +% that the title page fills up the entire page.
  54.517 +\def\titlepage{\cleardoublepage\centering
  54.518 +  \thispagestyle{empty}
  54.519 +  \parindent 0pt \parskip 10pt plus 1fil minus 1fil
  54.520 +  \def\baselinestretch{1}\@normalsize\vbox to \vsize\bgroup\vbox to 9in\bgroup}
  54.521 +% The \kern0pt pushes any depth into the height.  Thanks to Richard Stone.
  54.522 +\def\endtitlepage{\par\kern 0pt\egroup\vss\egroup\newpage}
  54.523 +
  54.524 +\def\MIT{MASSACHUSETTS INSTITUTE OF TECHNOLOGY}
  54.525 +\def\Mit{Massachusetts Institute of Technology}
  54.526 +
  54.527 +\def\permission{\par\noindent{\centering
  54.528 +   The author hereby grants to MIT permission to reproduce and to
  54.529 +   distribute publicly paper and electronic copies of this thesis
  54.530 +   document in whole or in part in any medium now known or hereafter
  54.531 +   created.}\par}
  54.532 +
  54.533 +\def\signature#1#2{\par\noindent#1\dotfill\null\\*
  54.534 +  {\raggedleft #2\par}}
  54.535 +
  54.536 +\def\abstract{\subsection*{Abstract}\small\def\baselinestretch{1}\@normalsize}
  54.537 +\def\endabstract{\par}
  54.538 +
    55.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    55.2 +++ b/thesis/aux/mitthesis/sample.org	Thu Mar 27 17:57:01 2014 -0400
    55.3 @@ -0,0 +1,7 @@
    55.4 +#+TITLE: asdhfklasdfkljasdf
    55.5 +#+AUTHOR: asdhfkjasdhfkljdasf
    55.6 +
    55.7 +* Section one
    55.8 +asdfasdfasdf
    55.9 +* Another section
   55.10 +aqghowahaoshfwhowehorwer
   55.11 \ No newline at end of file
    56.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    56.2 +++ b/thesis/aux/mitthesis/sample.tex	Thu Mar 27 17:57:01 2014 -0400
    56.3 @@ -0,0 +1,40 @@
    56.4 +% Created 2014-03-20 Thu 23:12
    56.5 +\documentclass[11pt]{article}
    56.6 +\usepackage[utf8]{inputenc}
    56.7 +\usepackage[T1]{fontenc}
    56.8 +\usepackage{fixltx2e}
    56.9 +\usepackage{graphicx}
   56.10 +\usepackage{longtable}
   56.11 +\usepackage{float}
   56.12 +\usepackage{wrapfig}
   56.13 +\usepackage{rotating}
   56.14 +\usepackage[normalem]{ulem}
   56.15 +\usepackage{amsmath}
   56.16 +\usepackage{textcomp}
   56.17 +\usepackage{marvosym}
   56.18 +\usepackage{wasysym}
   56.19 +\usepackage{amssymb}
   56.20 +\usepackage{hyperref}
   56.21 +\tolerance=1000
   56.22 +\author{asdhfkjasdhfkljdasf}
   56.23 +\date{\today}
   56.24 +\title{asdhfklasdfkljasdf}
   56.25 +\hypersetup{
   56.26 +  pdfkeywords={},
   56.27 +  pdfsubject={},
   56.28 +  pdfcreator={Emacs 24.2.1 (Org mode 8.2.5h)}}
   56.29 +\begin{document}
   56.30 +
   56.31 +\maketitle
   56.32 +\tableofcontents
   56.33 +
   56.34 +
   56.35 +\section{Section one}
   56.36 +\label{sec-1}
   56.37 +asdfasdfasdf
   56.38 +
   56.39 +\section{Another section}
   56.40 +\label{sec-2}
   56.41 +aqghowahaoshfwhowehorwer
   56.42 +% Emacs 24.2.1 (Org mode 8.2.5h)
   56.43 +\end{document}
   56.44 \ No newline at end of file
    57.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    57.2 +++ b/thesis/aux/mitthesis/signature.tex	Thu Mar 27 17:57:01 2014 -0400
    57.3 @@ -0,0 +1,31 @@
    57.4 +% -*- Mode:TeX -*-
    57.5 +%
    57.6 +% Some departments (e.g. Chemistry) require an additional cover page
    57.7 +% with signatures of the thesis committee.  Please check with your
    57.8 +% thesis advisor or other appropriate person to determine if such a 
    57.9 +% page is required for your thesis.  
   57.10 +%
   57.11 +% If you choose not to use the "titlepage" environment, a \newpage
   57.12 +% commands, and several \vspace{\fill} commands may be necessary to
   57.13 +% achieve the required spacing.  The \signature command is defined in
   57.14 +% the "mitthesis" class
   57.15 +%
   57.16 +% The following sample appears courtesy of Ben Kaduk <kaduk@mit.edu> and
   57.17 +% was used in his June 2012 doctoral thesis in Chemistry. 
   57.18 +
   57.19 +\begin{titlepage}
   57.20 +\begin{large}
   57.21 +This doctoral thesis has been examined by a Committee of the Department
   57.22 +of Chemistry as follows:
   57.23 +
   57.24 +\signature{Professor Jianshu Cao}{Chairman, Thesis Committee \\
   57.25 +   Professor of Chemistry}
   57.26 +
   57.27 +\signature{Professor Troy Van Voorhis}{Thesis Supervisor \\
   57.28 +   Associate Professor of Chemistry}
   57.29 +
   57.30 +\signature{Professor Robert W. Field}{Member, Thesis Committee \\
   57.31 +   Haslam and Dewey Professor of Chemistry}
   57.32 +\end{large}
   57.33 +\end{titlepage}
   57.34 +
    58.1 Binary file thesis/aux/mitthesis/templates.zip has changed
    59.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    59.2 +++ b/thesis/comp.pl	Thu Mar 27 17:57:01 2014 -0400
    59.3 @@ -0,0 +1,19 @@
    59.4 +#!/bin/perl
    59.5 +
    59.6 +use List::Flatten;
    59.7 +
    59.8 +$target = shift(@ARGV);
    59.9 +
   59.10 +$frames = shift(@ARGV);
   59.11 +
   59.12 +@numbers = split(/,/, $frames);
   59.13 +@images = map {sprintf("$target/%07d.png", $_)} @numbers;
   59.14 +
   59.15 +
   59.16 +$output = $target.".png";
   59.17 +
   59.18 +@imagemagick_command = flat("montage", @images, "-geometry", "+2+2", $output);
   59.19 +
   59.20 +print "@imagemagick_command\n";
   59.21 +
   59.22 +system(@imagemagick_command);
    60.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    60.2 +++ b/thesis/cortex.bib	Thu Mar 27 17:57:01 2014 -0400
    60.3 @@ -0,0 +1,15 @@
    60.4 +@incollection {Tappert77,
    60.5 +AUTHOR = {Tappert, Fred D.},
    60.6 +TITLE = {The parabolic approximation method},
    60.7 +BOOKTITLE = {Wave propagation and underwater acoustics ({W}orkshop,
    60.8 +              {M}ystic, {C}onn., 1974)},
    60.9 +PAGES = {224--287. Lecture Notes in Phys., Vol. 70},
   60.10 +PUBLISHER = {Springer},
   60.11 +ADDRESS = {Berlin},
   60.12 +YEAR = {1977},
   60.13 +MRCLASS = {76.41 (86.41)},
   60.14 +ADDENDUM = {\why{This is a cool paper that really helped me out. It's
   60.15 +                  related to the thesis because they talk about
   60.16 +                  manifolds and other types of pure, unrefined
   60.17 +                  bullshit.}}
   60.18 +}
    61.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    61.2 +++ b/thesis/cortex.org	Thu Mar 27 17:57:01 2014 -0400
    61.3 @@ -0,0 +1,1154 @@
    61.4 +#+title: =CORTEX=
    61.5 +#+author: Robert McIntyre
    61.6 +#+email: rlm@mit.edu
    61.7 +#+description: Using embodied AI to facilitate Artificial Imagination.
    61.8 +#+keywords: AI, clojure, embodiment
    61.9 +#+LaTeX_CLASS_OPTIONS: [nofloat]
   61.10 +
   61.11 +* Empathy and Embodiment as problem solving strategies
   61.12 +  
   61.13 +  By the end of this thesis, you will have seen a novel approach to
   61.14 +  interpreting video using embodiment and empathy. You will have also
   61.15 +  seen one way to efficiently implement empathy for embodied
   61.16 +  creatures. Finally, you will become familiar with =CORTEX=, a system
   61.17 +  for designing and simulating creatures with rich senses, which you
   61.18 +  may choose to use in your own research.
   61.19 +  
   61.20 +  This is the core vision of my thesis: That one of the important ways
   61.21 +  in which we understand others is by imagining ourselves in their
   61.22 +  position and emphatically feeling experiences relative to our own
   61.23 +  bodies. By understanding events in terms of our own previous
   61.24 +  corporeal experience, we greatly constrain the possibilities of what
   61.25 +  would otherwise be an unwieldy exponential search. This extra
   61.26 +  constraint can be the difference between easily understanding what
   61.27 +  is happening in a video and being completely lost in a sea of
   61.28 +  incomprehensible color and movement.
   61.29 +
   61.30 +** Recognizing actions in video is extremely difficult
   61.31 +
   61.32 +   Consider for example the problem of determining what is happening
   61.33 +   in a video of which this is one frame:
   61.34 +
   61.35 +   #+caption: A cat drinking some water. Identifying this action is 
   61.36 +   #+caption: beyond the state of the art for computers.
   61.37 +   #+ATTR_LaTeX: :width 7cm
   61.38 +   [[./images/cat-drinking.jpg]]
   61.39 +   
   61.40 +   It is currently impossible for any computer program to reliably
   61.41 +   label such a video as ``drinking''. And rightly so -- it is a very
   61.42 +   hard problem! What features can you describe in terms of low level
   61.43 +   functions of pixels that can even begin to describe at a high level
   61.44 +   what is happening here?
   61.45 +  
   61.46 +   Or suppose that you are building a program that recognizes chairs.
   61.47 +   How could you ``see'' the chair in figure \ref{hidden-chair}?
   61.48 +   
   61.49 +   #+caption: The chair in this image is quite obvious to humans, but I 
   61.50 +   #+caption: doubt that any modern computer vision program can find it.
   61.51 +   #+name: hidden-chair
   61.52 +   #+ATTR_LaTeX: :width 10cm
   61.53 +   [[./images/fat-person-sitting-at-desk.jpg]]
   61.54 +   
   61.55 +   Finally, how is it that you can easily tell the difference between
   61.56 +   how the girls /muscles/ are working in figure \ref{girl}?
   61.57 +   
   61.58 +   #+caption: The mysterious ``common sense'' appears here as you are able 
   61.59 +   #+caption: to discern the difference in how the girl's arm muscles
   61.60 +   #+caption: are activated between the two images.
   61.61 +   #+name: girl
   61.62 +   #+ATTR_LaTeX: :width 7cm
   61.63 +   [[./images/wall-push.png]]
   61.64 +  
   61.65 +   Each of these examples tells us something about what might be going
   61.66 +   on in our minds as we easily solve these recognition problems.
   61.67 +   
   61.68 +   The hidden chairs show us that we are strongly triggered by cues
   61.69 +   relating to the position of human bodies, and that we can determine
   61.70 +   the overall physical configuration of a human body even if much of
   61.71 +   that body is occluded.
   61.72 +
   61.73 +   The picture of the girl pushing against the wall tells us that we
   61.74 +   have common sense knowledge about the kinetics of our own bodies.
   61.75 +   We know well how our muscles would have to work to maintain us in
   61.76 +   most positions, and we can easily project this self-knowledge to
   61.77 +   imagined positions triggered by images of the human body.
   61.78 +
   61.79 +** =EMPATH= neatly solves recognition problems  
   61.80 +   
   61.81 +   I propose a system that can express the types of recognition
   61.82 +   problems above in a form amenable to computation. It is split into
   61.83 +   four parts:
   61.84 +
   61.85 +   - Free/Guided Play :: The creature moves around and experiences the
   61.86 +        world through its unique perspective. Many otherwise
   61.87 +        complicated actions are easily described in the language of a
   61.88 +        full suite of body-centered, rich senses. For example,
   61.89 +        drinking is the feeling of water sliding down your throat, and
   61.90 +        cooling your insides. It's often accompanied by bringing your
   61.91 +        hand close to your face, or bringing your face close to water.
   61.92 +        Sitting down is the feeling of bending your knees, activating
   61.93 +        your quadriceps, then feeling a surface with your bottom and
   61.94 +        relaxing your legs. These body-centered action descriptions
   61.95 +        can be either learned or hard coded.
   61.96 +   - Posture Imitation :: When trying to interpret a video or image,
   61.97 +        the creature takes a model of itself and aligns it with
   61.98 +        whatever it sees. This alignment can even cross species, as
   61.99 +        when humans try to align themselves with things like ponies,
  61.100 +        dogs, or other humans with a different body type.
  61.101 +   - Empathy         :: The alignment triggers associations with
  61.102 +        sensory data from prior experiences. For example, the
  61.103 +        alignment itself easily maps to proprioceptive data. Any
  61.104 +        sounds or obvious skin contact in the video can to a lesser
  61.105 +        extent trigger previous experience. Segments of previous
  61.106 +        experiences are stitched together to form a coherent and
  61.107 +        complete sensory portrait of the scene.
  61.108 +   - Recognition      :: With the scene described in terms of first
  61.109 +        person sensory events, the creature can now run its
  61.110 +        action-identification programs on this synthesized sensory
  61.111 +        data, just as it would if it were actually experiencing the
  61.112 +        scene first-hand. If previous experience has been accurately
  61.113 +        retrieved, and if it is analogous enough to the scene, then
  61.114 +        the creature will correctly identify the action in the scene.
  61.115 +   
  61.116 +   For example, I think humans are able to label the cat video as
  61.117 +   ``drinking'' because they imagine /themselves/ as the cat, and
  61.118 +   imagine putting their face up against a stream of water and
  61.119 +   sticking out their tongue. In that imagined world, they can feel
  61.120 +   the cool water hitting their tongue, and feel the water entering
  61.121 +   their body, and are able to recognize that /feeling/ as drinking.
  61.122 +   So, the label of the action is not really in the pixels of the
  61.123 +   image, but is found clearly in a simulation inspired by those
  61.124 +   pixels. An imaginative system, having been trained on drinking and
  61.125 +   non-drinking examples and learning that the most important
  61.126 +   component of drinking is the feeling of water sliding down one's
  61.127 +   throat, would analyze a video of a cat drinking in the following
  61.128 +   manner:
  61.129 +   
  61.130 +   1. Create a physical model of the video by putting a ``fuzzy''
  61.131 +      model of its own body in place of the cat. Possibly also create
  61.132 +      a simulation of the stream of water.
  61.133 +
  61.134 +   2. Play out this simulated scene and generate imagined sensory
  61.135 +      experience. This will include relevant muscle contractions, a
  61.136 +      close up view of the stream from the cat's perspective, and most
  61.137 +      importantly, the imagined feeling of water entering the
  61.138 +      mouth. The imagined sensory experience can come from a
  61.139 +      simulation of the event, but can also be pattern-matched from
  61.140 +      previous, similar embodied experience.
  61.141 +
  61.142 +   3. The action is now easily identified as drinking by the sense of
  61.143 +      taste alone. The other senses (such as the tongue moving in and
  61.144 +      out) help to give plausibility to the simulated action. Note that
  61.145 +      the sense of vision, while critical in creating the simulation,
  61.146 +      is not critical for identifying the action from the simulation.
  61.147 +
  61.148 +   For the chair examples, the process is even easier:
  61.149 +
  61.150 +    1. Align a model of your body to the person in the image.
  61.151 +
  61.152 +    2. Generate proprioceptive sensory data from this alignment.
  61.153 +  
  61.154 +    3. Use the imagined proprioceptive data as a key to lookup related
   61.155 +       sensory experience associated with that particular proprioceptive
  61.156 +       feeling.
  61.157 +
  61.158 +    4. Retrieve the feeling of your bottom resting on a surface, your
  61.159 +       knees bent, and your leg muscles relaxed.
  61.160 +
  61.161 +    5. This sensory information is consistent with the =sitting?=
  61.162 +       sensory predicate, so you (and the entity in the image) must be
  61.163 +       sitting.
  61.164 +
  61.165 +    6. There must be a chair-like object since you are sitting.
  61.166 +
  61.167 +   Empathy offers yet another alternative to the age-old AI
  61.168 +   representation question: ``What is a chair?'' --- A chair is the
  61.169 +   feeling of sitting.
  61.170 +
   61.171 +   My program, =EMPATH=, uses this empathic problem solving technique
  61.172 +   to interpret the actions of a simple, worm-like creature. 
  61.173 +   
  61.174 +   #+caption: The worm performs many actions during free play such as 
  61.175 +   #+caption: curling, wiggling, and resting.
  61.176 +   #+name: worm-intro
  61.177 +   #+ATTR_LaTeX: :width 15cm
  61.178 +   [[./images/worm-intro-white.png]]
  61.179 +
  61.180 +   #+caption: =EMPATH= recognized and classified each of these poses by
  61.181 +   #+caption: inferring the complete sensory experience from 
  61.182 +   #+caption: proprioceptive data.
  61.183 +   #+name: worm-recognition-intro
  61.184 +   #+ATTR_LaTeX: :width 15cm
  61.185 +   [[./images/worm-poses.png]]
  61.186 +   
  61.187 +   One powerful advantage of empathic problem solving is that it
  61.188 +   factors the action recognition problem into two easier problems. To
  61.189 +   use empathy, you need an /aligner/, which takes the video and a
  61.190 +   model of your body, and aligns the model with the video. Then, you
  61.191 +   need a /recognizer/, which uses the aligned model to interpret the
  61.192 +   action. The power in this method lies in the fact that you describe
   61.193 +   all actions from a body-centered viewpoint. You are less tied to
  61.194 +   the particulars of any visual representation of the actions. If you
  61.195 +   teach the system what ``running'' is, and you have a good enough
  61.196 +   aligner, the system will from then on be able to recognize running
  61.197 +   from any point of view, even strange points of view like above or
  61.198 +   underneath the runner. This is in contrast to action recognition
  61.199 +   schemes that try to identify actions using a non-embodied approach.
  61.200 +   If these systems learn about running as viewed from the side, they
  61.201 +   will not automatically be able to recognize running from any other
  61.202 +   viewpoint.
  61.203 +
  61.204 +   Another powerful advantage is that using the language of multiple
   61.205 +   body-centered rich senses to describe body-centered actions offers a
  61.206 +   massive boost in descriptive capability. Consider how difficult it
  61.207 +   would be to compose a set of HOG filters to describe the action of
  61.208 +   a simple worm-creature ``curling'' so that its head touches its
   61.209 +   tail, and then behold the simplicity of describing this action in a
  61.210 +   language designed for the task (listing \ref{grand-circle-intro}):
  61.211 +
   61.212 +   #+caption: Body-centered actions are best expressed in a body-centered 
  61.213 +   #+caption: language. This code detects when the worm has curled into a 
  61.214 +   #+caption: full circle. Imagine how you would replicate this functionality
  61.215 +   #+caption: using low-level pixel features such as HOG filters!
  61.216 +   #+name: grand-circle-intro
  61.217 +   #+attr_latex: [htpb]
  61.218 +#+begin_listing clojure
  61.219 +   #+begin_src clojure
  61.220 +(defn grand-circle?
  61.221 +  "Does the worm form a majestic circle (one end touching the other)?"
  61.222 +  [experiences]
  61.223 +  (and (curled? experiences)
  61.224 +       (let [worm-touch (:touch (peek experiences))
  61.225 +             tail-touch (worm-touch 0)
  61.226 +             head-touch (worm-touch 4)]
  61.227 +         (and (< 0.55 (contact worm-segment-bottom-tip tail-touch))
  61.228 +              (< 0.55 (contact worm-segment-top-tip    head-touch))))))
  61.229 +   #+end_src
  61.230 +   #+end_listing
  61.231 +
  61.232 +
  61.233 +**  =CORTEX= is a toolkit for building sensate creatures
  61.234 +
  61.235 +   I built =CORTEX= to be a general AI research platform for doing
  61.236 +   experiments involving multiple rich senses and a wide variety and
  61.237 +   number of creatures. I intend it to be useful as a library for many
  61.238 +   more projects than just this one. =CORTEX= was necessary to meet a
  61.239 +   need among AI researchers at CSAIL and beyond, which is that people
  61.240 +   often will invent neat ideas that are best expressed in the
  61.241 +   language of creatures and senses, but in order to explore those
  61.242 +   ideas they must first build a platform in which they can create
  61.243 +   simulated creatures with rich senses! There are many ideas that
  61.244 +   would be simple to execute (such as =EMPATH=), but attached to them
  61.245 +   is the multi-month effort to make a good creature simulator. Often,
  61.246 +   that initial investment of time proves to be too much, and the
  61.247 +   project must make do with a lesser environment.
  61.248 +
  61.249 +   =CORTEX= is well suited as an environment for embodied AI research
  61.250 +   for three reasons:
  61.251 +
  61.252 +   - You can create new creatures using Blender, a popular 3D modeling
  61.253 +     program. Each sense can be specified using special blender nodes
   61.254 +     with biologically inspired parameters. You need not write any
  61.255 +     code to create a creature, and can use a wide library of
  61.256 +     pre-existing blender models as a base for your own creatures.
  61.257 +
  61.258 +   - =CORTEX= implements a wide variety of senses, including touch,
  61.259 +     proprioception, vision, hearing, and muscle tension. Complicated
  61.260 +     senses like touch, and vision involve multiple sensory elements
  61.261 +     embedded in a 2D surface. You have complete control over the
  61.262 +     distribution of these sensor elements through the use of simple
  61.263 +     png image files. In particular, =CORTEX= implements more
  61.264 +     comprehensive hearing than any other creature simulation system
  61.265 +     available. 
  61.266 +
  61.267 +   - =CORTEX= supports any number of creatures and any number of
   61.268 +     senses. Time in =CORTEX= dilates so that the simulated creatures
   61.269 +     always perceive a perfectly smooth flow of time, regardless of
  61.270 +     the actual computational load.
  61.271 +
  61.272 +   =CORTEX= is built on top of =jMonkeyEngine3=, which is a video game
  61.273 +   engine designed to create cross-platform 3D desktop games. =CORTEX=
  61.274 +   is mainly written in clojure, a dialect of =LISP= that runs on the
  61.275 +   java virtual machine (JVM). The API for creating and simulating
  61.276 +   creatures and senses is entirely expressed in clojure, though many
  61.277 +   senses are implemented at the layer of jMonkeyEngine or below. For
  61.278 +   example, for the sense of hearing I use a layer of clojure code on
  61.279 +   top of a layer of java JNI bindings that drive a layer of =C++=
  61.280 +   code which implements a modified version of =OpenAL= to support
  61.281 +   multiple listeners. =CORTEX= is the only simulation environment
  61.282 +   that I know of that can support multiple entities that can each
  61.283 +   hear the world from their own perspective. Other senses also
  61.284 +   require a small layer of Java code. =CORTEX= also uses =bullet=, a
  61.285 +   physics simulator written in =C=.
  61.286 +
  61.287 +   #+caption: Here is the worm from above modeled in Blender, a free 
  61.288 +   #+caption: 3D-modeling program. Senses and joints are described
  61.289 +   #+caption: using special nodes in Blender.
  61.290 +   #+name: worm-recognition-intro
  61.291 +   #+ATTR_LaTeX: :width 12cm
  61.292 +   [[./images/blender-worm.png]]
  61.293 +
   61.294 +   Here are some things I anticipate that =CORTEX= might be used for:
  61.295 +
  61.296 +   - exploring new ideas about sensory integration
  61.297 +   - distributed communication among swarm creatures
  61.298 +   - self-learning using free exploration, 
  61.299 +   - evolutionary algorithms involving creature construction
   61.300 +   - exploration of exotic senses and effectors that are not possible
   61.301 +     in the real world (such as telekinesis or a semantic sense)
  61.302 +   - imagination using subworlds
  61.303 +
  61.304 +   During one test with =CORTEX=, I created 3,000 creatures each with
  61.305 +   their own independent senses and ran them all at only 1/80 real
  61.306 +   time. In another test, I created a detailed model of my own hand,
  61.307 +   equipped with a realistic distribution of touch (more sensitive at
  61.308 +   the fingertips), as well as eyes and ears, and it ran at around 1/4
  61.309 +   real time.
  61.310 +
  61.311 +#+BEGIN_LaTeX
  61.312 +   \begin{sidewaysfigure}
  61.313 +   \includegraphics[width=9.5in]{images/full-hand.png}
  61.314 +   \caption{
  61.315 +   I modeled my own right hand in Blender and rigged it with all the
  61.316 +   senses that {\tt CORTEX} supports. My simulated hand has a
  61.317 +   biologically inspired distribution of touch sensors. The senses are
  61.318 +   displayed on the right, and the simulation is displayed on the
  61.319 +   left. Notice that my hand is curling its fingers, that it can see
  61.320 +   its own finger from the eye in its palm, and that it can feel its
  61.321 +   own thumb touching its palm.}
  61.322 +   \end{sidewaysfigure}
  61.323 +#+END_LaTeX
  61.324 +
  61.325 +** Contributions
  61.326 +
  61.327 +   - I built =CORTEX=, a comprehensive platform for embodied AI
  61.328 +     experiments. =CORTEX= supports many features lacking in other
   61.329 +     systems, such as proper simulation of hearing. It is easy to create
  61.330 +     new =CORTEX= creatures using Blender, a free 3D modeling program.
  61.331 +
  61.332 +   - I built =EMPATH=, which uses =CORTEX= to identify the actions of
  61.333 +     a worm-like creature using a computational model of empathy.
  61.334 +   
  61.335 +* Building =CORTEX=
  61.336 +
  61.337 +** To explore embodiment, we need a world, body, and senses
  61.338 +
   61.339 +** Because of Time, simulation is preferable to reality
  61.340 +
  61.341 +** Video game engines are a great starting point
  61.342 +
  61.343 +** Bodies are composed of segments connected by joints
  61.344 +
  61.345 +** Eyes reuse standard video game components
  61.346 +
  61.347 +** Hearing is hard; =CORTEX= does it right
  61.348 +
  61.349 +** Touch uses hundreds of hair-like elements
  61.350 +
  61.351 +** Proprioception is the sense that makes everything ``real''
  61.352 +
  61.353 +** Muscles are both effectors and sensors
  61.354 +
  61.355 +** =CORTEX= brings complex creatures to life!
  61.356 +
   61.357 +** =CORTEX= enables many possibilities for further research
  61.358 +
  61.359 +* Empathy in a simulated worm
  61.360 +
  61.361 +  Here I develop a computational model of empathy, using =CORTEX= as a
  61.362 +  base. Empathy in this context is the ability to observe another
  61.363 +  creature and infer what sorts of sensations that creature is
  61.364 +  feeling. My empathy algorithm involves multiple phases. First is
  61.365 +  free-play, where the creature moves around and gains sensory
  61.366 +  experience. From this experience I construct a representation of the
  61.367 +  creature's sensory state space, which I call \Phi-space. Using
  61.368 +  \Phi-space, I construct an efficient function which takes the
  61.369 +  limited data that comes from observing another creature and enriches
   61.370 +  it to a full complement of imagined sensory data. I can then use the
  61.371 +  imagined sensory data to recognize what the observed creature is
  61.372 +  doing and feeling, using straightforward embodied action predicates.
   61.373 +  This is all demonstrated using a simple worm-like creature, and
  61.374 +  recognizing worm-actions based on limited data.
  61.375 +
  61.376 +  #+caption: Here is the worm with which we will be working. 
  61.377 +  #+caption: It is composed of 5 segments. Each segment has a 
  61.378 +  #+caption: pair of extensor and flexor muscles. Each of the 
  61.379 +  #+caption: worm's four joints is a hinge joint which allows 
  61.380 +  #+caption: about 30 degrees of rotation to either side. Each segment
  61.381 +  #+caption: of the worm is touch-capable and has a uniform 
  61.382 +  #+caption: distribution of touch sensors on each of its faces.
  61.383 +  #+caption: Each joint has a proprioceptive sense to detect 
  61.384 +  #+caption: relative positions. The worm segments are all the 
  61.385 +  #+caption: same except for the first one, which has a much
  61.386 +  #+caption: higher weight than the others to allow for easy 
  61.387 +  #+caption: manual motor control.
  61.388 +  #+name: basic-worm-view
  61.389 +  #+ATTR_LaTeX: :width 10cm
  61.390 +  [[./images/basic-worm-view.png]]
  61.391 +
  61.392 +  #+caption: Program for reading a worm from a blender file and 
  61.393 +  #+caption: outfitting it with the senses of proprioception, 
  61.394 +  #+caption: touch, and the ability to move, as specified in the 
  61.395 +  #+caption: blender file.
  61.396 +  #+name: get-worm
  61.397 +  #+begin_listing clojure
  61.398 +  #+begin_src clojure
  61.399 +(defn worm []
  61.400 +  (let [model (load-blender-model "Models/worm/worm.blend")]
  61.401 +    {:body (doto model (body!))
  61.402 +     :touch (touch! model)
  61.403 +     :proprioception (proprioception! model)
  61.404 +     :muscles (movement! model)}))
  61.405 +  #+end_src
  61.406 +  #+end_listing
  61.407 +
   61.408 +** Embodiment factors action recognition into manageable parts
  61.409 +
  61.410 +   Using empathy, I divide the problem of action recognition into a
   61.411 +   recognition process expressed in the language of a full complement
   61.412 +   of senses, and an imaginative process that generates full sensory
   61.413 +   data from partial sensory data. Splitting the action recognition
   61.414 +   problem in this manner greatly reduces the total amount of work to
   61.415 +   recognize actions: The imaginative process is mostly just matching
  61.416 +   previous experience, and the recognition process gets to use all
  61.417 +   the senses to directly describe any action.
  61.418 +
  61.419 +** Action recognition is easy with a full gamut of senses
  61.420 +
  61.421 +   Embodied representations using multiple senses such as touch,
   61.422 +   proprioception, and muscle tension turns out to be exceedingly
  61.423 +   efficient at describing body-centered actions. It is the ``right
  61.424 +   language for the job''. For example, it takes only around 5 lines
  61.425 +   of LISP code to describe the action of ``curling'' using embodied
  61.426 +   primitives. It takes about 10 lines to describe the seemingly
  61.427 +   complicated action of wiggling.
  61.428 +
  61.429 +   The following action predicates each take a stream of sensory
  61.430 +   experience, observe however much of it they desire, and decide
  61.431 +   whether the worm is doing the action they describe. =curled?=
  61.432 +   relies on proprioception, =resting?= relies on touch, =wiggling?=
   61.433 +   relies on a Fourier analysis of muscle contraction, and
   61.434 +   =grand-circle?= relies on touch and reuses =curled?= as a guard.
  61.435 +   
  61.436 +   #+caption: Program for detecting whether the worm is curled. This is the 
  61.437 +   #+caption: simplest action predicate, because it only uses the last frame 
  61.438 +   #+caption: of sensory experience, and only uses proprioceptive data. Even 
  61.439 +   #+caption: this simple predicate, however, is automatically frame 
  61.440 +   #+caption: independent and ignores vermopomorphic differences such as 
  61.441 +   #+caption: worm textures and colors.
  61.442 +   #+name: curled
  61.443 +   #+attr_latex: [htpb]
  61.444 +#+begin_listing clojure
  61.445 +   #+begin_src clojure
  61.446 +(defn curled?
  61.447 +  "Is the worm curled up?"
  61.448 +  [experiences]
  61.449 +  (every?
  61.450 +   (fn [[_ _ bend]]
  61.451 +     (> (Math/sin bend) 0.64))
  61.452 +   (:proprioception (peek experiences))))
  61.453 +   #+end_src
  61.454 +   #+end_listing
  61.455 +
  61.456 +   #+caption: Program for summarizing the touch information in a patch 
  61.457 +   #+caption: of skin.
  61.458 +   #+name: touch-summary
  61.459 +   #+attr_latex: [htpb]
  61.460 +
  61.461 +#+begin_listing clojure
  61.462 +   #+begin_src clojure
  61.463 +(defn contact
  61.464 +  "Determine how much contact a particular worm segment has with
  61.465 +   other objects. Returns a value between 0 and 1, where 1 is full
  61.466 +   contact and 0 is no contact."
  61.467 +  [touch-region [coords contact :as touch]]
  61.468 +  (-> (zipmap coords contact)
  61.469 +      (select-keys touch-region)
  61.470 +      (vals)
  61.471 +      (#(map first %))
  61.472 +      (average)
  61.473 +      (* 10)
  61.474 +      (- 1)
  61.475 +      (Math/abs)))
  61.476 +   #+end_src
  61.477 +   #+end_listing
  61.478 +
  61.479 +
  61.480 +   #+caption: Program for detecting whether the worm is at rest. This program
  61.481 +   #+caption: uses a summary of the tactile information from the underbelly 
  61.482 +   #+caption: of the worm, and is only true if every segment is touching the 
  61.483 +   #+caption: floor. Note that this function contains no references to 
   61.484 +   #+caption: proprioception at all.
  61.485 +   #+name: resting
  61.486 +   #+attr_latex: [htpb]
  61.487 +#+begin_listing clojure
  61.488 +   #+begin_src clojure
  61.489 +(def worm-segment-bottom (rect-region [8 15] [14 22]))
  61.490 +
  61.491 +(defn resting?
  61.492 +  "Is the worm resting on the ground?"
  61.493 +  [experiences]
  61.494 +  (every?
  61.495 +   (fn [touch-data]
  61.496 +     (< 0.9 (contact worm-segment-bottom touch-data)))
  61.497 +   (:touch (peek experiences))))
  61.498 +   #+end_src
  61.499 +   #+end_listing
  61.500 +
  61.501 +   #+caption: Program for detecting whether the worm is curled up into a 
  61.502 +   #+caption: full circle. Here the embodied approach begins to shine, as
  61.503 +   #+caption: I am able to both use a previous action predicate (=curled?=)
  61.504 +   #+caption: as well as the direct tactile experience of the head and tail.
  61.505 +   #+name: grand-circle
  61.506 +   #+attr_latex: [htpb]
  61.507 +#+begin_listing clojure
  61.508 +   #+begin_src clojure
  61.509 +(def worm-segment-bottom-tip (rect-region [15 15] [22 22]))
  61.510 +
  61.511 +(def worm-segment-top-tip (rect-region [0 15] [7 22]))
  61.512 +
  61.513 +(defn grand-circle?
  61.514 +  "Does the worm form a majestic circle (one end touching the other)?"
  61.515 +  [experiences]
  61.516 +  (and (curled? experiences)
  61.517 +       (let [worm-touch (:touch (peek experiences))
  61.518 +             tail-touch (worm-touch 0)
  61.519 +             head-touch (worm-touch 4)]
  61.520 +         (and (< 0.55 (contact worm-segment-bottom-tip tail-touch))
  61.521 +              (< 0.55 (contact worm-segment-top-tip    head-touch))))))
  61.522 +   #+end_src
  61.523 +   #+end_listing
  61.524 +
  61.525 +
  61.526 +   #+caption: Program for detecting whether the worm has been wiggling for 
   61.527 +   #+caption: the last few frames. It uses a Fourier analysis of the muscle 
   61.528 +   #+caption: contractions of the worm's tail to determine wiggling. This is 
   61.529 +   #+caption: significant because there is no particular frame that clearly 
  61.530 +   #+caption: indicates that the worm is wiggling --- only when multiple frames 
  61.531 +   #+caption: are analyzed together is the wiggling revealed. Defining 
  61.532 +   #+caption: wiggling this way also gives the worm an opportunity to learn 
  61.533 +   #+caption: and recognize ``frustrated wiggling'', where the worm tries to 
  61.534 +   #+caption: wiggle but can't. Frustrated wiggling is very visually different 
  61.535 +   #+caption: from actual wiggling, but this definition gives it to us for free.
  61.536 +   #+name: wiggling
  61.537 +   #+attr_latex: [htpb]
  61.538 +#+begin_listing clojure
  61.539 +   #+begin_src clojure
  61.540 +(defn fft [nums]
  61.541 +  (map
  61.542 +   #(.getReal %)
  61.543 +   (.transform
  61.544 +    (FastFourierTransformer. DftNormalization/STANDARD)
  61.545 +    (double-array nums) TransformType/FORWARD)))
  61.546 +
  61.547 +(def indexed (partial map-indexed vector))
  61.548 +
  61.549 +(defn max-indexed [s]
  61.550 +  (first (sort-by (comp - second) (indexed s))))
  61.551 +
  61.552 +(defn wiggling?
  61.553 +  "Is the worm wiggling?"
  61.554 +  [experiences]
  61.555 +  (let [analysis-interval 0x40]
  61.556 +    (when (> (count experiences) analysis-interval)
  61.557 +      (let [a-flex 3
  61.558 +            a-ex   2
  61.559 +            muscle-activity
  61.560 +            (map :muscle (vector:last-n experiences analysis-interval))
  61.561 +            base-activity
  61.562 +            (map #(- (% a-flex) (% a-ex)) muscle-activity)]
  61.563 +        (= 2
  61.564 +           (first
  61.565 +            (max-indexed
  61.566 +             (map #(Math/abs %)
  61.567 +                  (take 20 (fft base-activity))))))))))
  61.568 +   #+end_src
  61.569 +   #+end_listing
  61.570 +
  61.571 +   With these action predicates, I can now recognize the actions of
  61.572 +   the worm while it is moving under my control and I have access to
  61.573 +   all the worm's senses.
  61.574 +
  61.575 +   #+caption: Use the action predicates defined earlier to report on 
  61.576 +   #+caption: what the worm is doing while in simulation.
  61.577 +   #+name: report-worm-activity
  61.578 +   #+attr_latex: [htpb]
  61.579 +#+begin_listing clojure
  61.580 +   #+begin_src clojure
  61.581 +(defn debug-experience
  61.582 +  [experiences text]
  61.583 +  (cond
  61.584 +   (grand-circle? experiences) (.setText text "Grand Circle")
  61.585 +   (curled? experiences)       (.setText text "Curled")
  61.586 +   (wiggling? experiences)     (.setText text "Wiggling")
  61.587 +   (resting? experiences)      (.setText text "Resting")))
  61.588 +   #+end_src
  61.589 +   #+end_listing
  61.590 +
  61.591 +   #+caption: Using =debug-experience=, the body-centered predicates
  61.592 +   #+caption: work together to classify the behaviour of the worm. 
  61.593 +   #+caption: the predicates are operating with access to the worm's
  61.594 +   #+caption: full sensory data.
  61.595 +   #+name: basic-worm-view
  61.596 +   #+ATTR_LaTeX: :width 10cm
  61.597 +   [[./images/worm-identify-init.png]]
  61.598 +
  61.599 +   These action predicates satisfy the recognition requirement of an
  61.600 +   empathic recognition system. There is power in the simplicity of
  61.601 +   the action predicates. They describe their actions without getting
  61.602 +   confused in visual details of the worm. Each one is frame
   61.603 +   independent, but more than that, they are each independent of
  61.604 +   irrelevant visual details of the worm and the environment. They
  61.605 +   will work regardless of whether the worm is a different color or
   61.606 +   heavily textured, or if the environment has strange lighting.
  61.607 +
  61.608 +   The trick now is to make the action predicates work even when the
  61.609 +   sensory data on which they depend is absent. If I can do that, then
  61.610 +   I will have gained much,
  61.611 +
  61.612 +** \Phi-space describes the worm's experiences
  61.613 +   
  61.614 +   As a first step towards building empathy, I need to gather all of
  61.615 +   the worm's experiences during free play. I use a simple vector to
  61.616 +   store all the experiences. 
  61.617 +
  61.618 +   Each element of the experience vector exists in the vast space of
  61.619 +   all possible worm-experiences. Most of this vast space is actually
  61.620 +   unreachable due to physical constraints of the worm's body. For
  61.621 +   example, the worm's segments are connected by hinge joints that put
  61.622 +   a practical limit on the worm's range of motions without limiting
  61.623 +   its degrees of freedom. Some groupings of senses are impossible;
  61.624 +   the worm can not be bent into a circle so that its ends are
  61.625 +   touching and at the same time not also experience the sensation of
  61.626 +   touching itself.
  61.627 +
  61.628 +   As the worm moves around during free play and its experience vector
  61.629 +   grows larger, the vector begins to define a subspace which is all
   61.630 +   the sensations the worm can practically experience during normal
  61.631 +   operation. I call this subspace \Phi-space, short for
  61.632 +   physical-space. The experience vector defines a path through
  61.633 +   \Phi-space. This path has interesting properties that all derive
  61.634 +   from physical embodiment. The proprioceptive components are
  61.635 +   completely smooth, because in order for the worm to move from one
  61.636 +   position to another, it must pass through the intermediate
  61.637 +   positions. The path invariably forms loops as actions are repeated.
  61.638 +   Finally and most importantly, proprioception actually gives very
  61.639 +   strong inference about the other senses. For example, when the worm
  61.640 +   is flat, you can infer that it is touching the ground and that its
  61.641 +   muscles are not active, because if the muscles were active, the
  61.642 +   worm would be moving and would not be perfectly flat. In order to
  61.643 +   stay flat, the worm has to be touching the ground, or it would
  61.644 +   again be moving out of the flat position due to gravity. If the
  61.645 +   worm is positioned in such a way that it interacts with itself,
  61.646 +   then it is very likely to be feeling the same tactile feelings as
  61.647 +   the last time it was in that position, because it has the same body
  61.648 +   as then. If you observe multiple frames of proprioceptive data,
  61.649 +   then you can become increasingly confident about the exact
  61.650 +   activations of the worm's muscles, because it generally takes a
  61.651 +   unique combination of muscle contractions to transform the worm's
  61.652 +   body along a specific path through \Phi-space.
  61.653 +
  61.654 +   There is a simple way of taking \Phi-space and the total ordering
   61.655 +   provided by an experience vector and reliably inferring the rest of
  61.656 +   the senses.
  61.657 +
   61.658 +** Empathy is the process of tracing through \Phi-space 
  61.659 +
  61.660 +   Here is the core of a basic empathy algorithm, starting with an
  61.661 +   experience vector:
  61.662 +
  61.663 +   First, group the experiences into tiered proprioceptive bins. I use
  61.664 +   powers of 10 and 3 bins, and the smallest bin has an approximate
  61.665 +   size of 0.001 radians in all proprioceptive dimensions.
  61.666 +   
  61.667 +   Then, given a sequence of proprioceptive input, generate a set of
  61.668 +   matching experience records for each input, using the tiered
  61.669 +   proprioceptive bins. 
  61.670 +
  61.671 +   Finally, to infer sensory data, select the longest consecutive chain
  61.672 +   of experiences. Consecutive experience means that the experiences
  61.673 +   appear next to each other in the experience vector.
  61.674 +
  61.675 +   This algorithm has three advantages: 
  61.676 +
  61.677 +   1. It's simple
  61.678 +
  61.679 +   2. It's very fast -- retrieving possible interpretations takes
  61.680 +      constant time. Tracing through chains of interpretations takes
  61.681 +      time proportional to the average number of experiences in a
  61.682 +      proprioceptive bin. Redundant experiences in \Phi-space can be
  61.683 +      merged to save computation.
  61.684 +
  61.685 +   3. It protects from wrong interpretations of transient ambiguous
  61.686 +      proprioceptive data. For example, if the worm is flat for just
  61.687 +      an instant, this flatness will not be interpreted as implying
  61.688 +      that the worm has its muscles relaxed, since the flatness is
  61.689 +      part of a longer chain which includes a distinct pattern of
  61.690 +      muscle activation. Markov chains or other memoryless statistical
  61.691 +      models that operate on individual frames may very well make this
  61.692 +      mistake.
  61.693 +
  61.694 +   #+caption: Program to convert an experience vector into a 
  61.695 +   #+caption: proprioceptively binned lookup function.
  61.696 +   #+name: bin
  61.697 +   #+attr_latex: [htpb]
  61.698 +#+begin_listing clojure
  61.699 +   #+begin_src clojure
  61.700 +(defn bin [digits]
  61.701 +  (fn [angles]
  61.702 +    (->> angles
  61.703 +         (flatten)
  61.704 +         (map (juxt #(Math/sin %) #(Math/cos %)))
  61.705 +         (flatten)
  61.706 +         (mapv #(Math/round (* % (Math/pow 10 (dec digits))))))))
  61.707 +
  61.708 +(defn gen-phi-scan 
  61.709 +  "Nearest-neighbors with binning. Only returns a result if
  61.710 +   the proprioceptive data is within 10% of a previously recorded
  61.711 +   result in all dimensions."
  61.712 +  [phi-space]
  61.713 +  (let [bin-keys (map bin [3 2 1])
  61.714 +        bin-maps
  61.715 +        (map (fn [bin-key]
  61.716 +               (group-by
  61.717 +                (comp bin-key :proprioception phi-space)
  61.718 +                (range (count phi-space)))) bin-keys)
  61.719 +        lookups (map (fn [bin-key bin-map]
  61.720 +                       (fn [proprio] (bin-map (bin-key proprio))))
  61.721 +                     bin-keys bin-maps)]
  61.722 +    (fn lookup [proprio-data]
  61.723 +      (set (some #(% proprio-data) lookups)))))
  61.724 +   #+end_src
  61.725 +   #+end_listing
  61.726 +
  61.727 +   #+caption: =longest-thread= finds the longest path of consecutive 
  61.728 +   #+caption: experiences to explain proprioceptive worm data.
  61.729 +   #+name: phi-space-history-scan
  61.730 +   #+ATTR_LaTeX: :width 10cm
  61.731 +   [[./images/aurellem-gray.png]]
  61.732 +
  61.733 +   =longest-thread= infers sensory data by stitching together pieces
  61.734 +   from previous experience. It prefers longer chains of previous
  61.735 +   experience to shorter ones. For example, during training the worm
  61.736 +   might rest on the ground for one second before it performs its
  61.737 +   exercises. If during recognition the worm rests on the ground for
  61.738 +   five seconds, =longest-thread= will accommodate this five second
  61.739 +   rest period by looping the one second rest chain five times.
  61.740 +
  61.741 +   =longest-thread= takes time proportional to the average number of
  61.742 +   entries in a proprioceptive bin, because for each element in the
  61.743 +   starting bin it performs a series of set lookups in the preceding
  61.744 +   bins. If the total history is limited, then this is only a constant
  61.745 +   multiple times the number of entries in the starting bin. This
  61.746 +   analysis also applies even if the action requires multiple longest
  61.747 +   chains -- it's still the average number of entries in a
  61.748 +   proprioceptive bin times the desired chain length. Because
  61.749 +   =longest-thread= is so efficient and simple, I can interpret
  61.750 +   worm-actions in real time.
  61.751 +
  61.752 +   #+caption: Program to calculate empathy by tracing though \Phi-space
  61.753 +   #+caption: and finding the longest (ie. most coherent) interpretation
  61.754 +   #+caption: of the data.
  61.755 +   #+name: longest-thread
  61.756 +   #+attr_latex: [htpb]
  61.757 +#+begin_listing clojure
  61.758 +   #+begin_src clojure
  61.759 +(defn longest-thread
  61.760 +  "Find the longest thread from phi-index-sets. The index sets should
  61.761 +   be ordered from most recent to least recent."
  61.762 +  [phi-index-sets]
  61.763 +  (loop [result '()
  61.764 +         [thread-bases & remaining :as phi-index-sets] phi-index-sets]
  61.765 +    (if (empty? phi-index-sets)
  61.766 +      (vec result)
  61.767 +      (let [threads
  61.768 +            (for [thread-base thread-bases]
  61.769 +              (loop [thread (list thread-base)
  61.770 +                     remaining remaining]
  61.771 +                (let [next-index (dec (first thread))]
  61.772 +                  (cond (empty? remaining) thread
  61.773 +                        (contains? (first remaining) next-index)
  61.774 +                        (recur
  61.775 +                         (cons next-index thread) (rest remaining))
  61.776 +                        :else thread))))
  61.777 +            longest-thread
  61.778 +            (reduce (fn [thread-a thread-b]
  61.779 +                      (if (> (count thread-a) (count thread-b))
  61.780 +                        thread-a thread-b))
  61.781 +                    '(nil)
  61.782 +                    threads)]
  61.783 +        (recur (concat longest-thread result)
  61.784 +               (drop (count longest-thread) phi-index-sets))))))
  61.785 +   #+end_src
  61.786 +   #+end_listing
  61.787 +
  61.788 +   There is one final piece, which is to replace missing sensory data
  61.789 +   with a best-guess estimate. While I could fill in missing data by
  61.790 +   using a gradient over the closest known sensory data points,
  61.791 +   averages can be misleading. It is certainly possible to create an
  61.792 +   impossible sensory state by averaging two possible sensory states.
  61.793 +   Therefore, I simply replicate the most recent sensory experience to
  61.794 +   fill in the gaps.
  61.795 +
  61.796 +   #+caption: Fill in blanks in sensory experience by replicating the most 
  61.797 +   #+caption: recent experience.
  61.798 +   #+name: infer-nils
  61.799 +   #+attr_latex: [htpb]
  61.800 +#+begin_listing clojure
  61.801 +   #+begin_src clojure
  61.802 +(defn infer-nils
  61.803 +  "Replace nils with the next available non-nil element in the
  61.804 +   sequence, or barring that, 0."
  61.805 +  [s]
  61.806 +  (loop [i (dec (count s))
  61.807 +         v (transient s)]
  61.808 +    (if (zero? i) (persistent! v)
  61.809 +        (if-let [cur (v i)]
  61.810 +          (if (get v (dec i) 0)
  61.811 +            (recur (dec i) v)
  61.812 +            (recur (dec i) (assoc! v (dec i) cur)))
  61.813 +          (recur i (assoc! v i 0))))))
  61.814 +   #+end_src
  61.815 +   #+end_listing
  61.816 +  
  61.817 +** Efficient action recognition with =EMPATH=
  61.818 +   
  61.819 +   To use =EMPATH= with the worm, I first need to gather a set of
  61.820 +   experiences from the worm that includes the actions I want to
  61.821 +   recognize. The =generate-phi-space= program (listing
  61.822 +   \ref{generate-phi-space}) runs the worm through a series of
  61.823 +   exercises and gathers those experiences into a vector. The
  61.824 +   =do-all-the-things= program is a routine expressed in a simple
  61.825 +   muscle contraction script language for automated worm control. It
  61.826 +   causes the worm to rest, curl, and wiggle over about 700 frames
  61.827 +   (approx. 11 seconds).
  61.828 +
  61.829 +   #+caption: Program to gather the worm's experiences into a vector for 
  61.830 +   #+caption: further processing. The =motor-control-program= line uses
  61.831 +   #+caption: a motor control script that causes the worm to execute a series
  61.832 +   #+caption: of ``exercises'' that include all the action predicates.
  61.833 +   #+name: generate-phi-space
  61.834 +   #+attr_latex: [htpb]
  61.835 +#+begin_listing clojure 
  61.836 +   #+begin_src clojure
  61.837 +(def do-all-the-things 
  61.838 +  (concat
  61.839 +   curl-script
  61.840 +   [[300 :d-ex 40]
  61.841 +    [320 :d-ex 0]]
  61.842 +   (shift-script 280 (take 16 wiggle-script))))
  61.843 +
  61.844 +(defn generate-phi-space []
  61.845 +  (let [experiences (atom [])]
  61.846 +    (run-world
  61.847 +     (apply-map 
  61.848 +      worm-world
  61.849 +      (merge
  61.850 +       (worm-world-defaults)
  61.851 +       {:end-frame 700
  61.852 +        :motor-control
  61.853 +        (motor-control-program worm-muscle-labels do-all-the-things)
  61.854 +        :experiences experiences})))
  61.855 +    @experiences))
  61.856 +   #+end_src
  61.857 +   #+end_listing
  61.858 +
  61.859 +   #+caption: Use longest thread and a phi-space generated from a short
  61.860 +   #+caption: exercise routine to interpret actions during free play.
  61.861 +   #+name: empathy-debug
  61.862 +   #+attr_latex: [htpb]
  61.863 +#+begin_listing clojure
  61.864 +   #+begin_src clojure
  61.865 +(defn init []
  61.866 +  (def phi-space (generate-phi-space))
  61.867 +  (def phi-scan (gen-phi-scan phi-space)))
  61.868 +
  61.869 +(defn empathy-demonstration []
  61.870 +  (let [proprio (atom ())]
  61.871 +    (fn
  61.872 +      [experiences text]
  61.873 +      (let [phi-indices (phi-scan (:proprioception (peek experiences)))]
  61.874 +        (swap! proprio (partial cons phi-indices))
  61.875 +        (let [exp-thread (longest-thread (take 300 @proprio))
  61.876 +              empathy (mapv phi-space (infer-nils exp-thread))]
  61.877 +          (println-repl (vector:last-n exp-thread 22))
  61.878 +          (cond
  61.879 +           (grand-circle? empathy) (.setText text "Grand Circle")
  61.880 +           (curled? empathy)       (.setText text "Curled")
  61.881 +           (wiggling? empathy)     (.setText text "Wiggling")
  61.882 +           (resting? empathy)      (.setText text "Resting")
  61.883 +           :else                       (.setText text "Unknown")))))))
  61.884 +
  61.885 +(defn empathy-experiment [record]
  61.886 +  (.start (worm-world :experience-watch (debug-experience-phi)
  61.887 +                      :record record :worm worm*)))
  61.888 +   #+end_src
  61.889 +   #+end_listing
  61.890 +   
  61.891 +   The result of running =empathy-experiment= is that the system is
  61.892 +   generally able to interpret worm actions using the action-predicates
  61.893 +   on simulated sensory data just as well as with actual data. Figure
  61.894 +   \ref{empathy-debug-image} was generated using =empathy-experiment=:
  61.895 +
  61.896 +  #+caption: From only proprioceptive data, =EMPATH= was able to infer 
  61.897 +  #+caption: the complete sensory experience and classify four poses
  61.898 +  #+caption: (The last panel shows a composite image of \emph{wiggling}, 
  61.899 +  #+caption: a dynamic pose.)
  61.900 +  #+name: empathy-debug-image
  61.901 +  #+ATTR_LaTeX: :width 10cm :placement [H]
  61.902 +  [[./images/empathy-1.png]]
  61.903 +
  61.904 +  One way to measure the performance of =EMPATH= is to compare the
  61.905 +  suitability of the imagined sense experience to trigger the same
  61.906 +  action predicates as the real sensory experience. 
  61.907 +  
  61.908 +   #+caption: Determine how closely empathy approximates actual 
  61.909 +   #+caption: sensory data.
  61.910 +   #+name: test-empathy-accuracy
  61.911 +   #+attr_latex: [htpb]
  61.912 +#+begin_listing clojure
  61.913 +   #+begin_src clojure
  61.914 +(def worm-action-label
  61.915 +  (juxt grand-circle? curled? wiggling?))
  61.916 +
  61.917 +(defn compare-empathy-with-baseline [matches]
  61.918 +  (let [proprio (atom ())]
  61.919 +    (fn
  61.920 +      [experiences text]
  61.921 +      (let [phi-indices (phi-scan (:proprioception (peek experiences)))]
  61.922 +        (swap! proprio (partial cons phi-indices))
  61.923 +        (let [exp-thread (longest-thread (take 300 @proprio))
  61.924 +              empathy (mapv phi-space (infer-nils exp-thread))
  61.925 +              experience-matches-empathy
  61.926 +              (= (worm-action-label experiences)
  61.927 +                 (worm-action-label empathy))]
  61.928 +          (println-repl experience-matches-empathy)
  61.929 +          (swap! matches #(conj % experience-matches-empathy)))))))
  61.930 +              
  61.931 +(defn accuracy [v]
  61.932 +  (float (/ (count (filter true? v)) (count v))))
  61.933 +
  61.934 +(defn test-empathy-accuracy []
  61.935 +  (let [res (atom [])]
  61.936 +    (run-world
  61.937 +     (worm-world :experience-watch
  61.938 +                 (compare-empathy-with-baseline res)
  61.939 +                 :worm worm*))
  61.940 +    (accuracy @res)))
  61.941 +   #+end_src
  61.942 +   #+end_listing
  61.943 +
  61.944 +  Running =test-empathy-accuracy= using the very short exercise
  61.945 +  program defined in listing \ref{generate-phi-space}, and then doing
  61.946 +  a similar pattern of activity manually yields an accuracy of around
  61.947 +  73%. This is based on very limited worm experience. By training the
  61.948 +  worm for longer, the accuracy dramatically improves.
  61.949 +
  61.950 +   #+caption: Program to generate \Phi-space using manual training.
  61.951 +   #+name: manual-phi-space
  61.952 +   #+attr_latex: [htpb]
  61.953 +   #+begin_listing clojure
  61.954 +   #+begin_src clojure
  61.955 +(defn init-interactive []
  61.956 +  (def phi-space
  61.957 +    (let [experiences (atom [])]
  61.958 +      (run-world
  61.959 +       (apply-map 
  61.960 +        worm-world
  61.961 +        (merge
  61.962 +         (worm-world-defaults)
  61.963 +         {:experiences experiences})))
  61.964 +      @experiences))
  61.965 +  (def phi-scan (gen-phi-scan phi-space)))
  61.966 +   #+end_src
  61.967 +   #+end_listing
  61.968 +
  61.969 +  After about 1 minute of manual training, I was able to achieve 95%
  61.970 +  accuracy on manual testing of the worm using =init-interactive= and
  61.971 +  =test-empathy-accuracy=. The majority of errors are near the
  61.972 +  boundaries of transitioning from one type of action to another.
  61.973 +  During these transitions the exact label for the action is more open
  61.974 +  to interpretation, and disagreement between empathy and experience
  61.975 +  is more excusable.
  61.976 +
  61.977 +** Digression: bootstrapping touch using free exploration
  61.978 +
  61.979 +   In the previous section I showed how to compute actions in terms of
  61.980 +   body-centered predicates which relied on average touch activation of
  61.981 +   pre-defined regions of the worm's skin. What if, instead of receiving
  61.982 +   touch pre-grouped into the six faces of each worm segment, the true
  61.983 +   topology of the worm's skin was unknown? This is more similar to how
  61.984 +   a nerve fiber bundle might be arranged. While two fibers that are
  61.985 +   close in a nerve bundle /might/ correspond to two touch sensors that
  61.986 +   are close together on the skin, the process of taking a complicated
  61.987 +   surface and forcing it into essentially a circle requires some cuts
  61.988 +   and rearrangements.
  61.989 +   
  61.990 +   In this section I show how to automatically learn the skin-topology of
  61.991 +   a worm segment by free exploration. As the worm rolls around on the
  61.992 +   floor, large sections of its surface get activated. If the worm has
  61.993 +   stopped moving, then whatever region of skin that is touching the
  61.994 +   floor is probably an important region, and should be recorded.
  61.995 +   
  61.996 +   #+caption: Program to detect whether the worm is in a resting state 
  61.997 +   #+caption: with one face touching the floor.
  61.998 +   #+name: pure-touch
  61.999 +   #+begin_listing clojure
 61.1000 +   #+begin_src clojure
 61.1001 +(def full-contact [(float 0.0) (float 0.1)])
 61.1002 +
 61.1003 +(defn pure-touch?
 61.1004 +  "This is worm specific code to determine if a large region of touch
 61.1005 +   sensors is either all on or all off."
 61.1006 +  [[coords touch :as touch-data]]
 61.1007 +  (= (set (map first touch)) (set full-contact)))
 61.1008 +   #+end_src
 61.1009 +   #+end_listing
 61.1010 +
 61.1011 +   After collecting these important regions, there will be many nearly
 61.1012 +   similar touch regions. While for some purposes the subtle
 61.1013 +   differences between these regions will be important, for my
 61.1014 +   purposes I collapse them into mostly non-overlapping sets using
 61.1015 +   =remove-similar= in listing \ref{remove-similiar}.
 61.1016 +
 61.1017 +   #+caption: Program to take a list of sets of points and ``collapse them''
 61.1018 +   #+caption: so that the remaining sets in the list are significantly 
 61.1019 +   #+caption: different from each other. Prefer smaller sets to larger ones.
 61.1020 +   #+name: remove-similiar
 61.1021 +   #+begin_listing clojure
 61.1022 +   #+begin_src clojure
 61.1023 +(defn remove-similar
 61.1024 +  [coll]
 61.1025 +  (loop [result () coll (sort-by (comp - count) coll)]
 61.1026 +    (if (empty? coll) result
 61.1027 +        (let  [[x & xs] coll
 61.1028 +               c (count x)]
 61.1029 +          (if (some
 61.1030 +               (fn [other-set]
 61.1031 +                 (let [oc (count other-set)]
 61.1032 +                   (< (- (count (union other-set x)) c) (* oc 0.1))))
 61.1033 +               xs)
 61.1034 +            (recur result xs)
 61.1035 +            (recur (cons x result) xs))))))
 61.1036 +   #+end_src
 61.1037 +   #+end_listing
 61.1038 +
 61.1039 +   Actually running this simulation is easy given =CORTEX='s facilities.
 61.1040 +
 61.1041 +   #+caption: Collect experiences while the worm moves around. Filter the touch 
 61.1042 +   #+caption: sensations by stable ones, collapse similar ones together, 
 61.1043 +   #+caption: and report the regions learned.
 61.1044 +   #+name: learn-touch
 61.1045 +   #+begin_listing clojure
 61.1046 +   #+begin_src clojure
 61.1047 +(defn learn-touch-regions []
 61.1048 +  (let [experiences (atom [])
 61.1049 +        world (apply-map
 61.1050 +               worm-world
 61.1051 +               (assoc (worm-segment-defaults)
 61.1052 +                 :experiences experiences))]
 61.1053 +    (run-world world)
 61.1054 +    (->>
 61.1055 +     @experiences
 61.1056 +     (drop 175)
 61.1057 +     ;; access the single segment's touch data
 61.1058 +     (map (comp first :touch))
 61.1059 +     ;; only deal with "pure" touch data to determine surfaces
 61.1060 +     (filter pure-touch?)
 61.1061 +     ;; associate coordinates with touch values
 61.1062 +     (map (partial apply zipmap))
 61.1063 +     ;; select those regions where contact is being made
 61.1064 +     (map (partial group-by second))
 61.1065 +     (map #(get % full-contact))
 61.1066 +     (map (partial map first))
 61.1067 +     ;; remove redundant/subset regions
 61.1068 +     (map set)
 61.1069 +     remove-similar)))
 61.1070 +
 61.1071 +(defn learn-and-view-touch-regions []
 61.1072 +  (map view-touch-region
 61.1073 +       (learn-touch-regions)))
 61.1074 +   #+end_src
 61.1075 +   #+end_listing
 61.1076 +
 61.1077 +   The only thing remaining to define is the particular motion the worm
 61.1078 +   must take. I accomplish this with a simple motor control program.
 61.1079 +
 61.1080 +   #+caption: Motor control program for making the worm roll on the ground.
 61.1081 +   #+caption: This could also be replaced with random motion.
 61.1082 +   #+name: worm-roll
 61.1083 +   #+begin_listing clojure
 61.1084 +   #+begin_src clojure
 61.1085 +(defn touch-kinesthetics []
 61.1086 +  [[170 :lift-1 40]
 61.1087 +   [190 :lift-1 19]
 61.1088 +   [206 :lift-1  0]
 61.1089 +
 61.1090 +   [400 :lift-2 40]
 61.1091 +   [410 :lift-2  0]
 61.1092 +
 61.1093 +   [570 :lift-2 40]
 61.1094 +   [590 :lift-2 21]
 61.1095 +   [606 :lift-2  0]
 61.1096 +
 61.1097 +   [800 :lift-1 30]
 61.1098 +   [809 :lift-1 0]
 61.1099 +
 61.1100 +   [900 :roll-2 40]
 61.1101 +   [905 :roll-2 20]
 61.1102 +   [910 :roll-2  0]
 61.1103 +
 61.1104 +   [1000 :roll-2 40]
 61.1105 +   [1005 :roll-2 20]
 61.1106 +   [1010 :roll-2  0]
 61.1107 +   
 61.1108 +   [1100 :roll-2 40]
 61.1109 +   [1105 :roll-2 20]
 61.1110 +   [1110 :roll-2  0]
 61.1111 +   ])
 61.1112 +   #+end_src
 61.1113 +   #+end_listing
 61.1114 +
 61.1115 +
 61.1116 +   #+caption: The small worm rolls around on the floor, driven
 61.1117 +   #+caption: by the motor control program in listing \ref{worm-roll}.
 61.1118 +   #+name: worm-roll-image
 61.1119 +   #+ATTR_LaTeX: :width 12cm
 61.1120 +   [[./images/worm-roll.png]]
 61.1121 +
 61.1122 +
 61.1123 +   #+caption: After completing its adventures, the worm now knows 
 61.1124 +   #+caption: how its touch sensors are arranged along its skin. These 
 61.1125 +   #+caption: are the regions that were deemed important by 
 61.1126 +   #+caption: =learn-touch-regions=. Note that the worm has discovered
 61.1127 +   #+caption: that it has six sides.
 61.1128 +   #+name: worm-touch-map
 61.1129 +   #+ATTR_LaTeX: :width 12cm
 61.1130 +   [[./images/touch-learn.png]]
 61.1131 +
 61.1132 +   While simple, =learn-touch-regions= exploits regularities in both
 61.1133 +   the worm's physiology and the worm's environment to correctly
 61.1134 +   deduce that the worm has six sides. Note that =learn-touch-regions=
 61.1135 +   would work just as well even if the worm's touch sense data were
 61.1136 +   completely scrambled. The cross shape is just for convenience. This
 61.1137 +   example justifies the use of pre-defined touch regions in =EMPATH=.
 61.1138 +
 61.1139 +* Contributions
 61.1140 +  
 61.1141 +  I created =CORTEX=, a complete environment for creating simulated
 61.1142 +  creatures. Creatures can use biologically inspired senses including
 61.1143 +  touch, proprioception, hearing, vision, and muscle tension. Each
 61.1144 +  sense has a uniform API that is well documented. =CORTEX= comes with
 61.1145 +  multiple example creatures and a large test suite. You can create
 61.1146 +  new creatures using blender, a free 3D modeling tool. I hope that
 61.1147 +  =CORTEX= will prove useful for research ranging from distributed
 61.1148 +  swarm creature simulation to further research in sensory
 61.1149 +  integration. 
 61.1150 +
 61.1151 +
 61.1152 +
 61.1153 +# An anatomical joke:
 61.1154 +# - Training
 61.1155 +# - Skeletal imitation
 61.1156 +# - Sensory fleshing-out
 61.1157 +# - Classification
    62.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    62.2 +++ b/thesis/cover.tex	Thu Mar 27 17:57:01 2014 -0400
    62.3 @@ -0,0 +1,138 @@
    62.4 +% -*-latex-*-
    62.5 +% 
    62.6 +% For questions, comments, concerns or complaints:
    62.7 +% thesis@mit.edu
    62.8 +% 
    62.9 +%
   62.10 +% $Log: cover.tex,v $
   62.11 +% Revision 1.8  2008/05/13 15:02:15  jdreed
   62.12 +% Degree month is June, not May.  Added note about prevdegrees.
   62.13 +% Arthur Smith's title updated
   62.14 +%
   62.15 +% Revision 1.7  2001/02/08 18:53:16  boojum
   62.16 +% changed some \newpages to \cleardoublepages
   62.17 +%
   62.18 +% Revision 1.6  1999/10/21 14:49:31  boojum
   62.19 +% changed comment referring to documentstyle
   62.20 +%
   62.21 +% Revision 1.5  1999/10/21 14:39:04  boojum
   62.22 +% *** empty log message ***
   62.23 +%
   62.24 +% Revision 1.4  1997/04/18  17:54:10  othomas
   62.25 +% added page numbers on abstract and cover, and made 1 abstract
   62.26 +% page the default rather than 2.  (anne hunter tells me this
   62.27 +% is the new institute standard.)
   62.28 +%
   62.29 +% Revision 1.4  1997/04/18  17:54:10  othomas
   62.30 +% added page numbers on abstract and cover, and made 1 abstract
   62.31 +% page the default rather than 2.  (anne hunter tells me this
   62.32 +% is the new institute standard.)
   62.33 +%
   62.34 +% Revision 1.3  93/05/17  17:06:29  starflt
   62.35 +% Added acknowledgements section (suggested by tompalka)
   62.36 +% 
   62.37 +% Revision 1.2  92/04/22  13:13:13  epeisach
   62.38 +% Fixes for 1991 course 6 requirements
   62.39 +% Phrase "and to grant others the right to do so" has been added to 
   62.40 +% permission clause
   62.41 +% Second copy of abstract is not counted as separate pages so numbering works
   62.42 +% out
   62.43 +% 
   62.44 +% Revision 1.1  92/04/22  13:08:20  epeisach
   62.45 +
   62.46 +% NOTE:
   62.47 +% These templates make an effort to conform to the MIT Thesis specifications,
   62.48 +% however the specifications can change.  We recommend that you verify the
   62.49 +% layout of your title page with your thesis advisor and/or the MIT 
   62.50 +% Libraries before printing your final copy.
   62.51 +\title{Solving Problems using Embodiment \& Empathy}
   62.52 +\author{Robert Louis M\raisebox{\depth}{\small \underline{\underline{c}}}Intyre}
   62.53 +%\author{Robert McIntyre}
   62.54 +
   62.55 +
   62.56 +
   62.57 +% If you wish to list your previous degrees on the cover page, use the 
   62.58 +% previous degrees command:
   62.59 +%       \prevdegrees{A.A., Harvard University (1985)}
   62.60 +% You can use the \\ command to list multiple previous degrees
   62.61 +%       \prevdegrees{B.S., University of California (1978) \\
   62.62 +%                    S.M., Massachusetts Institute of Technology (1981)}
   62.63 +\department{Department of Electrical Engineering and Computer Science}
   62.64 +
   62.65 +% If the thesis is for two degrees simultaneously, list them both
   62.66 +% separated by \and like this:
   62.67 +% \degree{Doctor of Philosophy \and Master of Science}
   62.68 +\degree{Master of Engineering in Electrical Engineering and Computer
   62.69 +  Science}
   62.70 +
   62.71 +% As of the 2007-08 academic year, valid degree months are September, 
   62.72 +% February, or June.  The default is June.
   62.73 +\degreemonth{June}
   62.74 +\degreeyear{2014}
   62.75 +\thesisdate{May 23, 2014}
   62.76 +
   62.77 +%% By default, the thesis will be copyrighted to MIT.  If you need to copyright
   62.78 +%% the thesis to yourself, just specify the `vi' documentclass option.  If for
   62.79 +%% some reason you want to exactly specify the copyright notice text, you can
   62.80 +%% use the \copyrightnoticetext command.  
   62.81 +%\copyrightnoticetext{\copyright IBM, 1990.  Do not open till Xmas.}
   62.82 +
   62.83 +% If there is more than one supervisor, use the \supervisor command
   62.84 +% once for each.
   62.85 +\supervisor{Patrick H. Winston}{Ford Professor of Artificial
   62.86 +  Intelligence and Computer Science}
   62.87 +
   62.88 +% This is the department committee chairman, not the thesis committee
   62.89 +% chairman.  You should replace this with your Department's Committee
   62.90 +% Chairman.
   62.91 +\chairman{Prof. Albert R. Meyer}{Chairman, Masters of Engineering
   62.92 +  Thesis Committee}
   62.93 +
   62.94 +% Make the titlepage based on the above information.  If you need
   62.95 +% something special and can't use the standard form, you can specify
   62.96 +% the exact text of the titlepage yourself.  Put it in a titlepage
   62.97 +% environment and leave blank lines where you want vertical space.
   62.98 +% The spaces will be adjusted to fill the entire page.  The dotted
   62.99 +% lines for the signatures are made with the \signature command.
  62.100 +\maketitle
  62.101 +
  62.102 +% The abstractpage environment sets up everything on the page except
  62.103 +% the text itself.  The title and other header material are put at the
  62.104 +% top of the page, and the supervisors are listed at the bottom.  A
  62.105 +% new page is begun both before and after.  Of course, an abstract may
  62.106 +% be more than one page itself.  If you need more control over the
  62.107 +% format of the page, you can use the abstract environment, which puts
  62.108 +% the word "Abstract" at the beginning and single spaces its text.
  62.109 +
  62.110 +%% You can either \input (*not* \include) your abstract file, or you can put
  62.111 +%% the text of the abstract directly between the \begin{abstractpage} and
  62.112 +%% \end{abstractpage} commands.
  62.113 +
  62.114 +% First copy: start a new page, and save the page number.
  62.115 +\cleardoublepage
  62.116 +% Uncomment the next line if you do NOT want a page number on your
  62.117 +% abstract and acknowledgments pages.
  62.118 +\pagestyle{empty}
  62.119 +\setcounter{savepage}{\thepage}
  62.120 +\begin{abstractpage}
  62.121 +\input{abstract}
  62.122 +\end{abstractpage}
  62.123 +
  62.124 +% Additional copy: start a new page, and reset the page number.  This way,
  62.125 +% the second copy of the abstract is not counted as separate pages.
  62.126 +% Uncomment the next 6 lines if you need two copies of the abstract
  62.127 +% page.
  62.128 +% \setcounter{page}{\thesavepage}
  62.129 +% \begin{abstractpage}
  62.130 +% \input{abstract}
  62.131 +% \end{abstractpage}
  62.132 +
  62.133 +%% \cleardoublepage
  62.134 +
  62.135 +%% \section*{Acknowledgments}
  62.136 +
  62.137 +%% This is the acknowledgements section.  You should replace this with your
  62.138 +%% own acknowledgements.
  62.139 +
  62.140 +%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
  62.141 +% -*-latex-*-
    63.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    63.2 +++ b/thesis/garbage_cortex.org	Thu Mar 27 17:57:01 2014 -0400
    63.3 @@ -0,0 +1,100 @@
    63.4 +#+title: =CORTEX=
    63.5 +#+author: Robert McIntyre
    63.6 +#+email: rlm@mit.edu
    63.7 +#+description: Using embodied AI to facilitate Artificial Imagination.
    63.8 +#+keywords: AI, clojure, embodiment
    63.9 +
   63.10 +* Artificial Imagination
   63.11 +
   63.12 +  Imagine watching a video of someone skateboarding. When you watch
   63.13 +  the video, you can imagine yourself skateboarding, and your
   63.14 +  knowledge of the human body and its dynamics guides your
   63.15 +  interpretation of the scene. For example, even if the skateboarder
   63.16 +  is partially occluded, you can infer the positions of his arms and
   63.17 +  body from your own knowledge of how your body would be positioned if
   63.18 +  you were skateboarding. If the skateboarder suffers an accident, you
   63.19 +  wince in sympathy, imagining the pain your own body would experience
   63.20 +  if it were in the same situation. This empathy with other people
   63.21 +  guides our understanding of whatever they are doing because it is a
   63.22 +  powerful constraint on what is probable and possible. In order to
   63.23 +  make use of this powerful empathy constraint, I need a system that
   63.24 +  can generate and make sense of sensory data from the many different
   63.25 +  senses that humans possess. The two key proprieties of such a system
   63.26 +  are /embodiment/ and /imagination/.
   63.27 +
   63.28 +** What is imagination?
   63.29 +
   63.30 +   One kind of imagination is /sympathetic/ imagination: you imagine
   63.31 +   yourself in the position of something/someone you are
   63.32 +   observing. This type of imagination comes into play when you follow
   63.33 +   along visually when watching someone perform actions, or when you
   63.34 +   sympathetically grimace when someone hurts themselves. This type of
   63.35 +   imagination uses the constraints you have learned about your own
   63.36 +   body to highly constrain the possibilities in whatever you are
   63.37 +   seeing. It uses all your senses to including your senses of touch,
   63.38 +   proprioception, etc. Humans are flexible when it comes to "putting
   63.39 +   themselves in another's shoes," and can sympathetically understand
   63.40 +   not only other humans, but entities ranging from animals to cartoon
   63.41 +   characters to [[http://www.youtube.com/watch?v=0jz4HcwTQmU][single dots]] on a screen!
   63.42 +
   63.43 +
   63.44 +   #+caption: A cat drinking some water. Identifying this action is beyond the state of the art for computers.
   63.45 +   #+ATTR_LaTeX: :width 5cm
   63.46 +   [[./images/cat-drinking.jpg]]
   63.47 +
   63.48 +
   63.49 +#+begin_listing clojure
   63.50 +\caption{This is a basic test for the vision system. It only tests the vision-pipeline and does not deal with loading eyes from a blender file. The code creates two videos of the same rotating cube from different angles.}
   63.51 +#+name: test-1
   63.52 +#+begin_src clojure
   63.53 +(defn test-pipeline
   63.54 +  "Testing vision:
   63.55 +   Tests the vision system by creating two views of the same rotating
   63.56 +   object from different angles and displaying both of those views in
   63.57 +   JFrames.
   63.58 +
   63.59 +   You should see a rotating cube, and two windows,
   63.60 +   each displaying a different view of the cube."
   63.61 +  ([] (test-pipeline false))
   63.62 +  ([record?]
   63.63 +     (let [candy
   63.64 +           (box 1 1 1 :physical? false :color ColorRGBA/Blue)]
   63.65 +       (world
   63.66 +        (doto (Node.)
   63.67 +          (.attachChild candy))
   63.68 +        {}
   63.69 +        (fn [world]
   63.70 +          (let [cam (.clone (.getCamera world))
   63.71 +                width (.getWidth cam)
   63.72 +                height (.getHeight cam)]
   63.73 +            (add-camera! world cam 
   63.74 +                         (comp
   63.75 +                          (view-image
   63.76 +                           (if record?
   63.77 +                             (File. "/home/r/proj/cortex/render/vision/1")))
   63.78 +                          BufferedImage!))
   63.79 +            (add-camera! world
   63.80 +                         (doto (.clone cam)
   63.81 +                           (.setLocation (Vector3f. -10 0 0))
   63.82 +                           (.lookAt Vector3f/ZERO Vector3f/UNIT_Y))
   63.83 +                         (comp
   63.84 +                          (view-image
   63.85 +                           (if record?
   63.86 +                             (File. "/home/r/proj/cortex/render/vision/2")))
   63.87 +                          BufferedImage!))
   63.88 +            (let [timer (IsoTimer. 60)]
   63.89 +              (.setTimer world timer)
   63.90 +              (display-dilated-time world timer))
   63.91 +            ;; This is here to restore the main view
   63.92 +            ;; after the other views have completed processing
   63.93 +            (add-camera! world (.getCamera world) no-op)))
   63.94 +        (fn [world tpf]
   63.95 +          (.rotate candy (* tpf 0.2) 0 0))))))
   63.96 +#+end_src
   63.97 +#+end_listing
   63.98 +
   63.99 +- This is test1 \cite{Tappert77}.
  63.100 +
  63.101 +\cite{Tappert77}
  63.102 +lol
  63.103 +\cite{Tappert77}
  63.104 \ No newline at end of file
    64.1 Binary file thesis/images/aurellem-gray.png has changed
    65.1 Binary file thesis/images/basic-worm-view.png has changed
    66.1 Binary file thesis/images/blender-worm.png has changed
    67.1 Binary file thesis/images/cat-drinking.jpg has changed
    68.1 Binary file thesis/images/empathy-1.png has changed
    69.1 Binary file thesis/images/fat-person-sitting-at-desk.jpg has changed
    70.1 Binary file thesis/images/finger-UV.png has changed
    71.1 Binary file thesis/images/full-hand.png has changed
    72.1 Binary file thesis/images/invisible-chair.png has changed
    73.1 Binary file thesis/images/touch-learn.png has changed
    74.1 Binary file thesis/images/wall-push.png has changed
    75.1 Binary file thesis/images/worm-identify-init.png has changed
    76.1 Binary file thesis/images/worm-intro-black.png has changed
    77.1 Binary file thesis/images/worm-intro-rainbow.png has changed
    78.1 Binary file thesis/images/worm-intro-white.png has changed
    79.1 Binary file thesis/images/worm-poses.png has changed
    80.1 Binary file thesis/images/worm-roll.png has changed
    81.1 Binary file thesis/images/worm-with-muscle.png has changed
    82.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    82.2 +++ b/thesis/mitthesis.cls	Thu Mar 27 17:57:01 2014 -0400
    82.3 @@ -0,0 +1,535 @@
    82.4 +% $Log: mitthesis.cls,v $
    82.5 +% Revision 1.9  2012/02/15 15:48:16  jdreed
    82.6 +% Tweak the "permission" statement per the Libraries' specs
    82.7 +% retrieved 15-Feb-2012
    82.8 +%
    82.9 +% Revision 1.8  2009/07/27 14:23:50  boojum
   82.10 +% added changing draft mark
   82.11 +%
   82.12 +% Revision 1.6  2005/05/13 19:57:40  boojum
   82.13 +% added leftblank option
   82.14 +%
   82.15 +% Revision 1.5  2002/04/18 14:10:08  boojum
   82.16 +% changed doublespace into setspace for 2e compliance
   82.17 +%
   82.18 +% Revision 1.4  2001/02/08 18:57:13  boojum
   82.19 +% turned two newpages into cleardoublepages
   82.20 +%
   82.21 +% Revision 1.3  2000/08/14 14:53:29  boojum
   82.22 +% commented out footheight, which is relevant for drafthead
   82.23 +%
   82.24 +% Revision 1.2  1999/10/21 14:51:33  boojum
   82.25 +% changed references to documentstyle to documentclass in comments
   82.26 +%
   82.27 +% Revision 1.1  1999/10/21 14:39:31  boojum
   82.28 +% Initial revision
   82.29 +%
   82.30 +%Revision 1.7  1998/04/01  20:45:34  othomas
   82.31 +%removed offending phrase ", and to grant others the right to do so" from copyright notice.
   82.32 +%
   82.33 +%Revision 1.6  96/06/26  15:07:29  othomas
   82.34 +%safety checkin.
   82.35 +%
   82.36 +%Revision 1.5  93/06/07  15:38:50  starflt
   82.37 +%Altered 'vi' option copyright wording to comply with new Institute
   82.38 +%Archives requirements and MIT lawyers.
   82.39 +%
   82.40 +%Revision 1.4  92/08/19  16:51:06  lwvanels
   82.41 +%Updated Course 6 title page for new permissions.
   82.42 +%
   82.43 +%Revision 1.3  92/04/23  10:16:15  epeisach
   82.44 +%Fixed comment character in rcs file
   82.45 +%
   82.46 +%Revision 1.2  92/04/22  13:12:02  epeisach
   82.47 +%Fixes for 1991 course 6 requirements
   82.48 +%Phrase "and to grant others the right to do so" has been added to 
   82.49 +%permission clause
   82.50 +%Second copy of abstract is not counted as separate pages so numbering works
   82.51 +%out
   82.52 +%
   82.53 +%Revision 1.1  90/05/04  11:45:53  lwvanels
   82.54 +%Initial revision
   82.55 +
   82.56 +%
   82.57 +% LaTeX format for theses at MIT
   82.58 +% Based on "Specifications for Thesis Preparation" 
   82.59 +
   82.60 +% `vi' and `upcase' options by Krishna Sethuraman - krishna@athena.mit.edu
   82.61 +% Margins and heading types by Peter Nuth  - nuth@ai.mit.edu
   82.62 +% Title and abstract page by Stephen Gildea - gildea@erl.mit.edu
   82.63 +% Look in this directory for example file mitthesis.doc
   82.64 +% Also for propcover.tex - Boilerplate for PHD proposal.
   82.65 +
   82.66 +% To use this style - say something like:
   82.67 +%  for dull, boring thesis format:
   82.68 +%	\documentclass[12pt]{mitthesis}
   82.69 +%       \pagestyle{plain}
   82.70 +% OR for fast drafts: 
   82.71 +%	\documentclass[11pt,singlespace,draft]{mitthesis}
   82.72 +%	\pagestyle{drafthead}
   82.73 +% OR for Tech Reports:
   82.74 +%	\documentclass[12pt,twoside]{mitthesis}	
   82.75 +%	\pagestyle{headings}
   82.76 +% OR
   82.77 +%  some other combination...
   82.78 +%
   82.79 +%%%% New options:
   82.80 +% 
   82.81 +% Option `twoside':
   82.82 +%   Good for producing Tech Reports.
   82.83 +%   The default is single-sided printing, which is what M.I.T. wants on the
   82.84 +%   thesis document itself.
   82.85 +%
   82.86 +% Option `singlespace':
   82.87 +%   Good for drafts.
   82.88 +%   Double-spaced theses are the default.
   82.89 +%   That is what M.I.T. asks for in the formal specifications.
   82.90 +%
   82.91 +% 	Note that MIT does not REQUIRE all theses to be double-spaced anymore.
   82.92 +% 	Someone in the library system said that it's OK to be single-spaced.
   82.93 +% 	(Regardless of what the specs. say...)
   82.94 +%   To get singlespacing in an area - Use  the 'singlespace' environment. 
   82.95 +%
   82.96 +% Option `draft':
   82.97 +%   Puts `overfull' boxes at the end of lines that are too long. 
   82.98 +%
   82.99 +% Pagestyle `drafthead':
  82.100 +%   Puts the date and the label ``*DRAFT*'' in the footer.
  82.101 +%
  82.102 +%%%%%%%%%%
  82.103 +%
  82.104 +%%%% Parameters to initialize for boilerplate page:
  82.105 +%
  82.106 +%	\title{Mixed Circular Cylindrical Shells}
  82.107 +% 	\author{J. Casey Salas}
  82.108 +% 	\prevdegrees{B.S., University of California (1978) \\
  82.109 +%		     S.M., Massachusetts Institute of Technology (1981)}
  82.110 +% 	\department{Department of Electrical Engineering and Computer Science}
  82.111 +% 	\degree{Doctor of Philosophy}
  82.112 +%% If the thesis is for two degrees simultaneously, list them both
  82.113 +%% separated by \and like this:
  82.114 +% 	\degree{Doctor of Philosophy \and Master of Science}
  82.115 +% 	\degreemonth{February}
  82.116 +% 	\degreeyear{1987}
  82.117 +% 	\thesisdate{December 10, 1986}
  82.118 +%% If the thesis is copyright by the Institute, leave this line out and
  82.119 +%% the standard copyright line will be used instead.
  82.120 +% 	\copyrightnotice{J. Casey Salas, 1986}
  82.121 +%% If there is more than one supervisor, use the \supervisor command
  82.122 +%% once for each.
  82.123 +% 	\supervisor{John D. Galli}{Director, Sound Instrument Laboratory}
  82.124 +%% This is the department committee chairman, not the thesis committee chairman
  82.125 +% 	\chairman{Arthur C. Smith}
  82.126 +%		 {Chairman, Departmental Committee on Graduate Students}
  82.127 +%% Make the titlepage based on the above information.  If you need
  82.128 +%% something special and can't use the standard form, you can specify
  82.129 +%% the exact text of the titlepage yourself.  Put it in a titlepage
  82.130 +%% environment and leave blank lines where you want vertical space.
  82.131 +%% The spaces will be adjusted to fill the entire page.  The dotted
  82.132 +%% lines for the signatures are made with the \signature command.
  82.133 +%
  82.134 +%% The abstractpage environment sets up everything on the page except
  82.135 +%% the text itself.  The title and other header material are put at the
  82.136 +%% top of the page, and the supervisors are listed at the bottom.  A
  82.137 +%% new page is begun both before and after.  Of course, an abstract may
  82.138 +%% be more than one page itself.  If you need more control over the
  82.139 +%% format of the page, you can use the abstract environment, which puts
  82.140 +%% the word "Abstract" at the beginning and single spaces its text.
  82.141 +%
  82.142 +% 	\begin{abstractpage}
  82.143 +%	    Abstract goes here.
  82.144 +%	\end{abstractpage}
  82.145 +%
  82.146 +%%%%%%%% Newer additions 
  82.147 +%
  82.148 +% documentclass options - 
  82.149 +% vi		For MIT course VI or VIII thesis - will copyright the thesis to
  82.150 +% 		you while giving MIT permission to copy and distribute it.
  82.151 +% upcase	Will put much of the cover page in uppercase, as per the
  82.152 +% 		example on page 17 of the *Specifications for Thesis
  82.153 +% 		Preparation*, (revised 1989)
  82.154 +% Also added ``All Rights Reserved'' to default copyright notice.
  82.155 +%
  82.156 +%%%%%%%%%%%
  82.157 +% 
  82.158 +% Documentclass options (vi and upcase) and changes to copyright notice
  82.159 +%	Copyright (c) 1990, by Krishna Sethuraman.
  82.160 +%
  82.161 +% Pagestyle and header generation
  82.162 +%	Copyright (c) 1987, 1988 by Peter Nuth
  82.163 +%
  82.164 +% Original version
  82.165 +%	 Copyright (c) 1987 by Stephen Gildea
  82.166 +% Permission to copy all or part of this work is granted, provided
  82.167 +% that the copies are not made or distributed for resale, and that
  82.168 +% the copyright notice and this notice are retained.
  82.169 +% 
  82.170 +% THIS WORK IS PROVIDED ON AN "AS IS" BASIS.  THE AUTHOR PROVIDES NO
  82.171 +% WARRANTY WHATSOEVER, EITHER EXPRESS OR IMPLIED, REGARDING THE WORK,
  82.172 +% INCLUDING WARRANTIES WITH RESPECT TO ITS MERCHANTABILITY OR FITNESS
  82.173 +% FOR ANY PARTICULAR PURPOSE.
  82.174 +%%%%%%%%
  82.175 +
  82.176 +\NeedsTeXFormat{LaTeX2e}
  82.177 +\ProvidesClass{mitthesis}[1999/10/20]
  82.178 +
  82.179 +\def\mystretch{1.5}		% Double spacing hack
  82.180 +\DeclareOption{doublespace}{}	% This is default
  82.181 +				% So we do not read this style twice
  82.182 +\DeclareOption{singlespace}{		% If he explicitly wants single spacing
  82.183 +    \typeout{Single spaced}
  82.184 +    \def\mystretch{1}}	
  82.185 +
  82.186 +%% `vi' and `upcase' document style options.  Krishna Sethuraman (1990)
  82.187 +\newcount\vithesis
  82.188 +\DeclareOption{vi}{\typeout{Course VI/VIII thesis style.}\advance\vithesis by1}
  82.189 +\vithesis=0
  82.190 +
  82.191 +\DeclareOption{upcase}{\typeout{Uppercase cover page.}
  82.192 +	\gdef\choosecase#1{\uppercase\expandafter{#1}}}
  82.193 +\def\choosecase#1{#1}
  82.194 +
  82.195 +%% leftblank option by Kevin Fu
  82.196 +\newif\if@leftblank \@leftblankfalse
  82.197 +
  82.198 +\DeclareOption{leftblank}{\typeout{Intentionally Leaving Pages Blank}
  82.199 +\@leftblanktrue}
  82.200 +
  82.201 +%  Thesis looks much like report
  82.202 +\DeclareOption*{\PassOptionsToClass{\CurrentOption}{report}}
  82.203 +\ProcessOptions
  82.204 +\LoadClass{report}
  82.205 +
  82.206 +% If the user wants single spacing, set baselinestretch=1.
  82.207 +
  82.208 +\usepackage{setspace}
  82.209 +
  82.210 +% Note - doublespace.sty has some float-related troubles in
  82.211 +% combination with graphics or color, and is not officially compliant
  82.212 +% with 2e.  setspace is a replacement which is 2e-compliant.
  82.213 +
  82.214 +% Read the doublespace style that we got from Rochester:
  82.215 +%\input setdoublespace.sty 		
  82.216 +
  82.217 +\def\baselinestretch{\mystretch}	% Double spacing hack
  82.218 +
  82.219 +%%%%%%%  Set up margins and formatting params %%%
  82.220 +
  82.221 +% Margins.
  82.222 +%  Note we want 1in top margin assuming no header line, so push header
  82.223 +%	into 1in margin.
  82.224 +%  Draft mode brings the header back down.
  82.225 +
  82.226 +\setlength{\oddsidemargin}{0.25in}	% 1.25in left margin 
  82.227 +\setlength{\evensidemargin}{0.25in}	% 1.25in left margin (even pages)
  82.228 +\setlength{\topmargin}{0.0in}		% 1in top margin
  82.229 +\setlength{\textwidth}{6.0in}		% 6.0in text - 1.25in rt margin
  82.230 +\setlength{\textheight}{9in}		% Body ht for 1in margins
  82.231 +\addtolength{\topmargin}{-\headheight}	% No header, so compensate
  82.232 +\addtolength{\topmargin}{-\headsep}	% for header height and separation
  82.233 +
  82.234 +% The next two macros compensate page style for headers and footers
  82.235 +% We only need them in page styles that USE headers and footers.
  82.236 +    % If we have a header, it must be 1in from top of page.
  82.237 +\def\pulldownheader{			% Shift header down 1in from top
  82.238 +    \addtolength{\topmargin}{\headheight}	
  82.239 +    \addtolength{\topmargin}{\headsep}	
  82.240 +    \addtolength{\textheight}{-\headheight}
  82.241 +    \addtolength{\textheight}{-\headsep}
  82.242 +}
  82.243 +    % If we have a footer, put it 1in up from bottom
  82.244 +\def\pullupfooter{				% Shift footer up
  82.245 +    \addtolength{\textheight}{-\footskip}
  82.246 +%    \addtolength{\textheight}{-\footheight}  %footheight doesn't
  82.247 +%    						exist in 2e
  82.248 +}
  82.249 +
  82.250 +%%%%%%%  End of margins and formatting params %%%
  82.251 +
  82.252 +%%%%%%%  Fix various header and footer problems %%%
  82.253 +
  82.254 +% Draft mark on the right side of left pages (outside)
  82.255 +% this mark is also the only one visible on single sided.
  82.256 +\newcommand{\draftrmark}{**DRAFT**} 
  82.257 +% Draft mark on the left side of right pages (outside)
  82.258 +\newcommand{\draftlmark}{**DRAFT**} % 
  82.259 +
  82.260 +% Macros to make changing the Draft easier
  82.261 +\newcommand{\drmark}[1]{\renewcommand{\draftrmark}{#1}}
  82.262 +\newcommand{\dlmark}[1]{\renewcommand{\draftlmark}{#1}}
  82.263 +\newcommand{\dmark}[1]{\drmark{#1}\dlmark{#1}}
  82.264 +
  82.265 +% Format for draft of thesis.  Define our own PageStyle -
  82.266 +% Just like headings, but has foot lines with the date and warning
  82.267 +
  82.268 +\if@twoside         % If two-sided printing.
  82.269 +\def\ps@drafthead{
  82.270 +    \let\@mkboth\markboth
  82.271 +    \def\@oddfoot{\rm \today \hfil \sc \draftrmark}
  82.272 +    \def\@evenfoot{\sc \draftlmark \hfil \rm \today }
  82.273 +    \def\@evenhead{\rm \thepage\hfil \sl \leftmark}
  82.274 +    \def\@oddhead{\hbox{}\sl \rightmark \hfil \rm\thepage}
  82.275 +    \def\chaptermark##1{\markboth {\uppercase{\ifnum \c@secnumdepth >\m@ne
  82.276 +	\@chapapp\ \thechapter. \ \fi ##1}}{}}
  82.277 +    \def\sectionmark##1{\markright {\uppercase{\ifnum \c@secnumdepth >\z@
  82.278 +	\thesection. \ \fi ##1}}}
  82.279 +    \pulldownheader				% Bring header down from edge
  82.280 +    \pullupfooter				% Bring footer up
  82.281 +}
  82.282 +\else               % If one-sided printing.
  82.283 +\def\ps@drafthead{
  82.284 +    \let\@mkboth\markboth
  82.285 +    \def\@oddfoot{\rm \today \hfil \sc \draftrmark}
  82.286 +    \def\@oddhead{\hbox{}\sl \rightmark \hfil \rm\thepage}
  82.287 +    \def\chaptermark##1{\markright {\uppercase{\ifnum \c@secnumdepth >\m@ne
  82.288 +	\@chapapp\ \thechapter. \ \fi ##1}}}
  82.289 +    \pulldownheader				% Bring header down from edge
  82.290 +    \pullupfooter				% Bring footer up
  82.291 +}
  82.292 +\fi
  82.293 +
  82.294 +% I redefine these formats that were defined in report.sty
  82.295 +% Definition of 'headings' page style 
  82.296 +%  Note the use of ##1 for parameter of \def\chaptermark inside the
  82.297 +%  \def\ps@headings.
  82.298 +%
  82.299 +
  82.300 +\if@twoside					% If two-sided printing.
  82.301 +\def\ps@headings{\let\@mkboth\markboth
  82.302 +    \def\@oddfoot{}
  82.303 +    \def\@evenfoot{}		% No feet.
  82.304 +    \def\@evenhead{\rm \thepage\hfil \sl \leftmark}	% Left heading.
  82.305 +    \def\@oddhead{\hbox{}\sl \rightmark \hfil \rm\thepage}	% Right heading.
  82.306 +    \def\chaptermark##1{\markboth {\uppercase{\ifnum \c@secnumdepth >\m@ne
  82.307 +	\@chapapp\ \thechapter. \ \fi ##1}}{}}	
  82.308 +    \def\sectionmark##1{\markright {\uppercase{\ifnum \c@secnumdepth >\z@
  82.309 +	\thesection. \ \fi ##1}}}
  82.310 +    \pulldownheader				% Bring header down from edge
  82.311 +}
  82.312 +\else						% If one-sided printing.
  82.313 +\def\ps@headings{\let\@mkboth\markboth
  82.314 +    \def\@oddfoot{}
  82.315 +    \def\@evenfoot{}		%     No feet.
  82.316 +    \def\@oddhead{\hbox {}\sl \rightmark \hfil \rm\thepage}	% Heading.
  82.317 +    \def\chaptermark##1{\markright {\uppercase{\ifnum \c@secnumdepth >\m@ne
  82.318 +	\@chapapp\ \thechapter. \ \fi ##1}}}
  82.319 +    \pulldownheader				% Bring header down from edge
  82.320 +}
  82.321 +\fi
  82.322 +
  82.323 +% Redefinition of 'myheadings' page style.  
  82.324 +%
  82.325 +\def\ps@myheadings{\let\@mkboth\@gobbletwo
  82.326 +    \def\@oddfoot{}
  82.327 +    \def\@evenfoot{}
  82.328 +    \def\sectionmark##1{}
  82.329 +    \def\subsectionmark##1{}
  82.330 +    \def\@evenhead{\rm \thepage\hfil\sl\leftmark\hbox {}}	%
  82.331 +    \def\@oddhead{\hbox{}\sl\rightmark \hfil \rm\thepage}	%
  82.332 +    \pulldownheader				% Bring header down from edge
  82.333 +}						
  82.334 +
  82.335 +% Redefine '/chapter' to always start on an odd page.
  82.336 +% Should make no difference in singleside mode.
  82.337 +%
  82.338 +\if@leftblank
  82.339 +% Prints "THIS PAGE INTENTIONALLY LEFT BLANK" on blank pages.
  82.340 +\def\chapter{\clearpage\ifodd\c@page\else
  82.341 +   \hbox{}\par\vfill\centerline%
  82.342 +   {THIS PAGE INTENTIONALLY LEFT BLANK}%
  82.343 +   \vfill\newpage\fi
  82.344 +   \thispagestyle{plain}	% Page style of chapter page is 'plain'
  82.345 +   \global\@topnum\z@		% Prevents figures from going at top of page.
  82.346 +   \@afterindentfalse		% Suppresses indent in first paragraph.  Change
  82.347 +   \secdef\@chapter\@schapter}	% to \@afterindenttrue to have indent.
  82.348 +\else
  82.349 +\def\chapter{\cleardoublepage	% Starts new page.
  82.350 +   \thispagestyle{plain}	% Page style of chapter page is 'plain'
  82.351 +   \global\@topnum\z@		% Prevents figures from going at top of page.
  82.352 +   \@afterindentfalse		% Suppresses indent in first paragraph.  Change
  82.353 +   \secdef\@chapter\@schapter}	% to \@afterindenttrue to have indent.
  82.354 +\fi
  82.355 +% If using the report style, use - instead of . in the figure number.
  82.356 +\@ifundefined{thechapter}{}{\def\thefigure{\thechapter-\arabic{figure}}}
  82.357 +
  82.358 +
  82.359 +%%%%%%%%%  End of Style parameters %%%%
  82.360 +
  82.361 +% Here's Gildea's Boilerplate Stuff.
  82.362 +% Copyright (c) 1987 by Stephen Gildea
  82.363 +% Permission to copy all or part of this work is granted, provided
  82.364 +% that the copies are not made or distributed for resale, and that
  82.365 +% the copyright notice and this notice are retained.
  82.366 +
  82.367 +%% Define all the pieces that go on the title page and the abstract.
  82.368 +
  82.369 +% \title and \author already exist
  82.370 +
  82.371 +\def\prevdegrees#1{\gdef\@prevdegrees{#1}}
  82.372 +\def\@prevdegrees{}
  82.373 +
  82.374 +\def\department#1{\gdef\@department{#1}}
  82.375 +
  82.376 +% If you are getting two degrees, use \and between the names.
  82.377 +\def\degree#1{\setbox0\hbox{#1}	 %for side effect of setting \@degreeword
  82.378 +  \gdef\@degree{#1}}
  82.379 +
  82.380 +% \and is used inside the \degree argument to separate two degrees
  82.381 +\def\and{\gdef\@degreeword{degrees} \par and \par}
  82.382 +\def\@degreeword{degree}
  82.383 +
  82.384 +% The copyright notice stuff is a tremendous mess.
  82.385 +%
  82.386 +% \@copyrightnotice is used by \maketitle to actually put text on the
  82.387 +% page; it defaults to ``Copyright MIT 19xx.  All rights reserved.''
  82.388 +% \copyrightnoticetext takes an argument and defined \@copyrightnotice
  82.389 +% to that argument.  \copyrightnotice takes an argument, and calls
  82.390 +% \copyrightnoticetext with that argument, preceeded by a copyright
  82.391 +% symbol and followed by ``All rights reserved.'' and the standard
  82.392 +% permission notice.
  82.393 +% 
  82.394 +% If you use the 'vi' option, \copyrightnoticetext is used to set the
  82.395 +% copyright to ``(C) Your Name, Current Year in Roman Numerals.''
  82.396 +% followed by the permission notice.
  82.397 +
  82.398 +% If there is no \copyrightnotice command, it is asssumed that MIT
  82.399 +% holds the copyright.  This commands adds the copyright symbol to the
  82.400 +% beginning, and puts the standard permission notice below.
  82.401 +%% ``All rights reserved'' added.  Krishna Sethuraman (1990)
  82.402 +\def\copyrightnotice#1{\copyrightnoticetext{\copyright\ #1.  All rights
  82.403 +reserved.\par\permission}}
  82.404 +
  82.405 +% Occacionally you will need to exactly specify the text of the 
  82.406 +% copyright notice.  The \copyrightnoticetext command is then useful.
  82.407 +\long\def\copyrightnoticetext#1{\gdef\@copyrightnotice{#1}}
  82.408 +\def\@copyrightnotice{\copyright\ \Mit\ \@degreeyear.  All rights reserved.}
  82.409 +
  82.410 +%% `vi' documentclass option: Specifying this option automatically
  82.411 +%% copyrights the thesis to the author and gives MIT permission to copy and
  82.412 +%% distribute the document.  If you want, you can still specify
  82.413 +%% \copyrightnotice{stuff} to copyright to someone else, or
  82.414 +%% \copyrightnoticetext{stuff} to specify the exact text of the copyright
  82.415 +%% notice.
  82.416 +\ifodd\vithesis \copyrightnoticetext{\copyright\ \@author,
  82.417 +\uppercase\expandafter{\romannumeral\@degreeyear}.  All rights reserved.\par\permission}
  82.418 +%% or just
  82.419 +%%\@degreeyear}}
  82.420 +\typeout{Copyright given to author,
  82.421 +	permission to copy/distribute given to MIT.}
  82.422 +\else \typeout{Thesis document copyright MIT unless otherwise (manually) specified}
  82.423 +\fi
  82.424 +
  82.425 +\def\thesisdate#1{\gdef\@thesisdate{#1}}
  82.426 +
  82.427 +% typically just a month and year
  82.428 +\def\degreemonth#1{\gdef\@degreemonth{#1}}
  82.429 +\def\degreeyear#1{\gdef\@degreeyear{#1}}
  82.430 +
  82.431 +% Usage: \supervisor{name}{title}
  82.432 +%        \chairman{name}{title}
  82.433 +
  82.434 +% since there can be more than one supervisor,
  82.435 +% we build the appropriate boxes for the titlepage and
  82.436 +% the abstractpage as the user makes multiple calls
  82.437 +% to \supervisor
  82.438 +\newbox\@titlesupervisor 	\newbox\@abstractsupervisor
  82.439 +
  82.440 +\def\supervisor#1#2{\setbox\@titlesupervisor\vbox
  82.441 +  {\unvbox\@titlesupervisor \vskip 10pt% plus 1fil minus 1fil
  82.442 +  \def\baselinestretch{1}\large
  82.443 +  \signature{Certified by}{#1 \\ #2 \\ Thesis Supervisor}}
  82.444 +  \setbox\@abstractsupervisor\vbox{\unvbox\@abstractsupervisor
  82.445 +  \vskip\baselineskip \def\baselinestretch{1}\@normalsize 
  82.446 +  \par\noindent Thesis Supervisor: #1 \\ Title: #2}}
  82.447 +
  82.448 +% department chairman, not thesis committee chairman
  82.449 +\def\chairman#1#2{\gdef\@chairmanname{#1}\gdef\@chairmantitle{#2}}
  82.450 +
  82.451 +%% `upcase' documentclass option: \choosecase is defined either as a dummy or
  82.452 +%% a macro to change the (expanded) argument to uppercase.
  82.453 +\def\maketitle{\begin{titlepage}
  82.454 +\large
  82.455 +{\def\baselinestretch{1.2}\Large\bf \choosecase{\@title} \par}
  82.456 +by\par
  82.457 +{\Large  \choosecase{\@author}}
  82.458 +\par
  82.459 +\@prevdegrees
  82.460 +\par
  82.461 +\choosecase{Submitted to the} \choosecase{\@department} \\
  82.462 +\choosecase{in partial fulfillment of the requirements for the}
  82.463 +\choosecase{\@degreeword} 
  82.464 +\choosecase{of}
  82.465 +\par
  82.466 +\choosecase{\@degree}
  82.467 +\par
  82.468 +at the
  82.469 +\par\MIT\par
  82.470 +\@degreemonth\ \@degreeyear
  82.471 +\par
  82.472 +\@copyrightnotice
  82.473 +\par
  82.474 +\vskip 3\baselineskip
  82.475 +\signature{Author}{\@department \\ \@thesisdate}
  82.476 +\par
  82.477 +\vfill
  82.478 +\unvbox\@titlesupervisor
  82.479 +\par
  82.480 +\vfill
  82.481 +\signature{Accepted by}{\@chairmanname \\ \@chairmantitle}
  82.482 +\vfill
  82.483 +\end{titlepage}}
  82.484 +
  82.485 +% this environment should probably be called abstract,
  82.486 +% but we want people to also be able to get at the more
  82.487 +% basic abstract environment
  82.488 +\def\abstractpage{\cleardoublepage
  82.489 +\begin{center}{\large{\bf \@title} \\
  82.490 +by \\
  82.491 +\@author \\[\baselineskip]}
  82.492 +\par
  82.493 +\def\baselinestretch{1}\@normalsize
  82.494 +Submitted to the \@department \\
  82.495 +on \@thesisdate, in partial fulfillment of the \\
  82.496 +requirements for the \@degreeword\ of \\
  82.497 +\@degree
  82.498 +\end{center}
  82.499 +\par
  82.500 +\begin{abstract}}
  82.501 +
  82.502 +%% Changed from \unvbox to \unvcopy for use with multiple copies of abstract
  82.503 +%% page.
  82.504 +%% Krishna Sethuraman (1990)
  82.505 +\def\endabstractpage{\end{abstract}\noindent
  82.506 + \unvcopy\@abstractsupervisor \newpage}
  82.507 +
  82.508 +%% This counter is used to save the page number for the second copy of
  82.509 +%% the abstract.
  82.510 +\newcounter{savepage}
  82.511 +
  82.512 +% You can use the titlepage environment to do it all yourself if you
  82.513 +% don't want to use \maketitle.  If the titlepage environment, the
  82.514 +% paragraph skip is infinitely stretchable, so if you leave a blank line
  82.515 +% between lines that you want space between, the space will stretch so
  82.516 +% that the title page fills up the entire page.
  82.517 +\def\titlepage{\cleardoublepage\centering
  82.518 +  \thispagestyle{empty}
  82.519 +  \parindent 0pt \parskip 10pt plus 1fil minus 1fil
  82.520 +  \def\baselinestretch{1}\@normalsize\vbox to \vsize\bgroup\vbox to 9in\bgroup}
  82.521 +% The \kern0pt pushes any depth into the height.  Thanks to Richard Stone.
  82.522 +\def\endtitlepage{\par\kern 0pt\egroup\vss\egroup\newpage}
  82.523 +
  82.524 +\def\MIT{MASSACHUSETTS INSTITUTE OF TECHNOLOGY}
  82.525 +\def\Mit{Massachusetts Institute of Technology}
  82.526 +
  82.527 +\def\permission{\par\noindent{\centering
  82.528 +   The author hereby grants to MIT permission to reproduce and to
  82.529 +   distribute publicly paper and electronic copies of this thesis
  82.530 +   document in whole or in part in any medium now known or hereafter
  82.531 +   created.}\par}
  82.532 +
  82.533 +\def\signature#1#2{\par\noindent#1\dotfill\null\\*
  82.534 +  {\raggedleft #2\par}}
  82.535 +
  82.536 +\def\abstract{\subsection*{Abstract}\small\def\baselinestretch{1}\@normalsize}
  82.537 +\def\endabstract{\par}
  82.538 +
    83.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    83.2 +++ b/thesis/org/first-chapter.html	Thu Mar 27 17:57:01 2014 -0400
    83.3 @@ -0,0 +1,455 @@
    83.4 +<?xml version="1.0" encoding="utf-8"?>
    83.5 +<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
    83.6 +               "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
    83.7 +<html xmlns="http://www.w3.org/1999/xhtml" lang="en" xml:lang="en">
    83.8 +<head>
    83.9 +<title><code>CORTEX</code></title>
   83.10 +<meta http-equiv="Content-Type" content="text/html;charset=utf-8"/>
   83.11 +<meta name="title" content="<code>CORTEX</code>"/>
   83.12 +<meta name="generator" content="Org-mode"/>
   83.13 +<meta name="generated" content="2013-11-07 04:21:29 EST"/>
   83.14 +<meta name="author" content="Robert McIntyre"/>
   83.15 +<meta name="description" content="Using embodied AI to facilitate Artificial Imagination."/>
   83.16 +<meta name="keywords" content="AI, clojure, embodiment"/>
   83.17 +<style type="text/css">
   83.18 + <!--/*--><![CDATA[/*><!--*/
   83.19 +  html { font-family: Times, serif; font-size: 12pt; }
   83.20 +  .title  { text-align: center; }
   83.21 +  .todo   { color: red; }
   83.22 +  .done   { color: green; }
   83.23 +  .tag    { background-color: #add8e6; font-weight:normal }
   83.24 +  .target { }
   83.25 +  .timestamp { color: #bebebe; }
   83.26 +  .timestamp-kwd { color: #5f9ea0; }
   83.27 +  .right  {margin-left:auto; margin-right:0px;  text-align:right;}
   83.28 +  .left   {margin-left:0px;  margin-right:auto; text-align:left;}
   83.29 +  .center {margin-left:auto; margin-right:auto; text-align:center;}
   83.30 +  p.verse { margin-left: 3% }
   83.31 +  pre {
   83.32 +	border: 1pt solid #AEBDCC;
   83.33 +	background-color: #F3F5F7;
   83.34 +	padding: 5pt;
   83.35 +	font-family: courier, monospace;
   83.36 +        font-size: 90%;
   83.37 +        overflow:auto;
   83.38 +  }
   83.39 +  table { border-collapse: collapse; }
   83.40 +  td, th { vertical-align: top;  }
   83.41 +  th.right  { text-align:center;  }
   83.42 +  th.left   { text-align:center;   }
   83.43 +  th.center { text-align:center; }
   83.44 +  td.right  { text-align:right;  }
   83.45 +  td.left   { text-align:left;   }
   83.46 +  td.center { text-align:center; }
   83.47 +  dt { font-weight: bold; }
   83.48 +  div.figure { padding: 0.5em; }
   83.49 +  div.figure p { text-align: center; }
   83.50 +  div.inlinetask {
   83.51 +    padding:10px;
   83.52 +    border:2px solid gray;
   83.53 +    margin:10px;
   83.54 +    background: #ffffcc;
   83.55 +  }
   83.56 +  textarea { overflow-x: auto; }
   83.57 +  .linenr { font-size:smaller }
   83.58 +  .code-highlighted {background-color:#ffff00;}
   83.59 +  .org-info-js_info-navigation { border-style:none; }
   83.60 +  #org-info-js_console-label { font-size:10px; font-weight:bold;
   83.61 +                               white-space:nowrap; }
   83.62 +  .org-info-js_search-highlight {background-color:#ffff00; color:#000000;
   83.63 +                                 font-weight:bold; }
   83.64 +  /*]]>*/-->
   83.65 +</style>
   83.66 +<script type="text/javascript">var _gaq = _gaq || [];_gaq.push(['_setAccount', 'UA-31261312-1']);_gaq.push(['_trackPageview']);(function() {var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';var s = document.getElementsByTagName('script')[0]; s.parentNode.insertBefore(ga, s);})();</script><link rel="stylesheet" type="text/css" href="../../aurellem/css/argentum.css" />
   83.67 +<script type="text/javascript">
   83.68 +<!--/*--><![CDATA[/*><!--*/
   83.69 + function CodeHighlightOn(elem, id)
   83.70 + {
   83.71 +   var target = document.getElementById(id);
   83.72 +   if(null != target) {
   83.73 +     elem.cacheClassElem = elem.className;
   83.74 +     elem.cacheClassTarget = target.className;
   83.75 +     target.className = "code-highlighted";
   83.76 +     elem.className   = "code-highlighted";
   83.77 +   }
   83.78 + }
   83.79 + function CodeHighlightOff(elem, id)
   83.80 + {
   83.81 +   var target = document.getElementById(id);
   83.82 +   if(elem.cacheClassElem)
   83.83 +     elem.className = elem.cacheClassElem;
   83.84 +   if(elem.cacheClassTarget)
   83.85 +     target.className = elem.cacheClassTarget;
   83.86 + }
   83.87 +/*]]>*///-->
   83.88 +</script>
   83.89 +
   83.90 +</head>
   83.91 +<body>
   83.92 +
   83.93 +
   83.94 +<div id="content">
   83.95 +<h1 class="title"><code>CORTEX</code></h1>
   83.96 +
   83.97 +
   83.98 +<div class="header">
   83.99 +  <div class="float-right">	
  83.100 +    <!-- 
  83.101 +    <form>
  83.102 +      <input type="text"/><input type="submit" value="search the blog &raquo;"/> 
  83.103 +    </form>
  83.104 +    -->
  83.105 +  </div>
  83.106 +
  83.107 +  <h1>aurellem <em>&#x2609;</em></h1>
  83.108 +  <ul class="nav">
  83.109 +    <li><a href="/">read the blog &raquo;</a></li>
  83.110 +    <!-- li><a href="#">learn about us &raquo;</a></li-->
  83.111 +  </ul>
  83.112 +</div>
  83.113 +
  83.114 +<div class="author">Written by <author>Robert McIntyre</author></div>
  83.115 +
  83.116 +
  83.117 +
  83.118 +
  83.119 +
  83.120 +
  83.121 +
  83.122 +<div id="outline-container-1" class="outline-2">
  83.123 +<h2 id="sec-1">Artificial Imagination</h2>
  83.124 +<div class="outline-text-2" id="text-1">
  83.125 +
  83.126 +
  83.127 +<p>
  83.128 +  Imagine watching a video of someone skateboarding. When you watch
  83.129 +  the video, you can imagine yourself skateboarding, and your
  83.130 +  knowledge of the human body and its dynamics guides your
  83.131 +  interpretation of the scene. For example, even if the skateboarder
  83.132 +  is partially occluded, you can infer the positions of his arms and
  83.133 +  body from your own knowledge of how your body would be positioned if
  83.134 +  you were skateboarding. If the skateboarder suffers an accident, you
  83.135 +  wince in sympathy, imagining the pain your own body would experience
  83.136 +  if it were in the same situation. This empathy with other people
  83.137 +  guides our understanding of whatever they are doing because it is a
  83.138 +  powerful constraint on what is probable and possible. In order to
  83.139 +  make use of this powerful empathy constraint, I need a system that
  83.140 +  can generate and make sense of sensory data from the many different
   83.141 +  senses that humans possess. The two key properties of such a system
  83.142 +  are <i>embodiment</i> and <i>imagination</i>.
  83.143 +</p>
  83.144 +
  83.145 +</div>
  83.146 +
  83.147 +<div id="outline-container-1-1" class="outline-3">
  83.148 +<h3 id="sec-1-1">What is imagination?</h3>
  83.149 +<div class="outline-text-3" id="text-1-1">
  83.150 +
  83.151 +
  83.152 +<p>
  83.153 +   One kind of imagination is <i>sympathetic</i> imagination: you imagine
  83.154 +   yourself in the position of something/someone you are
  83.155 +   observing. This type of imagination comes into play when you follow
  83.156 +   along visually when watching someone perform actions, or when you
  83.157 +   sympathetically grimace when someone hurts themselves. This type of
  83.158 +   imagination uses the constraints you have learned about your own
  83.159 +   body to highly constrain the possibilities in whatever you are
   83.160 +   seeing. It uses all your senses, including your senses of touch,
  83.161 +   proprioception, etc. Humans are flexible when it comes to "putting
  83.162 +   themselves in another's shoes," and can sympathetically understand
   83.163 +   not only other humans, but entities ranging from animals to cartoon
  83.164 +   characters to <a href="http://www.youtube.com/watch?v=0jz4HcwTQmU">single dots</a> on a screen!
  83.165 +</p>
  83.166 +<p>
  83.167 +   Another kind of imagination is <i>predictive</i> imagination: you
  83.168 +   construct scenes in your mind that are not entirely related to
  83.169 +   whatever you are observing, but instead are predictions of the
  83.170 +   future or simply flights of fancy. You use this type of imagination
  83.171 +   to plan out multi-step actions, or play out dangerous situations in
  83.172 +   your mind so as to avoid messing them up in reality.
  83.173 +</p>
  83.174 +<p>
  83.175 +   Of course, sympathetic and predictive imagination blend into each
  83.176 +   other and are not completely separate concepts. One dimension along
  83.177 +   which you can distinguish types of imagination is dependence on raw
  83.178 +   sense data. Sympathetic imagination is highly constrained by your
  83.179 +   senses, while predictive imagination can be more or less dependent
  83.180 +   on your senses depending on how far ahead you imagine. Daydreaming
  83.181 +   is an extreme form of predictive imagination that wanders through
  83.182 +   different possibilities without concern for whether they are
  83.183 +   related to whatever is happening in reality.
  83.184 +</p>
  83.185 +<p>
  83.186 +   For this thesis, I will mostly focus on sympathetic imagination and
  83.187 +   the constraint it provides for understanding sensory data.
  83.188 +</p>
  83.189 +</div>
  83.190 +
  83.191 +</div>
  83.192 +
  83.193 +<div id="outline-container-1-2" class="outline-3">
  83.194 +<h3 id="sec-1-2">What problems can imagination solve?</h3>
  83.195 +<div class="outline-text-3" id="text-1-2">
  83.196 +
  83.197 +
  83.198 +<p>
  83.199 +   Consider a video of a cat drinking some water.
  83.200 +</p>
  83.201 +
  83.202 +<div class="figure">
  83.203 +<p><img src="../images/cat-drinking.jpg"  alt="../images/cat-drinking.jpg" /></p>
  83.204 +<p>A cat drinking some water. Identifying this action is beyond the state of the art for computers.</p>
  83.205 +</div>
  83.206 +
  83.207 +<p>
  83.208 +   It is currently impossible for any computer program to reliably
   83.209 +   label such a video as "drinking". I think humans are able to label
  83.210 +   such video as "drinking" because they imagine <i>themselves</i> as the
  83.211 +   cat, and imagine putting their face up against a stream of water
  83.212 +   and sticking out their tongue. In that imagined world, they can
  83.213 +   feel the cool water hitting their tongue, and feel the water
  83.214 +   entering their body, and are able to recognize that <i>feeling</i> as
  83.215 +   drinking. So, the label of the action is not really in the pixels
  83.216 +   of the image, but is found clearly in a simulation inspired by
  83.217 +   those pixels. An imaginative system, having been trained on
  83.218 +   drinking and non-drinking examples and learning that the most
  83.219 +   important component of drinking is the feeling of water sliding
  83.220 +   down one's throat, would analyze a video of a cat drinking in the
  83.221 +   following manner:
  83.222 +</p>
  83.223 +<ul>
  83.224 +<li>Create a physical model of the video by putting a "fuzzy" model
  83.225 +     of its own body in place of the cat. Also, create a simulation of
  83.226 +     the stream of water.
  83.227 +
  83.228 +</li>
  83.229 +<li>Play out this simulated scene and generate imagined sensory
  83.230 +     experience. This will include relevant muscle contractions, a
  83.231 +     close up view of the stream from the cat's perspective, and most
  83.232 +     importantly, the imagined feeling of water entering the mouth.
  83.233 +
  83.234 +</li>
  83.235 +<li>The action is now easily identified as drinking by the sense of
  83.236 +     taste alone. The other senses (such as the tongue moving in and
  83.237 +     out) help to give plausibility to the simulated action. Note that
  83.238 +     the sense of vision, while critical in creating the simulation,
  83.239 +     is not critical for identifying the action from the simulation.
  83.240 +</li>
  83.241 +</ul>
  83.242 +
  83.243 +
  83.244 +<p>
  83.245 +   More generally, I expect imaginative systems to be particularly
  83.246 +   good at identifying embodied actions in videos.
  83.247 +</p>
  83.248 +</div>
  83.249 +</div>
  83.250 +
  83.251 +</div>
  83.252 +
  83.253 +<div id="outline-container-2" class="outline-2">
  83.254 +<h2 id="sec-2">Cortex</h2>
  83.255 +<div class="outline-text-2" id="text-2">
  83.256 +
  83.257 +
  83.258 +<p>
  83.259 +  The previous example involves liquids, the sense of taste, and
  83.260 +  imagining oneself as a cat. For this thesis I constrain myself to
  83.261 +  simpler, more easily digitizable senses and situations.
  83.262 +</p>
  83.263 +<p>
  83.264 +  My system, <code>Cortex</code> performs imagination in two different simplified
  83.265 +  worlds: <i>worm world</i> and <i>stick figure world</i>. In each of these
  83.266 +  worlds, entities capable of imagination recognize actions by
  83.267 +  simulating the experience from their own perspective, and then
  83.268 +  recognizing the action from a database of examples.
  83.269 +</p>
  83.270 +<p>
  83.271 +  In order to serve as a framework for experiments in imagination,
  83.272 +  <code>Cortex</code> requires simulated bodies, worlds, and senses like vision,
  83.273 +  hearing, touch, proprioception, etc.
  83.274 +</p>
  83.275 +
  83.276 +</div>
  83.277 +
  83.278 +<div id="outline-container-2-1" class="outline-3">
  83.279 +<h3 id="sec-2-1">A Video Game Engine takes care of some of the groundwork</h3>
  83.280 +<div class="outline-text-3" id="text-2-1">
  83.281 +
  83.282 +
  83.283 +<p>
  83.284 +   When it comes to simulation environments, the engines used to
  83.285 +   create the worlds in video games offer top-notch physics and
  83.286 +   graphics support. These engines also have limited support for
  83.287 +   creating cameras and rendering 3D sound, which can be repurposed
  83.288 +   for vision and hearing respectively. Physics collision detection
  83.289 +   can be expanded to create a sense of touch.
  83.290 +</p>
  83.291 +<p>   
  83.292 +   jMonkeyEngine3 is one such engine for creating video games in
   83.293 +   Java. It uses OpenGL to render to the screen and uses scene graphs
  83.294 +   to avoid drawing things that do not appear on the screen. It has an
  83.295 +   active community and several games in the pipeline. The engine was
  83.296 +   not built to serve any particular game but is instead meant to be
   83.297 +   used for any 3D game. I chose jMonkeyEngine3 because it had the
  83.298 +   most features out of all the open projects I looked at, and because
  83.299 +   I could then write my code in Clojure, an implementation of LISP
  83.300 +   that runs on the JVM.
  83.301 +</p>
  83.302 +</div>
  83.303 +
  83.304 +</div>
  83.305 +
  83.306 +<div id="outline-container-2-2" class="outline-3">
  83.307 +<h3 id="sec-2-2"><code>CORTEX</code> Extends jMonkeyEngine3 to implement rich senses</h3>
  83.308 +<div class="outline-text-3" id="text-2-2">
  83.309 +
  83.310 +
  83.311 +<p>
  83.312 +   Using the game-making primitives provided by jMonkeyEngine3, I have
  83.313 +   constructed every major human sense except for smell and
  83.314 +   taste. <code>Cortex</code> also provides an interface for creating creatures
  83.315 +   in Blender, a 3D modeling environment, and then "rigging" the
  83.316 +   creatures with senses using 3D annotations in Blender. A creature
  83.317 +   can have any number of senses, and there can be any number of
  83.318 +   creatures in a simulation.
  83.319 +</p>
  83.320 +<p>   
  83.321 +   The senses available in <code>Cortex</code> are:
  83.322 +</p>
  83.323 +<ul>
  83.324 +<li><a href="../../cortex/html/vision.html">Vision</a>
  83.325 +</li>
  83.326 +<li><a href="../../cortex/html/hearing.html">Hearing</a>
  83.327 +</li>
  83.328 +<li><a href="../../cortex/html/touch.html">Touch</a>
  83.329 +</li>
  83.330 +<li><a href="../../cortex/html/proprioception.html">Proprioception</a>
  83.331 +</li>
  83.332 +<li><a href="../../cortex/html/movement.html">Muscle Tension</a>
  83.333 +</li>
  83.334 +</ul>
  83.335 +
  83.336 +
  83.337 +</div>
  83.338 +</div>
  83.339 +
  83.340 +</div>
  83.341 +
  83.342 +<div id="outline-container-3" class="outline-2">
  83.343 +<h2 id="sec-3">A roadmap for <code>Cortex</code> experiments</h2>
  83.344 +<div class="outline-text-2" id="text-3">
  83.345 +
  83.346 +
  83.347 +
  83.348 +</div>
  83.349 +
  83.350 +<div id="outline-container-3-1" class="outline-3">
  83.351 +<h3 id="sec-3-1">Worm World</h3>
  83.352 +<div class="outline-text-3" id="text-3-1">
  83.353 +
  83.354 +
  83.355 +<p>
  83.356 +   Worms in <code>Cortex</code> are segmented creatures which vary in length and
  83.357 +   number of segments, and have the senses of vision, proprioception,
  83.358 +   touch, and muscle tension.
  83.359 +</p>
  83.360 +
  83.361 +<div class="figure">
  83.362 +<p><img src="../images/finger-UV.png" width=755 alt="../images/finger-UV.png" /></p>
  83.363 +<p>This is the tactile-sensor-profile for the upper segment of a worm. It defines regions of high touch sensitivity (where there are many white pixels) and regions of low sensitivity (where white pixels are sparse).</p>
  83.364 +</div>
  83.365 +
  83.366 +
  83.367 +
  83.368 +
  83.369 +<div class="figure">
  83.370 +  <center>
  83.371 +    <video controls="controls" width="550">
  83.372 +      <source src="../video/worm-touch.ogg" type="video/ogg"
  83.373 +              preload="none" />
  83.374 +    </video>
  83.375 +    <br> <a href="http://youtu.be/RHx2wqzNVcU"> YouTube </a>
  83.376 +  </center>
  83.377 +  <p>The worm responds to touch.</p>
  83.378 +</div>
  83.379 +
  83.380 +<div class="figure">
  83.381 +  <center>
  83.382 +    <video controls="controls" width="550">
  83.383 +      <source src="../video/test-proprioception.ogg" type="video/ogg"
  83.384 +              preload="none" />
  83.385 +    </video>
  83.386 +    <br> <a href="http://youtu.be/JjdDmyM8b0w"> YouTube </a>
  83.387 +  </center>
  83.388 +  <p>Proprioception in a worm. The proprioceptive readout is
  83.389 +    in the upper left corner of the screen.</p>
  83.390 +</div>
  83.391 +
  83.392 +<p>
  83.393 +   A worm is trained in various actions such as sinusoidal movement,
  83.394 +   curling, flailing, and spinning by directly playing motor
  83.395 +   contractions while the worm "feels" the experience. These actions
  83.396 +   are recorded both as vectors of muscle tension, touch, and
  83.397 +   proprioceptive data, but also in higher level forms such as
  83.398 +   frequencies of the various contractions and a symbolic name for the
  83.399 +   action.
  83.400 +</p>
  83.401 +<p>
  83.402 +   Then, the worm watches a video of another worm performing one of
  83.403 +   the actions, and must judge which action was performed. Normally
  83.404 +   this would be an extremely difficult problem, but the worm is able
  83.405 +   to greatly diminish the search space through sympathetic
  83.406 +   imagination. First, it creates an imagined copy of its body which
  83.407 +   it observes from a third person point of view. Then for each frame
  83.408 +   of the video, it maneuvers its simulated body to be in registration
  83.409 +   with the worm depicted in the video. The physical constraints
  83.410 +   imposed by the physics simulation greatly decrease the number of
  83.411 +   poses that have to be tried, making the search feasible. As the
  83.412 +   imaginary worm moves, it generates imaginary muscle tension and
  83.413 +   proprioceptive sensations. The worm determines the action not by
  83.414 +   vision, but by matching the imagined proprioceptive data with
  83.415 +   previous examples.
  83.416 +</p>
  83.417 +<p>
  83.418 +   By using non-visual sensory data such as touch, the worms can also
  83.419 +   answer body related questions such as "did your head touch your
  83.420 +   tail?" and "did worm A touch worm B?"
  83.421 +</p>
  83.422 +<p>
  83.423 +   The proprioceptive information used for action identification is
  83.424 +   body-centric, so only the registration step is dependent on point
  83.425 +   of view, not the identification step. Registration is not specific
  83.426 +   to any particular action. Thus, action identification can be
  83.427 +   divided into a point-of-view dependent generic registration step,
   83.428 +   and an action-specific step that is body-centered and invariant to
  83.429 +   point of view.
  83.430 +</p>
  83.431 +</div>
  83.432 +
  83.433 +</div>
  83.434 +
  83.435 +<div id="outline-container-3-2" class="outline-3">
  83.436 +<h3 id="sec-3-2">Stick Figure World</h3>
  83.437 +<div class="outline-text-3" id="text-3-2">
  83.438 +
  83.439 +
  83.440 +<p>
  83.441 +   This environment is similar to Worm World, except the creatures are
  83.442 +   more complicated and the actions and questions more varied. It is
  83.443 +   an experiment to see how far imagination can go in interpreting
  83.444 +   actions.  
  83.445 +</p></div>
  83.446 +</div>
  83.447 +</div>
  83.448 +</div>
  83.449 +
  83.450 +<div id="postamble">
  83.451 +<p class="date">Date: 2013-11-07 04:21:29 EST</p>
  83.452 +<p class="author">Author: Robert McIntyre</p>
  83.453 +<p class="creator">Org version 7.7 with Emacs version 24</p>
  83.454 +<a href="http://validator.w3.org/check?uri=referer">Validate XHTML 1.0</a>
  83.455 +
  83.456 +</div>
  83.457 +</body>
  83.458 +</html>
    84.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    84.2 +++ b/thesis/org/first-chapter.org	Thu Mar 27 17:57:01 2014 -0400
    84.3 @@ -0,0 +1,241 @@
    84.4 +#+title: =CORTEX=
    84.5 +#+author: Robert McIntyre
    84.6 +#+email: rlm@mit.edu
    84.7 +#+description: Using embodied AI to facilitate Artificial Imagination.
    84.8 +#+keywords: AI, clojure, embodiment
    84.9 +#+SETUPFILE: ../../aurellem/org/setup.org
   84.10 +#+INCLUDE: ../../aurellem/org/level-0.org
   84.11 +#+babel: :mkdirp yes :noweb yes :exports both
   84.12 +#+OPTIONS: toc:nil, num:nil
   84.13 +
   84.14 +* Artificial Imagination
   84.15 +  Imagine watching a video of someone skateboarding. When you watch
   84.16 +  the video, you can imagine yourself skateboarding, and your
   84.17 +  knowledge of the human body and its dynamics guides your
   84.18 +  interpretation of the scene. For example, even if the skateboarder
   84.19 +  is partially occluded, you can infer the positions of his arms and
   84.20 +  body from your own knowledge of how your body would be positioned if
   84.21 +  you were skateboarding. If the skateboarder suffers an accident, you
   84.22 +  wince in sympathy, imagining the pain your own body would experience
   84.23 +  if it were in the same situation. This empathy with other people
   84.24 +  guides our understanding of whatever they are doing because it is a
   84.25 +  powerful constraint on what is probable and possible. In order to
   84.26 +  make use of this powerful empathy constraint, I need a system that
   84.27 +  can generate and make sense of sensory data from the many different
    84.28 +  senses that humans possess. The two key properties of such a system
   84.29 +  are /embodiment/ and /imagination/.
   84.30 +
   84.31 +** What is imagination?
   84.32 +
   84.33 +   One kind of imagination is /sympathetic/ imagination: you imagine
   84.34 +   yourself in the position of something/someone you are
   84.35 +   observing. This type of imagination comes into play when you follow
   84.36 +   along visually when watching someone perform actions, or when you
   84.37 +   sympathetically grimace when someone hurts themselves. This type of
   84.38 +   imagination uses the constraints you have learned about your own
   84.39 +   body to highly constrain the possibilities in whatever you are
    84.40 +   seeing. It uses all your senses, including your senses of touch,
   84.41 +   proprioception, etc. Humans are flexible when it comes to "putting
   84.42 +   themselves in another's shoes," and can sympathetically understand
   84.43 +   not only other humans, but entities ranging from animals to cartoon
   84.44 +   characters to [[http://www.youtube.com/watch?v=0jz4HcwTQmU][single dots]] on a screen!
   84.45 +
   84.46 +# and can infer intention from the actions of not only other humans,
   84.47 +# but also animals, cartoon characters, and even abstract moving dots
   84.48 +# on a screen!
   84.49 +
   84.50 +   Another kind of imagination is /predictive/ imagination: you
   84.51 +   construct scenes in your mind that are not entirely related to
   84.52 +   whatever you are observing, but instead are predictions of the
   84.53 +   future or simply flights of fancy. You use this type of imagination
   84.54 +   to plan out multi-step actions, or play out dangerous situations in
   84.55 +   your mind so as to avoid messing them up in reality.
   84.56 +
   84.57 +   Of course, sympathetic and predictive imagination blend into each
   84.58 +   other and are not completely separate concepts. One dimension along
   84.59 +   which you can distinguish types of imagination is dependence on raw
   84.60 +   sense data. Sympathetic imagination is highly constrained by your
   84.61 +   senses, while predictive imagination can be more or less dependent
   84.62 +   on your senses depending on how far ahead you imagine. Daydreaming
   84.63 +   is an extreme form of predictive imagination that wanders through
   84.64 +   different possibilities without concern for whether they are
   84.65 +   related to whatever is happening in reality.
   84.66 +
   84.67 +   For this thesis, I will mostly focus on sympathetic imagination and
   84.68 +   the constraint it provides for understanding sensory data.
   84.69 +   
   84.70 +** What problems can imagination solve?
   84.71 +
   84.72 +   Consider a video of a cat drinking some water.
   84.73 +
   84.74 +   #+caption: A cat drinking some water. Identifying this action is beyond the state of the art for computers.
   84.75 +   #+ATTR_LaTeX: width=5cm
   84.76 +   [[../images/cat-drinking.jpg]]
   84.77 +
   84.78 +   It is currently impossible for any computer program to reliably
    84.79 +   label such a video as "drinking". I think humans are able to label
   84.80 +   such video as "drinking" because they imagine /themselves/ as the
   84.81 +   cat, and imagine putting their face up against a stream of water
   84.82 +   and sticking out their tongue. In that imagined world, they can
   84.83 +   feel the cool water hitting their tongue, and feel the water
   84.84 +   entering their body, and are able to recognize that /feeling/ as
   84.85 +   drinking. So, the label of the action is not really in the pixels
   84.86 +   of the image, but is found clearly in a simulation inspired by
   84.87 +   those pixels. An imaginative system, having been trained on
   84.88 +   drinking and non-drinking examples and learning that the most
   84.89 +   important component of drinking is the feeling of water sliding
   84.90 +   down one's throat, would analyze a video of a cat drinking in the
   84.91 +   following manner:
   84.92 +   
   84.93 +   - Create a physical model of the video by putting a "fuzzy" model
   84.94 +     of its own body in place of the cat. Also, create a simulation of
   84.95 +     the stream of water.
   84.96 +
   84.97 +   - Play out this simulated scene and generate imagined sensory
   84.98 +     experience. This will include relevant muscle contractions, a
   84.99 +     close up view of the stream from the cat's perspective, and most
  84.100 +     importantly, the imagined feeling of water entering the mouth.
  84.101 +
  84.102 +   - The action is now easily identified as drinking by the sense of
  84.103 +     taste alone. The other senses (such as the tongue moving in and
  84.104 +     out) help to give plausibility to the simulated action. Note that
  84.105 +     the sense of vision, while critical in creating the simulation,
  84.106 +     is not critical for identifying the action from the simulation.
  84.107 +
  84.108 +   More generally, I expect imaginative systems to be particularly
  84.109 +   good at identifying embodied actions in videos.
  84.110 +
  84.111 +* Cortex
  84.112 +
  84.113 +  The previous example involves liquids, the sense of taste, and
  84.114 +  imagining oneself as a cat. For this thesis I constrain myself to
  84.115 +  simpler, more easily digitizable senses and situations.
  84.116 +
  84.117 +  My system, =CORTEX= performs imagination in two different simplified
  84.118 +  worlds: /worm world/ and /stick-figure world/. In each of these
  84.119 +  worlds, entities capable of imagination recognize actions by
  84.120 +  simulating the experience from their own perspective, and then
  84.121 +  recognizing the action from a database of examples.
  84.122 +
  84.123 +  In order to serve as a framework for experiments in imagination,
  84.124 +  =CORTEX= requires simulated bodies, worlds, and senses like vision,
  84.125 +  hearing, touch, proprioception, etc.
  84.126 +
  84.127 +** A Video Game Engine takes care of some of the groundwork
  84.128 +
  84.129 +   When it comes to simulation environments, the engines used to
  84.130 +   create the worlds in video games offer top-notch physics and
  84.131 +   graphics support. These engines also have limited support for
  84.132 +   creating cameras and rendering 3D sound, which can be repurposed
  84.133 +   for vision and hearing respectively. Physics collision detection
  84.134 +   can be expanded to create a sense of touch.
  84.135 +   
  84.136 +   jMonkeyEngine3 is one such engine for creating video games in
   84.137 +   Java. It uses OpenGL to render to the screen and uses scene graphs
  84.138 +   to avoid drawing things that do not appear on the screen. It has an
  84.139 +   active community and several games in the pipeline. The engine was
  84.140 +   not built to serve any particular game but is instead meant to be
   84.141 +   used for any 3D game. I chose jMonkeyEngine3 because it had the
  84.142 +   most features out of all the open projects I looked at, and because
  84.143 +   I could then write my code in Clojure, an implementation of LISP
  84.144 +   that runs on the JVM.
  84.145 +
  84.146 +** =CORTEX= Extends jMonkeyEngine3 to implement rich senses
  84.147 +
  84.148 +   Using the game-making primitives provided by jMonkeyEngine3, I have
  84.149 +   constructed every major human sense except for smell and
  84.150 +   taste. =CORTEX= also provides an interface for creating creatures
  84.151 +   in Blender, a 3D modeling environment, and then "rigging" the
  84.152 +   creatures with senses using 3D annotations in Blender. A creature
  84.153 +   can have any number of senses, and there can be any number of
  84.154 +   creatures in a simulation.
  84.155 +   
  84.156 +   The senses available in =CORTEX= are:
  84.157 +
  84.158 +   - [[../../cortex/html/vision.html][Vision]]
  84.159 +   - [[../../cortex/html/hearing.html][Hearing]]
  84.160 +   - [[../../cortex/html/touch.html][Touch]]
  84.161 +   - [[../../cortex/html/proprioception.html][Proprioception]]
  84.162 +   - [[../../cortex/html/movement.html][Muscle Tension]]
  84.163 +
  84.164 +* A roadmap for =CORTEX= experiments
  84.165 +
  84.166 +** Worm World
  84.167 +
  84.168 +   Worms in =CORTEX= are segmented creatures which vary in length and
  84.169 +   number of segments, and have the senses of vision, proprioception,
  84.170 +   touch, and muscle tension.
  84.171 +
  84.172 +#+attr_html: width=755
  84.173 +#+caption: This is the tactile-sensor-profile for the upper segment of a worm. It defines regions of high touch sensitivity (where there are many white pixels) and regions of low sensitivity (where white pixels are sparse).
  84.174 +[[../images/finger-UV.png]]
  84.175 +
  84.176 +
  84.177 +#+begin_html
  84.178 +<div class="figure">
  84.179 +  <center>
  84.180 +    <video controls="controls" width="550">
  84.181 +      <source src="../video/worm-touch.ogg" type="video/ogg"
  84.182 +	      preload="none" />
  84.183 +    </video>
  84.184 +    <br> <a href="http://youtu.be/RHx2wqzNVcU"> YouTube </a>
  84.185 +  </center>
  84.186 +  <p>The worm responds to touch.</p>
  84.187 +</div>
  84.188 +#+end_html
  84.189 +
  84.190 +#+begin_html
  84.191 +<div class="figure">
  84.192 +  <center>
  84.193 +    <video controls="controls" width="550">
  84.194 +      <source src="../video/test-proprioception.ogg" type="video/ogg"
  84.195 +	      preload="none" />
  84.196 +    </video>
  84.197 +    <br> <a href="http://youtu.be/JjdDmyM8b0w"> YouTube </a>
  84.198 +  </center>
  84.199 +  <p>Proprioception in a worm. The proprioceptive readout is
  84.200 +    in the upper left corner of the screen.</p>
  84.201 +</div>
  84.202 +#+end_html
  84.203 +
  84.204 +   A worm is trained in various actions such as sinusoidal movement,
  84.205 +   curling, flailing, and spinning by directly playing motor
  84.206 +   contractions while the worm "feels" the experience. These actions
  84.207 +   are recorded both as vectors of muscle tension, touch, and
  84.208 +   proprioceptive data, but also in higher level forms such as
  84.209 +   frequencies of the various contractions and a symbolic name for the
  84.210 +   action.
  84.211 +
  84.212 +   Then, the worm watches a video of another worm performing one of
  84.213 +   the actions, and must judge which action was performed. Normally
  84.214 +   this would be an extremely difficult problem, but the worm is able
  84.215 +   to greatly diminish the search space through sympathetic
  84.216 +   imagination. First, it creates an imagined copy of its body which
  84.217 +   it observes from a third person point of view. Then for each frame
  84.218 +   of the video, it maneuvers its simulated body to be in registration
  84.219 +   with the worm depicted in the video. The physical constraints
  84.220 +   imposed by the physics simulation greatly decrease the number of
  84.221 +   poses that have to be tried, making the search feasible. As the
  84.222 +   imaginary worm moves, it generates imaginary muscle tension and
  84.223 +   proprioceptive sensations. The worm determines the action not by
  84.224 +   vision, but by matching the imagined proprioceptive data with
  84.225 +   previous examples.
  84.226 +
  84.227 +   By using non-visual sensory data such as touch, the worms can also
  84.228 +   answer body related questions such as "did your head touch your
  84.229 +   tail?" and "did worm A touch worm B?"
  84.230 +
  84.231 +   The proprioceptive information used for action identification is
  84.232 +   body-centric, so only the registration step is dependent on point
  84.233 +   of view, not the identification step. Registration is not specific
  84.234 +   to any particular action. Thus, action identification can be
  84.235 +   divided into a point-of-view dependent generic registration step,
  84.236 +   and an action-specific step that is body-centered and invariant to
  84.237 +   point of view.
  84.238 +
  84.239 +** Stick Figure World
  84.240 +
  84.241 +   This environment is similar to Worm World, except the creatures are
  84.242 +   more complicated and the actions and questions more varied. It is
  84.243 +   an experiment to see how far imagination can go in interpreting
  84.244 +   actions.  
    85.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    85.2 +++ b/thesis/org/roadmap.org	Thu Mar 27 17:57:01 2014 -0400
    85.3 @@ -0,0 +1,220 @@
    85.4 +In order for this to be a reasonable thesis that I can be proud of,
    85.5 +what are the /minimum/ number of things I need to get done?
    85.6 +
    85.7 +
    85.8 +* worm OR hand registration
    85.9 +  - training from a few examples (2 to start out)
   85.10 +  - aligning the body with the scene
   85.11 +  - generating sensory data
   85.12 +  - matching previous labeled examples using dot-products or some
   85.13 +    other basic thing
   85.14 +  - showing that it works with different views
   85.15 +
   85.16 +* first draft
   85.17 +  - draft of thesis without bibliography or formatting
   85.18 +  - should have basic experiment and have full description of
   85.19 +    framework with code
   85.20 +  - review with Winston
   85.21 +  
   85.22 +* final draft
   85.23 +  - implement stretch goals from Winston if possible
   85.24 +  - complete final formatting and submit
   85.25 +
   85.26 +* CORTEX
   85.27 +  DEADLINE: <2014-05-09 Fri>
   85.28 +  SHIT THAT'S IN 67 DAYS!!!
   85.29 +
   85.30 +** program simple feature matching code for the worm's segments
   85.31 +
   85.32 +Subgoals:
   85.33 +*** DONE Get cortex working again, run tests, no jmonkeyengine updates
   85.34 +    CLOSED: [2014-03-03 Mon 22:07] SCHEDULED: <2014-03-03 Mon>
   85.35 +*** DONE get blender working again
   85.36 +    CLOSED: [2014-03-03 Mon 22:43] SCHEDULED: <2014-03-03 Mon>
   85.37 +*** DONE make sparse touch worm segment in blender
   85.38 +    CLOSED: [2014-03-03 Mon 23:16] SCHEDULED: <2014-03-03 Mon>
   85.39 +    CLOCK: [2014-03-03 Mon 22:44]--[2014-03-03 Mon 23:16] =>  0:32
   85.40 +*** DONE make multi-segment touch worm with touch sensors and display
   85.41 +    CLOSED: [2014-03-03 Mon 23:54] SCHEDULED: <2014-03-03 Mon>
   85.42 +
   85.43 +*** DONE Make a worm wiggle and curl
   85.44 +    CLOSED: [2014-03-04 Tue 23:03] SCHEDULED: <2014-03-04 Tue>
   85.45 +
   85.46 +
   85.47 +** First draft
   85.48 +
   85.49 +Subgoals:
   85.50 +*** Writeup new worm experiments.
   85.51 +*** Triage implementation code and get it into chapter form.
   85.52 +
   85.53 +
   85.54 +
   85.55 + 
   85.56 +
   85.57 +** for today
   85.58 +
   85.59 +- guided worm :: control the worm with the keyboard. Useful for
   85.60 +                 testing the body-centered recog scripts, and for
   85.61 +                 preparing a cool demo video.
   85.62 +
   85.63 +- body-centered recognition :: detect actions using hard coded
   85.64 +     body-centered scripts. 
   85.65 +
   85.66 +- cool demo video of the worm being moved and recognizing things ::
   85.67 +     will be a neat part of the thesis.
   85.68 +
   85.69 +- thesis export :: refactoring and organization of code so that it
   85.70 +                   spits out a thesis in addition to the web page.
   85.71 +
   85.72 +- video alignment :: analyze the frames of a video in order to align
   85.73 +     the worm. Requires body-centered recognition. Can "cheat".
   85.74 +
   85.75 +- smoother actions :: use debugging controls to directly influence the
   85.76 +     demo actions, and to generate recognition procedures.
   85.77 +
   85.78 +- degenerate video demonstration :: show the system recognizing a
   85.79 +     curled worm from dead on. Crowning achievement of thesis.
   85.80 +
   85.81 +** Ordered from easiest to hardest
   85.82 +
   85.83 +Just report the positions of everything. I don't think that this
   85.84 +necessarily shows anything useful.
   85.85 +
   85.86 +Worm-segment vision -- you initialize a view of the worm, but instead
   85.87 +of pixels you use labels via ray tracing. Has the advantage of still
   85.88 +allowing for visual occlusion, but reliably identifies the objects,
   85.89 +even without rainbow coloring. You can code this as an image. 
   85.90 +
   85.91 +Same as above, except just with worm/non-worm labels.
   85.92 +
   85.93 +Color code each worm segment and then recognize them using blob
   85.94 +detectors. Then you solve for the perspective and the action
   85.95 +simultaneously.
   85.96 +
   85.97 +The entire worm can be colored the same, high contrast color against a
   85.98 +nearly black background.
   85.99 +
  85.100 +"Rooted" vision. You give the exact coordinates of ONE piece of the
  85.101 +worm, but the algorithm figures out the rest.
  85.102 +
  85.103 +More rooted vision -- start off the entire worm with one position.
  85.104 +
  85.105 +The right way to do alignment is to use motion over multiple frames to
  85.106 +snap individual pieces of the model into place sharing and
  85.107 +propagating the individual alignments over the whole model. We also
  85.108 +want to limit the alignment search to just those actions we are
  85.109 +prepared to identify. This might mean that I need some small "micro
  85.110 +actions" such as the individual movements of the worm pieces.
  85.111 +
  85.112 +Get just the centers of each segment projected onto the imaging
  85.113 +plane. (best so far).
  85.114 +
  85.115 +
  85.116 +Repertoire of actions  +  video frames -->
  85.117 +   directed multi-frame-search alg
  85.118 +
  85.119 +
  85.120 +
  85.121 +
  85.122 +
  85.123 +
  85.124 +!! Could also have a bounding box around the worm provided by
  85.125 +filtering the worm/non-worm render, and use bbbgs. As a bonus, I get
  85.126 +to include bbbgs in my thesis! Could finally do that recursive things
  85.127 +where I make bounding boxes be those things that give results that
  85.128 +give good bounding boxes. If I did this I could use a disruptive
  85.129 +pattern on the worm.
  85.130 +
  85.131 +Re-imagining using default textures is very simple for this system,
  85.132 +but hard for others.
  85.133 +
  85.134 +
  85.135 +Want to demonstrate, at minimum, alignment of some model of the worm
  85.136 +to the video, and a lookup of the action by simulated perception.
  85.137 +
  85.138 +note: the purple/white points is a very beautiful texture, because
  85.139 +when it moves slightly, the white dots look like they're
  85.140 +twinkling. Would look even better if it was a darker purple. Also
  85.141 +would look better more spread out.
  85.142 +
  85.143 +
  85.144 +embed assumption of one frame of view, search by moving around in
  85.145 +simulated world.
  85.146 +
  85.147 +Allowed to limit search by setting limits to a hemisphere around the
  85.148 +imagined worm! This limits scale also.
  85.149 +
  85.150 +
  85.151 +
  85.152 +
  85.153 +
  85.154 +!! Limited search with worm/non-worm rendering. 
  85.155 +How much inverse kinematics do we have to do?
  85.156 +What about cached (allowed state-space) paths, derived from labeled
  85.157 +training. You have to lead from one to another.
  85.158 +
  85.159 +What about initial state? Could start the input videos at a specific
  85.160 +state, then just match that explicitly.
  85.161 +
  85.162 +!! The training doesn't have to be labeled -- you can just move around
  85.163 +for a while!!
  85.164 +
  85.165 +!! Limited search with motion based alignment.
  85.166 +
  85.167 +
  85.168 +
  85.169 +
  85.170 +"play arounds" can establish a chain of linked sensoriums. Future
  85.171 +matches must fall into one of the already experienced things, and once
  85.172 +they do, it greatly limits the things that are possible in the future.
  85.173 +
  85.174 +
  85.175 +frame differences help to detect muscle exertion.
  85.176 +
  85.177 +Can try to match on a few "representative" frames. Can also just have
  85.178 +a few "bodies" in various states which we try to match.
  85.179 +
  85.180 +
  85.181 +
  85.182 +Paths through state-space have the exact same signature as
  85.183 +simulation. BUT, these can be searched in parallel and don't interfere
  85.184 +with each other.
  85.185 +
  85.186 +
  85.187 +
  85.188 +
  85.189 +** Final stretch up to First Draft
  85.190 +
  85.191 +*** DONE complete debug control of worm
  85.192 +    CLOSED: [2014-03-17 Mon 17:29] SCHEDULED: <2014-03-17 Mon>
  85.193 +    CLOCK: [2014-03-17 Mon 14:01]--[2014-03-17 Mon 17:29] =>  3:28
  85.194 +*** DONE add phi-space output to debug control
  85.195 +    CLOSED: [2014-03-17 Mon 17:42] SCHEDULED: <2014-03-17 Mon>
  85.196 +    CLOCK: [2014-03-17 Mon 17:31]--[2014-03-17 Mon 17:42] =>  0:11
  85.197 +
  85.198 +*** DONE complete automatic touch partitioning
  85.199 +    CLOSED: [2014-03-18 Tue 21:43] SCHEDULED: <2014-03-18 Tue>
  85.200 +*** DONE complete cyclic predicate
  85.201 +    CLOSED: [2014-03-19 Wed 16:34] SCHEDULED: <2014-03-18 Tue>
  85.202 +    CLOCK: [2014-03-19 Wed 13:16]--[2014-03-19 Wed 16:34] =>  3:18
  85.203 +*** DONE complete three phi-stream action predicates; test them with debug control
  85.204 +    CLOSED: [2014-03-19 Wed 16:35] SCHEDULED: <2014-03-17 Mon>
  85.205 +    CLOCK: [2014-03-18 Tue 18:36]--[2014-03-18 Tue 21:43] =>  3:07
  85.206 +    CLOCK: [2014-03-18 Tue 18:34]--[2014-03-18 Tue 18:36] =>  0:02
  85.207 +    CLOCK: [2014-03-17 Mon 19:19]--[2014-03-17 Mon 21:19] =>  2:00
  85.208 +*** DONE build an automatic "do all the things" sequence.
  85.209 +    CLOSED: [2014-03-19 Wed 16:55] SCHEDULED: <2014-03-19 Wed>
  85.210 +    CLOCK: [2014-03-19 Wed 16:53]--[2014-03-19 Wed 16:55] =>  0:02
  85.211 +*** DONE implement proprioception based movement lookup in phi-space
  85.212 +    CLOSED: [2014-03-19 Wed 22:04] SCHEDULED: <2014-03-19 Wed>
  85.213 +    CLOCK: [2014-03-19 Wed 19:32]--[2014-03-19 Wed 22:04] =>  2:32
  85.214 +*** DONE make proprioception reference phi-space indexes
  85.215 +    CLOSED: [2014-03-19 Wed 22:47] SCHEDULED: <2014-03-19 Wed>
  85.216 +    CLOCK: [2014-03-19 Wed 22:07]
  85.217 +
  85.218 +
  85.219 +*** DONE create test videos, also record positions of worm segments
  85.220 +    CLOSED: [2014-03-20 Thu 22:02] SCHEDULED: <2014-03-19 Wed>
  85.221 +
  85.222 +*** TODO Collect intro, worm-learn and cortex creation into draft thesis. 
  85.223 +    
    86.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    86.2 +++ b/thesis/rlm-cortex-meng.tex	Thu Mar 27 17:57:01 2014 -0400
    86.3 @@ -0,0 +1,138 @@
    86.4 +% -*- Mode:TeX -*-
    86.5 +
    86.6 +%% IMPORTANT: The official thesis specifications are available at:
    86.7 +%%            http://libraries.mit.edu/archives/thesis-specs/
    86.8 +%%
    86.9 +%%            Please verify your thesis' formatting and copyright
   86.10 +%%            assignment before submission.  If you notice any
   86.11 +%%            discrepancies between these templates and the 
   86.12 +%%            MIT Libraries' specs, please let us know
   86.13 +%%            by e-mailing thesis@mit.edu
   86.14 +
   86.15 +%% The documentclass options along with the pagestyle can be used to generate
   86.16 +%% a technical report, a draft copy, or a regular thesis.  You may need to
   86.17 +%% re-specify the pagestyle after you \include  cover.tex.  For more
   86.18 +%% information, see the first few lines of mitthesis.cls. 
   86.19 +
   86.20 +%\documentclass[12pt,vi,twoside]{mitthesis}
   86.21 +%%
   86.22 +%%  If you want your thesis copyright to you instead of MIT, use the
   86.23 +%%  ``vi'' option, as above.
   86.24 +%%
   86.25 +%\documentclass[12pt,twoside,leftblank]{mitthesis}
   86.26 +%%
   86.27 +%% If you want blank pages before new chapters to be labelled ``This
   86.28 +%% Page Intentionally Left Blank'', use the ``leftblank'' option, as
   86.29 +%% above. 
   86.30 +
   86.31 +\documentclass[12pt,twoside,singlespace,vi]{mitthesis}
   86.32 +%\documentclass[12pt,twoside,vi]{mitthesis}
   86.33 +\usepackage[utf8]{inputenc}
   86.34 +\usepackage[T1]{fontenc}
   86.35 +\usepackage{fixltx2e}
   86.36 +\usepackage{graphicx}
   86.37 +\usepackage{longtable}
   86.38 +\usepackage{float}
   86.39 +\usepackage{wrapfig}
   86.40 +\usepackage{rotating}
   86.41 +\usepackage[normalem]{ulem}
   86.42 +\usepackage{amsmath}
   86.43 +\usepackage{textcomp}
   86.44 +\usepackage{marvosym}
   86.45 +\usepackage{wasysym}
   86.46 +\usepackage{amssymb}
   86.47 +\usepackage{hyperref}
   86.48 +\usepackage{libertine}
   86.49 +\usepackage{inconsolata}
   86.50 +\usepackage{rotating}
   86.51 +\usepackage{caption}
   86.52 +
   86.53 +%\usepackage{afterpage}
   86.54 +
   86.55 +%\afterpage{\clearpage}          %
   86.56 +
   86.57 +\usepackage[backend=bibtex,style=alphabetic]{biblatex}
   86.58 +\addbibresource{cortex.bib}
   86.59 +
   86.60 +\usepackage{xcolor}
   86.61 +\definecolor{dark-red}{rgb}{0.4,0.15,0.15}
   86.62 +\definecolor{dark-blue}{rgb}{0.15,0.4,0.15}
   86.63 +\definecolor{medium-blue}{rgb}{0,0,0.5}
   86.64 +\hypersetup{
   86.65 +    colorlinks, linkcolor={dark-red},
   86.66 +    citecolor={dark-blue}, urlcolor={medium-blue}
   86.67 +}
   86.68 +
   86.69 +\newenvironment{code}{\captionsetup{type=listing}}{}
   86.70 +
   86.71 +\renewcommand{\thesection}{\arabic{section}}
   86.72 +\renewcommand{\thefigure}{\arabic{figure}}
   86.73 +
   86.74 +%%%%% better source code display
   86.75 +\usepackage{minted}
   86.76 +
   86.77 +%% dyl fonts
   86.78 +
   86.79 +% \usemintedstyle{friendly}
   86.80 +% \usemintedstyle{perldoc}
   86.81 +%\definecolor{bg}{rgb}{0.95,0.95,0.95}
   86.82 +\definecolor{bg}{rgb}{0.625,0,0}
   86.83 +\usemintedstyle{default}
   86.84 +\newcommand{\why}[1]{\\ \par{\footnotesize #1}}
   86.85 +%\setmonofont[Scale=0.9,BoldFont={Inconsolata Bold}]{Inconsolata}
   86.86 +
   86.87 +%\usepackage[gray]{xcolor}
   86.88 +\newminted{clojure}{fontsize=\footnotesize}
   86.89 +%\newminted{clojure}{fontsize=\footnotesize,bgcolor=bg}
   86.90 +%\newminted{clojure}{fontsize=\scriptsize}
   86.91 +
   86.92 +%\usepackage{lgrind}
   86.93 +\pagestyle{plain}
   86.94 +
   86.95 +
   86.96 +%% % Alter some LaTeX defaults for better treatment of figures:
   86.97 +%% % See p.105 of "TeX Unbound" for suggested values.
   86.98 +%% % See pp. 199-200 of Lamport's "LaTeX" book for details.
   86.99 +%% %   General parameters, for ALL pages:
  86.100 +%% \renewcommand{\topfraction}{0.9}	% max fraction of floats at top
  86.101 +%% \renewcommand{\bottomfraction}{0.8}	% max fraction of floats at bottom
  86.102 +%% %   Parameters for TEXT pages (not float pages):
  86.103 +%% \setcounter{topnumber}{2}
  86.104 +%% \setcounter{bottomnumber}{2}
  86.105 +%% \setcounter{totalnumber}{4}     % 2 may work better
  86.106 +%% \setcounter{dbltopnumber}{2}    % for 2-column pages
  86.107 +%% \renewcommand{\dbltopfraction}{0.9}	% fit big float above 2-col. text
  86.108 +%% \renewcommand{\textfraction}{0.07}	% allow minimal text w. figs
  86.109 +%% %   Parameters for FLOAT pages (not text pages):
  86.110 +%% \renewcommand{\floatpagefraction}{0.7}	% require fuller float pages
  86.111 +%% % N.B.: floatpagefraction MUST be less than topfraction !!
  86.112 +%% \renewcommand{\dblfloatpagefraction}{0.7}	% require fuller float pages
  86.113 +%% % remember to use [htp] or [htpb] for placement
  86.114 +
  86.115 +
  86.116 +\begin{document}
  86.117 +
  86.118 +\include{cover}
  86.119 +% Some departments (e.g. 5) require an additional signature page.  See
  86.120 +% signature.tex for more information and uncomment the following line if
  86.121 +% applicable.
  86.122 +% \include{signature}
  86.123 +\pagestyle{plain}
  86.124 +\tableofcontents
  86.125 +%\newpage
  86.126 +%\listoffigures
  86.127 +%\newpage
  86.128 +%\listoftables
  86.129 +\include{cortex}
  86.130 +\nocite{*}
  86.131 +%\include{chap2}
  86.132 +\appendix
  86.133 +\begin{singlespace}
  86.134 +%\bibliographystyle{agsm}
  86.135 +%\bibliographystyle{apa}
  86.136 +%\bibliographystyle{plainnat}
  86.137 +\include{user-guide}
  86.138 +\printbibliography
  86.139 +\end{singlespace}
  86.140 +\end{document}
  86.141 +
    87.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    87.2 +++ b/thesis/to-frames.pl	Thu Mar 27 17:57:01 2014 -0400
    87.3 @@ -0,0 +1,15 @@
    87.4 +#!/bin/perl
    87.5 +
    87.6 +$movie_file = shift(@ARGV);
    87.7 +
    87.8 +# get file name without extension
    87.9 +$movie_file =~ m/^([^.]+)\.[^.]+$/;
   87.10 +$movie_name = $1;
   87.11 +
   87.12 +@mkdir_command = ("mkdir", "-vp", $movie_name);
   87.13 +@ffmpeg_command = ("ffmpeg", "-i", $movie_file, $movie_name."/%07d.png");
   87.14 +
   87.15 +print "@mkdir_command\n";
   87.16 +system(@mkdir_command);
   87.17 +print "@ffmpeg_command\n";
   87.18 +system(@ffmpeg_command);
   87.19 \ No newline at end of file
    88.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    88.2 +++ b/thesis/user-guide.org	Thu Mar 27 17:57:01 2014 -0400
    88.3 @@ -0,0 +1,6 @@
    88.4 +* Appendix: =CORTEX= User Guide
    88.5 +
    88.6 +  For future students who would like to use =CORTEX= in their own
    88.7 +  projects.
    88.8 +
    88.9 +
    89.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    89.2 +++ b/thesis/weave-thesis.sh	Thu Mar 27 17:57:01 2014 -0400
    89.3 @@ -0,0 +1,20 @@
    89.4 +#!/bin/sh
    89.5 +
    89.6 +emacs  \
    89.7 +-l /home/r/config/emacs/clojure-init.el \
    89.8 +-l /home/r/config/emacs/org-init.el \
    89.9 +-l /home/r/config/emacs/thesis-color.el \
   89.10 +--batch \
   89.11 +--eval "
   89.12 +(progn
   89.13 +  (find-file \"cortex.org\")
   89.14 +  (org-latex-export-to-latex nil nil nil t nil) \
   89.15 +  (find-file \"user-guide.org\")
   89.16 +  (org-latex-export-to-latex nil nil nil t nil) \
   89.17 +  (find-file \"abstract.org\")
   89.18 +  (org-latex-export-to-latex nil nil nil t nil))" \
   89.19 +\
   89.20 +2>&1 
   89.21 +
   89.22 +rm -f cortex.tex~
   89.23 +rm -f abstract.tex~