# HG changeset patch
# User Robert McIntyre
# Date 1395957421 14400
# Node ID 763d13f77e037be224d5ed2ccf38b967051a7933
# Parent  a86555b029165b06ef14628d8b9581f528d3c94b
# Parent  26c13c42481f912ebc0732c4ebcab53cad469473
merge in laptop changes.

diff -r a86555b02916 -r 763d13f77e03 .hgignore
--- a/.hgignore	Thu Mar 27 17:56:26 2014 -0400
+++ b/.hgignore	Thu Mar 27 17:57:01 2014 -0400
@@ -15,6 +15,7 @@
 libbulletjme.so
 java/build/*
 java/dist/*
+thesis/*.pdf
 
 syntax: regexp
 ^.*blend\d$
diff -r a86555b02916 -r 763d13f77e03 assets/Models/test-touch/touch-cube.blend.orig
Binary file assets/Models/test-touch/touch-cube.blend.orig has changed
diff -r a86555b02916 -r 763d13f77e03 assets/Models/worm/basic-muscle.png
Binary file assets/Models/worm/basic-muscle.png has changed
diff -r a86555b02916 -r 763d13f77e03 assets/Models/worm/touch-profile-imag.png
Binary file assets/Models/worm/touch-profile-imag.png has changed
diff -r a86555b02916 -r 763d13f77e03 assets/Models/worm/touch-profile-imag.xcf
Binary file assets/Models/worm/touch-profile-imag.xcf has changed
diff -r a86555b02916 -r 763d13f77e03 assets/Models/worm/touch-profile.png
Binary file assets/Models/worm/touch-profile.png has changed
diff -r a86555b02916 -r 763d13f77e03 assets/Models/worm/touch-profile.xcf
Binary file assets/Models/worm/touch-profile.xcf has changed
diff -r a86555b02916 -r 763d13f77e03 assets/Models/worm/worm-of-the-imagination.blend
Binary file assets/Models/worm/worm-of-the-imagination.blend has changed
diff -r a86555b02916 -r 763d13f77e03 assets/Models/worm/worm-segment.blend
Binary file assets/Models/worm/worm-segment.blend has changed
diff -r a86555b02916 -r 763d13f77e03 assets/Models/worm/worm-single-segment.blend
Binary file assets/Models/worm/worm-single-segment.blend has changed
diff -r a86555b02916 -r 763d13f77e03 assets/Models/worm/worm.blend
Binary file assets/Models/worm/worm.blend has changed
diff -r a86555b02916 -r 763d13f77e03 assets/Textures/aurellem.png
Binary file assets/Textures/aurellem.png has changed
diff -r a86555b02916 -r 763d13f77e03 assets/Textures/greenGrid.png
Binary file assets/Textures/greenGrid.png has changed
diff -r a86555b02916 -r 763d13f77e03 assets/Textures/greenHexGrid.png
Binary file assets/Textures/greenHexGrid.png has changed
diff -r a86555b02916 -r 763d13f77e03 assets/Textures/squareGrid.png
Binary file assets/Textures/squareGrid.png has changed
diff -r a86555b02916 -r 763d13f77e03 gimp-patterns/film-grain-vertical.pat
Binary file gimp-patterns/film-grain-vertical.pat has changed
diff -r a86555b02916 -r 763d13f77e03 gimp-patterns/gb-grid.pat
Binary file gimp-patterns/gb-grid.pat has changed
diff -r a86555b02916 -r 763d13f77e03 gimp-patterns/tactile-1.pat
Binary file gimp-patterns/tactile-1.pat has changed
diff -r a86555b02916 -r 763d13f77e03 gimp-patterns/tactile-128.pat
Binary file gimp-patterns/tactile-128.pat has changed
diff -r a86555b02916 -r 763d13f77e03 gimp-patterns/tactile-16.pat
Binary file gimp-patterns/tactile-16.pat has changed
diff -r a86555b02916 -r 763d13f77e03 gimp-patterns/tactile-2.pat
Binary file gimp-patterns/tactile-2.pat has changed
diff -r a86555b02916 -r 763d13f77e03 gimp-patterns/tactile-3.pat
Binary file gimp-patterns/tactile-3.pat has changed
diff -r a86555b02916 -r 763d13f77e03 gimp-patterns/tactile-32.pat
Binary file gimp-patterns/tactile-32.pat has changed
diff -r a86555b02916 -r 763d13f77e03 gimp-patterns/tactile-4.pat
Binary file gimp-patterns/tactile-4.pat has changed
diff -r a86555b02916 -r 763d13f77e03 gimp-patterns/tactile-5.pat
Binary file gimp-patterns/tactile-5.pat has changed
diff -r a86555b02916 -r 763d13f77e03 gimp-patterns/tactile-6.pat
Binary file gimp-patterns/tactile-6.pat has changed
diff -r a86555b02916 -r 763d13f77e03 gimp-patterns/tactile-64.pat
Binary file gimp-patterns/tactile-64.pat has changed
diff -r a86555b02916 -r 763d13f77e03 gimp-patterns/tactile-7.pat
Binary file gimp-patterns/tactile-7.pat has changed
diff -r a86555b02916 -r 763d13f77e03 gimp-patterns/tactile-8.pat
Binary file gimp-patterns/tactile-8.pat has changed
diff -r a86555b02916 -r 763d13f77e03 images/aurellem.xcf
Binary file images/aurellem.xcf has changed
diff -r a86555b02916 -r 763d13f77e03 java/build.xml
--- a/java/build.xml	Thu Mar 27 17:56:26 2014 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,32 +0,0 @@
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
diff -r a86555b02916 -r 763d13f77e03 java/src/com/aurellem/opencv/OpenCV.java
--- a/java/src/com/aurellem/opencv/OpenCV.java	Thu Mar 27 17:56:26 2014 -0400
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,15 +0,0 @@
-package com.aurellem.opencv;
-
-public class OpenCV {
-
-    public OpenCV(){}
-
-    public static void loadVBA(){
-        System.loadLibrary("opencv_java249");
-    }
-
-    public static void absoluteLoadVBA(){
-        System.load("/usr/share/OpenCV/java/libopencv_java249.so");
-    }
-
-}
diff -r a86555b02916 -r 763d13f77e03 org/ideas.org
--- a/org/ideas.org	Thu Mar 27 17:56:26 2014 -0400
+++ b/org/ideas.org	Thu Mar 27 17:57:01 2014 -0400
@@ -27,12 +27,12 @@
 - control of gravity within a certain radius
 - speed up/slow time
 - object creation/destruction
-- future-sight -- step the simulation forward a few ticks, gather
+- prescience -- step the simulation forward a few ticks, gather
   sensory data, then supply this data for the creature as one of its
   actual senses.
 - Symbol Sense
-   Where objects in the world can be queried for description /
+   Objects in the world can be queried for description /
    symbols.
 - Symbol Marking
@@ -66,10 +66,6 @@
  - make a joint that figures out what type of joint it is
    (range of motion)
-
-
-
-
 * goals
 ** have to get done before Winston
@@ -104,8 +100,6 @@
 ** don't have to get done before winston
 - [X] write tests for integration -- 3 days
 - [X] usertime/gametime clock HUD display -- day
-- [ ] find papers for each of the senses justifying my own
-  representation -- week
 - [X] show sensor maps in HUD display? -- 4 days
 - [X] show sensor maps in AWT display? -- 2 days
 - [X] upgrade to clojure 1.3, replace all defvars with new def
@@ -122,10 +116,9 @@
 ;;Each minute and unseen part;
 ;;For the Gods see everywhere.
-
 * misc
 - use object tracking on moving objects to derive good static
   detectors and achieve background separation
 - temporal scale pyramids. this can help in verb recognition by
   making verb identification time-scale independent (up to a certain
-  factor)
\ No newline at end of file
+  factor)
diff -r a86555b02916 -r 763d13f77e03 org/movement.org
--- a/org/movement.org	Thu Mar 27 17:56:26 2014 -0400
+++ b/org/movement.org	Thu Mar 27 17:57:01 2014 -0400
@@ -283,7 +283,7 @@
          muscles (pics "muscles/0")
          targets (map
                   #(File. (str base "out/" (format "%07d.png" %)))
-                  (range 0 (count main-view)))]
+                  (range (count main-view)))]
      (dorun
       (pmap
        (comp
diff -r a86555b02916 -r 763d13f77e03 org/proprioception.org
--- a/org/proprioception.org	Thu Mar 27 17:56:26 2014 -0400
+++ b/org/proprioception.org	Thu Mar 27 17:57:01 2014 -0400
@@ -52,7 +52,7 @@
   system. The three vectors do not have to be normalized or
  orthogonal."
   [vec1 vec2 vec3]
-  (< 0 (.dot (.cross vec1 vec2) vec3)))
+  (pos? (.dot (.cross vec1 vec2) vec3)))
 
 (defn absolute-angle
   "The angle between 'vec1 and 'vec2 around 'axis. In the range
@@ -328,7 +328,7 @@
          proprioception (pics "proprio/0")
          targets (map
                   #(File. (str base "out/" (format "%07d.png" %)))
-                  (range 0 (count main-view)))]
+                  (range (count main-view)))]
      (dorun
       (pmap
        (comp
@@ -385,7 +385,7 @@
 
 * Next
 
-Next time, I'll give the Worm the power to [[./movement.org][move on it's own]].
+Next time, I'll give the Worm the power to [[./movement.org][move on its own]].
 
 * COMMENT generate source
diff -r a86555b02916 -r 763d13f77e03 org/self_organizing_touch.clj
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/org/self_organizing_touch.clj	Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,169 @@
+(ns org.aurellem.self-organizing-touch
+  "Using free play to automatically organize touch perception into regions."
+  {:author "Robert McIntyre"}
+  (:use (cortex world util import body sense
+                hearing touch vision proprioception movement
+                test))
+  (:use [clojure set pprint])
+  (:import (com.jme3.math ColorRGBA Vector3f))
+  (:import java.io.File)
+  (:import com.jme3.audio.AudioNode)
+  (:import com.aurellem.capture.RatchetTimer)
+  (:import (com.aurellem.capture Capture IsoTimer))
+  (:import (com.jme3.math Vector3f ColorRGBA)))
+
+(use 'org.aurellem.worm-learn)
+(dorun (cortex.import/mega-import-jme3))
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;; A demonstration of self-organizing touch maps through experience. ;
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(def single-worm-segment-view
+  [(Vector3f. 2.0681207, -6.1406755, 1.6106138)
+   (Quaternion. -0.15558705, 0.843615, -0.3428654, -0.38281822)])
+
+(def worm-single-segment-muscle-labels
+  [:lift-1 :lift-2 :roll-1 :roll-2])
+
+(defn touch-kinesthetics []
+  [[170 :lift-1 40]
+   [190 :lift-1 19]
+   [206 :lift-1  0]
+
+   [400 :lift-2 40]
+   [410 :lift-2  0]
+
+   [570 :lift-2 40]
+   [590 :lift-2 21]
+   [606 :lift-2  0]
+
+   [800 :lift-1 30]
+   [809 :lift-1  0]
+
+   [900 :roll-2 40]
+   [905 :roll-2 20]
+   [910 :roll-2  0]
+
+   [1000 :roll-2 40]
+   [1005 :roll-2 20]
+   [1010 :roll-2  0]
+
+   [1100 :roll-2 40]
+   [1105 :roll-2 20]
+   [1110 :roll-2  0]
+   ])
+
+(defn single-worm-segment []
+  (load-blender-model "Models/worm/worm-single-segment.blend"))
+
+(defn worm-segment []
+  (let [model (single-worm-segment)]
+    {:body (doto model (body!))
+     :touch (touch! model)
+     :proprioception (proprioception! model)
+     :muscles (movement! model)}))
+
+
+(defn worm-segment-defaults []
+  (let [direct-control (worm-direct-control worm-muscle-labels 40)]
+    (merge (worm-world-defaults)
+           {:worm worm-segment
+            :view single-worm-segment-view
+            :experience-watch nil
+            :motor-control
+            (motor-control-program
+             worm-single-segment-muscle-labels
+             (touch-kinesthetics))
+            :end-frame 1200})))
+
+(def full-contact [(float 0.0) (float 0.1)])
+
+(defn pure-touch?
+  "This is worm-specific code to determine if a large region of touch
+   sensors is either all on or all off."
+  [[coords touch :as touch-data]]
+  (= (set (map first touch)) (set full-contact)))
+
+(defn remove-similar
+  "Remove any set that almost entirely contains another, smaller set
+   in coll, keeping only the smaller, more specific regions."
+  [coll]
+  (loop [result () coll (sort-by (comp - count) coll)]
+    (if (empty? coll) result
+        (let [[x & xs] coll
+              c (count x)]
+          (if (some
+               (fn [other-set]
+                 (let [oc (count other-set)]
+                   (< (- (count (union other-set x)) c) (* oc 0.1))))
+               xs)
+            (recur result xs)
+            (recur (cons x result) xs))))))
+
+(def all-touch-coordinates
+  (concat
+   (rect-region [0  15] [7  22])
+   (rect-region [8   0] [14 29])
+   (rect-region [15 15] [22 22])))
+
+(defn view-touch-region
+  ([coords out]
+   (let [touched-region
+         (reduce
+          (fn [m k]
+            (assoc m k [0.0 0.1]))
+          (zipmap all-touch-coordinates (repeat [0.1 0.1])) coords)
+         data
+         [[(vec (keys touched-region)) (vec (vals touched-region))]]
+         touch-display (view-touch)]
+     (touch-display data out)))
+  ;; fixed: the original recursed as (view-touch-region nil), dropping
+  ;; coords and looping forever.
+  ([coords] (view-touch-region coords nil)))
+
+
+(defn learn-touch-regions []
+  (let [experiences (atom [])
+        world (apply-map
+               worm-world
+               (assoc (worm-segment-defaults)
+                      :experiences experiences
+                      :record (File. "/home/r/proj/cortex/thesis/video/touch-learn-2/")))]
+    (run-world world)
+    (->>
+     @experiences
+     (drop 175)
+     ;; access the single segment's touch data
+     (map (comp first :touch))
+     ;; only deal with "pure" touch data to determine surfaces
+     (filter pure-touch?)
+     ;; associate coordinates with touch values
+     (map (partial apply zipmap))
+     ;; select those regions where contact is being made
+     (map (partial group-by second))
+     (map #(get % full-contact))
+     (map (partial map first))
+     ;; remove redundant/subset regions
+     (map set)
+     remove-similar)))
+
+
+(def all-touch-coordinates
+  (concat
+   (rect-region [0  15] [7  22])
+   (rect-region [8   0] [14 29])
+   (rect-region [15 15] [22 22])))
+
+(defn view-touch-region [coords]
+  (let [touched-region
+        (reduce
+         (fn [m k]
+           (assoc m k [0.0 0.1]))
+         (zipmap all-touch-coordinates (repeat [0.1 0.1])) coords)
+        data
+        [[(vec (keys touched-region)) (vec (vals touched-region))]]
+        touch-display (view-touch)]
+    (dorun (repeatedly 5 #(touch-display data)))))
+
+(defn learn-and-view-touch-regions []
+  (map view-touch-region
+       (learn-touch-regions)))
+
+
diff -r a86555b02916 -r 763d13f77e03 org/sense.org
--- a/org/sense.org	Thu Mar 27 17:56:26 2014 -0400
+++ b/org/sense.org	Thu Mar 27 17:57:01 2014 -0400
@@ -257,7 +257,7 @@
   Returns a function that accepts a BufferedImage and draws it to the
   JPanel. If given a directory it will save the images as png files
   starting at 0000000.png and incrementing from there."
-  ([#^File save]
+  ([#^File save title]
    (let [idx (atom -1)
          image
          (atom
@@ -268,7 +268,7 @@
             [graphics]
             (proxy-super paintComponent graphics)
             (.drawImage graphics @image 0 0 nil)))
-        frame (JFrame. "Display Image")]
+        frame (JFrame. title)]
      (SwingUtilities/invokeLater
       (fn []
         (doto frame
@@ -285,6 +285,8 @@
            (ImageIO/write
             i "png"
             (File. save (format "%07d.png" (swap! idx inc)))))))
+  ([#^File save]
+     (view-image save "Display Image"))
   ([] (view-image nil)))
 
 (defn view-sense
diff -r a86555b02916 -r 763d13f77e03 org/touch.org
--- a/org/touch.org	Thu Mar 27 17:56:26 2014 -0400
+++ b/org/touch.org	Thu Mar 27 17:57:01 2014 -0400
@@ -78,7 +78,7 @@
 To simulate touch there are three conceptual steps. For each solid
 object in the creature, you first have to get UV image and scale
 parameter which define the position and length of the feelers. Then,
-you use the triangles which compose the mesh and the UV data stored in
+you use the triangles which comprise the mesh and the UV data stored in
 the mesh to determine the world-space position and orientation of each
 feeler.
 Then once every frame, update these positions and orientations to
 match the current position and orientation of the object, and use
@@ -136,7 +136,7 @@
 A =Mesh= is composed of =Triangles=, and each =Triangle= has three
 vertices which have coordinates in world space and UV space.
 
-Here, =triangles= gets all the world-space triangles which compose a
+Here, =triangles= gets all the world-space triangles which comprise a
 mesh, while =pixel-triangles= gets those same triangles expressed in
 pixel coordinates (which are UV coordinates scaled to fit the height
 and width of the UV image).
@@ -152,7 +152,7 @@
     (.getTriangle (.getMesh geo) triangle-index scratch)
     scratch)))
 
 (defn triangles
-  "Return a sequence of all the Triangles which compose a given
+  "Return a sequence of all the Triangles which comprise a given
    Geometry."
   [#^Geometry geo]
   (map (partial triangle geo) (range (.getTriangleCount (.getMesh geo)))))
@@ -240,7 +240,7 @@
   [#^Triangle t]
   (let [mat (Matrix4f.)
         [vert-1 vert-2 vert-3]
-        ((comp vec map) #(.get t %) (range 3))
+        (mapv #(.get t %) (range 3))
        unit-normal (do (.calculateNormal t)(.getNormal t))
        vertices [vert-1 vert-2 vert-3 unit-normal]]
    (dorun
diff -r a86555b02916 -r 763d13f77e03 org/util.org
--- a/org/util.org	Thu Mar 27 17:56:26 2014 -0400
+++ b/org/util.org	Thu Mar 27 17:57:01 2014 -0400
@@ -129,10 +129,13 @@
 (defn position-camera
   "Change the position of the in-world camera."
-  [world #^Vector3f position #^Quaternion rotation]
+  ([world #^Vector3f position #^Quaternion rotation]
    (doto (.getCamera world)
      (.setLocation position)
-     (.setRotation rotation)))
+     (.setRotation rotation)))
+  ([world [position rotation]]
+   (position-camera world position rotation)))
+
 
 (defn enable-debug
   "Turn on debug wireframes for every object in this simulation."
@@ -146,13 +149,13 @@
 
 (defn speed-up
   "Increase the dismally slow speed of the world's camera."
-  [world]
-  (.setMoveSpeed (.getFlyByCamera world)
-                 (float 60))
-  (.setRotationSpeed (.getFlyByCamera world)
-                     (float 3))
-  world)
-
+  ([world] (speed-up world 1))
+  ([world amount]
+   (.setMoveSpeed (.getFlyByCamera world)
+                  (float (* amount 60)))
+   (.setRotationSpeed (.getFlyByCamera world)
+                      (float (* amount 3)))
+   world))
 
 (defn no-logging
   "Disable all of jMonkeyEngine's logging."
@@ -682,7 +685,7 @@
 #+end_src
 
 
-* COMMENT code generation
+* code generation
 #+begin_src clojure :tangle ../src/cortex/import.clj
 <>
 #+end_src
diff -r a86555b02916 -r 763d13f77e03 org/world.org
--- a/org/world.org	Thu Mar 27 17:56:26 2014 -0400
+++ b/org/world.org	Thu Mar 27 17:57:01 2014 -0400
@@ -177,7 +177,7 @@
 
 (defn initialize-inputs
   "Establish key-bindings for a particular virtual world."
-  [game input-manager key-map]
+  [game input-manager key-map]
   (doall
    (map (fn [[name trigger]]
           (.addMapping
@@ -192,9 +192,6 @@
 
 #+end_src
 
-#+results: input
-: #'cortex.world/initialize-inputs
-
 These functions are for controlling the world through the keyboard
 and mouse.
 
@@ -247,7 +244,7 @@
   "the =world= function takes care of the details of initializing a
   SimpleApplication.
 
-  ***** Arguments:
+  ,***** Arguments:
 
   - root-node : a com.jme3.scene.Node object which contains all of
     the objects that should be in the simulation.
diff -r a86555b02916 -r 763d13f77e03 org/worm_learn.clj
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/org/worm_learn.clj	Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,562 @@
+(ns org.aurellem.worm-learn
+  "General worm creation framework."
+  {:author "Robert McIntyre"}
+  (:use (cortex world util import body sense
+                hearing touch vision proprioception movement
+                test))
+  (:import (com.jme3.math ColorRGBA Vector3f))
+  (:import java.io.File)
+  (:import com.jme3.audio.AudioNode)
+  (:import com.aurellem.capture.RatchetTimer)
+  (:import (com.aurellem.capture Capture IsoTimer))
+  (:import (com.jme3.math Vector3f ColorRGBA)))
+
+(import org.apache.commons.math3.transform.TransformType)
+(import org.apache.commons.math3.transform.FastFourierTransformer)
+(import org.apache.commons.math3.transform.DftNormalization)
+
+(use 'clojure.pprint)
+(use 'clojure.set)
+(dorun (cortex.import/mega-import-jme3))
+(rlm.rlm-commands/help)
+
+(load-bullet)
+
+(def hand "Models/test-creature/hand.blend")
+
+(defn worm-model []
+  (load-blender-model "Models/worm/worm.blend"))
+
+(defn worm []
+  (let [model (load-blender-model "Models/worm/worm.blend")]
+    {:body (doto model (body!))
+     :touch (touch! model)
+     :proprioception (proprioception! model)
+     :muscles (movement! model)}))
+
+(defn worm* []
+  (let [model (load-blender-model "Models/worm/worm-of-the-imagination.blend")]
+    {:body (doto model (body!))
+     :touch (touch! model)
+     :proprioception (proprioception! model)
+     :muscles (movement! model)}))
+
+
+(def output-base (File. "/home/r/proj/cortex/render/worm-learn/curl"))
+
+
+(defn motor-control-program
+  "Create a function which will execute the motor script"
+  [muscle-labels
+   script]
+  (let [current-frame (atom -1)
+        keyed-script (group-by first script)
+        current-forces (atom {}) ]
+    (fn [effectors]
+      (let [indexed-effectors (vec effectors)]
+        (dorun
+         (for [[_ part force] (keyed-script (swap! current-frame inc))]
+           (swap! current-forces (fn [m] (assoc m part force)))))
+        (doall (map (fn [effector power]
+                      (effector (int power)))
+                    effectors
+                    (map #(@current-forces % 0) muscle-labels)))))))
+
+(defn worm-direct-control
+  "Create keybindings and a muscle control program that will enable
+   the user to control the worm via the keyboard."
+  [muscle-labels activation-strength]
+  (let [strengths (mapv (fn [_] (atom 0)) muscle-labels)
+        activator
+        (fn [n]
+          (fn [world pressed?]
+            (let [strength (if pressed? activation-strength 0)]
+              (swap! (nth strengths n) (constantly strength)))))
+        activators
+        (map activator (range (count muscle-labels)))
+        worm-keys
+        ["key-f" "key-r"
+         "key-g" "key-t"
+         "key-h" "key-y"
+         "key-j" "key-u"
+         "key-k" "key-i"
+         "key-l" "key-o"]]
+    {:motor-control
+     (fn [effectors]
+       (doall
+        (map (fn [strength effector]
+               (effector (deref strength)))
+             strengths effectors)))
+     :keybindings
+     ;; assume muscles are listed in pairs and map them to keys.
+     (zipmap worm-keys activators)}))
+
+;; These are scripts that direct the worm to move in two radically
+;; different patterns -- a sinusoidal wiggling motion, and a curling
+;; motion that causes the worm to form a circle.
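+;;
+;; An illustrative aside (mine, not part of the original experiments;
+;; the numbers are made up): motor-control-program compiles a script of
+;; [frame muscle-label force] triples into a controller that is called
+;; once per frame with the worm's effectors, and a force stays applied
+;; until a later script entry overwrites it.
+;;
+;;   (def ctrl (motor-control-program
+;;              [:base-ex :base-flex]
+;;              [[2 :base-flex 20]
+;;               [4 :base-flex 0]]))
+;;   (ctrl effectors) ; frame 0 -- every muscle at force 0
+;;   (ctrl effectors) ; frame 1 -- still 0
+;;   (ctrl effectors) ; frame 2 -- :base-flex driven at force 20
+;;   (ctrl effectors) ; frame 3 -- still 20 (forces persist)
+;;   (ctrl effectors) ; frame 4 -- :base-flex released back to 0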
+
+(def curl-script
+  [[150 :d-flex 40]
+   [250 :d-flex 0]])
+
+(def period 18)
+
+(def worm-muscle-labels
+  [:base-ex :base-flex
+   :a-ex :a-flex
+   :b-ex :b-flex
+   :c-ex :c-flex
+   :d-ex :d-flex])
+
+(defn gen-wiggle [[flexor extensor :as muscle-pair] time-base]
+  (let [period period
+        power 45]
+    [[time-base flexor power]
+     [(+ time-base period) flexor 0]
+     [(+ time-base period 1) extensor power]
+     [(+ time-base (+ (* 2 period) 2)) extensor 0]]))
+
+(def wiggle-script
+  (mapcat gen-wiggle (repeat 4000 [:a-ex :a-flex])
+          (range 100 1000000 (+ 3 (* period 2)))))
+
+
+(defn shift-script [shift script]
+  (map (fn [[time label power]] [(+ time shift) label power])
+       script))
+
+(def do-all-the-things
+  (concat
+   curl-script
+   [[300 :d-ex 40]
+    [320 :d-ex 0]]
+   (shift-script 280 (take 16 wiggle-script))))
+
+;; Normally, we'd use unsupervised/supervised machine learning to pick
+;; out the defining features of the different actions available to the
+;; worm. For this project, I am going to explicitly define functions
+;; that recognize curling and wiggling respectively. These functions
+;; are defined using all the information available from an embodied
+;; simulation of the action. Note how much easier they are to define
+;; than if I only had vision to work with. Things like scale/position
+;; invariance are complete non-issues here. This is the advantage of
+;; body-centered action recognition and what I hope to show with this
+;; thesis.
+
+
+;; curled? relies on proprioception, resting? relies on touch,
+;; wiggling? relies on a Fourier analysis of muscle contraction, and
+;; grand-circle? relies on touch and reuses curled? as a guard.
+
+(defn curled?
+  "Is the worm curled up?"
+  [experiences]
+  (every?
+   (fn [[_ _ bend]]
+     (> (Math/sin bend) 0.64))
+   (:proprioception (peek experiences))))
+
+(defn rect-region [[x0 y0] [x1 y1]]
+  (vec
+   (for [x (range x0 (inc x1))
+         y (range y0 (inc y1))]
+     [x y])))
+
+(def worm-segment-bottom (rect-region [8 15] [14 22]))
+
+(defn contact
+  "Determine how much contact a particular worm segment has with
+   other objects. Returns a value between 0 and 1, where 1 is full
+   contact and 0 is no contact."
+  [touch-region [coords contact :as touch]]
+  ;; a touching feeler reports 0.0 and a free one reports 0.1, so the
+  ;; average lies in [0.0, 0.1]; scaling by 10, subtracting 1, and
+  ;; taking the absolute value maps this onto a [1 -> 0] contact
+  ;; fraction.
+  (-> (zipmap coords contact)
+      (select-keys touch-region)
+      (vals)
+      (#(map first %))
+      (average)
+      (* 10)
+      (- 1)
+      (Math/abs)))
+
+(defn resting?
+  "Is the worm resting on the ground?"
+  [experiences]
+  (every?
+   (fn [touch-data]
+     (< 0.9 (contact worm-segment-bottom touch-data)))
+   (:touch (peek experiences))))
+
+(defn vector:last-n [v n]
+  (let [c (count v)]
+    (if (< c n) v
+        (subvec v (- c n) c))))
+
+(defn fft [nums]
+  (map
+   #(.getReal %)
+   (.transform
+    (FastFourierTransformer. DftNormalization/STANDARD)
+    (double-array nums) TransformType/FORWARD)))
+
+(def indexed (partial map-indexed vector))
+
+(defn max-indexed [s]
+  (first (sort-by (comp - second) (indexed s))))
+
+(defn wiggling?
+  "Is the worm wiggling?"
+  [experiences]
+  (let [analysis-interval 96]
+    (when (> (count experiences) analysis-interval)
+      (let [a-flex 3
+            a-ex   2
+            muscle-activity
+            (map :muscle (vector:last-n experiences analysis-interval))
+            base-activity
+            (map #(- (% a-flex) (% a-ex)) muscle-activity)
+            accept?
+            (fn [activity]
+              (->> activity (fft) (take 20) (map #(Math/abs %))
+                   (max-indexed) (first) (<= 2)))]
+        (or (accept? (take 64 base-activity))
+            (accept? (take 64 (drop 20 base-activity))))))))
+
+
+
+(def worm-segment-bottom-tip (rect-region [15 15] [22 22]))
+
+(def worm-segment-top-tip (rect-region [0 15] [7 22]))
+
+(defn grand-circle?
+  "Does the worm form a majestic circle (one end touching the other)?"
+  [experiences]
+  (and (curled? experiences)
+       (let [worm-touch (:touch (peek experiences))
+             tail-touch (worm-touch 0)
+             head-touch (worm-touch 4)]
+         (and (< 0.1 (contact worm-segment-bottom-tip tail-touch))
+              (< 0.1 (contact worm-segment-top-tip head-touch))))))
+
+
+(declare phi-space phi-scan debug-experience)
+
+
+
+(def standard-world-view
+  [(Vector3f. 4.207176, -3.7366982, 3.0816958)
+   (Quaternion. 0.11118768, 0.87678415, 0.24434438, -0.3989771)])
+
+(def worm-side-view
+  [(Vector3f. 4.207176, -3.7366982, 3.0816958)
+   (Quaternion. -0.11555642, 0.88188726, -0.2854942, -0.3569518)])
+
+(def degenerate-worm-view
+  [(Vector3f. -0.0708936, -8.570261, 2.6487997)
+   (Quaternion. -2.318909E-4, 0.9985348, 0.053941682, 0.004291452)])
+
+(defn worm-world-defaults []
+  (let [direct-control (worm-direct-control worm-muscle-labels 40)]
+    (merge direct-control
+           {:view worm-side-view
+            :record nil
+            :experiences (atom [])
+            :experience-watch debug-experience
+            :worm worm
+            :end-frame nil})))
+
+(defn dir! [file]
+  (if-not (.exists file)
+    (.mkdir file))
+  file)
+
+(defn record-experience! [experiences data]
+  (swap! experiences #(conj % data)))
+
+(defn enable-shadows [world]
+  (let [bsr (doto
+                (BasicShadowRenderer. (asset-manager) 512)
+              (.setDirection (.normalizeLocal (Vector3f. 1 -1 -1))))]
+    (.addProcessor (.getViewPort world) bsr)))
+
+(defn enable-good-shadows [world]
+  (let [pssm
+        (doto (PssmShadowRenderer. (asset-manager) 1024 3)
+          (.setDirection (.normalizeLocal (Vector3f. -1 -3 -1)))
+          (.setLambda (float 0.55))
+          (.setShadowIntensity (float 0.6))
+          (.setCompareMode PssmShadowRenderer$CompareMode/Software)
+          (.setFilterMode PssmShadowRenderer$FilterMode/Bilinear))]
+    (.addProcessor (.getViewPort world) pssm)))
+
+(defn debug-experience
+  [experiences text]
+  (cond
+   (grand-circle? experiences) (.setText text "Grand Circle")
+   (curled? experiences)       (.setText text "Curled")
+   (wiggling? experiences)     (.setText text "Wiggling")
+   (resting? experiences)      (.setText text "Resting")
+   :else                       (.setText text "Unknown")))
+
+
+(defn worm-world
+  [& {:keys [record motor-control keybindings view experiences
+             worm end-frame experience-watch] :as settings}]
+  (let [{:keys [record motor-control keybindings view experiences
+                worm end-frame experience-watch]}
+        (merge (worm-world-defaults) settings)
+
+        touch-display  (view-touch)
+        prop-display   (view-proprioception)
+        muscle-display (view-movement)
+        {:keys [proprioception touch muscles body]} (worm)
+
+        floor
+        (box 5 1 5 :position (Vector3f. 0 -10 0)
+             :mass 0
+             :texture "Textures/aurellem.png"
+             :material "Common/MatDefs/Misc/Unshaded.j3md")
+        timer (IsoTimer. 60)
+
+        font (.loadFont (asset-manager) "Interface/Fonts/Console.fnt")
+        worm-action (doto (BitmapText. font false)
+                      (.setSize 35)
+                      (.setColor (ColorRGBA/Black)))]
+
+    (world
+     (nodify [body floor])
+     (merge standard-debug-controls keybindings)
+     (fn [world]
+       (.setLocalTranslation
+        worm-action 20 470 0)
+       (.attachChild (.getGuiNode world) worm-action)
+
+       (enable-good-shadows world)
+       (.setShadowMode body RenderQueue$ShadowMode/CastAndReceive)
+       (.setShadowMode floor RenderQueue$ShadowMode/Receive)
+
+       (.setBackgroundColor (.getViewPort world) (ColorRGBA/White))
+       (.setDisplayStatView world false)
+       (.setDisplayFps world false)
+       (position-camera world view)
+       (.setTimer world timer)
+       ;;(display-dilated-time world timer)
+       (when record
+         (dir! record)
+         (Capture/captureVideo
+          world
+          (dir! (File. record "main-view"))))
+       (speed-up world 0.5)
+       ;;(light-up-everything world)
+       )
+     (fn [world tpf]
+       (if (and end-frame (> (.getTime timer) end-frame))
+         (.stop world))
+       (let [muscle-data (vec (motor-control muscles))
+             proprioception-data (proprioception)
+             touch-data (mapv #(% (.getRootNode world)) touch)]
+         (when experiences
+           (record-experience!
+            experiences {:touch touch-data
+                         :proprioception proprioception-data
+                         :muscle muscle-data}))
+         (when experience-watch
+           (experience-watch @experiences worm-action))
+         (muscle-display
+          muscle-data
+          (when record (dir! (File. record "muscle"))))
+         (prop-display
+          proprioception-data
+          (when record (dir! (File. record "proprio"))))
+         (touch-display
+          touch-data
+          (when record (dir! (File. record "touch")))))))))
+
+
+
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+;;;;;;;;             Phi-Space                ;;;;;;;;;;;;;;;
+;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
+
+(defn generate-phi-space []
+  (let [experiences (atom [])]
+    (run-world
+     (apply-map
+      worm-world
+      (merge
+       (worm-world-defaults)
+       {:end-frame 700
+        :motor-control
+        (motor-control-program worm-muscle-labels do-all-the-things)
+        :experiences experiences})))
+    @experiences))
+
+(defn bin [digits]
+  (fn [angles]
+    (->> angles
+         (flatten)
+         (map (juxt #(Math/sin %) #(Math/cos %)))
+         (flatten)
+         (mapv #(Math/round (* % (Math/pow 10 (dec digits))))))))
+
+;; k-nearest neighbors with spatial binning. Only returns a result if
+;; the proprioceptive data is within 10% of a previously recorded
+;; result in all dimensions.
+(defn gen-phi-scan [phi-space]
+  (let [bin-keys (map bin [3 2 1])
+        bin-maps
+        (map (fn [bin-key]
+               (group-by
+                (comp bin-key :proprioception phi-space)
+                (range (count phi-space)))) bin-keys)
+        lookups (map (fn [bin-key bin-map]
+                       (fn [proprio] (bin-map (bin-key proprio))))
+                     bin-keys bin-maps)]
+    (fn lookup [proprio-data]
+      (set (some #(% proprio-data) lookups)))))
+
+
+(defn longest-thread
+  "Find the longest thread from phi-index-sets. The index sets should
+   be ordered from most recent to least recent."
+  [phi-index-sets]
+  (loop [result '()
+         [thread-bases & remaining :as phi-index-sets] phi-index-sets]
+    (if (empty? phi-index-sets)
+      (vec result)
+      (let [threads
+            (for [thread-base thread-bases]
+              (loop [thread (list thread-base)
+                     remaining remaining]
+                (let [next-index (dec (first thread))]
+                  (cond (empty? remaining) thread
+                        (contains? (first remaining) next-index)
+                        (recur
+                         (cons next-index thread) (rest remaining))
+                        :else thread))))
+            longest-thread
+            (reduce (fn [thread-a thread-b]
+                      (if (> (count thread-a) (count thread-b))
+                        thread-a thread-b))
+                    '(nil)
+                    threads)]
+        (recur (concat longest-thread result)
+               (drop (count longest-thread) phi-index-sets))))))
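+
+;; Illustrative sketches (mine, with made-up numbers -- not from the
+;; recorded experiments). (bin digits) quantizes joint angles into
+;; coarse [sin cos] coordinates so that nearby poses collide:
+;;
+;;   ((bin 3) [0.00]) ;; => [0 100]
+;;   ((bin 3) [0.01]) ;; => [1 100]  -- almost the same pose, same bin scale
+;;
+;; longest-thread stitches per-frame match-sets (newest first) into the
+;; longest run of consecutive phi-space indices:
+;;
+;;   (longest-thread [#{7} #{6} #{5} #{9}]) ;; => [9 5 6 7]
+;;   ;; the three newest frames form the consecutive run 5,6,7; the
+;;   ;; oldest frame contributes the lone index 9.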
+
+
+(defn init []
+  (def phi-space (generate-phi-space))
+  (def phi-scan (gen-phi-scan phi-space))
+  )
+
+;; (defn infer-nils-dyl [s]
+;;   (loop [closed ()
+;;          open s
+;;          anchor 0]
+;;     (if-not (empty? open)
+;;       (recur (conj closed
+;;                    (or (peek open)
+;;                        anchor))
+;;              (pop open)
+;;              (or (peek open) anchor))
+;;       closed)))
+
+;; (defn infer-nils [s]
+;;   (for [i (range (count s))]
+;;     (or (get s i)
+;;         (some (comp not nil?) (vector:last-n (- (count s) i)))
+;;         0)))
+
+
+(defn infer-nils
+  "Replace nils with the next available non-nil element in the
+   sequence, or barring that, 0."
+  [s]
+  (loop [i (dec (count s))
+         v (transient s)]
+    (if (zero? i) (persistent! v)
+        (if-let [cur (v i)]
+          (if (get v (dec i) 0)
+            (recur (dec i) v)
+            (recur (dec i) (assoc! v (dec i) cur)))
+          (recur i (assoc! v i 0))))))
+
+;; tests
+
+;;(infer-nils [1 nil 1 1])  [1 1 1 1]
+;;(infer-nils [1 1 1 nil])  [1 1 1 0]
+;;(infer-nils [nil 2 1 1])  [2 2 1 1]
+
+
+(defn empathy-demonstration []
+  (let [proprio (atom ())]
+    (fn
+      [experiences text]
+      (let [phi-indices (phi-scan (:proprioception (peek experiences)))]
+        (swap! proprio (partial cons phi-indices))
+        (let [exp-thread (longest-thread (take 300 @proprio))
+              empathy (mapv phi-space (infer-nils exp-thread))]
+          (println-repl (vector:last-n exp-thread 22))
+          (cond
+           (grand-circle? empathy) (.setText text "Grand Circle")
+           (curled? empathy)       (.setText text "Curled")
+           (wiggling? empathy)     (.setText text "Wiggling")
+           (resting? empathy)      (.setText text "Resting")
+           :else                   (.setText text "Unknown")))))))
+
+(defn init-interactive []
+  (def phi-space
+    (let [experiences (atom [])]
+      (run-world
+       (apply-map
+        worm-world
+        (merge
+         (worm-world-defaults)
+         {:experiences experiences})))
+      @experiences))
+  (def phi-scan (gen-phi-scan phi-space)))
+
+(defn empathy-experiment-1 [record]
+  (.start (worm-world :experience-watch (empathy-demonstration)
+                      :record record :worm worm*)))
+
+
+(def worm-action-label
+  (juxt grand-circle? curled? wiggling?))
+
+(defn compare-empathy-with-baseline [accuracy]
+  (let [proprio (atom ())]
+    (fn
+      [experiences text]
+      (let [phi-indices (phi-scan (:proprioception (peek experiences)))]
+        (swap! proprio (partial cons phi-indices))
+        (let [exp-thread (longest-thread (take 300 @proprio))
+              empathy (mapv phi-space (infer-nils exp-thread))
+              experience-matches-empathy
+              (= (worm-action-label experiences)
+                 (worm-action-label empathy))]
+          (cond
+           (grand-circle? empathy) (.setText text "Grand Circle")
+           (curled? empathy)       (.setText text "Curled")
+           (wiggling? empathy)     (.setText text "Wiggling")
+           (resting? empathy)      (.setText text "Resting")
+           :else                   (.setText text "Unknown"))
+
+          (println-repl experience-matches-empathy)
+          (swap! accuracy #(conj % experience-matches-empathy)))))))
+
+(defn accuracy [v]
+  (float (/ (count (filter true? v)) (count v))))
+
+(defn test-empathy-accuracy []
+  (let [res (atom [])]
+    (run-world
+     (worm-world :experience-watch
+                 (compare-empathy-with-baseline res)
+                 :worm worm*))
+    (accuracy @res)))
+
+
diff -r a86555b02916 -r 763d13f77e03 thesis/Makefile
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/Makefile	Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,16 @@
+#INVOKE_LATEX = pdflatex -shell-escape thesis.tex;
+THESIS_NAME = rlm-cortex-meng
+INVOKE_LATEX = texi2dvi --shell-escape --pdf -V --batch $(THESIS_NAME).tex;
+#INVOKE_LATEX = texi2dvi --shell-escape --pdf -V $(THESIS_NAME).tex;
+
+all:
+	./weave-thesis.sh cortex
+	rsync -avz --delete --exclude "video" \
+	 /home/r/proj/cortex/thesis "r@aurellem.org:~"
+	ssh r@aurellem.org cd "~/thesis; $(INVOKE_LATEX)"
+	scp "r@aurellem.org:/home/r/thesis/$(THESIS_NAME).pdf" .
+	rm cortex.tex abstract.tex user-guide.tex
+
+
+
+
diff -r a86555b02916 -r 763d13f77e03 thesis/abstract.org
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/abstract.org	Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,23 @@
+Here I demonstrate the power of using embodied artificial intelligence
+to attack the /action recognition/ problem, which is the challenge of
+recognizing actions performed by a creature given limited data about
+the creature's actions, such as a video recording. I solve this
+problem in the case of a worm-like creature performing actions such as
+curling and wiggling.
+
+To attack the action recognition problem, I developed a computational
+model of empathy (=EMPATH=) which allows me to recognize actions using
+simple, embodied representations of actions (which require rich
+sensory data), even when that sensory data is not actually
+available. The missing sense data is ``imagined'' by the system by
+combining previous experiences gained from unsupervised free play.
+
+In order to build this empathic, action-recognizing system, I created
+a program called =CORTEX=, which is a complete platform for embodied
+AI research. It provides multiple senses for simulated creatures,
+including vision, touch, proprioception, muscle tension, and
+hearing. Each of these senses provides a wealth of parameters that are
+biologically inspired. =CORTEX= is able to simulate any number of
+creatures and senses, and provides facilities for easily modeling and
+creating new creatures. As a research platform it is more complete
+than any other system currently available.
diff -r a86555b02916 -r 763d13f77e03 thesis/aux/mitthesis/README.bibliography
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/aux/mitthesis/README.bibliography	Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,28 @@
+This file gives an overview on what you will need to do to create a
+bibliographic database for references, as well as create the actual
+bibliography for your thesis.
+
+You should not need to touch the file biblio.tex.  This merely tells
+latex to look for the file with the bibliographic info.  The file you
+want to edit is main.bib.  For each entry, use the appropriate style
+as designated in the file.
+
+Citing your references:
+
+When you cite a reference, you need to use the ``key'' you declare in
+main.bib for the entry.  No one ever sees the keys, but you'll want to
+use something you will easily remember.  For instance, if you had an
+entry with:
+
+	key=geer1
+
+Then you would use \cite{geer1} to reference it within your thesis.
+
+NOTE: By default, you cannot include references in your bibliography
+that are never cited in your paper.  If you need to do this, create a
+key for the entry and at the end of your thesis include the line:
+
+\nocite{key}
+
+This should be done for every entry which is not explicitly cited.
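+
+(An illustrative example, not part of the original template -- the key
+names here are made up.)  If your thesis cites geer1 but you also want
+the uncited entry smith2 to appear in the bibliography, you would write:
+
+	As shown in \cite{geer1}, ...   % cited: appears in text and bibliography
+	\nocite{smith2}                 % uncited: appears in bibliography only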
diff -r a86555b02916 -r 763d13f77e03 thesis/aux/mitthesis/README.main
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/aux/mitthesis/README.main	Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,54 @@
+This file explains the options available to you for editing the file
+main.tex.
+
+The commands in this file allow you to specify options such as
+spacing, double-sided printing, a draft copy, etc.  By default, 12pt
+and lgrind are included; lgrind is the 2e style for including code in
+your thesis.
+
+\documentclass[12pt]{mitthesis}
+\usepackage{lgrind}
+\pagestyle{plain}
+
+You can add options in the documentclass line as follows:
+
+  o  singlespace
+
+	\documentclass[12pt,singlespace]{mitthesis}
+
+  o  twoside
+
+	\documentclass[12pt,twoside]{mitthesis}
+
+  o  draft  (make sure to change the pagestyle to drafthead as
+     well)
+
+	\documentclass[12pt,draft]{mitthesis}
+	\usepackage{lgrind}
+	\pagestyle{drafthead}
+
+  o  vi  (for course vi and course viii theses)
+
+	\documentclass[12pt,vi]{mitthesis}
+
+Any options you would use for report.sty will work here as well.
+
+
+You should not need to change the first three lines and last two lines
+below.  Be sure to include an \include command for each file you are
+including in your thesis.
+
+\include{cover}
+\pagestyle{plain}
+\include{contents}
+\include{chap1}
+\include{chap2}
+\appendix
+\include{appa}
+\include{appb}
+\include{biblio}
+\end{document}
+
+Comment: to include appendices use a single \appendix command followed
+by a number of \include{} commands, for as many files as needed, each
+of which should contain a \chapter{} command for the appendix title.
diff -r a86555b02916 -r 763d13f77e03 thesis/aux/mitthesis/README.text
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/aux/mitthesis/README.text	Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,34 @@
+This file includes the basic commands you will need to use within each
+chapter of your thesis.
+
+The file chap1.tex is a sample first chapter.  To get started, you may
+just want to replace the text and commands in that file as needed.  In
+general though, for each new chapter you want to do the following:
+
+  o  Make sure the name has the extension .tex .  Otherwise, you
+     can call it anything you want.  For ease of use, all the
+     examples use chap1, chap2, etc.
+
+  o  Add a line to the file main.tex that reads:
+
+	\include{yourfilename}
+
+     This should not include the .tex extension, because latex
+     assumes that is there.
+
+Basic syntax:
+
+  o  The first line of each chapter should be:
+
+	\chapter{Chapter Title}
+
+  o  To start a new section (labeled chap#.sec# -- as in 1.1,
+     1.2, etc):
+
+	\section{Section Heading}
+
+     You can also include subsections:
+
+	\subsection{Heading}
+
+
diff -r a86555b02916 -r 763d13f77e03 thesis/aux/mitthesis/abstract.tex
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/aux/mitthesis/abstract.tex	Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,22 @@
+% $Log: abstract.tex,v $
+% Revision 1.1  93/05/14  14:56:25  starflt
+% Initial revision
+%
+% Revision 1.1  90/05/04  10:41:01  lwvanels
+% Initial revision
+%
+%
+%% The text of your abstract and nothing else (other than comments) goes here.
+%% It will be single-spaced and the rest of the text that is supposed to go on
+%% the abstract page will be generated by the abstractpage environment.  This
+%% file should be \input (not \include 'd) from cover.tex.
+In this thesis, I designed and implemented a compiler which performs
+optimizations that reduce the number of low-level floating point operations
+necessary for a specific task; this involves the optimization of chains of
+floating point operations as well as the implementation of a ``fixed'' point
+data type that allows some floating point operations to be simulated with
+integer arithmetic.  The source language of the compiler is a subset of C,
+and the destination language is assembly language for a micro-floating point
+CPU.  An instruction-level simulator of the CPU was written to allow testing
+of the code.  A series of test pieces of code was compiled, both with and
+without optimization, to determine how effective these optimizations were.
diff -r a86555b02916 -r 763d13f77e03 thesis/aux/mitthesis/biblio.tex
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/aux/mitthesis/biblio.tex	Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,8 @@
+%% This defines the bibliography file (main.bib) and the bibliography style.
+%% If you want to create a bibliography file by hand, change the contents of
+%% this file to a `thebibliography' environment.  For more information
+%% see section 4.3 of the LaTeX manual.
+\begin{singlespace}
+\bibliography{main}
+\bibliographystyle{plain}
+\end{singlespace}
diff -r a86555b02916 -r 763d13f77e03 thesis/aux/mitthesis/chap1.tex
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/aux/mitthesis/chap1.tex	Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,250 @@
+%% This is an example first chapter.  You should put chapter/appendix that you
+%% write into a separate file, and add a line \include{yourfilename} to
+%% main.tex, where `yourfilename.tex' is the name of the chapter/appendix file.
+%% You can process specific files by typing their names in at the
+%% \files=
+%% prompt when you run the file main.tex through LaTeX.
+\chapter{Introduction}
+
+Micro-optimization is a technique to reduce the overall operation count of
+floating point operations.  In a standard floating point unit, floating
+point operations are fairly high level, such as ``multiply'' and ``add'';
+in a micro floating point unit ($\mu$FPU), these have been broken down into
+their constituent low-level floating point operations on the mantissas and
+exponents of the floating point numbers.
+
+Chapter two describes the architecture of the $\mu$FPU unit, and the
+motivations for the design decisions made.
+
+Chapter three describes the design of the compiler, as well as how the
+optimizations discussed in section~\ref{ch1:opts} were implemented.
+
+Chapter four describes the purpose of test code that was compiled, and which
+statistics were gathered by running it through the simulator.  The purpose
+is to measure what effect the micro-optimizations had, compared to
+unoptimized code.  Possible future expansions to the project are also
+discussed.
+
+\section{Motivations for micro-optimization}
+
+The idea of micro-optimization is motivated by the recent trends in computer
+architecture towards low-level parallelism and small, pipelineable
+instruction sets \cite{patterson:risc,rad83}.  By getting rid of more
+complex instructions and concentrating on optimizing frequently used
+instructions, substantial increases in performance were realized.
+
+Another important motivation was the trend towards placing more of the
+burden of performance on the compiler.  Many of the new architectures depend
+on an intelligent, optimizing compiler in order to realize anywhere near
+their peak performance
+\cite{ellis:bulldog,pet87,coutant:precision-compilers}.  In these cases, the
+compiler not only is responsible for faithfully generating native code to
+match the source language, but also must be aware of instruction latencies,
+delayed branches, pipeline stages, and a multitude of other factors in order
+to generate fast code \cite{gib86}.
+
+Taking these ideas one step further, it seems that the floating point
+operations that are normally single, large instructions can be further broken
+down into smaller, simpler, faster instructions, with more control in the
+compiler and less in the hardware.  This is the idea behind a
+micro-optimizing FPU; break the floating point instructions down into their
+basic components and use a small, fast implementation, with a large part of
+the burden of hardware allocation and optimization shifted towards
+compile-time.
+
+Along with the hardware speedups possible by using a $\mu$FPU, there are
+also optimizations that the compiler can perform on the code that is
+generated.  In a normal sequence of floating point operations, there are
+many hidden redundancies that can be eliminated by allowing the compiler to
+control the floating point operations down to their lowest level.  These
+optimizations are described in detail in section~\ref{ch1:opts}.
+
+\section{Description of micro-optimization}\label{ch1:opts}
+
+In order to perform a sequence of floating point operations, a normal FPU
+performs many redundant internal shifts and normalizations in the process of
+performing a sequence of operations.  However, if a compiler can
+decompose the floating point operations it needs down to the lowest level,
+it then can optimize away many of these redundant operations.
+
+If there is some additional hardware support specifically for
+micro-optimization, there are additional optimizations that can be
+performed.  This hardware support entails extra ``guard bits'' on the
+standard floating point formats, to allow several unnormalized operations to
+be performed in a row without the loss of information\footnote{A description
+of the floating point format used is shown in figures~\ref{exponent-format}
+and~\ref{mantissa-format}.}.  A discussion of the mathematics behind
+unnormalized arithmetic is in appendix~\ref{unnorm-math}.
+
+The optimizations that the compiler can perform fall into several categories:
+
+\subsection{Post Multiply Normalization}
+
+When more than two multiplications are performed in a row, the intermediate
+normalization of the results between multiplications can be eliminated.
+This is because with each multiplication, the mantissa can become
+denormalized by at most one bit.  If there are guard bits on the mantissas
+to prevent bits from ``falling off'' the end during multiplications, the
+normalization can be postponed until after a sequence of several
+multiplies\footnote{Using unnormalized numbers for math is not a new idea; a
+good example of it is the Control Data CDC 6600, designed by Seymour Cray.
+\cite{thornton:cdc6600} The CDC 6600 had all of its instructions performing
+unnormalized arithmetic, with a separate {\tt NORMALIZE} instruction.}.
+
+% This is an example of how you would use tgrind to include an example
+% of source code; it is commented out in this template since the code
+% example file does not exist.  To use it, you need to remove the '%' on the
+% beginning of the line, and insert your own information in the call.
+%
+%\tagrind[htbp]{code/pmn.s.tex}{Post Multiply Normalization}{opt:pmn}
+
+As you can see, the intermediate results can be multiplied together, with no
+need for intermediate normalizations due to the guard bit.  It is only at
+the end of the operation that the normalization must be performed, in order
+to get it into a format suitable for storing in memory\footnote{Note that
+for purposes of clarity, the pipeline delays were considered to be 0, and
+the branches were not delayed.}.
+
+\subsection{Block Exponent}
+
+In an unoptimized sequence of additions, the sequence of operations is as
+follows for each pair of numbers ($m_1$,$e_1$) and ($m_2$,$e_2$).
+\begin{enumerate}
+  \item Compare $e_1$ and $e_2$.
+  \item Shift the mantissa associated with the smaller exponent $|e_1-e_2|$
+        places to the right.
+  \item Add $m_1$ and $m_2$.
+  \item Find the first one in the resulting mantissa.
+  \item Shift the resulting mantissa so that it is normalized.
+  \item Adjust the exponent accordingly.
+\end{enumerate}
+
+Out of 6 steps, only one is the actual addition, and the rest are involved
+in aligning the mantissas prior to the add, and then normalizing the result
+afterward.  In the block exponent optimization, the largest mantissa is
+found to start with, and all the mantissas are shifted before any additions
+take place.  Once the mantissas have been shifted, the additions can take
+place one after another\footnote{This requires that for n consecutive
+additions, there are $\log_{2}n$ high guard bits to prevent overflow.  In
+the $\mu$FPU, there are 3 guard bits, making up to 8 consecutive additions
+possible.}.  An example of the Block Exponent optimization on the expression
+X = A + B + C is given in figure~\ref{opt:be}.
+
+% This is an example of how you would use tgrind to include an example
+% of source code; it is commented out in this template since the code
+% example file does not exist.  To use it, you need to remove the '%' on the
+% beginning of the line, and insert your own information in the call.
+%
+%\tgrind[htbp]{code/be.s.tex}{Block Exponent}{opt:be}
+
+\section{Integer optimizations}
+
+As well as the floating point optimizations described above, there are
+also integer optimizations that can be used in the $\mu$FPU.  In concert
+with the floating point optimizations, these can provide a significant
+speedup.
+
+\subsection{Conversion to fixed point}
+
+Integer operations are much faster than floating point operations; if it is
+possible to replace floating point operations with fixed point operations,
+this would provide a significant increase in speed.
+
+This conversion can either take place automatically or based on a
+specific request from the programmer.  To do this automatically, the
+compiler must either be very smart, or play fast and loose with the accuracy
+and precision of the programmer's variables.  To be ``smart'', the compiler
+must track the ranges of all the floating point variables through the
+program, and then see if there are any potential candidates for conversion
+to fixed point.  This technique is discussed further in
+section~\ref{range-tracking}, where it was implemented.
+
+The other way to do this is to rely on specific hints from the programmer
+that a certain value will only assume a specific range, and that only a
+specific precision is desired.  This is somewhat more taxing on the
+programmer, in that he has to know the ranges that his values will take at
+declaration time (something normally abstracted away), but it does provide
+the opportunity for fine-tuning already working code.
+
+Potential applications of this would be simulation programs, where the
+variable represents some physical quantity; the constraints of the physical
+system may provide bounds on the range the variable can take.
+
+\subsection{Small Constant Multiplications}
+
+One other class of optimizations that can be done is to replace
+multiplications by small integer constants with some combination of
+additions and shifts.  Addition and shifting can be significantly faster
+than multiplication.  This is done by using some combination of
+\begin{eqnarray*}
+a_i & = & a_j + a_k \\
+a_i & = & 2a_j + a_k \\
+a_i & = & 4a_j + a_k \\
+a_i & = & 8a_j + a_k \\
+a_i & = & a_j - a_k \\
+a_i & = & a_j \ll m \quad \mbox{(shift)}
+\end{eqnarray*}
+instead of the multiplication.  For example, to multiply $s$ by 10 and store
+the result in $r$, you could use:
+\begin{eqnarray*}
+r & = & 4s + s\\
+r & = & r + r
+\end{eqnarray*}
+Or by 59:
+\begin{eqnarray*}
+t & = & 2s + s \\
+r & = & 2t + s \\
+r & = & 8r + t
+\end{eqnarray*}
+Similar combinations can be found for almost all of the smaller
+integers\footnote{This optimization is only an ``optimization'', of course,
+when the amount of time spent on the shifts and adds is less than the time
+that would be spent doing the multiplication.  Since the time costs of these
+operations are known to the compiler in order for it to do scheduling, it is
+easy for the compiler to determine when this optimization is worth using.}.
+\cite{magenheimer:precision}
+
+\section{Other optimizations}
+
+\subsection{Low-level parallelism}
+
+The current trend is towards duplicating hardware at the lowest level to
+provide parallelism\footnote{This can be seen in the i860; floating point
+additions and multiplications can proceed at the same time, and the RISC
+core can be moving data in and out of the floating point registers and
+providing flow control at the same time the floating point units are
+active. \cite{byte:i860}}.
+
+Conceptually, it is easy to take advantage of low-level parallelism in the
+instruction stream by simply adding more functional units to the $\mu$FPU,
+widening the instruction word to control them, and then scheduling as many
+operations to take place at one time as possible.
+
+However, simply adding more functional units can only be done so many times;
+there is only a limited amount of parallelism directly available in the
+instruction stream, and without it, much of the extra resources will go to
+waste.  One process used to make more instructions potentially schedulable
+at any given time is ``trace scheduling''.  This technique originated in the
+Bulldog compiler for the original VLIW machine, the ELI-512.
+\cite{ellis:bulldog,colwell:vliw}  In trace scheduling, code can be
+scheduled through many basic blocks at one time, following a single
+potential ``trace'' of program execution.  In this way, instructions that
+{\em might\/} be executed depending on a conditional branch further down in
+the instruction stream are scheduled, allowing an increase in the potential
+parallelism.  To account for the cases where the expected branch wasn't
+taken, correction code is inserted after the branches to undo the effects of
+any prematurely executed instructions.
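+
+% A small sketch of the trace scheduling idea (my own illustration, not
+% part of the original template; the instruction names are made up):
+%
+%   before:                 after (trace assumes the branch falls through):
+%     load  r1, [x]           load  r1, [x]
+%     brz   r1, skip          mul   r3, r1, r2    ; hoisted above the branch
+%     mul   r3, r1, r2        brz   r1, fixup
+%   skip: ...               skip: ...
+%                           fixup:                ; correction code:
+%                             ...                 ; discard r3, rejoin skip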
+
+\subsection{Pipeline optimizations}
+
+In addition to having operations going on in parallel across functional
+units, it is also typical to have several operations in various stages of
+completion in each unit.  This pipelining allows the throughput of the
+functional units to be increased, with no increase in latency.
+
+There are several ways pipelined operations can be optimized.  On the
+hardware side, support can be added to allow data to be recirculated back
+into the beginning of the pipeline from the end, saving a trip through the
+registers.  On the software side, the compiler can utilize several tricks to
+try to fill up as many of the pipeline delay slots as possible, as
+described by Gibbons. \cite{gib86}
+
+
diff -r a86555b02916 -r 763d13f77e03 thesis/aux/mitthesis/contents.tex
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/aux/mitthesis/contents.tex	Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,11 @@
+% -*- Mode:TeX -*-
+%% This file simply contains the commands that actually generate the table of
+%% contents and lists of figures and tables.  You can omit any or all of
+%% these files by simply taking out the appropriate command.  For more
+%% information on these files, see appendix C.3.3 of the LaTeX manual.
+\tableofcontents
+\newpage
+\listoffigures
+\newpage
+\listoftables
+
diff -r a86555b02916 -r 763d13f77e03 thesis/aux/mitthesis/cover.tex
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/aux/mitthesis/cover.tex	Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,132 @@
+% -*-latex-*-
+%
+% For questions, comments, concerns or complaints:
+% thesis@mit.edu
+%
+%
+% $Log: cover.tex,v $
+% Revision 1.8  2008/05/13 15:02:15  jdreed
+% Degree month is June, not May.  Added note about prevdegrees.
+% Arthur Smith's title updated
+%
+% Revision 1.7  2001/02/08 18:53:16  boojum
+% changed some \newpages to \cleardoublepages
+%
+% Revision 1.6  1999/10/21 14:49:31  boojum
+% changed comment referring to documentstyle
+%
+% Revision 1.5  1999/10/21 14:39:04  boojum
+% *** empty log message ***
+%
+% Revision 1.4  1997/04/18 17:54:10  othomas
+% added page numbers on abstract and cover, and made 1 abstract
+% page the default rather than 2.  (anne hunter tells me this
+% is the new institute standard.)
+%
+% Revision 1.4  1997/04/18 17:54:10  othomas
+% added page numbers on abstract and cover, and made 1 abstract
+% page the default rather than 2.  (anne hunter tells me this
+% is the new institute standard.)
+%
+% Revision 1.3  93/05/17  17:06:29  starflt
+% Added acknowledgements section (suggested by tompalka)
+%
+% Revision 1.2  92/04/22  13:13:13  epeisach
+% Fixes for 1991 course 6 requirements
+% Phrase "and to grant others the right to do so" has been added to
+% permission clause
+% Second copy of abstract is not counted as separate pages so numbering works
+% out
+%
+% Revision 1.1  92/04/22  13:08:20  epeisach
+
+% NOTE:
+% These templates make an effort to conform to the MIT Thesis specifications,
+% however the specifications can change.  We recommend that you verify the
+% layout of your title page with your thesis advisor and/or the MIT
+% Libraries before printing your final copy.
+\title{An Optimizing Compiler for Low-Level Floating Point Operations}
+
+\author{Robert McIntyre}
+% If you wish to list your previous degrees on the cover page, use the
+% previous degrees command:
+%       \prevdegrees{A.A., Harvard University (1985)}
+% You can use the \\ command to list multiple previous degrees
+%       \prevdegrees{B.S., University of California (1978) \\
+%                    S.M., Massachusetts Institute of Technology (1981)}
+\department{Department of Electrical Engineering and Computer Science}
+
+% If the thesis is for two degrees simultaneously, list them both
+% separated by \and like this:
+%       \degree{Doctor of Philosophy \and Master of Science}
+\degree{Bachelor of Science in Computer Science and Engineering}
+
+% As of the 2007-08 academic year, valid degree months are September,
+% February, or June.  The default is June.
+\degreemonth{June}
+\degreeyear{1990}
+\thesisdate{May 18, 1990}
+
+%% By default, the thesis will be copyrighted to MIT.  If you need to copyright
+%% the thesis to yourself, just specify the `vi' documentclass option.  If for
+%% some reason you want to exactly specify the copyright notice text, you can
+%% use the \copyrightnoticetext command.
+%\copyrightnoticetext{\copyright IBM, 1990.  Do not open till Xmas.}
+
+% If there is more than one supervisor, use the \supervisor command
+% once for each.
+\supervisor{William J. Dally}{Associate Professor}
+
+% This is the department committee chairman, not the thesis committee
+% chairman.  You should replace this with your Department's Committee
+% Chairman.
+\chairman{Arthur C. Smith}{Chairman, Department Committee on Graduate Theses}
+
+% Make the titlepage based on the above information.  If you need
+% something special and can't use the standard form, you can specify
+% the exact text of the titlepage yourself.  Put it in a titlepage
+% environment and leave blank lines where you want vertical space.
+% The spaces will be adjusted to fill the entire page.  The dotted
+% lines for the signatures are made with the \signature command.
+\maketitle
+
+% The abstractpage environment sets up everything on the page except
+% the text itself.  The title and other header material are put at the
+% top of the page, and the supervisors are listed at the bottom.  A
+% new page is begun both before and after.  Of course, an abstract may
+% be more than one page itself.  If you need more control over the
+% format of the page, you can use the abstract environment, which puts
+% the word "Abstract" at the beginning and single spaces its text.
+
+%% You can either \input (*not* \include) your abstract file, or you can put
+%% the text of the abstract directly between the \begin{abstractpage} and
+%% \end{abstractpage} commands.
+
+% First copy: start a new page, and save the page number.
+\cleardoublepage
+% Uncomment the next line if you do NOT want a page number on your
+% abstract and acknowledgments pages.
+\pagestyle{empty}
+\setcounter{savepage}{\thepage}
+\begin{abstractpage}
+\input{abstract}
+\end{abstractpage}
+
+% Additional copy: start a new page, and reset the page number.  This way,
+% the second copy of the abstract is not counted as separate pages.
+% Uncomment the next 6 lines if you need two copies of the abstract
+% page.
+% \setcounter{page}{\thesavepage}
+% \begin{abstractpage}
+% \input{abstract}
+% \end{abstractpage}
+
+\cleardoublepage
+
+\section*{Acknowledgments}
+
+This is the acknowledgements section.  You should replace this with your
+own acknowledgements.
+
+%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
+% -*-latex-*-
diff -r a86555b02916 -r 763d13f77e03 thesis/aux/mitthesis/main.bib
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/aux/mitthesis/main.bib	Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,356 @@
+@preamble{ "\newcommand{\noopsort}[1]{} "
+	# "\newcommand{\printfirst}[2]{#1} "
+	# "\newcommand{\singleletter}[1]{#1} "
+	# "\newcommand{\switchargs}[2]{#2#1} " }
+
+@ARTICLE{article-minimal,
+   author = {L[eslie] A. Aamport},
+   title = {The Gnats and Gnus Document Preparation System},
+   journal = {\mbox{G-Animal's} Journal},
+   year = 1986,
+}
+
+@ARTICLE{article-full,
+   author = {L[eslie] A. Aamport},
+   title = {The Gnats and Gnus Document Preparation System},
+   journal = {\mbox{G-Animal's} Journal},
+   year = 1986,
+   volume = 41,
+   number = 7,
+   pages = "73+",
+   month = jul,
+   note = "This is a full ARTICLE entry",
+}
+
+The KEY field is here to override the KEY field in the journal being
+cross referenced (so is the NOTE field, in addition to its imparting
+information).
+
+@ARTICLE{article-crossref,
+   crossref = {WHOLE-JOURNAL},
+   key = "",
+   author = {L[eslie] A. Aamport},
+   title = {The Gnats and Gnus Document Preparation System},
+   pages = "73+",
+   note = "This is a cross-referencing ARTICLE entry",
+}
+
+@ARTICLE{whole-journal,
+   key = "GAJ",
+   journal = {\mbox{G-Animal's} Journal},
+   year = 1986,
+   volume = 41,
+   number = 7,
+   month = jul,
+   note = {The entire issue is devoted to gnats and gnus
+		(this entry is a cross-referenced ARTICLE (journal))},
+}
+
+@INBOOK{inbook-minimal,
+   author = "Donald E. Knuth",
+   title = "Fundamental Algorithms",
+   publisher = "Addison-Wesley",
+   year = "{\noopsort{1973b}}1973",
+   chapter = "1.2",
+}
+
+@INBOOK{inbook-full,
+   author = "Donald E. Knuth",
+   title = "Fundamental Algorithms",
+   volume = 1,
+   series = "The Art of Computer Programming",
+   publisher = "Addison-Wesley",
+   address = "Reading, Massachusetts",
+   edition = "Second",
+   month = "10~" # jan,
+   year = "{\noopsort{1973b}}1973",
+   type = "Section",
+   chapter = "1.2",
+   pages = "10--119",
+   note = "This is a full INBOOK entry",
+}
+
+@INBOOK{inbook-crossref,
+   crossref = "whole-set",
+   title = "Fundamental Algorithms",
+   volume = 1,
+   series = "The Art of Computer Programming",
+   edition = "Second",
+   year = "{\noopsort{1973b}}1973",
+   type = "Section",
+   chapter = "1.2",
+   note = "This is a cross-referencing INBOOK entry",
+}
+
+@BOOK{book-minimal,
+   author = "Donald E. Knuth",
+   title = "Seminumerical Algorithms",
+   publisher = "Addison-Wesley",
+   year = "{\noopsort{1973c}}1981",
+}
+
+@BOOK{book-full,
+   author = "Donald E. Knuth",
+   title = "Seminumerical Algorithms",
+   volume = 2,
+   series = "The Art of Computer Programming",
+   publisher = "Addison-Wesley",
+   address = "Reading, Massachusetts",
+   edition = "Second",
+   month = "10~" # jan,
+   year = "{\noopsort{1973c}}1981",
+   note = "This is a full BOOK entry",
+}
+
+@BOOK{book-crossref,
+   crossref = "whole-set",
+   title = "Seminumerical Algorithms",
+   volume = 2,
+   series = "The Art of Computer Programming",
+   edition = "Second",
+   year = "{\noopsort{1973c}}1981",
+   note = "This is a cross-referencing BOOK entry",
+}
+
+@BOOK{whole-set,
+   author = "Donald E. Knuth",
+   publisher = "Addison-Wesley",
+   title = "The Art of Computer Programming",
+   series = "Four volumes",
+   year = "{\noopsort{1973a}}{\switchargs{--90}{1968}}",
+   note = "Seven volumes planned (this is a cross-referenced set of BOOKs)",
+}
+
+@BOOKLET{booklet-minimal,
+   key = "Kn{\printfirst{v}{1987}}",
+   title = "The Programming of Computer Art",
+}
+
+@BOOKLET{booklet-full,
+   author = "Jill C. Knvth",
+   title = "The Programming of Computer Art",
+   howpublished = "Vernier Art Center",
+   address = "Stanford, California",
+   month = feb,
+   year = 1988,
+   note = "This is a full BOOKLET entry",
+}
+
+@INCOLLECTION{incollection-minimal,
+   author = "Daniel D. Lincoll",
+   title = "Semigroups of Recurrences",
+   booktitle = "High Speed Computer and Algorithm Organization",
+   publisher = "Academic Press",
+   year = 1977,
+}
+
+@INCOLLECTION{incollection-full,
+   author = "Daniel D. Lincoll",
+   title = "Semigroups of Recurrences",
+   editor = "David J. Lipcoll and D. H. Lawrie and A. H. Sameh",
+   booktitle = "High Speed Computer and Algorithm Organization",
+   number = 23,
+   series = "Fast Computers",
+   chapter = 3,
+   type = "Part",
+   pages = "179--183",
+   publisher = "Academic Press",
+   address = "New York",
+   edition = "Third",
+   month = sep,
+   year = 1977,
+   note = "This is a full INCOLLECTION entry",
+}
+
+@INCOLLECTION{incollection-crossref,
+   crossref = "whole-collection",
+   author = "Daniel D. Lincoll",
+   title = "Semigroups of Recurrences",
+   pages = "179--183",
+   note = "This is a cross-referencing INCOLLECTION entry",
+}
+
+@BOOK{whole-collection,
+   editor = "David J. Lipcoll and D. H. Lawrie and A. H. Sameh",
+   title = "High Speed Computer and Algorithm Organization",
+   booktitle = "High Speed Computer and Algorithm Organization",
+   number = 23,
+   series = "Fast Computers",
+   publisher = "Academic Press",
+   address = "New York",
+   edition = "Third",
+   month = sep,
+   year = 1977,
+   note = "This is a cross-referenced BOOK (collection) entry",
+}
+
+@MANUAL{manual-minimal,
+   key = "Manmaker",
+   title = "The Definitive Computer Manual",
+}
+
+@MANUAL{manual-full,
+   author = "Larry Manmaker",
+   title = "The Definitive Computer Manual",
+   organization = "Chips-R-Us",
+   address = "Silicon Valley",
+   edition = "Silver",
+   month = apr # "-" # may,
+   year = 1986,
+   note = "This is a full MANUAL entry",
+}
+
+@MASTERSTHESIS{mastersthesis-minimal,
+   author = "{\'{E}}douard Masterly",
+   title = "Mastering Thesis Writing",
+   school = "Stanford University",
+   year = 1988,
+}
+
+@MASTERSTHESIS{mastersthesis-full,
+   author = "{\'{E}}douard Masterly",
+   title = "Mastering Thesis Writing",
+   school = "Stanford University",
+   type = "Master's project",
+   address = "English Department",
+   month = jun # "-" # aug,
+   year = 1988,
+   note = "This is a full MASTERSTHESIS entry",
+}
+
+@MISC{misc-minimal,
+   key = "Missilany",
+   note = "This is a minimal MISC entry",
+}
+
+@MISC{misc-full,
+   author = "Joe-Bob Missilany",
+   title = "Handing out random pamphlets in airports",
+   howpublished = "Handed out at O'Hare",
+   month = oct,
+   year = 1984,
+   note = "This is a full MISC entry",
+}
+
+@STRING{STOC-key = "OX{\singleletter{stoc}}"}
+
+@STRING{ACM = "The OX Association for Computing Machinery"}
+
+@STRING{STOC = " Symposium on the Theory of Computing"}
+
+@INPROCEEDINGS{inproceedings-minimal,
+   author = "Alfred V. Oaho and Jeffrey D. Ullman and Mihalis Yannakakis",
+   title = "On Notions of Information Transfer in {VLSI} Circuits",
+   booktitle = "Proc.
Fifteenth Annual ACM" # STOC, + year = 1983, +} + +@INPROCEEDINGS{inproceedings-full, + author = "Alfred V. Oaho and Jeffrey D. Ullman and Mihalis Yannakakis", + title = "On Notions of Information Transfer in {VLSI} Circuits", + editor = "Wizard V. Oz and Mihalis Yannakakis", + booktitle = "Proc. Fifteenth Annual ACM" # STOC, + number = 17, + series = "All ACM Conferences", + pages = "133--139", + month = mar, + year = 1983, + address = "Boston", + organization = ACM, + publisher = "Academic Press", + note = "This is a full INPROCEDINGS entry", +} + +@INPROCEEDINGS{inproceedings-crossref, + crossref = "whole-proceedings", + author = "Alfred V. Oaho and Jeffrey D. Ullman and Mihalis Yannakakis", + title = "On Notions of Information Transfer in {VLSI} Circuits", + organization = "", + pages = "133--139", + note = "This is a cross-referencing INPROCEEDINGS entry", +} + +@PROCEEDINGS{proceedings-minimal, + key = STOC-key, + title = "Proc. Fifteenth Annual" # STOC, + year = 1983, +} + +@PROCEEDINGS{proceedings-full, + editor = "Wizard V. Oz and Mihalis Yannakakis", + title = "Proc. Fifteenth Annual" # STOC, + number = 17, + series = "All ACM Conferences", + month = mar, + year = 1983, + address = "Boston", + organization = ACM, + publisher = "Academic Press", + note = "This is a full PROCEEDINGS entry", +} + +@PROCEEDINGS{whole-proceedings, + key = STOC-key, + organization = ACM, + title = "Proc. Fifteenth Annual" # STOC, + address = "Boston", + year = 1983, + booktitle = "Proc. Fifteenth Annual ACM" # STOC, + note = "This is a cross-referenced PROCEEDINGS", +} + +@PHDTHESIS{phdthesis-minimal, + author = "F. Phidias Phony-Baloney", + title = "Fighting Fire with Fire: Festooning {F}rench Phrases", + school = "Fanstord University", + year = 1988, +} + +@PHDTHESIS{phdthesis-full, + author = "F. Phidias Phony-Baloney", + title = "Fighting Fire with Fire: Festooning {F}rench Phrases", + school = "Fanstord University", + type = "{PhD} Dissertation", + address = "Department of French", + month = jun # "-" # aug, + year = 1988, + note = "This is a full PHDTHESIS entry", +} + +@TECHREPORT{techreport-minimal, + author = "Tom Terrific", + title = "An {$O(n \log n / \! \log\log n)$} Sorting Algorithm", + institution = "Fanstord University", + year = 1988, +} + +@TECHREPORT{techreport-full, + author = "Tom T{\'{e}}rrific", + title = "An {$O(n \log n / \! 
\log\log n)$} Sorting Algorithm", + institution = "Fanstord University", + type = "Wishful Research Result", + number = "7", + address = "Computer Science Department, Fanstord, California", + month = oct, + year = 1988, + note = "This is a full TECHREPORT entry", +} + +@UNPUBLISHED{unpublished-minimal, + author = "Ulrich {\"{U}}nderwood and Ned {\~N}et and Paul {\={P}}ot", + title = "Lower Bounds for Wishful Research Results", + note = "Talk at Fanstord University (this is a minimal UNPUBLISHED entry)", +} + +@UNPUBLISHED{unpublished-full, + author = "Ulrich {\"{U}}nderwood and Ned {\~N}et and Paul {\={P}}ot", + title = "Lower Bounds for Wishful Research Results", + month = nov # ", " # dec, + year = 1988, + note = "Talk at Fanstord University (this is a full UNPUBLISHED entry)", +} + +@MISC{random-note-crossref, + key = {Volume-2}, + note = "Volume~2 is listed under Knuth \cite{book-full}" +} diff -r a86555b02916 -r 763d13f77e03 thesis/aux/mitthesis/main.tex --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/thesis/aux/mitthesis/main.tex Thu Mar 27 17:57:01 2014 -0400 @@ -0,0 +1,46 @@ +% -*- Mode:TeX -*- + +%% IMPORTANT: The official thesis specifications are available at: +%% http://libraries.mit.edu/archives/thesis-specs/ +%% +%% Please verify your thesis' formatting and copyright +%% assignment before submission. If you notice any +%% discrepancies between these templates and the +%% MIT Libraries' specs, please let us know +%% by e-mailing thesis@mit.edu + +%% The documentclass options along with the pagestyle can be used to generate +%% a technical report, a draft copy, or a regular thesis. You may need to +%% re-specify the pagestyle after you \include cover.tex. For more +%% information, see the first few lines of mitthesis.cls. + +%\documentclass[12pt,vi,twoside]{mitthesis} +%% +%% If you want your thesis copyright to you instead of MIT, use the +%% ``vi'' option, as above. +%% +%\documentclass[12pt,twoside,leftblank]{mitthesis} +%% +%% If you want blank pages before new chapters to be labelled ``This +%% Page Intentionally Left Blank'', use the ``leftblank'' option, as +%% above. + +\documentclass[12pt,twoside]{mitthesis} +%\usepackage{lgrind} +\pagestyle{plain} + +\begin{document} + +\include{cover} +% Some departments (e.g. 5) require an additional signature page. See +% signature.tex for more information and uncomment the following line if +% applicable. 
+% \include{signature} +\pagestyle{plain} +\include{contents} +\include{chap1} +\include{chap2} +\appendix +\include{biblio} +\end{document} + diff -r a86555b02916 -r 763d13f77e03 thesis/aux/mitthesis/mitthesis.cls --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/thesis/aux/mitthesis/mitthesis.cls Thu Mar 27 17:57:01 2014 -0400 @@ -0,0 +1,535 @@ +% $Log: mitthesis.cls,v $ +% Revision 1.9 2012/02/15 15:48:16 jdreed +% Tweak the "permission" statement per the Libraries' specs +% retrieved 15-Feb-2012 +% +% Revision 1.8 2009/07/27 14:23:50 boojum +% added changing draft mark +% +% Revision 1.6 2005/05/13 19:57:40 boojum +% added leftblank option +% +% Revision 1.5 2002/04/18 14:10:08 boojum +% changed doublespace into setspace for 2e compliance +% +% Revision 1.4 2001/02/08 18:57:13 boojum +% turned two newpages into cleardoublepages +% +% Revision 1.3 2000/08/14 14:53:29 boojum +% commented out footheight, which is relevant for drafthead +% +% Revision 1.2 1999/10/21 14:51:33 boojum +% changed references to documentstyle to documentclass in comments +% +% Revision 1.1 1999/10/21 14:39:31 boojum +% Initial revision +% +%Revision 1.7 1998/04/01 20:45:34 othomas +%removed offending phrase ", and to grant others the right to do so" from copyright notice. +% +%Revision 1.6 96/06/26 15:07:29 othomas +%safety checkin. +% +%Revision 1.5 93/06/07 15:38:50 starflt +%Altered 'vi' option copyright wording to comply with new Institute +%Archives requirements and MIT lawyers. +% +%Revision 1.4 92/08/19 16:51:06 lwvanels +%Updated Course 6 title page for new permissions. +% +%Revision 1.3 92/04/23 10:16:15 epeisach +%Fixed comment character in rcs file +% +%Revision 1.2 92/04/22 13:12:02 epeisach +%Fixes for 1991 course 6 requirements +%Phrase "and to grant others the right to do so" has been added to +%permission clause +%Second copy of abstract is not counted as separate pages so numbering works +%out +% +%Revision 1.1 90/05/04 11:45:53 lwvanels +%Initial revision + +% +% LaTeX format for theses at MIT +% Based on "Specifications for Thesis Preparation" + +% `vi' and `upcase' options by Krishna Sethuraman - krishna@athena.mit.edu +% Margins and heading types by Peter Nuth - nuth@ai.mit.edu +% Title and abstract page by Stephen Gildea - gildea@erl.mit.edu +% Look in this directory for example file mitthesis.doc +% Also for propcover.tex - Boilerplate for PHD proposal. + +% To use this style - say something like: +% for dull, boring thesis format: +% \documentclass[12pt]{mitthesis} +% \pagestyle{plain} +% OR for fast drafts: +% \documentclass[11pt,singlespace,draft]{mitthesis} +% \pagestyle{drafthead} +% OR for Tech Reports: +% \documentclass[12pt,twoside]{mitthesis} +% \pagestyle{headings} +% OR +% some other combination... +% +%%%% New options: +% +% Option `twoside': +% Good for producing Tech Reports. +% The default is single-sided printing, which is what M.I.T. wants on the +% thesis document itself. +% +% Option `singlespace': +% Good for drafts. +% Double-spaced theses are the default. +% That is what M.I.T. asks for in the formal specifications. +% +% Note that MIT does not REQUIRE all theses to be double-spaced anymore. +% Someone in the library system said that it's OK to be single-spaced. +% (Regardless of what the specs. say...) +% To get singlespacing in an area - Use the 'singlespace' environment. +% +% Option `draft': +% Puts `overfull' boxes at the end of lines that are too long. +% +% Pagestyle `drafthead': +% Puts the date and the label ``*DRAFT*'' in the footer. 
+% +%%%%%%%%%% +% +%%%% Parameters to initialize for boilerplate page: +% +% \title{Mixed Circular Cylindrical Shells} +% \author{J. Casey Salas} +% \prevdegrees{B.S., University of California (1978) \\ +% S.M., Massachusetts Institute of Technology (1981)} +% \department{Department of Electrical Engineering and Computer Science} +% \degree{Doctor of Philosophy} +%% If the thesis is for two degrees simultaneously, list them both +%% separated by \and like this: +% \degree{Doctor of Philosophy \and Master of Science} +% \degreemonth{February} +% \degreeyear{1987} +% \thesisdate{December 10, 1986} +%% If the thesis is copyright by the Institute, leave this line out and +%% the standard copyright line will be used instead. +% \copyrightnotice{J. Casey Salas, 1986} +%% If there is more than one supervisor, use the \supervisor command +%% once for each. +% \supervisor{John D. Galli}{Director, Sound Instrument Laboratory} +%% This is the department committee chairman, not the thesis committee chairman +% \chairman{Arthur C. Smith} +% {Chairman, Departmental Committee on Graduate Students} +%% Make the titlepage based on the above information. If you need +%% something special and can't use the standard form, you can specify +%% the exact text of the titlepage yourself. Put it in a titlepage +%% environment and leave blank lines where you want vertical space. +%% The spaces will be adjusted to fill the entire page. The dotted +%% lines for the signatures are made with the \signature command. +% +%% The abstractpage environment sets up everything on the page except +%% the text itself. The title and other header material are put at the +%% top of the page, and the supervisors are listed at the bottom. A +%% new page is begun both before and after. Of course, an abstract may +%% be more than one page itself. If you need more control over the +%% format of the page, you can use the abstract environment, which puts +%% the word "Abstract" at the beginning and single spaces its text. +% +% \begin{abstractpage} +% Abstract goes here. +% \end{abstractpage} +% +%%%%%%%% Newer additions +% +% documentclass options - +% vi For MIT course VI or VIII thesis - will copyright the thesis to +% you while giving MIT permission to copy and distribute it. +% upcase Will put much of the cover page in uppercase, as per the +% example on page 17 of the *Specifications for Thesis +% Preparation*, (revised 1989) +% Also added ``All Rights Reserved'' to default copyright notice. +% +%%%%%%%%%%% +% +% Documentclass options (vi and upcase) and changes to copyright notice +% Copyright (c) 1990, by Krishna Sethuraman. +% +% Pagestyle and header generation +% Copyright (c) 1987, 1988 by Peter Nuth +% +% Original version +% Copyright (c) 1987 by Stephen Gildea +% Permission to copy all or part of this work is granted, provided +% that the copies are not made or distributed for resale, and that +% the copyright notice and this notice are retained. +% +% THIS WORK IS PROVIDED ON AN "AS IS" BASIS. THE AUTHOR PROVIDES NO +% WARRANTY WHATSOEVER, EITHER EXPRESS OR IMPLIED, REGARDING THE WORK, +% INCLUDING WARRANTIES WITH RESPECT TO ITS MERCHANTABILITY OR FITNESS +% FOR ANY PARTICULAR PURPOSE. 
+%%%%%%%% + +\NeedsTeXFormat{LaTeX2e} +\ProvidesClass{mitthesis}[1999/10/20] + +\def\mystretch{1.5} % Double spacing hack +\DeclareOption{doublespace}{} % This is default + % So we do not read this style twice +\DeclareOption{singlespace}{ % If he explicitly wants single spacing + \typeout{Single spaced} + \def\mystretch{1}} + +%% `vi' and `upcase' document style options. Krishna Sethuraman (1990) +\newcount\vithesis +\DeclareOption{vi}{\typeout{Course VI/VIII thesis style.}\advance\vithesis by1} +\vithesis=0 + +\DeclareOption{upcase}{\typeout{Uppercase cover page.} + \gdef\choosecase#1{\uppercase\expandafter{#1}}} +\def\choosecase#1{#1} + +%% leftblank option by Kevin Fu +\newif\if@leftblank \@leftblankfalse + +\DeclareOption{leftblank}{\typeout{Intentionally Leaving Pages Blank} +\@leftblanktrue} + +% Thesis looks much like report +\DeclareOption*{\PassOptionsToClass{\CurrentOption}{report}} +\ProcessOptions +\LoadClass{report} + +% If the user wants single spacing, set baselinestretch=1. + +\usepackage{setspace} + +% Note - doublespace.sty has some float-related troubles in +% combination with graphics or color, and is not officially compliant +% with 2e. setspace is a replacement which is 2e-compliant. + +% Read the doublespace style that we got from Rochester: +%\input setdoublespace.sty + +\def\baselinestretch{\mystretch} % Double spacing hack + +%%%%%%% Set up margins and formatting params %%% + +% Margins. +% Note we want 1in top margin assuming no header line, so push header +% into 1in margin. +% Draft mode brings the header back down. + +\setlength{\oddsidemargin}{0.25in} % 1.25in left margin +\setlength{\evensidemargin}{0.25in} % 1.25in left margin (even pages) +\setlength{\topmargin}{0.0in} % 1in top margin +\setlength{\textwidth}{6.0in} % 6.0in text - 1.25in rt margin +\setlength{\textheight}{9in} % Body ht for 1in margins +\addtolength{\topmargin}{-\headheight} % No header, so compensate +\addtolength{\topmargin}{-\headsep} % for header height and separation + +% The next two macros compensate page style for headers and footers +% We only need them in page styles that USE headers and footers. + % If we have a header, it must be 1in from top of page. +\def\pulldownheader{ % Shift header down 1in from top + \addtolength{\topmargin}{\headheight} + \addtolength{\topmargin}{\headsep} + \addtolength{\textheight}{-\headheight} + \addtolength{\textheight}{-\headsep} +} + % If we have a footer, put it 1in up from bottom +\def\pullupfooter{ % Shift footer up + \addtolength{\textheight}{-\footskip} +% \addtolength{\textheight}{-\footheight} %footheight doesn't +% exist in 2e +} + +%%%%%%% End of margins and formatting params %%% + +%%%%%%% Fix various header and footer problems %%% + +% Draft mark on the right side of left pages (outside) +% this mark is also the only one visible on single sided. +\newcommand{\draftrmark}{**DRAFT**} +% Draft mark on the left side of right pages (outside) +\newcommand{\draftlmark}{**DRAFT**} % + +% Macros to make changing the Draft easier +\newcommand{\drmark}[1]{\renewcommand{\draftrmark}{#1}} +\newcommand{\dlmark}[1]{\renewcommand{\draftlmark}{#1}} +\newcommand{\dmark}[1]{\drmark{#1}\dlmark{#1}} + +% Format for draft of thesis. Define our own PageStyle - +% Just like headings, but has foot lines with the date and warning + +\if@twoside % If two-sided printing. 
+\def\ps@drafthead{ + \let\@mkboth\markboth + \def\@oddfoot{\rm \today \hfil \sc \draftrmark} + \def\@evenfoot{\sc \draftlmark \hfil \rm \today } + \def\@evenhead{\rm \thepage\hfil \sl \leftmark} + \def\@oddhead{\hbox{}\sl \rightmark \hfil \rm\thepage} + \def\chaptermark##1{\markboth {\uppercase{\ifnum \c@secnumdepth >\m@ne + \@chapapp\ \thechapter. \ \fi ##1}}{}} + \def\sectionmark##1{\markright {\uppercase{\ifnum \c@secnumdepth >\z@ + \thesection. \ \fi ##1}}} + \pulldownheader % Bring header down from edge + \pullupfooter % Bring footer up +} +\else % If one-sided printing. +\def\ps@drafthead{ + \let\@mkboth\markboth + \def\@oddfoot{\rm \today \hfil \sc \draftrmark} + \def\@oddhead{\hbox{}\sl \rightmark \hfil \rm\thepage} + \def\chaptermark##1{\markright {\uppercase{\ifnum \c@secnumdepth >\m@ne + \@chapapp\ \thechapter. \ \fi ##1}}} + \pulldownheader % Bring header down from edge + \pullupfooter % Bring footer up +} +\fi + +% I redefine these formats that were defined in report.sty +% Definition of 'headings' page style +% Note the use of ##1 for parameter of \def\chaptermark inside the +% \def\ps@headings. +% + +\if@twoside % If two-sided printing. +\def\ps@headings{\let\@mkboth\markboth + \def\@oddfoot{} + \def\@evenfoot{} % No feet. + \def\@evenhead{\rm \thepage\hfil \sl \leftmark} % Left heading. + \def\@oddhead{\hbox{}\sl \rightmark \hfil \rm\thepage} % Right heading. + \def\chaptermark##1{\markboth {\uppercase{\ifnum \c@secnumdepth >\m@ne + \@chapapp\ \thechapter. \ \fi ##1}}{}} + \def\sectionmark##1{\markright {\uppercase{\ifnum \c@secnumdepth >\z@ + \thesection. \ \fi ##1}}} + \pulldownheader % Bring header down from edge +} +\else % If one-sided printing. +\def\ps@headings{\let\@mkboth\markboth + \def\@oddfoot{} + \def\@evenfoot{} % No feet. + \def\@oddhead{\hbox {}\sl \rightmark \hfil \rm\thepage} % Heading. + \def\chaptermark##1{\markright {\uppercase{\ifnum \c@secnumdepth >\m@ne + \@chapapp\ \thechapter. \ \fi ##1}}} + \pulldownheader % Bring header down from edge +} +\fi + +% Redefinition of 'myheadings' page style. +% +\def\ps@myheadings{\let\@mkboth\@gobbletwo + \def\@oddfoot{} + \def\@evenfoot{} + \def\sectionmark##1{} + \def\subsectionmark##1{} + \def\@evenhead{\rm \thepage\hfil\sl\leftmark\hbox {}} % + \def\@oddhead{\hbox{}\sl\rightmark \hfil \rm\thepage} % + \pulldownheader % Bring header down from edge +} + +% Redefine '/chapter' to always start on an odd page. +% Should make no difference in singleside mode. +% +\if@leftblank +% Prints "THIS PAGE INTENTIONALLY LEFT BLANK" on blank pages. +\def\chapter{\clearpage\ifodd\c@page\else + \hbox{}\par\vfill\centerline% + {THIS PAGE INTENTIONALLY LEFT BLANK}% + \vfill\newpage\fi + \thispagestyle{plain} % Page style of chapter page is 'plain' + \global\@topnum\z@ % Prevents figures from going at top of page. + \@afterindentfalse % Suppresses indent in first paragraph. Change + \secdef\@chapter\@schapter} % to \@afterindenttrue to have indent. +\else +\def\chapter{\cleardoublepage % Starts new page. + \thispagestyle{plain} % Page style of chapter page is 'plain' + \global\@topnum\z@ % Prevents figures from going at top of page. + \@afterindentfalse % Suppresses indent in first paragraph. Change + \secdef\@chapter\@schapter} % to \@afterindenttrue to have indent. +\fi +% If using the report style, use - instead of . in the figure number. +\@ifundefined{thechapter}{}{\def\thefigure{\thechapter-\arabic{figure}}} + + +%%%%%%%%% End of Style parameters %%%% + +% Here's Gildea's Boilerplate Stuff. 
+% Copyright (c) 1987 by Stephen Gildea +% Permission to copy all or part of this work is granted, provided +% that the copies are not made or distributed for resale, and that +% the copyright notice and this notice are retained. + +%% Define all the pieces that go on the title page and the abstract. + +% \title and \author already exist + +\def\prevdegrees#1{\gdef\@prevdegrees{#1}} +\def\@prevdegrees{} + +\def\department#1{\gdef\@department{#1}} + +% If you are getting two degrees, use \and between the names. +\def\degree#1{\setbox0\hbox{#1} %for side effect of setting \@degreeword + \gdef\@degree{#1}} + +% \and is used inside the \degree argument to separate two degrees +\def\and{\gdef\@degreeword{degrees} \par and \par} +\def\@degreeword{degree} + +% The copyright notice stuff is a tremendous mess. +% +% \@copyrightnotice is used by \maketitle to actually put text on the +% page; it defaults to ``Copyright MIT 19xx. All rights reserved.'' +% \copyrightnoticetext takes an argument and defined \@copyrightnotice +% to that argument. \copyrightnotice takes an argument, and calls +% \copyrightnoticetext with that argument, preceeded by a copyright +% symbol and followed by ``All rights reserved.'' and the standard +% permission notice. +% +% If you use the 'vi' option, \copyrightnoticetext is used to set the +% copyright to ``(C) Your Name, Current Year in Roman Numerals.'' +% followed by the permission notice. + +% If there is no \copyrightnotice command, it is asssumed that MIT +% holds the copyright. This commands adds the copyright symbol to the +% beginning, and puts the standard permission notice below. +%% ``All rights reserved'' added. Krishna Sethuraman (1990) +\def\copyrightnotice#1{\copyrightnoticetext{\copyright\ #1. All rights +reserved.\par\permission}} + +% Occacionally you will need to exactly specify the text of the +% copyright notice. The \copyrightnoticetext command is then useful. +\long\def\copyrightnoticetext#1{\gdef\@copyrightnotice{#1}} +\def\@copyrightnotice{\copyright\ \Mit\ \@degreeyear. All rights reserved.} + +%% `vi' documentclass option: Specifying this option automatically +%% copyrights the thesis to the author and gives MIT permission to copy and +%% distribute the document. If you want, you can still specify +%% \copyrightnotice{stuff} to copyright to someone else, or +%% \copyrightnoticetext{stuff} to specify the exact text of the copyright +%% notice. +\ifodd\vithesis \copyrightnoticetext{\copyright\ \@author, +\uppercase\expandafter{\romannumeral\@degreeyear}. 
All rights reserved.\par\permission} +%% or just +%%\@degreeyear}} +\typeout{Copyright given to author, + permission to copy/distribute given to MIT.} +\else \typeout{Thesis document copyright MIT unless otherwise (manually) specified} +\fi + +\def\thesisdate#1{\gdef\@thesisdate{#1}} + +% typically just a month and year +\def\degreemonth#1{\gdef\@degreemonth{#1}} +\def\degreeyear#1{\gdef\@degreeyear{#1}} + +% Usage: \supervisor{name}{title} +% \chairman{name}{title} + +% since there can be more than one supervisor, +% we build the appropriate boxes for the titlepage and +% the abstractpage as the user makes multiple calls +% to \supervisor +\newbox\@titlesupervisor \newbox\@abstractsupervisor + +\def\supervisor#1#2{\setbox\@titlesupervisor\vbox + {\unvbox\@titlesupervisor \vskip 10pt% plus 1fil minus 1fil + \def\baselinestretch{1}\large + \signature{Certified by}{#1 \\ #2 \\ Thesis Supervisor}} + \setbox\@abstractsupervisor\vbox{\unvbox\@abstractsupervisor + \vskip\baselineskip \def\baselinestretch{1}\@normalsize + \par\noindent Thesis Supervisor: #1 \\ Title: #2}} + +% department chairman, not thesis committee chairman +\def\chairman#1#2{\gdef\@chairmanname{#1}\gdef\@chairmantitle{#2}} + +%% `upcase' documentclass option: \choosecase is defined either as a dummy or +%% a macro to change the (expanded) argument to uppercase. +\def\maketitle{\begin{titlepage} +\large +{\def\baselinestretch{1.2}\Large\bf \choosecase{\@title} \par} +by\par +{\Large \choosecase{\@author}} +\par +\@prevdegrees +\par +\choosecase{Submitted to the} \choosecase{\@department} \\ +\choosecase{in partial fulfillment of the requirements for the} +\choosecase{\@degreeword} +\choosecase{of} +\par +\choosecase{\@degree} +\par +at the +\par\MIT\par +\@degreemonth\ \@degreeyear +\par +\@copyrightnotice +\par +\vskip 3\baselineskip +\signature{Author}{\@department \\ \@thesisdate} +\par +\vfill +\unvbox\@titlesupervisor +\par +\vfill +\signature{Accepted by}{\@chairmanname \\ \@chairmantitle} +\vfill +\end{titlepage}} + +% this environment should probably be called abstract, +% but we want people to also be able to get at the more +% basic abstract environment +\def\abstractpage{\cleardoublepage +\begin{center}{\large{\bf \@title} \\ +by \\ +\@author \\[\baselineskip]} +\par +\def\baselinestretch{1}\@normalsize +Submitted to the \@department \\ +on \@thesisdate, in partial fulfillment of the \\ +requirements for the \@degreeword\ of \\ +\@degree +\end{center} +\par +\begin{abstract}} + +%% Changed from \unvbox to \unvcopy for use with multiple copies of abstract +%% page. +%% Krishna Sethuraman (1990) +\def\endabstractpage{\end{abstract}\noindent + \unvcopy\@abstractsupervisor \newpage} + +%% This counter is used to save the page number for the second copy of +%% the abstract. +\newcounter{savepage} + +% You can use the titlepage environment to do it all yourself if you +% don't want to use \maketitle. If the titlepage environment, the +% paragraph skip is infinitely stretchable, so if you leave a blank line +% between lines that you want space between, the space will stretch so +% that the title page fills up the entire page. +\def\titlepage{\cleardoublepage\centering + \thispagestyle{empty} + \parindent 0pt \parskip 10pt plus 1fil minus 1fil + \def\baselinestretch{1}\@normalsize\vbox to \vsize\bgroup\vbox to 9in\bgroup} +% The \kern0pt pushes any depth into the height. Thanks to Richard Stone. 
+\def\endtitlepage{\par\kern 0pt\egroup\vss\egroup\newpage} + +\def\MIT{MASSACHUSETTS INSTITUTE OF TECHNOLOGY} +\def\Mit{Massachusetts Institute of Technology} + +\def\permission{\par\noindent{\centering + The author hereby grants to MIT permission to reproduce and to + distribute publicly paper and electronic copies of this thesis + document in whole or in part in any medium now known or hereafter + created.}\par} + +\def\signature#1#2{\par\noindent#1\dotfill\null\\* + {\raggedleft #2\par}} + +\def\abstract{\subsection*{Abstract}\small\def\baselinestretch{1}\@normalsize} +\def\endabstract{\par} + diff -r a86555b02916 -r 763d13f77e03 thesis/aux/mitthesis/sample.org --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/thesis/aux/mitthesis/sample.org Thu Mar 27 17:57:01 2014 -0400 @@ -0,0 +1,7 @@ +#+TITLE: asdhfklasdfkljasdf +#+AUTHOR: asdhfkjasdhfkljdasf + +* Section one +asdfasdfasdf +* Another section +aqghowahaoshfwhowehorwer \ No newline at end of file diff -r a86555b02916 -r 763d13f77e03 thesis/aux/mitthesis/sample.tex --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/thesis/aux/mitthesis/sample.tex Thu Mar 27 17:57:01 2014 -0400 @@ -0,0 +1,40 @@ +% Created 2014-03-20 Thu 23:12 +\documentclass[11pt]{article} +\usepackage[utf8]{inputenc} +\usepackage[T1]{fontenc} +\usepackage{fixltx2e} +\usepackage{graphicx} +\usepackage{longtable} +\usepackage{float} +\usepackage{wrapfig} +\usepackage{rotating} +\usepackage[normalem]{ulem} +\usepackage{amsmath} +\usepackage{textcomp} +\usepackage{marvosym} +\usepackage{wasysym} +\usepackage{amssymb} +\usepackage{hyperref} +\tolerance=1000 +\author{asdhfkjasdhfkljdasf} +\date{\today} +\title{asdhfklasdfkljasdf} +\hypersetup{ + pdfkeywords={}, + pdfsubject={}, + pdfcreator={Emacs 24.2.1 (Org mode 8.2.5h)}} +\begin{document} + +\maketitle +\tableofcontents + + +\section{Section one} +\label{sec-1} +asdfasdfasdf + +\section{Another section} +\label{sec-2} +aqghowahaoshfwhowehorwer +% Emacs 24.2.1 (Org mode 8.2.5h) +\end{document} \ No newline at end of file diff -r a86555b02916 -r 763d13f77e03 thesis/aux/mitthesis/signature.tex --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/thesis/aux/mitthesis/signature.tex Thu Mar 27 17:57:01 2014 -0400 @@ -0,0 +1,31 @@ +% -*- Mode:TeX -*- +% +% Some departments (e.g. Chemistry) require an additional cover page +% with signatures of the thesis committee. Please check with your +% thesis advisor or other appropriate person to determine if such a +% page is required for your thesis. +% +% If you choose not to use the "titlepage" environment, a \newpage +% commands, and several \vspace{\fill} commands may be necessary to +% achieve the required spacing. The \signature command is defined in +% the "mitthesis" class +% +% The following sample appears courtesy of Ben Kaduk and +% was used in his June 2012 doctoral thesis in Chemistry. + +\begin{titlepage} +\begin{large} +This doctoral thesis has been examined by a Committee of the Department +of Chemistry as follows: + +\signature{Professor Jianshu Cao}{Chairman, Thesis Committee \\ + Professor of Chemistry} + +\signature{Professor Troy Van Voorhis}{Thesis Supervisor \\ + Associate Professor of Chemistry} + +\signature{Professor Robert W. 
Field}{Member, Thesis Committee \\
+                   Haslam and Dewey Professor of Chemistry}
+\end{large}
+\end{titlepage}
+
diff -r a86555b02916 -r 763d13f77e03 thesis/aux/mitthesis/templates.zip
Binary file thesis/aux/mitthesis/templates.zip has changed
diff -r a86555b02916 -r 763d13f77e03 thesis/comp.pl
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/comp.pl	Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,19 @@
+#!/bin/perl
+# Montage the listed animation frames into a single image with
+# ImageMagick. Usage: comp.pl <frame-directory> <frame,numbers,...>
+
+use List::Flatten;
+
+$target = shift(@ARGV);
+
+$frames = shift(@ARGV);
+
+@numbers = split(/,/, $frames);
+@images = map {sprintf("$target/%07d.png", $_)} @numbers;
+
+
+$output = $target.".png";
+
+@imagemagick_command = flat("montage", @images, "-geometry", "+2+2", $output);
+
+print "@imagemagick_command\n";
+
+system(@imagemagick_command);
diff -r a86555b02916 -r 763d13f77e03 thesis/cortex.bib
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/cortex.bib	Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,15 @@
+@incollection {Tappert77,
+AUTHOR = {Tappert, Fred D.},
+TITLE = {The parabolic approximation method},
+BOOKTITLE = {Wave propagation and underwater acoustics ({W}orkshop,
+             {M}ystic, {C}onn., 1974)},
+PAGES = {224--287. Lecture Notes in Phys., Vol. 70},
+PUBLISHER = {Springer},
+ADDRESS = {Berlin},
+YEAR = {1977},
+MRCLASS = {76.41 (86.41)},
+ADDENDUM = {\why{This is a cool paper that really helped me out. It's
+                 related to the thesis because they talk about
+                 manifolds and other types of pure, unrefined
+                 bullshit.}}
+}
diff -r a86555b02916 -r 763d13f77e03 thesis/cortex.org
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/cortex.org	Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,1154 @@
+#+title: =CORTEX=
+#+author: Robert McIntyre
+#+email: rlm@mit.edu
+#+description: Using embodied AI to facilitate Artificial Imagination.
+#+keywords: AI, clojure, embodiment
+#+LaTeX_CLASS_OPTIONS: [nofloat]
+
+* Empathy and Embodiment as problem solving strategies
+
+  By the end of this thesis, you will have seen a novel approach to
+  interpreting video using embodiment and empathy. You will also have
+  seen one way to efficiently implement empathy for embodied
+  creatures. Finally, you will become familiar with =CORTEX=, a system
+  for designing and simulating creatures with rich senses, which you
+  may choose to use in your own research.
+
+  This is the core vision of my thesis: that one of the important ways
+  in which we understand others is by imagining ourselves in their
+  position and empathically feeling experiences relative to our own
+  bodies. By understanding events in terms of our own previous
+  corporeal experience, we greatly constrain the possibilities of what
+  would otherwise be an unwieldy exponential search. This extra
+  constraint can be the difference between easily understanding what
+  is happening in a video and being completely lost in a sea of
+  incomprehensible color and movement.
+
+** Recognizing actions in video is extremely difficult
+
+   Consider, for example, the problem of determining what is happening
+   in a video of which this is one frame:
+
+   #+caption: A cat drinking some water. Identifying this action is
+   #+caption: beyond the state of the art for computers.
+   #+ATTR_LaTeX: :width 7cm
+   [[./images/cat-drinking.jpg]]
+
+   It is currently impossible for any computer program to reliably
+   label such a video as ``drinking''. And rightly so -- it is a very
+   hard problem! What features, describable in terms of low-level
+   functions of pixels, can even begin to capture at a high level
+   what is happening here?
+
+   Or suppose that you are building a program that recognizes chairs.
+   How could you ``see'' the chair in figure \ref{hidden-chair}?
+
+   #+caption: The chair in this image is quite obvious to humans, but I
+   #+caption: doubt that any modern computer vision program can find it.
+   #+name: hidden-chair
+   #+ATTR_LaTeX: :width 10cm
+   [[./images/fat-person-sitting-at-desk.jpg]]
+
+   Finally, how is it that you can easily tell the difference between
+   how the girl's /muscles/ are working in the two images of figure
+   \ref{girl}?
+
+   #+caption: The mysterious ``common sense'' appears here as you are able
+   #+caption: to discern the difference in how the girl's arm muscles
+   #+caption: are activated between the two images.
+   #+name: girl
+   #+ATTR_LaTeX: :width 7cm
+   [[./images/wall-push.png]]
+
+   Each of these examples tells us something about what might be going
+   on in our minds as we easily solve these recognition problems.
+
+   The hidden chairs show us that we are strongly triggered by cues
+   relating to the position of human bodies, and that we can determine
+   the overall physical configuration of a human body even if much of
+   that body is occluded.
+
+   The picture of the girl pushing against the wall tells us that we
+   have common sense knowledge about the kinetics of our own bodies.
+   We know well how our muscles would have to work to maintain us in
+   most positions, and we can easily project this self-knowledge to
+   imagined positions triggered by images of the human body.
+
+** =EMPATH= neatly solves recognition problems
+
+   I propose a system that can express the types of recognition
+   problems above in a form amenable to computation. It is split into
+   four parts (a code sketch follows the list):
+
+   - Free/Guided Play :: The creature moves around and experiences the
+        world through its unique perspective. Many otherwise
+        complicated actions are easily described in the language of a
+        full suite of body-centered, rich senses. For example,
+        drinking is the feeling of water sliding down your throat, and
+        cooling your insides. It's often accompanied by bringing your
+        hand close to your face, or bringing your face close to water.
+        Sitting down is the feeling of bending your knees, activating
+        your quadriceps, then feeling a surface with your bottom and
+        relaxing your legs. These body-centered action descriptions
+        can be either learned or hard-coded.
+   - Posture Imitation :: When trying to interpret a video or image,
+        the creature takes a model of itself and aligns it with
+        whatever it sees. This alignment can even cross species, as
+        when humans try to align themselves with things like ponies,
+        dogs, or other humans with a different body type.
+   - Empathy :: The alignment triggers associations with
+        sensory data from prior experiences. For example, the
+        alignment itself easily maps to proprioceptive data. Any
+        sounds or obvious skin contact in the video can, to a lesser
+        extent, trigger previous experiences. Segments of previous
+        experiences are stitched together to form a coherent and
+        complete sensory portrait of the scene.
+   - Recognition :: With the scene described in terms of first
+        person sensory events, the creature can now run its
+        action-identification programs on this synthesized sensory
+        data, just as it would if it were actually experiencing the
+        scene first-hand. If previous experience has been accurately
+        retrieved, and if it is analogous enough to the scene, then
+        the creature will correctly identify the action in the scene.
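+
+   To make the four-part division concrete, here is a minimal sketch
+   of how the parts might fit together in code. The functions
+   =align=, =imagine=, and =recognize= are hypothetical stand-ins for
+   illustration only; they are not part of =EMPATH='s actual API.
+
+   #+begin_src clojure
+;; Hypothetical sketch of the four-part empathy pipeline. Free play
+;; is assumed to have already produced a vector of experience maps.
+(defn align
+  "Posture imitation (stub): fit a model of the self to the observed
+   scene. Here the scene is assumed to carry joint angles directly."
+  [scene] (:joint-angles scene))

+(defn imagine
+  "Empathy (stub): find a prior free-play experience whose
+   proprioception matches the alignment, filling in unobserved senses."
+  [experiences alignment]
+  (first (filter #(= (:proprioception %) alignment) experiences)))

+(defn recognize
+  "Recognition (stub): apply named action predicates to the imagined
+   sensory data and return the first one that matches."
+  [action-predicates imagined]
+  (first (for [[name pred?] action-predicates :when (pred? imagined)]
+           name)))

+(defn empathic-label
+  "Label the action in a scene: align, imagine, then recognize."
+  [action-predicates experiences scene]
+  (recognize action-predicates (imagine experiences (align scene))))
+   #+end_src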
+
+   For example, I think humans are able to label the cat video as
+   ``drinking'' because they imagine /themselves/ as the cat, and
+   imagine putting their face up against a stream of water and
+   sticking out their tongue. In that imagined world, they can feel
+   the cool water hitting their tongue, and feel the water entering
+   their body, and are able to recognize that /feeling/ as drinking.
+   So, the label of the action is not really in the pixels of the
+   image, but is found clearly in a simulation inspired by those
+   pixels. An imaginative system, having been trained on drinking and
+   non-drinking examples and learning that the most important
+   component of drinking is the feeling of water sliding down one's
+   throat, would analyze a video of a cat drinking in the following
+   manner:
+
+   1. Create a physical model of the video by putting a ``fuzzy''
+      model of its own body in place of the cat. Possibly also create
+      a simulation of the stream of water.
+
+   2. Play out this simulated scene and generate imagined sensory
+      experience. This will include relevant muscle contractions, a
+      close-up view of the stream from the cat's perspective, and most
+      importantly, the imagined feeling of water entering the
+      mouth. The imagined sensory experience can come from a
+      simulation of the event, but can also be pattern-matched from
+      previous, similar embodied experience.
+
+   3. The action is now easily identified as drinking by the sense of
+      taste alone. The other senses (such as the tongue moving in and
+      out) help to give plausibility to the simulated action. Note that
+      the sense of vision, while critical in creating the simulation,
+      is not critical for identifying the action from the simulation.
+
+   For the chair examples, the process is even easier:
+
+   1. Align a model of your body to the person in the image.
+
+   2. Generate proprioceptive sensory data from this alignment.
+
+   3. Use the imagined proprioceptive data as a key to look up related
+      sensory experience associated with that particular proprioceptive
+      feeling.
+
+   4. Retrieve the feeling of your bottom resting on a surface, your
+      knees bent, and your leg muscles relaxed.
+
+   5. This sensory information is consistent with the =sitting?=
+      sensory predicate, so you (and the entity in the image) must be
+      sitting.
+
+   6. There must be a chair-like object since you are sitting.
+
+   Empathy offers yet another alternative to the age-old AI
+   representation question: ``What is a chair?'' --- A chair is the
+   feeling of sitting.
+
+   My program, =EMPATH=, uses this empathic problem-solving technique
+   to interpret the actions of a simple, worm-like creature.
+
+   #+caption: The worm performs many actions during free play such as
+   #+caption: curling, wiggling, and resting.
+   #+name: worm-intro
+   #+ATTR_LaTeX: :width 15cm
+   [[./images/worm-intro-white.png]]
+
+   #+caption: =EMPATH= recognized and classified each of these poses by
+   #+caption: inferring the complete sensory experience from
+   #+caption: proprioceptive data.
+   #+name: worm-recognition-intro
+   #+ATTR_LaTeX: :width 15cm
+   [[./images/worm-poses.png]]
+
+   One powerful advantage of empathic problem solving is that it
+   factors the action recognition problem into two easier problems. To
+   use empathy, you need an /aligner/, which takes the video and a
+   model of your body, and aligns the model with the video. Then, you
+   need a /recognizer/, which uses the aligned model to interpret the
+   action.
The power in this method lies in the fact that you describe
+   all actions from a body-centered viewpoint. You are less tied to
+   the particulars of any visual representation of the actions. If you
+   teach the system what ``running'' is, and you have a good enough
+   aligner, the system will from then on be able to recognize running
+   from any point of view, even strange points of view like above or
+   underneath the runner. This is in contrast to action recognition
+   schemes that try to identify actions using a non-embodied approach.
+   If these systems learn about running as viewed from the side, they
+   will not automatically be able to recognize running from any other
+   viewpoint.
+
+   Another powerful advantage is that using the language of multiple
+   body-centered rich senses to describe body-centered actions offers a
+   massive boost in descriptive capability. Consider how difficult it
+   would be to compose a set of HOG filters to describe the action of
+   a simple worm-creature ``curling'' so that its head touches its
+   tail, and then behold the simplicity of describing this action in a
+   language designed for the task (listing \ref{grand-circle-intro}):
+
+   #+caption: Body-centered actions are best expressed in a body-centered
+   #+caption: language. This code detects when the worm has curled into a
+   #+caption: full circle. Imagine how you would replicate this functionality
+   #+caption: using low-level pixel features such as HOG filters!
+   #+name: grand-circle-intro
+   #+attr_latex: [htpb]
+#+begin_listing clojure
+   #+begin_src clojure
+(defn grand-circle?
+  "Does the worm form a majestic circle (one end touching the other)?"
+  [experiences]
+  (and (curled? experiences)
+       (let [worm-touch (:touch (peek experiences))
+             tail-touch (worm-touch 0)
+             head-touch (worm-touch 4)]
+         (and (< 0.55 (contact worm-segment-bottom-tip tail-touch))
+              (< 0.55 (contact worm-segment-top-tip head-touch))))))
+   #+end_src
+   #+end_listing
+
+
+** =CORTEX= is a toolkit for building sensate creatures
+
+   I built =CORTEX= to be a general AI research platform for doing
+   experiments involving multiple rich senses and a wide variety and
+   number of creatures. I intend it to be useful as a library for many
+   more projects than just this one. =CORTEX= was necessary to meet a
+   need among AI researchers at CSAIL and beyond: people will often
+   invent neat ideas that are best expressed in the language of
+   creatures and senses, but in order to explore those ideas they must
+   first build a platform in which they can create simulated creatures
+   with rich senses! There are many ideas that would be simple to
+   execute (such as =EMPATH=), but attached to them is the multi-month
+   effort to make a good creature simulator. Often, that initial
+   investment of time proves to be too much, and the project must make
+   do with a lesser environment.
+
+   =CORTEX= is well suited as an environment for embodied AI research
+   for three reasons:
+
+   - You can create new creatures using Blender, a popular 3D modeling
+     program. Each sense can be specified using special blender nodes
+     with biologically inspired parameters. You need not write any
+     code to create a creature, and can use a wide library of
+     pre-existing blender models as a base for your own creatures.
+
+   - =CORTEX= implements a wide variety of senses, including touch,
+     proprioception, vision, hearing, and muscle tension. Complicated
+     senses like touch and vision involve multiple sensory elements
+     embedded in a 2D surface.
You have complete control over the
+     distribution of these sensor elements through the use of simple
+     png image files. In particular, =CORTEX= implements more
+     comprehensive hearing than any other creature simulation system
+     available.
+
+   - =CORTEX= supports any number of creatures and any number of
+     senses. Time in =CORTEX= dilates so that the simulated creatures
+     always perceive a perfectly smooth flow of time, regardless of
+     the actual computational load.
+
+   =CORTEX= is built on top of =jMonkeyEngine3=, which is a video game
+   engine designed to create cross-platform 3D desktop games. =CORTEX=
+   is mainly written in clojure, a dialect of =LISP= that runs on the
+   java virtual machine (JVM). The API for creating and simulating
+   creatures and senses is entirely expressed in clojure, though many
+   senses are implemented at the layer of jMonkeyEngine or below. For
+   example, for the sense of hearing I use a layer of clojure code on
+   top of a layer of java JNI bindings that drive a layer of =C++=
+   code which implements a modified version of =OpenAL= to support
+   multiple listeners. =CORTEX= is the only simulation environment
+   that I know of that can support multiple entities that can each
+   hear the world from their own perspective. Other senses also
+   require a small layer of Java code. =CORTEX= also uses =bullet=, a
+   physics simulator written in =C=.
+
+   #+caption: Here is the worm from above modeled in Blender, a free
+   #+caption: 3D-modeling program. Senses and joints are described
+   #+caption: using special nodes in Blender.
+   #+name: blender-worm
+   #+ATTR_LaTeX: :width 12cm
+   [[./images/blender-worm.png]]
+
+   Here are some things I anticipate that =CORTEX= might be used for:
+
+   - exploring new ideas about sensory integration
+   - distributed communication among swarm creatures
+   - self-learning using free exploration
+   - evolutionary algorithms involving creature construction
+   - exploration of exotic senses and effectors that are not possible
+     in the real world (such as telekinesis or a semantic sense)
+   - imagination using subworlds
+
+   During one test with =CORTEX=, I created 3,000 creatures each with
+   their own independent senses and ran them all at only 1/80 real
+   time. In another test, I created a detailed model of my own hand,
+   equipped with a realistic distribution of touch (more sensitive at
+   the fingertips), as well as eyes and ears, and it ran at around 1/4
+   real time.
+
+#+BEGIN_LaTeX
+   \begin{sidewaysfigure}
+   \includegraphics[width=9.5in]{images/full-hand.png}
+   \caption{
+   I modeled my own right hand in Blender and rigged it with all the
+   senses that {\tt CORTEX} supports. My simulated hand has a
+   biologically inspired distribution of touch sensors. The senses are
+   displayed on the right, and the simulation is displayed on the
+   left. Notice that my hand is curling its fingers, that it can see
+   its own finger from the eye in its palm, and that it can feel its
+   own thumb touching its palm.}
+   \end{sidewaysfigure}
+#+END_LaTeX
+
+** Contributions
+
+   - I built =CORTEX=, a comprehensive platform for embodied AI
+     experiments. =CORTEX= supports many features lacking in other
+     systems, such as proper simulation of hearing. It is easy to
+     create new =CORTEX= creatures using Blender, a free 3D modeling
+     program.
+
+   - I built =EMPATH=, which uses =CORTEX= to identify the actions of
+     a worm-like creature using a computational model of empathy.
+
+* Building =CORTEX=
+
+** To explore embodiment, we need a world, body, and senses
+
+** Because of Time, simulation is preferable to reality
+
+** Video game engines are a great starting point
+
+** Bodies are composed of segments connected by joints
+
+** Eyes reuse standard video game components
+
+** Hearing is hard; =CORTEX= does it right
+
+** Touch uses hundreds of hair-like elements
+
+** Proprioception is the sense that makes everything ``real''
+
+** Muscles are both effectors and sensors
+
+** =CORTEX= brings complex creatures to life!
+
+** =CORTEX= enables many possibilities for further research
+
+* Empathy in a simulated worm
+
+  Here I develop a computational model of empathy, using =CORTEX= as a
+  base. Empathy in this context is the ability to observe another
+  creature and infer what sorts of sensations that creature is
+  feeling. My empathy algorithm involves multiple phases. First is
+  free-play, where the creature moves around and gains sensory
+  experience. From this experience I construct a representation of the
+  creature's sensory state space, which I call \Phi-space. Using
+  \Phi-space, I construct an efficient function which takes the
+  limited data that comes from observing another creature and enriches
+  it to a full complement of imagined sensory data. I can then use the
+  imagined sensory data to recognize what the observed creature is
+  doing and feeling, using straightforward embodied action predicates.
+  This is all demonstrated using a simple worm-like creature and
+  recognizing worm actions based on limited data.
+
+  #+caption: Here is the worm with which we will be working.
+  #+caption: It is composed of 5 segments. Each segment has a
+  #+caption: pair of extensor and flexor muscles. Each of the
+  #+caption: worm's four joints is a hinge joint which allows
+  #+caption: about 30 degrees of rotation to either side. Each segment
+  #+caption: of the worm is touch-capable and has a uniform
+  #+caption: distribution of touch sensors on each of its faces.
+  #+caption: Each joint has a proprioceptive sense to detect
+  #+caption: relative positions. The worm segments are all the
+  #+caption: same except for the first one, which has a much
+  #+caption: higher weight than the others to allow for easy
+  #+caption: manual motor control.
+  #+name: basic-worm-view
+  #+ATTR_LaTeX: :width 10cm
+  [[./images/basic-worm-view.png]]
+
+  #+caption: Program for reading a worm from a blender file and
+  #+caption: outfitting it with the senses of proprioception,
+  #+caption: touch, and the ability to move, as specified in the
+  #+caption: blender file.
+  #+name: get-worm
+  #+begin_listing clojure
+  #+begin_src clojure
+(defn worm []
+  (let [model (load-blender-model "Models/worm/worm.blend")]
+    {:body (doto model (body!))
+     :touch (touch! model)
+     :proprioception (proprioception! model)
+     :muscles (movement! model)}))
+  #+end_src
+  #+end_listing
+
+** Embodiment factors action recognition into manageable parts
+
+   Using empathy, I divide the problem of action recognition into a
+   recognition process expressed in the language of a full complement
+   of senses, and an imaginative process that generates full sensory
+   data from partial sensory data. Splitting the action recognition
+   problem in this manner greatly reduces the total amount of work to
+   recognize actions: the imaginative process is mostly just matching
+   previous experience, and the recognition process gets to use all
+   the senses to directly describe any action.
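+
+   Before looking at the action predicates themselves, it helps to
+   fix the rough shape of the data they consume. The toy frame below
+   is an illustration only; its key set and value formats are
+   assumptions inferred from how the predicates in the next section
+   take frames apart, not a specification of =CORTEX='s output.
+
+   #+begin_src clojure
+;; A toy frame of worm experience, keyed by sense. Assumed shapes:
+;; :proprioception -- one [heading pitch bend] triple per joint,
+;; :touch          -- one [coordinates contact-values] patch per segment,
+;; :muscle         -- one activation level per muscle, indexed by id.
+(def example-frame
+  {:proprioception [[0.0 0.0 0.2] [0.0 0.0 0.4]]
+   :touch          [[[[8 15] [9 15]] [[0.1 0] [0.1 0]]]]
+   :muscle         [0 0 2 3]})

+;; The experience vector is simply such frames in time order. The
+;; predicates examine the most recent frame with (peek experiences):
+(def experiences [example-frame])
+(:proprioception (peek experiences))
+;; => [[0.0 0.0 0.2] [0.0 0.0 0.4]]
+   #+end_src
+
+   Recognition reads whole frames like this one; imagination's job is
+   to rebuild the =:touch= and =:muscle= entries when only
+   =:proprioception= can be observed.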
+
+** Action recognition is easy with a full gamut of senses
+
+   Embodied representations using multiple senses such as touch,
+   proprioception, and muscle tension turn out to be exceedingly
+   efficient at describing body-centered actions. It is the ``right
+   language for the job''. For example, it takes only around 5 lines
+   of LISP code to describe the action of ``curling'' using embodied
+   primitives. It takes about 10 lines to describe the seemingly
+   complicated action of wiggling.
+
+   The following action predicates each take a stream of sensory
+   experience, observe however much of it they desire, and decide
+   whether the worm is doing the action they describe. =curled?=
+   relies on proprioception, =resting?= relies on touch, =wiggling?=
+   relies on a Fourier analysis of muscle contraction, and
+   =grand-circle?= relies on touch and reuses =curled?= as a guard.
+
+   #+caption: Program for detecting whether the worm is curled. This is the
+   #+caption: simplest action predicate, because it only uses the last frame
+   #+caption: of sensory experience, and only uses proprioceptive data. Even
+   #+caption: this simple predicate, however, is automatically frame
+   #+caption: independent and ignores vermopomorphic differences such as
+   #+caption: worm textures and colors.
+   #+name: curled
+   #+attr_latex: [htpb]
+#+begin_listing clojure
+   #+begin_src clojure
+(defn curled?
+  "Is the worm curled up?"
+  [experiences]
+  (every?
+   (fn [[_ _ bend]]
+     (> (Math/sin bend) 0.64))
+   (:proprioception (peek experiences))))
+   #+end_src
+   #+end_listing
+
+   #+caption: Program for summarizing the touch information in a patch
+   #+caption: of skin.
+   #+name: touch-summary
+   #+attr_latex: [htpb]
+
+#+begin_listing clojure
+   #+begin_src clojure
+(defn contact
+  "Determine how much contact a particular worm segment has with
+   other objects. Returns a value between 0 and 1, where 1 is full
+   contact and 0 is no contact."
+  [touch-region [coords contact :as touch]]
+  (-> (zipmap coords contact)
+      (select-keys touch-region)
+      (vals)
+      (#(map first %))
+      (average)
+      (* 10)
+      (- 1)
+      (Math/abs)))
+   #+end_src
+   #+end_listing
+
+   #+caption: Program for detecting whether the worm is at rest. This program
+   #+caption: uses a summary of the tactile information from the underbelly
+   #+caption: of the worm, and is only true if every segment is touching the
+   #+caption: floor. Note that this function contains no references to
+   #+caption: proprioception at all.
+   #+name: resting
+   #+attr_latex: [htpb]
+#+begin_listing clojure
+   #+begin_src clojure
+(def worm-segment-bottom (rect-region [8 15] [14 22]))
+
+(defn resting?
+  "Is the worm resting on the ground?"
+  [experiences]
+  (every?
+   (fn [touch-data]
+     (< 0.9 (contact worm-segment-bottom touch-data)))
+   (:touch (peek experiences))))
+   #+end_src
+   #+end_listing
+
+   #+caption: Program for detecting whether the worm is curled up into a
+   #+caption: full circle. Here the embodied approach begins to shine, as
+   #+caption: I am able to both use a previous action predicate (=curled?=)
+   #+caption: as well as the direct tactile experience of the head and tail.
+   #+name: grand-circle
+   #+attr_latex: [htpb]
+#+begin_listing clojure
+   #+begin_src clojure
+(def worm-segment-bottom-tip (rect-region [15 15] [22 22]))
+
+(def worm-segment-top-tip (rect-region [0 15] [7 22]))
+
+(defn grand-circle?
+  "Does the worm form a majestic circle (one end touching the other)?"
+  [experiences]
+  (and (curled? experiences)
  #+caption: Program for detecting whether the worm is at rest. This program
  #+caption: uses a summary of the tactile information from the underbelly
  #+caption: of the worm, and is only true if every segment is touching the
  #+caption: floor. Note that this function contains no references to
  #+caption: proprioception at all.
  #+name: resting
  #+attr_latex: [htpb]
  #+begin_listing clojure
  #+begin_src clojure
(def worm-segment-bottom (rect-region [8 15] [14 22]))

(defn resting?
  "Is the worm resting on the ground?"
  [experiences]
  (every?
   (fn [touch-data]
     (< 0.9 (contact worm-segment-bottom touch-data)))
   (:touch (peek experiences))))
  #+end_src
  #+end_listing

  #+caption: Program for detecting whether the worm is curled up into a
  #+caption: full circle. Here the embodied approach begins to shine, as
  #+caption: I am able to both use a previous action predicate (=curled?=)
  #+caption: as well as the direct tactile experience of the head and tail.
  #+name: grand-circle
  #+attr_latex: [htpb]
  #+begin_listing clojure
  #+begin_src clojure
(def worm-segment-bottom-tip (rect-region [15 15] [22 22]))

(def worm-segment-top-tip (rect-region [0 15] [7 22]))

(defn grand-circle?
  "Does the worm form a majestic circle (one end touching the other)?"
  [experiences]
  (and (curled? experiences)
       (let [worm-touch (:touch (peek experiences))
             tail-touch (worm-touch 0)
             head-touch (worm-touch 4)]
         (and (< 0.55 (contact worm-segment-bottom-tip tail-touch))
              (< 0.55 (contact worm-segment-top-tip head-touch))))))
  #+end_src
  #+end_listing

  #+caption: Program for detecting whether the worm has been wiggling for
  #+caption: the last few frames. It uses a Fourier analysis of the muscle
  #+caption: contractions of the worm's tail to determine wiggling. This is
  #+caption: significant because there is no particular frame that clearly
  #+caption: indicates that the worm is wiggling --- only when multiple frames
  #+caption: are analyzed together is the wiggling revealed. Defining
  #+caption: wiggling this way also gives the worm an opportunity to learn
  #+caption: and recognize ``frustrated wiggling'', where the worm tries to
  #+caption: wiggle but can't. Frustrated wiggling is very visually different
  #+caption: from actual wiggling, but this definition gives it to us for free.
  #+name: wiggling
  #+attr_latex: [htpb]
  #+begin_listing clojure
  #+begin_src clojure
;; FastFourierTransformer, DftNormalization, and TransformType are
;; imported from the Apache Commons Math library
;; (org.apache.commons.math3.transform).
(defn fft [nums]
  (map
   #(.getReal %)
   (.transform
    (FastFourierTransformer. DftNormalization/STANDARD)
    (double-array nums) TransformType/FORWARD)))

(def indexed (partial map-indexed vector))

(defn max-indexed [s]
  (first (sort-by (comp - second) (indexed s))))

(defn wiggling?
  "Is the worm wiggling?"
  [experiences]
  (let [analysis-interval 0x40]
    (when (> (count experiences) analysis-interval)
      (let [a-flex 3
            a-ex   2
            muscle-activity
            (map :muscle (vector:last-n experiences analysis-interval))
            base-activity
            (map #(- (% a-flex) (% a-ex)) muscle-activity)]
        (= 2
           (first
            (max-indexed
             (map #(Math/abs %)
                  (take 20 (fft base-activity))))))))))
  #+end_src
  #+end_listing

  With these action predicates, I can now recognize the actions of
  the worm while it is moving under my control and I have access to
  all the worm's senses.

  #+caption: Use the action predicates defined earlier to report on
  #+caption: what the worm is doing while in simulation.
  #+name: report-worm-activity
  #+attr_latex: [htpb]
  #+begin_listing clojure
  #+begin_src clojure
(defn debug-experience
  [experiences text]
  (cond
   (grand-circle? experiences) (.setText text "Grand Circle")
   (curled? experiences)       (.setText text "Curled")
   (wiggling? experiences)     (.setText text "Wiggling")
   (resting? experiences)      (.setText text "Resting")))
  #+end_src
  #+end_listing

  #+caption: Using =debug-experience=, the body-centered predicates
  #+caption: work together to classify the behavior of the worm.
  #+caption: The predicates are operating with access to the worm's
  #+caption: full sensory data.
  #+name: worm-identify-init
  #+ATTR_LaTeX: :width 10cm
  [[./images/worm-identify-init.png]]

  These action predicates satisfy the recognition requirement of an
  empathic recognition system. There is power in the simplicity of
  the action predicates. They describe their actions without getting
  confused by visual details of the worm. Each one is frame
  independent, but more than that, they are each independent of
  irrelevant visual details of the worm and the environment. They
  will work regardless of whether the worm is a different color or
  heavily textured, or if the environment has strange lighting.

  The trick now is to make the action predicates work even when the
  sensory data on which they depend is absent. If I can do that, then
  I will have gained much.

** \Phi-space describes the worm's experiences

  As a first step towards building empathy, I need to gather all of
  the worm's experiences during free play. I use a simple vector to
  store all the experiences.
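  Each entry of this vector is a map from sense names to that frame's
  sensory data. A hypothetical sketch of one entry, inferred from how
  the action predicates above destructure their input; the values are
  illustrative only, and other senses would appear as additional keys:

  #+begin_src clojure
;; hypothetical shape of a single frame of experience
{:proprioception [[0.1 0.0 0.5] [0.1 0.0 0.4]] ; [heading pitch bend] per joint
 :touch [[[[8 15] [9 15]]                      ; [coords activations] per segment
          [[0.0 0.1] [0.1 0.1]]]]
 :muscle [0 40 0 0]}                           ; activation per muscle
  #+end_src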
  Each element of the experience vector exists in the vast space of
  all possible worm-experiences. Most of this vast space is actually
  unreachable due to physical constraints of the worm's body. For
  example, the worm's segments are connected by hinge joints that put
  a practical limit on the worm's range of motions without limiting
  its degrees of freedom. Some groupings of senses are impossible;
  the worm cannot be bent into a circle so that its ends are
  touching and at the same time not also experience the sensation of
  touching itself.

  As the worm moves around during free play and its experience vector
  grows larger, the vector begins to define a subspace which is all
  the sensations the worm can practically experience during normal
  operation. I call this subspace \Phi-space, short for
  physical-space. The experience vector defines a path through
  \Phi-space. This path has interesting properties that all derive
  from physical embodiment. The proprioceptive components are
  completely smooth, because in order for the worm to move from one
  position to another, it must pass through the intermediate
  positions. The path invariably forms loops as actions are repeated.
  Finally and most importantly, proprioception actually gives very
  strong inference about the other senses. For example, when the worm
  is flat, you can infer that it is touching the ground and that its
  muscles are not active, because if the muscles were active, the
  worm would be moving and would not be perfectly flat. In order to
  stay flat, the worm has to be touching the ground, or it would
  again be moving out of the flat position due to gravity. If the
  worm is positioned in such a way that it interacts with itself,
  then it is very likely to be feeling the same tactile feelings as
  the last time it was in that position, because it has the same body
  as then. If you observe multiple frames of proprioceptive data,
  then you can become increasingly confident about the exact
  activations of the worm's muscles, because it generally takes a
  unique combination of muscle contractions to transform the worm's
  body along a specific path through \Phi-space.

  There is a simple way of taking \Phi-space and the total ordering
  provided by an experience vector and reliably inferring the rest of
  the senses.

** Empathy is the process of tracing through \Phi-space

  Here is the core of a basic empathy algorithm, starting with an
  experience vector:

  First, group the experiences into tiered proprioceptive bins. I use
  three bins, with sizes in powers of 10; the smallest bin has an
  approximate size of 0.001 radians in all proprioceptive dimensions.

  Then, given a sequence of proprioceptive input, generate a set of
  matching experience records for each input, using the tiered
  proprioceptive bins.

  Finally, to infer sensory data, select the longest consecutive
  chain of experiences. Consecutive means that the experiences appear
  next to each other in the experience vector.

  This algorithm has three advantages:

  1. It's simple

  2. It's very fast -- retrieving possible interpretations takes
     constant time. Tracing through chains of interpretations takes
     time proportional to the average number of experiences in a
     proprioceptive bin. Redundant experiences in \Phi-space can be
     merged to save computation.

  3. It protects from wrong interpretations of transient ambiguous
     proprioceptive data. For example, if the worm is flat for just
     an instant, this flatness will not be interpreted as implying
     that the worm has its muscles relaxed, since the flatness is
     part of a longer chain which includes a distinct pattern of
     muscle activation. Markov chains or other memoryless statistical
     models that operate on individual frames may very well make this
     mistake.
  #+caption: Program to convert an experience vector into a
  #+caption: proprioceptively binned lookup function.
  #+name: bin
  #+attr_latex: [htpb]
  #+begin_listing clojure
  #+begin_src clojure
(defn bin [digits]
  (fn [angles]
    (->> angles
         (flatten)
         (map (juxt #(Math/sin %) #(Math/cos %)))
         (flatten)
         (mapv #(Math/round (* % (Math/pow 10 (dec digits))))))))

(defn gen-phi-scan
  "Nearest-neighbors with binning. Only returns a result if
   the proprioceptive data is within 10% of a previously recorded
   result in all dimensions."
  [phi-space]
  (let [bin-keys (map bin [3 2 1])
        bin-maps
        (map (fn [bin-key]
               (group-by
                (comp bin-key :proprioception phi-space)
                (range (count phi-space)))) bin-keys)
        lookups (map (fn [bin-key bin-map]
                       (fn [proprio] (bin-map (bin-key proprio))))
                     bin-keys bin-maps)]
    (fn lookup [proprio-data]
      (set (some #(% proprio-data) lookups)))))
  #+end_src
  #+end_listing
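  As a quick illustration of the binning, here is a hypothetical REPL
  session keying a single [heading pitch bend] triple at the finest
  tier. Binning the sine and cosine of each angle, rather than the
  angle itself, presumably keeps nearby poses in nearby bins even
  across the \pm\pi wrap-around:

  #+begin_src clojure
;; hypothetical REPL session
((bin 3) [[0.5 0.2 1.0]])
;; => [48 88 20 98 84 54] ; rounded [sin cos] pairs, scaled by 100
  #+end_src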
  #+caption: =longest-thread= finds the longest path of consecutive
  #+caption: experiences to explain proprioceptive worm data.
  #+name: phi-space-history-scan
  #+ATTR_LaTeX: :width 10cm
  [[./images/aurellem-gray.png]]

  =longest-thread= infers sensory data by stitching together pieces
  from previous experience. It prefers longer chains of previous
  experience to shorter ones. For example, during training the worm
  might rest on the ground for one second before it performs its
  exercises. If during recognition the worm rests on the ground for
  five seconds, =longest-thread= will accommodate this five second
  rest period by looping the one second rest chain five times.

  =longest-thread= takes time proportional to the average number of
  entries in a proprioceptive bin, because for each element in the
  starting bin it performs a series of set lookups in the preceding
  bins. If the total history is limited, then this is only a constant
  multiple times the number of entries in the starting bin. This
  analysis also applies even if the action requires multiple longest
  chains -- it's still the average number of entries in a
  proprioceptive bin times the desired chain length. Because
  =longest-thread= is so efficient and simple, I can interpret
  worm-actions in real time.

  #+caption: Program to calculate empathy by tracing through \Phi-space
  #+caption: and finding the longest (i.e. most coherent) interpretation
  #+caption: of the data.
  #+name: longest-thread
  #+attr_latex: [htpb]
  #+begin_listing clojure
  #+begin_src clojure
(defn longest-thread
  "Find the longest thread from phi-index-sets. The index sets should
   be ordered from most recent to least recent."
  [phi-index-sets]
  (loop [result '()
         [thread-bases & remaining :as phi-index-sets] phi-index-sets]
    (if (empty? phi-index-sets)
      (vec result)
      (let [threads
            (for [thread-base thread-bases]
              (loop [thread (list thread-base)
                     remaining remaining]
                (let [next-index (dec (first thread))]
                  (cond (empty? remaining) thread
                        (contains? (first remaining) next-index)
                        (recur
                         (cons next-index thread) (rest remaining))
                        :else thread))))
            longest-thread
            (reduce (fn [thread-a thread-b]
                      (if (> (count thread-a) (count thread-b))
                        thread-a thread-b))
                    '(nil)
                    threads)]
        (recur (concat longest-thread result)
               (drop (count longest-thread) phi-index-sets))))))
  #+end_src
  #+end_listing

  There is one final piece, which is to replace missing sensory data
  with a best-guess estimate. While I could fill in missing data by
  using a gradient over the closest known sensory data points,
  averages can be misleading. It is certainly possible to create an
  impossible sensory state by averaging two possible sensory states.
  Therefore, I simply replicate the most recent sensory experience to
  fill in the gaps.

  #+caption: Fill in blanks in sensory experience by replicating the most
  #+caption: recent experience.
  #+name: infer-nils
  #+attr_latex: [htpb]
  #+begin_listing clojure
  #+begin_src clojure
(defn infer-nils
  "Replace nils with the next available non-nil element in the
   sequence, or barring that, 0."
  [s]
  (loop [i (dec (count s))
         v (transient s)]
    (if (zero? i) (persistent! v)
        (if-let [cur (v i)]
          (if (get v (dec i) 0)
            (recur (dec i) v)
            (recur (dec i) (assoc! v (dec i) cur)))
          (recur i (assoc! v i 0))))))
  #+end_src
  #+end_listing
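  A hypothetical REPL example of =infer-nils=: interior gaps take the
  value of the next available non-nil element, and trailing gaps
  become 0:

  #+begin_src clojure
;; hypothetical REPL session
(infer-nils [1 nil 2 nil nil])
;; => [1 2 2 0 0]
  #+end_src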
** Efficient action recognition with =EMPATH=

  To use =EMPATH= with the worm, I first need to gather a set of
  experiences from the worm that includes the actions I want to
  recognize. The =generate-phi-space= program (listing
  \ref{generate-phi-space}) runs the worm through a series of
  exercises and gathers those experiences into a vector. The
  =do-all-the-things= program is a routine expressed in a simple
  muscle contraction script language for automated worm control. It
  causes the worm to rest, curl, and wiggle over about 700 frames
  (approx. 11 seconds).

  #+caption: Program to gather the worm's experiences into a vector for
  #+caption: further processing. The =motor-control-program= line uses
  #+caption: a motor control script that causes the worm to execute a series
  #+caption: of ``exercises'' that include all the action predicates.
  #+name: generate-phi-space
  #+attr_latex: [htpb]
  #+begin_listing clojure
  #+begin_src clojure
(def do-all-the-things
  (concat
   curl-script
   [[300 :d-ex 40]
    [320 :d-ex 0]]
   (shift-script 280 (take 16 wiggle-script))))

(defn generate-phi-space []
  (let [experiences (atom [])]
    (run-world
     (apply-map
      worm-world
      (merge
       (worm-world-defaults)
       {:end-frame 700
        :motor-control
        (motor-control-program worm-muscle-labels do-all-the-things)
        :experiences experiences})))
    @experiences))
  #+end_src
  #+end_listing

  #+caption: Use longest thread and a phi-space generated from a short
  #+caption: exercise routine to interpret actions during free play.
  #+name: empathy-debug
  #+attr_latex: [htpb]
  #+begin_listing clojure
  #+begin_src clojure
(defn init []
  (def phi-space (generate-phi-space))
  (def phi-scan (gen-phi-scan phi-space)))

(defn empathy-demonstration []
  (let [proprio (atom ())]
    (fn
      [experiences text]
      (let [phi-indices (phi-scan (:proprioception (peek experiences)))]
        (swap! proprio (partial cons phi-indices))
        (let [exp-thread (longest-thread (take 300 @proprio))
              empathy (mapv phi-space (infer-nils exp-thread))]
          (println-repl (vector:last-n exp-thread 22))
          (cond
           (grand-circle? empathy) (.setText text "Grand Circle")
           (curled? empathy)       (.setText text "Curled")
           (wiggling? empathy)     (.setText text "Wiggling")
           (resting? empathy)      (.setText text "Resting")
           :else                   (.setText text "Unknown")))))))

(defn empathy-experiment [record]
  (.start (worm-world :experience-watch (debug-experience-phi)
                      :record record :worm worm*)))
  #+end_src
  #+end_listing

  The result of running =empathy-experiment= is that the system is
  generally able to interpret worm actions using the action-predicates
  on simulated sensory data just as well as with actual data. Figure
  \ref{empathy-debug-image} was generated using =empathy-experiment=:

  #+caption: From only proprioceptive data, =EMPATH= was able to infer
  #+caption: the complete sensory experience and classify four poses
  #+caption: (The last panel shows a composite image of \emph{wiggling},
  #+caption: a dynamic pose.)
  #+name: empathy-debug-image
  #+ATTR_LaTeX: :width 10cm :placement [H]
  [[./images/empathy-1.png]]

  One way to measure the performance of =EMPATH= is to compare the
  suitability of the imagined sense experience to trigger the same
  action predicates as the real sensory experience.

  #+caption: Determine how closely empathy approximates actual
  #+caption: sensory data.
  #+name: test-empathy-accuracy
  #+attr_latex: [htpb]
  #+begin_listing clojure
  #+begin_src clojure
(def worm-action-label
  (juxt grand-circle? curled? wiggling?))

(defn compare-empathy-with-baseline [matches]
  (let [proprio (atom ())]
    (fn
      [experiences text]
      (let [phi-indices (phi-scan (:proprioception (peek experiences)))]
        (swap! proprio (partial cons phi-indices))
        (let [exp-thread (longest-thread (take 300 @proprio))
              empathy (mapv phi-space (infer-nils exp-thread))
              experience-matches-empathy
              (= (worm-action-label experiences)
                 (worm-action-label empathy))]
          (println-repl experience-matches-empathy)
          (swap! matches #(conj % experience-matches-empathy)))))))

(defn accuracy [v]
  (float (/ (count (filter true? v)) (count v))))

(defn test-empathy-accuracy []
  (let [res (atom [])]
    (run-world
     (worm-world :experience-watch
                 (compare-empathy-with-baseline res)
                 :worm worm*))
    (accuracy @res)))
  #+end_src
  #+end_listing
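  The =accuracy= helper above is just the fraction of frames on which
  the empathy-derived labels agree with the baseline labels:

  #+begin_src clojure
;; hypothetical REPL session
(accuracy [true true false true])
;; => 0.75
  #+end_src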
  Running =test-empathy-accuracy= using the very short exercise
  program defined in listing \ref{generate-phi-space}, and then doing
  a similar pattern of activity manually, yields an accuracy of around
  73%. This is based on very limited worm experience. By training the
  worm for longer, the accuracy dramatically improves.

  #+caption: Program to generate \Phi-space using manual training.
  #+name: manual-phi-space
  #+attr_latex: [htpb]
  #+begin_listing clojure
  #+begin_src clojure
(defn init-interactive []
  (def phi-space
    (let [experiences (atom [])]
      (run-world
       (apply-map
        worm-world
        (merge
         (worm-world-defaults)
         {:experiences experiences})))
      @experiences))
  (def phi-scan (gen-phi-scan phi-space)))
  #+end_src
  #+end_listing

  After about 1 minute of manual training, I was able to achieve 95%
  accuracy on manual testing of the worm using =init-interactive= and
  =test-empathy-accuracy=. The majority of errors are near the
  boundaries of transitioning from one type of action to another.
  During these transitions the exact label for the action is more open
  to interpretation, and disagreement between empathy and experience
  is more excusable.

** Digression: bootstrapping touch using free exploration

  In the previous section I showed how to compute actions in terms of
  body-centered predicates which relied on the average touch
  activation of pre-defined regions of the worm's skin. What if,
  instead of receiving touch pre-grouped into the six faces of each
  worm segment, the true topology of the worm's skin was unknown?
  This is more similar to how a nerve fiber bundle might be arranged.
  While two fibers that are close in a nerve bundle /might/ correspond
  to two touch sensors that are close together on the skin, the
  process of taking a complicated surface and forcing it into
  essentially a circle requires some cuts and rearrangements.

  In this section I show how to automatically learn the skin-topology of
  a worm segment by free exploration. As the worm rolls around on the
  floor, large sections of its surface get activated. If the worm has
  stopped moving, then whatever region of skin that is touching the
  floor is probably an important region, and should be recorded.

  #+caption: Program to detect whether the worm is in a resting state
  #+caption: with one face touching the floor.
  #+name: pure-touch
  #+begin_listing clojure
  #+begin_src clojure
(def full-contact [(float 0.0) (float 0.1)])

(defn pure-touch?
  "This is worm-specific code to determine if a large region of touch
   sensors is either all on or all off."
  [[coords touch :as touch-data]]
  (= (set (map first touch)) (set full-contact)))
  #+end_src
  #+end_listing

  After collecting these important regions, there will be many nearly
  similar touch regions. While for some purposes the subtle
  differences between these regions will be important, for my
  purposes I collapse them into mostly non-overlapping sets using
  =remove-similar= in listing \ref{remove-similar}.

  #+caption: Program to take a list of sets of points and ``collapse''
  #+caption: them so that the remaining sets in the list are significantly
  #+caption: different from each other. Prefer smaller sets to larger ones.
  #+name: remove-similar
  #+begin_listing clojure
  #+begin_src clojure
(defn remove-similar
  [coll]
  (loop [result () coll (sort-by (comp - count) coll)]
    (if (empty? coll) result
        (let [[x & xs] coll
              c (count x)]
          (if (some
               (fn [other-set]
                 (let [oc (count other-set)]
                   (< (- (count (union other-set x)) c) (* oc 0.1))))
               xs)
            (recur result xs)
            (recur (cons x result) xs))))))
  #+end_src
  #+end_listing
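  For example, in a hypothetical REPL session (=union= here is
  =clojure.set/union=; the printed order of set elements may vary),
  the near-duplicate superset is dropped in favor of its smaller
  variant:

  #+begin_src clojure
;; hypothetical REPL session
(remove-similar [#{1 2 3} #{1 2 3 4} #{5 6}])
;; => (#{5 6} #{1 2 3}) ; the superset #{1 2 3 4} was dropped
  #+end_src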
  Actually running this simulation is easy given =CORTEX='s
  facilities.

  #+caption: Collect experiences while the worm moves around. Filter the touch
  #+caption: sensations by stable ones, collapse similar ones together,
  #+caption: and report the regions learned.
  #+name: learn-touch
  #+begin_listing clojure
  #+begin_src clojure
(defn learn-touch-regions []
  (let [experiences (atom [])
        world (apply-map
               worm-world
               (assoc (worm-segment-defaults)
                 :experiences experiences))]
    (run-world world)
    (->>
     @experiences
     (drop 175)
     ;; access the single segment's touch data
     (map (comp first :touch))
     ;; only deal with "pure" touch data to determine surfaces
     (filter pure-touch?)
     ;; associate coordinates with touch values
     (map (partial apply zipmap))
     ;; select those regions where contact is being made
     (map (partial group-by second))
     (map #(get % full-contact))
     (map (partial map first))
     ;; remove redundant/subset regions
     (map set)
     remove-similar)))

(defn learn-and-view-touch-regions []
  (map view-touch-region
       (learn-touch-regions)))
  #+end_src
  #+end_listing

  The only thing remaining to define is the particular motion the worm
  must take. I accomplish this with a simple motor control program.

  #+caption: Motor control program for making the worm roll on the ground.
  #+caption: This could also be replaced with random motion.
  #+name: worm-roll
  #+begin_listing clojure
  #+begin_src clojure
(defn touch-kinesthetics []
  ;; Each entry is [frame muscle-label activation-strength].
  [[170 :lift-1 40]
   [190 :lift-1 19]
   [206 :lift-1 0]

   [400 :lift-2 40]
   [410 :lift-2 0]

   [570 :lift-2 40]
   [590 :lift-2 21]
   [606 :lift-2 0]

   [800 :lift-1 30]
   [809 :lift-1 0]

   [900 :roll-2 40]
   [905 :roll-2 20]
   [910 :roll-2 0]

   [1000 :roll-2 40]
   [1005 :roll-2 20]
   [1010 :roll-2 0]

   [1100 :roll-2 40]
   [1105 :roll-2 20]
   [1110 :roll-2 0]
   ])
  #+end_src
  #+end_listing

  #+caption: The small worm rolls around on the floor, driven
  #+caption: by the motor control program in listing \ref{worm-roll}.
  #+name: worm-roll-view
  #+ATTR_LaTeX: :width 12cm
  [[./images/worm-roll.png]]

  #+caption: After completing its adventures, the worm now knows
  #+caption: how its touch sensors are arranged along its skin. These
  #+caption: are the regions that were deemed important by
  #+caption: =learn-touch-regions=. Note that the worm has discovered
  #+caption: that it has six sides.
  #+name: worm-touch-map
  #+ATTR_LaTeX: :width 12cm
  [[./images/touch-learn.png]]

  While simple, =learn-touch-regions= exploits regularities in both
  the worm's physiology and the worm's environment to correctly
  deduce that the worm has six sides. Note that =learn-touch-regions=
  would work just as well even if the worm's touch sense data were
  completely scrambled. The cross shape is just for convenience. This
  example justifies the use of pre-defined touch regions in =EMPATH=.

* Contributions

  I created =CORTEX=, a complete environment for creating simulated
  creatures. Creatures can use biologically inspired senses including
  touch, proprioception, hearing, vision, and muscle tension. Each
  sense has a uniform API that is well documented. =CORTEX= comes with
  multiple example creatures and a large test suite. You can create
  new creatures using blender, a free 3D modeling tool. I hope that
  =CORTEX= will prove useful for research ranging from distributed
  swarm creature simulation to further research in sensory
  integration.

# An anatomical joke:
# - Training
# - Skeletal imitation
# - Sensory fleshing-out
# - Classification
diff -r a86555b02916 -r 763d13f77e03 thesis/cover.tex
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/cover.tex Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,138 @@
% -*-latex-*-
%
% For questions, comments, concerns or complaints:
% thesis@mit.edu
%
%
% $Log: cover.tex,v $
% Revision 1.8  2008/05/13 15:02:15  jdreed
% Degree month is June, not May.  Added note about prevdegrees.
% Arthur Smith's title updated
%
% Revision 1.7  2001/02/08 18:53:16  boojum
% changed some \newpages to \cleardoublepages
%
% Revision 1.6  1999/10/21 14:49:31  boojum
% changed comment referring to documentstyle
%
% Revision 1.5  1999/10/21 14:39:04  boojum
% *** empty log message ***
%
% Revision 1.4  1997/04/18 17:54:10  othomas
% added page numbers on abstract and cover, and made 1 abstract
% page the default rather than 2.  (anne hunter tells me this
% is the new institute standard.)
%
% Revision 1.3  93/05/17 17:06:29  starflt
% Added acknowledgements section (suggested by tompalka)
%
% Revision 1.2  92/04/22 13:13:13  epeisach
% Fixes for 1991 course 6 requirements
% Phrase "and to grant others the right to do so" has been added to
% permission clause
% Second copy of abstract is not counted as separate pages so numbering works
% out
%
% Revision 1.1  92/04/22 13:08:20  epeisach

% NOTE:
% These templates make an effort to conform to the MIT Thesis specifications,
% however the specifications can change.  We recommend that you verify the
% layout of your title page with your thesis advisor and/or the MIT
% Libraries before printing your final copy.
\title{Solving Problems using Embodiment \& Empathy}
\author{Robert Louis M\raisebox{\depth}{\small \underline{\underline{c}}}Intyre}
%\author{Robert McIntyre}



% If you wish to list your previous degrees on the cover page, use the
% previous degrees command:
%       \prevdegrees{A.A., Harvard University (1985)}
% You can use the \\ command to list multiple previous degrees
%       \prevdegrees{B.S., University of California (1978) \\
%                    S.M., Massachusetts Institute of Technology (1981)}
\department{Department of Electrical Engineering and Computer Science}

% If the thesis is for two degrees simultaneously, list them both
% separated by \and like this:
% \degree{Doctor of Philosophy \and Master of Science}
\degree{Master of Engineering in Electrical Engineering and Computer
  Science}

% As of the 2007-08 academic year, valid degree months are September,
% February, or June.  The default is June.
\degreemonth{June}
\degreeyear{2014}
\thesisdate{May 23, 2014}

%% By default, the thesis will be copyrighted to MIT.  If you need to copyright
%% the thesis to yourself, just specify the `vi' documentclass option.  If for
%% some reason you want to exactly specify the copyright notice text, you can
%% use the \copyrightnoticetext command.
%\copyrightnoticetext{\copyright IBM, 1990.  Do not open till Xmas.}

% If there is more than one supervisor, use the \supervisor command
% once for each.
\supervisor{Patrick H. Winston}{Ford Professor of Artificial
  Intelligence and Computer Science}

% This is the department committee chairman, not the thesis committee
% chairman.  You should replace this with your Department's Committee
% Chairman.
\chairman{Prof. Albert R. Meyer}{Chairman, Masters of Engineering
  Thesis Committee}

% Make the titlepage based on the above information.  If you need
% something special and can't use the standard form, you can specify
% the exact text of the titlepage yourself.  Put it in a titlepage
% environment and leave blank lines where you want vertical space.
% The spaces will be adjusted to fill the entire page.
% The dotted lines for the signatures are made with the \signature command.
\maketitle

% The abstractpage environment sets up everything on the page except
% the text itself.  The title and other header material are put at the
% top of the page, and the supervisors are listed at the bottom.  A
% new page is begun both before and after.  Of course, an abstract may
% be more than one page itself.  If you need more control over the
% format of the page, you can use the abstract environment, which puts
% the word "Abstract" at the beginning and single spaces its text.

%% You can either \input (*not* \include) your abstract file, or you can put
%% the text of the abstract directly between the \begin{abstractpage} and
%% \end{abstractpage} commands.

% First copy: start a new page, and save the page number.
\cleardoublepage
% Uncomment the next line if you do NOT want a page number on your
% abstract and acknowledgments pages.
\pagestyle{empty}
\setcounter{savepage}{\thepage}
\begin{abstractpage}
\input{abstract}
\end{abstractpage}

% Additional copy: start a new page, and reset the page number.  This way,
% the second copy of the abstract is not counted as separate pages.
% Uncomment the next 6 lines if you need two copies of the abstract
% page.
% \setcounter{page}{\thesavepage}
% \begin{abstractpage}
% \input{abstract}
% \end{abstractpage}

%% \cleardoublepage

%% \section*{Acknowledgments}

%% This is the acknowledgements section.  You should replace this with your
%% own acknowledgements.

%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
% -*-latex-*-
diff -r a86555b02916 -r 763d13f77e03 thesis/garbage_cortex.org
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/garbage_cortex.org Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,100 @@
#+title: =CORTEX=
#+author: Robert McIntyre
#+email: rlm@mit.edu
#+description: Using embodied AI to facilitate Artificial Imagination.
#+keywords: AI, clojure, embodiment

* Artificial Imagination

  Imagine watching a video of someone skateboarding. When you watch
  the video, you can imagine yourself skateboarding, and your
  knowledge of the human body and its dynamics guides your
  interpretation of the scene. For example, even if the skateboarder
  is partially occluded, you can infer the positions of his arms and
  body from your own knowledge of how your body would be positioned if
  you were skateboarding. If the skateboarder suffers an accident, you
  wince in sympathy, imagining the pain your own body would experience
  if it were in the same situation. This empathy with other people
  guides our understanding of whatever they are doing because it is a
  powerful constraint on what is probable and possible. In order to
  make use of this powerful empathy constraint, I need a system that
  can generate and make sense of sensory data from the many different
  senses that humans possess. The two key properties of such a system
  are /embodiment/ and /imagination/.

** What is imagination?

  One kind of imagination is /sympathetic/ imagination: you imagine
  yourself in the position of something/someone you are
  observing. This type of imagination comes into play when you follow
  along visually when watching someone perform actions, or when you
  sympathetically grimace when someone hurts themselves. This type of
  imagination uses the constraints you have learned about your own
  body to highly constrain the possibilities in whatever you are
  seeing.
  It uses all your senses, including your senses of touch,
  proprioception, etc. Humans are flexible when it comes to "putting
  themselves in another's shoes," and can sympathetically understand
  not only other humans, but entities ranging from animals to cartoon
  characters to [[http://www.youtube.com/watch?v=0jz4HcwTQmU][single dots]] on a screen!

  #+caption: A cat drinking some water. Identifying this action is beyond the state of the art for computers.
  #+ATTR_LaTeX: :width 5cm
  [[./images/cat-drinking.jpg]]

#+begin_listing clojure
\caption{This is a basic test for the vision system. It only tests the vision-pipeline and does not deal with loading eyes from a blender file. The code creates two videos of the same rotating cube from different angles.}
#+name: test-1
#+begin_src clojure
(defn test-pipeline
  "Testing vision:
   Tests the vision system by creating two views of the same rotating
   object from different angles and displaying both of those views in
   JFrames.

   You should see a rotating cube, and two windows,
   each displaying a different view of the cube."
  ([] (test-pipeline false))
  ([record?]
   (let [candy
         (box 1 1 1 :physical? false :color ColorRGBA/Blue)]
     (world
      (doto (Node.)
        (.attachChild candy))
      {}
      (fn [world]
        (let [cam (.clone (.getCamera world))
              width (.getWidth cam)
              height (.getHeight cam)]
          (add-camera! world cam
                       (comp
                        (view-image
                         (if record?
                           (File. "/home/r/proj/cortex/render/vision/1")))
                        BufferedImage!))
          (add-camera! world
                       (doto (.clone cam)
                         (.setLocation (Vector3f. -10 0 0))
                         (.lookAt Vector3f/ZERO Vector3f/UNIT_Y))
                       (comp
                        (view-image
                         (if record?
                           (File. "/home/r/proj/cortex/render/vision/2")))
                        BufferedImage!))
          (let [timer (IsoTimer. 60)]
            (.setTimer world timer)
            (display-dilated-time world timer))
          ;; This is here to restore the main view
          ;; after the other views have completed processing
          (add-camera! world (.getCamera world) no-op)))
      (fn [world tpf]
        (.rotate candy (* tpf 0.2) 0 0))))))
#+end_src
#+end_listing

- This is test1 \cite{Tappert77}.
+ +\cite{Tappert77} +lol +\cite{Tappert77} \ No newline at end of file diff -r a86555b02916 -r 763d13f77e03 thesis/images/aurellem-gray.png Binary file thesis/images/aurellem-gray.png has changed diff -r a86555b02916 -r 763d13f77e03 thesis/images/basic-worm-view.png Binary file thesis/images/basic-worm-view.png has changed diff -r a86555b02916 -r 763d13f77e03 thesis/images/blender-worm.png Binary file thesis/images/blender-worm.png has changed diff -r a86555b02916 -r 763d13f77e03 thesis/images/cat-drinking.jpg Binary file thesis/images/cat-drinking.jpg has changed diff -r a86555b02916 -r 763d13f77e03 thesis/images/empathy-1.png Binary file thesis/images/empathy-1.png has changed diff -r a86555b02916 -r 763d13f77e03 thesis/images/fat-person-sitting-at-desk.jpg Binary file thesis/images/fat-person-sitting-at-desk.jpg has changed diff -r a86555b02916 -r 763d13f77e03 thesis/images/finger-UV.png Binary file thesis/images/finger-UV.png has changed diff -r a86555b02916 -r 763d13f77e03 thesis/images/full-hand.png Binary file thesis/images/full-hand.png has changed diff -r a86555b02916 -r 763d13f77e03 thesis/images/invisible-chair.png Binary file thesis/images/invisible-chair.png has changed diff -r a86555b02916 -r 763d13f77e03 thesis/images/touch-learn.png Binary file thesis/images/touch-learn.png has changed diff -r a86555b02916 -r 763d13f77e03 thesis/images/wall-push.png Binary file thesis/images/wall-push.png has changed diff -r a86555b02916 -r 763d13f77e03 thesis/images/worm-identify-init.png Binary file thesis/images/worm-identify-init.png has changed diff -r a86555b02916 -r 763d13f77e03 thesis/images/worm-intro-black.png Binary file thesis/images/worm-intro-black.png has changed diff -r a86555b02916 -r 763d13f77e03 thesis/images/worm-intro-rainbow.png Binary file thesis/images/worm-intro-rainbow.png has changed diff -r a86555b02916 -r 763d13f77e03 thesis/images/worm-intro-white.png Binary file thesis/images/worm-intro-white.png has changed diff -r a86555b02916 -r 763d13f77e03 thesis/images/worm-poses.png Binary file thesis/images/worm-poses.png has changed diff -r a86555b02916 -r 763d13f77e03 thesis/images/worm-roll.png Binary file thesis/images/worm-roll.png has changed diff -r a86555b02916 -r 763d13f77e03 thesis/images/worm-with-muscle.png Binary file thesis/images/worm-with-muscle.png has changed diff -r a86555b02916 -r 763d13f77e03 thesis/mitthesis.cls --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/thesis/mitthesis.cls Thu Mar 27 17:57:01 2014 -0400 @@ -0,0 +1,535 @@ +% $Log: mitthesis.cls,v $ +% Revision 1.9 2012/02/15 15:48:16 jdreed +% Tweak the "permission" statement per the Libraries' specs +% retrieved 15-Feb-2012 +% +% Revision 1.8 2009/07/27 14:23:50 boojum +% added changing draft mark +% +% Revision 1.6 2005/05/13 19:57:40 boojum +% added leftblank option +% +% Revision 1.5 2002/04/18 14:10:08 boojum +% changed doublespace into setspace for 2e compliance +% +% Revision 1.4 2001/02/08 18:57:13 boojum +% turned two newpages into cleardoublepages +% +% Revision 1.3 2000/08/14 14:53:29 boojum +% commented out footheight, which is relevant for drafthead +% +% Revision 1.2 1999/10/21 14:51:33 boojum +% changed references to documentstyle to documentclass in comments +% +% Revision 1.1 1999/10/21 14:39:31 boojum +% Initial revision +% +%Revision 1.7 1998/04/01 20:45:34 othomas +%removed offending phrase ", and to grant others the right to do so" from copyright notice. +% +%Revision 1.6 96/06/26 15:07:29 othomas +%safety checkin. 
+% +%Revision 1.5 93/06/07 15:38:50 starflt +%Altered 'vi' option copyright wording to comply with new Institute +%Archives requirements and MIT lawyers. +% +%Revision 1.4 92/08/19 16:51:06 lwvanels +%Updated Course 6 title page for new permissions. +% +%Revision 1.3 92/04/23 10:16:15 epeisach +%Fixed comment character in rcs file +% +%Revision 1.2 92/04/22 13:12:02 epeisach +%Fixes for 1991 course 6 requirements +%Phrase "and to grant others the right to do so" has been added to +%permission clause +%Second copy of abstract is not counted as separate pages so numbering works +%out +% +%Revision 1.1 90/05/04 11:45:53 lwvanels +%Initial revision + +% +% LaTeX format for theses at MIT +% Based on "Specifications for Thesis Preparation" + +% `vi' and `upcase' options by Krishna Sethuraman - krishna@athena.mit.edu +% Margins and heading types by Peter Nuth - nuth@ai.mit.edu +% Title and abstract page by Stephen Gildea - gildea@erl.mit.edu +% Look in this directory for example file mitthesis.doc +% Also for propcover.tex - Boilerplate for PHD proposal. + +% To use this style - say something like: +% for dull, boring thesis format: +% \documentclass[12pt]{mitthesis} +% \pagestyle{plain} +% OR for fast drafts: +% \documentclass[11pt,singlespace,draft]{mitthesis} +% \pagestyle{drafthead} +% OR for Tech Reports: +% \documentclass[12pt,twoside]{mitthesis} +% \pagestyle{headings} +% OR +% some other combination... +% +%%%% New options: +% +% Option `twoside': +% Good for producing Tech Reports. +% The default is single-sided printing, which is what M.I.T. wants on the +% thesis document itself. +% +% Option `singlespace': +% Good for drafts. +% Double-spaced theses are the default. +% That is what M.I.T. asks for in the formal specifications. +% +% Note that MIT does not REQUIRE all theses to be double-spaced anymore. +% Someone in the library system said that it's OK to be single-spaced. +% (Regardless of what the specs. say...) +% To get singlespacing in an area - Use the 'singlespace' environment. +% +% Option `draft': +% Puts `overfull' boxes at the end of lines that are too long. +% +% Pagestyle `drafthead': +% Puts the date and the label ``*DRAFT*'' in the footer. +% +%%%%%%%%%% +% +%%%% Parameters to initialize for boilerplate page: +% +% \title{Mixed Circular Cylindrical Shells} +% \author{J. Casey Salas} +% \prevdegrees{B.S., University of California (1978) \\ +% S.M., Massachusetts Institute of Technology (1981)} +% \department{Department of Electrical Engineering and Computer Science} +% \degree{Doctor of Philosophy} +%% If the thesis is for two degrees simultaneously, list them both +%% separated by \and like this: +% \degree{Doctor of Philosophy \and Master of Science} +% \degreemonth{February} +% \degreeyear{1987} +% \thesisdate{December 10, 1986} +%% If the thesis is copyright by the Institute, leave this line out and +%% the standard copyright line will be used instead. +% \copyrightnotice{J. Casey Salas, 1986} +%% If there is more than one supervisor, use the \supervisor command +%% once for each. +% \supervisor{John D. Galli}{Director, Sound Instrument Laboratory} +%% This is the department committee chairman, not the thesis committee chairman +% \chairman{Arthur C. Smith} +% {Chairman, Departmental Committee on Graduate Students} +%% Make the titlepage based on the above information. If you need +%% something special and can't use the standard form, you can specify +%% the exact text of the titlepage yourself. 
Put it in a titlepage +%% environment and leave blank lines where you want vertical space. +%% The spaces will be adjusted to fill the entire page. The dotted +%% lines for the signatures are made with the \signature command. +% +%% The abstractpage environment sets up everything on the page except +%% the text itself. The title and other header material are put at the +%% top of the page, and the supervisors are listed at the bottom. A +%% new page is begun both before and after. Of course, an abstract may +%% be more than one page itself. If you need more control over the +%% format of the page, you can use the abstract environment, which puts +%% the word "Abstract" at the beginning and single spaces its text. +% +% \begin{abstractpage} +% Abstract goes here. +% \end{abstractpage} +% +%%%%%%%% Newer additions +% +% documentclass options - +% vi For MIT course VI or VIII thesis - will copyright the thesis to +% you while giving MIT permission to copy and distribute it. +% upcase Will put much of the cover page in uppercase, as per the +% example on page 17 of the *Specifications for Thesis +% Preparation*, (revised 1989) +% Also added ``All Rights Reserved'' to default copyright notice. +% +%%%%%%%%%%% +% +% Documentclass options (vi and upcase) and changes to copyright notice +% Copyright (c) 1990, by Krishna Sethuraman. +% +% Pagestyle and header generation +% Copyright (c) 1987, 1988 by Peter Nuth +% +% Original version +% Copyright (c) 1987 by Stephen Gildea +% Permission to copy all or part of this work is granted, provided +% that the copies are not made or distributed for resale, and that +% the copyright notice and this notice are retained. +% +% THIS WORK IS PROVIDED ON AN "AS IS" BASIS. THE AUTHOR PROVIDES NO +% WARRANTY WHATSOEVER, EITHER EXPRESS OR IMPLIED, REGARDING THE WORK, +% INCLUDING WARRANTIES WITH RESPECT TO ITS MERCHANTABILITY OR FITNESS +% FOR ANY PARTICULAR PURPOSE. +%%%%%%%% + +\NeedsTeXFormat{LaTeX2e} +\ProvidesClass{mitthesis}[1999/10/20] + +\def\mystretch{1.5} % Double spacing hack +\DeclareOption{doublespace}{} % This is default + % So we do not read this style twice +\DeclareOption{singlespace}{ % If he explicitly wants single spacing + \typeout{Single spaced} + \def\mystretch{1}} + +%% `vi' and `upcase' document style options. Krishna Sethuraman (1990) +\newcount\vithesis +\DeclareOption{vi}{\typeout{Course VI/VIII thesis style.}\advance\vithesis by1} +\vithesis=0 + +\DeclareOption{upcase}{\typeout{Uppercase cover page.} + \gdef\choosecase#1{\uppercase\expandafter{#1}}} +\def\choosecase#1{#1} + +%% leftblank option by Kevin Fu +\newif\if@leftblank \@leftblankfalse + +\DeclareOption{leftblank}{\typeout{Intentionally Leaving Pages Blank} +\@leftblanktrue} + +% Thesis looks much like report +\DeclareOption*{\PassOptionsToClass{\CurrentOption}{report}} +\ProcessOptions +\LoadClass{report} + +% If the user wants single spacing, set baselinestretch=1. + +\usepackage{setspace} + +% Note - doublespace.sty has some float-related troubles in +% combination with graphics or color, and is not officially compliant +% with 2e. setspace is a replacement which is 2e-compliant. + +% Read the doublespace style that we got from Rochester: +%\input setdoublespace.sty + +\def\baselinestretch{\mystretch} % Double spacing hack + +%%%%%%% Set up margins and formatting params %%% + +% Margins. +% Note we want 1in top margin assuming no header line, so push header +% into 1in margin. +% Draft mode brings the header back down. 
+ +\setlength{\oddsidemargin}{0.25in} % 1.25in left margin +\setlength{\evensidemargin}{0.25in} % 1.25in left margin (even pages) +\setlength{\topmargin}{0.0in} % 1in top margin +\setlength{\textwidth}{6.0in} % 6.0in text - 1.25in rt margin +\setlength{\textheight}{9in} % Body ht for 1in margins +\addtolength{\topmargin}{-\headheight} % No header, so compensate +\addtolength{\topmargin}{-\headsep} % for header height and separation + +% The next two macros compensate page style for headers and footers +% We only need them in page styles that USE headers and footers. + % If we have a header, it must be 1in from top of page. +\def\pulldownheader{ % Shift header down 1in from top + \addtolength{\topmargin}{\headheight} + \addtolength{\topmargin}{\headsep} + \addtolength{\textheight}{-\headheight} + \addtolength{\textheight}{-\headsep} +} + % If we have a footer, put it 1in up from bottom +\def\pullupfooter{ % Shift footer up + \addtolength{\textheight}{-\footskip} +% \addtolength{\textheight}{-\footheight} %footheight doesn't +% exist in 2e +} + +%%%%%%% End of margins and formatting params %%% + +%%%%%%% Fix various header and footer problems %%% + +% Draft mark on the right side of left pages (outside) +% this mark is also the only one visible on single sided. +\newcommand{\draftrmark}{**DRAFT**} +% Draft mark on the left side of right pages (outside) +\newcommand{\draftlmark}{**DRAFT**} % + +% Macros to make changing the Draft easier +\newcommand{\drmark}[1]{\renewcommand{\draftrmark}{#1}} +\newcommand{\dlmark}[1]{\renewcommand{\draftlmark}{#1}} +\newcommand{\dmark}[1]{\drmark{#1}\dlmark{#1}} + +% Format for draft of thesis. Define our own PageStyle - +% Just like headings, but has foot lines with the date and warning + +\if@twoside % If two-sided printing. +\def\ps@drafthead{ + \let\@mkboth\markboth + \def\@oddfoot{\rm \today \hfil \sc \draftrmark} + \def\@evenfoot{\sc \draftlmark \hfil \rm \today } + \def\@evenhead{\rm \thepage\hfil \sl \leftmark} + \def\@oddhead{\hbox{}\sl \rightmark \hfil \rm\thepage} + \def\chaptermark##1{\markboth {\uppercase{\ifnum \c@secnumdepth >\m@ne + \@chapapp\ \thechapter. \ \fi ##1}}{}} + \def\sectionmark##1{\markright {\uppercase{\ifnum \c@secnumdepth >\z@ + \thesection. \ \fi ##1}}} + \pulldownheader % Bring header down from edge + \pullupfooter % Bring footer up +} +\else % If one-sided printing. +\def\ps@drafthead{ + \let\@mkboth\markboth + \def\@oddfoot{\rm \today \hfil \sc \draftrmark} + \def\@oddhead{\hbox{}\sl \rightmark \hfil \rm\thepage} + \def\chaptermark##1{\markright {\uppercase{\ifnum \c@secnumdepth >\m@ne + \@chapapp\ \thechapter. \ \fi ##1}}} + \pulldownheader % Bring header down from edge + \pullupfooter % Bring footer up +} +\fi + +% I redefine these formats that were defined in report.sty +% Definition of 'headings' page style +% Note the use of ##1 for parameter of \def\chaptermark inside the +% \def\ps@headings. +% + +\if@twoside % If two-sided printing. +\def\ps@headings{\let\@mkboth\markboth + \def\@oddfoot{} + \def\@evenfoot{} % No feet. + \def\@evenhead{\rm \thepage\hfil \sl \leftmark} % Left heading. + \def\@oddhead{\hbox{}\sl \rightmark \hfil \rm\thepage} % Right heading. + \def\chaptermark##1{\markboth {\uppercase{\ifnum \c@secnumdepth >\m@ne + \@chapapp\ \thechapter. \ \fi ##1}}{}} + \def\sectionmark##1{\markright {\uppercase{\ifnum \c@secnumdepth >\z@ + \thesection. \ \fi ##1}}} + \pulldownheader % Bring header down from edge +} +\else % If one-sided printing. 
\def\ps@headings{\let\@mkboth\markboth
 \def\@oddfoot{}
 \def\@evenfoot{}		%	No feet.
 \def\@oddhead{\hbox {}\sl \rightmark \hfil \rm\thepage}	% Heading.
 \def\chaptermark##1{\markright {\uppercase{\ifnum \c@secnumdepth >\m@ne
	\@chapapp\ \thechapter. \ \fi ##1}}}
 \pulldownheader				% Bring header down from edge
}
\fi

% Redefinition of 'myheadings' page style.
%
\def\ps@myheadings{\let\@mkboth\@gobbletwo
 \def\@oddfoot{}
 \def\@evenfoot{}
 \def\sectionmark##1{}
 \def\subsectionmark##1{}
 \def\@evenhead{\rm \thepage\hfil\sl\leftmark\hbox {}}	%
 \def\@oddhead{\hbox{}\sl\rightmark \hfil \rm\thepage}	%
 \pulldownheader				% Bring header down from edge
}

% Redefine '/chapter' to always start on an odd page.
% Should make no difference in singleside mode.
%
\if@leftblank
% Prints "THIS PAGE INTENTIONALLY LEFT BLANK" on blank pages.
\def\chapter{\clearpage\ifodd\c@page\else
   \hbox{}\par\vfill\centerline%
   {THIS PAGE INTENTIONALLY LEFT BLANK}%
   \vfill\newpage\fi
   \thispagestyle{plain}	% Page style of chapter page is 'plain'
   \global\@topnum\z@		% Prevents figures from going at top of page.
   \@afterindentfalse		% Suppresses indent in first paragraph.  Change
   \secdef\@chapter\@schapter}	% to \@afterindenttrue to have indent.
\else
\def\chapter{\cleardoublepage	% Starts new page.
   \thispagestyle{plain}	% Page style of chapter page is 'plain'
   \global\@topnum\z@		% Prevents figures from going at top of page.
   \@afterindentfalse		% Suppresses indent in first paragraph.  Change
   \secdef\@chapter\@schapter}	% to \@afterindenttrue to have indent.
\fi
% If using the report style, use - instead of . in the figure number.
\@ifundefined{thechapter}{}{\def\thefigure{\thechapter-\arabic{figure}}}


%%%%%%%%%  End of Style parameters %%%%

% Here's Gildea's Boilerplate Stuff.
% Copyright (c) 1987 by Stephen Gildea
% Permission to copy all or part of this work is granted, provided
% that the copies are not made or distributed for resale, and that
% the copyright notice and this notice are retained.

%% Define all the pieces that go on the title page and the abstract.

% \title and \author already exist

\def\prevdegrees#1{\gdef\@prevdegrees{#1}}
\def\@prevdegrees{}

\def\department#1{\gdef\@department{#1}}

% If you are getting two degrees, use \and between the names.
\def\degree#1{\setbox0\hbox{#1}	 %for side effect of setting \@degreeword
  \gdef\@degree{#1}}

% \and is used inside the \degree argument to separate two degrees
\def\and{\gdef\@degreeword{degrees} \par and \par}
\def\@degreeword{degree}

% The copyright notice stuff is a tremendous mess.
%
% \@copyrightnotice is used by \maketitle to actually put text on the
% page; it defaults to ``Copyright MIT 19xx.  All rights reserved.''
% \copyrightnoticetext takes an argument and defines \@copyrightnotice
% to that argument.  \copyrightnotice takes an argument, and calls
% \copyrightnoticetext with that argument, preceded by a copyright
% symbol and followed by ``All rights reserved.'' and the standard
% permission notice.
%
% If you use the 'vi' option, \copyrightnoticetext is used to set the
% copyright to ``(C) Your Name, Current Year in Roman Numerals.''
% followed by the permission notice.

% If there is no \copyrightnotice command, it is assumed that MIT
% holds the copyright.  This command adds the copyright symbol to the
% beginning, and puts the standard permission notice below.
%% ``All rights reserved'' added.
%% Krishna Sethuraman (1990)
\def\copyrightnotice#1{\copyrightnoticetext{\copyright\ #1.  All rights
reserved.\par\permission}}

% Occasionally you will need to exactly specify the text of the
% copyright notice.  The \copyrightnoticetext command is then useful.
\long\def\copyrightnoticetext#1{\gdef\@copyrightnotice{#1}}
\def\@copyrightnotice{\copyright\ \Mit\ \@degreeyear.  All rights reserved.}

%% `vi' documentclass option: Specifying this option automatically
%% copyrights the thesis to the author and gives MIT permission to copy and
%% distribute the document.  If you want, you can still specify
%% \copyrightnotice{stuff} to copyright to someone else, or
%% \copyrightnoticetext{stuff} to specify the exact text of the copyright
%% notice.
\ifodd\vithesis \copyrightnoticetext{\copyright\ \@author,
\uppercase\expandafter{\romannumeral\@degreeyear}.  All rights reserved.\par\permission}
%% or just
%%\@degreeyear}}
\typeout{Copyright given to author,
	permission to copy/distribute given to MIT.}
\else \typeout{Thesis document copyright MIT unless otherwise (manually) specified}
\fi

\def\thesisdate#1{\gdef\@thesisdate{#1}}

% typically just a month and year
\def\degreemonth#1{\gdef\@degreemonth{#1}}
\def\degreeyear#1{\gdef\@degreeyear{#1}}

% Usage: \supervisor{name}{title}
%        \chairman{name}{title}

% since there can be more than one supervisor,
% we build the appropriate boxes for the titlepage and
% the abstractpage as the user makes multiple calls
% to \supervisor
\newbox\@titlesupervisor 	\newbox\@abstractsupervisor

\def\supervisor#1#2{\setbox\@titlesupervisor\vbox
  {\unvbox\@titlesupervisor \vskip 10pt% plus 1fil minus 1fil
  \def\baselinestretch{1}\large
  \signature{Certified by}{#1 \\ #2 \\ Thesis Supervisor}}
  \setbox\@abstractsupervisor\vbox{\unvbox\@abstractsupervisor
  \vskip\baselineskip \def\baselinestretch{1}\@normalsize
  \par\noindent Thesis Supervisor: #1 \\ Title: #2}}

% department chairman, not thesis committee chairman
\def\chairman#1#2{\gdef\@chairmanname{#1}\gdef\@chairmantitle{#2}}

%% `upcase' documentclass option: \choosecase is defined either as a dummy or
%% a macro to change the (expanded) argument to uppercase.
\def\maketitle{\begin{titlepage}
\large
{\def\baselinestretch{1.2}\Large\bf \choosecase{\@title} \par}
by\par
{\Large  \choosecase{\@author}}
\par
\@prevdegrees
\par
\choosecase{Submitted to the} \choosecase{\@department} \\
\choosecase{in partial fulfillment of the requirements for the}
\choosecase{\@degreeword}
\choosecase{of}
\par
\choosecase{\@degree}
\par
at the
\par\MIT\par
\@degreemonth\ \@degreeyear
\par
\@copyrightnotice
\par
\vskip 3\baselineskip
\signature{Author}{\@department \\ \@thesisdate}
\par
\vfill
\unvbox\@titlesupervisor
\par
\vfill
\signature{Accepted by}{\@chairmanname \\ \@chairmantitle}
\vfill
\end{titlepage}}

% this environment should probably be called abstract,
% but we want people to also be able to get at the more
% basic abstract environment
\def\abstractpage{\cleardoublepage
\begin{center}{\large{\bf \@title} \\
by \\
\@author \\[\baselineskip]}
\par
\def\baselinestretch{1}\@normalsize
Submitted to the \@department \\
on \@thesisdate, in partial fulfillment of the \\
requirements for the \@degreeword\ of \\
\@degree
\end{center}
\par
\begin{abstract}}

%% Changed from \unvbox to \unvcopy for use with multiple copies of abstract
%% page.
+%% Krishna Sethuraman (1990) +\def\endabstractpage{\end{abstract}\noindent + \unvcopy\@abstractsupervisor \newpage} + +%% This counter is used to save the page number for the second copy of +%% the abstract. +\newcounter{savepage} + +% You can use the titlepage environment to do it all yourself if you +% don't want to use \maketitle. If the titlepage environment, the +% paragraph skip is infinitely stretchable, so if you leave a blank line +% between lines that you want space between, the space will stretch so +% that the title page fills up the entire page. +\def\titlepage{\cleardoublepage\centering + \thispagestyle{empty} + \parindent 0pt \parskip 10pt plus 1fil minus 1fil + \def\baselinestretch{1}\@normalsize\vbox to \vsize\bgroup\vbox to 9in\bgroup} +% The \kern0pt pushes any depth into the height. Thanks to Richard Stone. +\def\endtitlepage{\par\kern 0pt\egroup\vss\egroup\newpage} + +\def\MIT{MASSACHUSETTS INSTITUTE OF TECHNOLOGY} +\def\Mit{Massachusetts Institute of Technology} + +\def\permission{\par\noindent{\centering + The author hereby grants to MIT permission to reproduce and to + distribute publicly paper and electronic copies of this thesis + document in whole or in part in any medium now known or hereafter + created.}\par} + +\def\signature#1#2{\par\noindent#1\dotfill\null\\* + {\raggedleft #2\par}} + +\def\abstract{\subsection*{Abstract}\small\def\baselinestretch{1}\@normalsize} +\def\endabstract{\par} + diff -r a86555b02916 -r 763d13f77e03 thesis/org/first-chapter.html --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/thesis/org/first-chapter.html Thu Mar 27 17:57:01 2014 -0400 @@ -0,0 +1,455 @@ + + + + +<code>CORTEX</code> + + + + + + + + + + + + + + + +
diff -r a86555b02916 -r 763d13f77e03 thesis/org/first-chapter.html
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/org/first-chapter.html Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,455 @@
[455 lines added: the XHTML export of thesis/org/first-chapter.org. Its
prose duplicates the org source in the following hunk, so the file body
is omitted here.]
diff -r a86555b02916 -r 763d13f77e03 thesis/org/first-chapter.org
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/org/first-chapter.org Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,241 @@
+#+title: =CORTEX=
+#+author: Robert McIntyre
+#+email: rlm@mit.edu
+#+description: Using embodied AI to facilitate Artificial Imagination.
+#+keywords: AI, clojure, embodiment
+#+SETUPFILE: ../../aurellem/org/setup.org
+#+INCLUDE: ../../aurellem/org/level-0.org
+#+babel: :mkdirp yes :noweb yes :exports both
+#+OPTIONS: toc:nil num:nil
+
+* Artificial Imagination
+ Imagine watching a video of someone skateboarding. When you watch
+ the video, you can imagine yourself skateboarding, and your
+ knowledge of the human body and its dynamics guides your
+ interpretation of the scene. For example, even if the skateboarder
+ is partially occluded, you can infer the positions of his arms and
+ body from your own knowledge of how your body would be positioned if
+ you were skateboarding. If the skateboarder suffers an accident, you
+ wince in sympathy, imagining the pain your own body would experience
+ if it were in the same situation. This empathy with other people
+ guides our understanding of whatever they are doing because it is a
+ powerful constraint on what is probable and possible. In order to
+ make use of this powerful empathy constraint, I need a system that
+ can generate and make sense of sensory data from the many different
+ senses that humans possess. The two key properties of such a system
+ are /embodiment/ and /imagination/.
+
+** What is imagination?
+
+ One kind of imagination is /sympathetic/ imagination: you imagine
+ yourself in the position of something/someone you are
+ observing. This type of imagination comes into play when you follow
+ along visually when watching someone perform actions, or when you
+ sympathetically grimace when someone hurts themselves. This type of
+ imagination uses the constraints you have learned about your own
+ body to highly constrain the possibilities in whatever you are
+ seeing. It uses all of your senses, including your senses of touch,
+ proprioception, etc. Humans are flexible when it comes to "putting
+ themselves in another's shoes," and can sympathetically understand
+ not only other humans, but entities ranging from animals to cartoon
+ characters to [[http://www.youtube.com/watch?v=0jz4HcwTQmU][single dots]] on a screen!
+
+# and can infer intention from the actions of not only other humans,
+# but also animals, cartoon characters, and even abstract moving dots
+# on a screen!
+
+ Another kind of imagination is /predictive/ imagination: you
+ construct scenes in your mind that are not entirely related to
+ whatever you are observing, but instead are predictions of the
+ future or simply flights of fancy. You use this type of imagination
+ to plan out multi-step actions, or play out dangerous situations in
+ your mind so as to avoid messing them up in reality.
+
+ Of course, sympathetic and predictive imagination blend into each
+ other and are not completely separate concepts. One dimension along
+ which you can distinguish types of imagination is dependence on raw
+ sense data. Sympathetic imagination is highly constrained by your
+ senses, while predictive imagination can be more or less dependent
+ on your senses depending on how far ahead you imagine. Daydreaming
+ is an extreme form of predictive imagination that wanders through
+ different possibilities without concern for whether they are
+ related to whatever is happening in reality.
+
+ For this thesis, I will mostly focus on sympathetic imagination and
+ the constraint it provides for understanding sensory data.
+
+** What problems can imagination solve?
+
+ Consider a video of a cat drinking some water.
+
+ #+caption: A cat drinking some water. Identifying this action is beyond the state of the art for computers.
+ #+ATTR_LaTeX: width=5cm
+ [[../images/cat-drinking.jpg]]
+
+ It is currently impossible for any computer program to reliably
+ label such a video as "drinking". I think humans are able to label
+ such a video as "drinking" because they imagine /themselves/ as the
+ cat, and imagine putting their face up against a stream of water
+ and sticking out their tongue. In that imagined world, they can
+ feel the cool water hitting their tongue, and feel the water
+ entering their body, and are able to recognize that /feeling/ as
+ drinking. So, the label of the action is not really in the pixels
+ of the image, but is found clearly in a simulation inspired by
+ those pixels. An imaginative system, having been trained on
+ drinking and non-drinking examples and learning that the most
+ important component of drinking is the feeling of water sliding
+ down one's throat, would analyze a video of a cat drinking in the
+ following manner:
+
+ - Create a physical model of the video by putting a "fuzzy" model
+   of its own body in place of the cat. Also, create a simulation of
+   the stream of water.
+
+ - Play out this simulated scene and generate imagined sensory
+   experience. This will include relevant muscle contractions, a
+   close-up view of the stream from the cat's perspective, and most
+   importantly, the imagined feeling of water entering the mouth.
+
+ - The action is now easily identified as drinking by the sense of
+   taste alone. The other senses (such as the tongue moving in and
+   out) help to give plausibility to the simulated action. Note that
+   the sense of vision, while critical in creating the simulation,
+   is not critical for identifying the action from the simulation.
+
+ More generally, I expect imaginative systems to be particularly
+ good at identifying embodied actions in videos.
+
+* Cortex
+
+ The previous example involves liquids, the sense of taste, and
+ imagining oneself as a cat. For this thesis I constrain myself to
+ simpler, more easily digitizable senses and situations.
+
+ My system, =CORTEX=, performs imagination in two different simplified
+ worlds: /worm world/ and /stick-figure world/. In each of these
+ worlds, entities capable of imagination recognize actions by
+ simulating the experience from their own perspective, and then
+ recognizing the action from a database of examples.
+
+ In order to serve as a framework for experiments in imagination,
+ =CORTEX= requires simulated bodies, worlds, and senses like vision,
+ hearing, touch, proprioception, etc.
+
+** A Video Game Engine takes care of some of the groundwork
+
+ When it comes to simulation environments, the engines used to
+ create the worlds in video games offer top-notch physics and
+ graphics support. These engines also have limited support for
+ creating cameras and rendering 3D sound, which can be repurposed
+ for vision and hearing respectively. Physics collision detection
+ can be expanded to create a sense of touch.
+
+ jMonkeyEngine3 is one such engine for creating video games in
+ Java. It uses OpenGL to render to the screen and uses scene graphs
+ to avoid drawing things that do not appear on the screen. It has an
+ active community and several games in the pipeline. The engine was
+ not built to serve any particular game but is instead meant to be
+ used for any 3D game. I chose jMonkeyEngine3 because it had the
+ most features out of all the open projects I looked at, and because
+ I could then write my code in Clojure, an implementation of LISP
+ that runs on the JVM.
+
+** =CORTEX= Extends jMonkeyEngine3 to implement rich senses
+
+ Using the game-making primitives provided by jMonkeyEngine3, I have
+ constructed every major human sense except for smell and
+ taste. =CORTEX= also provides an interface for creating creatures
+ in Blender, a 3D modeling environment, and then "rigging" the
+ creatures with senses using 3D annotations in Blender. A creature
+ can have any number of senses, and there can be any number of
+ creatures in a simulation.
+
+ The senses available in =CORTEX= are:
+
+ - [[../../cortex/html/vision.html][Vision]]
+ - [[../../cortex/html/hearing.html][Hearing]]
+ - [[../../cortex/html/touch.html][Touch]]
+ - [[../../cortex/html/proprioception.html][Proprioception]]
+ - [[../../cortex/html/movement.html][Muscle Tension]]
+
+* A roadmap for =CORTEX= experiments
+
+** Worm World
+
+ Worms in =CORTEX= are segmented creatures which vary in length and
+ number of segments, and have the senses of vision, proprioception,
+ touch, and muscle tension.
+
+#+attr_html: width=755
+#+caption: This is the tactile-sensor-profile for the upper segment of a worm. It defines regions of high touch sensitivity (where there are many white pixels) and regions of low sensitivity (where white pixels are sparse).
+[[../images/finger-UV.png]]
+
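How such a profile image might translate into sensor density can be
sketched in a few lines of Clojure. This is illustrative only;
=white-pixel-ratio= is a hypothetical helper, not the actual =CORTEX=
touch implementation:

#+begin_src clojure
  ;; Hypothetical sketch: estimate touch-sensor density in a region of
  ;; a tactile-sensor-profile image by counting white pixels.
  (import '[javax.imageio ImageIO] '[java.io File])

  (defn white-pixel-ratio
    "Fraction of white pixels in the w-by-h rectangle at (x,y)."
    [^java.awt.image.BufferedImage img x y w h]
    (let [rgb-values (for [i (range x (+ x w))
                           j (range y (+ y h))]
                       ;; mask off the alpha channel
                       (bit-and 0xFFFFFF (.getRGB img i j)))]
      (/ (count (filter #(= 0xFFFFFF %) rgb-values))
         (max 1 (* w h)))))

  ;; e.g. (white-pixel-ratio
  ;;        (ImageIO/read (File. "touch-profile.png")) 0 0 64 64)
#+end_src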
+#+begin_html
+<div class="figure">
+  <!-- embedded YouTube video -->
+  <p>The worm responds to touch.</p>
+</div>
+#+end_html
+
+#+begin_html
+<div class="figure">
+  <!-- embedded YouTube video -->
+  <p>Proprioception in a worm. The proprioceptive readout is
+  in the upper left corner of the screen.</p>
+</div>
+#+end_html
+
+ A worm is trained in various actions such as sinusoidal movement,
+ curling, flailing, and spinning by directly playing motor
+ contractions while the worm "feels" the experience. These actions
+ are recorded both as vectors of muscle tension, touch, and
+ proprioceptive data, but also in higher level forms such as
+ frequencies of the various contractions and a symbolic name for the
+ action.
+
+ Then, the worm watches a video of another worm performing one of
+ the actions, and must judge which action was performed. Normally
+ this would be an extremely difficult problem, but the worm is able
+ to greatly diminish the search space through sympathetic
+ imagination. First, it creates an imagined copy of its body which
+ it observes from a third-person point of view. Then for each frame
+ of the video, it maneuvers its simulated body to be in registration
+ with the worm depicted in the video. The physical constraints
+ imposed by the physics simulation greatly decrease the number of
+ poses that have to be tried, making the search feasible. As the
+ imaginary worm moves, it generates imaginary muscle tension and
+ proprioceptive sensations. The worm determines the action not by
+ vision, but by matching the imagined proprioceptive data with
+ previous examples.
+
+ By using non-visual sensory data such as touch, the worms can also
+ answer body-related questions such as "did your head touch your
+ tail?" and "did worm A touch worm B?"
+
+ The proprioceptive information used for action identification is
+ body-centric, so only the registration step is dependent on point
+ of view, not the identification step. Registration is not specific
+ to any particular action. Thus, action identification can be
+ divided into a point-of-view dependent generic registration step,
+ and an action-specific step that is body-centered and invariant to
+ point of view.
+
+** Stick Figure World
+
+ This environment is similar to Worm World, except the creatures are
+ more complicated and the actions and questions more varied. It is
+ an experiment to see how far imagination can go in interpreting
+ actions.
diff -r a86555b02916 -r 763d13f77e03 thesis/org/roadmap.org
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/org/roadmap.org Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,220 @@
+In order for this to be a reasonable thesis that I can be proud of,
+what is the /minimum/ number of things I need to get done?
+
+
+* worm OR hand registration
+ - training from a few examples (2 to start out)
+ - aligning the body with the scene
+ - generating sensory data
+ - matching previous labeled examples using dot-products or some
+   other basic thing (a sketch follows below)
+ - showing that it works with different views
+
+* first draft
+ - draft of thesis without bibliography or formatting
+ - should have basic experiment and have full description of
+   framework with code
+ - review with Winston
+
+* final draft
+ - implement stretch goals from Winston if possible
+ - complete final formatting and submit
+
+* CORTEX
+ DEADLINE: <2014-05-09 Fri>
+ SHIT THAT'S IN 67 DAYS!!!
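The dot-product matching mentioned under /worm OR hand registration/
might be nothing more than cosine similarity between stored and
imagined proprioceptive signatures. A minimal sketch, assuming a purely
hypothetical flat-vector encoding (this is not the actual =CORTEX=
code):

#+begin_src clojure
  ;; Hypothetical sketch: pick the labeled example whose signature has
  ;; the highest cosine similarity with the imagined signature.
  (defn dot [a b] (reduce + (map * a b)))

  (defn cosine-similarity [a b]
    (/ (dot a b)
       (* (Math/sqrt (dot a a)) (Math/sqrt (dot b b)))))

  (defn classify-action [labeled-examples imagined]
    (key (apply max-key #(cosine-similarity (val %) imagined)
                labeled-examples)))

  ;; (classify-action {:curl [0.9 0.1 0.8], :wiggle [0.2 0.9 0.3]}
  ;;                  [0.85 0.15 0.75])
  ;; => :curl
#+end_src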
+
+** program simple feature matching code for the worm's segments
+
+Subgoals:
+*** DONE Get cortex working again, run tests, no jmonkeyengine updates
+    CLOSED: [2014-03-03 Mon 22:07] SCHEDULED: <2014-03-03 Mon>
+*** DONE get blender working again
+    CLOSED: [2014-03-03 Mon 22:43] SCHEDULED: <2014-03-03 Mon>
+*** DONE make sparse touch worm segment in blender
+    CLOSED: [2014-03-03 Mon 23:16] SCHEDULED: <2014-03-03 Mon>
+    CLOCK: [2014-03-03 Mon 22:44]--[2014-03-03 Mon 23:16] => 0:32
+*** DONE make multi-segment touch worm with touch sensors and display
+    CLOSED: [2014-03-03 Mon 23:54] SCHEDULED: <2014-03-03 Mon>
+
+*** DONE Make a worm wiggle and curl
+    CLOSED: [2014-03-04 Tue 23:03] SCHEDULED: <2014-03-04 Tue>
+
+
+** First draft
+
+Subgoals:
+*** Writeup new worm experiments.
+*** Triage implementation code and get it into chapter form.
+
+
+
+
+
+** for today
+
+- guided worm :: control the worm with the keyboard. Useful for
+  testing the body-centered recog scripts, and for
+  preparing a cool demo video.
+
+- body-centered recognition :: detect actions using hard-coded
+  body-centered scripts.
+
+- cool demo video of the worm being moved and recognizing things ::
+  will be a neat part of the thesis.
+
+- thesis export :: refactoring and organization of code so that it
+  spits out a thesis in addition to the web page.
+
+- video alignment :: analyze the frames of a video in order to align
+  the worm. Requires body-centered recognition. Can "cheat".
+
+- smoother actions :: use debugging controls to directly influence the
+  demo actions, and to generate recognition procedures.
+
+- degenerate video demonstration :: show the system recognizing a
+  curled worm from dead on. Crowning achievement of thesis.
+
+** Ordered from easiest to hardest
+
+Just report the positions of everything. I don't think that this
+necessarily shows anything useful.
+
+Worm-segment vision -- you initialize a view of the worm, but instead
+of pixels you use labels via ray tracing. Has the advantage of still
+allowing for visual occlusion, but reliably identifies the objects,
+even without rainbow coloring. You can code this as an image.
+
+Same as above, except just with worm/non-worm labels.
+
+Color code each worm segment and then recognize them using blob
+detectors. Then you solve for the perspective and the action
+simultaneously.
+
+The entire worm can be colored the same, high contrast color against a
+nearly black background.
+
+"Rooted" vision. You give the exact coordinates of ONE piece of the
+worm, but the algorithm figures out the rest.
+
+More rooted vision -- start off the entire worm with one position.
+
+The right way to do alignment is to use motion over multiple frames to
+snap individual pieces of the model into place, sharing and
+propagating the individual alignments over the whole model. We also
+want to limit the alignment search to just those actions we are
+prepared to identify. This might mean that I need some small "micro
+actions" such as the individual movements of the worm pieces.
+
+Get just the centers of each segment projected onto the imaging
+plane. (best so far).
+
+
+Repertoire of actions + video frames -->
+  directed multi-frame-search alg
+
+
+
+
+
+
+!! Could also have a bounding box around the worm provided by
+filtering the worm/non-worm render, and use bbbgs. As a bonus, I get
+to include bbbgs in my thesis! Could finally do that recursive thing
+where I make bounding boxes be those things that give results that
+give good bounding boxes. If I did this I could use a disruptive
+pattern on the worm.
+
+Re-imagining using default textures is very simple for this system,
+but hard for others.
+
+
+Want to demonstrate, at minimum, alignment of some model of the worm
+to the video, and a lookup of the action by simulated perception.
+
+note: the purple/white points texture is very beautiful, because
+when it moves slightly, the white dots look like they're
+twinkling. Would look even better if it was a darker purple. Also
+would look better more spread out.
+
+
+embed assumption of one frame of view, search by moving around in
+the simulated world.
+
+Allowed to limit search by setting limits to a hemisphere around the
+imagined worm! This limits scale also.
+
+
+
+
+
+!! Limited search with worm/non-worm rendering.
+How much inverse kinematics do we have to do?
+What about cached (allowed state-space) paths, derived from labeled
+training. You have to lead from one to another.
+
+What about initial state? Could start the input videos at a specific
+state, then just match that explicitly.
+
+!! The training doesn't have to be labeled -- you can just move around
+for a while!!
+
+!! Limited search with motion-based alignment.
+
+
+
+
+"play arounds" can establish a chain of linked sensoriums. Future
+matches must fall into one of the already experienced things, and once
+they do, it greatly limits the things that are possible in the future.
+
+
+frame differences help to detect muscle exertion.
+
+Can try to match on a few "representative" frames. Can also just have
+a few "bodies" in various states which we try to match.
+
+
+
+Paths through state-space have the exact same signature as
+simulation. BUT, these can be searched in parallel and don't interfere
+with each other.
+
+
+
+
+** Final stretch up to First Draft
+
+*** DONE complete debug control of worm
+    CLOSED: [2014-03-17 Mon 17:29] SCHEDULED: <2014-03-17 Mon>
+    CLOCK: [2014-03-17 Mon 14:01]--[2014-03-17 Mon 17:29] => 3:28
+*** DONE add phi-space output to debug control
+    CLOSED: [2014-03-17 Mon 17:42] SCHEDULED: <2014-03-17 Mon>
+    CLOCK: [2014-03-17 Mon 17:31]--[2014-03-17 Mon 17:42] => 0:11
+
+*** DONE complete automatic touch partitioning
+    CLOSED: [2014-03-18 Tue 21:43] SCHEDULED: <2014-03-18 Tue>
+*** DONE complete cyclic predicate
+    CLOSED: [2014-03-19 Wed 16:34] SCHEDULED: <2014-03-18 Tue>
+    CLOCK: [2014-03-19 Wed 13:16]--[2014-03-19 Wed 16:34] => 3:18
+*** DONE complete three phi-stream action predicates; test them with debug control
+    CLOSED: [2014-03-19 Wed 16:35] SCHEDULED: <2014-03-17 Mon>
+    CLOCK: [2014-03-18 Tue 18:36]--[2014-03-18 Tue 21:43] => 3:07
+    CLOCK: [2014-03-18 Tue 18:34]--[2014-03-18 Tue 18:36] => 0:02
+    CLOCK: [2014-03-17 Mon 19:19]--[2014-03-17 Mon 21:19] => 2:00
+*** DONE build an automatic "do all the things" sequence.
+    CLOSED: [2014-03-19 Wed 16:55] SCHEDULED: <2014-03-19 Wed>
+    CLOCK: [2014-03-19 Wed 16:53]--[2014-03-19 Wed 16:55] => 0:02
+*** DONE implement proprioception based movement lookup in phi-space
+    CLOSED: [2014-03-19 Wed 22:04] SCHEDULED: <2014-03-19 Wed>
+    CLOCK: [2014-03-19 Wed 19:32]--[2014-03-19 Wed 22:04] => 2:32
+*** DONE make proprioception reference phi-space indexes
+    CLOSED: [2014-03-19 Wed 22:47] SCHEDULED: <2014-03-19 Wed>
+    CLOCK: [2014-03-19 Wed 22:07]
+
+
+*** DONE create test videos, also record positions of worm segments
+    CLOSED: [2014-03-20 Thu 22:02] SCHEDULED: <2014-03-19 Wed>
+
+*** TODO Collect intro, worm-learn and cortex creation into draft thesis.
+
diff -r a86555b02916 -r 763d13f77e03 thesis/rlm-cortex-meng.tex
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/rlm-cortex-meng.tex Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,138 @@
+% -*- Mode:TeX -*-
+
+%% IMPORTANT: The official thesis specifications are available at:
+%%            http://libraries.mit.edu/archives/thesis-specs/
+%%
+%% Please verify your thesis' formatting and copyright
+%% assignment before submission. If you notice any
+%% discrepancies between these templates and the
+%% MIT Libraries' specs, please let us know
+%% by e-mailing thesis@mit.edu
+
+%% The documentclass options along with the pagestyle can be used to generate
+%% a technical report, a draft copy, or a regular thesis. You may need to
+%% re-specify the pagestyle after you \include cover.tex. For more
+%% information, see the first few lines of mitthesis.cls.
+
+%\documentclass[12pt,vi,twoside]{mitthesis}
+%%
+%% If you want your thesis copyright to you instead of MIT, use the
+%% ``vi'' option, as above.
+%%
+%\documentclass[12pt,twoside,leftblank]{mitthesis}
+%%
+%% If you want blank pages before new chapters to be labelled ``This
+%% Page Intentionally Left Blank'', use the ``leftblank'' option, as
+%% above.
+
+\documentclass[12pt,twoside,singlespace,vi]{mitthesis}
+%\documentclass[12pt,twoside,vi]{mitthesis}
+\usepackage[utf8]{inputenc}
+\usepackage[T1]{fontenc}
+\usepackage{fixltx2e}
+\usepackage{graphicx}
+\usepackage{longtable}
+\usepackage{float}
+\usepackage{wrapfig}
+\usepackage{rotating}
+\usepackage[normalem]{ulem}
+\usepackage{amsmath}
+\usepackage{textcomp}
+\usepackage{marvosym}
+\usepackage{wasysym}
+\usepackage{amssymb}
+\usepackage{hyperref}
+\usepackage{libertine}
+\usepackage{inconsolata}
+\usepackage{caption}
+
+%\usepackage{afterpage}
+
+%\afterpage{\clearpage} %
+
+\usepackage[backend=bibtex,style=alphabetic]{biblatex}
+\addbibresource{cortex.bib}
+
+\usepackage{xcolor}
+\definecolor{dark-red}{rgb}{0.4,0.15,0.15}
+\definecolor{dark-blue}{rgb}{0.15,0.15,0.4}
+\definecolor{medium-blue}{rgb}{0,0,0.5}
+\hypersetup{
+  colorlinks, linkcolor={dark-red},
+  citecolor={dark-blue}, urlcolor={medium-blue}
+}
+
+\newenvironment{code}{\captionsetup{type=listing}}{}
+
+\renewcommand{\thesection}{\arabic{section}}
+\renewcommand{\thefigure}{\arabic{figure}}
+
+%%%%% better source code display
+\usepackage{minted}
+
+%% dyl fonts
+
+% \usemintedstyle{friendly}
+% \usemintedstyle{perldoc}
+%\definecolor{bg}{rgb}{0.95,0.95,0.95}
+\definecolor{bg}{rgb}{0.625,0,0}
+\usemintedstyle{default}
+\newcommand{\why}[1]{\\ \par{\footnotesize #1}}
+%\setmonofont[Scale=0.9,BoldFont={Inconsolata Bold}]{Inconsolata}
+
+%\usepackage[gray]{xcolor}
+\newminted{clojure}{fontsize=\footnotesize}
+%\newminted{clojure}{fontsize=\footnotesize,bgcolor=bg}
+%\newminted{clojure}{fontsize=\scriptsize}
+
+%\usepackage{lgrind}
+\pagestyle{plain}
+
+
+%% % Alter some LaTeX defaults for better treatment of figures:
+%% % See p.105 of "TeX Unbound" for suggested values.
+%% % See pp. 199-200 of Lamport's "LaTeX" book for details.
+%% % General parameters, for ALL pages:
+%% \renewcommand{\topfraction}{0.9}   % max fraction of floats at top
+%% \renewcommand{\bottomfraction}{0.8} % max fraction of floats at bottom
+%% % Parameters for TEXT pages (not float pages):
+%% \setcounter{topnumber}{2}
+%% \setcounter{bottomnumber}{2}
+%% \setcounter{totalnumber}{4}     % 2 may work better
+%% \setcounter{dbltopnumber}{2}    % for 2-column pages
+%% \renewcommand{\dbltopfraction}{0.9} % fit big float above 2-col. text
+%% \renewcommand{\textfraction}{0.07} % allow minimal text w. figs
+%% % Parameters for FLOAT pages (not text pages):
+%% \renewcommand{\floatpagefraction}{0.7} % require fuller float pages
+%% % N.B.: floatpagefraction MUST be less than topfraction !!
+%% \renewcommand{\dblfloatpagefraction}{0.7} % require fuller float pages
+%% % remember to use [htp] or [htpb] for placement
+
+
+\begin{document}
+
+\include{cover}
+% Some departments (e.g. 5) require an additional signature page. See
+% signature.tex for more information and uncomment the following line if
+% applicable.
+% \include{signature}
+\pagestyle{plain}
+\tableofcontents
+%\newpage
+%\listoffigures
+%\newpage
+%\listoftables
+\include{cortex}
+\nocite{*}
+%\include{chap2}
+\appendix
+\begin{singlespace}
+%\bibliographystyle{agsm}
+%\bibliographystyle{apa}
+%\bibliographystyle{plainnat}
+\include{user-guide}
+\printbibliography
+\end{singlespace}
+\end{document}
diff -r a86555b02916 -r 763d13f77e03 thesis/to-frames.pl
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/to-frames.pl Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,15 @@
+#!/usr/bin/perl
+
+$movie_file = shift(@ARGV);
+
+# get the file name without its extension
+$movie_file =~ m/^([^.]+)\.[^.]+$/;
+$movie_name = $1;
+
+@mkdir_command = ("mkdir", "-vp", $movie_name);
+@ffmpeg_command = ("ffmpeg", "-i", $movie_file, $movie_name."/%07d.png");
+
+print "@mkdir_command\n";
+system(@mkdir_command);
+print "@ffmpeg_command\n";
+system(@ffmpeg_command);
\ No newline at end of file
diff -r a86555b02916 -r 763d13f77e03 thesis/user-guide.org
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/user-guide.org Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,6 @@
+* Appendix: =CORTEX= User Guide
+
+  For future students who would like to use =CORTEX= in their own
+  projects.
+
+
diff -r a86555b02916 -r 763d13f77e03 thesis/weave-thesis.sh
--- /dev/null Thu Jan 01 00:00:00 1970 +0000
+++ b/thesis/weave-thesis.sh Thu Mar 27 17:57:01 2014 -0400
@@ -0,0 +1,20 @@
+#!/bin/sh
+
+emacs \
+-l /home/r/config/emacs/clojure-init.el \
+-l /home/r/config/emacs/org-init.el \
+-l /home/r/config/emacs/thesis-color.el \
+--batch \
+--eval "
+(progn
+  (find-file \"cortex.org\")
+  (org-latex-export-to-latex nil nil nil t nil) \
+  (find-file \"user-guide.org\")
+  (org-latex-export-to-latex nil nil nil t nil) \
+  (find-file \"abstract.org\")
+  (org-latex-export-to-latex nil nil nil t nil))" \
+\
+2>&1
+
+rm -f cortex.tex~
+rm -f abstract.tex~
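For reference, a plausible end-to-end build using the files added in
this patch might look like the following. The exact invocations are
assumptions rather than a documented build recipe (in particular,
minted requires -shell-escape, and biblatex's bibtex backend needs the
usual latex/bibtex/latex/latex cycle); the demo video name is
hypothetical:

    # split a demo video into numbered frames
    # (creates worm-demo/0000001.png, 0000002.png, ...)
    perl to-frames.pl worm-demo.mp4

    # export cortex.org, user-guide.org, and abstract.org to LaTeX
    sh weave-thesis.sh

    # compile the thesis
    pdflatex -shell-escape rlm-cortex-meng.tex
    bibtex rlm-cortex-meng
    pdflatex -shell-escape rlm-cortex-meng.tex
    pdflatex -shell-escape rlm-cortex-meng.tex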