changeset 353:7239aee7267f

merge.
author Robert McIntyre <rlm@mit.edu>
date Tue, 05 Mar 2013 18:55:21 +0000
parents d9128eb5f42e (diff) eb7c94a03188 (current diff)
children 8d08646eaf99
files
diffstat 20 files changed, 579 insertions(+), 219 deletions(-)
     1.1 --- a/.hgignore	Tue Feb 26 16:31:29 2013 +0000
     1.2 +++ b/.hgignore	Tue Mar 05 18:55:21 2013 +0000
     1.3 @@ -10,6 +10,9 @@
     1.4  libbulletjme64.so
     1.5  liblwjgl64.so
     1.6  libopenal64.so
     1.7 +liblwjgl.so
     1.8 +libopenal.so
     1.9 +libbulletjme.so
    1.10  
    1.11  
    1.12  syntax: regexp
     2.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
     2.2 +++ b/MIT-media-projects.org	Tue Mar 05 18:55:21 2013 +0000
     2.3 @@ -0,0 +1,24 @@
     2.4 +*Machine Learning and Pattern Recognition with Multiple
     2.5 +Modalities Hyungil Ahn and Rosalind W. Picard
     2.6 +
     2.7 +This project develops new theory and algorithms to enable
     2.8 +computers to make rapid and accurate inferences from
     2.9 +multiple modes of data, such as determining a person's
    2.10 +affective state from multiple sensors--video, mouse behavior,
    2.11 +chair pressure patterns, typed selections, or
    2.12 +physiology. Recent efforts focus on understanding the level
    2.13 +of a person's attention, useful for things such as
    2.14 +determining when to interrupt. Our approach is Bayesian:
    2.15 +formulating probabilistic models on the basis of domain
    2.16 +knowledge and training data, and then performing inference
    2.17 +according to the rules of probability theory. This type of
    2.18 +sensor fusion work is especially challenging due to problems
    2.19 +of sensor channel drop-out, different kinds of noise in
    2.20 +different channels, dependence between channels, scarce and
    2.21 +sometimes inaccurate labels, and patterns to detect that are
    2.22 +inherently time-varying. We have constructed a variety of
    2.23 +new algorithms for solving these problems and demonstrated
    2.24 +their performance gains over other state-of-the-art methods.
    2.25 +
    2.26 +http://affect.media.mit.edu/projectpages/multimodal/
    2.27 +
     3.1 Binary file assets/Models/joint/basic-muscle.png has changed
     4.1 Binary file assets/Models/joint/joint.blend has changed
     5.1 Binary file assets/Models/joint/retina.png has changed
     6.1 Binary file assets/Models/joint/segment-layout.png has changed
     7.1 Binary file assets/Models/joint/segment-layout.xcf has changed
     8.1 --- a/org/body.org	Tue Feb 26 16:31:29 2013 +0000
     8.2 +++ b/org/body.org	Tue Mar 05 18:55:21 2013 +0000
     8.3 @@ -579,11 +579,14 @@
     8.4  (ns cortex.test.body
     8.5    (:use (cortex world util body))
     8.6    (:import
     8.7 -   (com.aurellem.capture Capture RatchetTimer)
     8.8 +   (com.aurellem.capture Capture RatchetTimer IsoTimer)
     8.9     (com.jme3.math Quaternion Vector3f ColorRGBA)
    8.10     java.io.File))
    8.11  #+end_src
    8.12  
    8.13 +#+results: test-header
    8.14 +: java.io.File
    8.15 +
    8.16  * Source 
    8.17  - [[../src/cortex/body.clj][cortex.body]]
    8.18  - [[../src/cortex/test/body.clj][cortex.test.body]]
     9.1 --- a/org/hearing.org	Tue Feb 26 16:31:29 2013 +0000
     9.2 +++ b/org/hearing.org	Tue Mar 05 18:55:21 2013 +0000
     9.3 @@ -5,7 +5,7 @@
     9.4  #+keywords: simulated hearing, openal, clojure, jMonkeyEngine3, LWJGL, AI
     9.5  #+SETUPFILE: ../../aurellem/org/setup.org
     9.6  #+INCLUDE: ../../aurellem/org/level-0.org
     9.7 -#+BABEL: :exports both :noweb yes :cache no :mkdirp yes
     9.8 +
     9.9  
    9.10  * Hearing
    9.11  
    9.12 @@ -976,7 +976,7 @@
    9.13  </div>
    9.14  #+end_html
    9.15  
    9.16 -#+include "../../jmeCapture/src/com/aurellem/capture/examples/Advanced.java" src java  
    9.17 +#+include: "../../jmeCapture/src/com/aurellem/capture/examples/Advanced.java" src java  
    9.18  
    9.19  Here is a small clojure program to drive the java program and make it
    9.20  available as part of my test suite.
    9.21 @@ -1027,8 +1027,8 @@
    9.22     hearing. When you play a sound, the bar should display a waveform.
    9.23  
    9.24     Keys:
    9.25 -   <enter> : play sound"
    9.26 -
    9.27 +   <enter> : play sound
    9.28 +       l   : play hymn"
    9.29    ([] (test-worm-hearing false))
    9.30    ([record?] 
    9.31       (let [the-worm (doto (worm) (body!))
    9.32 @@ -1051,14 +1051,17 @@
    9.33                    (if value (.play hymn)))})
    9.34          (fn [world]
    9.35            (light-up-everything world)
    9.36 +          (let [timer (IsoTimer. 60)]
    9.37 +            (.setTimer world timer)
    9.38 +            (display-dilated-time world timer))
    9.39            (if record?
    9.40              (do 
    9.41                (com.aurellem.capture.Capture/captureVideo
    9.42                 world
    9.43 -               (File."/home/r/proj/cortex/render/worm-audio/frames"))
    9.44 +               (File. "/home/r/proj/cortex/render/worm-audio/frames"))
    9.45                (com.aurellem.capture.Capture/captureAudio
    9.46                 world
    9.47 -               (File."/home/r/proj/cortex/render/worm-audio/audio.wav")))))
    9.48 +               (File. "/home/r/proj/cortex/render/worm-audio/audio.wav")))))
    9.49          
    9.50          (fn [world tpf]
    9.51            (hearing-display
    9.52 @@ -1067,6 +1070,9 @@
    9.53               (File. "/home/r/proj/cortex/render/worm-audio/hearing-data"))))))))
    9.54  #+end_src
    9.55  
    9.56 +#+results: test-hearing-2
    9.57 +: #'cortex.test.hearing/test-worm-hearing
    9.58 +
    9.59  In this test, I load the worm with its newly formed ear and let it
    9.60  hear sounds. The sound the worm is hearing is localized to the origin
    9.61  of the world, and you can see that as the worm moves farther away from
    9.62 @@ -1170,9 +1176,13 @@
    9.63    (:import java.io.File)
    9.64    (:import com.jme3.scene.Node
    9.65  	   com.jme3.system.AppSettings
    9.66 -           com.jme3.math.Vector3f))
    9.67 +           com.jme3.math.Vector3f)
    9.68 +  (:import (com.aurellem.capture Capture IsoTimer RatchetTimer)))
    9.69  #+end_src
    9.70  
    9.71 +#+results: test-header
    9.72 +: com.aurellem.capture.RatchetTimer
    9.73 +
    9.74  * Source Listing
    9.75    - [[../src/cortex/hearing.clj][cortex.hearing]]
    9.76    - [[../src/cortex/test/hearing.clj][cortex.test.hearing]]
    10.1 --- a/org/ideas.org	Tue Feb 26 16:31:29 2013 +0000
    10.2 +++ b/org/ideas.org	Tue Mar 05 18:55:21 2013 +0000
    10.3 @@ -1,3 +1,5 @@
    10.4 +
    10.5 +
    10.6  * Brainstorming different sensors and effectors.
    10.7  
    10.8  Every sense that we have should have an effector that changes what
    11.1 --- a/org/intro.org	Tue Feb 26 16:31:29 2013 +0000
    11.2 +++ b/org/intro.org	Tue Mar 05 18:55:21 2013 +0000
    11.3 @@ -8,170 +8,193 @@
    11.4  #+babel: :mkdirp yes :noweb yes
    11.5  
    11.6  * Background
    11.7 -Artificial Intelligence has tried and failed for more than half a
    11.8 -century to produce programs as flexible, creative, and "intelligent"
    11.9 -as the human mind itself. Clearly, we are still missing some important
   11.10 -ideas concerning intelligent programs or we would have strong AI
   11.11 -already. What idea could be missing?
   11.12 +
   11.13 +Artificial Intelligence has tried and failed for more than
   11.14 +half a century to produce programs as flexible, creative,
   11.15 +and "intelligent" as the human mind itself. Clearly, we are
   11.16 +still missing some important ideas concerning intelligent
   11.17 +programs or we would have strong AI already. What idea could
   11.18 +be missing?
   11.19  
   11.20  When Turing first proposed his famous "Turing Test" in the
   11.21 -groundbreaking paper [[../sources/turing.pdf][/Computing Machines and Intelligence/]], he gave
   11.22 -little importance to how a computer program might interact with the
   11.23 -world:
   11.24 +groundbreaking paper [[../sources/turing.pdf][/Computing Machines and Intelligence/]],
   11.25 +he gave little importance to how a computer program might
   11.26 +interact with the world:
   11.27  
   11.28  #+BEGIN_QUOTE
   11.29 -\ldquo{}We need not be too concerned about the legs, eyes, etc. The example of
   11.30 -Miss Helen Keller shows that education can take place provided that
   11.31 -communication in both directions between teacher and pupil can take
   11.32 -place by some means or other.\rdquo{}
   11.33 +\ldquo{}We need not be too concerned about the legs, eyes,
   11.34 +etc. The example of Miss Helen Keller shows that education
   11.35 +can take place provided that communication in both
   11.36 +directions between teacher and pupil can take place by some
   11.37 +means or other.\rdquo{}
   11.38  #+END_QUOTE
   11.39  
   11.40 -And from the example of Hellen Keller he went on to assume that the
   11.41 -only thing a fledgling AI program could need by way of communication
   11.42 -is a teletypewriter. But Hellen Keller did possess vision and hearing
   11.43 -for the first few months of her life, and her tactile sense was far
   11.44 -more rich than any text-stream could hope to achieve. She possessed a
   11.45 -body she could move freely, and had continual access to the real world
   11.46 -to learn from her actions.
    11.47 +And from the example of Helen Keller he went on to assume
   11.48 +that the only thing a fledgling AI program could need by way
    11.49 +of communication is a teletypewriter. But Helen Keller did
   11.50 +possess vision and hearing for the first few months of her
   11.51 +life, and her tactile sense was far more rich than any
   11.52 +text-stream could hope to achieve. She possessed a body she
   11.53 +could move freely, and had continual access to the real
   11.54 +world to learn from her actions.
   11.55  
   11.56 -I believe that our programs are suffering from too little sensory
   11.57 -input to become really intelligent. Imagine for a moment that you
   11.58 -lived in a world completely cut off form all sensory stimulation. You
   11.59 -have no eyes to see, no ears to hear, no mouth to speak. No body, no
   11.60 -taste, no feeling whatsoever. The only sense you get at all is a
   11.61 -single point of light, flickering on and off in the void. If this was
   11.62 -your life from birth, you would never learn anything, and could never
   11.63 -become intelligent. Actual humans placed in sensory deprivation
   11.64 -chambers experience hallucinations and can begin to loose their sense
   11.65 -of reality. Most of the time, the programs we write are in exactly
   11.66 -this situation. They do not interface with cameras and microphones,
   11.67 -and they do not control a real or simulated body or interact with any
   11.68 -sort of world.
   11.69 +I believe that our programs are suffering from too little
   11.70 +sensory input to become really intelligent. Imagine for a
    11.71 +moment that you lived in a world completely cut off from all
   11.72 +sensory stimulation. You have no eyes to see, no ears to
   11.73 +hear, no mouth to speak. No body, no taste, no feeling
   11.74 +whatsoever. The only sense you get at all is a single point
   11.75 +of light, flickering on and off in the void. If this was
   11.76 +your life from birth, you would never learn anything, and
   11.77 +could never become intelligent. Actual humans placed in
   11.78 +sensory deprivation chambers experience hallucinations and
    11.79 +can begin to lose their sense of reality. Most of the time,
   11.80 +the programs we write are in exactly this situation. They do
   11.81 +not interface with cameras and microphones, and they do not
   11.82 +control a real or simulated body or interact with any sort
   11.83 +of world.
   11.84  
   11.85  * Simulation vs. Reality
   11.86 +
    11.87  I want to demonstrate that multiple senses are what enable
   11.88 -intelligence. There are two ways of playing around with senses and
   11.89 -computer programs:
   11.90 -
   11.91 +intelligence. There are two ways of playing around with
   11.92 +senses and computer programs:
   11.93  
   11.94  ** Simulation
   11.95 -The first is to go entirely with simulation: virtual world, virtual
   11.96 -character, virtual senses. The advantages are that when everything is
   11.97 -a simulation, experiments in that simulation are absolutely
   11.98 -reproducible. It's also easier to change the character and world to
   11.99 -explore new situations and different sensory combinations.
  11.100  
  11.101 -If the world is to be simulated on a computer, then not only do you
  11.102 -have to worry about whether the character's senses are rich enough to
  11.103 -learn from the world, but whether the world itself is rendered with
  11.104 -enough detail and realism to give enough working material to the
  11.105 -character's senses. To name just a few difficulties facing modern
  11.106 -physics simulators: destructibility of the environment, simulation of
  11.107 -water/other fluids, large areas, nonrigid bodies, lots of objects,
  11.108 -smoke. I don't know of any computer simulation that would allow a
  11.109 -character to take a rock and grind it into fine dust, then use that
  11.110 -dust to make a clay sculpture, at least not without spending years
  11.111 -calculating the interactions of every single small grain of
  11.112 -dust. Maybe a simulated world with today's limitations doesn't provide
  11.113 +The first is to go entirely with simulation: virtual world,
  11.114 +virtual character, virtual senses. The advantages are that
  11.115 +when everything is a simulation, experiments in that
  11.116 +simulation are absolutely reproducible. It's also easier to
  11.117 +change the character and world to explore new situations and
  11.118 +different sensory combinations.
  11.119 +
  11.120 +If the world is to be simulated on a computer, then not only
  11.121 +do you have to worry about whether the character's senses
  11.122 +are rich enough to learn from the world, but whether the
  11.123 +world itself is rendered with enough detail and realism to
  11.124 +give enough working material to the character's senses. To
  11.125 +name just a few difficulties facing modern physics
  11.126 +simulators: destructibility of the environment, simulation
  11.127 +of water/other fluids, large areas, nonrigid bodies, lots of
  11.128 +objects, smoke. I don't know of any computer simulation that
  11.129 +would allow a character to take a rock and grind it into
  11.130 +fine dust, then use that dust to make a clay sculpture, at
  11.131 +least not without spending years calculating the
  11.132 +interactions of every single small grain of dust. Maybe a
  11.133 +simulated world with today's limitations doesn't provide
  11.134  enough richness for real intelligence to evolve.
  11.135  
  11.136  ** Reality
  11.137  
  11.138 -The other approach for playing with senses is to hook your software up
  11.139 -to real cameras, microphones, robots, etc., and let it loose in the
  11.140 -real world. This has the advantage of eliminating concerns about
  11.141 -simulating the world at the expense of increasing the complexity of
  11.142 -implementing the senses. Instead of just grabbing the current rendered
  11.143 -frame for processing, you have to use an actual camera with real
  11.144 -lenses and interact with photons to get an image. It is much harder to
  11.145 -change the character, which is now partly a physical robot of some
  11.146 -sort, since doing so involves changing things around in the real world
  11.147 -instead of modifying lines of code. While the real world is very rich
  11.148 -and definitely provides enough stimulation for intelligence to develop
  11.149 -as evidenced by our own existence, it is also uncontrollable in the
  11.150 -sense that a particular situation cannot be recreated perfectly or
  11.151 -saved for later use. It is harder to conduct science because it is
  11.152 -harder to repeat an experiment. The worst thing about using the real
  11.153 -world instead of a simulation is the matter of time. Instead of
  11.154 -simulated time you get the constant and unstoppable flow of real
  11.155 -time. This severely limits the sorts of software you can use to
  11.156 -program the AI because all sense inputs must be handled in real
  11.157 -time. Complicated ideas may have to be implemented in hardware or may
  11.158 -simply be impossible given the current speed of our
  11.159 -processors. Contrast this with a simulation, in which the flow of time
  11.160 -in the simulated world can be slowed down to accommodate the
  11.161 -limitations of the character's programming. In terms of cost, doing
  11.162 -everything in software is far cheaper than building custom real-time
  11.163 +The other approach for playing with senses is to hook your
  11.164 +software up to real cameras, microphones, robots, etc., and
  11.165 +let it loose in the real world. This has the advantage of
  11.166 +eliminating concerns about simulating the world at the
  11.167 +expense of increasing the complexity of implementing the
  11.168 +senses. Instead of just grabbing the current rendered frame
  11.169 +for processing, you have to use an actual camera with real
  11.170 +lenses and interact with photons to get an image. It is much
  11.171 +harder to change the character, which is now partly a
  11.172 +physical robot of some sort, since doing so involves
  11.173 +changing things around in the real world instead of
  11.174 +modifying lines of code. While the real world is very rich
  11.175 +and definitely provides enough stimulation for intelligence
  11.176 +to develop as evidenced by our own existence, it is also
  11.177 +uncontrollable in the sense that a particular situation
  11.178 +cannot be recreated perfectly or saved for later use. It is
  11.179 +harder to conduct science because it is harder to repeat an
  11.180 +experiment. The worst thing about using the real world
  11.181 +instead of a simulation is the matter of time. Instead of
  11.182 +simulated time you get the constant and unstoppable flow of
  11.183 +real time. This severely limits the sorts of software you
  11.184 +can use to program the AI because all sense inputs must be
  11.185 +handled in real time. Complicated ideas may have to be
  11.186 +implemented in hardware or may simply be impossible given
  11.187 +the current speed of our processors. Contrast this with a
  11.188 +simulation, in which the flow of time in the simulated world
  11.189 +can be slowed down to accommodate the limitations of the
  11.190 +character's programming. In terms of cost, doing everything
  11.191 +in software is far cheaper than building custom real-time
  11.192  hardware. All you need is a laptop and some patience.
  11.193  
  11.194  * Choose a Simulation Engine
  11.195  
  11.196 -Mainly because of issues with controlling the flow of time, I chose to
  11.197 -simulate both the world and the character. I set out to make a world
  11.198 -in which I could embed a character with multiple senses. My main goal
  11.199 -is to make an environment where I can perform further experiments in
  11.200 -simulated senses.
  11.201 +Mainly because of issues with controlling the flow of time,
  11.202 +I chose to simulate both the world and the character. I set
  11.203 +out to make a world in which I could embed a character with
  11.204 +multiple senses. My main goal is to make an environment
  11.205 +where I can perform further experiments in simulated senses.
  11.206  
  11.207 -I examined many different 3D environments to try and find something I
  11.208 -would use as the base for my simulation; eventually the choice came
  11.209 -down to three engines: the Quake II engine, the Source Engine, and
  11.210 -jMonkeyEngine.
  11.211 +I examined many different 3D environments to try and find
  11.212 +something I would use as the base for my simulation;
  11.213 +eventually the choice came down to three engines: the Quake
  11.214 +II engine, the Source Engine, and jMonkeyEngine.
  11.215  
  11.216  ** [[http://www.idsoftware.com][Quake II]]/[[http://www.bytonic.de/html/jake2.html][Jake2]]
  11.217  
  11.218 -I spent a bit more than a month working with the Quake II Engine from
  11.219 -ID software to see if I could use it for my purposes. All the source
  11.220 -code was released by ID software into the Public Domain several years
  11.221 -ago, and as a result it has been ported and modified for many
  11.222 -different reasons. This engine was famous for its advanced use of
  11.223 +I spent a bit more than a month working with the Quake II
  11.224 +Engine from ID software to see if I could use it for my
  11.225 +purposes. All the source code was released by ID software
  11.226 +into the Public Domain several years ago, and as a result it
  11.227 +has been ported and modified for many different
  11.228 +reasons. This engine was famous for its advanced use of
  11.229  realistic shading and had decent and fast physics
  11.230 -simulation. Researchers at Princeton [[http://papers.cnl.salk.edu/PDFs/Intracelllular%20Dynamics%20of%20Virtual%20Place%20Cells%202011-4178.pdf][used this code]] ([[http://brainwindows.wordpress.com/2009/10/14/playing-quake-with-a-real-mouse/][video]]) to study
  11.231 -spatial information encoding in the hippocampal cells of rats. Those
  11.232 -researchers created a special Quake II level that simulated a maze,
  11.233 -and added an interface where a mouse could run on top of a ball in
  11.234 -various directions to move the character in the simulated maze. They
  11.235 -measured hippocampal activity during this exercise to try and tease
  11.236 -out the method in which spatial data was stored in that area of the
  11.237 -brain. I find this promising because if a real living rat can interact
  11.238 -with a computer simulation of a maze in the same way as it interacts
  11.239 -with a real-world maze, then maybe that simulation is close enough to
  11.240 -reality that a simulated sense of vision and motor control interacting
  11.241 -with that simulation could reveal useful information about the real
  11.242 -thing. There is a Java port of the original C source code called
  11.243 -Jake2. The port demonstrates Java's OpenGL bindings and runs anywhere
  11.244 -from 90% to 105% as fast as the C version. After reviewing much of the
  11.245 -source of Jake2, I eventually rejected it because the engine is too
  11.246 -tied to the concept of a first-person shooter game. One of the
  11.247 -problems I had was that there do not seem to be any easy way to attach
  11.248 -multiple cameras to a single character. There are also several physics
  11.249 -clipping issues that are corrected in a way that only applies to the
  11.250 -main character and does not apply to arbitrary objects. While there is
  11.251 -a large community of level modders, I couldn't find a community to
  11.252 -support using the engine to make new things.
  11.253 +simulation. Researchers at Princeton [[http://papers.cnl.salk.edu/PDFs/Intracelllular%20Dynamics%20of%20Virtual%20Place%20Cells%202011-4178.pdf][used this code]] ([[http://brainwindows.wordpress.com/2009/10/14/playing-quake-with-a-real-mouse/][video]])
  11.254 +to study spatial information encoding in the hippocampal
  11.255 +cells of rats. Those researchers created a special Quake II
  11.256 +level that simulated a maze, and added an interface where a
  11.257 +mouse could run on top of a ball in various directions to
  11.258 +move the character in the simulated maze. They measured
  11.259 +hippocampal activity during this exercise to try and tease
  11.260 +out the method in which spatial data was stored in that area
  11.261 +of the brain. I find this promising because if a real living
  11.262 +rat can interact with a computer simulation of a maze in the
  11.263 +same way as it interacts with a real-world maze, then maybe
  11.264 +that simulation is close enough to reality that a simulated
  11.265 +sense of vision and motor control interacting with that
  11.266 +simulation could reveal useful information about the real
  11.267 +thing. There is a Java port of the original C source code
  11.268 +called Jake2. The port demonstrates Java's OpenGL bindings
  11.269 +and runs anywhere from 90% to 105% as fast as the C
  11.270 +version. After reviewing much of the source of Jake2, I
  11.271 +rejected it because the engine is too tied to the concept of
  11.272 +a first-person shooter game. One of the problems I had was
  11.273 +that there does not seem to be any easy way to attach
  11.274 +multiple cameras to a single character. There are also
  11.275 +several physics clipping issues that are corrected in a way
   11.276 +that only applies to the main character and does not apply to
  11.277 +arbitrary objects. While there is a large community of level
  11.278 +modders, I couldn't find a community to support using the
  11.279 +engine to make new things.
  11.280  
  11.281  ** [[http://source.valvesoftware.com/][Source Engine]]
  11.282  
  11.283 -The Source Engine evolved from the Quake II and Quake I engines and is
  11.284 -used by Valve in the Half-Life series of games. The physics simulation
  11.285 -in the Source Engine is quite accurate and probably the best out of
  11.286 -all the engines I investigated. There is also an extensive community
  11.287 -actively working with the engine. However, applications that use the
  11.288 -Source Engine must be written in C++, the code is not open, it only
  11.289 -runs on Windows, and the tools that come with the SDK to handle models
  11.290 -and textures are complicated and awkward to use.
  11.291 +The Source Engine evolved from the Quake II and Quake I
  11.292 +engines and is used by Valve in the Half-Life series of
  11.293 +games. The physics simulation in the Source Engine is quite
  11.294 +accurate and probably the best out of all the engines I
  11.295 +investigated. There is also an extensive community actively
  11.296 +working with the engine. However, applications that use the
  11.297 +Source Engine must be written in C++, the code is not open,
  11.298 +it only runs on Windows, and the tools that come with the
  11.299 +SDK to handle models and textures are complicated and
  11.300 +awkward to use.
  11.301  
  11.302  ** [[http://jmonkeyengine.com/][jMonkeyEngine3]]
  11.303  
  11.304 -jMonkeyEngine is a new library for creating games in Java. It uses
  11.305 -OpenGL to render to the screen and uses screengraphs to avoid drawing
  11.306 -things that do not appear on the screen. It has an active community
  11.307 -and several games in the pipeline. The engine was not built to serve
  11.308 -any particular game but is instead meant to be used for any 3D
  11.309 -game. After experimenting with each of these three engines and a few
  11.310 -others for about 2 months I settled on jMonkeyEngine. I chose it
  11.311 -because it had the most features out of all the open projects I looked
  11.312 -at, and because I could then write my code in Clojure, an
  11.313 -implementation of LISP that runs on the JVM.
  11.314 +jMonkeyEngine is a new library for creating games in
  11.315 +Java. It uses OpenGL to render to the screen and uses
   11.316 +scene graphs to avoid drawing things that do not appear on
  11.317 +the screen. It has an active community and several games in
  11.318 +the pipeline. The engine was not built to serve any
  11.319 +particular game but is instead meant to be used for any 3D
  11.320 +game. After experimenting with each of these three engines
  11.321 +and a few others for about 2 months I settled on
  11.322 +jMonkeyEngine. I chose it because it had the most features
  11.323 +out of all the open projects I looked at, and because I
  11.324 +could then write my code in Clojure, an implementation of
  11.325 +LISP that runs on the JVM.
  11.326  
  11.327  
  11.328  
    12.1 --- /dev/null	Thu Jan 01 00:00:00 1970 +0000
    12.2 +++ b/org/joint.org	Tue Mar 05 18:55:21 2013 +0000
    12.3 @@ -0,0 +1,124 @@
    12.4 +* Summary of Senses
    12.5 +
    12.6 +vision -- list of functions which must each be called with
    12.7 +the world as their argument, each of which returns [topology data]. Each
    12.8 +element of data is a number between 0 and 255 representing the
     12.9 +intensity of the light received at that sensor. Each element of
   12.10 +topology is a pair of numbers [x, y] such that numbers whose pairs
   12.11 +have a short euclidean distance are generally physically close on the
   12.12 +actual sensor.
   12.13 +
   12.14 +proprioception -- list of nullary functions, one for each joint, which
    12.15 +return [heading pitch roll].
   12.16 +
   12.17 +movement -- list of functions, one for each muscle, which must be
   12.18 +called with an integer between 0 and the total number of muscle fibers
   12.19 +in the muscle. Each function returns a float which is (current-force/
   12.20 +total-possible-force).
   12.21 +
   12.22 +touch -- list of functions which must each be called with a Node
    12.23 +(normally the root node of the simulation) as the argument, each of which
   12.24 +returns [topology data]. Each element of data is [length limit] where
   12.25 +limit is the length of that particular "hair" and length is the amount
   12.26 +of the hair that has been activated so far. (= limit length) means that
   12.27 +nothing is touching the hair.
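          +
          +Here is a rough, untested sketch of how one frame of sense
          +data might be collected, given the conventions above. The
          +name =sense-snapshot= and its exact arguments are
          +placeholders of mine, not existing cortex functions.
          +
          +#+begin_src clojure
          +(in-ns 'cortex.joint)
          +
          +;; Sketch only: assumes the sense functions were created once
          +;; at setup time with (vision! creature), (proprioception!
          +;; creature), and (touch! creature), and that they follow the
          +;; calling conventions described above.
          +(defn sense-snapshot
          +  "Poll every sense function once and return one frame of
          +   sense data, keyed by modality."
          +  [world vision proprioception touch]
          +  {:vision         (map #(% world) vision)
          +   :proprioception (map #(%) proprioception)
          +   :touch          (map #(% (.getRootNode world)) touch)})
          +#+end_src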
   12.28 +
   12.29 +
   12.30 +* A Flower
   12.31 +
   12.32 +A flower is a basic creature that tries to maximize the amount of
   12.33 +light that it sees. It can have one or more eyes, with one eye being
    12.34 +"special" in that it is this eye which must receive maximum light. It
    12.35 +can have multiple articulated joints and muscles.
   12.36 +
    12.37 +Want an algorithm that uses the sense data of =vision=,
   12.38 +=proprioception=, and =movement= to maximum benefit in order to look
   12.39 +at the light source.
   12.40 +
   12.41 +The light source will move from place to place and the flower will
   12.42 +have to follow it. 
   12.43 +
    12.44 +The algorithm should generalize to any number of eyes and muscles,
    12.45 +and should become /more/ performant the more sensory data is
   12.46 +available.
   12.47 +
   12.48 +I will punt on working out an elegant model of motivation for the
   12.49 +flower which makes it want to go to the light.
   12.50 +
   12.51 +Maybe I need a motivationless entity first, which just learns how its
   12.52 +own body works? But then, wouldn't that just be a motivation itself?
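          +
          +As a starting point, here is a very rough sketch (not yet
          +run) of the simplest thing that could possibly work: random
          +hill-climbing on the total brightness reported by the
          +special eye. The helpers =brightness= and
          +=light-seeking-step= are placeholders of mine, and the
          +number of fibers recruited per twitch is a guess.
          +
          +#+begin_src clojure
          +(in-ns 'cortex.joint)
          +
          +;; Sketch only. Assumes =special-eye= is one of the functions
          +;; returned by (vision! creature) and =muscles= is the list
          +;; returned by (movement! creature).
          +(defn brightness
          +  "Sum of the intensity values reported by one vision function."
          +  [world eye-fn]
          +  (let [[_topology data] (eye-fn world)]
          +    (reduce + data)))
          +
          +(defn light-seeking-step
          +  "Twitch one muscle a little, at random, and report whether
          +   the special eye now receives more light than before."
          +  [world special-eye muscles previous-brightness]
          +  ((rand-nth muscles) 20)  ; recruit a few fibers (assumed < fiber count)
          +  (let [now (brightness world special-eye)]
          +    {:brightness now
          +     :improved?  (> now previous-brightness)}))
          +#+end_src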
   12.53 +
   12.54 +
   12.55 +
   12.56 +
   12.57 +
   12.58 +#+name: load-creature
   12.59 +#+begin_src clojure
   12.60 +(in-ns 'cortex.joint)
   12.61 +
   12.62 +(def joint "Models/joint/joint.blend")
   12.63 +
   12.64 +(defn joint-creature []
   12.65 +  (load-blender-model joint))
   12.66 +
   12.67 +(defn test-joint-creature []
   12.68 +  (let [me (sphere 0.5 :color ColorRGBA/Blue :physical? false)
   12.69 +        creature (doto (joint-creature) (body!))
   12.70 +
   12.71 +      ;;;;;;;;;;;;  Sensors/Effectors  ;;;;;;;;;;;;;;;;;;;;;;;;;;;;
   12.72 +        touch (touch! creature)
   12.73 +        touch-display (view-touch)
   12.74 +
   12.75 +        vision (vision! creature)
   12.76 +        vision-display (view-vision)
   12.77 +
   12.78 +        ;;hearing (hearing! creature)
   12.79 +        ;;hearing-display (view-hearing)
   12.80 +
   12.81 +        prop (proprioception! creature)
   12.82 +        prop-display (view-proprioception)
   12.83 +
   12.84 +        muscles (movement! creature)
   12.85 +        muscle-display (view-movement)
   12.86 +      ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
   12.87 +
   12.88 +        fix-display (gen-fix-display)
   12.89 +
   12.90 +        floor (box 10 2 10 :position (Vector3f. 0 -9 0)
   12.91 +                   :color ColorRGBA/Gray :mass 0)]
   12.92 +    (world
   12.93 +     (nodify [floor me creature])
   12.94 +     standard-debug-controls
   12.95 +     (fn [world]
   12.96 +       ;;(speed-up world)
   12.97 +       (light-up-everything world)
   12.98 +       (let [timer (RatchetTimer. 60)]
   12.99 +         (.setTimer world timer)
  12.100 +         (display-dilated-time world timer)))
  12.101 +     (fn [world tpf]
  12.102 +       (.setLocalTranslation me (.getLocation (.getCamera world)))
  12.103 +       (fix-display world)))))
  12.104 +#+end_src
  12.105 +
  12.106 +* Headers
  12.107 +#+name: joint-header
  12.108 +#+begin_src clojure
  12.109 +(ns cortex.joint
  12.110 +  (:require cortex.import)
  12.111 +  (:use (cortex world util import body sense
  12.112 +                hearing touch vision proprioception movement))
  12.113 +  (:import java.io.File)
  12.114 +  (:import (com.aurellem.capture RatchetTimer IsoTimer)))
  12.115 +
  12.116 +(cortex.import/mega-import-jme3)
  12.117 +(rlm.rlm-commands/help)
  12.118 +#+end_src
  12.119 +
  12.120 +
  12.121 +* COMMENT Generate Source
  12.122 +
  12.123 +#+begin_src clojure :tangle ../src/cortex/joint.clj
  12.124 +<<joint-header>>
  12.125 +<<load-creature>>
  12.126 +#+end_src
  12.127 +  
    13.1 --- a/org/movement.org	Tue Feb 26 16:31:29 2013 +0000
    13.2 +++ b/org/movement.org	Tue Mar 05 18:55:21 2013 +0000
    13.3 @@ -185,6 +185,8 @@
    13.4  
    13.5  #+name: test-movement
    13.6  #+begin_src clojure
    13.7 +(in-ns 'cortex.test.movement)
    13.8 +
    13.9  (defn test-worm-movement
   13.10    "Testing movement:
   13.11     You should see the worm suspended in mid air and a display on the
   13.12 @@ -217,13 +219,16 @@
   13.13                    (if value
   13.14                      (swap! muscle-exertion (fn [v] (- v 20)))))})
   13.15          (fn [world]
   13.16 +
   13.17 +          (let [timer (RatchetTimer. 60)]
   13.18 +            (.setTimer world timer)
   13.19 +            (display-dilated-time world timer))
   13.20            (if record?
   13.21              (Capture/captureVideo
   13.22               world
   13.23               (File. "/home/r/proj/cortex/render/worm-muscles/main-view")))
   13.24            (light-up-everything world)
   13.25            (enable-debug world)
   13.26 -          (.setTimer world (RatchetTimer. 60))
   13.27            (set-gravity world (Vector3f. 0 0 0))
   13.28            (.setLocation (.getCamera world)
   13.29                          (Vector3f. -4.912815, 2.004171, 0.15710819))
   13.30 @@ -237,6 +242,9 @@
   13.31               (File. "/home/r/proj/cortex/render/worm-muscles/muscles"))))))))
   13.32  #+end_src
   13.33  
   13.34 +#+results: test-movement
   13.35 +: #'cortex.test.movement/test-worm-movement
   13.36 +
   13.37  * Video Demonstration 
   13.38  
   13.39  #+begin_html
   13.40 @@ -317,10 +325,13 @@
   13.41    (:import java.awt.image.BufferedImage)
   13.42    (:import com.jme3.scene.Node)
   13.43    (:import (com.jme3.math Quaternion Vector3f))
   13.44 -  (:import (com.aurellem.capture Capture RatchetTimer))
   13.45 +  (:import (com.aurellem.capture Capture RatchetTimer IsoTimer))
   13.46    (:import com.jme3.bullet.control.RigidBodyControl))
   13.47  #+end_src
   13.48  
   13.49 +#+results: test-header
   13.50 +: com.jme3.bullet.control.RigidBodyControl
   13.51 +
   13.52  * Source Listing
   13.53    - [[../src/cortex/movement.clj][cortex.movement]]
   13.54    - [[../src/cortex/test/movement.clj][cortex.test.movement]]
    14.1 --- a/org/proposal.org	Tue Feb 26 16:31:29 2013 +0000
    14.2 +++ b/org/proposal.org	Tue Mar 05 18:55:21 2013 +0000
    14.3 @@ -1,1 +0,0 @@
    14.4 -
    15.1 --- a/org/proprioception.org	Tue Feb 26 16:31:29 2013 +0000
    15.2 +++ b/org/proprioception.org	Tue Mar 05 18:55:21 2013 +0000
    15.3 @@ -272,11 +272,13 @@
    15.4            [root
    15.5             standard-debug-controls
    15.6             (fn [world]
    15.7 +             (let [timer (RatchetTimer. 60)]
    15.8 +               (.setTimer world timer)
    15.9 +               (display-dilated-time world timer))
   15.10               (if record?
   15.11                 (Capture/captureVideo
   15.12                  world
   15.13                  (File. "/home/r/proj/cortex/render/proprio/main-view")))
   15.14 -             (.setTimer world (com.aurellem.capture.RatchetTimer. 60))
   15.15               (set-gravity world (Vector3f. 0 0 0))
   15.16               (enable-debug world)
   15.17               (light-up-everything world))
   15.18 @@ -363,7 +365,7 @@
   15.19  #+name: test-proprioception-header
   15.20  #+begin_src clojure
   15.21  (ns cortex.test.proprioception
   15.22 -  (:import (com.aurellem.capture Capture RatchetTimer))
   15.23 +  (:import (com.aurellem.capture Capture RatchetTimer IsoTimer))
   15.24    (:use (cortex util world proprioception body))
   15.25    (:import java.io.File)
   15.26    (:import com.jme3.bullet.control.RigidBodyControl)
   15.27 @@ -371,6 +373,9 @@
   15.28    (:import (com.jme3.math Vector3f Quaternion ColorRGBA)))
   15.29  #+end_src
   15.30  
   15.31 +#+results: test-proprioception-header
   15.32 +: com.jme3.math.ColorRGBA
   15.33 +
   15.34  * Source Listing
   15.35    - [[../src/cortex/proprioception.clj][cortex.proprioception]]
   15.36    - [[../src/cortex/test/touch.clj][cortex.test.proprioception]]
    16.1 --- a/org/sense.org	Tue Feb 26 16:31:29 2013 +0000
    16.2 +++ b/org/sense.org	Tue Mar 05 18:55:21 2013 +0000
    16.3 @@ -14,13 +14,21 @@
    16.4  
    16.5  #+name: blender-1
    16.6  #+begin_src clojure
    16.7 +(in-ns 'cortex.sense)
    16.8  (defn meta-data
    16.9    "Get the meta-data for a node created with blender."
   16.10    [blender-node key]
   16.11    (if-let [data (.getUserData blender-node "properties")]
   16.12 -    (.findValue data key) nil))
    16.13 +    ;; this part is to accommodate weird blender properties
   16.14 +    ;; as well as sensible clojure maps.
   16.15 +    (.findValue data key)
   16.16 +    (.getUserData blender-node key)))
   16.17 +
   16.18  #+end_src
   16.19  
   16.20 +#+results: blender-1
   16.21 +: #'cortex.sense/meta-data
   16.22 +
   16.23  Blender uses a different coordinate system than jMonkeyEngine so it
   16.24  is useful to be able to convert between the two. These only come into
   16.25  play when the meta-data of a node refers to a vector in the blender
   16.26 @@ -446,6 +454,8 @@
   16.27  
   16.28  #+name: test
   16.29  #+begin_src clojure 
   16.30 +(in-ns 'cortex.test.sense)
   16.31 +
   16.32  (defn test-bind-sense
   16.33    "Show a camera that stays in the same relative position to a blue
   16.34    cube."
   16.35 @@ -469,12 +479,14 @@
   16.36              (.setTimer world (RatchetTimer. 60))
   16.37              (if record?
   16.38                (Capture/captureVideo
   16.39 -               world (File. "/home/r/proj/cortex/render/bind-sense0")))
   16.40 +               world 
   16.41 +               (File. "/home/r/proj/cortex/render/bind-sense0")))
   16.42              (add-camera!
   16.43               world cam
   16.44 -             (comp (view-image
   16.45 -                    (if record?
   16.46 -                    (File. "/home/r/proj/cortex/render/bind-sense1")))
   16.47 +             (comp
   16.48 +              (view-image
   16.49 +               (if record?
   16.50 +                 (File. "/home/r/proj/cortex/render/bind-sense1")))
   16.51                     BufferedImage!))
   16.52              (add-camera! world (.getCamera world) no-op)))
   16.53          no-op))))
    17.1 --- a/org/thesis.org	Tue Feb 26 16:31:29 2013 +0000
    17.2 +++ b/org/thesis.org	Tue Mar 05 18:55:21 2013 +0000
    17.3 @@ -1,1 +1,57 @@
    17.4 +#+title: Thesis
    17.5 +#+author: Robert McIntyre
    17.6 +#+email: rlm@mit.edu
    17.7 +#+description: MEng thesis for Robert McIntyre
    17.8 +#+keywords: AI, simulation, jMonkeyEngine3, clojure, virtual reality
    17.9 +#+SETUPFILE: ../../aurellem/org/setup.org
   17.10  
   17.11 +* COMMENT Multiple senses are compelling for AI.
   17.12 +#+include: "./intro.org"
   17.13 +
   17.14 +* Virtual reality is vastly easier than actual reality.
   17.15 +  
    17.16 +* There is no framework for AI experiments with multiple senses.
   17.17 +
   17.18 +* Cortex is my attempt at such a framework.
   17.19 +
   17.20 +** COMMENT Cortex uses jMonkeyEngine3 to create virtual worlds...
   17.21 +#+include: "./world.org"
   17.22 +#+include: "./util.org"
   17.23 +
   17.24 +** COMMENT ...and Blender to describe virtual creatures.
   17.25 +
   17.26 +** COMMENT Bodies are articulated rigid constructs
   17.27 +#+include: "./body.org"
   17.28 +
   17.29 +** COMMENT Senses are all 2d surfaces with embedded sensor elements.
   17.30 +#+include: "./sense.org"
   17.31 +
   17.32 +** COMMENT Thousands of hair-like elements simulate touch.
   17.33 +#+include: "./touch.org"
   17.34 +
   17.35 +** COMMENT Vision is modeled after the human retina.
   17.36 +#+include: "./vision.org"
   17.37 +
   17.38 +** COMMENT Cortex provides general simulated hearing.
   17.39 +#+include: "./hearing.org"
   17.40 +
   17.41 +** COMMENT Proprioception and Movement provide a sense of embodiment.
   17.42 +#+include: "./proprioception.org"
   17.43 +#+include: "./movement.org"
   17.44 +
   17.45 +* COMMENT The Hand
   17.46 +#+include: "./integration.org"
   17.47 +
   17.48 +* The Reusable Self Learning Joint
   17.49 +
   17.50 +* Robotic Calisthenics
   17.51 +
    17.52 +* The Lens that Sees its Flaws
   17.53 +
   17.54 +* Rat in a Maze
   17.55 +
   17.56 +* Swarm Creatures
   17.57 +  
   17.58 +* Simulated Imagination
   17.59 +
   17.60 +
    18.1 --- a/org/touch.org	Tue Feb 26 16:31:29 2013 +0000
    18.2 +++ b/org/touch.org	Tue Mar 05 18:55:21 2013 +0000
    18.3 @@ -552,6 +552,9 @@
    18.4          standard-debug-controls
    18.5          
    18.6          (fn [world]
    18.7 +          (let [timer (IsoTimer. 60)]
    18.8 +            (.setTimer world timer)
    18.9 +            (display-dilated-time world timer))
   18.10            (if record?
   18.11              (Capture/captureVideo
   18.12               world
   18.13 @@ -566,6 +569,9 @@
   18.14               (File. "/home/r/proj/cortex/render/touch-cube/touch/"))))))))
   18.15  #+end_src
   18.16  
   18.17 +#+results: test-touch-1
   18.18 +: #'cortex.test.touch/test-basic-touch
   18.19 +
   18.20  ** Basic Touch Demonstration
   18.21  
   18.22  #+begin_html
   18.23 @@ -656,6 +662,9 @@
   18.24          standard-debug-controls
   18.25          
   18.26          (fn [world]
   18.27 +          (let [timer (IsoTimer. 60)]
   18.28 +            (.setTimer world timer)
   18.29 +            (display-dilated-time world timer))
   18.30            (if record? 
   18.31              (Capture/captureVideo 
   18.32               world
   18.33 @@ -670,6 +679,9 @@
   18.34               (File. "/home/r/proj/cortex/render/worm-touch/touch/"))))))))
   18.35  #+end_src
   18.36  
   18.37 +#+results: test-touch-2
   18.38 +: #'cortex.test.touch/test-worm-touch
   18.39 +
   18.40  ** Worm Touch Demonstration 
   18.41  #+begin_html
   18.42  <div class="figure">
   18.43 @@ -747,11 +759,14 @@
   18.44  (ns cortex.test.touch
   18.45    (:use (cortex world util sense body touch))
   18.46    (:use cortex.test.body)
   18.47 -  (:import com.aurellem.capture.Capture)
   18.48 +  (:import (com.aurellem.capture Capture IsoTimer))
   18.49    (:import java.io.File)
   18.50    (:import (com.jme3.math Vector3f ColorRGBA)))
   18.51  #+end_src
   18.52  
   18.53 +#+results: test-touch-header
   18.54 +: com.jme3.math.ColorRGBA
   18.55 +
   18.56  * Source Listing
   18.57    - [[../src/cortex/touch.clj][cortex.touch]]
   18.58    - [[../src/cortex/test/touch.clj][cortex.test.touch]]
   18.59 @@ -762,23 +777,26 @@
   18.60    - [[http://hg.bortreb.com ][source-repository]]
   18.61  
   18.62  * Next
   18.63 -So far I've implemented simulated Vision, Hearing, and Touch, the most
   18.64 -obvious and prominent senses that humans have.  Smell and Taste shall
   18.65 -remain unimplemented for now. This accounts for the "five senses" that
   18.66 -feature so prominently in our lives. But humans have far more than the
   18.67 -five main senses. There are internal chemical senses, pain (which is
   18.68 -*not* the same as touch), heat sensitivity, and our sense of balance,
   18.69 -among others. One extra sense is so important that I must implement it
   18.70 -to have a hope of making creatures that can gracefully control their
   18.71 -own bodies.  It is Proprioception, which is the sense of the location
   18.72 -of each body part in relation to the other body parts.
   18.73 +So far I've implemented simulated Vision, Hearing, and
   18.74 +Touch, the most obvious and prominent senses that humans
   18.75 +have.  Smell and Taste shall remain unimplemented for
   18.76 +now. This accounts for the "five senses" that feature so
   18.77 +prominently in our lives. But humans have far more than the
   18.78 +five main senses. There are internal chemical senses, pain
   18.79 +(which is *not* the same as touch), heat sensitivity, and
   18.80 +our sense of balance, among others. One extra sense is so
   18.81 +important that I must implement it to have a hope of making
   18.82 +creatures that can gracefully control their own bodies.  It
   18.83 +is Proprioception, which is the sense of the location of
   18.84 +each body part in relation to the other body parts.
   18.85  
   18.86 -Close your eyes, and touch your nose with your right index finger. How
   18.87 -did you do it? You could not see your hand, and neither your hand nor
   18.88 -your nose could use the sense of touch to guide the path of your hand.
   18.89 -There are no sound cues, and Taste and Smell certainly don't provide
   18.90 -any help. You know where your hand is without your other senses
   18.91 -because of Proprioception.
   18.92 +Close your eyes, and touch your nose with your right index
   18.93 +finger. How did you do it? You could not see your hand, and
   18.94 +neither your hand nor your nose could use the sense of touch
   18.95 +to guide the path of your hand.  There are no sound cues,
   18.96 +and Taste and Smell certainly don't provide any help. You
   18.97 +know where your hand is without your other senses because of
   18.98 +Proprioception.
   18.99  
  18.100  Onward to [[./proprioception.org][proprioception]]!
  18.101  
    19.1 --- a/org/util.org	Tue Feb 26 16:31:29 2013 +0000
    19.2 +++ b/org/util.org	Tue Mar 05 18:55:21 2013 +0000
    19.3 @@ -100,6 +100,7 @@
    19.4    (:import java.awt.image.BufferedImage)
    19.5    (:import javax.swing.JPanel)
    19.6    (:import javax.swing.JFrame)
    19.7 +  (:import ij.ImagePlus)
    19.8    (:import javax.swing.SwingUtilities)
    19.9    (:import com.jme3.scene.plugins.blender.BlenderModelLoader)
   19.10    (:import (java.util.logging Level Logger)))
   19.11 @@ -491,7 +492,7 @@
   19.12         (controlUpdate [tpf]
   19.13           (.setText text (format
   19.14                           "%.2f"
   19.15 -                         (float (/ (.getTime timer) 1000)))))
   19.16 +                         (float (.getTimeInSeconds timer)))))
   19.17         (controlRender [_ _])))
   19.18      (.attachChild (.getGuiNode world) text)))
   19.19  #+end_src
   19.20 @@ -532,6 +533,18 @@
   19.21      (view (doto (Node.)
   19.22              (.attachChild (box 1 1 1 :color color))))))
   19.23  
   19.24 +(extend-type ij.ImagePlus
   19.25 +  Viewable
   19.26 +  (view [image]
   19.27 +    (.show image)))
   19.28 +
   19.29 +(extend-type java.awt.image.BufferedImage
   19.30 +  Viewable
   19.31 +  (view
   19.32 +    [image]
   19.33 +    (view (ImagePlus. "view-buffered-image" image))))
   19.34 +
   19.35 +
   19.36  (defprotocol Textual
   19.37    (text [something]
   19.38      "Display a detailed textual analysis of the given object."))
    20.1 --- a/org/vision.org	Tue Feb 26 16:31:29 2013 +0000
    20.2 +++ b/org/vision.org	Tue Mar 05 18:55:21 2013 +0000
    20.3 @@ -149,26 +149,34 @@
    20.4  
    20.5  (defn add-eye!
    20.6    "Create a Camera centered on the current position of 'eye which
    20.7 -   follows the closest physical node in 'creature and sends visual
    20.8 -   data to 'continuation. The camera will point in the X direction and
    20.9 -   use the Z vector as up as determined by the rotation of these
   20.10 -   vectors in blender coordinate space. Use XZY rotation for the node
   20.11 -   in blender."
   20.12 +   follows the closest physical node in 'creature. The camera will
   20.13 +   point in the X direction and use the Z vector as up as determined
   20.14 +   by the rotation of these vectors in blender coordinate space. Use
   20.15 +   XZY rotation for the node in blender."
   20.16    [#^Node creature #^Spatial eye]
   20.17    (let [target (closest-node creature eye)
   20.18 -        [cam-width cam-height] (eye-dimensions eye)
   20.19 +        [cam-width cam-height] 
   20.20 +        ;;[640 480] ;; graphics card on laptop doesn't support
    20.21 +                    ;; arbitrary dimensions.
   20.22 +        (eye-dimensions eye)
   20.23          cam (Camera. cam-width cam-height)
   20.24          rot (.getWorldRotation eye)]
   20.25      (.setLocation cam (.getWorldTranslation eye))
   20.26      (.lookAtDirection
   20.27 -     cam                                ; this part is not a mistake and
   20.28 -     (.mult rot Vector3f/UNIT_X)        ; is consistent with using Z in
   20.29 -     (.mult rot Vector3f/UNIT_Y))       ; blender as the UP vector.
   20.30 +     cam                           ; this part is not a mistake and
   20.31 +     (.mult rot Vector3f/UNIT_X)   ; is consistent with using Z in
   20.32 +     (.mult rot Vector3f/UNIT_Y))  ; blender as the UP vector.
   20.33      (.setFrustumPerspective
   20.34 -     cam 45 (/ (.getWidth cam) (.getHeight cam)) 1 1000)
   20.35 +     cam (float 45)
   20.36 +     (float (/ (.getWidth cam) (.getHeight cam)))
   20.37 +     (float 1)
   20.38 +     (float 1000))
   20.39      (bind-sense target cam) cam))
   20.40  #+end_src
   20.41  
   20.42 +#+results: add-eye
   20.43 +: #'cortex.vision/add-eye!
   20.44 +
   20.45  Here, the camera is created based on metadata on the eye-node and
   20.46  attached to the nearest physical object with =bind-sense=
   20.47  ** The Retina
   20.48 @@ -280,6 +288,7 @@
   20.49  
   20.50  #+name: add-camera
   20.51  #+begin_src clojure
   20.52 +(in-ns 'cortex.vision)
   20.53  (defn add-camera!
   20.54    "Add a camera to the world, calling continuation on every frame
   20.55    produced." 
   20.56 @@ -295,6 +304,9 @@
   20.57        (.attachScene (.getRootNode world)))))
   20.58  #+end_src
   20.59  
   20.60 +#+results: add-camera
   20.61 +: #'cortex.vision/add-camera!
   20.62 +
   20.63  
   20.64  The eye's continuation function should register the viewport with the
   20.65  simulation the first time it is called, use the CPU to extract the
   20.66 @@ -399,8 +411,8 @@
   20.67  #+name: main
   20.68  #+begin_src clojure
   20.69  (defn vision!
   20.70 -  "Returns a function which returns visual sensory data when called
   20.71 -   inside a running simulation."
   20.72 +  "Returns a list of functions, each of which returns visual sensory
   20.73 +   data when called inside a running simulation."
   20.74    [#^Node creature & {skip :skip :or {skip 0}}]
   20.75    (reduce
   20.76     concat 
   20.77 @@ -480,13 +492,19 @@
   20.78                             (if record?
   20.79                               (File. "/home/r/proj/cortex/render/vision/2")))
   20.80                            BufferedImage!))
   20.81 +            (let [timer (IsoTimer. 60)]
   20.82 +              (.setTimer world timer)
   20.83 +              (display-dilated-time world timer))
   20.84              ;; This is here to restore the main view
   20.85 -         ;; after the other views have completed processing
   20.86 +            ;; after the other views have completed processing
   20.87              (add-camera! world (.getCamera world) no-op)))
   20.88          (fn [world tpf]
   20.89            (.rotate candy (* tpf 0.2) 0 0))))))
   20.90  #+end_src
   20.91  
   20.92 +#+results: test-1
   20.93 +: #'cortex.test.vision/test-pipeline
   20.94 +
   20.95  #+begin_html
   20.96  <div class="figure">
   20.97  <video controls="controls" width="755">
   20.98 @@ -545,6 +563,32 @@
   20.99    (comp #(change-color % color)
  20.100           (fire-cannon-ball)))
  20.101  
  20.102 +(defn gen-worm
  20.103 +  "create a creature acceptable for testing as a replacement for the
  20.104 +   worm."
  20.105 +  []
  20.106 +  (nodify
  20.107 +   "worm"
  20.108 +   [(nodify
  20.109 +     "eyes"
  20.110 +     [(doto
  20.111 +          (Node. "eye1")
  20.112 +        (.setLocalTranslation (Vector3f. 0 -1.1 0))
  20.113 +        (.setUserData
  20.114 +         
  20.115 +         "eye" 
  20.116 +         "(let [retina
  20.117 +                \"Models/test-creature/retina-small.png\"]
  20.118 +                {:all retina :red retina
  20.119 +                 :green retina :blue retina})"))])
  20.120 +    (box
  20.121 +     0.2 0.2 0.2
  20.122 +     :name "worm-segment"
  20.123 +     :position (Vector3f. 0 0 0)
  20.124 +     :color ColorRGBA/Orange)]))
  20.125 +
  20.126 +
  20.127 +
  20.128  (defn test-worm-vision 
  20.129    "Testing vision:
  20.130     You should see the worm suspended in mid-air, looking down at a
  20.131 @@ -557,7 +601,7 @@
  20.132       b  : fire blue-ball
  20.133       g  : fire green-ball
  20.134       <space> : fire white ball"
  20.135 -   
  20.136 +  
  20.137    ([] (test-worm-vision false))
  20.138    ([record?] 
  20.139       (let [the-worm (doto (worm)(body!))
  20.140 @@ -574,47 +618,56 @@
  20.141             z-axis
  20.142             (box 0.01 0.01 1 :physical? false :color ColorRGBA/Blue
  20.143                  :position (Vector3f. 0 -5 0))
  20.144 -           timer (RatchetTimer. 60)]
  20.145  
  20.146 -       (world (nodify [(floor) the-worm x-axis y-axis z-axis me])
  20.147 -              (assoc standard-debug-controls
  20.148 -                "key-r" (colored-cannon-ball ColorRGBA/Red)
  20.149 +           ]
  20.150 +
  20.151 +       (world
  20.152 +        (nodify [(floor) the-worm x-axis y-axis z-axis me])
  20.153 +        (merge standard-debug-controls
  20.154 +               {"key-r" (colored-cannon-ball ColorRGBA/Red)
  20.155                  "key-b" (colored-cannon-ball ColorRGBA/Blue)
  20.156 -                "key-g" (colored-cannon-ball ColorRGBA/Green))
  20.157 -              (fn [world]
  20.158 -                (light-up-everything world)
  20.159 -                (speed-up world)
  20.160 +                "key-g" (colored-cannon-ball ColorRGBA/Green)})
  20.161 +        
  20.162 +        (fn [world]
  20.163 +          (light-up-everything world)
  20.164 +          (speed-up world)
  20.165 +          (let [timer (IsoTimer. 60)]
  20.166                  (.setTimer world timer)
  20.167 -                (display-dilated-time world timer)
  20.168 -                ;; add a view from the worm's perspective
  20.169 -                (if record?
  20.170 -                  (Capture/captureVideo
  20.171 -                   world
  20.172 -                   (File.
  20.173 -                    "/home/r/proj/cortex/render/worm-vision/main-view")))
  20.174 -                
  20.175 -                (add-camera!
  20.176 -                 world
  20.177 -                 (add-eye! the-worm
  20.178 -                           (.getChild 
  20.179 -                            (.getChild the-worm "eyes") "eye"))
  20.180 -                 (comp
  20.181 -                  (view-image
  20.182 -                   (if record?
  20.183 -                     (File.
  20.184 -                      "/home/r/proj/cortex/render/worm-vision/worm-view")))
  20.185 -                  BufferedImage!))
  20.186 -                
  20.187 -                (set-gravity world Vector3f/ZERO))
  20.188 -              
  20.189 -              (fn [world _ ]
  20.190 -                (.setLocalTranslation me (.getLocation (.getCamera world)))
  20.191 -                (vision-display
  20.192 -                 (map #(% world) vision)
  20.193 -                 (if record? (File. "/home/r/proj/cortex/render/worm-vision")))
  20.194 -                (fix-display world))))))
  20.195 +                (display-dilated-time world timer))
  20.196 +          ;; add a view from the worm's perspective
  20.197 +          (if record?
  20.198 +            (Capture/captureVideo
  20.199 +             world
  20.200 +             (File.
  20.201 +              "/home/r/proj/cortex/render/worm-vision/main-view")))
  20.202 +            
  20.203 +          (add-camera!
  20.204 +           world
  20.205 +           (add-eye! the-worm (first (eyes the-worm)))
  20.206 +           (comp
  20.207 +            (view-image
  20.208 +             (if record?
  20.209 +               (File.
  20.210 +                "/home/r/proj/cortex/render/worm-vision/worm-view")))
  20.211 +            BufferedImage!))
  20.212 +            
  20.213 +          (set-gravity world Vector3f/ZERO)
  20.214 +          (add-camera! world (.getCamera world) no-op))
  20.215 +        
  20.216 +        (fn [world _]
  20.217 +          (.setLocalTranslation me (.getLocation (.getCamera world)))
  20.218 +           (vision-display
  20.219 +            (map #(% world) vision)
  20.220 +            (if record?
  20.221 +              (File. "/home/r/proj/cortex/render/worm-vision")))
  20.222 +          (fix-display world)
  20.223 +          )))))
  20.224  #+end_src
  20.225  
  20.226 +#+RESULTS: test-2
  20.227 +: #'cortex.test.vision/test-worm-vision
  20.228 +
  20.229 +
  20.230  The world consists of the worm and a flat gray floor. I can shoot red,
  20.231  green, blue and white cannonballs at the worm. The worm is initially
  20.232  looking down at the floor, and there is no gravity. My perspective
  20.233 @@ -730,8 +783,12 @@
  20.234    (:import com.jme3.scene.Node)
  20.235    (:import com.jme3.math.Vector3f)
  20.236    (:import java.io.File)
  20.237 -  (:import (com.aurellem.capture Capture RatchetTimer)))
  20.238 +  (:import (com.aurellem.capture Capture RatchetTimer IsoTimer)))
  20.239  #+end_src
  20.240 +
  20.241 +#+results: test-header
  20.242 +: com.aurellem.capture.IsoTimer
  20.243 +
  20.244  * Source Listing
  20.245    - [[../src/cortex/vision.clj][cortex.vision]]
  20.246    - [[../src/cortex/test/vision.clj][cortex.test.vision]]