changeset 198:fc0bf33bded2

fleshing out prose in sense.org
author Robert McIntyre <rlm@mit.edu>
date Sun, 05 Feb 2012 14:01:47 -0700
parents 16cbce075a0b
children 305439cec54d
files images/finger-1.png images/finger-2.png images/finger-3.png images/finger-UV.png images/whatever.png org/sense.org org/util.org
diffstat 7 files changed, 253 insertions(+), 109 deletions(-) [+]
line wrap: on
line diff
     1.1 Binary file images/finger-1.png has changed
     2.1 Binary file images/finger-2.png has changed
     3.1 Binary file images/finger-3.png has changed
     4.1 Binary file images/finger-UV.png has changed
     5.1 Binary file images/whatever.png has changed
     6.1 --- a/org/sense.org	Sun Feb 05 06:55:41 2012 -0700
     6.2 +++ b/org/sense.org	Sun Feb 05 14:01:47 2012 -0700
     6.3 @@ -1,4 +1,4 @@
     6.4 -#+title: General Sense/Effector Utilities
     6.5 +#+title: 
     6.6  #+author: Robert McIntyre
     6.7  #+email: rlm@mit.edu
     6.8  #+description: sensory utilities
     6.9 @@ -6,51 +6,114 @@
    6.10  #+SETUPFILE: ../../aurellem/org/setup.org
    6.11  #+INCLUDE: ../../aurellem/org/level-0.org
    6.12  
    6.13 -* Namespace/Imports
    6.14 -#+name header
    6.15 -#+begin_src clojure
    6.16 -(ns cortex.sense
    6.17 -  "Here are functions useful in the construction of two or more
    6.18 -   sensors/effectors."
    6.19 -  {:author "Robert McInytre"}
    6.20 -  (:use (cortex world util))
    6.21 -  (:import ij.process.ImageProcessor)
    6.22 -  (:import jme3tools.converters.ImageToAwt)
    6.23 -  (:import java.awt.image.BufferedImage)
    6.24 -  (:import com.jme3.collision.CollisionResults)
    6.25 -  (:import com.jme3.bounding.BoundingBox)
    6.26 -  (:import (com.jme3.scene Node Spatial))
    6.27 -  (:import com.jme3.scene.control.AbstractControl)
    6.28 -  (:import (com.jme3.math Quaternion Vector3f)))
    6.29 -#+end_src
    6.30  
    6.31  * Blender Utilities
    6.32 -#+name: blender
     6.33 +In blender, any object can be assigned an arbitrary number of
     6.34 +key-value pairs called "Custom Properties". These are accessible in
     6.35 +jMonkeyEngine when blender files are imported with the
     6.36 +=BlenderLoader=. =(meta-data)= extracts these properties.
    6.37 +
    6.38 +#+name: blender-1
    6.39  #+begin_src clojure
    6.40  (defn meta-data
    6.41    "Get the meta-data for a node created with blender."
    6.42    [blender-node key]
    6.43    (if-let [data (.getUserData blender-node "properties")]
    6.44 -    (.findValue data key)
    6.45 -    nil))
    6.46 +    (.findValue data key) nil))
    6.47 +#+end_src
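          +
          +For example, if a node imported from blender carried a custom
          +property named "touch" (a hypothetical name) holding the path to its
          +touch UV-map, it could be read back like this:
          +
          +#+begin_src clojure
          +(comment
          +  ;; hypothetical node, property name, and value; only a usage sketch
          +  (meta-data finger-node "touch")
          +  ;; => "Models/finger/touch-uv.png"
          +  )
          +#+end_src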
    6.48  
    6.49 +Blender uses a different coordinate system than jMonkeyEngine so it
    6.50 +is useful to be able to convert between the two. These only come into
    6.51 +play when the meta-data of a node refers to a vector in the blender
    6.52 +coordinate system.
    6.53 +
    6.54 +#+name: blender-2
    6.55 +#+begin_src clojure
    6.56  (defn jme-to-blender
    6.57    "Convert from JME coordinates to Blender coordinates"
    6.58    [#^Vector3f in]
    6.59 -  (Vector3f. (.getX in)
    6.60 -             (- (.getZ in))
    6.61 -             (.getY in)))
    6.62 +  (Vector3f. (.getX in) (- (.getZ in)) (.getY in)))
    6.63  
    6.64  (defn blender-to-jme
    6.65    "Convert from Blender coordinates to JME coordinates"
    6.66    [#^Vector3f in]
    6.67 -  (Vector3f. (.getX in)
    6.68 -             (.getZ in)
    6.69 -             (- (.getY in))))
    6.70 +  (Vector3f. (.getX in) (.getZ in) (- (.getY in))))
    6.71  #+end_src
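          +
          +As a quick sanity check (shown only as a sketch), the two conversions
          +should be inverses of one another:
          +
          +#+begin_src clojure
          +(comment
          +  (let [v (Vector3f. 1 2 3)]
          +    (blender-to-jme (jme-to-blender v)))
          +  ;; => a Vector3f equal to (1.0, 2.0, 3.0)
          +  )
          +#+end_src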
    6.72  
    6.73 -* Topology Related stuff
    6.74 -#+name: topology
    6.75 +* Sense Topology
    6.76 +
    6.77 +Human beings are three-dimensional objects, and the nerves that
    6.78 +transmit data from our various sense organs to our brain are
    6.79 +essentially one-dimensional. This leaves up to two dimensions in which
    6.80 +our sensory information may flow.  For example, imagine your skin: it
    6.81 +is a two-dimensional surface around a three-dimensional object (your
    6.82 +body). It has discrete touch sensors embedded at various points, and
    6.83 +the density of these sensors corresponds to the sensitivity of that
    6.84 +region of skin. Each touch sensor connects to a nerve, all of which
    6.85 +eventually are bundled together as they travel up the spinal cord to
    6.86 +the brain. Intersect the spinal nerves with a guillotining plane and
    6.87 +you will see all of the sensory data of the skin revealed in a roughly
    6.88 +circular two-dimensional image which is the cross section of the
    6.89 +spinal cord.  Points on this image that are close together in this
    6.90 +circle represent touch sensors that are /probably/ close together on
     6.91 +the skin, although there is of course some cutting and rearrangement
     6.92 +that has to be done to transfer the complicated surface of the skin
     6.93 +onto a two-dimensional image.
    6.94 +
    6.95 +Most human senses consist of many discrete sensors of various
    6.96 +properties distributed along a surface at various densities.  For
    6.97 +skin, it is Pacinian corpuscles, Meissner's corpuscles, Merkel's
    6.98 +disks, and Ruffini's endings, which detect pressure and vibration of
    6.99 +various intensities.  For ears, it is the stereocilia distributed
   6.100 +along the basilar membrane inside the cochlea; each one is sensitive
   6.101 +to a slightly different frequency of sound. For eyes, it is rods
   6.102 +and cones distributed along the surface of the retina. In each case,
   6.103 +we can describe the sense with a surface and a distribution of sensors
   6.104 +along that surface.
   6.105 +
   6.106 +** UV-maps
   6.107 +
   6.108 +Blender and jMonkeyEngine already have support for exactly this sort
   6.109 +of data structure because it is used to "skin" models for games. It is
   6.110 +called [[http://wiki.blender.org/index.php/Doc:2.6/Manual/Textures/Mapping/UV][UV-mapping]].  The three-dimensional surface is cut and smooshed
   6.111 +until it fits on a two-dimensional image. You paint whatever you want
   6.112 +on that image, and when the three-dimensional shape is rendered in a
    6.113 +game, the smooshing and cutting is reversed and the image
   6.114 +appears on the three-dimensional object.
   6.115 +
   6.116 +To make a sense, interpret the UV-image as describing the distribution
    6.117 +of that sense's sensors. To get different types of sensors, you can
   6.118 +either use a different color for each type of sensor, or use multiple
   6.119 +UV-maps, each labeled with that sensor type. I generally use a white
    6.120 +pixel to mean the presence of a sensor and a black pixel to mean the
    6.121 +absence of a sensor, and use one UV-map for each sensor-type within a
   6.122 +given sense.  The paths to the images are not stored as the actual
   6.123 +UV-map of the blender object but are instead referenced in the
   6.124 +meta-data of the node.
   6.125 +
    6.126 +#+CAPTION: The UV-map for an elongated icosphere. The white dots each represent a touch sensor. They are dense in the regions that describe the tip of the finger, and less dense along the dorsal side of the finger opposite the tip.
   6.127 +#+ATTR_HTML: width="300"
   6.128 +[[../images/finger-UV.png]]
   6.129 +
   6.130 +#+CAPTION: Ventral side of the UV-mapped finger. Notice the density of touch sensors at the tip.
   6.131 +#+ATTR_HTML: width="300"
   6.132 +[[../images/finger-1.png]]
   6.133 +
   6.134 +#+CAPTION: Side view of the UV-mapped finger.
   6.135 +#+ATTR_HTML: width="300"
   6.136 +[[../images/finger-2.png]]
   6.137 +
    6.138 +#+CAPTION: Head-on view of the finger. In both the head-on and side views you can see the divide where the touch-sensors transition from high density to low density.
   6.139 +#+ATTR_HTML: width="300"
   6.140 +[[../images/finger-3.png]]
   6.141 +
   6.142 +The following code loads images and gets the locations of the white
   6.143 +pixels so that they can be used to create senses. =(load-image)= finds
   6.144 +images using jMonkeyEngine's asset-manager, so the image path is
   6.145 +expected to be relative to the =assets= directory.  Thanks to Dylan
    6.146 +for the beautiful version of =filter-pixels=.
   6.147 +
   6.148 +#+name: topology-1
   6.149  #+begin_src clojure
   6.150  (defn load-image
   6.151    "Load an image as a BufferedImage using the asset-manager system." 
   6.152 @@ -66,7 +129,8 @@
   6.153  
   6.154  (defn filter-pixels
   6.155    "List the coordinates of all pixels matching pred, within the bounds
   6.156 -   provided.
   6.157 +   provided. If bounds are not specified then the entire image is
   6.158 +   searched.
   6.159     bounds -> [x0 y0 width height]"
   6.160    {:author "Dylan Holmes"}
   6.161    ([pred #^BufferedImage image]
   6.162 @@ -87,30 +151,20 @@
   6.163       (filter-pixels white? image bounds))
   6.164    ([#^BufferedImage image]
   6.165       (filter-pixels white? image)))
   6.166 +#+end_src
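          +
          +For example, a few of the touch-sensor locations on the finger's
          +UV-map could be listed like this (the asset path and the resulting
          +coordinates are hypothetical):
          +
          +#+begin_src clojure
          +(comment
          +  (let [image (load-image "Models/finger/touch-uv.png")]
          +    (take 3 (white-coordinates image)))
          +  ;; => ([37 212] [38 212] [39 212])
          +  )
          +#+end_src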
   6.167  
   6.168 -(defn points->image
   6.169 -  "Take a sparse collection of points and visuliaze it as a
   6.170 -   BufferedImage."
   6.171 -  [points]
   6.172 -  (if (empty? points)
   6.173 -    (BufferedImage. 1 1 BufferedImage/TYPE_BYTE_BINARY)
   6.174 -    (let [xs (vec (map first points))
   6.175 -          ys (vec (map second points))
   6.176 -          x0 (apply min xs)
   6.177 -          y0 (apply min ys)
   6.178 -          width (- (apply max xs) x0)
   6.179 -          height (- (apply max ys) y0)
   6.180 -          image (BufferedImage. (inc width) (inc height)
   6.181 -                                BufferedImage/TYPE_INT_RGB)]
   6.182 -      (dorun
   6.183 -       (for [x (range (.getWidth image))
   6.184 -             y (range (.getHeight image))]
   6.185 -         (.setRGB image x y 0xFF0000)))
   6.186 -      (dorun 
   6.187 -       (for [index (range (count points))]
   6.188 -         (.setRGB image (- (xs index) x0) (- (ys index) y0) -1)))
   6.189 -      image)))
   6.190 +** Topology
   6.191  
   6.192 +Information from the senses is transmitted to the brain via bundles of
   6.193 +axons, whether it be the optic nerve or the spinal cord. While these
    6.194 +bundles more or less preserve the overall topology of a sense's
    6.195 +two-dimensional surface, they do not preserve the precise Euclidean
    6.196 +distances between every sensor. =(collapse)= is here to smoosh the
    6.197 +sensors described by a UV-map into a contiguous region that still
    6.198 +preserves the topology of the original sense.
   6.199 +
   6.200 +#+name: topology-2
   6.201 +#+begin_src clojure
   6.202  (defn average [coll]
   6.203    (/ (reduce + coll) (count coll)))
   6.204  
   6.205 @@ -161,26 +215,139 @@
   6.206                       (- y min-y)])
   6.207                    squeezed))]
   6.208          relocated)))
   6.209 +#+end_src
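          +
          +The exact output depends on the implementation, but the intent is
          +roughly this (hypothetical numbers): widely separated sensor
          +coordinates are squeezed toward the origin while their relative
          +arrangement is preserved.
          +
          +#+begin_src clojure
          +(comment
          +  (collapse [[10 10] [10 20] [100 10] [100 20]])
          +  ;; => something like ([0 0] [0 1] [1 0] [1 1])
          +  )
          +#+end_src
          +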
   6.210 +* Viewing Sense Data
   6.211  
   6.212 +It's vital to /see/ the sense data to make sure that everything is
   6.213 +behaving as it should. =(view-sense)= is here so that each sense can
   6.214 +define its own way of turning sense-data into pictures, while the
   6.215 +actual rendering of said pictures stays in one central place.
   6.216 +=(points->image)= helps senses generate a base image onto which they
   6.217 +can overlay actual sense data.
   6.218 +
    6.219 +#+name: view-senses
   6.220 +#+begin_src clojure
   6.221 +(defn view-sense 
   6.222 +  "Take a kernel that produces a BufferedImage from some sense data
   6.223 +   and return a function which takes a list of sense data, uses the
   6.224 +   kernel to convert to images, and displays those images, each in
   6.225 +   its own JFrame."
   6.226 +  [sense-display-kernel]
   6.227 +  (let [windows (atom [])]
   6.228 +    (fn [data]
   6.229 +      (if (> (count data) (count @windows))
   6.230 +        (reset! 
   6.231 +         windows (map (fn [_] (view-image)) (range (count data)))))
   6.232 +      (dorun
   6.233 +       (map
   6.234 +        (fn [display datum]
   6.235 +          (display (sense-display-kernel datum)))
   6.236 +        @windows data)))))
   6.237 +
   6.238 +(defn points->image
   6.239 +  "Take a collection of points and visuliaze it as a BufferedImage."
   6.240 +  [points]
   6.241 +  (if (empty? points)
   6.242 +    (BufferedImage. 1 1 BufferedImage/TYPE_BYTE_BINARY)
   6.243 +    (let [xs (vec (map first points))
   6.244 +          ys (vec (map second points))
   6.245 +          x0 (apply min xs)
   6.246 +          y0 (apply min ys)
   6.247 +          width (- (apply max xs) x0)
   6.248 +          height (- (apply max ys) y0)
   6.249 +          image (BufferedImage. (inc width) (inc height)
   6.250 +                                BufferedImage/TYPE_INT_RGB)]
   6.251 +      (dorun
   6.252 +       (for [x (range (.getWidth image))
   6.253 +             y (range (.getHeight image))]
   6.254 +         (.setRGB image x y 0xFF0000)))
   6.255 +      (dorun 
   6.256 +       (for [index (range (count points))]
   6.257 +         (.setRGB image (- (xs index) x0) (- (ys index) y0) -1)))
   6.258 +      image)))
   6.259 +
   6.260 +(defn gray
   6.261 +  "Create a gray RGB pixel with R, G, and B set to num. num must be
   6.262 +   between 0 and 255."
   6.263 +  [num]
   6.264 +  (+ num
   6.265 +     (bit-shift-left num 8)
   6.266 +     (bit-shift-left num 16)))
   6.267  #+end_src
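          +
          +Here is a sketch (with made-up data) of how these pieces fit
          +together: each element of the data list is a collection of [x y]
          +points and gets its own window.
          +
          +#+begin_src clojure
          +(comment
          +  (def view-points (view-sense points->image))
          +  (view-points [[[0 0] [2 3] [5 5]]
          +                [[1 1] [4 2]]])
          +  ;; (gray 128) => 0x808080, handy for shading such images by intensity
          +  )
          +#+end_src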
   6.268  
   6.269 -* Node level stuff
   6.270 -#+name: node
   6.271 +* Building a Sense from Nodes
   6.272 +My method for defining senses in blender is the following:
   6.273 +
   6.274 +Senses like vision and hearing are localized to a single point
   6.275 +and follow a particular object around.  For these:
   6.276 +
   6.277 + - Create a single top-level empty node whose name is the name of the sense
   6.278 + - Add empty nodes which each contain meta-data relevant
   6.279 +   to the sense, including a UV-map describing the number/distribution
    6.280 +   of sensors if applicable.
   6.281 + - Make each empty-node the child of the top-level
   6.282 +   node. =(sense-nodes)= below generates functions to find these children.
   6.283 +
   6.284 +For touch, store the path to the UV-map which describes touch-sensors in the
   6.285 +meta-data of the object to which that map applies.
   6.286 +
   6.287 +Each sense provides code that analyzes the Node structure of the
    6.288 +creature and creates sense-functions. This code may also modify the
    6.289 +Node structure if necessary.
   6.290 +
   6.291 +Empty nodes created in blender have no appearance or physical presence
   6.292 +in jMonkeyEngine, but do appear in the scene graph. Empty nodes that
   6.293 +represent a sense which "follows" another geometry (like eyes and
   6.294 +ears) follow the closest physical object.  =(closest-node)= finds this
   6.295 +closest object given the Creature and a particular empty node.
   6.296 +
   6.297 +#+name: node-1
   6.298  #+begin_src clojure
   6.299 +(defn sense-nodes
   6.300 +  "For some senses there is a special empty blender node whose
   6.301 +   children are considered markers for an instance of that sense. This
   6.302 +   function generates functions to find those children, given the name
   6.303 +   of the special parent node."
   6.304 +  [parent-name]
   6.305 +  (fn [#^Node creature]
   6.306 +    (if-let [sense-node (.getChild creature parent-name)]
   6.307 +      (seq (.getChildren sense-node))
   6.308 +      (do (println-repl "could not find" parent-name "node") []))))
   6.309 +
   6.310  (defn closest-node
   6.311    "Return the node in creature which is closest to the given node."
   6.312 -  [#^Node creature #^Node eye]
   6.313 +  [#^Node creature #^Node empty]
   6.314    (loop [radius (float 0.01)]
   6.315      (let [results (CollisionResults.)]
   6.316        (.collideWith
   6.317         creature
   6.318 -       (BoundingBox. (.getWorldTranslation eye)
   6.319 +       (BoundingBox. (.getWorldTranslation empty)
   6.320                       radius radius radius)
   6.321         results)
   6.322        (if-let [target (first results)]
   6.323          (.getGeometry target)
   6.324          (recur (float (* 2 radius)))))))
   6.325  
   6.326 +(defn world-to-local
   6.327 +  "Convert the world coordinates into coordinates relative to the 
   6.328 +   object (i.e. local coordinates), taking into account the rotation
   6.329 +   of object."
   6.330 +  [#^Spatial object world-coordinate]
   6.331 +  (.worldToLocal object world-coordinate nil))
   6.332 +
   6.333 +(defn local-to-world
   6.334 +  "Convert the local coordinates into world relative coordinates"
   6.335 +  [#^Spatial object local-coordinate]
   6.336 +  (.localToWorld object local-coordinate nil))
   6.337 +#+end_src
   6.338 +
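          +For example, a hand-built stand-in for a blender-imported creature
          +shows what =(sense-nodes)= expects (the node names are only
          +illustrative):
          +
          +#+begin_src clojure
          +(comment
          +  (def eyes (sense-nodes "eyes"))
          +  (let [creature (Node. "creature")
          +        eye-markers (Node. "eyes")
          +        eye (Node. "eye-1")]
          +    (.attachChild eye-markers eye)
          +    (.attachChild creature eye-markers)
          +    (eyes creature))
          +  ;; => a seq containing the "eye-1" node;
          +  ;; (closest-node creature eye) would then find the nearest geometry.
          +  )
          +#+end_src
          +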
   6.339 +=(bind-sense)= binds either a Camera or a Listener object to any
    6.340 +object so that the sense will follow that object no matter how it
   6.341 +moves. Here is some example code which shows a camera bound to a blue
   6.342 +box as it is buffeted by white cannonballs.
   6.343 +
   6.344 +#+name: node-2
   6.345 +#+begin_src clojure
   6.346  (defn bind-sense
   6.347    "Bind the sense to the Spatial such that it will maintain its
   6.348     current position relative to the Spatial no matter how the spatial
   6.349 @@ -205,65 +372,41 @@
   6.350              sense
   6.351              (.mult total-rotation initial-sense-rotation))))
   6.352         (controlRender [_ _])))))
   6.353 -
   6.354 -(defn world-to-local
   6.355 -  "Convert the world coordinates into coordinates relative to the 
   6.356 -   object (i.e. local coordinates), taking into account the rotation
   6.357 -   of object."
   6.358 -  [#^Spatial object world-coordinate]
   6.359 -  (.worldToLocal object world-coordinate nil))
   6.360 -
   6.361 -(defn local-to-world
   6.362 -  "Convert the local coordinates into world relative coordinates"
   6.363 -  [#^Spatial object local-coordinate]
   6.364 -  (.localToWorld object local-coordinate nil))
   6.365 -
   6.366 -(defn sense-nodes
   6.367 -  "For each sense there is a special blender node whose children are
   6.368 -   considered markers for an instance of a that sense. This function
   6.369 -   generates functions to find those children, given the name of the
   6.370 -   special parent node."
   6.371 -  [parent-name]
   6.372 -  (fn [#^Node creature]
   6.373 -    (if-let [sense-node (.getChild creature parent-name)]
   6.374 -      (seq (.getChildren sense-node))
   6.375 -      (do (println-repl "could not find" parent-name "node") []))))
   6.376  #+end_src
   6.377  
   6.378 -* Viewing Senses
   6.379 -#+name view-senses
   6.380 +
   6.381 +
   6.382 +* Bookkeeping
    6.383 +Here is the header for this namespace, included for completeness.
    6.384 +#+name: header
   6.385  #+begin_src clojure
   6.386 -(defn view-sense 
   6.387 -  "Take a kernel that produces a BufferedImage from some sense data
   6.388 -   and return a function which takes a list of sense data, uses the
   6.389 -   kernem to convert to images, and displays those images, each in
   6.390 -   its own JFrame."
   6.391 -  [sense-display-kernel]
   6.392 -  (let [windows (atom [])]
   6.393 -    (fn [data]
   6.394 -      (if (> (count data) (count @windows))
   6.395 -        (reset! 
   6.396 -         windows (map (fn [_] (view-image)) (range (count data)))))
   6.397 -      (dorun
   6.398 -       (map
   6.399 -        (fn [display datum]
   6.400 -          (display (sense-display-kernel datum)))
   6.401 -        @windows data)))))
   6.402 +(ns cortex.sense
   6.403 +  "Here are functions useful in the construction of two or more
   6.404 +   sensors/effectors."
    6.405 +  {:author "Robert McIntyre"}
   6.406 +  (:use (cortex world util))
   6.407 +  (:import ij.process.ImageProcessor)
   6.408 +  (:import jme3tools.converters.ImageToAwt)
   6.409 +  (:import java.awt.image.BufferedImage)
   6.410 +  (:import com.jme3.collision.CollisionResults)
   6.411 +  (:import com.jme3.bounding.BoundingBox)
   6.412 +  (:import (com.jme3.scene Node Spatial))
   6.413 +  (:import com.jme3.scene.control.AbstractControl)
   6.414 +  (:import (com.jme3.math Quaternion Vector3f)))
   6.415 +#+end_src
   6.416  
   6.417 -(defn gray
   6.418 -  "Create a gray RGB pixel with R, G, and B set to num. num must be
   6.419 -   between 0 and 255."
   6.420 -  [num]
   6.421 -  (+ num
   6.422 -     (bit-shift-left num 8)
   6.423 -     (bit-shift-left num 16)))
   6.424 -#+end_src
   6.425 +* Source Listing
   6.426 +  Full source: [[../src/cortex/sense.clj][sense.clj]]
   6.427 +
   6.428  
   6.429  * COMMENT generate source
   6.430  #+begin_src clojure :tangle ../src/cortex/sense.clj
   6.431  <<header>>
   6.432 -<<blender>>
   6.433 -<<topology>>
   6.434 -<<node>>
   6.435 +<<blender-1>>
   6.436 +<<blender-2>>
   6.437 +<<topology-1>>
   6.438 +<<topology-2>>
   6.439 +<<node-1>>
   6.440 +<<node-2>>
   6.441  <<view-senses>>
   6.442  #+end_src
     7.1 --- a/org/util.org	Sun Feb 05 06:55:41 2012 -0700
     7.2 +++ b/org/util.org	Sun Feb 05 14:01:47 2012 -0700
     7.3 @@ -512,7 +512,8 @@
     7.4      (fn [#^BufferedImage i]
     7.5        (reset! image i)
     7.6        (.setSize frame (+ 8 (.getWidth i)) (+ 28 (.getHeight i)))
     7.7 -      (.repaint panel 0 0 (.getWidth i) (.getHeight i)))))
     7.8 +      (.repaint panel 0 0 (.getWidth i) (.getHeight i))
     7.9 +      i)))
    7.10  
    7.11  (defprotocol Viewable
    7.12    (view [something]))