# HG changeset patch
# User Robert McIntyre
# Date 1328475707 25200
# Node ID fc0bf33bded24a654a5ad7b530d99d2c84b1445b
# Parent  16cbce075a0b43a18e084f88615689c3f52f3894
fleshing out prose in sense.org

diff -r 16cbce075a0b -r fc0bf33bded2 images/finger-1.png
Binary file images/finger-1.png has changed
diff -r 16cbce075a0b -r fc0bf33bded2 images/finger-2.png
Binary file images/finger-2.png has changed
diff -r 16cbce075a0b -r fc0bf33bded2 images/finger-3.png
Binary file images/finger-3.png has changed
diff -r 16cbce075a0b -r fc0bf33bded2 images/finger-UV.png
Binary file images/finger-UV.png has changed
diff -r 16cbce075a0b -r fc0bf33bded2 images/whatever.png
Binary file images/whatever.png has changed
diff -r 16cbce075a0b -r fc0bf33bded2 org/sense.org
--- a/org/sense.org	Sun Feb 05 06:55:41 2012 -0700
+++ b/org/sense.org	Sun Feb 05 14:01:47 2012 -0700
@@ -1,4 +1,4 @@
-#+title: General Sense/Effector Utilities
+#+title:
 #+author: Robert McIntyre
 #+email: rlm@mit.edu
 #+description: sensory utilities
@@ -6,51 +6,114 @@
 #+SETUPFILE: ../../aurellem/org/setup.org
 #+INCLUDE: ../../aurellem/org/level-0.org
-* Namespace/Imports
-#+name header
-#+begin_src clojure
-(ns cortex.sense
-  "Here are functions useful in the construction of two or more
-   sensors/effectors."
-  {:author "Robert McInytre"}
-  (:use (cortex world util))
-  (:import ij.process.ImageProcessor)
-  (:import jme3tools.converters.ImageToAwt)
-  (:import java.awt.image.BufferedImage)
-  (:import com.jme3.collision.CollisionResults)
-  (:import com.jme3.bounding.BoundingBox)
-  (:import (com.jme3.scene Node Spatial))
-  (:import com.jme3.scene.control.AbstractControl)
-  (:import (com.jme3.math Quaternion Vector3f)))
-#+end_src
 * Blender Utilities
-#+name: blender
+In Blender, any object can be assigned an arbitrary number of
+key-value pairs which are called "Custom Properties". These are
+accessible in jMonkeyEngine when Blender files are imported with the
+=BlenderLoader=. =(meta-data)= extracts these properties.
+
+#+name: blender-1
 #+begin_src clojure
 (defn meta-data
   "Get the meta-data for a node created with blender."
   [blender-node key]
   (if-let [data (.getUserData blender-node "properties")]
-    (.findValue data key)
-    nil))
+    (.findValue data key) nil))
+#+end_src
+
+Blender uses a different coordinate system than jMonkeyEngine, so it
+is useful to be able to convert between the two. These conversions
+only come into play when the meta-data of a node refers to a vector
+in the Blender coordinate system.
+
+#+name: blender-2
+#+begin_src clojure
 (defn jme-to-blender
   "Convert from JME coordinates to Blender coordinates"
   [#^Vector3f in]
-  (Vector3f. (.getX in)
-             (- (.getZ in))
-             (.getY in)))
+  (Vector3f. (.getX in) (- (.getZ in)) (.getY in)))
 
 (defn blender-to-jme
   "Convert from Blender coordinates to JME coordinates"
   [#^Vector3f in]
-  (Vector3f. (.getX in)
-             (.getZ in)
-             (- (.getY in))))
+  (Vector3f. (.getX in) (.getZ in) (- (.getY in))))
 #+end_src
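+
+As a quick sanity check of the round trip (a REPL sketch; the results
+assume only the two definitions above): JME's Y axis becomes Blender's
+Z axis, and JME's Z axis becomes Blender's negative Y axis.
+
+#+begin_src clojure
+(comment
+  (jme-to-blender (Vector3f. 1 2 3))
+  ;; => #<Vector3f (1.0, -3.0, 2.0)>
+  (blender-to-jme (jme-to-blender (Vector3f. 1 2 3)))
+  ;; => #<Vector3f (1.0, 2.0, 3.0)>
+  )
+#+end_src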
-* Topology Related stuff
-#+name: topology
+* Sense Topology
+
+Human beings are three-dimensional objects, and the nerves that
+transmit data from our various sense organs to our brain are
+essentially one-dimensional. This leaves up to two dimensions in
+which our sensory information may flow. For example, imagine your
+skin: it is a two-dimensional surface around a three-dimensional
+object (your body). It has discrete touch sensors embedded at various
+points, and the density of these sensors corresponds to the
+sensitivity of that region of skin. Each touch sensor connects to a
+nerve, all of which eventually are bundled together as they travel up
+the spinal cord to the brain. Intersect the spinal nerves with a
+guillotining plane and you will see all of the sensory data of the
+skin revealed in a roughly circular two-dimensional image which is
+the cross section of the spinal cord. Points on this image that are
+close together in this circle represent touch sensors that are
+/probably/ close together on the skin, although there is of course
+some cutting and rearrangement that has to be done to transfer the
+complicated surface of the skin onto a two-dimensional image.
+
+Most human senses consist of many discrete sensors of various
+properties distributed along a surface at various densities. For
+skin, it is Pacinian corpuscles, Meissner's corpuscles, Merkel's
+disks, and Ruffini's endings, which detect pressure and vibration of
+various intensities. For ears, it is the stereocilia distributed
+along the basilar membrane inside the cochlea; each one is sensitive
+to a slightly different frequency of sound. For eyes, it is rods and
+cones distributed along the surface of the retina. In each case, we
+can describe the sense with a surface and a distribution of sensors
+along that surface.
+
+** UV-maps
+
+Blender and jMonkeyEngine already have support for exactly this sort
+of data structure because it is used to "skin" models for games. It
+is called [[http://wiki.blender.org/index.php/Doc:2.6/Manual/Textures/Mapping/UV][UV-mapping]]. The three-dimensional surface is cut and
+smooshed until it fits on a two-dimensional image. You paint whatever
+you want on that image, and when the three-dimensional shape is
+rendered in a game the smooshing and cutting is reversed and the
+image appears on the three-dimensional object.
+
+To make a sense, interpret the UV-image as describing the
+distribution of that sense's sensors. To get different types of
+sensors, you can either use a different color for each type of
+sensor, or use multiple UV-maps, each labeled with that sensor
+type. I generally use a white pixel to mean the presence of a sensor
+and a black pixel to mean the absence of a sensor, and use one UV-map
+for each sensor-type within a given sense. The paths to the images
+are not stored as the actual UV-map of the Blender object but are
+instead referenced in the meta-data of the node.
+
+#+CAPTION: The UV-map for an elongated icosphere. The white dots each represent a touch sensor. They are dense in the regions that describe the tip of the finger, and less dense along the dorsal side of the finger opposite the tip.
+#+ATTR_HTML: width="300"
+[[../images/finger-UV.png]]
+
+#+CAPTION: Ventral side of the UV-mapped finger. Notice the density of touch sensors at the tip.
+#+ATTR_HTML: width="300"
+[[../images/finger-1.png]]
+
+#+CAPTION: Side view of the UV-mapped finger.
+#+ATTR_HTML: width="300"
+[[../images/finger-2.png]]
+
+#+CAPTION: Head-on view of the finger. In both the head and side views you can see the divide where the touch sensors transition from high density to low density.
+#+ATTR_HTML: width="300"
+[[../images/finger-3.png]]
+
+The following code loads images and gets the locations of the white
+pixels so that they can be used to create senses. =(load-image)= finds
+images using jMonkeyEngine's asset-manager, so the image path is
+expected to be relative to the =assets= directory. Thanks to Dylan
+for the beautiful version of =filter-pixels=.
+
+#+name: topology-1
 #+begin_src clojure
 (defn load-image
   "Load an image as a BufferedImage using the asset-manager system."
@@ -66,7 +129,8 @@
 (defn filter-pixels
   "List the coordinates of all pixels matching pred, within the bounds
-   provided.
+   provided. If bounds are not specified then the entire image is
+   searched.
    bounds -> [x0 y0 width height]"
   {:author "Dylan Holmes"}
   ([pred #^BufferedImage image]
@@ -87,30 +151,20 @@
     (filter-pixels white? image bounds))
   ([#^BufferedImage image]
     (filter-pixels white? image)))
+#+end_src
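+
+As a usage sketch (the image path below is hypothetical), collecting
+the touch-sensor locations from a finger's UV-map would look
+something like this:
+
+#+begin_src clojure
+(comment
+  ;; every white pixel in the UV-map becomes an [x y] coordinate
+  (white-coordinates (load-image "Models/test-touch/finger-UV.png"))
+  )
+#+end_src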
-
-(defn points->image
-  "Take a sparse collection of points and visuliaze it as a
-   BufferedImage."
-  [points]
-  (if (empty? points)
-    (BufferedImage. 1 1 BufferedImage/TYPE_BYTE_BINARY)
-    (let [xs (vec (map first points))
-          ys (vec (map second points))
-          x0 (apply min xs)
-          y0 (apply min ys)
-          width (- (apply max xs) x0)
-          height (- (apply max ys) y0)
-          image (BufferedImage. (inc width) (inc height)
-                                BufferedImage/TYPE_INT_RGB)]
-      (dorun
-       (for [x (range (.getWidth image))
-             y (range (.getHeight image))]
-         (.setRGB image x y 0xFF0000)))
-      (dorun
-       (for [index (range (count points))]
-         (.setRGB image (- (xs index) x0) (- (ys index) y0) -1)))
-      image)))
+** Topology
+Information from the senses is transmitted to the brain via bundles
+of axons, whether it be the optic nerve or the spinal cord. While
+these bundles more or less preserve the overall topology of a sense's
+two-dimensional surface, they do not preserve the precise Euclidean
+distances between every sensor. =(collapse)= is here to smoosh the
+sensors described by a UV-map into a contiguous region that still
+preserves the topology of the original sense.
+
+#+name: topology-2
+#+begin_src clojure
 (defn average [coll] (/ (reduce + coll) (count coll)))
@@ -161,26 +215,139 @@
               (- y min-y)])
            squeezed))]
     relocated)))
+#+end_src
+* Viewing Sense Data
+It's vital to /see/ the sense data to make sure that everything is
+behaving as it should. =(view-sense)= is here so that each sense can
+define its own way of turning sense-data into pictures, while the
+actual rendering of said pictures stays in one central place.
+=(points->image)= helps senses generate a base image onto which they
+can overlay actual sense data.
+
+#+name: view-senses
+#+begin_src clojure
+(defn view-sense
+  "Take a kernel that produces a BufferedImage from some sense data
+   and return a function which takes a list of sense data, uses the
+   kernel to convert to images, and displays those images, each in
+   its own JFrame."
+  [sense-display-kernel]
+  (let [windows (atom [])]
+    (fn [data]
+      (if (> (count data) (count @windows))
+        (reset!
+         windows (map (fn [_] (view-image)) (range (count data)))))
+      (dorun
+       (map
+        (fn [display datum]
+          (display (sense-display-kernel datum)))
+        @windows data)))))
+
+(defn points->image
+  "Take a collection of points and visualize it as a BufferedImage."
+  [points]
+  (if (empty? points)
+    (BufferedImage. 1 1 BufferedImage/TYPE_BYTE_BINARY)
+    (let [xs (vec (map first points))
+          ys (vec (map second points))
+          x0 (apply min xs)
+          y0 (apply min ys)
+          width (- (apply max xs) x0)
+          height (- (apply max ys) y0)
+          image (BufferedImage. (inc width) (inc height)
+                                BufferedImage/TYPE_INT_RGB)]
+      (dorun
+       (for [x (range (.getWidth image))
+             y (range (.getHeight image))]
+         (.setRGB image x y 0xFF0000)))
+      (dorun
+       (for [index (range (count points))]
+         (.setRGB image (- (xs index) x0) (- (ys index) y0) -1)))
+      image)))
+
+(defn gray
+  "Create a gray RGB pixel with R, G, and B set to num. num must be
+   between 0 and 255."
+  [num]
+  (+ num
+     (bit-shift-left num 8)
+     (bit-shift-left num 16)))
 #+end_src
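+
+For example, =(gray 255)= is white (=0xFFFFFF=) and =(gray 0)= is
+black. A sense can plug its own display kernel into =(view-sense)=;
+the snippet below (illustrative only) views lists of point
+collections by reusing =(points->image)= as the kernel:
+
+#+begin_src clojure
+(comment
+  (def view-points (view-sense points->image))
+  ;; one JFrame per collection of [x y] points
+  (view-points [[[0 0] [1 1] [2 2]]])
+  )
+#+end_src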
-* Node level stuff
-#+name: node
+* Building a Sense from Nodes
+My method for defining senses in Blender is the following:
+
+Senses like vision and hearing are localized to a single point and
+follow a particular object around. For these:
+
+ - Create a single top-level empty node whose name is the name of
+   the sense.
+ - Add empty nodes which each contain meta-data relevant to the
+   sense, including a UV-map describing the number/distribution of
+   sensors if applicable.
+ - Make each empty node the child of the top-level node.
+   =(sense-nodes)= below generates functions to find these children.
+
+For touch, store the path to the UV-map which describes touch-sensors
+in the meta-data of the object to which that map applies.
+
+Each sense provides code that analyzes the Node structure of the
+creature and creates sense-functions. They also modify the Node
+structure if necessary.
+
+Empty nodes created in Blender have no appearance or physical
+presence in jMonkeyEngine, but they do appear in the scene
+graph. Empty nodes that represent a sense which "follows" another
+geometry (like eyes and ears) follow the closest physical
+object. =(closest-node)= finds this closest object given the Creature
+and a particular empty node.
+
+#+name: node-1
 #+begin_src clojure
+(defn sense-nodes
+  "For some senses there is a special empty blender node whose
+   children are considered markers for an instance of that sense. This
+   function generates functions to find those children, given the name
+   of the special parent node."
+  [parent-name]
+  (fn [#^Node creature]
+    (if-let [sense-node (.getChild creature parent-name)]
+      (seq (.getChildren sense-node))
+      (do (println-repl "could not find" parent-name "node") []))))
+
 (defn closest-node
   "Return the node in creature which is closest to the given node."
-  [#^Node creature #^Node eye]
+  [#^Node creature #^Node empty]
   (loop [radius (float 0.01)]
     (let [results (CollisionResults.)]
       (.collideWith
        creature
-       (BoundingBox. (.getWorldTranslation eye)
+       (BoundingBox. (.getWorldTranslation empty)
                      radius radius radius)
        results)
       (if-let [target (first results)]
        (.getGeometry target)
        (recur (float (* 2 radius)))))))
+
+(defn world-to-local
+  "Convert the world coordinates into coordinates relative to the
+   object (i.e. local coordinates), taking into account the rotation
+   of object."
+  [#^Spatial object world-coordinate]
+  (.worldToLocal object world-coordinate nil))
+
+(defn local-to-world
+  "Convert the local coordinates into world relative coordinates"
+  [#^Spatial object local-coordinate]
+  (.localToWorld object local-coordinate nil))
+#+end_src
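+
+For example, a sense can build its own node-finder from
+=(sense-nodes)=; the node name "eyes" below is only illustrative:
+
+#+begin_src clojure
+(comment
+  ;; returns a function that finds all children of a creature's
+  ;; top-level "eyes" node (or [] if there is no such node)
+  (def eye-nodes (sense-nodes "eyes"))
+  (eye-nodes creature)
+  )
+#+end_src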
+
+=(bind-sense)= binds either a Camera or a Listener object to any
+object so that it will follow that object no matter how the object
+moves. Here is some example code which shows a camera bound to a blue
+box as it is buffeted by white cannonballs.
+
+#+name: node-2
+#+begin_src clojure
 (defn bind-sense
   "Bind the sense to the Spatial such that it will maintain its
    current position relative to the Spatial no matter how the spatial
@@ -205,65 +372,41 @@
         sense
         (.mult total-rotation initial-sense-rotation))))
       (controlRender [_ _])))))
-
-(defn world-to-local
-  "Convert the world coordinates into coordinates relative to the
-   object (i.e. local coordinates), taking into account the rotation
-   of object."
-  [#^Spatial object world-coordinate]
-  (.worldToLocal object world-coordinate nil))
-
-(defn local-to-world
-  "Convert the local coordinates into world relative coordinates"
-  [#^Spatial object local-coordinate]
-  (.localToWorld object local-coordinate nil))
-
-(defn sense-nodes
-  "For each sense there is a special blender node whose children are
-   considered markers for an instance of a that sense. This function
-   generates functions to find those children, given the name of the
-   special parent node."
-  [parent-name]
-  (fn [#^Node creature]
-    (if-let [sense-node (.getChild creature parent-name)]
-      (seq (.getChildren sense-node))
-      (do (println-repl "could not find" parent-name "node") []))))
 #+end_src
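+
+As a sketch of the intended use (the argument order
+=(bind-sense object sense)= and the names below are assumptions, not
+confirmed by the code shown here):
+
+#+begin_src clojure
+(comment
+  ;; `box` is some physics-driven Spatial and `cam` is a
+  ;; com.jme3.renderer.Camera; after this call, `cam` keeps its
+  ;; current offset and orientation relative to `box` as it moves.
+  (bind-sense box cam)
+  )
+#+end_src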
-* Viewing Senses
-#+name view-senses
+
+
+* Bookkeeping
+Here is the header for this namespace, included for completeness.
+#+name: header
 #+begin_src clojure
-(defn view-sense
-  "Take a kernel that produces a BufferedImage from some sense data
-   and return a function which takes a list of sense data, uses the
-   kernem to convert to images, and displays those images, each in
-   its own JFrame."
-  [sense-display-kernel]
-  (let [windows (atom [])]
-    (fn [data]
-      (if (> (count data) (count @windows))
-        (reset!
-         windows (map (fn [_] (view-image)) (range (count data)))))
-      (dorun
-       (map
-        (fn [display datum]
-          (display (sense-display-kernel datum)))
-        @windows data)))))
+(ns cortex.sense
+  "Here are functions useful in the construction of two or more
+   sensors/effectors."
+  {:author "Robert McIntyre"}
+  (:use (cortex world util))
+  (:import ij.process.ImageProcessor)
+  (:import jme3tools.converters.ImageToAwt)
+  (:import java.awt.image.BufferedImage)
+  (:import com.jme3.collision.CollisionResults)
+  (:import com.jme3.bounding.BoundingBox)
+  (:import (com.jme3.scene Node Spatial))
+  (:import com.jme3.scene.control.AbstractControl)
+  (:import (com.jme3.math Quaternion Vector3f)))
+#+end_src
-(defn gray
-  "Create a gray RGB pixel with R, G, and B set to num. num must be
-   between 0 and 255."
-  [num]
-  (+ num
-     (bit-shift-left num 8)
-     (bit-shift-left num 16)))
-#+end_src
+* Source Listing
+  Full source: [[../src/cortex/sense.clj][sense.clj]]
+
 * COMMENT generate source
 #+begin_src clojure :tangle ../src/cortex/sense.clj
 <<header>>
-<<blender>>
-<<topology>>
-<<node>>
+<<blender-1>>
+<<blender-2>>
+<<topology-1>>
+<<topology-2>>
+<<node-1>>
+<<node-2>>
 <<view-senses>>
 #+end_src
diff -r 16cbce075a0b -r fc0bf33bded2 org/util.org
--- a/org/util.org	Sun Feb 05 06:55:41 2012 -0700
+++ b/org/util.org	Sun Feb 05 14:01:47 2012 -0700
@@ -512,7 +512,8 @@
      (fn [#^BufferedImage i]
        (reset! image i)
        (.setSize frame (+ 8 (.getWidth i)) (+ 28 (.getHeight i)))
-       (.repaint panel 0 0 (.getWidth i) (.getHeight i)))))
+       (.repaint panel 0 0 (.getWidth i) (.getHeight i))
+       i)))
 
 (defprotocol Viewable
   (view [something]))