# HG changeset patch
# User Robert McIntyre
# Date 1328273538 25200
# Node ID aaacf087504c0561985d3b70ea16a5dcb02e10d2
# Parent  e1232043656a4e92f7d8b406a2112f44e2308d63
refactored vision code

diff -r e1232043656a -r aaacf087504c org/blender.org
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/org/blender.org	Fri Feb 03 05:52:18 2012 -0700
@@ -0,0 +1,19 @@
+#+title: Blender Clojure utilities
+#+author: Robert McIntyre
+#+email: rlm@mit.edu
+#+description: blender utilities
+#+keywords: simulation, jMonkeyEngine3, clojure, blender
+#+SETUPFILE: ../../aurellem/org/setup.org
+#+INCLUDE: ../../aurellem/org/level-0.org
+
+
+#+name: blender
+#+begin_src clojure
+
+#+end_src
+
+
+* COMMENT generate source
+#+begin_src clojure :tangle ../src/cortex/blender.clj
+<<blender>>
+#+end_src
diff -r e1232043656a -r aaacf087504c org/eyes.org
--- a/org/eyes.org	Fri Feb 03 05:08:45 2012 -0700
+++ b/org/eyes.org	Fri Feb 03 05:52:18 2012 -0700
@@ -11,6 +11,47 @@
 I want to make creatures with eyes. Each eye can be independently
 moved and should see its own version of the world depending on where
 it is.
+
+Here's how vision will work.
+
+Make the continuation in scene-processor take FrameBuffer,
+byte-buffer, BufferedImage already sized to the correct
+dimensions. The continuation will decide whether to "mix" them
+into the BufferedImage, lazily ignore them, or mix them halfway
+and call c/graphics card routines.
+
+(vision creature) will take an optional :skip argument which will
+inform the continuations in scene processor to skip the given
+number of cycles; 0 means that no cycles will be skipped.
+
+(vision creature) will return [init-functions sensor-functions].
+The init-functions are each single-arg functions that take the
+world and register the cameras and must each be called before the
+corresponding sensor-functions. Each init-function returns the
+viewport for that eye, which can be manipulated, saved, etc. Each
+sensor-function is a thunk and will return data in the same
+format as the tactile-sensor functions; the structure is
+[topology, sensor-data]. Internally, these sensor-functions
+maintain a reference to sensor-data which is periodically updated
+by the continuation function established by its init-function.
+They can be queried every cycle, but their information may not
+necessarily be different every cycle.
+
+Each eye in the creature in blender will work the same way as
+joints -- a zero dimensional object with no geometry whose local
+coordinate system determines the orientation of the resulting
+eye. All eyes will have a parent named "eyes" just as all joints
+have a parent named "joints". The resulting camera will be a
+ChaseCamera or a CameraNode bound to the geo that is closest to
+the eye marker. The eye marker will contain the metadata for the
+eye, and will be moved by its bound geometry. The dimensions of
+the eye's camera are equal to the dimensions of the eye's "UV"
+map.
+
+
+
+
+
 #+name: eyes
 #+begin_src clojure
 (ns cortex.vision
@@ -18,14 +59,18 @@
 eyes from different positions to observe the same world, and pass
 the observed data to any arbitrary function."
{:author "Robert McIntyre"} - (:use cortex.world) + (:use (cortex world sense util)) (:import com.jme3.post.SceneProcessor) (:import (com.jme3.util BufferUtils Screenshots)) (:import java.nio.ByteBuffer) (:import java.awt.image.BufferedImage) (:import com.jme3.renderer.ViewPort) (:import com.jme3.math.ColorRGBA) - (:import com.jme3.renderer.Renderer)) + (:import com.jme3.renderer.Renderer) + (:import jme3tools.converters.ImageToAwt) + (:import com.jme3.scene.Node)) + +(cortex.import/mega-import-jme3) (defn vision-pipeline @@ -92,10 +137,108 @@ (.setBackgroundColor ColorRGBA/Black) (.addProcessor (vision-pipeline continuation)) (.attachScene (.getRootNode world))))) + +(defn retina-sensor-image + "Return a map of pixel selection functions to BufferedImages + describing the distribution of light-sensitive components on this + geometry's surface. Each function creates an integer from the rgb + values found in the pixel. :red, :green, :blue, :gray are already + defined as extracting the red green blue and average components + respectively." + [#^Spatial eye] + (if-let [eye-map (meta-data eye "eye")] + (map-vals + #(ImageToAwt/convert + (.getImage (.loadTexture (asset-manager) %)) + false false 0) + (eval (read-string eye-map))))) + +(defn eye-dimensions + "returns the width and height specified in the metadata of the eye" + [#^Spatial eye] + (let [dimensions + (map #(vector (.getWidth %) (.getHeight %)) + (vals (retina-sensor-image eye)))] + [(apply max (map first dimensions)) + (apply max (map second dimensions))])) + +(defn creature-eyes + ;;dylan + "Return the children of the creature's \"eyes\" node." + ;;"The eye nodes which are children of the \"eyes\" node in the + ;;creature." + [#^Node creature] + (if-let [eye-node (.getChild creature "eyes")] + (seq (.getChildren eye-node)) + (do (println-repl "could not find eyes node") []))) + + +(defn attach-eye + "Attach a Camera to the appropiate area and return the Camera." + [#^Node creature #^Spatial eye] + (let [target (closest-node creature eye) + [cam-width cam-height] (eye-dimensions eye) + cam (Camera. cam-width cam-height)] + (.setLocation cam (.getWorldTranslation eye)) + (.setRotation cam (.getWorldRotation eye)) + (.setFrustumPerspective + cam 45 (/ (.getWidth cam) (.getHeight cam)) + 1 1000) + (bind-sense target cam) + cam)) + +(def presets + {:all 0xFFFFFF + :red 0xFF0000 + :blue 0x0000FF + :green 0x00FF00}) + +(defn enable-vision + "return [init-function sensor-functions] for a particular eye" + [#^Node creature #^Spatial eye & {skip :skip :or {skip 0}}] + (let [retinal-map (retina-sensor-image eye) + camera (attach-eye creature eye) + vision-image + (atom + (BufferedImage. (.getWidth camera) + (.getHeight camera) + BufferedImage/TYPE_BYTE_BINARY))] + [(fn [world] + (add-eye + world camera + (let [counter (atom 0)] + (fn [r fb bb bi] + (if (zero? (rem (swap! counter inc) (inc skip))) + (reset! vision-image (BufferedImage! 
+     (vec
+      (map
+       (fn [[key image]]
+         (let [whites (white-coordinates image)
+               topology (vec (collapse whites))
+               mask (presets key)]
+           (fn []
+             (vector
+              topology
+              (vec
+               (for [[x y] whites]
+                 (bit-and
+                  mask (.getRGB @vision-image x y))))))))
+       retinal-map))]))
+
+(defn vision
+  [#^Node creature & {skip :skip :or {skip 0}}]
+  (reduce
+   (fn [[init-a senses-a]
+        [init-b senses-b]]
+     [(conj init-a init-b)
+      (into senses-a senses-b)])
+   [[][]]
+   (for [eye (creature-eyes creature)]
+     (enable-vision creature eye))))
+
+
 #+end_src

-#+results: eyes
-: #'cortex.vision/add-eye

 Note the use of continuation passing style for connecting the eye to a
 function to process the output. You can create any number of eyes, and
diff -r e1232043656a -r aaacf087504c org/sense-util.org
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/org/sense-util.org	Fri Feb 03 05:52:18 2012 -0700
@@ -0,0 +1,174 @@
+#+title: General sense/effector utilities
+#+author: Robert McIntyre
+#+email: rlm@mit.edu
+#+description: sensory utilities
+#+keywords: simulation, jMonkeyEngine3, clojure, simulated senses
+#+SETUPFILE: ../../aurellem/org/setup.org
+#+INCLUDE: ../../aurellem/org/level-0.org
+
+
+#+name: sense-util
+#+begin_src clojure
+(ns cortex.sense)
+(cortex.import/mega-import-jme3)
+(import ij.process.ImageProcessor)
+(import java.awt.image.BufferedImage)
+
+
+(defn meta-data [blender-node key]
+  (if-let [data (.getUserData blender-node "properties")]
+    (.findValue data key)
+    nil))
+
+(defn closest-node
+  "Return the object in creature which is closest to the given node."
+  ;;dylan"The closest object in creature to the given node."
+  [#^Node creature #^Node eye]
+  (loop [radius (float 0.01)]
+    (let [results (CollisionResults.)]
+      (.collideWith
+       creature
+       (BoundingBox. (.getWorldTranslation eye)
+                     radius radius radius)
+       results)
+      (if-let [target (first results)]
+        (.getGeometry target)
+        (recur (float (* 2 radius)))))))
+
+(defn bind-sense
+  "Bind the sense to the Spatial such that it will maintain its
+   current position relative to the Spatial no matter how the spatial
+   moves. 'sense can be either a Camera or Listener object."
+  [#^Spatial obj sense]
+  (let [sense-offset (.subtract (.getLocation sense)
+                                (.getWorldTranslation obj))
+        initial-sense-rotation (Quaternion. (.getRotation sense))
+        base-anti-rotation (.inverse (.getWorldRotation obj))]
+    (.addControl
+     obj
+     (proxy [AbstractControl] []
+       (controlUpdate [tpf]
+         (let [total-rotation
+               (.mult base-anti-rotation (.getWorldRotation obj))]
+           (.setLocation sense
+                         (.add
+                          (.mult total-rotation sense-offset)
+                          (.getWorldTranslation obj)))
+           (.setRotation sense
+                         (.mult total-rotation initial-sense-rotation))))
+       (controlRender [_ _])))))
+
+(def white -1)
+
+(defn filter-pixels
+  "List the coordinates of all pixels matching pred, within the bounds
+   provided. Bounds -> [x0 y0 width height]"
+  {:author "Dylan Holmes"}
+  ([pred #^BufferedImage image]
+     (filter-pixels pred image [0 0 (.getWidth image) (.getHeight image)]))
+  ([pred #^BufferedImage image [x0 y0 width height]]
+     ((fn accumulate [x y matches]
+        (cond
+         (>= y (+ height y0)) matches
+         (>= x (+ width x0)) (recur 0 (inc y) matches)
+         (pred (.getRGB image x y))
+         (recur (inc x) y (conj matches [x y]))
+         :else (recur (inc x) y matches)))
+      x0 y0 [])))
+
+(defn white-coordinates
+  "Coordinates of all the white pixels in a subset of the image."
+  ([#^BufferedImage image bounds]
+     (filter-pixels #(= % white) image bounds))
+  ([#^BufferedImage image]
+     (filter-pixels #(= % white) image)))
+
+(defn points->image
+  "Take a sparse collection of points and visualize it as a
+   BufferedImage."
+
+  ;; TODO maybe parallelize this since it's easy
+
+  [points]
+  (if (empty? points)
+    (BufferedImage. 1 1 BufferedImage/TYPE_BYTE_BINARY)
+    (let [xs (vec (map first points))
+          ys (vec (map second points))
+          x0 (apply min xs)
+          y0 (apply min ys)
+          width (- (apply max xs) x0)
+          height (- (apply max ys) y0)
+          image (BufferedImage. (inc width) (inc height)
+                                BufferedImage/TYPE_INT_RGB)]
+      (dorun
+       (for [x (range (.getWidth image))
+             y (range (.getHeight image))]
+         (.setRGB image x y 0xFF0000)))
+      (dorun
+       (for [index (range (count points))]
+         (.setRGB image (- (xs index) x0) (- (ys index) y0) -1)))
+
+      image)))
+
+(defn average [coll]
+  (/ (reduce + coll) (count coll)))
+
+(defn collapse-1d
+  "One dimensional analogue of collapse"
+  [center line]
+  (let [length (count line)
+        num-above (count (filter (partial < center) line))
+        num-below (- length num-above)]
+    (range (- center num-below)
+           (+ center num-above))))
+
+(defn collapse
+  "Take a set of pairs of integers and collapse them into a
+   contiguous bitmap."
+  [points]
+  (if (empty? points) []
+      (let
+          [num-points (count points)
+           center (vector
+                   (int (average (map first points)))
+                   (int (average (map second points))))
+           flattened
+           (reduce
+            concat
+            (map
+             (fn [column]
+               (map vector
+                    (map first column)
+                    (collapse-1d (second center)
+                                 (map second column))))
+             (partition-by first (sort-by first points))))
+           squeezed
+           (reduce
+            concat
+            (map
+             (fn [row]
+               (map vector
+                    (collapse-1d (first center)
+                                 (map first row))
+                    (map second row)))
+             (partition-by second (sort-by second flattened))))
+           relocate
+           (let [min-x (apply min (map first squeezed))
+                 min-y (apply min (map second squeezed))]
+             (map (fn [[x y]]
+                    [(- x min-x)
+                     (- y min-y)])
+                  squeezed))]
+        relocate)))
+
+#+end_src
+
+#+results: sense-util
+: #'cortex.sense/meta-data
+
+
+
+* COMMENT generate source
+#+begin_src clojure :tangle ../src/cortex/sense.clj
+<<sense-util>>
+#+end_src
diff -r e1232043656a -r aaacf087504c org/test-creature.org
--- a/org/test-creature.org	Fri Feb 03 05:08:45 2012 -0700
+++ b/org/test-creature.org	Fri Feb 03 05:52:18 2012 -0700
@@ -147,7 +147,7 @@
 ;; TODO remove this!
 (require 'cortex.import)
 (cortex.import/mega-import-jme3)
-(use '(cortex world util body hearing touch vision))
+(use '(cortex world util body hearing touch vision sense))
 (rlm.rlm-commands/help)
 (import java.awt.image.BufferedImage)

@@ -160,83 +160,6 @@
 (declare joint-create)
 (use 'clojure.contrib.def)

-(defn points->image
-  "Take a sparse collection of points and visuliaze it as a
-   BufferedImage."
-
-  ;; TODO maybe parallelize this since it's easy
-
-  [points]
-  (if (empty? points)
-    (BufferedImage. 1 1 BufferedImage/TYPE_BYTE_BINARY)
-    (let [xs (vec (map first points))
-          ys (vec (map second points))
-          x0 (apply min xs)
-          y0 (apply min ys)
-          width (- (apply max xs) x0)
-          height (- (apply max ys) y0)
-          image (BufferedImage. (inc width) (inc height)
-                                BufferedImage/TYPE_INT_RGB)]
-      (dorun
-       (for [x (range (.getWidth image))
-             y (range (.getHeight image))]
-         (.setRGB image x y 0xFF0000)))
-      (dorun
-       (for [index (range (count points))]
-         (.setRGB image (- (xs index) x0) (- (ys index) y0) -1)))
-
-      image)))
-
-(defn average [coll]
-  (/ (reduce + coll) (count coll)))
-
-(defn collapse-1d
-  "One dimensional analogue of collapse"
-  [center line]
-  (let [length (count line)
-        num-above (count (filter (partial < center) line))
-        num-below (- length num-above)]
-    (range (- center num-below)
-           (+ center num-above))))
-
-(defn collapse
-  "Take a set of pairs of integers and collapse them into a
-   contigous bitmap."
-  [points]
-  (if (empty? points) []
-      (let
-          [num-points (count points)
-           center (vector
-                   (int (average (map first points)))
-                   (int (average (map first points))))
-           flattened
-           (reduce
-            concat
-            (map
-             (fn [column]
-               (map vector
-                    (map first column)
-                    (collapse-1d (second center)
-                                 (map second column))))
-             (partition-by first (sort-by first points))))
-           squeezed
-           (reduce
-            concat
-            (map
-             (fn [row]
-               (map vector
-                    (collapse-1d (first center)
-                                 (map first row))
-                    (map second row)))
-             (partition-by second (sort-by second flattened))))
-           relocate
-           (let [min-x (apply min (map first squeezed))
-                 min-y (apply min (map second squeezed))]
-             (map (fn [[x y]]
-                    [(- x min-x)
-                     (- y min-y)])
-                  squeezed))]
-        relocate)))

 (defn load-bullet []
   (let [sim (world (Node.) {} no-op no-op)]
@@ -254,11 +177,6 @@
     (.registerLoader BlenderModelLoader (into-array String ["blend"])))
    model))

-(defn meta-data [blender-node key]
-  (if-let [data (.getUserData blender-node "properties")]
-    (.findValue data key)
-    nil))
-
 (defn blender-to-jme
   "Convert from Blender coordinates to JME coordinates"
   [#^Vector3f in]
@@ -474,33 +392,7 @@
         image-path))
      false false 0)))

-(import ij.process.ImageProcessor)
-(import java.awt.image.BufferedImage)
-
-(def white -1)
-
-(defn filter-pixels
-  "List the coordinates of all pixels matching pred, within the bounds
-   provided. Bounds -> [x0 y0 width height]"
-  {:author "Dylan Holmes"}
-  ([pred #^BufferedImage image]
-     (filter-pixels pred image [0 0 (.getWidth image) (.getHeight image)]))
-  ([pred #^BufferedImage image [x0 y0 width height]]
-     ((fn accumulate [x y matches]
-        (cond
-         (>= y (+ height y0)) matches
-         (>= x (+ width x0)) (recur 0 (inc y) matches)
-         (pred (.getRGB image x y))
-         (recur (inc x) y (conj matches [x y]))
-         :else (recur (inc x) y matches)))
-      x0 y0 [])))
-
-(defn white-coordinates
-  "Coordinates of all the white pixels in a subset of the image."
-  ([#^BufferedImage image bounds]
-     (filter-pixels #(= % white) image bounds))
-  ([#^BufferedImage image]
-     (filter-pixels #(= % white) image)))

 (defn triangle
   "Get the triangle specified by triangle-index from the mesh within
@@ -718,87 +610,12 @@
      (node-seq pieces)))))

-;; human eye transmits 62kb/s to brain Bandwidth is 8.75 Mb/s
-;; http://en.wikipedia.org/wiki/Retina
-
 (defn test-eye []
   (.getChild
    (.getChild (worm-model) "eyes")
   "eye"))

-(defn retina-sensor-image
-  "Return a map of pixel selection functions to BufferedImages
-   describing the distribution of light-sensitive components on this
-   geometry's surface. Each function creates an integer from the rgb
-   values found in the pixel. :red, :green, :blue, :gray are already
-   defined as extracting the red green blue and average components
-   respectively."
-  [#^Spatial eye]
-  (if-let [eye-map (meta-data eye "eye")]
-    (map-vals
-     #(ImageToAwt/convert
-       (.getImage (.loadTexture (asset-manager) %))
-       false false 0)
-     (eval (read-string eye-map)))))
-
-(defn eye-dimensions
-  "returns the width and height specified in the metadata of the eye"
-  [#^Spatial eye]
-  (let [dimensions
-        (map #(vector (.getWidth %) (.getHeight %))
-             (vals (retina-sensor-image eye)))]
-    [(apply max (map first dimensions))
-     (apply max (map second dimensions))]))
-
-(defn creature-eyes
-  ;;dylan
-  "Return the children of the creature's \"eyes\" node."
-  ;;"The eye nodes which are children of the \"eyes\" node in the
-  ;;creature."
-  [#^Node creature]
-  (if-let [eye-node (.getChild creature "eyes")]
-    (seq (.getChildren eye-node))
-    (do (println-repl "could not find eyes node") [])))
-
-;; Here's how vision will work.
-
-;; Make the continuation in scene-processor take FrameBuffer,
-;; byte-buffer, BufferedImage already sized to the correct
-;; dimensions. the continuation will decide wether to "mix" them
-;; into the BufferedImage, lazily ignore them, or mix them halfway
-;; and call c/graphics card routines.
-
-;; (vision creature) will take an optional :skip argument which will
-;; inform the continuations in scene processor to skip the given
-;; number of cycles; 0 means that no cycles will be skipped.
-
-;; (vision creature) will return [init-functions sensor-functions].
-;; The init-functions are each single-arg functions that take the
-;; world and register the cameras and must each be called before the
-;; corresponding sensor-functions. Each init-function returns the
-;; viewport for that eye which can be manipulated, saved, etc. Each
-;; sensor-function is a thunk and will return data in the same
-;; format as the tactile-sensor functions; the structure is
-;; [topology, sensor-data]. Internally, these sensor-functions
-;; maintain a reference to sensor-data which is periodically updated
-;; by the continuation function established by its init-function.
-;; They can be queried every cycle, but their information may not
-;; necessairly be different every cycle.
-
-;; Each eye in the creature in blender will work the same way as
-;; joints -- a zero dimensional object with no geometry whose local
-;; coordinate system determines the orientation of the resulting
-;; eye. All eyes will have a parent named "eyes" just as all joints
-;; have a parent named "joints". The resulting camera will be a
-;; ChaseCamera or a CameraNode bound to the geo that is closest to
-;; the eye marker. The eye marker will contain the metadata for the
-;; eye, and will be moved by it's bound geometry. The dimensions of
-;; the eye's camera are equal to the dimensions of the eye's "UV"
-;; map.
-
-
-;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;

 ;; Ears work the same way as vision.

@@ -817,45 +634,9 @@
     (seq (.getChildren ear-node))
     (do (println-repl "could not find ears node") [])))

-(defn closest-node
-  "Return the object in creature which is closest to the given node."
-  ;;dylan"The closest object in creature to the given node."
-  [#^Node creature #^Node eye]
-  (loop [radius (float 0.01)]
-    (let [results (CollisionResults.)]
-      (.collideWith
-       creature
-       (BoundingBox. (.getWorldTranslation eye)
-                     radius radius radius)
-       results)
-      (if-let [target (first results)]
-        (.getGeometry target)
-        (recur (float (* 2 radius)))))))

 ;;dylan (defn follow-sense, adjoin-sense, attach-stimuli,
 ;;anchor-qualia, augment-organ, with-organ

-(defn bind-sense
-  "Bind the sense to the Spatial such that it will maintain its
-   current position relative to the Spatial no matter how the spatial
-   moves. 'sense can be either a Camera or Listener object."
-  [#^Spatial obj sense]
-  (let [sense-offset (.subtract (.getLocation sense)
-                                (.getWorldTranslation obj))
-        initial-sense-rotation (Quaternion. (.getRotation sense))
-        base-anti-rotation (.inverse (.getWorldRotation obj))]
-    (.addControl
-     obj
-     (proxy [AbstractControl] []
-       (controlUpdate [tpf]
-         (let [total-rotation
-               (.mult base-anti-rotation (.getWorldRotation obj))]
-           (.setLocation sense
-                         (.add
-                          (.mult total-rotation sense-offset)
-                          (.getWorldTranslation obj)))
-           (.setRotation sense
-                         (.mult total-rotation initial-sense-rotation))))
-       (controlRender [_ _])))))

 (defn update-listener-velocity
@@ -919,68 +700,6 @@
    (for [ear (creature-ears creature)]
      (enable-hearing creature ear))))

-(defn attach-eye
-  "Attach a Camera to the appropiate area and return the Camera."
-  [#^Node creature #^Spatial eye]
-  (let [target (closest-node creature eye)
-        [cam-width cam-height] (eye-dimensions eye)
-        cam (Camera. cam-width cam-height)]
-    (.setLocation cam (.getWorldTranslation eye))
-    (.setRotation cam (.getWorldRotation eye))
-    (.setFrustumPerspective
-     cam 45 (/ (.getWidth cam) (.getHeight cam))
-     1 1000)
-    (bind-sense target cam)
-    cam))
-
-(def presets
-  {:all 0xFFFFFF
-   :red 0xFF0000
-   :blue 0x0000FF
-   :green 0x00FF00})
-
-(defn enable-vision
-  "return [init-function sensor-functions] for a particular eye"
-  [#^Node creature #^Spatial eye & {skip :skip :or {skip 0}}]
-  (let [retinal-map (retina-sensor-image eye)
-        camera (attach-eye creature eye)
-        vision-image
-        (atom
-         (BufferedImage. (.getWidth camera)
-                         (.getHeight camera)
-                         BufferedImage/TYPE_BYTE_BINARY))]
-    [(fn [world]
-       (add-eye
-        world camera
-        (let [counter (atom 0)]
-          (fn [r fb bb bi]
-            (if (zero? (rem (swap! counter inc) (inc skip)))
-              (reset! vision-image (BufferedImage! r fb bb bi)))))))
-     (vec
-      (map
-       (fn [[key image]]
-         (let [whites (white-coordinates image)
-               topology (vec (collapse whites))
-               mask (presets key)]
-           (fn []
-             (vector
-              topology
-              (vec
-               (for [[x y] whites]
-                 (bit-and
-                  mask (.getRGB @vision-image x y))))))))
-       retinal-map))]))
-
-(defn vision
-  [#^Node creature & {skip :skip :or {skip 0}}]
-  (reduce
-   (fn [[init-a senses-a]
-        [init-b senses-b]]
-     [(conj init-a init-b)
-      (into senses-a senses-b)])
-   [[][]]
-   (for [eye (creature-eyes creature)]
-     (enable-vision creature eye))))

diff -r e1232043656a -r aaacf087504c org/util.org
--- a/org/util.org	Fri Feb 03 05:08:45 2012 -0700
+++ b/org/util.org	Fri Feb 03 05:52:18 2012 -0700
@@ -197,6 +197,13 @@
   [fn m]
   (apply fn (reduce #(into %1 %2) [] m)))

+(defn map-vals
+  "Transform a map by applying a function to its values,
+   keeping the keys the same."
+  [f m] (zipmap (keys m) (map f (vals m))))
+
+
+
 #+end_src

 #+results: util
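
For reference, here is a minimal sketch (not part of the changeset) of how the
refactored vision functions are meant to be wired together, following the
contract described in org/eyes.org above. It only uses names that appear in
this patch or in the rest of cortex (=vision=, =world=, =worm-model= from
test-creature.org); the =[world tpf]= update signature and the name
=test-worm-vision= are illustrative assumptions, not part of the changeset.

#+begin_src clojure
;; Sketch only: worm-model comes from test-creature.org, world from
;; cortex.world (used the same way load-bullet uses it).  The update
;; function's [world tpf] signature and the name test-worm-vision are
;; assumptions for illustration.
(defn test-worm-vision []
  (let [creature (worm-model)
        ;; (vision creature) => [init-functions sensor-functions]
        [init-fns sensor-fns] (vision creature :skip 0)]
    (world
     (doto (Node.) (.attachChild creature))
     {}
     (fn [world]
       ;; Each init-function must be called with the world before its
       ;; sensor-functions are queried; it registers that eye's camera
       ;; and returns the eye's ViewPort.
       (dorun (map #(% world) init-fns)))
     (fn [world tpf]
       ;; Each sensor-function is a thunk returning [topology sensor-data].
       (doseq [sense sensor-fns]
         (let [[topology data] (sense)]
           (println (count topology) "light-sensitive elements")))))))
#+end_src

With :skip greater than 0, the continuation only refreshes vision-image every
(inc skip) frames, so consecutive calls to the same sensor-function may return
identical data.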