diff org/eyes.org @ 151:aaacf087504c

refactored vision code
author Robert McIntyre <rlm@mit.edu>
date Fri, 03 Feb 2012 05:52:18 -0700
parents 9d0fe7f54e14
children 9e6a30b8c99a
line wrap: on
line diff
     1.1 --- a/org/eyes.org	Fri Feb 03 05:08:45 2012 -0700
     1.2 +++ b/org/eyes.org	Fri Feb 03 05:52:18 2012 -0700
     1.3 @@ -11,6 +11,47 @@
     1.4  
      1.5  I want to make creatures with eyes. Each eye can be independently moved
     1.6  and should see its own version of the world depending on where it is.
     1.7 +
     1.8 +Here's how vision will work.
     1.9 +
     1.10 +Make the continuation in scene-processor take FrameBuffer,
     1.11 +byte-buffer, and BufferedImage already sized to the correct
     1.12 +dimensions. The continuation will decide whether to "mix" them
     1.13 +into the BufferedImage, lazily ignore them, or mix them halfway
     1.14 +and call C/graphics card routines.
    1.15 +
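A minimal sketch of such a continuation, under the assumption (matching the
code below) that it also receives the Renderer along with the three buffers;
=readFrameBuffer= and =Screenshots/convertScreenShot= are standard jME3 calls:

#+begin_src clojure
;; sketch only -- a continuation that always "mixes" the rendered
;; frame into the pre-sized BufferedImage.
(fn [renderer frame-buffer byte-buffer buffered-image]
  (.clear byte-buffer)
  ;; pull the frame off the graphics card into the ByteBuffer ...
  (.readFrameBuffer renderer frame-buffer byte-buffer)
  ;; ... and convert it into the AWT image for further processing.
  (com.jme3.util.Screenshots/convertScreenShot byte-buffer buffered-image))
#+end_src
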
     1.16 +(vision creature) will take an optional :skip argument which will
     1.17 +inform the continuations in scene processor to skip the given
     1.18 +number of cycles; 0 means that no cycles will be skipped.
    1.19 +
     1.20 +(vision creature) will return [init-functions sensor-functions].
     1.21 +The init-functions are each single-arg functions that take the
     1.22 +world and register the cameras; each must be called before the
     1.23 +corresponding sensor-function is used.  Each init-function returns
     1.24 +the viewport for that eye, which can be manipulated, saved, etc.
     1.25 +Each sensor-function is a thunk and will return data in the same
     1.26 +format as the tactile-sensor functions; the structure is
     1.27 +[topology, sensor-data]. Internally, these sensor-functions
     1.28 +maintain a reference to sensor-data which is periodically updated
     1.29 +by the continuation function established by its init-function.
     1.30 +They can be queried every cycle, but their information may not
     1.31 +necessarily be different every cycle.
    1.32 +
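As a usage sketch (=world= and =creature= here are placeholders, and the
:skip value is arbitrary), the two halves of the return value would be
used like this:

#+begin_src clojure
;; hypothetical usage of (vision creature)
(let [[init-fns sensor-fns] (vision creature :skip 2)
      ;; register every camera with the world; per the description
      ;; above, each init-function returns that eye's viewport.
      viewports (doall (map (fn [init!] (init! world)) init-fns))]
  ;; each sensor-function is a thunk returning [topology sensor-data]
  ;; and may be queried every cycle.
  (doseq [sense sensor-fns]
    (let [[topology data] (sense)]
      (println (count topology) "sensors," (count data) "readings"))))
#+end_src
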
     1.33 +Each eye in the creature in blender will work the same way as
     1.34 +joints -- a zero-dimensional object with no geometry whose local
     1.35 +coordinate system determines the orientation of the resulting
     1.36 +eye. All eyes will have a parent named "eyes", just as all joints
     1.37 +have a parent named "joints". The resulting camera will be a
     1.38 +ChaseCamera or a CameraNode bound to the geometry that is closest
     1.39 +to the eye marker. The eye marker will contain the metadata for
     1.40 +the eye, and will be moved by its bound geometry. The dimensions
     1.41 +of the eye's camera are equal to the dimensions of the eye's "UV"
     1.42 +map.
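
For illustration only -- the keys mirror =presets= below and the image
path is made up -- the "eye" metadata attached to the marker might read
as a map from pixel-selection keys to UV-image paths:

#+begin_src clojure
;; hypothetical "eye" metadata string on the eye marker in blender;
;; it reads as a map from pixel-selection keys to the UV images that
;; lay out each light-sensitive component (the path is invented).
"{:all   \"Models/sample/retina.png\"
  :red   \"Models/sample/retina.png\"
  :green \"Models/sample/retina.png\"
  :blue  \"Models/sample/retina.png\"}"
#+end_src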
    1.43 +
    1.44 +
    1.45 +
    1.46 +
    1.47 +
    1.48  #+name: eyes
    1.49  #+begin_src clojure 
    1.50  (ns cortex.vision
    1.51 @@ -18,14 +59,18 @@
    1.52    eyes from different positions to observe the same world, and pass
    1.53    the observed data to any arbitray function."
    1.54    {:author "Robert McIntyre"}
    1.55 -  (:use cortex.world)
    1.56 +  (:use (cortex world sense util))
    1.57    (:import com.jme3.post.SceneProcessor)
    1.58    (:import (com.jme3.util BufferUtils Screenshots))
    1.59    (:import java.nio.ByteBuffer)
    1.60    (:import java.awt.image.BufferedImage)
    1.61    (:import com.jme3.renderer.ViewPort)
    1.62    (:import com.jme3.math.ColorRGBA)
    1.63 -  (:import com.jme3.renderer.Renderer))
    1.64 +  (:import com.jme3.renderer.Renderer)
    1.65 +  (:import jme3tools.converters.ImageToAwt)
    1.66 +  (:import com.jme3.scene.Node))
    1.67 +
    1.68 +(cortex.import/mega-import-jme3)
    1.69  
    1.70  
    1.71  (defn vision-pipeline
    1.72 @@ -92,10 +137,108 @@
    1.73        (.setBackgroundColor ColorRGBA/Black)
    1.74        (.addProcessor (vision-pipeline continuation))
    1.75        (.attachScene (.getRootNode world)))))
    1.76 +
    1.77 +(defn retina-sensor-image
    1.78 +  "Return a map of pixel selection functions to BufferedImages
    1.79 +   describing the distribution of light-sensitive components on this
    1.80 +   geometry's surface. Each function creates an integer from the rgb
    1.81 +   values found in the pixel. :red, :green, :blue, :gray are already
    1.82 +   defined as extracting the red green blue and average components
    1.83 +   respectively."
    1.84 +   [#^Spatial eye]
    1.85 +   (if-let [eye-map (meta-data eye "eye")]
    1.86 +     (map-vals
    1.87 +      #(ImageToAwt/convert
    1.88 +        (.getImage (.loadTexture (asset-manager) %))
    1.89 +        false false 0)
    1.90 +      (eval (read-string eye-map)))))
    1.91 +
    1.92 +(defn eye-dimensions
    1.93 +  "returns the width and height specified in the metadata of the eye"
    1.94 +  [#^Spatial eye]
    1.95 +  (let [dimensions
    1.96 +          (map #(vector (.getWidth %) (.getHeight %))
    1.97 +               (vals (retina-sensor-image eye)))]
    1.98 +    [(apply max (map first dimensions))
    1.99 +     (apply max (map second dimensions))]))
   1.100 +
   1.101 +(defn creature-eyes
   1.102 +  ;;dylan
   1.103 +  "Return the children of the creature's \"eyes\" node."
   1.104 +  ;;"The eye nodes which are children of the \"eyes\" node in the
   1.105 +  ;;creature."
   1.106 +  [#^Node creature]
   1.107 +  (if-let [eye-node (.getChild creature "eyes")]
   1.108 +    (seq (.getChildren eye-node))
   1.109 +    (do (println-repl "could not find eyes node") [])))
   1.110 +
   1.111 +
   1.112 +(defn attach-eye
   1.113 +  "Attach a Camera to the appropiate area and return the Camera."
   1.114 +  [#^Node creature #^Spatial eye]
   1.115 +  (let [target (closest-node creature eye)
   1.116 +        [cam-width cam-height] (eye-dimensions eye)
   1.117 +        cam (Camera. cam-width cam-height)]
   1.118 +    (.setLocation cam (.getWorldTranslation eye))
   1.119 +    (.setRotation cam (.getWorldRotation eye))
   1.120 +    (.setFrustumPerspective
   1.121 +     cam 45 (/ (.getWidth cam) (.getHeight cam))
   1.122 +     1 1000)
   1.123 +    (bind-sense target cam)
   1.124 +    cam))
   1.125 +
   1.126 +(def presets
   1.127 +  {:all    0xFFFFFF
   1.128 +   :red    0xFF0000
   1.129 +   :blue   0x0000FF
   1.130 +   :green  0x00FF00})
   1.131 +
   1.132 +(defn enable-vision
   1.133 +  "return [init-function sensor-functions] for a particular eye"
   1.134 +  [#^Node creature #^Spatial eye & {skip :skip :or {skip 0}}]
   1.135 +  (let [retinal-map (retina-sensor-image eye)
   1.136 +        camera (attach-eye creature eye)
   1.137 +        vision-image
   1.138 +        (atom
   1.139 +         (BufferedImage. (.getWidth camera)
   1.140 +                         (.getHeight camera)
   1.141 +                         BufferedImage/TYPE_BYTE_BINARY))]
   1.142 +    [(fn [world]
   1.143 +       (add-eye
   1.144 +        world camera
   1.145 +        (let [counter  (atom 0)]
   1.146 +          (fn [r fb bb bi]
   1.147 +            (if (zero? (rem (swap! counter inc) (inc skip)))
   1.148 +              (reset! vision-image (BufferedImage! r fb bb bi)))))))
   1.149 +     (vec
   1.150 +      (map
   1.151 +       (fn [[key image]]
   1.152 +         (let [whites (white-coordinates image)
   1.153 +               topology (vec (collapse whites))
   1.154 +               mask (presets key)]
   1.155 +           (fn []
   1.156 +             (vector
   1.157 +              topology
   1.158 +              (vec 
   1.159 +               (for [[x y] whites]
   1.160 +                 (bit-and
   1.161 +                  mask (.getRGB @vision-image x y))))))))
   1.162 +       retinal-map))]))
   1.163 +
   1.164 +(defn vision
   1.165 +  [#^Node creature & {skip :skip :or {skip 0}}]
   1.166 +  (reduce
   1.167 +   (fn [[init-a senses-a]
   1.168 +        [init-b senses-b]]
   1.169 +     [(conj init-a init-b)
   1.170 +      (into senses-a senses-b)])
   1.171 +   [[][]]      
   1.172 +   (for [eye (creature-eyes creature)]
   1.173 +     (enable-vision creature eye))))
   1.174 +
   1.175 +
   1.176  #+end_src
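
To make the =presets= masking in =enable-vision= concrete, here is a
small worked sketch (the pixel value is arbitrary) of how each bit-mask
isolates one channel of the packed RGB value returned by =.getRGB=
(ignoring the alpha byte):

#+begin_src clojure
;; sketch: how the bit-masks in `presets` pull one channel out of a
;; packed RGB integer.
(let [rgb 0x12AB34]
  [(bit-and 0xFF0000 rgb)    ; red bits   => 0x120000
   (bit-and 0x00FF00 rgb)    ; green bits => 0x00AB00
   (bit-and 0x0000FF rgb)    ; blue bits  => 0x000034
   (bit-and 0xFFFFFF rgb)])  ; :all keeps every channel => 0x12AB34
#+end_src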
   1.177  
   1.178 -#+results: eyes
   1.179 -: #'cortex.vision/add-eye
   1.180  
   1.181  Note the use of continuation passing style for connecting the eye to a
   1.182  function to process the output. You can create any number of eyes, and