#+title: Simulated Sense of Sight
#+author: Robert McIntyre
#+email: rlm@mit.edu
#+description: Simulated sight for AI research using JMonkeyEngine3 and clojure
#+keywords: computer vision, jMonkeyEngine3, clojure
#+SETUPFILE: ../../aurellem/org/setup.org
#+INCLUDE: ../../aurellem/org/level-0.org
#+babel: :mkdirp yes :noweb yes :exports both

* Vision

I want to make creatures with eyes. Each eye can be independently
moved and should see its own version of the world depending on where
it is.

Here's how vision will work.

Make the continuation in scene-processor take FrameBuffer,
byte-buffer, BufferedImage already sized to the correct
dimensions. The continuation will decide whether to "mix" them
into the BufferedImage, lazily ignore them, or mix them halfway
and call c/graphics card routines.

(vision creature) will take an optional :skip argument which will
inform the continuations in the scene processor to skip the given
number of cycles; 0 means that no cycles will be skipped.

(vision creature) will return [init-functions sensor-functions].
The init-functions are each single-arg functions that take the
world and register the cameras, and must each be called before the
corresponding sensor-functions. Each init-function returns the
viewport for that eye, which can be manipulated, saved, etc. Each
sensor-function is a thunk and will return data in the same
format as the tactile-sensor functions; the structure is
[topology, sensor-data]. Internally, these sensor-functions
maintain a reference to sensor-data which is periodically updated
by the continuation function established by its init-function.
They can be queried every cycle, but their information may not
necessarily be different every cycle.

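For concreteness, here is a rough usage sketch. It is not part of the
library: the =creature= node and the summary-printing helper are
assumptions, but the =[topology, sensor-data]= shape and the =:skip=
argument match the code as it finally appears below (where camera
registration happens automatically the first time each sensor
function is called).

#+begin_src clojure
;; A usage sketch only: `creature` is assumed to be a Node loaded from
;; a blender file that contains an "eyes" sub-node.
(def creature-vision
  ;; with :skip 2, the underlying image refreshes every third frame
  (vision! creature :skip 2))

(defn print-vision-summary
  "Hypothetical helper: query every sensor function once and report
  how many sensors each eye-channel has, plus its first reading."
  [world]
  (doseq [sense-fn creature-vision]
    (let [[topology sensor-data] (sense-fn world)]
      (println (count topology) "sensors; first reading:"
               (first sensor-data)))))
#+end_src
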
Each eye in the creature in blender will work the same way as
joints -- a zero dimensional object with no geometry whose local
coordinate system determines the orientation of the resulting
eye. All eyes will have a parent named "eyes" just as all joints
have a parent named "joints". The resulting camera will be a
ChaseCamera or a CameraNode bound to the geometry that is closest to
the eye marker. The eye marker will contain the metadata for the
eye, and will be moved by its bound geometry. The dimensions of
the eye's camera are equal to the dimensions of the eye's "UV"
map.

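As an illustration (the file names here are hypothetical), the "eye"
metadata attached to an eye node might read as a Clojure map from
color-channel keywords to retina images. =retina-sensor-profile=
below loads each image, the white pixels of an image mark where that
channel's sensors sit, and the largest image dimensions become the
camera's resolution.

#+begin_src clojure
;; Hypothetical text stored in an eye node's "eye" metadata field in
;; blender.  Each value is a path assumed to be resolvable by
;; load-image; white pixels in each image mark the locations of that
;; channel's light sensors.
{:red   "Models/test-creature/retina-red.png"
 :green "Models/test-creature/retina-green.png"
 :blue  "Models/test-creature/retina-blue.png"
 :gray  "Models/test-creature/retina-gray.png"}
#+end_src
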
#+name: eyes
#+begin_src clojure
(ns cortex.vision
  "Simulate the sense of vision in jMonkeyEngine3. Enables multiple
  eyes from different positions to observe the same world, and pass
  the observed data to any arbitrary function. Automatically reads
  eye-nodes from specially prepared blender files and instantiates
  them in the world as actual eyes."
  {:author "Robert McIntyre"}
  (:use (cortex world sense util))
  (:use clojure.contrib.def)
  (:import com.jme3.post.SceneProcessor)
  (:import (com.jme3.util BufferUtils Screenshots))
  (:import java.nio.ByteBuffer)
  (:import java.awt.image.BufferedImage)
  (:import (com.jme3.renderer ViewPort Camera))
  (:import com.jme3.math.ColorRGBA)
  (:import com.jme3.renderer.Renderer)
  (:import com.jme3.app.Application)
  (:import com.jme3.texture.FrameBuffer)
  (:import (com.jme3.scene Node Spatial)))

(defn vision-pipeline
  "Create a SceneProcessor object which wraps a vision processing
  continuation function. The continuation is a function that takes
  [#^Renderer r #^FrameBuffer fb #^ByteBuffer b #^BufferedImage bi],
  each of which has already been appropriately sized."
  [continuation]
  (let [byte-buffer (atom nil)
        renderer (atom nil)
        image (atom nil)]
    (proxy [SceneProcessor] []
      (initialize
       [renderManager viewPort]
       (let [cam (.getCamera viewPort)
             width (.getWidth cam)
             height (.getHeight cam)]
         (reset! renderer (.getRenderer renderManager))
         (reset! byte-buffer
                 (BufferUtils/createByteBuffer
                  (* width height 4)))
         (reset! image (BufferedImage.
                        width height
                        BufferedImage/TYPE_4BYTE_ABGR))))
      (isInitialized [] (not (nil? @byte-buffer)))
      (reshape [_ _ _])
      (preFrame [_])
      (postQueue [_])
      (postFrame
       [#^FrameBuffer fb]
       (.clear @byte-buffer)
       (continuation @renderer fb @byte-buffer @image))
      (cleanup []))))

(defn frameBuffer->byteBuffer!
  "Transfer the data in the graphics card (Renderer, FrameBuffer) to
  the CPU (ByteBuffer)."
  [#^Renderer r #^FrameBuffer fb #^ByteBuffer bb]
  (.readFrameBuffer r fb bb) bb)

(defn byteBuffer->bufferedImage!
  "Convert the C-style BGRA image data in the ByteBuffer bb to the AWT
  style ABGR image data and place it in BufferedImage bi."
  [#^ByteBuffer bb #^BufferedImage bi]
  (Screenshots/convertScreenShot bb bi) bi)

(defn BufferedImage!
  "Continuation which will grab the buffered image from the materials
  provided by (vision-pipeline)."
  [#^Renderer r #^FrameBuffer fb #^ByteBuffer bb #^BufferedImage bi]
  (byteBuffer->bufferedImage!
   (frameBuffer->byteBuffer! r fb bb) bi))

(defn add-camera!
  "Add a camera to the world, calling continuation on every frame
  produced."
  [#^Application world camera continuation]
  (let [width (.getWidth camera)
        height (.getHeight camera)
        render-manager (.getRenderManager world)
        viewport (.createMainView render-manager "eye-view" camera)]
    (doto viewport
      (.setClearFlags true true true)
      (.setBackgroundColor ColorRGBA/Black)
      (.addProcessor (vision-pipeline continuation))
      (.attachScene (.getRootNode world)))))

(defn retina-sensor-profile
  "Return a map of pixel selection functions to BufferedImages
  describing the distribution of light-sensitive components of this
  eye. Each function creates an integer from the rgb values found in
  the pixel. :red, :green, :blue, :gray are already defined as
  extracting the red, green, blue, and average components
  respectively."
  [#^Spatial eye]
  (if-let [eye-map (meta-data eye "eye")]
    (map-vals
     load-image
     (eval (read-string eye-map)))))

(defn eye-dimensions
  "Returns [width, height] specified in the metadata of the eye."
  [#^Spatial eye]
  (let [dimensions
        (map #(vector (.getWidth %) (.getHeight %))
             (vals (retina-sensor-profile eye)))]
    [(apply max (map first dimensions))
     (apply max (map second dimensions))]))

(defvar
  ^{:arglists '([creature])}
  eyes
  (sense-nodes "eyes")
  "Return the children of the creature's \"eyes\" node.")

(defn add-eye!
  "Create a Camera centered on the current position of 'eye which
  follows the closest physical node in 'creature."
  [#^Node creature #^Spatial eye]
  (let [target (closest-node creature eye)
        [cam-width cam-height] (eye-dimensions eye)
        cam (Camera. cam-width cam-height)]
    (.setLocation cam (.getWorldTranslation eye))
    (.setRotation cam (.getWorldRotation eye))
    (.setFrustumPerspective
     cam 45 (/ (.getWidth cam) (.getHeight cam))
     1 1000)
    (bind-sense target cam)
    cam))

(defvar color-channel-presets
  {:all   0xFFFFFF
   :red   0xFF0000
   :blue  0x0000FF
   :green 0x00FF00}
  "Bitmasks for common RGB color channels")

(defn vision-fn
  "Returns a list of functions, each of which will return a color
  channel's worth of visual information when called inside a running
  simulation."
  [#^Node creature #^Spatial eye & {skip :skip :or {skip 0}}]
  (let [retinal-map (retina-sensor-profile eye)
        camera (add-eye! creature eye)
        vision-image
        (atom
         (BufferedImage. (.getWidth camera)
                         (.getHeight camera)
                         BufferedImage/TYPE_BYTE_BINARY))
        ;; register the eye's camera with the world only once, no
        ;; matter how many sensor functions are eventually called.
        register-eye!
        (runonce
         (fn [world]
           (add-camera!
            world camera
            (let [counter (atom 0)]
              (fn [r fb bb bi]
                ;; refresh the cached image only every (inc skip) frames
                (if (zero? (rem (swap! counter inc) (inc skip)))
                  (reset! vision-image
                          (BufferedImage! r fb bb bi))))))))]
    (vec
     (map
      (fn [[key image]]
        (let [whites (white-coordinates image)
              topology (vec (collapse whites))
              mask (color-channel-presets key)]
          (fn [world]
            (register-eye! world)
            (vector
             topology
             (vec
              (for [[x y] whites]
                (bit-and
                 mask (.getRGB @vision-image x y))))))))
      retinal-map))))


;; TODO maybe should add a viewport-manipulation function to
;; automatically change viewport settings, attach shadow filters, etc.

(defn vision!
  "Returns a list of functions, each of which will return visual
  sensory data when called inside a running simulation."
  [#^Node creature & {skip :skip :or {skip 0}}]
  (reduce
   concat
   (for [eye (eyes creature)]
     ;; pass the :skip option through to each eye's sensor functions
     (vision-fn creature eye :skip skip))))

(defn view-vision
  "Creates a function which accepts a list of visual sensor-data and
  displays each element of the list to the screen."
  []
  (view-sense
   (fn [[coords sensor-data]]
     (let [image (points->image coords)]
       (dorun
        (for [i (range (count coords))]
          (.setRGB image ((coords i) 0) ((coords i) 1)
                   (sensor-data i))))
       image))))

#+end_src

Note the use of continuation passing style for connecting the eye to a
function to process the output. You can create any number of eyes, and
each of them will see the world from its own =Camera=. Once every
frame, the rendered image is copied to a =BufferedImage=, and that
data is sent off to the continuation function. Moving the =Camera=
which was used to create the eye will change what the eye sees.

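As a sketch of what a custom continuation might look like (the
=handle-image= function here is a hypothetical user-supplied
callback), a continuation receives the renderer, frame buffer, byte
buffer, and a pre-sized =BufferedImage= each frame, and can use
=BufferedImage!= to obtain a complete AWT image:

#+begin_src clojure
;; A sketch, not part of the library: build a continuation that
;; converts every rendered frame to a BufferedImage and hands it to
;; handle-image, a hypothetical user-supplied function.
(defn image-continuation [handle-image]
  (fn [renderer frame-buffer byte-buffer buffered-image]
    (handle-image
     (BufferedImage! renderer frame-buffer byte-buffer buffered-image))))

;; Attach it to the default camera inside a world setup function:
;; (add-camera! world (.getCamera world) (image-continuation my-handler))
#+end_src
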
* Example

#+name: test-vision
#+begin_src clojure
(ns cortex.test.vision
  (:use (cortex world util vision))
  (:import java.awt.image.BufferedImage)
  (:import javax.swing.JPanel)
  (:import javax.swing.SwingUtilities)
  (:import java.awt.Dimension)
  (:import javax.swing.JFrame)
  (:import com.jme3.math.ColorRGBA)
  (:import com.jme3.scene.Node)
  (:import com.jme3.math.Vector3f))

(defn test-two-eyes
  "Testing vision:
  Tests the vision system by creating two views of the same rotating
  object from different angles and displaying both of those views in
  JFrames.

  You should see a rotating cube, and two windows,
  each displaying a different view of the cube."
  []
  (let [candy
        (box 1 1 1 :physical? false :color ColorRGBA/Blue)]
    (world
     (doto (Node.)
       (.attachChild candy))
     {}
     (fn [world]
       (let [cam (.clone (.getCamera world))
             width (.getWidth cam)
             height (.getHeight cam)]
         ;; attach a view of the scene from the default camera's
         ;; starting position ...
         (add-camera! world cam
                      (comp (view-image) BufferedImage!))
         ;; ... and a second view from a different angle.
         (add-camera! world
                      (doto (.clone cam)
                        (.setLocation (Vector3f. -10 0 0))
                        (.lookAt Vector3f/ZERO Vector3f/UNIT_Y))
                      (comp (view-image) BufferedImage!))
         ;; This is here to restore the main view
         ;; after the other views have completed processing.
         (add-camera! world (.getCamera world) no-op)))
     (fn [world tpf]
       (.rotate candy (* tpf 0.2) 0 0)))))
#+end_src

#+results: test-vision
: #'cortex.test.vision/test-two-eyes

The example code will create two live views of the same rotating
object from different angles. It can be used both for stereoscopic
vision simulation and for simulating multiple creatures, each with
its own sense of vision.

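For a creature loaded from a blender file (rather than the bare
cameras used in the test above), here is a sketch of how =vision!=
and =view-vision= might be wired together; the =creature= node itself
is an assumption, not something defined in this file:

#+begin_src clojure
;; A sketch, assuming `creature` is a Node with an "eyes" sub-node as
;; described earlier.  Collect the creature's vision functions once,
;; then display every eye-channel's sensor data each frame.
(defn debug-vision [creature]
  (let [vision-fns (vision! creature)
        display    (view-vision)]
    (fn [world tpf]
      (display (map #(% world) vision-fns)))))
#+end_src
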
- As a neat bonus, this idea behind simulated vision also enables one
  to [[../../cortex/html/capture-video.html][capture live video feeds from jMonkeyEngine]].

* COMMENT code generation
#+begin_src clojure :tangle ../src/cortex/vision.clj
<<eyes>>
#+end_src

#+begin_src clojure :tangle ../src/cortex/test/vision.clj
<<test-vision>>
#+end_src