# HG changeset patch # User Dylan Holmes # Date 1329552460 21600 # Node ID 2dfebf71053c2c7dba93a67299edf5a0be67f9ff # Parent 35d9e7d04d87c736c511eb5177c717ce9ccba05e# Parent 7e3938f40c52ca9f02b945892e4dc17bb2102445 Merged Winston cover letter diff -r 35d9e7d04d87 -r 2dfebf71053c assets/Models/heart/heart.blend Binary file assets/Models/heart/heart.blend has changed diff -r 35d9e7d04d87 -r 2dfebf71053c assets/Models/subtitles/hand.blend Binary file assets/Models/subtitles/hand.blend has changed diff -r 35d9e7d04d87 -r 2dfebf71053c assets/Models/test-creature/finger-tip-UV-layout.png Binary file assets/Models/test-creature/finger-tip-UV-layout.png has changed diff -r 35d9e7d04d87 -r 2dfebf71053c assets/Models/test-creature/finger-tip.png Binary file assets/Models/test-creature/finger-tip.png has changed diff -r 35d9e7d04d87 -r 2dfebf71053c assets/Models/test-creature/finger-tip.xcf Binary file assets/Models/test-creature/finger-tip.xcf has changed diff -r 35d9e7d04d87 -r 2dfebf71053c assets/Models/test-creature/hand.blend Binary file assets/Models/test-creature/hand.blend has changed diff -r 35d9e7d04d87 -r 2dfebf71053c assets/Models/test-creature/mid-finger.png Binary file assets/Models/test-creature/mid-finger.png has changed diff -r 35d9e7d04d87 -r 2dfebf71053c assets/Models/test-creature/mid-finger.xcf Binary file assets/Models/test-creature/mid-finger.xcf has changed diff -r 35d9e7d04d87 -r 2dfebf71053c assets/Models/test-creature/palm-retina.png Binary file assets/Models/test-creature/palm-retina.png has changed diff -r 35d9e7d04d87 -r 2dfebf71053c assets/Models/test-touch/touch-cube.blend Binary file assets/Models/test-touch/touch-cube.blend has changed diff -r 35d9e7d04d87 -r 2dfebf71053c assets/Models/test-touch/touch-cube.blend.orig Binary file assets/Models/test-touch/touch-cube.blend.orig has changed diff -r 35d9e7d04d87 -r 2dfebf71053c images/worm-with-muscle.png Binary file images/worm-with-muscle.png has changed diff -r 35d9e7d04d87 -r 2dfebf71053c 
org/body.org --- a/org/body.org Sat Feb 18 02:06:06 2012 -0600 +++ b/org/body.org Sat Feb 18 02:07:40 2012 -0600 @@ -80,14 +80,17 @@ (.setTimer world (RatchetTimer. 60)) world) -(defn test-one [] - (world (hand) - standard-debug-controls - (comp - #(Capture/captureVideo - % (File. "/home/r/proj/cortex/render/body/1")) - setup) - no-op)) +(defn test-hand-1 + ([] (test-hand-1 false)) + ([record?] + (world (hand) + standard-debug-controls + (fn [world] + (if record? + (Capture/captureVideo + world + (File. "/home/r/proj/cortex/render/body/1"))) + (setup world)) no-op))) #+end_src @@ -158,18 +161,22 @@ (box 10 3 10 :position (Vector3f. 0 -10 0) :color ColorRGBA/Gray :mass 0)) -(defn test-two [] - (world (nodify - [(doto (hand) - (physical!)) - (floor)]) - (merge standard-debug-controls gravity-control) - (comp - #(Capture/captureVideo - % (File. "/home/r/proj/cortex/render/body/2")) - #(do (set-gravity % Vector3f/ZERO) %) - setup) - no-op)) +(defn test-hand-2 + ([] (test-hand-2 false)) + ([record?] + (world + (nodify + [(doto (hand) + (physical!)) + (floor)]) + (merge standard-debug-controls gravity-control) + (fn [world] + (if record? + (Capture/captureVideo + world (File. "/home/r/proj/cortex/render/body/2"))) + (set-gravity world Vector3f/ZERO) + (setup world)) + no-op))) #+end_src #+begin_html @@ -428,20 +435,23 @@ {"key-h" (fn [world val] (if val (enable-debug world)))}) -(defn test-three [] - (world (nodify - [(doto (hand) - (physical!) - (joints!)) - (floor)]) - (merge standard-debug-controls debug-control - gravity-control) - (comp - #(Capture/captureVideo - % (File. "/home/r/proj/cortex/render/body/3")) - #(do (set-gravity % Vector3f/ZERO) %) - setup) - no-op)) +(defn test-hand-3 + ([] (test-hand-3 false)) + ([record?] + (world + (nodify + [(doto (hand) + (physical!) + (joints!)) + (floor)]) + (merge standard-debug-controls debug-control + gravity-control) + (comp + #(Capture/captureVideo + % (File. 
"/home/r/proj/cortex/render/body/3")) + #(do (set-gravity % Vector3f/ZERO) %) + setup) + no-op))) #+end_src =physical!= makes the hand solid, then =joints!= connects each @@ -494,22 +504,25 @@ (load-blender-model "Models/test-creature/worm.blend")) -(defn worm-1 [] - (let [timer (RatchetTimer. 60)] - (world - (nodify - [(doto (worm) - (body!)) - (floor)]) - (merge standard-debug-controls debug-control) - #(do - (speed-up %) - (light-up-everything %) - (.setTimer % timer) - (cortex.util/display-dialated-time % timer) - (Capture/captureVideo - % (File. "/home/r/proj/cortex/render/body/4"))) - no-op))) +(defn test-worm + ([] (test-worm false)) + ([record?] + (let [timer (RatchetTimer. 60)] + (world + (nodify + [(doto (worm) + (body!)) + (floor)]) + (merge standard-debug-controls debug-control) + #(do + (speed-up %) + (light-up-everything %) + (.setTimer % timer) + (cortex.util/display-dialated-time % timer) + (if record? + (Capture/captureVideo + % (File. "/home/r/proj/cortex/render/body/4")))) + no-op)))) #+end_src #+begin_html diff -r 35d9e7d04d87 -r 2dfebf71053c org/capture-video.org --- a/org/capture-video.org Sat Feb 18 02:06:06 2012 -0600 +++ b/org/capture-video.org Sat Feb 18 02:07:40 2012 -0600 @@ -12,8 +12,7 @@ create a demo video to show off your hard work. Screen capturing is the most straightforward way to do this, but it can slow down your game and produce low-quality video as a result. A better way is to -record a video feed directly from the game while it is -running. +record a video feed directly from the game while it is running. In this post, I'll explain how you can alter your JMonkeyEngine3 game to output video while it is running. The main trick is to alter the @@ -28,62 +27,62 @@ ** The built-in =Timer= rushes to keep up. #* Game-time vs. User-time vs. Video-time -Standard JME3 applications use a =Timer= object to manage time -in the simulated world. Because most JME3 applications (e.g. 
games) -are supposed to happen \ldquo{}live\rdquo{}, the built-in =Timer= -requires simulated time to match real time. This means that the -application must rush to finish all of its calculations on -schedule: the more complicated the calculations, the more the -application is obligated to rush. And if the workload becomes too -much to handle on schedule, =Timer= forces the application to cut -corners: it demands fast, approximate answers instead of careful, -accurate ones. Although this policy sometimes causes physically impossible -glitches and choppy framerates, it ensures that the user will never be -kept waiting while the computer stops to make a complicated -calculation. +Standard JME3 applications use a =Timer= object to manage time in the +simulated world. Because most JME3 applications (e.g. games) are +supposed to happen \ldquo{}live\rdquo{}, the built-in =Timer= requires +simulated time to match real time. This means that the application +must rush to finish all of its calculations on schedule: the more +complicated the calculations, the more the application is obligated to +rush. And if the workload becomes too much to handle on schedule, +=Timer= forces the application to cut corners: it demands fast, +approximate answers instead of careful, accurate ones. Although this +policy sometimes causes physically impossible glitches and choppy +framerates, it ensures that the user will never be kept waiting while +the computer stops to make a complicated calculation. Now, the built-in =Timer= values speed over accuracy because real-time -applications require it. On the other hand, if your goal is to record a -glitch-free video, you need a =Timer= that will take its time to -ensure that all calculations are accurate, even if they take a long time. In the next section, we -will create a new kind of =Timer=\mdash{}called =IsoTimer=\mdash{}which -slows down to let the computer finish all its calculations. 
The result -is a perfectly steady framerate and a flawless physical simulation. +applications require it. On the other hand, if your goal is to record +a glitch-free video, you need a =Timer= that will take its time to +ensure that all calculations are accurate, even if they take a long +time. In the next section, we will create a new kind of +=Timer=\mdash{}called =IsoTimer=\mdash{}which slows down to let the +computer finish all its calculations. The result is a perfectly steady +framerate and a flawless physical simulation. # are supposed to happen \ldquo live \rdquo, this =Timer= requires the -# application to update in real-time. In order to keep up with the real world, JME applications cannot -# afford to take too much time on expensive computations. Whenever the -# workload becomes too much for the computer to handle on schedule, -# =Timer= forces the computer to cut corners, giving fast, approximate -# answers instead of careful, accurate ones. Although physical accuracy sometimes -# suffers as a result, this policy ensures that the user will never be -# kept waiting while the computer stops to make a complicated -# calculation. +# application to update in real-time. In order to keep up with the +# real world, JME applications cannot afford to take too much time on +# expensive computations. Whenever the workload becomes too much for +# the computer to handle on schedule, =Timer= forces the computer to +# cut corners, giving fast, approximate answers instead of careful, +# accurate ones. Although physical accuracy sometimes suffers as a +# result, this policy ensures that the user will never be kept waiting +# while the computer stops to make a complicated calculation. #fast answers are more important than accurate ones. # A standard JME3 application that extends =SimpleApplication= or # =Application= tries as hard as it can to keep in sync with -# /user-time/. 
If a ball is rolling at 1 game-mile per game-hour in the -# game, and you wait for one user-hour as measured by the clock on your -# wall, then the ball should have traveled exactly one game-mile. In -# order to keep sync with the real world, the game throttles its physics -# engine and graphics display. If the computations involved in running -# the game are too intense, then the game will first skip frames, then -# sacrifice physics accuracy. If there are particuraly demanding -# computations, then you may only get 1 fps, and the ball may tunnel -# through the floor or obstacles due to inaccurate physics simulation, -# but after the end of one user-hour, that ball will have traveled one -# game-mile. +# /user-time/. If a ball is rolling at 1 game-mile per game-hour in +# the game, and you wait for one user-hour as measured by the clock on +# your wall, then the ball should have traveled exactly one +# game-mile. In order to keep sync with the real world, the game +# throttles its physics engine and graphics display. If the +# computations involved in running the game are too intense, then the +# game will first skip frames, then sacrifice physics accuracy. If +# there are particuraly demanding computations, then you may only get +# 1 fps, and the ball may tunnel through the floor or obstacles due to +# inaccurate physics simulation, but after the end of one user-hour, +# that ball will have traveled one game-mile. -# When we're recording video, we don't care if the game-time syncs with -# user-time, but instead whether the time in the recorded video +# When we're recording video, we don't care if the game-time syncs +# with user-time, but instead whether the time in the recorded video # (video-time) syncs with user-time. To continue the analogy, if we -# recorded the ball rolling at 1 game-mile per game-hour and watched the -# video later, we would want to see 30 fps video of the ball rolling at -# 1 video-mile per /user-hour/. 
It doesn't matter how much user-time it -# took to simulate that hour of game-time to make the high-quality -# recording. +# recorded the ball rolling at 1 game-mile per game-hour and watched +# the video later, we would want to see 30 fps video of the ball +# rolling at 1 video-mile per /user-hour/. It doesn't matter how much +# user-time it took to simulate that hour of game-time to make the +# high-quality recording. ** COMMENT Two examples to clarify the point: *** Recording from a Simple Simulation @@ -151,10 +150,10 @@ the screen, and an additional $\frac{1}{60}$ to encode the video and write the frame to disk. This is a total of $\frac{1}{24}$ user-seconds for each $\frac{1}{60}$ game-seconds. It will take -$(\frac{60}{24} = 2.5)$ user-hours to record one game-hour and game-time -will appear to flow two-fifths as fast as user time while the game is -running. However, just as in example one, when all is said and done we -will have an hour long video at 60 fps. +$(\frac{60}{24} = 2.5)$ user-hours to record one game-hour and +game-time will appear to flow two-fifths as fast as user time while +the game is running. However, just as in example one, when all is said +and done we will have an hour long video at 60 fps. ** COMMENT proposed names for the new timer @@ -182,8 +181,8 @@ #+include ../../jmeCapture/src/com/aurellem/capture/IsoTimer.java src java If an Application uses this =IsoTimer= instead of the normal one, we -can be sure that every call to =simpleUpdate=, for example, corresponds -to exactly $(\frac{1}{fps})$ seconds of game-time. +can be sure that every call to =simpleUpdate=, for example, +corresponds to exactly $(\frac{1}{fps})$ seconds of game-time. * =VideoRecorder= manages video feeds in JMonkeyEngine. @@ -240,8 +239,9 @@ With this, we are able to record video! -However, it can be hard to properly install Xuggle. 
If you would rather not use Xuggle, here is an alternate class that uses -[[http://www.randelshofer.ch/blog/2008/08/writing-avi-videos-in-pure-java/][Werner Randelshofer's]] excellent pure Java AVI file writer. +However, it can be hard to properly install Xuggle. If you would +rather not use Xuggle, here is an alternate class that uses [[http://www.randelshofer.ch/blog/2008/08/writing-avi-videos-in-pure-java/][Werner +Randelshofer's]] excellent pure Java AVI file writer. =./src/com/aurellem/capture/video/AVIVideoRecorder.java= #+include ../../jmeCapture/src/com/aurellem/capture/video/AVIVideoRecorder.java src java @@ -265,13 +265,14 @@ ** Include this code. - No matter how complicated your application is, it's easy to add - support for video output with just a few lines of code. +No matter how complicated your application is, it's easy to add +support for video output with just a few lines of code. # You can also record from multiple ViewPorts as the above example shows. -And although you can use =VideoRecorder= to record advanced split-screen videos with multiple views, in the simplest case, you want to capture a single view\mdash{} -exactly what's on screen. In this case, the following simple =captureVideo= -method will do the job: +And although you can use =VideoRecorder= to record advanced +split-screen videos with multiple views, in the simplest case, you +want to capture a single view\mdash{} exactly what's on screen. In +this case, the following simple =captureVideo= method will do the job: #+begin_src java public static void captureVideo(final Application app, @@ -302,12 +303,9 @@ } #+end_src -This method selects the appropriate =VideoRecorder= class -for the file type you specify, and instructs your -application to record video to the file. - - - +This method selects the appropriate =VideoRecorder= class for the file +type you specify, and instructs your application to record video to +the file. 
Now that you have a =captureVideo= method, you use it like this: @@ -321,7 +319,8 @@ - Choose the output file :: If you want to record from the game's main =ViewPort= to a file called =/home/r/record.flv=, then - include the following line of code somewhere before you call =app.start()=; + include the following line of code somewhere before you call + =app.start()=; #+begin_src java :exports code Capture.captureVideo(app, new File("/home/r/record.flv")); @@ -384,8 +383,9 @@ * COMMENT More Examples ** COMMENT Hello Physics -=HelloVideo= is boring. Let's add some video capturing to =HelloPhysics= -and create something fun! + +=HelloVideo= is boring. Let's add some video capturing to +=HelloPhysics= and create something fun! This example is a modified version of =HelloPhysics= that creates four simultaneous views of the same scene of cannonballs careening into a @@ -579,7 +579,7 @@ four points of view are simultaneously recorded and then glued together later. - JME3 Xuggle Aurellem video capture +JME3 Xuggle Aurellem video capture * Showcase of recorded videos @@ -757,31 +757,3 @@ videoscale ! ffmpegcolorspace ! \ video/x-raw-yuv, width=640, height=480, framerate=25/1 ! \ videobox right=-640 ! mix. - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff -r 35d9e7d04d87 -r 2dfebf71053c org/hearing.org --- a/org/hearing.org Sat Feb 18 02:06:06 2012 -0600 +++ b/org/hearing.org Sat Feb 18 02:07:40 2012 -0600 @@ -1018,43 +1018,43 @@ #+begin_src clojure (in-ns 'cortex.test.hearing) -(cortex.import/mega-import-jme3) -(import java.io.File) - -(use 'cortex.body) - -(defn test-worm-hearing [] - (let [the-worm (doto (worm) (body!)) - hearing (hearing! the-worm) - hearing-display (view-hearing) +(defn test-worm-hearing + ([] (test-worm-hearing false)) + ([record?] + (let [the-worm (doto (worm) (body!)) + hearing (hearing! the-worm) + hearing-display (view-hearing) + + tone (AudioNode. (asset-manager) + "Sounds/pure.wav" false) + + hymn (AudioNode. 
(asset-manager) + "Sounds/ear-and-eye.wav" false)] + (world + (nodify [the-worm (floor)]) + (merge standard-debug-controls + {"key-return" + (fn [_ value] + (if value (.play tone))) + "key-l" + (fn [_ value] + (if value (.play hymn)))}) + (fn [world] + (light-up-everything world) + (if record? + (do + (com.aurellem.capture.Capture/captureVideo + world + (File."/home/r/proj/cortex/render/worm-audio/frames")) + (com.aurellem.capture.Capture/captureAudio + world + (File."/home/r/proj/cortex/render/worm-audio/audio.wav"))))) - tone (AudioNode. (asset-manager) - "Sounds/pure.wav" false) - - hymn (AudioNode. (asset-manager) - "Sounds/ear-and-eye.wav" false)] - (world - (nodify [the-worm (floor)]) - (merge standard-debug-controls - {"key-return" - (fn [_ value] - (if value (.play tone))) - "key-l" - (fn [_ value] - (if value (.play hymn)))}) - (fn [world] - (light-up-everything world) - (com.aurellem.capture.Capture/captureVideo - world - (File."/home/r/proj/cortex/render/worm-audio/frames")) - (com.aurellem.capture.Capture/captureAudio - world - (File."/home/r/proj/cortex/render/worm-audio/audio.wav"))) - - (fn [world tpf] - (hearing-display - (map #(% world) hearing) - (File. "/home/r/proj/cortex/render/worm-audio/hearing-data")))))) + (fn [world tpf] + (hearing-display + (map #(% world) hearing) + (if record? + (File. 
"/home/r/proj/cortex/render/worm-audio/hearing-data")))))))) #+end_src In this test, I load the worm with its newly formed ear and let it @@ -1154,11 +1154,13 @@ #+name: test-header #+begin_src clojure (ns cortex.test.hearing - (:use (cortex world util hearing)) + (:use (cortex world util hearing body)) (:use cortex.test.body) (:import (com.jme3.audio AudioNode Listener)) + (:import java.io.File) (:import com.jme3.scene.Node - com.jme3.system.AppSettings)) + com.jme3.system.AppSettings + com.jme3.math.Vector3f)) #+end_src * Source Listing diff -r 35d9e7d04d87 -r 2dfebf71053c org/ideas.org --- a/org/ideas.org Sat Feb 18 02:06:06 2012 -0600 +++ b/org/ideas.org Sat Feb 18 02:07:40 2012 -0600 @@ -70,10 +70,11 @@ - [X] proprioception sensor map in the style of the other senses -- day - [X] refactor integration code to distribute to each of the senses -- day - - [ ] create video showing all the senses for Winston -- 2 days + - [X] create video showing all the senses for Winston -- 2 days + - [ ] spellchecking !!!!! - [ ] send package to friends for critiques -- 2 days - - [ ] fix videos that were encoded wrong, test on InterNet Explorer. - - [ ] redo videos vision with new collapse code + - [X] fix videos that were encoded wrong, test on InterNet Explorer. + - [X] redo videos vision with new collapse code - [X] find a topology that looks good. (maybe nil topology?) - [X] fix red part of touch cube in video and image - [ ] write summary of project for Winston \ diff -r 35d9e7d04d87 -r 2dfebf71053c org/integration.org --- a/org/integration.org Sat Feb 18 02:06:06 2012 -0600 +++ b/org/integration.org Sat Feb 18 02:07:40 2012 -0600 @@ -1,4 +1,4 @@ -#+title: First attempt at a creature! +#+title: #+author: Robert McIntyre #+email: rlm@mit.edu #+description: @@ -6,72 +6,235 @@ #+SETUPFILE: ../../aurellem/org/setup.org #+INCLUDE: ../../aurellem/org/level-0.org +* Integration +This is the ultimate test which features all of the senses that I've +made so far. 
The blender file for the creature serves as an example of +a fully equipped creature in terms of senses. You can find it [[../assets/Models/test-creature/hand.blend][here]]. -* Intro -So far, I've made the following senses -- - - Vision - - Hearing - - Touch - - Proprioception +#+begin_html +
+ +

Simulated Senses in a Virtual Environment

+
+#+end_html -And one effector: - - Movement - -However, the code so far has only enabled these senses, but has not -actually implemented them. For example, there is still a lot of work -to be done for vision. I need to be able to create an /eyeball/ in -simulation that can be moved around and see the world from different -angles. I also need to determine weather to use log-polar or cartesian -for the visual input, and I need to determine how/wether to -disceritise the visual input. - -I also want to be able to visualize both the sensors and the -effectors in pretty pictures. This semi-retarted creature will be my -first attempt at bringing everything together. - -* The creature's body - -Still going to do an eve-like body in blender, but due to problems -importing the joints, etc into jMonkeyEngine3, I'm going to do all -the connecting here in clojure code, using the names of the individual -components and trial and error. Later, I'll maybe make some sort of -creature-building modifications to blender that support whatever -discritized senses I'm going to make. +* Generating the Video #+name: integration #+begin_src clojure -(ns cortex.integration +(ns cortex.test.integration "let's play!" {:author "Robert McIntyre"} - (:use (cortex world util body - hearing touch vision sense proprioception movement)) + (:use (cortex world util body sense + hearing touch vision proprioception movement)) (:import (com.jme3.math ColorRGBA Vector3f)) + (:import java.io.File) (:import com.jme3.audio.AudioNode) (:import com.aurellem.capture.RatchetTimer)) -(def hand "Models/creature1/one.blend") +(dorun (cortex.import/mega-import-jme3)) +(rlm.rlm-commands/help) -(def worm "Models/creature1/try-again.blend") +(def hand "Models/test-creature/hand.blend") -(defn test-creature [thing] - (let [x-axis - (box 1 0.01 0.01 :physical? false :color ColorRGBA/Red) - y-axis - (box 0.01 1 0.01 :physical? false :color ColorRGBA/Green) - z-axis - (box 0.01 0.01 1 :physical? 
false :color ColorRGBA/Blue) +(def output-base (File. "/home/r/proj/cortex/render/hand")) +#+end_src - me (sphere 0.5 :color ColorRGBA/Blue :physical? false) - bell (AudioNode. (asset-manager) - "Sounds/pure.wav" false) +For this demonstration I have to manually drive the muscles of the +hand. I do this by creating a little mini-language to describe +simulated muscle contraction. - fix-display - (runonce (fn [world] - (add-camera! world (.getCamera world) no-op))) - creature (doto (load-blender-model thing) (body!)) +#+name: integration-2 +#+begin_src clojure +(defn motor-control-program + "Create a function which will execute the motor script" + [muscle-positions + script] + (let [current-frame (atom -1) + keyed-script (group-by first script) + current-forces (atom {}) ] + (fn [effectors] + (let [indexed-effectors (vec effectors)] + (dorun + (for [[_ part force] (keyed-script (swap! current-frame inc))] + (swap! current-forces (fn [m] (assoc m part force))))) + (doall (map (fn [effector power] + (effector (int power))) + effectors + (map #(@current-forces % 0) muscle-positions))))))) +(def muscle-positions + [:pointer-2-e + :pointer-2-f + :thumb-1 + :thumb-1 + :pointer-1-e + :pointer-1-f + :thumb-2-e + :thumb-2-f + :middle-1-e + :middle-1-f + :pointer-3-f + :pointer-3-e + :middle-2-e + :middle-2-f + :middle-3-f + :middle-3-e + :pinky-2-e + :pinky-2-f + :pinky-3-f + :pinky-3-e + :ring-3-e + :ring-3-f + :ring-2-f + :ring-2-e + :ring-1-e + :ring-1-f + :thumb-1-e + :thumb-1-f + :pinky-1-f + :pinky-1-e]) + +(def full 9001) + + +;; Coreography: + +;; Let the hand fall palm-up + +;; it curls its phalanges, starting with the pinky. + +;; it lets its phalanges fall back down. + +;; block falls down onto the hand, accompanied by a sound. The block +;; can be seen by the hand's eye. + +;; hand FORCEFULLY catapults the block so that it hits the camera. 
+ + +;; the systax here is [keyframe body-part force] +(def move-fingers + [[300 :pinky-3-f 50] + [320 :pinky-2-f 80] + [340 :pinky-1-f 100] + + [310 :ring-3-f 100] + [330 :ring-2-f 120] + [350 :ring-1-f 140] + + [330 :middle-3-f 120] + [340 :middle-2-f 120] + [360 :middle-1-f 30] + + [350 :pointer-3-f 120] + [360 :pointer-2-f 120] + [380 :pointer-1-f 30] + + [800 :pinky-3-f 0] + [800 :pinky-2-f 0] + [800 :pinky-1-f 0] + + [800 :ring-3-f 0] + [800 :ring-2-f 0] + [800 :ring-1-f 0] + + [800 :middle-3-f 0] + [800 :middle-2-f 0] + [800 :middle-1-f 0] + + [800 :pointer-3-f 0] + [800 :pointer-2-f 0] + [800 :pointer-1-f 0] + + + [800 :pinky-3-e 50] + [800 :pinky-2-e 80] + [800 :pinky-1-e 100] + + [800 :ring-3-e 100] + [800 :ring-2-e 120] + [800 :ring-1-e 140] + + [800 :middle-3-e 120] + [800 :middle-2-e 120] + [800 :middle-1-e 30] + + [800 :pointer-3-e 120] + [800 :pointer-2-e 120] + [800 :pointer-1-e 30] + + [870 :pinky-3-e 0] + [870 :pinky-2-e 0] + [870 :pinky-1-e 0] + + [870 :ring-3-e 0] + [870 :ring-2-e 0] + [870 :ring-1-e 0] + + [870 :middle-3-e 0] + [870 :middle-2-e 0] + [870 :middle-1-e 0] + + [870 :pointer-3-e 0] + [870 :pointer-2-e 0] + [870 :pointer-1-e 0] + + [1500 :pointer-1-f full] + [1500 :pointer-2-f full] + [1500 :pointer-3-f full] + + [1500 :middle-1-f full] + [1500 :middle-2-f full] + [1500 :middle-3-f full] + + [1510 :pointer-1-f 0] + [1510 :pointer-2-f 0] + [1510 :pointer-3-f 0] + + [1510 :middle-1-f 0] + [1510 :middle-2-f 0] + [1510 :middle-3-f 0] + ]) + +(defn gen-summon-ball [debug?] + (let [wait (atom 1100)] + (fn [world] + (if (= 0 (swap! wait dec)) + (let [brick + (box 0.8 0.8 0.8 :mass 0.05 + :position (Vector3f. -0.5 0 0.5) + :color (ColorRGBA/Red)) + bell (AudioNode. (asset-manager) + "Sounds/pure.wav" false)] + (.play bell) + (if debug? 
+ (.addControl + brick + (proxy [AbstractControl] [] + (controlUpdate [tpf] + (println-repl (.getWorldTranslation brick))) + (controlRender [_ _])))) + (add-element world brick)))))) + +(import com.aurellem.capture.Capture) + +(defn test-everything! + ([] (test-everything! false)) + ([record?] + (let [me (sphere 0.5 :color ColorRGBA/Blue :physical? false) + + base (File. "/home/r/proj/cortex/render/hand") + + + creature (doto (load-blender-model hand) + (body!)) + + summon-ball (gen-summon-ball false) ;;;;;;;;;;;; Sensors/Effectors ;;;;;;;;;;;;;;;;;;;;;;;;;;;; touch (touch! creature) touch-display (view-touch) @@ -85,69 +248,466 @@ prop (proprioception! creature) prop-display (view-proprioception) - muscle-exertion (atom 0) + control-script (motor-control-program + muscle-positions move-fingers) muscles (movement! creature) - muscle-display (view-movement)] + muscle-display (view-movement) + ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; - (apply - world - (with-movement - (.getChild creature "worm-21") - ["key-r" "key-t" - "key-f" "key-g" - "key-v" "key-b"] - [10 10 10 10 1 1] - [(nodify [creature - (box 10 2 10 :position (Vector3f. 0 -9 0) - :color ColorRGBA/Gray :mass 0) - x-axis y-axis z-axis - me]) - (merge standard-debug-controls - {"key-return" - (fn [_ value] - (if value - (do - (println-repl "play-sound") - (.play bell)))) - "key-h" - (fn [_ value] - (if value - (swap! muscle-exertion (partial + 20)))) - "key-n" - (fn [_ value] - (if value - (swap! muscle-exertion (fn [v] (- v 20)))))}) - (fn [world] - (light-up-everything world) - (enable-debug world) - (add-camera! world - (add-eye! creature - (.getChild - (.getChild creature "eyes") "eye")) - (comp (view-image) BufferedImage!)) - (.setTimer world (RatchetTimer. 60)) - (speed-up world) - (set-gravity world (Vector3f. 
0 0 0)) - (comment - (com.aurellem.capture.Capture/captureVideo - world (file-str "/home/r/proj/ai-videos/hand")))) - (fn [world tpf] - (prop-display (prop)) - (touch-display (map #(% (.getRootNode world)) touch)) - (vision-display (map #(% world) vision)) - (hearing-display (map #(% world) hearing)) - (muscle-display (map #(% @muscle-exertion) muscles)) - (.setLocalTranslation me (.getLocation (.getCamera world))) - (fix-display world))])))) + fix-display (gen-fix-display)] + (world + (nodify [creature + (box 10 2 10 :position (Vector3f. 0 -9 0) + :color ColorRGBA/Gray :mass 0) + me]) + standard-debug-controls + + (fn [world] + (.setTimer world (RatchetTimer. 60)) + (position-camera + world (Vector3f. -0.13217318, 5.816415, -5.3089414) + (Quaternion. 0.55685693, 0.0042774677, -0.0028673497, 0.83059245)) + + (light-up-everything world) + (enable-debug world) + (add-camera! world + (add-eye! creature + (.getChild + (.getChild creature "eyes") "eye")) + (comp (view-image) BufferedImage!)) + + (if record? + (Capture/captureVideo + world (File. base "main"))) + (if record? + (Capture/captureAudio + world (File. base "main.wav")))) + (fn [world tpf] + (prop-display + (prop) + (if record? (File. base "proprio"))) + (touch-display + (map #(% (.getRootNode world)) touch) + (if record? (File. base "touch"))) + (vision-display + (map #(% world) vision) + (if record? (File. base "vision"))) + (hearing-display + (map #(% world) hearing) + (if record? (File. base "hearing"))) + (muscle-display + (control-script muscles) + (if record? (File. base "muscle"))) + + (summon-ball world) + + (.setLocalTranslation me (.getLocation (.getCamera world))) + (fix-display world)))))) #+end_src -#+results: body-1 -: #'cortex.silly/follow-test +** ImageMagick / ffmpeg + +Just a bunch of calls to imagemagick to arrange the data that +=test-everything!= produces. 
+ +#+name: magick-8 +#+begin_src clojure +(ns cortex.video.magick8 + (:import java.io.File) + (:use clojure.contrib.shell-out)) + +(comment +;; list of touch targets +0 middle-11 +1 middle-21 +2 middle-31 +3 pinky-11 +4 pinky-21 +5 pinky-31 +6 pointer-11 +7 pointer-21 +8 pointer-31 +9 ring-11 +10 ring-21 +11 ring-31 +12 thumb-11 +13 thumb-2.0011 + + +;; list of vision targets +0 :all +1 :green +2 :blue +3 :red + +;; list of proprio targets +0 middle-11 -> middle-21 +1 middle-21 -> middle-31 +2 thumb-11 -> thumb-2.0011 +3 pointer-11 -> pointer-21 +4 pointer-21 -> pointer-31 +5 ring-21 -> ring-31 +6 ring-11 -> ring-21 +7 pinky-21 -> pinky-31 +8 pinky-11 -> pinky-21 +9 middle-11 -> palm1 +10 pinky-11 -> palm1 +11 palm1 -> pointer-11 +12 palm1 -> ring-11 +13 palm1 -> thumb-11 + + +;; list of muscle targets +0 :pointer-2-e +1 :pointer-2-f +2 :thumb-1 +3 :thumb-1 +4 :pointer-1-e +5 :pointer-1-f +6 :thumb-2-e +7 :thumb-2-f +8 :middle-1-e +9 :middle-1-f +10 :pointer-3-f +11 :pointer-3-e +12 :middle-2-e +13 :middle-2-f +14 :middle-3-f +15 :middle-3-e +16 :pinky-2-e +17 :pinky-2-f +18 :pinky-3-f +19 :pinky-3-e +20 :ring-3-e +21 :ring-3-f +22 :ring-2-f +23 :ring-2-e +24 :ring-1-e +25 :ring-1-f +26 :thumb-1-e +27 :thumb-1-f +28 :pinky-1-f +29 :pinky-1-e +) + +(def base (File. "/home/r/proj/cortex/render/hand")) + +(defn prepare-muscle [muscle] + ["(" muscle "-rotate" "90" "-scale" "15x60!" 
")"]) + +(defn prepare-touch [touch] + ["(" touch "-rotate" "180" ")"]) + +(defn generate-top-finger [tip-flexor tip-extensor tip + joint-2-3 + mid-flexor mid-extensor mid + joint-1-2] + ["(" + "-size" "113x357" "xc:transparent" + (prepare-muscle tip-flexor) "-geometry" "+0+7" "-composite" + (prepare-muscle tip-extensor) "-geometry" "+98+7" "-composite" + (prepare-touch tip) "-geometry" "+18+0" "-composite" + + joint-2-3 "-geometry" "+32+79" "-composite" + + (prepare-muscle mid-flexor) "-geometry" "+19+131" "-composite" + (prepare-muscle mid-extensor) "-geometry" "+80+131" "-composite" + (prepare-touch mid) "-geometry" "+39+133" "-composite" + + joint-1-2 "-geometry" "+32+193" "-composite" + ")"]) + +(defn file-names [#^File dir] + (map #(.getCanonicalPath %) (next (sort (file-seq dir))))) + +(defn file-groups [& paths] + (apply (partial map list ) + (map (comp file-names #(File. base %)) + paths))) + +(defn pinky [] + (file-groups + "muscle/18" + "muscle/19" + "touch/5" + "proprio/7" + + "muscle/17" + "muscle/16" + "touch/4" + "proprio/8" + + "muscle/28" + "muscle/29" + "touch/3" + "proprio/10")) + +(defn ring [] + (file-groups + "muscle/21" + "muscle/20" + "touch/11" + "proprio/5" + + "muscle/22" + "muscle/23" + "touch/10" + "proprio/6" + + "muscle/25" + "muscle/24" + "touch/9" + "proprio/12")) + +(defn middle [] + (file-groups + "muscle/14" + "muscle/15" + "touch/2" + "proprio/1" + + "muscle/13" + "muscle/12" + "touch/1" + "proprio/0" + + "muscle/9" + "muscle/8" + "touch/0" + "proprio/9")) + +(defn pointer [] + (file-groups + "muscle/10" + "muscle/11" + "touch/8" + "proprio/4" + + "muscle/1" + "muscle/0" + "touch/7" + "proprio/3" + + "muscle/5" + "muscle/4" + "touch/6" + "proprio/11")) + +(defn thumb [] + (file-groups + "muscle/7" + "muscle/6" + "touch/13" + "proprio/2" + + "muscle/27" + "muscle/26" + "muscle/3" + "muscle/2" + "touch/12" + "proprio/13")) + +(defn generate-finger + [tip-flexor tip-extensor tip + joint-2-3 + mid-flexor mid-extensor mid + joint-1-2 
+ base-flexor base-extensor base + joint-palm-1] + ["(" + "-size" "113x357" "xc:transparent" + (generate-top-finger + tip-flexor tip-extensor tip + joint-2-3 + mid-flexor mid-extensor mid + joint-1-2) "-geometry" "+0+0" "-composite" + (prepare-muscle base-flexor) "-geometry" "+19+245" "-composite" + (prepare-muscle base-extensor) "-geometry" "+80+245" "-composite" + (prepare-touch base) "-geometry" "+39+247" "-composite" + joint-palm-1 "-geometry" "+32+307" "-composite" + ")"]) + +(defn generate-thumb + [tip-flexor tip-extensor tip + joint-1-2 + mid-flexor mid-extensor mid-flexor-2 mid-extensor-2 mid + joint-palm-1] + ["(" + "-size" "113x357" "xc:transparent" + (generate-top-finger + tip-flexor tip-extensor tip + joint-1-2 + mid-flexor mid-extensor mid + joint-palm-1) "-geometry" "+0+0" "-composite" + (prepare-muscle mid-flexor-2) "-geometry" "+2+131" "-composite" + (prepare-muscle mid-extensor-2) "-geometry" "+100+131" "-composite" + ")"]) + +(defn generate-hand + [pinky-pieces + ring-pieces + middle-pieces + pointer-pieces + thumb-pieces] + ["(" + "-size" "688x769" "xc:transparent" + (apply generate-finger pinky-pieces) + "-geometry" "+0+195" "-composite" + (apply generate-finger ring-pieces) + "-geometry" "+111+100" "-composite" + (apply generate-finger middle-pieces) + "-geometry" "+228+0" "-composite" + "(" (apply generate-thumb thumb-pieces) "-background" "#00000000" + "-rotate" "45" ")" + "-geometry" "+300+420" "-composite" + (apply generate-finger pointer-pieces) + "-geometry" "+350+96" "-composite" + ")"]) + +(defn generate-vision + [all green blue red] + ["(" + "-size" "204x192" "xc:transparent" + all "-geometry" "+0+0" "-composite" + green "-geometry" "+113+0" "-composite" + blue "-geometry" "+0+105" "-composite" + red "-geometry" "+113+105" "-composite" + ")"]) + +(def test-muscle (File. base "muscle/0/0000000.png")) +(def test-proprio (File. base "proprio/0/0000000.png")) +(def test-tip (File. base "touch/2/0000000.png")) +(def test-mid (File. 
base "touch/0/0000000.png")) +(def test-vision (File. base "vision/0/0000000.png")) +(def test-hearing (File. base "hearing/0/0000000.png")) +(def test-main (File. base "main/0000000.png")) + +(def test-target (File. base "output.png")) + +(def background (File. base "background.png")) + +(use 'clojure.contrib.shell-out) + +(defn vision [] + (file-groups + "vision/0" + "vision/1" + "vision/2" + "vision/3")) + +(defn hearing [] + (file-names (File. base "hearing/0"))) + +(defn main [] + (file-names (File. base "main"))) + +(defn targets [dest max] + (map + (comp #(.getCanonicalPath %) + #(File. (str base dest (format "%07d.png" %)))) + (range max))) + + +(defn final-image [main [all red green blue] hearing + pinky ring middle pointer thumb target] + (println target) + (apply + sh + (flatten + ["convert" + (.getCanonicalPath background) + + (generate-hand pinky ring middle pointer thumb) + "-geometry" "+809+22" "-composite" + + (generate-vision all red green blue) + "-geometry" "+974+599" "-composite" + + hearing + "-geometry" "+784+819" "-composite" + + main + "-geometry" "+78+202" "-composite" + + target]))) + +(defn combine-files [] + (dorun + (pmap final-image + (main) + (vision) + (hearing) + (pinky) + (ring) + (middle) + (pointer) + (thumb) + (targets "/out/" (count (main)))))) + +(defn subtitles [] + (file-names (File. base "subs"))) + +(defn outs [] + (file-names (File. base "out"))) + + +(defn mix-subtitles [] + (let [subs (subtitles) + targets (targets "/out-subs/" (count subs)) + overlay (.getCanonicalPath (File. base "output.png"))] + (dorun + (pmap + (fn [sub target] + (sh + "convert" + overlay + sub "-geometry" "+0+0" "-composite" + target)) + subs targets)))) + +(defn out-subtitles [] + (file-names (File. 
base "out-subs"))) + + +(defn insert-subtitles [] + (let [subtitles (out-subtitles) + outs (outs) + targets (targets + "/final/" + (+ (count outs) (count subtitles)))] + (dorun + (pmap + #(sh "cp" %1 %2) + (concat subtitles outs) targets)))) + +(defn generate-final [] + (combine-files) + (mix-subtitles) + (insert-subtitles)) +#+end_src + +#+begin_src sh :results silent +cd /home/r/proj/cortex/render/hand + +sox --ignore-length main.wav main-delayed.wav delay 24 + +mogrify -resize 755x final/* + +ffmpeg -r 60 -i final/%07d.png -i main-delayed.wav -b:a 128k \ + -b:v 9000k -c:a libvorbis -c:v libtheora hand.ogg +#+end_src + + +* Source Listing + - [[../src/cortex/test/integration.clj][cortex.test.integration]] + - [[../src/cortex/video/magick8.clj][cortex.video.magick8]] + - [[../assets/Models/subtitles/hand.blend][subtitles/hand.blend]] +#+html: + - [[http://hg.bortreb.com ][source-repository]] * COMMENT purgatory #+begin_src clojure - (defn bullet-trans* [] (let [obj-a (box 1.5 0.5 0.5 :color ColorRGBA/Red :position (Vector3f. 5 0 0) @@ -308,10 +868,11 @@ * COMMENT generate source -#+begin_src clojure :tangle ../src/cortex/integration.clj +#+begin_src clojure :tangle ../src/cortex/test/integration.clj <> +<> #+end_src - - - +#+begin_src clojure :tangle ../src/cortex/video/magick8.clj +<> +#+end_src diff -r 35d9e7d04d87 -r 2dfebf71053c org/movement.org --- a/org/movement.org Sat Feb 18 02:06:06 2012 -0600 +++ b/org/movement.org Sat Feb 18 02:07:40 2012 -0600 @@ -14,31 +14,31 @@ force in the human body at all! (A straight line of force would correspond to some sort of jet or rocket propulsion.) -*(next paragraph is from memory and needs to be checked!)* - -In humans, muscles are composed of millions of sarcomeres, which can -contract to exert force. A single motor neuron might control 100-1,000 -sarcomeres. When the motor neuron is engaged by the brain, it -activates all of the sarcomeres to which it is attached. 
Some motor -neurons command many sarcomeres, and some command only a few. The -spinal cord generally engages the motor neurons which control few -sarcomeres before the motor neurons which control many sarcomeres. -This recruitment stragety allows for percise movements at low -strength. The collection of all motor neurons that control a muscle is -called the motor pool. The brain essentially says "activate 30% of the -motor pool" and the spinal cord recruits motor neurons untill 30% are -activated. Since the distribution of power among motor neurons is -unequal and recruitment goes from weakest to strongest, the first 30% -of the motor pool might be 5% of the strength of the muscle. +In humans, muscles are composed of muscle fibers which can contract to +exert force. The muscle fibers which compose a muscle are partitioned +into discrete groups which are each controlled by a single alpha motor +neuron. A single alpha motor neuron might control as few as three +or as many as one thousand muscle fibers. When the alpha motor neuron +is engaged by the spinal cord, it activates all of the muscle fibers +to which it is attached. The spinal cord generally engages the alpha +motor neurons which control few muscle fibers before the motor neurons +which control many muscle fibers. This recruitment strategy allows +for precise movements at low strength. The collection of all motor +neurons that control a muscle is called the motor pool. The brain +essentially says "activate 30% of the motor pool" and the spinal cord +recruits motor neurons until 30% are activated. Since the +distribution of power among motor neurons is unequal and recruitment +goes from weakest to strongest, the first 30% of the motor pool might +be 5% of the strength of the muscle. My simulated muscles follow a similiar design: Each muscle is defined by a 1-D array of numbers (the "motor pool").
Each entry in the array -represents a motor neuron which controlls a number of sarcomeres equal -to the value of the entry. A muscle also has a scalar :strength factor -which determines the total force the muscle can exert when all motor -neurons are activated. The effector function for a muscle takes a -number to index into the motor pool, and that number "activates" all -the motor neurons whose index is lower or equal to the number. Each +represents a motor neuron which controlls a number of muscle fibers +equal to the value of the entry. Each muscle has a scalar strength +factor which determines the total force the muscle can exert when all +motor neurons are activated. The effector function for a muscle takes +a number to index into the motor pool, and then "activates" all the +motor neurons whose index is lower or equal to the number. Each motor-neuron will apply force in proportion to its value in the array. Lower values cause less force. The lower values can be put at the "beginning" of the 1-D array to simulate the layout of actual human @@ -118,13 +118,14 @@ pool (motor-pool muscle) pool-integral (reductions + pool) - force-index + forces (vec (map #(float (* strength (/ % (last pool-integral)))) pool-integral)) control (.getControl target RigidBodyControl)] + (println-repl (.getName target) axis) (fn [n] (let [pool-index (max 0 (min n (dec (count pool)))) - force (force-index pool-index)] + force (forces pool-index)] (.applyTorque control (.mult axis force)) (float (/ force strength)))))) @@ -175,9 +176,16 @@ * Adding Touch to the Worm +To the worm, I add two new nodes which describe a single muscle. + +#+attr_html: width=755 +#+caption: The node highlighted in orange is the parent node of all muscles in the worm. The arrow highlighted in yellow represents the creature's single muscle, which moves the top segment. The other nodes which are not highlighted are joints, eyes, and ears. 
+[[../images/worm-with-muscle.png]] + +#+name: test-movement #+begin_src clojure -(defn test-movement - ([] (test-movement false)) +(defn test-worm-movement + ([] (test-worm-movement false)) ([record?] (let [creature (doto (worm) (body!)) @@ -211,11 +219,7 @@ (Vector3f. -4.912815, 2.004171, 0.15710819)) (.setRotation (.getCamera world) (Quaternion. 0.13828252, 0.65516764, - -0.12370994, 0.7323449)) - - (comment - (com.aurellem.capture.Capture/captureVideo - world (file-str "/home/r/proj/ai-videos/hand")))) + -0.12370994, 0.7323449))) (fn [world tpf] (muscle-display (map #(% @muscle-exertion) muscles) @@ -240,7 +244,6 @@ #+end_html - ** Making the Worm Muscles Video #+name: magick7 #+begin_src clojure @@ -304,11 +307,9 @@ (:import java.io.File) (:import java.awt.image.BufferedImage) (:import com.jme3.scene.Node) - (:import com.jme3.math.Vector3f) + (:import (com.jme3.math Quaternion Vector3f)) (:import (com.aurellem.capture Capture RatchetTimer)) (:import com.jme3.bullet.control.RigidBodyControl)) - -(cortex.import/mega-import-jme3) #+end_src * Source Listing diff -r 35d9e7d04d87 -r 2dfebf71053c org/proprioception.org --- a/org/proprioception.org Sat Feb 18 02:06:06 2012 -0600 +++ b/org/proprioception.org Sat Feb 18 02:07:40 2012 -0600 @@ -351,10 +351,12 @@ #+name: test-proprioception-header #+begin_src clojure (ns cortex.test.proprioception -(:import (com.aurellem.capture Capture RatchetTimer)) -(:use (cortex util world proprioception body)) -(:import java.io.File)) -(cortex.import/mega-import-jme3) + (:import (com.aurellem.capture Capture RatchetTimer)) + (:use (cortex util world proprioception body)) + (:import java.io.File) + (:import com.jme3.bullet.control.RigidBodyControl) + (:import com.jme3.bullet.collision.PhysicsCollisionObject) + (:import (com.jme3.math Vector3f Quaternion ColorRGBA))) #+end_src * Source Listing diff -r 35d9e7d04d87 -r 2dfebf71053c org/sense.org --- a/org/sense.org Sat Feb 18 02:06:06 2012 -0600 +++ b/org/sense.org Sat Feb 18 02:07:40 
2012 -0600 @@ -448,32 +448,35 @@ (defn test-bind-sense "Show a camera that stays in the same relative position to a blue cube." - [] - (let [eye-pos (Vector3f. 0 30 0) - rock (box 1 1 1 :color ColorRGBA/Blue - :position (Vector3f. 0 10 0) - :mass 30) - table (box 3 1 10 :color ColorRGBA/Gray :mass 0 - :position (Vector3f. 0 -3 0))] - (world - (nodify [rock table]) - standard-debug-controls - (fn init [world] - (let [cam (doto (.clone (.getCamera world)) - (.setLocation eye-pos) - (.lookAt Vector3f/ZERO - Vector3f/UNIT_X))] - (bind-sense rock cam) - (.setTimer world (RatchetTimer. 60)) - (Capture/captureVideo - world (File. "/home/r/proj/cortex/render/bind-sense0")) - (add-camera! - world cam - (comp (view-image - (File. "/home/r/proj/cortex/render/bind-sense1")) - BufferedImage!)) - (add-camera! world (.getCamera world) no-op))) - no-op))) + ([] (test-bind-sense false)) + ([record?] + (let [eye-pos (Vector3f. 0 30 0) + rock (box 1 1 1 :color ColorRGBA/Blue + :position (Vector3f. 0 10 0) + :mass 30) + table (box 3 1 10 :color ColorRGBA/Gray :mass 0 + :position (Vector3f. 0 -3 0))] + (world + (nodify [rock table]) + standard-debug-controls + (fn init [world] + (let [cam (doto (.clone (.getCamera world)) + (.setLocation eye-pos) + (.lookAt Vector3f/ZERO + Vector3f/UNIT_X))] + (bind-sense rock cam) + (.setTimer world (RatchetTimer. 60)) + (if record? + (Capture/captureVideo + world (File. "/home/r/proj/cortex/render/bind-sense0"))) + (add-camera! + world cam + (comp (view-image + (if record? + (File. "/home/r/proj/cortex/render/bind-sense1"))) + BufferedImage!)) + (add-camera! 
world (.getCamera world) no-op))) + no-op)))) #+end_src #+begin_html diff -r 35d9e7d04d87 -r 2dfebf71053c org/touch.org --- a/org/touch.org Sat Feb 18 02:06:06 2012 -0600 +++ b/org/touch.org Sat Feb 18 02:07:40 2012 -0600 @@ -435,7 +435,7 @@ touch-objects))] (FastMath/clamp (float - (if (> response limit) 0.0 + (if (> response limit) (float 0.0) (+ response correction))) (float 0.0) limit))) @@ -485,7 +485,7 @@ (for [i (range (count coords))] (.setRGB image ((coords i) 0) ((coords i) 1) (apply touch->gray (sensor-data i))))) - image)))) + image)))) #+end_src #+results: visualization @@ -623,8 +623,8 @@ #+begin_src clojure (in-ns 'cortex.test.touch) -(defn test-touch - ([] (test-touch false)) +(defn test-worm-touch + ([] (test-worm-touch false)) ([record?] (let [the-worm (doto (worm) (body!)) touch (touch! the-worm) @@ -730,6 +730,7 @@ - [[../src/cortex/test/touch.clj][cortex.test.touch]] - [[../src/cortex/video/magick4.clj][cortex.video.magick4]] - [[../src/cortex/video/magick5.clj][cortex.video.magick5]] + - [[../assets/Models/test-touch/touch-cube.blend][touch-cube.blend]] #+html: - [[http://hg.bortreb.com ][source-repository]] @@ -781,11 +782,3 @@ #+begin_src clojure :tangle ../src/cortex/video/magick5.clj <> #+end_src - - - - - - - - diff -r 35d9e7d04d87 -r 2dfebf71053c org/util.org --- a/org/util.org Sat Feb 18 02:06:06 2012 -0600 +++ b/org/util.org Sat Feb 18 02:07:40 2012 -0600 @@ -113,13 +113,10 @@ (defn position-camera "Change the position of the in-world camera." - ([world position direction up] + [world #^Vector3f position #^Quaternion rotation] (doto (.getCamera world) - (.setLocation ) - (.lookAt direction up))) - ([world position direction] - (position-camera - world position direction Vector3f/UNIT_Y))) + (.setLocation position) + (.setRotation rotation))) (defn enable-debug "Turn on debug wireframes for every object in this simulation." 
@@ -219,7 +216,7 @@ #+end_src #+results: util -: #'cortex.util/apply-map +: #'cortex.util/runonce *** Creating Basic Shapes diff -r 35d9e7d04d87 -r 2dfebf71053c org/vision.org --- a/org/vision.org Sat Feb 18 02:06:06 2012 -0600 +++ b/org/vision.org Sat Feb 18 02:07:40 2012 -0600 @@ -453,35 +453,38 @@ You should see a rotating cube, and two windows, each displaying a different view of the cube." - [] - (let [candy - (box 1 1 1 :physical? false :color ColorRGBA/Blue)] - (world - (doto (Node.) - (.attachChild candy)) - {} - (fn [world] - (let [cam (.clone (.getCamera world)) - width (.getWidth cam) - height (.getHeight cam)] - (add-camera! world cam - (comp - (view-image - (File. "/home/r/proj/cortex/render/vision/1")) - BufferedImage!)) - (add-camera! world - (doto (.clone cam) - (.setLocation (Vector3f. -10 0 0)) - (.lookAt Vector3f/ZERO Vector3f/UNIT_Y)) - (comp - (view-image - (File. "/home/r/proj/cortex/render/vision/2")) - BufferedImage!)) - ;; This is here to restore the main view + ([] (test-pipeline false)) + ([record?] + (let [candy + (box 1 1 1 :physical? false :color ColorRGBA/Blue)] + (world + (doto (Node.) + (.attachChild candy)) + {} + (fn [world] + (let [cam (.clone (.getCamera world)) + width (.getWidth cam) + height (.getHeight cam)] + (add-camera! world cam + (comp + (view-image + (if record? + (File. "/home/r/proj/cortex/render/vision/1"))) + BufferedImage!)) + (add-camera! world + (doto (.clone cam) + (.setLocation (Vector3f. -10 0 0)) + (.lookAt Vector3f/ZERO Vector3f/UNIT_Y)) + (comp + (view-image + (if record? + (File. "/home/r/proj/cortex/render/vision/2"))) + BufferedImage!)) + ;; This is here to restore the main view ;; after the other views have completed processing - (add-camera! world (.getCamera world) no-op))) - (fn [world tpf] - (.rotate candy (* tpf 0.2) 0 0))))) + (add-camera! 
world (.getCamera world) no-op))) + (fn [world tpf] + (.rotate candy (* tpf 0.2) 0 0)))))) #+end_src #+begin_html @@ -541,60 +544,62 @@ (comp #(change-color % color) (fire-cannon-ball))) -(defn test-worm-vision [record] - (let [the-worm (doto (worm)(body!)) - vision (vision! the-worm) - vision-display (view-vision) - fix-display (gen-fix-display) - me (sphere 0.5 :color ColorRGBA/Blue :physical? false) - x-axis - (box 1 0.01 0.01 :physical? false :color ColorRGBA/Red - :position (Vector3f. 0 -5 0)) - y-axis - (box 0.01 1 0.01 :physical? false :color ColorRGBA/Green - :position (Vector3f. 0 -5 0)) - z-axis - (box 0.01 0.01 1 :physical? false :color ColorRGBA/Blue - :position (Vector3f. 0 -5 0)) - timer (RatchetTimer. 60)] +(defn test-worm-vision + ([] (test-worm-vision false)) + ([record?] + (let [the-worm (doto (worm)(body!)) + vision (vision! the-worm) + vision-display (view-vision) + fix-display (gen-fix-display) + me (sphere 0.5 :color ColorRGBA/Blue :physical? false) + x-axis + (box 1 0.01 0.01 :physical? false :color ColorRGBA/Red + :position (Vector3f. 0 -5 0)) + y-axis + (box 0.01 1 0.01 :physical? false :color ColorRGBA/Green + :position (Vector3f. 0 -5 0)) + z-axis + (box 0.01 0.01 1 :physical? false :color ColorRGBA/Blue + :position (Vector3f. 0 -5 0)) + timer (RatchetTimer. 60)] - (world (nodify [(floor) the-worm x-axis y-axis z-axis me]) - (assoc standard-debug-controls - "key-r" (colored-cannon-ball ColorRGBA/Red) - "key-b" (colored-cannon-ball ColorRGBA/Blue) - "key-g" (colored-cannon-ball ColorRGBA/Green)) - (fn [world] - (light-up-everything world) - (speed-up world) - (.setTimer world timer) - (display-dialated-time world timer) - ;; add a view from the worm's perspective - (if record - (Capture/captureVideo - world - (File. - "/home/r/proj/cortex/render/worm-vision/main-view"))) - - (add-camera! - world - (add-eye! the-worm - (.getChild - (.getChild the-worm "eyes") "eye")) - (comp - (view-image - (if record - (File. 
- "/home/r/proj/cortex/render/worm-vision/worm-view"))) - BufferedImage!)) - - (set-gravity world Vector3f/ZERO)) - - (fn [world _ ] - (.setLocalTranslation me (.getLocation (.getCamera world))) - (vision-display - (map #(% world) vision) - (if record (File. "/home/r/proj/cortex/render/worm-vision"))) - (fix-display world))))) + (world (nodify [(floor) the-worm x-axis y-axis z-axis me]) + (assoc standard-debug-controls + "key-r" (colored-cannon-ball ColorRGBA/Red) + "key-b" (colored-cannon-ball ColorRGBA/Blue) + "key-g" (colored-cannon-ball ColorRGBA/Green)) + (fn [world] + (light-up-everything world) + (speed-up world) + (.setTimer world timer) + (display-dialated-time world timer) + ;; add a view from the worm's perspective + (if record? + (Capture/captureVideo + world + (File. + "/home/r/proj/cortex/render/worm-vision/main-view"))) + + (add-camera! + world + (add-eye! the-worm + (.getChild + (.getChild the-worm "eyes") "eye")) + (comp + (view-image + (if record? + (File. + "/home/r/proj/cortex/render/worm-vision/worm-view"))) + BufferedImage!)) + + (set-gravity world Vector3f/ZERO)) + + (fn [world _ ] + (.setLocalTranslation me (.getLocation (.getCamera world))) + (vision-display + (map #(% world) vision) + (if record? (File. "/home/r/proj/cortex/render/worm-vision"))) + (fix-display world)))))) #+end_src The world consists of the worm and a flat gray floor. I can shoot red, diff -r 35d9e7d04d87 -r 2dfebf71053c winston-intro.txt --- /dev/null Thu Jan 01 00:00:00 1970 +0000 +++ b/winston-intro.txt Sat Feb 18 02:07:40 2012 -0600 @@ -0,0 +1,103 @@ +Dear Professor Winston, + +I'm ready for you to look through the work that I've done so far. It's +a sequence of posts describing the different simulated senses I've +implemented, with videos. + +It's "blocks world reloaded", because like you say, you need multiple +senses to enable intelligence. 
+ +Please look through the videos and skim the text and tell me what +you think: + +Introduction: +http://aurellem.org/cortex/html/intro.html +http://aurellem.org/cortex/html/sense.html + +http://aurellem.org/cortex/html/body.html -- simulated physical bodies +http://aurellem.org/cortex/html/vision.html -- simulated eyes +http://aurellem.org/cortex/html/hearing.html -- simulated ears +http://aurellem.org/cortex/html/touch.html -- simulated skin/hairs +http://aurellem.org/cortex/html/proprioception.html -- simulated proprioception +http://aurellem.org/cortex/html/movement.html -- simulated muscles +http://aurellem.org/cortex/html/integration.html -- full demonstration + +In particular, look at the video at +http://aurellem.org/cortex/html/integration.html. It shows a +simulated hand equipped with all of the senses I've built so far. + +There's some more background information and full source code at +http://aurellem.org + +If you can't see a video, let me know and I'll upload it to YouTube so +you can see it. + + + + +Now, I need your help moving forward. Can I use this work as a base +for a Masters thesis with you when I come back to MIT this coming Fall? +What critiques and project ideas do you have after looking through +what I've done so far? + +I have some ideas on where I can go with this project but I think you +will have some better ones. + +Here are some possible projects I might do with this as a base that I +think would be worthy Masters projects. + + - HACKER for writing muscle-control programs : Presented with + low-level muscle control/ sense API, generate higher level programs + for accomplishing various stated goals. Example goals might be + "extend all your fingers" or "move your hand into the area with + blue light" or "decrease the angle of this joint". It would be + like Sussman's HACKER, except it would operate with much more data + in a more realistic world. Start off with "calisthenics" to + develop subroutines over the motor control API. 
This would be the + "spinal cord" of a more intelligent creature. + + - Create hundreds of small creatures and have them do simple + simulated swarm tasks. + + - A joint that learns what sort of joint it (cone, point-to-point, + hinge, etc.) is by correlating exploratory muscle movements with + vision. + + - Something with cross-modal clustering using the rich sense + data. This might prove difficult due to the higher dimensionality + of my senses. + + - Simulated Imagination --- this would involve a creature with an + effector which creates an /entire new sub-simulation/ where the + creature has direct control over placement/creation of objects via + simulated telekinesis. The creature observes this sub-world through + its normal senses and uses its observations to make predictions + about its top level world. + + - Hook it up with Genesis --- I could make a "semantic effector" + which marks objects/sensory states with semantic information. In + conjunction with Simulated Imagination, and HACKER-like motor + control, Genesis might be able to ask simple questions like "stack + two blocks together and hit the top one with your hand; does the + bottom block move?" and the system could answer "yes". This would + be rather complicated to do and involves many of the above + projects, but there may be a way to scale it down to Master's + thesis size. + + - Make a virtual computer in the virtual world with which the + creature interacts using its fingers to press keys on a virtual + keyboard. The creature can access the internet, watch videos, take + over the world, anything it wants. (This is probably not worthy of + a Masters project, I just thought it was a neat idea. It's possible + to use videos/etc in the simulated world at any rate.) + + +I can't wait to hear your critiques and ideas. If you think I +shouldn't use this system as a base and should instead do something +else, that's fine too.
+ +On a related note, can I be considered for the position of TA next +year for 6.034 or 6.xxx? + +Sincerely, +--Robert McIntyre