aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
-rw-r--r--redist/json_helpers.c1
-rw-r--r--src/epnp/epnp.h1
-rw-r--r--src/epnp/opencv_shim.c53
-rw-r--r--src/poser_epnp.c4
-rwxr-xr-xsrc/survive.c7
-rw-r--r--src/survive_playback.c29
-rw-r--r--tools/viz/README.md13
-rw-r--r--tools/viz/index.html23
-rw-r--r--tools/viz/survive_viewer.js378
9 files changed, 456 insertions, 53 deletions
diff --git a/redist/json_helpers.c b/redist/json_helpers.c
index 4aeb399..8704a93 100644
--- a/redist/json_helpers.c
+++ b/redist/json_helpers.c
@@ -210,6 +210,7 @@ void json_load_file(const char* path) {
free(value);
}
+ free(tokens);
free(JSON_STRING);
}
diff --git a/src/epnp/epnp.h b/src/epnp/epnp.h
index 9ca3a2e..0a42b04 100644
--- a/src/epnp/epnp.h
+++ b/src/epnp/epnp.h
@@ -46,6 +46,7 @@ typedef struct {
double cws_determinant;
} epnp;
+void epnp_dtor(epnp *self);
void epnp_set_internal_parameters(epnp *self, double uc, double vc, double fu, double fv);
void epnp_set_maximum_number_of_correspondences(epnp *self, int n);
void epnp_reset_correspondences(epnp *self);
diff --git a/src/epnp/opencv_shim.c b/src/epnp/opencv_shim.c
index d7886d0..df2765b 100644
--- a/src/epnp/opencv_shim.c
+++ b/src/epnp/opencv_shim.c
@@ -28,29 +28,7 @@ const int CV_64F = 0;
typedef double doublereal;
#define F77_FUNC(func) func##_
-/*
-extern int F77_FUNC(dgetrs)(char *trans, int *n, int *nrhs, double *a, int *lda, int *ipiv, double *b, int *ldb, int
-*info);
-
-extern int F77_FUNC(dgetri)(int *n, double *a, int *lda, int *ipiv, double *work, int *lwork, int *info);
-extern int F77_FUNC(dgetrf)(int *m, int *n, double *a, int *lda, int *ipiv, int *info); /* blocked LU
-
-extern int F77_FUNC(dgesvd)(char *jobu, char *jobvt,
- int *m, int *n,
- double *a, int *lda, double *s, double *u, int *ldu,
- double *vt, int *ldvt, double *work, int *lwork,
- int *info);
-
-extern int F77_FUNC(dgesdd)(char *jobz,
- int *m, int *n, double *a, int *lda,
- double *s, double *u, int *ldu, double *vt, int *ldvt,
- double *work, int *lwork, int *iwork, int *info);
-
-extern int dgemm_(char *transa, char *transb, lapack_lapack_int *m, lapack_lapack_int *
- n, lapack_lapack_int *k, double *alpha, double *a, lapack_lapack_int *lda,
- double *b, lapack_lapack_int *ldb, double *beta, double *c, lapack_lapack_int
- *ldc);
-*/
+
void cvGEMM(const CvMat *src1, const CvMat *src2, double alpha, const CvMat *src3, double beta, CvMat *dst, int tABC) {
lapack_int rows1 = src1->rows;
lapack_int cols1 = src1->cols;
@@ -85,7 +63,7 @@ void cvMulTransposed(const CvMat *src, CvMat *dst, int order, const CvMat *delta
lapack_int drows = dst->rows;
assert(drows == cols);
assert(order == 1 ? (dst->cols == src->cols) : (dst->cols == src->rows));
- assert(delta == 0); // THIS ISN'T IMPLEMENTED YET
+ assert(delta == 0 && "This isn't implemented yet");
double beta = 0;
bool isAT = order == 1;
@@ -93,16 +71,12 @@ void cvMulTransposed(const CvMat *src, CvMat *dst, int order, const CvMat *delta
lapack_int dstCols = dst->cols;
- cblas_dgemm(CblasRowMajor, isAT ? CblasTrans : CblasNoTrans, isBT ? CblasTrans : CblasNoTrans,
- cols, // isAT ? cols : rows,
- dstCols,
- rows, // isAT ? rows : cols,
+ cblas_dgemm(CblasRowMajor, isAT ? CblasTrans : CblasNoTrans, isBT ? CblasTrans : CblasNoTrans, cols, dstCols, rows,
scale,
src->data.db, cols, src->data.db, cols, beta,
dst->data.db, dstCols);
- // const CvMat* delta, double scale
}
void *cvAlloc(size_t size) { return malloc(size); }
@@ -242,14 +216,15 @@ double cvInvert(const CvMat *srcarr, CvMat *dstarr, int method) {
free(ipiv);
} else if (method == DECOMP_SVD) {
-
+ // TODO: There is no way this needs this many allocations,
+ // but in my defense I was very tired when I wrote this code
CvMat *w = cvCreateMat(1, MIN(dstarr->rows, dstarr->cols), dstarr->type);
CvMat *u = cvCreateMat(dstarr->cols, dstarr->cols, dstarr->type);
CvMat *v = cvCreateMat(dstarr->rows, dstarr->rows, dstarr->type);
- cvSVD(dstarr, w, u, v, 0);
-
CvMat *um = cvCreateMat(w->cols, w->cols, w->type);
+ cvSVD(dstarr, w, u, v, 0);
+
cvSetZero(um);
for (int i = 0; i < w->cols; i++) {
cvmSet(um, i, i, 1. / w->data.db[i]);
@@ -258,6 +233,12 @@ double cvInvert(const CvMat *srcarr, CvMat *dstarr, int method) {
CvMat *tmp = cvCreateMat(dstarr->cols, dstarr->rows, dstarr->type);
cvGEMM(v, um, 1, 0, 0, tmp, GEMM_1_T);
cvGEMM(tmp, u, 1, 0, 0, dstarr, GEMM_2_T);
+
+ cvReleaseMat(&tmp);
+ cvReleaseMat(&w);
+ cvReleaseMat(&u);
+ cvReleaseMat(&v);
+ cvReleaseMat(&um);
}
return 0;
}
@@ -333,11 +314,9 @@ int cvSolve(const CvMat *Aarr, const CvMat *xarr, CvMat *Barr, int method) {
assert(Barr->cols == xCpy->cols);
xCpy->rows = acols;
cvCopyTo(xCpy, Barr);
-/*
-Barr->data = xCpy->data;
-Barr->rows = acols;
-Barr->cols = xCpy->cols;
-*/
+
+ cvReleaseMat(&aCpy);
+ cvReleaseMat(&xCpy);
#ifdef DEBUG_PRINT
print_mat(Barr);
#endif
diff --git a/src/poser_epnp.c b/src/poser_epnp.c
index 21e61ae..7749c7b 100644
--- a/src/poser_epnp.c
+++ b/src/poser_epnp.c
@@ -85,6 +85,8 @@ static int opencv_solver_fullscene(SurviveObject *so, PoserDataFullScene *pdfs)
SurvivePose lighthouse = solve_correspondence(so, &pnp, true);
PoserData_lighthouse_pose_func(&pdfs->hdr, so, lh, &lighthouse);
+
+ epnp_dtor(&pnp);
}
return 0;
}
@@ -137,6 +139,8 @@ int PoserEPNP(SurviveObject *so, PoserData *pd) {
quatrotateabout(txPose.Rot, so->ctx->bsd[lh].Pose.Rot, pose.Rot);
PoserData_poser_raw_pose_func(pd, so, lh, &txPose);
}
+
+ epnp_dtor(&pnp);
}
return 0;
diff --git a/src/survive.c b/src/survive.c
index e09ae13..a27ba3e 100755
--- a/src/survive.c
+++ b/src/survive.c
@@ -340,7 +340,8 @@ void survive_close( SurviveContext * ctx )
{
PoserData pd;
pd.pt = POSERDATA_DISASSOCIATE;
- if( ctx->objs[i]->PoserFn ) ctx->objs[i]->PoserFn( ctx->objs[i], &pd );
+ if (ctx->objs[i]->PoserFn)
+ ctx->objs[i]->PoserFn(ctx->objs[i], &pd);
}
for( i = 0; i < oldct; i++ )
@@ -354,6 +355,10 @@ void survive_close( SurviveContext * ctx )
destroy_config_group(ctx->global_config_values);
destroy_config_group(ctx->lh_config);
+ for (i = 0; i < ctx->objs_ct; i++) {
+ free(ctx->objs[i]);
+ }
+
free( ctx->objs );
free( ctx->drivers );
free( ctx->driverpolls );
diff --git a/src/survive_playback.c b/src/survive_playback.c
index fe7af2b..c4564c4 100644
--- a/src/survive_playback.c
+++ b/src/survive_playback.c
@@ -160,6 +160,7 @@ static int playback_close(struct SurviveContext *ctx, void *_driver) {
if (driver->playback_file)
fclose(driver->playback_file);
driver->playback_file = 0;
+
return 0;
}
@@ -184,7 +185,11 @@ static int LoadConfig(SurvivePlaybackData *sv, SurviveObject *so) {
ct0conf[len] = 0;
printf("Loading config: %d\n", len);
- return survive_load_htc_config_format(ct0conf, len, so);
+ int rtn = survive_load_htc_config_format(ct0conf, len, so);
+
+ free(ct0conf);
+
+ return rtn;
}
int DriverRegPlayback(SurviveContext *ctx) {
@@ -217,20 +222,14 @@ int DriverRegPlayback(SurviveContext *ctx) {
SurviveObject *tr0 = survive_create_tr0(ctx, "Playback", sp);
SurviveObject *ww0 = survive_create_ww0(ctx, "Playback", sp);
- if (!LoadConfig(sp, hmd)) {
- survive_add_object(ctx, hmd);
- }
- if (!LoadConfig(sp, wm0)) {
- survive_add_object(ctx, wm0);
- }
- if (!LoadConfig(sp, wm1)) {
- survive_add_object(ctx, wm1);
- }
- if (!LoadConfig(sp, tr0)) {
- survive_add_object(ctx, tr0);
- }
- if (!LoadConfig(sp, ww0)) {
- survive_add_object(ctx, ww0);
+ SurviveObject *objs[] = {hmd, wm0, wm1, tr0, ww0, 0};
+
+ for (SurviveObject **obj = objs; *obj; obj++) {
+ if (!LoadConfig(sp, *obj)) {
+ survive_add_object(ctx, *obj);
+ } else {
+ free(*obj);
+ }
}
survive_add_driver(ctx, sp, playback_poll, playback_close, 0);
diff --git a/tools/viz/README.md b/tools/viz/README.md
new file mode 100644
index 0000000..6afbd5c
--- /dev/null
+++ b/tools/viz/README.md
@@ -0,0 +1,13 @@
+# How to use
+
+- Download and install: http://websocketd.com/
+- Build the repo
+- Run data_recorder through websocketd like so:
+
+`websocketd --port=8080 ./data_recorder`
+
+- Navigate to the `index.html` page in this directory on chrome.
+
+When lighthouses, poses, or angle information is found, it should add it to the scene.
+
+
diff --git a/tools/viz/index.html b/tools/viz/index.html
new file mode 100644
index 0000000..5085faf
--- /dev/null
+++ b/tools/viz/index.html
@@ -0,0 +1,23 @@
+
+<html>
+ <head>
+ <script
+ src="https://code.jquery.com/jquery-3.3.1.slim.min.js"
+ integrity="sha256-3edrmyuQ0w65f8gfBsqowzjJe2iM6n0nKciPUp8y+7E="
+ crossorigin="anonymous"></script>
+ <script src="https://cdnjs.cloudflare.com/ajax/libs/three.js/90/three.min.js"></script>
+ <script src="./lib/OrbitControls.js"></script>
+ <script src="survive_viewer.js"></script>
+ </head>
+ <body>
+ <div id="ThreeJS" style="z-index: 1; position: absolute; left:0px; top:0px"></div>
+ <div id="cam-control" style="z-index: 2;border:1px solid white;position:absolute">
+ <button id="toggleBtn">
+ Toggle 2D View
+ </button>
+ <div id="cam" style="display:none">
+ <canvas width=800 height=800 id="camcanvas"></canvas>
+ </div>
+ </div>
+ </body>
+</html>
diff --git a/tools/viz/survive_viewer.js b/tools/viz/survive_viewer.js
new file mode 100644
index 0000000..aaa0340
--- /dev/null
+++ b/tools/viz/survive_viewer.js
@@ -0,0 +1,378 @@
+var sphere, axes;
+
+function add_lighthouse(idx, p, q) {
+ var group = new THREE.Group();
+
+ var lh = new THREE.AxesHelper(1);
+
+ group.position.fromArray(p);
+ group.quaternion.fromArray([ q[1], q[2], q[3], q[0] ]);
+
+ var height = 3;
+ var geometry = new THREE.ConeGeometry(Math.sin(1.0472) * height, height, 4, 1, true);
+ var material = new THREE.MeshBasicMaterial({
+ wireframe : true,
+ vertexColor : true,
+ color : 0x111111,
+ opacity : 0.09,
+ transparent : true,
+ blending : THREE.AdditiveBlending,
+ side : THREE.BothSides
+ });
+ var cone = new THREE.Mesh(geometry, material);
+
+ var lhBoxGeom = new THREE.CubeGeometry(.1, .1, .1);
+ var lhBoxMaterial = new THREE.MeshLambertMaterial({color : 0x111111, side : THREE.FrontSide});
+ var lhBox = new THREE.Mesh(lhBoxGeom, lhBoxMaterial);
+ group.add(lhBox);
+
+ cone.translateZ(-height / 2)
+ cone.rotateZ(Math.PI / 4)
+ cone.rotateX(Math.PI / 2)
+ // cone.position.z
+
+ group.add(cone);
+
+ group.add(lh);
+ scene.add(group);
+ // DrawCoordinateSystem(p[0], p[1], p[2], q[0], q[1], q[2], q[3]);
+ }
+var downAxes = {};
+var angles = {};
+var ctx;
+var canvas;
+var oldDrawTime = 0;
+var lastWhen = {};
+
+$(function() { $("#toggleBtn").click(function() { $("#cam").toggle(); }); });
+
+function redrawCanvas(when) {
+ oldDrawTime = new Date().getTime();
+ if (!ctx) {
+ canvas = document.getElementById("camcanvas");
+ ctx = canvas.getContext("2d");
+ }
+ if (!$(canvas).is(":visible")) {
+ return true;
+ }
+ ctx.clearRect(0, 0, canvas.width, canvas.height);
+
+ var fov_degrees = 150;
+ var fov_radians = fov_degrees / 180 * Math.PI;
+
+ function rad_to_x(ang) {
+ var half_fov = fov_radians / 2;
+ return ang / half_fov * canvas.width / 2 + canvas.width / 2;
+ }
+ var rad_to_y = rad_to_x;
+
+ ctx.strokeStyle = "#ffffff";
+ ctx.beginPath();
+ for (var x = -fov_degrees; x < fov_degrees; x += 10) {
+ var length = Math.abs(x) == 60 ? canvas.width : 10;
+ ctx.moveTo(rad_to_x(x / 180 * Math.PI), 0);
+ ctx.lineTo(rad_to_x(x / 180 * Math.PI), length);
+
+ ctx.moveTo(0, rad_to_x(x / 180 * Math.PI));
+ ctx.lineTo(length, rad_to_x(x / 180 * Math.PI));
+
+ ctx.moveTo(rad_to_x(x / 180 * Math.PI), canvas.width);
+ ctx.lineTo(rad_to_x(x / 180 * Math.PI), canvas.width - length);
+
+ ctx.moveTo(canvas.width, rad_to_x(x / 180 * Math.PI));
+ ctx.lineTo(canvas.width - length, rad_to_x(x / 180 * Math.PI));
+ }
+
+ ctx.stroke();
+
+ for (var key in angles) {
+ for (var lh = 0; lh < 2; lh++) {
+ var bvalue = {"WW0" : "FF", "TR0" : "00"};
+ ctx.strokeStyle = (lh === 0 ? "#FF00" : "#00FF") + bvalue[key];
+
+ if (angles[key][lh])
+
+ for (var id in angles[key][lh]) {
+ var ang = angles[key][lh][id];
+
+ if (ang[0] === undefined || ang[1] === undefined || ang[1][1] < when[key] - 48000000 ||
+ ang[0][1] < when[key] - 48000000)
+ continue;
+
+ var half_fov = 1.0472 * 2;
+ var x = ang[0][0] / half_fov * canvas.width / 2 + canvas.width / 2;
+ var y = -ang[1][0] / half_fov * canvas.height / 2 + canvas.height / 2;
+
+ ctx.fillStyle = "white";
+ ctx.font = "14px Arial";
+ // ctx.fillText(id, x, y);
+
+ ctx.beginPath();
+ ctx.arc(x, y, 1, 0, 2 * Math.PI);
+ ctx.stroke();
+ }
+ }
+ }
+ }
+
+var objs = {};
+var sensorGeometry = new THREE.SphereGeometry(.01, 32, 16);
+// use a "lambert" material rather than "basic" for realistic lighting.
+// (don't forget to add (at least one) light!)
+
+function create_object(info) {
+ var group = new THREE.Group();
+
+ for (var idx in info.points) {
+ var p = info.points[idx];
+ var color = 0xFFFFFF; // / info.points.length * idx;
+ if (idx == 10)
+ color = 0x00ff00;
+ if (idx == 12)
+ color = 0x0000ff;
+ var sensorMaterial = new THREE.MeshLambertMaterial({color : color});
+ var newSensor = new THREE.Mesh(sensorGeometry, sensorMaterial);
+ newSensor.position.set(p[0], p[1], p[2]);
+
+ group.add(newSensor);
+ }
+
+ var axes = new THREE.AxesHelper(1);
+ group.add(axes);
+
+ objs[info.tracker] = group;
+ scene.add(group);
+ }
+
+var timecode = {};
+$(function() {
+
+ function parseLine(msg) {
+ var s = msg.split(' ');
+
+ var command_mappings = {
+ "LH_POSE" : function(v) {
+ return {
+ type : "lighthouse_pose",
+ lighthouse : parseInt(v[2]),
+ position : [ parseFloat(v[3]), parseFloat(v[4]), parseFloat(v[5]) ],
+ quat : [ parseFloat(v[6]), parseFloat(v[7]), parseFloat(v[8]), parseFloat(v[9]) ]
+ };
+ },
+ "POSE" : function(v) {
+ return {
+ type: "pose", tracker: v[2], position: [ parseFloat(v[3]), parseFloat(v[4]), parseFloat(v[5]) ],
+ quat: [ parseFloat(v[6]), parseFloat(v[7]), parseFloat(v[8]), parseFloat(v[9]) ]
+ }
+ }
+ };
+ if (command_mappings[s[1]]) {
+ var rtn = command_mappings[s[1]](s);
+ rtn.time = parseFloat(s[0]);
+ return rtn;
+ }
+ return {};
+ }
+ var ws;
+ if (window.location.protocol === "file:") {
+ ws = new WebSocket("ws://localhost:8080/ws");
+ } else {
+ ws = new WebSocket(((window.location.protocol === "https:") ? "wss://" : "ws://") + window.location.host +
+ "/ws");
+ }
+
+ ws.onopen = function(evt) {
+ // ws.send("!");
+ };
+ ws.onmessage = function(evt) {
+ var msg = evt.data;
+ var obj;
+ if (msg[0] == "{")
+ obj = JSON.parse(msg);
+ else
+ obj = parseLine(msg);
+
+ // console.log(obj);
+ if (obj.type === "pose") {
+ if (!objs[obj.tracker]) {
+ create_object(obj);
+ }
+
+ objs[obj.tracker].position.set(obj.position[0], obj.position[1], obj.position[2]);
+ objs[obj.tracker].quaternion.set(obj.quat[1], obj.quat[2], obj.quat[3], obj.quat[0]);
+
+ } else if (obj.type === "lighthouse_pose") {
+ add_lighthouse(obj.lighthouse, obj.position, obj.quat);
+ } else if (obj.type === "tracker_calibration") {
+ create_object(obj);
+ } else if (obj.type === "imu") {
+ if (objs[obj.tracker]) {
+ if (!downAxes[obj.tracker]) {
+ downAxes[obj.tracker] = new THREE.Geometry();
+ downAxes[obj.tracker].vertices.push(
+ new THREE.Vector3(0, 0, 0),
+ new THREE.Vector3(obj.accelgyro[0], obj.accelgyro[1], obj.accelgyro[2]));
+
+ var line = new THREE.Line(downAxes[obj.tracker], new THREE.LineBasicMaterial({color : 0xffffff}));
+ objs[obj.tracker].add(line);
+ } else {
+ var q = obj.accelgyro;
+ downAxes[obj.tracker].vertices[1].fromArray(q);
+ downAxes[obj.tracker].verticesNeedUpdate = true;
+ }
+ }
+
+ } else if (obj.type === "angle") {
+ angles[obj.tracker] = angles[obj.tracker] || {};
+ angles[obj.tracker][obj.lighthouse] = angles[obj.tracker][obj.lighthouse] || {};
+ angles[obj.tracker][obj.lighthouse][obj.sensor_id] =
+ angles[obj.tracker][obj.lighthouse][obj.sensor_id] || {};
+
+ angles[obj.tracker][obj.lighthouse][obj.sensor_id][obj.acode] = [ obj.angle, obj.timecode ];
+ timecode[obj.tracker] = obj.timecode;
+ }
+
+ // ws.send("!");
+ };
+});
+
+//////////
+// MAIN //
+//////////
+
+// standard global variables
+var container, scene, camera, renderer, controls, stats;
+var clock = new THREE.Clock();
+
+// custom global variables
+var cube;
+$(function() {
+ // initialization
+ init();
+
+ // animation loop / game loop
+ animate();
+})
+
+///////////////
+// FUNCTIONS //
+///////////////
+
+function
+init() {
+ ///////////
+ // SCENE //
+ ///////////
+ scene = new THREE.Scene();
+
+ ////////////
+ // CAMERA //
+ ////////////
+
+ // set the view size in pixels (custom or according to window size)
+ // var SCREEN_WIDTH = 400, SCREEN_HEIGHT = 300;
+ var SCREEN_WIDTH = window.innerWidth, SCREEN_HEIGHT = window.innerHeight;
+ // camera attributes
+ var VIEW_ANGLE = 45, ASPECT = SCREEN_WIDTH / SCREEN_HEIGHT, NEAR = 0.01, FAR = 200;
+ // set up camera
+ camera = new THREE.PerspectiveCamera(VIEW_ANGLE, ASPECT, NEAR, FAR);
+ camera.up = new THREE.Vector3(0, 0, 1);
+ // add the camera to the scene
+ scene.add(camera);
+ // the camera defaults to position (0,0,0)
+ // so pull it back (z = 400) and up (y = 100) and set the angle towards the
+ // scene origin
+ camera.position.set(5, 2, 5.00);
+ camera.lookAt(scene.position);
+
+ //////////////
+ // RENDERER //
+ //////////////
+
+ renderer = new THREE.WebGLRenderer({antialias : true});
+
+ renderer.setSize(SCREEN_WIDTH, SCREEN_HEIGHT);
+
+ // attach div element to variable to contain the renderer
+ container = document.getElementById('ThreeJS');
+ // alternatively: to create the div at runtime, use:
+ // container = document.createElement( 'div' );
+ // document.body.appendChild( container );
+
+ // attach renderer to the container div
+ container.appendChild(renderer.domElement);
+
+ ////////////
+ // EVENTS //
+ ////////////
+
+ /*
+ // automatically resize renderer
+ THREEx.WindowResize(renderer, camera);
+ // toggle full-screen on given key press
+ THREEx.FullScreen.bindKey({ charCode : 'm'.charCodeAt(0) });
+*/
+ //////////////
+ // CONTROLS //
+ //////////////
+
+ // move mouse and: left click to rotate,
+ // middle click to zoom,
+ // right click to pan
+ controls = new THREE.OrbitControls(camera, renderer.domElement);
+
+ ///////////
+ // LIGHT //
+ ///////////
+
+ // create a light
+ var light = new THREE.PointLight(0xffffff);
+ light.position.set(0, 5, 0);
+ scene.add(light);
+ var ambientLight = new THREE.AmbientLight(0x111111);
+ // scene.add(ambientLight);
+
+ var floorTexture = new THREE.ImageUtils.loadTexture('images/checkerboard.jpg');
+ floorTexture.wrapS = floorTexture.wrapT = THREE.RepeatWrapping;
+ floorTexture.repeat.set(10, 10);
+ // DoubleSide: render texture on both sides of mesh
+ var floorMaterial =
+ new THREE.MeshBasicMaterial({color : 0x000000, opacity : 0.15, transparent : true, side : THREE.FrontSide});
+ var floorGeometry = new THREE.PlaneGeometry(10, 10);
+ var floor = new THREE.Mesh(floorGeometry, floorMaterial);
+ floor.position.z = -1;
+
+ scene.add(floor);
+
+ /////////
+ // SKY //
+ /////////
+
+ // recommend either a skybox or fog effect (can't use both at the same time)
+ // without one of these, the scene's background color is determined by
+ // webpage background
+
+ var skyBoxGeometry = new THREE.CubeGeometry(50, 50, 50);
+ var skyBoxMaterial = new THREE.MeshBasicMaterial({color : 0x888888, side : THREE.BackSide});
+ var skyBox = new THREE.Mesh(skyBoxGeometry, skyBoxMaterial);
+ scene.add(skyBox);
+
+ // fog must be added to scene before first render
+ // scene.fog = new THREE.FogExp2(0xffffff, 0.025);
+}
+
+function animate() {
+ requestAnimationFrame(animate);
+ render();
+ update();
+ redrawCanvas(timecode);
+ }
+
+function update() {
+ // delta = change in time since last call (in seconds)
+ var delta = clock.getDelta();
+
+ // controls.update();
+ }
+
+function render() { renderer.render(scene, camera); }