view.C

00001 /*
00002  * This file is part of the "Archon" framework.
00003  * (http://files3d.sourceforge.net)
00004  *
00005  * Copyright © 2002 by Kristian Spangsege and Brian Kristiansen.
00006  *
00007  * Permission to use, copy, modify, and distribute this software and
00008  * its documentation under the terms of the GNU General Public License is
00009  * hereby granted. No representations are made about the suitability of
00010  * this software for any purpose. It is provided "as is" without express
00011  * or implied warranty. See the GNU General Public License
00012  * (http://www.gnu.org/copyleft/gpl.html) for more details.
00013  *
00014  * The characters in this file are ISO8859-1 encoded.
00015  *
00016  * The documentation in this file is in "Doxygen" style
00017  * (http://www.doxygen.org).
00018  */
00019 
00020 #include <math.h>
00021 #include <typeinfo>
00022 #include <iostream>
00023 #include <map>
00024 
00025 #include <GL/gl.h>
00026 #include <GL/glu.h>
00027 
00028 #include <archon/util/image.H>
00029 #include <archon/util/random.H>
00030 
00031 #include <archon/x3d/server/field_type.H>
00032 #include <archon/x3d/server/load.H>
00033 #include <archon/x3d/server/server.H>
00034 #include <archon/x3d/server/view.H>
00035 
00036 using namespace std;
00037 
00038 namespace Archon
00039 {
00040   using namespace Math;
00041 
00042   namespace X3D
00043   {
00044     Viewer::Viewer(Ref<Server> server,
00045                    int resolutionX, int resolutionY,
00046                    bool useMipmapedTextures,
00047                    string initialViewpointName,
00048                    const CoordSystem3x3 &viewCoordSystem,
00049                    double fieldOfView,
00050                    double depthOfRotation,
00051                    const Vector4 &backgroundColor,
00052                    int subdivisionX, int subdivisionY,
00053                    bool headLight,
00054                    bool showLightSources,
00055                    bool showNormals,
00056                    bool textAsQuadsMode,
00057                    bool wireframeMode,
00058                    bool enableTexture):
00059       server(server),
00060       resolutionX(resolutionX), resolutionY(resolutionY),
00061       aspectRatio(double(resolutionX)/resolutionY),
00062       nearClippingDist(1), farClippingDist(2000),
00063       useMipmapedTextures(useMipmapedTextures),
00064       viewCoordSystem(viewCoordSystem),
00065       fieldOfView(fieldOfView),
00066       depthOfRotation(depthOfRotation),
00067       renderConfig(subdivisionX, subdivisionY, showNormals, depthOfRotation/10, wireframeMode, textAsQuadsMode),
00068       headLight(headLight),
00069       showLightSources(showLightSources),
00070       disregardSensors(false),
00071       enableTexture(enableTexture),
00072       separateSpecularColorMode(false),
00073       lightModelLocalViewerMode(false),
00074       numberOfEnabledLights(0),
00075       updateShapeCacheDuringNextFrame(false),
00076       anyActiveDragSensors(false),
00077       pointingDevicePosition(0, 0),
00078       pointingDevicePositionChangedSinceLastFrame(false),
00079       pointingDeviceActive(false),
00080       pointingDeviceLastActive(false)
00081     {
00082       GLint param;
00083       glGetIntegerv(GL_MAX_LIGHTS, &param);
00084       maxLights = param;
00085 
00086       glViewport(0, 0, resolutionX, resolutionY);
00087 
00088       glClearColor(backgroundColor[0], backgroundColor[1],
00089                    backgroundColor[2], backgroundColor[3]);
00090       glDepthFunc(GL_LESS);
00091       glEnable(GL_DEPTH_TEST);
00092 
00093       // Disable all light sources
00094       for(unsigned i=0; i<maxLights; ++i) glDisable(GL_LIGHT0+i);
00095 
00096       if(server->getRootScene())
00097       {
00098         // Bind to the specified viewpoint
00099         if(!initialViewpointName.empty())
00100         {
00101           if(!bindViewpoint(initialViewpointName))
00102             server->getLogger()->log("WARNING: No such viewpoint '" +
00103                                      initialViewpointName + "'");
00104         }
00105 
00106         // Bind to the first viewpoint if any
00107         if(!boundViewpoint)
00108           boundViewpoint =
00109             findBoundViewpoint_group(server->getRootScene()->
00110                                      getRootGroup().get());
00111       }
00112     }
00113 
00114     bool Viewer::bindViewpoint(string name)
00115     {
00116       if(!server->getRootScene())
00117         ARCHON_THROW1(InternalException,
00118                       "Viewer::bindViewpoint: No root scene");
00119 
00120       Ref<NodeBase> n = server->getRootScene()->lookupNode(name);
00121       if(Viewpoint *v = dynamic_cast<Viewpoint *>(n.get()))
00122       {
00123         boundViewpoint = v;
00124         return true;
00125       }
00126       return false;
00127     }
00128 
00142     void Viewer::setupViewpoint()
00143     {
00144       if(boundViewpoint)
00145       {
00146         stack<const Transform *> transformPath;
00147         if(!server->getRootSceneNoLock())
00148           ARCHON_THROW1(InternalException,
00149                         "Viewer::setupViewpoint: No root scene");
00150         GroupingNode *g = server->getRootSceneNoLock()->getRootGroupNoLock().get();
00151         if(findBoundViewpoint_group(g, &transformPath))
00152         {
00153           viewCoordSystem = CoordSystem3x3::identity();
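                // Each Transform on the path from the root group down to the
                // bound viewpoint is accumulated below in the order defined by
                // the VRML97 Transform node: translation, center, rotation,
                // scaleOrientation, scale, inverse scaleOrientation, inverse
                // center. Since enclosing Transforms were pushed last during
                // the search, popping the stack applies the root-most
                // transform first.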
00154           while(!transformPath.empty())
00155           {
00156             const Transform *t = transformPath.top();
00157             transformPath.pop();
00158             viewCoordSystem.translate(t->getTranslation());
00159             viewCoordSystem.translate(t->getCenter());
00160             viewCoordSystem.basis.rotate(t->getRotation());
00161             viewCoordSystem.basis.rotate(t->getScaleOrientation());
00162             viewCoordSystem.basis.scale(t->getScale());
00163             viewCoordSystem.basis.rotate(-t->getScaleOrientation());
00164             viewCoordSystem.translate(-t->getCenter());
00165           }
00166           viewCoordSystem.translate(boundViewpoint->getPosition());
00167           viewCoordSystem.basis.rotate(boundViewpoint->getOrientation());
00168 
00169           fieldOfView = boundViewpoint->getFieldOfView();
00170 
00171           depthOfRotation = dist(boundViewpoint->getPosition(),
00172                                  boundViewpoint->getCenterOfRotation());
00173         }
00174       }
00175 
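            // The half-extent of the near clipping plane along its smaller
            // dimension follows from the field of view:
            // viewPlaneRadius = nearClippingDist * tan(fieldOfView / 2).
            // The larger dimension is then scaled by the aspect ratio, so the
            // field of view applies to the smaller of the two window
            // dimensions.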
00176       const double viewPlaneRadius = nearClippingDist*tan(fieldOfView/2);
00177       if(aspectRatio > 1)
00178       {
00179         viewPlane[0] = viewPlaneRadius * aspectRatio;
00180         viewPlane[1] = viewPlaneRadius;
00181       }
00182       else
00183       {
00184         viewPlane[0] = viewPlaneRadius;
00185         viewPlane[1] = viewPlaneRadius / aspectRatio;
00186       }
00187       glMatrixMode(GL_PROJECTION);
00188       glLoadIdentity();
00189       glFrustum(-viewPlane[0], viewPlane[0], -viewPlane[1], viewPlane[1],
00190                 nearClippingDist, farClippingDist);
00191       glMatrixMode(GL_MODELVIEW);
00192 
00193       CoordSystem3x3 invViewCoordSystem;
00194       invViewCoordSystem.setInverseOf(viewCoordSystem);
00195       GLdouble m[16];
00196       invViewCoordSystem.setOpenGLMatrix(m);
00197       glLoadMatrixd(m);
00198     }
00199 
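          // The findBoundViewpoint_* helpers below recurse through the scene
          // graph in search of the bound (or first) Viewpoint. When a
          // transform path is requested, each enclosing Transform is pushed
          // onto the stack as the recursion unwinds, leaving the outermost
          // (root-side) Transform on top.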
00200     Viewpoint *Viewer::findBoundViewpoint_child(ChildNode *c,
00201                                                 stack<const Transform *> *transformPath)
00202     {
00203       if(Viewpoint *v = dynamic_cast<Viewpoint *>(c))
00204       {
00205         if(!boundViewpoint ||
00206            boundViewpoint.get() == v) return v;
00207         return 0;
00208       }
00209 
00210       if(GroupingNode *g =
00211          dynamic_cast<GroupingNode *>(c))
00212       {
00213         if(Viewpoint *v = findBoundViewpoint_group(g, transformPath))
00214         {
00215           if(transformPath)
00216             if(Transform *t = dynamic_cast<Transform *>(g))
00217               transformPath->push(t);
00218           return v;
00219         }
00220         return 0;
00221       }
00222 
00223       return 0;
00224     }
00225 
00226     Viewpoint *Viewer::findBoundViewpoint_group(const GroupingNode *g,
00227                                                 stack<const Transform *> *transformPath)
00228     {
00229       GroupingNode::const_iterator c=g->begin();
00230 
00231       if(const Switch *s = dynamic_cast<const Switch *>(g))
00232       {
00233         int i = s->getWhichChoise();
00234         if(i<0) return 0;
00235         c += i;
00236         if(c >= g->end()) return 0;
00237         return findBoundViewpoint_child(c->get(), transformPath);
00238       }
00239 
00240       while(c!=g->end())
00241       {
00242         if(Viewpoint *v = findBoundViewpoint_child(c->get(), transformPath))
00243           return v;
00244         ++c;
00245       }
00246 
00247       return 0;
00248     }
00249 
00250 
00257     void Viewer::calculateBillboardRotation(const Billboard *b, Rotation3 &r)
00258     {
00259       double m[16];
00260       glGetDoublev(GL_MODELVIEW_MATRIX, m);
00261       CoordSystem3x3 modelview(m);
00262       CoordSystem3x3 invModelview;
00263       invModelview.setInverseOf(modelview);
00264 
00265       Vector3 n = b->getAxisOfRotation();
00266       Vector3 e = invModelview.origin; // Eye position in the local coordinate system (also the direction from the local origin towards the eye)
00267       if(n == Vector3::zero())
00268       {
00269         // viewer-alignment
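              // With a zero axis of rotation the billboard must face the
              // viewer completely: its local z-axis is turned towards the eye
              // and its y-axis is then aligned with the viewer's up
              // direction. The two rotations are computed separately and
              // combined via quaternions below.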
00270 
00271         // First rotate in the plane spanned by the eye direction and the z-axis
00272         Vector3 axis1;
00273         double k = e[0]*e[0] + e[1]*e[1]; // <1
00274         double ca1 = e[2]/sqrt(k + e[2]*e[2]); // >0 (e[2])
00275         if(k == 0)
00276         {
00277           // Line of sight is parallel to the z-axis
00278           axis1.set(0, 1, 0);
00279         }
00280         else
00281         {
00282           k = sqrt(k); // <1
00283           axis1.set(-e[1]/k, e[0]/k, 0); // 0, 1, 0
00284         }
00285 
00286         // Then rotate about the line of sight to align the y-axis with the viewer's up direction
00287         Vector3 y = e;
00288         y *= invModelview.basis.y;
00289         y *= e;
00290         k = y.squareSum();
00291         Vector3 axis2;
00292         double ca2;
00293         if(k == 0)
00294         {
00295           axis2.set(0, 1, 0);
00296           ca2 = 0; // We might want to choose a more reasonable rotation in this special case
00297         }
00298         else
00299         {
00300           y /= sqrt(k);
00301           k = (1 - ca1) * axis1[1]; // <1 (1-e[2])
00302           axis2.set(k * axis1[0],
00303                     k * axis1[1] + ca1,
00304                     axis1[0] * sqrt(1 - ca1 * ca1));
00305           ca2 = dot(axis2, y);
00306           axis2 *= y;
00307           axis2.normalize();
00308         }
00309 
00310         // Combine the two rotations through the use of quaternions
00311         Quaternion q1, q2;
00312         q1.setRotation(axis1, ca1);
00313         q2.setRotation(axis2, ca2);
00314         q2 *= q1;
00315         q2.getRotation(r);
00316         return;
00317       }
00318 
00319       e *= n;
00320       double l1 = e.length();
00321       if(l1 == 0)
00322       {
00323         // The axis of rotation is coincident with the direction
00324         // towards the eye. In this case every rotation angle is as
00325         // good as any other.
00326         r.axis = b->getAxisOfRotation();
00327         r.angle = 0;
00328         return;
00329       }
00330       n *= Vector3(0, 0, -1);
00331       double l2 = n.length();
00332       if(l2 == 0)
00333       {
00334         // The axis of rotation is coincident with the direction
00335         // defining the front of the object. In this case every
00336         // rotation angle is as good as any other.
00337         r.axis = b->getAxisOfRotation();
00338         r.angle = 0;
00339         return;
00340       }
00341       double p = dot(e, n) / l1 / l2;
00342       if(p == 1)
00343       {
00344         // We already face the front as much as possible.
00345         r.axis = b->getAxisOfRotation();
00346         r.angle = 0;
00347         return;
00348       }
00349       if(p == -1)
00350       {
00351         // We face the back as much as possible.
00352         r.axis = b->getAxisOfRotation();
00353         r.angle = M_PI;
00354         return;
00355       }
00356 
00357       n *= e;
00358       n.normalize();
00359 
00360       r.axis = n;
00361       r.angle = acos(p);
00362     }
00363 
00364 
00372     void Viewer::setupLight(bool headLight, bool showLightSources)
00373     {
00374       int nextLight = 0;
00375 
00376       if(showLightSources)
00377       {
00378         glPointSize(8);
00379         glEnable(GL_POINT_SMOOTH);
00380         glDisable(GL_LIGHTING);
00381         glDisable(GL_TEXTURE_2D);
00382       }
00383 
00384       if(!server->getRootSceneNoLock())
00385         ARCHON_THROW1(InternalException,
00386                       "Viewer::setupLight: No root scene");
00387       GroupingNode *g = server->getRootSceneNoLock()->getRootGroupNoLock().get();
00388       lightGroupingNode(g, nextLight, showLightSources);
00389 
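            // The headlight is a point light placed at the eye position
            // (viewCoordSystem.origin) with a 180 degree cutoff and no
            // attenuation, so it effectively follows the viewer around.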
00390       if(headLight)
00391       {
00392         int i = GL_LIGHT0 + nextLight++;
00393 
00394         GLfloat v[4];
00395         v[3] = 1;
00396 
00397         v[0] = 0.2;
00398         v[1] = 0.2;
00399         v[2] = 0.2;
00400         glLightfv(i, GL_AMBIENT, v);
00401 
00402         v[0] = 0.8;
00403         v[1] = 0.8;
00404         v[2] = 0.8;
00405         glLightfv(i, GL_DIFFUSE, v);
00406 
00407         v[0] = 0.8;
00408         v[1] = 0.8;
00409         v[2] = 0.8;
00410         glLightfv(i, GL_SPECULAR, v);
00411 
00412         v[0] = viewCoordSystem.origin[0];
00413         v[1] = viewCoordSystem.origin[1];
00414         v[2] = viewCoordSystem.origin[2];
00415         glLightfv(i, GL_POSITION, v);
00416 
00417         glLightf(i, GL_SPOT_CUTOFF, 180);
00418 
00419         glLightf(i, GL_CONSTANT_ATTENUATION,  1);
00420         glLightf(i, GL_LINEAR_ATTENUATION,    0);
00421         glLightf(i, GL_QUADRATIC_ATTENUATION, 0);
00422 
00423         glEnable(i);
00424       }
00425 
00426       for(int i=nextLight; i<numberOfEnabledLights; ++i)
00427         glDisable(GL_LIGHT0+i);
00428 
00429       numberOfEnabledLights = nextLight;
00430     }
00431 
00439     void Viewer::lightPointLight(const PointLight *l, int &nextLight, bool show)
00440     {
00441       if(l->getOn())
00442       {
00443         int i = GL_LIGHT0 + nextLight++;
00444 
00445         GLfloat v[4];
00446         v[3] = 1;
00447 
00448         v[0] = l->getColor()[0] * l->getAmbientIntensity();
00449         v[1] = l->getColor()[1] * l->getAmbientIntensity();
00450         v[2] = l->getColor()[2] * l->getAmbientIntensity();
00451         glLightfv(i, GL_AMBIENT, v);
00452 
00453         v[0] = l->getColor()[0] * l->getIntensity();
00454         v[1] = l->getColor()[1] * l->getIntensity();
00455         v[2] = l->getColor()[2] * l->getIntensity();
00456         glLightfv(i, GL_DIFFUSE, v);
00457         glLightfv(i, GL_SPECULAR, v);
00458 
00459         v[0] = l->getLocation()[0];
00460         v[1] = l->getLocation()[1];
00461         v[2] = l->getLocation()[2];
00462         glLightfv(i, GL_POSITION, v);
00463 
00464         glLightf(i, GL_SPOT_CUTOFF, 180);
00465 
00466         glLightf(i, GL_CONSTANT_ATTENUATION,  l->getAttenuation()[0]);
00467         glLightf(i, GL_LINEAR_ATTENUATION,    l->getAttenuation()[1]);
00468         glLightf(i, GL_QUADRATIC_ATTENUATION, l->getAttenuation()[2]);
00469 
00470         glEnable(i);
00471       }
00472 
00473       if(show)
00474       {
00475         glColor3d(l->getColor()[0], l->getColor()[1], l->getColor()[2]);
00476         glBegin(GL_POINTS);
00477         glVertex3d(l->getLocation()[0], l->getLocation()[1], l->getLocation()[2]);
00478         glEnd();
00479       }
00480     }
00481 
00504     void Viewer::lightSpotLight(const SpotLight *l, int &nextLight, bool show)
00505     {
00506       if(l->getOn())
00507       {
00508         int i = GL_LIGHT0 + nextLight++;
00509 
00510         GLfloat v[4];
00511         v[3] = 1;
00512 
00513         v[0] = l->getColor()[0] * l->getAmbientIntensity();
00514         v[1] = l->getColor()[1] * l->getAmbientIntensity();
00515         v[2] = l->getColor()[2] * l->getAmbientIntensity();
00516         glLightfv(i, GL_AMBIENT, v);
00517 
00518         v[0] = l->getColor()[0] * l->getIntensity();
00519         v[1] = l->getColor()[1] * l->getIntensity();
00520         v[2] = l->getColor()[2] * l->getIntensity();
00521         glLightfv(i, GL_DIFFUSE, v);
00522         glLightfv(i, GL_SPECULAR, v);
00523 
00524         v[0] = l->getLocation()[0];
00525         v[1] = l->getLocation()[1];
00526         v[2] = l->getLocation()[2];
00527         glLightfv(i, GL_POSITION, v);
00528 
00529         v[0] = l->getDirection()[0];
00530         v[1] = l->getDirection()[1];
00531         v[2] = l->getDirection()[2];
00532         glLightfv(i, GL_SPOT_DIRECTION, v);
00533 
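              // The VRML beamWidth is approximated by a spot exponent chosen
              // so that the intensity drops to 'threshold' at the beam width:
              // cos(beamWidth)^exponent = threshold, hence
              // exponent = log(threshold) / log(cos(beamWidth)), clamped to
              // OpenGL's maximum of 128 (and set to zero when the beam covers
              // the entire light cone).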
00534         const double threshold = 0.75;
00535         float spotExponent =
00536           l->getBeamWidth() >= l->getCutOffAngle() ? 0 :
00537           l->getBeamWidth() <= acos(pow(threshold, 1.0/128)) ? 128 :
00538           log(threshold)/log(cos(l->getBeamWidth()));
00539         glLightf(i, GL_SPOT_EXPONENT, spotExponent);
00540 
00541         glLightf(i, GL_SPOT_CUTOFF, l->getCutOffAngle()/M_PI*180);
00542 
00543         glLightf(i, GL_CONSTANT_ATTENUATION,  l->getAttenuation()[0]);
00544         glLightf(i, GL_LINEAR_ATTENUATION,    l->getAttenuation()[1]);
00545         glLightf(i, GL_QUADRATIC_ATTENUATION, l->getAttenuation()[2]);
00546 
00547         glEnable(i);
00548       }
00549 
00550       if(show)
00551       {
00552         glColor3d(l->getColor()[0], l->getColor()[1], l->getColor()[2]);
00553         glBegin(GL_POINTS);
00554         glVertex3d(l->getLocation()[0], l->getLocation()[1], l->getLocation()[2]);
00555         glEnd();
00556       }
00557     }
00558 
00559     void Viewer::lightDispatchLightNode(const LightNode *l, int &nextLight, bool show)
00560     {
00561       if(const PointLight *p = dynamic_cast<const PointLight *>(l))
00562         lightPointLight(p, nextLight, show);
00563       else if(const SpotLight *p = dynamic_cast<const SpotLight *>(l))
00564         lightSpotLight(p, nextLight, show);
00565     }
00566 
00567     void Viewer::lightTransform(const Transform *t, int &nextLight, bool show)
00568     {
00569       glPushMatrix();
00570 
00571       glTranslatef(t->getTranslation()[0],
00572                    t->getTranslation()[1],
00573                    t->getTranslation()[2]);
00574       glTranslatef(t->getCenter()[0],
00575                    t->getCenter()[1],
00576                    t->getCenter()[2]);
00577       glRotatef(t->getRotation().angle/M_PI*180,
00578                 t->getRotation().axis[0],
00579                 t->getRotation().axis[1],
00580                 t->getRotation().axis[2]);
00581       glRotatef(t->getScaleOrientation().angle/M_PI*180,
00582                 t->getScaleOrientation().axis[0],
00583                 t->getScaleOrientation().axis[1],
00584                 t->getScaleOrientation().axis[2]);
00585       glScalef(t->getScale()[0],
00586                t->getScale()[1],
00587                t->getScale()[2]);
00588       glRotatef(t->getScaleOrientation().angle/M_PI*180*-1,
00589                 t->getScaleOrientation().axis[0],
00590                 t->getScaleOrientation().axis[1],
00591                 t->getScaleOrientation().axis[2]);
00592       glTranslatef(t->getCenter()[0]*-1,
00593                    t->getCenter()[1]*-1,
00594                    t->getCenter()[2]*-1);
00595 
00596       lightGroupingNode(t, nextLight, show);
00597 
00598       glPopMatrix();
00599     }
00600 
00601     void Viewer::lightBillboard(const Billboard *b, int &nextLight, bool show)
00602     {
00603       glPushMatrix();
00604 
00605       Rotation3 r;
00606       calculateBillboardRotation(b, r);
00607       glRotatef(r.angle/M_PI*180, r.axis[0], r.axis[1], r.axis[2]);
00608 
00609       lightGroupingNode(b, nextLight, show);
00610 
00611       glPopMatrix();
00612     }
00613 
00614     void Viewer::lightDispatchGroupingNode(const GroupingNode *g, int &nextLight, bool show)
00615     {
00616       if(const Transform *t = dynamic_cast<const Transform *>(g))
00617         lightTransform(t, nextLight, show);
00618       else if(const Billboard *b = dynamic_cast<const Billboard *>(g))
00619         lightBillboard(b, nextLight, show);
00620       else if(const Switch *s = dynamic_cast<const Switch *>(g))
00621         lightSwitch(s, nextLight, show);
00622       else lightGroupingNode(g, nextLight, show);
00623     }
00624 
00625     void Viewer::lightSwitch(const Switch *s, int &nextLight, bool show)
00626     {
00627       GroupingNode::const_iterator c=s->begin();
00628       int i = s->getWhichChoise();
00629       if(i < 0) return;
00630       c += i;
00631       if(c >= s->end()) return;
00632       if(const LightNode *l = dynamic_cast<const LightNode *>(c->get()))
00633         lightDispatchLightNode(l, nextLight, show);
00634       else if(const GroupingNode *g2 = dynamic_cast<const GroupingNode *>(c->get()))
00635         lightDispatchGroupingNode(g2, nextLight, show);
00636     }
00637 
00638     void Viewer::lightGroupingNode(const GroupingNode *g, int &nextLight, bool show)
00639     {
00640       GroupingNode::const_iterator c=g->begin();
00641       while(c!=g->end())
00642       {
00643         if(const LightNode *l = dynamic_cast<const LightNode *>(c->get()))
00644           lightDispatchLightNode(l, nextLight, show);
00645         else if(const GroupingNode *g2 = dynamic_cast<const GroupingNode *>(c->get()))
00646           lightDispatchGroupingNode(g2, nextLight, show);
00647         else if(const SubSceneNode *s = dynamic_cast<const SubSceneNode *>(c->get()))
00648         {
00649           Ref<SceneBase> d = s->getSubSceneNoLock();
00650           if(d)
00651           {
00652             Ref<Group> h = d->getRootGroupNoLock();
00653             if(h) lightGroupingNode(h.get(), nextLight, show);
00654           }
00655         }
00656         ++c;
00657       }
00658     }
00659 
00660 
00661 
00662     struct TexUsage
00663     {
00664       GLuint textureId;
00665       int useCount;
00666 
00667       TexUsage(): useCount(0) {}
00668     };
00669 
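          // OpenGL textures are shared between shapes whose ImageTextures
          // refer to the same loaded image contents. 'texMap' maps a contents
          // ID to the GL texture name plus a use count; the texture is
          // deleted when the last ShapeCache using it drops its reference.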
00670     static map<Loader::Contents::Id, TexUsage> texMap;
00671 
00672     struct Viewer::ShapeCache: public Shape::DrawCache
00673     {
00674       GLuint textureId;
00675       GLuint callListId;
00676 
00677       Shape *shape;
00678       GeometryNode *geometry;
00679 
00680       Time calllistStamp;
00681 
00682       const FieldBase *geometryField;
00683 
00684       Loader::Contents::Id contentsId;
00685 
00686       ShapeCache(Shape *s): shape(s), geometry(0)
00687       {
00688         geometryField = Shape::type->lookupField("geometry");
00689         if(!geometryField)
00690           ARCHON_THROW1(InternalException,
00691                         "Could not find Shape.geometry field");
00692       }
00693 
00694       static vector<ShapeCache *> &getDisposedShapeCaches()
00695       {
00696         static vector<ShapeCache *> disposedShapeCaches;
00697         return disposedShapeCaches;
00698       }
00699 
00700       static void cleanUp()
00701       {
00702         for(unsigned i=0; i<getDisposedShapeCaches().size(); ++i)
00703           delete getDisposedShapeCaches()[i];
00704         getDisposedShapeCaches().clear();
00705       }
00706 
00707       void dispose()
00708       {
00709         getDisposedShapeCaches().push_back(this);
00710       }
00711 
00712       bool updateTexture(const Viewer *viewer)
00713       {
00714         ImageTexture *imageTexture = 0;
00715         Ref<const Loader::ImageContents> imageContents;
00716         AppearanceNode *appearance = shape->getAppearance().get();
00717         if(appearance)
00718         {
00719           Appearance *a = dynamic_cast<Appearance *>(appearance);
00720           if(!a) ARCHON_THROW1(InternalException,
00721                                "'" + appearance->getType()->getName() +
00722                                "' is not supported yet");
00723 
00724           TextureNode *textureNode = a->getTexture().get();
00725           if(textureNode)
00726           {
00727             imageTexture = dynamic_cast<ImageTexture *>(textureNode);
00728             if(!imageTexture)
00729               ARCHON_THROW1(InternalException,
00730                             "'" + textureNode->getType()->getName() +
00731                             "' is not supported yet");
00732 
00733             Ref<const Loader::Contents> c = imageTexture->getContents();
00734             imageContents = dynamic_cast<const Loader::ImageContents *>(c.get());
00735             if(c && !imageContents)
00736               ARCHON_THROW1(InternalException,
00737                             "Viewer::ShapeCache::updateTexture: Got "
00738                             "non-image contents object");
00739           }
00740         }
00741 
00742         Loader::Contents::Id newContentsId;
00743         if(imageContents) newContentsId = imageContents->getId();
00744 
00745         if(newContentsId == contentsId) return false;
00746 
00747         // Remove old texture
00748         if(contentsId)
00749         {
00750           TexUsage &texUsage = texMap[contentsId];
00751           if(--texUsage.useCount == 0)
00752           {
00753             glDeleteTextures(1, &texUsage.textureId); // Oops - what if
00754                                                       // the new
00755                                                       // texture is
00756                                                       // the same
00757                                                       // texture?
00758             texMap.erase(contentsId);
00759           }
00760         }
00761 
00762         contentsId = newContentsId;
00763 
00764         if(!contentsId) return true;
00765 
00766         // Check whether a texture for this contents already exists
00767         TexUsage &texUsage = texMap[contentsId];
00768         if(texUsage.useCount++ == 0)
00769         {
00770           // Inserted (new URI)
00771 
00772           const Image *image = &imageContents->image;
00773           int colorMode;
00774           switch(image->getComponentSpecifier())
00775           {
00776           case Utilities::Image::components_l:    colorMode = GL_LUMINANCE;       break;
00777           case Utilities::Image::components_la:   colorMode = GL_LUMINANCE_ALPHA; break;
00778           case Utilities::Image::components_rgb:  colorMode = GL_RGB;             break;
00779           case Utilities::Image::components_rgba: colorMode = GL_RGBA;            break;
00780           default:
00781             ARCHON_THROW1(InternalException,
00782                           "Unsupported color components");
00783           }
00784 
00788           int componentType;
00789           switch(image->getBitsPerComponent())
00790           {
00791           case 8:  componentType = GL_UNSIGNED_BYTE;  break;
00792           case 16: componentType = GL_UNSIGNED_SHORT; break;
00793           default:
00794             ARCHON_THROW1(InternalException,
00795                           "Unsupported color component width");
00796           }
00797 
00798           glGenTextures(1, &textureId);
00799           texUsage.textureId = textureId;
00800           glBindTexture(GL_TEXTURE_2D, textureId);
00801           glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S,
00802                           imageTexture->getRepeatS() ? GL_REPEAT : GL_CLAMP);
00803           glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T,
00804                           imageTexture->getRepeatT() ? GL_REPEAT : GL_CLAMP);
00805           glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MAG_FILTER, GL_LINEAR);
00806           glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_MIN_FILTER,
00807                           viewer->useMipmapedTextures ?
00808                           GL_NEAREST_MIPMAP_NEAREST : GL_LINEAR);
00809           glTexEnvf(GL_TEXTURE_ENV, GL_TEXTURE_ENV_MODE, GL_MODULATE);
00810 
00811           if(viewer->useMipmapedTextures)
00812             gluBuild2DMipmaps(GL_TEXTURE_2D, colorMode, image->getWidth(),
00813                               image->getHeight(), colorMode, componentType,
00814                               image->getPixelBuffer());
00815           else glTexImage2D(GL_TEXTURE_2D, 0, colorMode, image->getWidth(),
00816                             image->getHeight(), 0, colorMode, componentType,
00817                             image->getPixelBuffer());
00818         }
00819         else
00820         {
00821           textureId = texUsage.textureId;
00822         }
00823 
00824         return true;
00825       }
00826 
00827       bool updateCalllist()
00828       {
00829         GeometryNode *g = shape->getGeometry().get();
00830         if(!geometry && g) callListId = glGenLists(1);
00831         if(geometry && !g) glDeleteLists(callListId, 1);
00832         geometry = g;
00833         return geometryField->changedSince(shape, calllistStamp);       
00834       }
00835 
00839       bool update(const Viewer *viewer)
00840       {
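              // Note: '|' (not '||') is used so that both update functions
              // are always evaluated; short-circuiting would skip the
              // call-list update whenever the texture changed.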
00841         bool u = updateTexture(viewer) | updateCalllist();
00842 
00843         if(u) calllistStamp = viewer->server->getNextTimeStamp();
00844 
00845         return u;
00846       }
00847 
00848       ~ShapeCache()
00849       {
00850         if(geometry) glDeleteLists(callListId, 1);
00851         if(contentsId)
00852         {
00853           TexUsage &texUsage = texMap[contentsId];
00854           if(--texUsage.useCount == 0)
00855           {
00856             glDeleteTextures(1, &texUsage.textureId);
00857             texMap.erase(contentsId);
00858           }
00859         }
00860       }
00861     };
00862 
00869     Shape *Viewer::drawShape(ShapeNode *shapeNode)
00870     {
00871       Shape *shape = dynamic_cast<Shape *>(shapeNode);
00872       if(!shape)
00873         ARCHON_THROW1(InternalException,
00874                       "'" + shapeNode->getType()->getName() +
00875                       "' is not supported yet");
00876 
00877       if(!shape->drawCache) shape->drawCache = new ShapeCache(shape);
00878       ShapeCache *cache = static_cast<ShapeCache *>(shape->drawCache);
00879 
00880       const bool update = cache->update(this) || updateShapeCacheDuringNextFrame;
00881       if(!cache->geometry) return 0;
00882  
00883       AppearanceNode *appearanceNode = shape->getAppearance().get();
00884       Appearance *a = dynamic_cast<Appearance *>(appearanceNode);
00885       if(appearanceNode && !a)
00886         ARCHON_THROW1(InternalException,
00887                       "'" + appearanceNode->getType()->getName() +
00888                       "' is not supported yet");
00889 
00890       TextureNode *textureNode = a ? a->getTexture().get() : 0;
00891       ImageTexture *imageTexture=0;
00892       if(textureNode)
00893         imageTexture = dynamic_cast<ImageTexture *>(textureNode);
00894       
00895       const TextureTransformNode *textureTransformNode =
00896         a ? a->getTextureTransform().get() : 0;
00897       const TextureTransform *t =
00898         dynamic_cast<const TextureTransform *>(textureTransformNode);
00899       if(textureTransformNode && !t)
00900         ARCHON_THROW1(InternalException,
00901                       "'" + textureTransformNode->getType()->getName() +
00902                       "' is not supported yet");
00903 
00904       const MaterialNode *materialNode = a ? a->getMaterial().get() : 0;
00905       const Material *material = dynamic_cast<const Material *>(materialNode);
00906       if(materialNode && !material)
00907         ARCHON_THROW1(InternalException,
00908                       "'" + materialNode->getType()->getName() +
00909                       "' is not supported yet");
00910       if(material)
00911       {
00912         // Lit object
00913         GLenum face = GL_FRONT_AND_BACK;
00914         GLfloat c[4];
00915 
00916         double alpha = 1 - material->getTransparency();
00917         c[3]=alpha;
00918         glEnable(GL_LIGHTING);
00919 
00920         c[0]=material->getDiffuseColor()[0];
00921         c[1]=material->getDiffuseColor()[1];
00922         c[2]=material->getDiffuseColor()[2];
00923         glMaterialfv(face, GL_DIFFUSE, c);
00924 
00925         c[0] *= material->getAmbientIntensity();
00926         c[1] *= material->getAmbientIntensity();
00927         c[2] *= material->getAmbientIntensity();
00928         glMaterialfv(face, GL_AMBIENT, c);
00929 
00930         c[0] = material->getSpecularColor()[0];
00931         c[1] = material->getSpecularColor()[1];
00932         c[2] = material->getSpecularColor()[2];
00933         glMaterialfv(face, GL_SPECULAR, c);
00934 
00935         glMaterialf(face, GL_SHININESS, material->getShininess()*128);
00936 
00937         c[0] = material->getEmissiveColor()[0];
00938         c[1] = material->getEmissiveColor()[1];
00939         c[2] = material->getEmissiveColor()[2];
00940         glMaterialfv(face, GL_EMISSION, c);
00941 
00942         glColor3d(material->getDiffuseColor()[0],
00943                   material->getDiffuseColor()[1],
00944                   material->getDiffuseColor()[2]);
00945       }
00946       else
00947       {
00948         // Unlit object
00949         glDisable(GL_LIGHTING);
00950         glColor3d(1,1,1);
00951       }
00952 
00953       const bool texture = cache->contentsId;
00954 
00955       bool texStack = false;
00956 
00957       if(texture && enableTexture && !renderConfig.wireframeMode)
00958       {
00959         if(t)
00960         {
00961           glMatrixMode(GL_TEXTURE);
00962           glPushMatrix();
00963         
00964           //glLoadIdentity();
00965           glTranslatef(t->getCenter()[0],
00966                        t->getCenter()[1],
00967                        0);
00968           glScalef(t->getScale()[0],
00969                    t->getScale()[1],
00970                    1);
00971           glRotatef(t->getRotation()/M_PI*180,
00972                     0, 0, 1);
00973           glTranslatef(t->getCenter()[0]*-1,
00974                        t->getCenter()[1]*-1,
00975                        0);
00976           glTranslatef(t->getTranslation()[0],
00977                        t->getTranslation()[1],
00978                        0);
00979 
00980           glMatrixMode(GL_MODELVIEW);
00981 
00982           texStack = true;
00983         }
00984 
00985         glEnable(GL_TEXTURE_2D);
00986         glBindTexture(GL_TEXTURE_2D, cache->textureId);
00987         if(imageTexture)
00988         {
00989           glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_S, imageTexture->getRepeatS() ? GL_REPEAT : GL_CLAMP);
00990           glTexParameteri(GL_TEXTURE_2D, GL_TEXTURE_WRAP_T, imageTexture->getRepeatT() ? GL_REPEAT : GL_CLAMP);
00991         }
00992       }
00993       else glDisable(GL_TEXTURE_2D);
00994 
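            // The geometry is compiled into a display list the first time it
            // is drawn (and whenever it changes) and simply re-executed from
            // the list on subsequent frames.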
00995       GeometryNode *g = cache->geometry;
00996       if(!update) glCallList(cache->callListId);
00997       else if(g)
00998       {
00999         glNewList(cache->callListId, GL_COMPILE_AND_EXECUTE);
01000         g->render(texture, shape, &renderConfig);
01001         glEndList();
01002       }
01003 
01004       if(texStack)
01005       {
01006         glMatrixMode(GL_TEXTURE);
01007         glPopMatrix();
01008         glMatrixMode(GL_MODELVIEW);
01009       }
01010 
01011       return g ? shape : 0;
01012     }
01013 
01014     void Viewer::drawTransform(Transform *t, bool sensor)
01015     {
01016       glPushMatrix();
01017 
01018       glTranslatef(t->getTranslation()[0],
01019                    t->getTranslation()[1],
01020                    t->getTranslation()[2]);
01021       glTranslatef(t->getCenter()[0],
01022                    t->getCenter()[1],
01023                    t->getCenter()[2]);
01024       glRotatef(t->getRotation().angle/M_PI*180,
01025                 t->getRotation().axis[0],
01026                 t->getRotation().axis[1],
01027                 t->getRotation().axis[2]);
01028       glRotatef(t->getScaleOrientation().angle/M_PI*180,
01029                 t->getScaleOrientation().axis[0],
01030                 t->getScaleOrientation().axis[1],
01031                 t->getScaleOrientation().axis[2]);
01032       glScalef(t->getScale()[0],
01033                t->getScale()[1],
01034                t->getScale()[2]);
01035       glRotatef(t->getScaleOrientation().angle/M_PI*180*-1,
01036                 t->getScaleOrientation().axis[0],
01037                 t->getScaleOrientation().axis[1],
01038                 t->getScaleOrientation().axis[2]);
01039       glTranslatef(t->getCenter()[0]*-1,
01040                    t->getCenter()[1]*-1,
01041                    t->getCenter()[2]*-1);
01042 
01043       drawGroupingNode(t, sensor);
01044 
01045       glPopMatrix();
01046     }
01047 
01048     void Viewer::drawBillboard(Billboard *b, bool sensor)
01049     {
01050       glPushMatrix();
01051 
01052       Rotation3 r;
01053       calculateBillboardRotation(b, r);
01054       glRotatef(r.angle/M_PI*180, r.axis[0], r.axis[1], r.axis[2]);
01055 
01056       drawGroupingNode(b, sensor, &r);
01057 
01058       glPopMatrix();
01059     }
01060 
01061     void Viewer::drawSubSceneNode(SubSceneNode *s, bool sensor)
01062     {
01063       Ref<SceneBase> d = s->getSubSceneNoLock();
01064       if(!d) return;
01065 
01066       Ref<Group> h = d->getRootGroupNoLock();
01067       if(!h) return;
01068 
01069       dispatchGroupingNode(h.get(), sensor/* || selectionMode == selectApplications*/);
01070     }
01071 
01072     void Viewer::dispatchGroupingNode(GroupingNode *g, bool sensor)
01073     {
01074       if(Transform *t = dynamic_cast<Transform *>(g))
01075         drawTransform(t, sensor);
01076       else if(Billboard *b = dynamic_cast<Billboard *>(g))
01077         drawBillboard(b, sensor);
01078       else drawGroupingNode(g, sensor);
01079     }
01080 
01085     void Viewer::drawGroupingNode(GroupingNode *g, bool sensor, const Rotation3 *billboardRotation)
01086     {
01087       int numberOfDirectionalLights = 0;
01088 
01089       /*
01090        * Is this the group that introduces the first enabled sensors
01091        * for this branch?  If it is, we raise the flag 'sensor'.
01092        *
01093        * Also, set up all enabled directional light sources that are
01094        * direct children of this group.
01095        *
01096        * Note that if this is a Switch node then neither pointing
01097        * device sensors nor directional lights can have any
01098        * effect. This is because they affect only their siblings and
01099        * the children of their siblings, and since this is a Switch
01100        * node at most one child can be active at any point in
01101        * time, so there are no active siblings.
01102        */
01103       if(!dynamic_cast<const Switch *>(g))
01104       {
01105         GroupingNode::const_iterator c=g->begin();
01106         while(c!=g->end())
01107         {
01108           if(const PointingDeviceSensorNode *s =
01109              dynamic_cast<const PointingDeviceSensorNode *>(c->get()))
01110           {
01111             if(tracePointingRay/* && selectionMode == selectSensors*/ &&
01112                !sensor && s->getEnabled()) sensor = true;
01113           }
01114           else if(const DirectionalLight *l =
01115                   dynamic_cast<const DirectionalLight *>(c->get()))
01116           {
01117             if(l->getOn())
01118             {
01119               ++numberOfDirectionalLights;
01120               int i = GL_LIGHT0 + numberOfEnabledLights++;
01121 
01122               GLfloat v[4];
01123               v[3] = 1;
01124 
01125               v[0] = l->getColor()[0] * l->getAmbientIntensity();
01126               v[1] = l->getColor()[1] * l->getAmbientIntensity();
01127               v[2] = l->getColor()[2] * l->getAmbientIntensity();
01128               glLightfv(i, GL_AMBIENT, v);
01129 
01130               v[0] = l->getColor()[0] * l->getIntensity();
01131               v[1] = l->getColor()[1] * l->getIntensity();
01132               v[2] = l->getColor()[2] * l->getIntensity();
01133               glLightfv(i, GL_DIFFUSE, v);
01134               glLightfv(i, GL_SPECULAR, v);
01135 
01136               v[0] = -l->getDirection()[0];
01137               v[1] = -l->getDirection()[1];
01138               v[2] = -l->getDirection()[2];
01139               v[3] = 0; // Push the position to infinity
01140               glLightfv(i, GL_POSITION, v);
01141 
01142               glLightf(i, GL_SPOT_CUTOFF, 180);
01143 
01144               glEnable(i);
01145             }
01146           }
01147 
01148           ++c;
01149         }
01150       }
01151 
01152       /*
01153        * If we are detecting pointing ray collisions during this
01154        * frame, then all Shape nodes that are immediate children of
01155        * this GroupingNode are added to this list. These are the Shape
01156        * nodes that need to be tested for intersection with the
01157        * pointing ray.
01158        */
01159       vector<Shape *> shapes;
01160 
01161       // Recurse into the children of this group
01162       GroupingNode::const_iterator c=g->begin();
01163       ++level;
01164       if(Switch *s = dynamic_cast<Switch *>(g))
01165       {
01166         int i = s->getWhichChoise();
01167         if(i>=0)
01168         {
01169           c += i;
01170           if(c < g->end())
01171           {
01172             if(ShapeNode *s = dynamic_cast<ShapeNode *>(c->get()))
01173             {
01174               Shape *s2 = drawShape(s);
01175               if(tracePointingRay && s2) shapes.push_back(s2);
01176             }
01177             else if(SubSceneNode *s = dynamic_cast<SubSceneNode *>(c->get()))
01178               drawSubSceneNode(s, sensor);
01179             else if(GroupingNode *g2 = dynamic_cast<GroupingNode *>(c->get()))
01180               dispatchGroupingNode(g2, sensor);
01181           }
01182         }
01183       }
01184       else while(c!=g->end())
01185       {
01186         if(ShapeNode *s = dynamic_cast<ShapeNode *>(c->get()))
01187         {
01188           Shape *s2 = drawShape(s);
01189           if(tracePointingRay && s2) shapes.push_back(s2);
01190         }
01191         else if(SubSceneNode *s = dynamic_cast<SubSceneNode *>(c->get()))
01192           drawSubSceneNode(s, sensor);
01193         else if(GroupingNode *g2 = dynamic_cast<GroupingNode *>(c->get()))
01194           dispatchGroupingNode(g2, sensor);
01195 
01196         ++c;
01197       }
01198       --level;
01199 
01200       // Turn off directional light sources belonging to this group
01201       while(numberOfDirectionalLights--)
01202         glDisable(GL_LIGHT0 + --numberOfEnabledLights);
01203 
01204       if(!shapes.empty())
01205       {
01206         // Fetch and invert the current modelview matrix
01207         double m[16];
01208         glGetDoublev(GL_MODELVIEW_MATRIX, m);
01209         CoordSystem3x3 modelview(m);
01210         CoordSystem3x3 invModelview;
01211         invModelview.setInverseOf(modelview);
01212 
01213         // Map pointing ray into local coordinates
01214         Math::Ray3 localRay;
01215         localRay.direction = pointingRay;
01216         invModelview.basis.map(localRay.direction);
01217         localRay.origin = localRay.direction;
01218         localRay.origin += invModelview.origin;
01219 
01220         // Ray collision detection with extracted shapes
01221         bool anyHits = false;
01222         for(unsigned i=0; i<shapes.size(); ++i)
01223         {
01224           double dist;
01225           Shape *shape = shapes[i];
01226           int where = shape->getGeometry()->intersect(localRay, dist);
01227           if(where && (dist < hit.distance || hit.distance < 0))
01228           {
01229             hit.distance = dist;
01230             if(sensor)
01231             {
01232               hit.node = shape;
01233               hit.where = where;
01234             }
01235             anyHits = true;
01236           }
01237         }
01238 
01239         if(anyHits)
01240         {
01241           if(sensor)
01242           {
01243             hit.localRay = localRay;
01244             hitGroupThread.clear();
01245             hitGroupThreadBillboardRotations.clear();
01246             hitGroupThreadLowerLevel = level+1;
01247           }
01248           else hit.node = 0;
01249         }
01250       }
01251 
01252       if(hit.node && level < hitGroupThreadLowerLevel)
01253       {
01254         hitGroupThread.push_back(g);
01255         if(billboardRotation) hitGroupThreadBillboardRotations.push_back(*billboardRotation);
01256         --hitGroupThreadLowerLevel;
01257       }
01258     }
01259 
01263     void Viewer::resetViewpoint(string n, const CoordSystem3x3 &s)
01264     {
01265       if(!server->getRootScene())
01266         ARCHON_THROW1(InternalException,
01267                       "Viewer::resetViewpoint: No root scene");
01268 
01269       // Attempt to bind to the specified viewpoint
01270       if(!n.empty()) bindViewpoint(n);
01271 
01272       // Otherwise bind to the first viewpoint if any
01273       if(!boundViewpoint)
01274         boundViewpoint =
01275           findBoundViewpoint_group(server->getRootScene()->
01276                                    getRootGroup().get());
01277 
01278       // Otherwise use the default viewpoint
01279       if(!boundViewpoint) viewCoordSystem = s;
01280     }
01281 
01282 
01283     void Viewer::setPointingDeviceActive(bool m)
01284     {
01285       pointingDeviceActive = m;
01286     }
01287 
01288     void Viewer::setInitialPointingDevicePosition2D(Vector2 p)
01289     {
01290       pointingDevicePosition = p;
01291     }
01292 
01293     void Viewer::setPointingDevicePosition2D(Vector2 p)
01294     {
01295       pointingDevicePosition = p;
01296       pointingDevicePositionChangedSinceLastFrame = true;
01297     }
01298 
01318     void Viewer::renderFrame()
01319     {
01320       // Consider pointing device activation and deactivation
01321       bool activate = false;
01322       bool deactivate = false;
01323       if(pointingDeviceActive && !disregardSensors)
01324       {
01325         if(!pointingDeviceLastActive)
01326         {
01327           activate = true;
01328           pointingDeviceLastActive = true;
01329         }
01330       }
01331       else if(pointingDeviceLastActive)
01332       {
01333         deactivate = true;
01334         pointingDeviceLastActive = false;
01335       }
01336 
01337       if((pointingDevicePositionChangedSinceLastFrame && !disregardSensors) || activate)
01338       {
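              // Map the 2D pointing device position to a ray direction on
              // the near clipping plane in eye coordinates: x in
              // [0, resolutionX] maps to [-viewPlane[0], +viewPlane[0]], the
              // y-coordinate is flipped because window coordinates grow
              // downwards, and z is the near clipping distance (negated).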
01351         pointingRay[0] = viewPlane[0] * ( 2 * pointingDevicePosition[0] / resolutionX - 1);
01352         pointingRay[1] = viewPlane[1] * (-2 * pointingDevicePosition[1] / resolutionY + 1);
01353         pointingRay[2] = -nearClippingDist;
01354 
01355         tracePointingRay = true;
01356       }
01357       else tracePointingRay = false;
01358 
01359       hit.node = 0;
01360       level = 0;
01361       hitGroupThread.clear();
01362       hitGroupThreadBillboardRotations.clear();
01363       hit.distance = -1; // Force first hit to be closer than "previous".
01364 
01365       glShadeModel(GL_SMOOTH);
01366       glColorMaterial(GL_FRONT_AND_BACK, GL_DIFFUSE);
01367       glEnable(GL_NORMALIZE);
01368       glPolygonMode(GL_FRONT_AND_BACK, renderConfig.wireframeMode ? GL_LINE : GL_FILL);
01369       glLightModeli(GL_LIGHT_MODEL_COLOR_CONTROL, separateSpecularColorMode ? GL_SEPARATE_SPECULAR_COLOR : GL_SINGLE_COLOR);
01370       glLightModeli(GL_LIGHT_MODEL_LOCAL_VIEWER, lightModelLocalViewerMode ? 1 : 0);
01371 
01372       Mutex::Lock lock(server->scenegraphMutex);
01373 
01374       // Process input events and generate new autonomous (time based)
01375       // events
01376       server->tick();
01377 
01378       /*
01379        * This must be done by the same thread that creates
01380        * ShapeCaches, since it involves OpenGL commands.
01381        */
01382       ShapeCache::cleanUp();
01383 
01384       Scene *rootScene = server->getRootSceneNoLock().get();
01385       if(rootScene)
01386       {
01387         GroupingNode *rootGroup = rootScene->getRootGroupNoLock().get();
01388         if(rootGroup)
01389         {
01390           glMatrixMode(GL_PROJECTION);
01391           glLoadIdentity();
01392           glFrustum(-1, 1, -1, 1, 1, 1000-1);
01393           glMatrixMode(GL_MODELVIEW);
01394           glLoadIdentity();
01395           glTranslated(0, 0, -10);
01396 
01397           setupViewpoint();
01398 
01399           setupLight(headLight, showLightSources);
01400 
01401           /*
01402            * Render the scene and, when requested ('tracePointingRay ==
01403            * true'), trace the pointing ray for intersection with sensor
01404            * affected geometry. Upon return, 'hit.node != 0' indicates
01405            * that the ray has hit a sensor affected shape node, and a
01406            * reference to that node is stored in 'hit.node'. The rest of
01407            * the fields of 'hit' are set to reflect that hit, and
01408            * 'hitGroupThread' contains the path of grouping nodes from the
01409            * one immediately enclosing 'hit.node' up to the root group of
01410            * the scene.
01411            */
01412           drawGroupingNode(rootGroup, false);
01413         }
01414       }
01415 
01416       // Handling PointingDeviceSensorNodes:
01417       //
01418       // We need to keep a set of references to
01419       // PointingDeviceSensorNodes whose isOver attribute has been set
01420       // to true. This list must be kept in the Viewer object, and we
01421       // need it to make sure that all those nodes get their isOver
01422       // attribute reset to false, even if they are removed from the
01423       // GroupingNode with isOver set to true. We call this list of
01424       // PointingDeviceSensorNodes the isOver set.
01425       //
01426       // For a given PointingDeviceSensorNode the isOver attribute
01427       // should be true when and only when the
01428       // PointingDeviceSensorNode is enabled and is in the current
01429       // isOver set.
01430       //
01431       // We should work with sets as opposed to lists of
01432       // PointingDeviceSensorNodes here to prevent sending isOver
01433       // events several times on the same node just because it is
01434       // included several times in the same GroupingNode.
01435       //
01436       // Set isOver to false for all enabled PointingDeviceSensorNodes
01437       // which were in the isOver set during the previous frame but
01438       // are not there now.
01439       //
01440       // Remember to set isOver to false when a
01441       // PointingDeviceSensorNode is disabled and has isOver set to
01442       // true. This must be dealt with during event handling.
01443       //
01444       // Set isOver to true for all enabled PointingDeviceSensorNodes
01445       // which are now in the isOver set but were not there during
01446       // the previous frame.
01447       //
01448       // Calculate the mapping from the coordinate system of the
01449       // intersected geometry to the coordinate system of the sensor
01450       // group if one of the following is true:
01451       //
01452       //  - The isOver set contains at least one TouchSensor. We
01453       //    need to map the hit point and the hit normal
01454       //  - The isOver set contains at least one DragSensorNode and
01455       //    this is the first frame of the drag. We need to calculate
01456       //    the dimensions of the virtual geometry of the
01457       //    DragSensorNode, but only at the first frame of the drag.
01458       //
01459       // If the isOver set contains at least one TouchSensor, calculate
01460       // the inverse mapping of the one calculated above for mapping
01461       // hit-normals.
01462       //
01463       // ---------------------------------------------------------------
01464       // Fig. 1: The hitGroupThread layout (an example)
01465       //
01466       // Index:                0   1   2   3
01467       // Transformation:     - > - > - > - > -     > -
01468       // Coordinate system:  G       S       R       E
01469       //
01470       // Legend:
01471       // -: A coordinate system
01472       // >: A transformation (plain Groups are identity transformations)
01473       // G: The coordinate system of the indicated geometry
01474       // S: The coordinate system of the influenced sensors
01475       // R: The root coordinate system
01476       // E: Eye-space (i.e. the current viewpoint coordinate system)
01477       //
01478       // Note: Coordinate systems G, S and R may all be
01479       // coincident. That is, without any transformations/groups
01480       // between them.
01481       //
01482       // Note: The transformation at index 3 (in this case) is the
01483       // root group of the scene graph and is thus an identity
01484       // transformation; nevertheless it is always included in the
01485       // hitGroupThread.
01486       //
01487       // Note: The last transformation (without index) is the view point
01488       // transformation and is not included in the hitGroupThread.
01489       // ---------------------------------------------------------------
01490       //
01491 
01492       Time timeStamp = server->getNextTimeStamp();
01493 
01494       /*
01495        * Run down the thread of GroupingNodes from the intersected
01496        * geometry towards the root of the scene graph in search of the
01497        * first one holding at least one enabled
01498        * PointingDeviceSensorNode. In fig 1 this is index 2.
01499        */
01500       unsigned sensorIndex = 0;
01501       unsigned sensorIndexInBillboardRotations = 0; // Or number of billboards under sensor group
01502       if(hit.node)
01503       {
01504         while(sensorIndex < hitGroupThread.size())
01505         {
01506           const GroupingNode *g = hitGroupThread[sensorIndex];
01507           GroupingNode::const_iterator j=g->begin();
01508           while(j != g->end())
01509           {
01510             if(const PointingDeviceSensorNode *s =
01511                dynamic_cast<const PointingDeviceSensorNode *>(j->get()))
01512               if(s->getEnabled()) break;
01513             ++j;
01514           }
01515           if(j != g->end()) break;
01516           ++sensorIndex;
01517           if(dynamic_cast<const Billboard *>(g)) ++sensorIndexInBillboardRotations;
01518         }
01519 
01520         if(sensorIndex == hitGroupThread.size())
01521           ARCHON_THROW1(InternalException, "Could not find sensor group");
01522       }
01523 
01524       // Build the new isOver set and determine the existence of touch and/or
01525       // drag sensors
01526       bool anyTouchSensors = false;
01527       bool anyDragSensors = false;
01528       set<Ref<PointingDeviceSensorNode> > newOverSet;
01529       if(hit.node)
01530       {
01531         Ref<const GroupingNode> g = hitGroupThread[sensorIndex];
01532         GroupingNode::const_iterator i=g.get()->begin();
01533         while(i!=g.get()->end())
01534         {
01535           if(PointingDeviceSensorNode *s =
01536              dynamic_cast<PointingDeviceSensorNode *>(const_cast<ChildNode *>(i->get())))
01537             if(s->getEnabled())
01538             {
01539               if(dynamic_cast<TouchSensor *>(s)) anyTouchSensors = true;
01540               if(dynamic_cast<DragSensorNode *>(s)) anyDragSensors = true;
01541               newOverSet.insert(s);
01542             }
01543           ++i;
01544         }
01545       }
01546 
01547       // Update the isOver set if the pointing ray was traced in this
01548       // frame (pointing device movement or activation)
01549       if(tracePointingRay)
01550       {
01551         // Set isOver to false for all enabled PointingDeviceSensorNodes
01552         // which were in the isOver set during the previous frame but
01553         // are not there now.
01554         {
01555           set<Ref<PointingDeviceSensorNode> >::iterator i = pointingDeviceOverSet.begin();
01556           while(i != pointingDeviceOverSet.end())
01557           {
01558             if(newOverSet.find(*i) == newOverSet.end())
01559             {
01560               PointingDeviceSensorNode *s = i->get();
01561               if(s->isOver)
01562               {
01563                 s->isOver = false;
01564                 Event e(new SimpleValue<bool>(SFBool::type, s->isOver), timeStamp);
01565                 s->isOverChanged.cascadeEvent(&e);
01566               }
01567             }
01568             ++i;
01569           }
01570         }
01571 
01572         // Set isOver to true for all enabled PointingDeviceSensorNodes
01573         // which are now in the isOver set but were not in it during
01574         // the previous frame.
01575         if(hit.node)
01576         {
01577           set<Ref<PointingDeviceSensorNode> >::iterator i = newOverSet.begin();
01578           while(i != newOverSet.end())
01579           {
01580             if(pointingDeviceOverSet.find(*i) == pointingDeviceOverSet.end())
01581             {
01582               PointingDeviceSensorNode *s = i->get();
01583               if(!s->isOver)
01584               {
01585                 s->isOver = true;
01586                 Event e(new SimpleValue<bool>(SFBool::type, s->isOver), timeStamp);
01587                 s->isOverChanged.cascadeEvent(&e);
01588               }
01589             }
01590             ++i;
01591           }
01592         }
01593 
01594         // Save the current isOver set for next frame
01595         pointingDeviceOverSet = newOverSet;
01596       }
01597 
01598       // Handle deactivation of sensors
01599       if(deactivate && !pointingDeviceActiveSet.empty())
01600       {
01601         set<Ref<PointingDeviceSensorNode> >::iterator i = pointingDeviceActiveSet.begin();
01602         while(i != pointingDeviceActiveSet.end())
01603         {
01604           PointingDeviceSensorNode *s = i->get();
01605           if(s->isActive)
01606           {
01607             s->isActive = false;
01608             Event e(new SimpleValue<bool>(SFBool::type, s->isActive), timeStamp);
01609             s->isActiveChanged.cascadeEvent(&e);
01610           }
01611           ++i;
01612         }
01613 
01614         // Set the touchTime fields of the TouchSensors that are currently
01615         // also in the isOver set, and handle autoOffset for DragSensorNodes.
01616         i = pointingDeviceActiveSet.begin();
01617         while(i != pointingDeviceActiveSet.end())
01618         {
01619           PointingDeviceSensorNode *s = i->get();
01620           if(TouchSensorNode *t = dynamic_cast<TouchSensorNode *>(s))
01621           {
01622             if(pointingDeviceOverSet.find(*i) != pointingDeviceOverSet.end())
01623             {
01624               t->touchTime = timeStamp;
01625               Event e(new SimpleValue<Time>(SFTime::type, t->touchTime), timeStamp);
01626               t->touchTimeChanged.cascadeEvent(&e);
01627             }
01628           }
01629           else if(PlaneSensor *t = dynamic_cast<PlaneSensor *>(s))
01630           {
01631             if(t->autoOffset)
01632             {
01633               t->offset = t->translation;
01634               Event e(new SimpleValue<Vector3>(SFVec3f::type, t->offset), timeStamp);
01635               t->offsetChanged.cascadeEvent(&e);
01636             }
01637           }
01638           else if(SphereSensor *t = dynamic_cast<SphereSensor *>(s))
01639           {
01640             if(t->autoOffset)
01641             {
01642               t->offset = t->rotation;
01643               Event e(new SimpleValue<Rotation3>(SFRotation::type, t->offset), timeStamp);
01644               t->offsetChanged.cascadeEvent(&e);
01645             }
01646           }
01647           else if(CylinderSensor *t = dynamic_cast<CylinderSensor *>(s))
01648           {
01649             if(t->autoOffset)
01650             {
01651               t->offset = t->rotation.angle;
01652               Event e(new SimpleValue<double>(SFFloat::type, t->offset), timeStamp); // SFFloat::type assumed to be the scalar field type
01653               t->offsetChanged.cascadeEvent(&e);
01654             }
01655           }
01656 
01657           ++i;
01658         }
01659 
01660         pointingDeviceActiveSet.clear();
01661         anyActiveDragSensors = false;
01662       }
01663 
01664       // Handle activation of sensors
01665       bool startDrag = activate && !pointingDeviceOverSet.empty();
01666       if(startDrag)
01667       {
01668         pointingDeviceActiveSet = pointingDeviceOverSet;
01669         anyActiveDragSensors = anyDragSensors;
01670 
01671         set<Ref<PointingDeviceSensorNode> >::iterator i = pointingDeviceActiveSet.begin();
01672         while(i != pointingDeviceActiveSet.end())
01673         {
01674           PointingDeviceSensorNode *s = i->get();
01675           if(!s->isActive)
01676           {
01677             s->isActive = true;
01678             Event e(new SimpleValue<bool>(SFBool::type, s->isActive), timeStamp);
01679             s->isActiveChanged.cascadeEvent(&e);
01680           }
01681           ++i;
01682         }
01683       }
01684 
01685       /*
01686        * These flags must be cleared at each invocation of this method
01687        * and after the actual rendering, which is initiated by the call
01688        * to 'drawGroupingNode' above.
01689        */
01690       updateShapeCacheDuringNextFrame = false;
01691       pointingDevicePositionChangedSinceLastFrame = false;
01692 
01693 
01694       // Quit here if the pointing device has neither moved nor been
01695       // activated since the previous frame.
01696       if(!tracePointingRay) return;
01697 
01698       // An obvious optimization opportunity: there is no need to
01699       // calculate, for example, the hitNormal for a touch sensor if
01700       // no routes are attached to that field.
01701       Vector3 hitPoint;
01702       Vector3 hitNormal;
01703       Vector2 hitTexCoord;
01704       // Note that: hit.node == null ==> startDrag == false
01705       if((hit.node && anyTouchSensors) || (startDrag && anyDragSensors))
01706       {
01707         // Calculate the mapping from the coordinate system of the
01708         // intersected geometry to the coordinate system of the sensor
01709         // group. In fig. 1 this is the combination of transformations
01710         // 0 and 1.
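              //
              // For reference: each Transform composes onto 'm' following the
              // standard VRML/X3D Transform equation, with C = center,
              // R = rotation, SR = scaleOrientation, S = scale and
              // T = translation:
              //
              //   P' = T . C . R . SR . S . -SR . -C . P
              //
              // which is exactly the order of the translate/rotate/scale
              // calls in the loop below.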
01711         CoordSystem3x3 m = CoordSystem3x3::identity();
01712         for(unsigned i = sensorIndex, j = sensorIndexInBillboardRotations; i>0; --i)
01713         {
01714           const GroupingNode *g = hitGroupThread[i-1];
01715 
01716           if(const Transform *t = dynamic_cast<const Transform *>(g))
01717           {
01718             m.translate(t->getTranslation());
01719             m.translate(t->getCenter());
01720             m.basis.rotate(t->getRotation());
01721             m.basis.rotate(t->getScaleOrientation());
01722             m.basis.scale(t->getScale());
01723             m.basis.rotate(-t->getScaleOrientation());
01724             m.translate(-t->getCenter());
01725           }
01726           else if(dynamic_cast<const Billboard *>(g))
01727             m.basis.rotate(hitGroupThreadBillboardRotations[--j]);
01728         }
01729 
01730         // Determine the hit point in the coordinate system of the
01731         // intersected geometry.
01732         hitPoint = hit.localRay.direction;
01733         hitPoint *= hit.distance;
01734         hitPoint += hit.localRay.origin;
01735 
01736         if(anyTouchSensors)
01737         {
01738           // Calculate the hitNormal and the hitTexCoord
01739           hit.node->getGeometry()->getNormalAndTexCoord(hitPoint, hit.where, hit.node, &hitNormal, &hitTexCoord);
01740 
01741           /*
01742           // For debugging hitPoint and especially hitNormal
01743           glEnable(GL_COLOR_MATERIAL);
01744           glDisable(GL_TEXTURE_2D);
01745           glColor3d(1, 1, 1);
01746           glBegin(GL_LINES);
01747           glVertex3d(hitPoint[0], hitPoint[1], hitPoint[2]);
01748           glVertex3d(hitPoint[0]+hitNormal[0], hitPoint[1]+hitNormal[1], hitPoint[2]+hitNormal[2]);
01749           glEnd();
01750           */
01751 
01752           // When mapping normals we need to derive the "normal map"
01753           // from 'm'. m.basis is the "tangential map". The transpose
01754           // of the inverse of the tangential map is the normal map.
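                // A sketch of why the inverse-transpose is used: for a normal n
                // and any tangent t the mapped vectors must stay perpendicular,
                // i.e. (N n) . (M t) = 0 whenever n . t = 0, which is satisfied
                // by N = (M^-1)^T. The '*=' call below applies the inverse with
                // the transposed mapping, per its comment, so no explicit
                // transpose is needed.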
01755           Matrix3x3 invMap;
01756           invMap.setInverseOf(m.basis);
01757           hitNormal *= invMap; // Transposed mapping
01758           hitNormal.normalize();
01759         }
01760 
01761         // Map the hit point to the coordinate system of the sensors.
01762         m.map(hitPoint);
01763       }
01764 
01765       // If this is the first frame of a drag, we need to determine
01766       // the mapping from eye-space to the sensor coordinate system
01767       // and save it into the Viewer object for mapping pointing rays
01768       // for the following frames of the drag. In fig. 1 this is the
01769       // combination of transformations 2 and 3 and the view point
01770       // transformation.
01771       if(startDrag && anyDragSensors)
01772       {
01773         // We combine the transformations in reverse order to
01774         // eliminate a pair of matrix inversions. That is, we utilize
01775         // the rule that the inverse of a matrix product is the
01776         // product of the inverse matrices in reverse order.
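              //
              // In symbols: if the child-to-parent mapping is A.B, then the
              // parent-to-child mapping is (A.B)^-1 = B^-1.A^-1. The loop below
              // therefore walks from the sensor group towards the root and
              // appends each group's inverse, and the inverted Transform
              // sequence
              //
              //   C . SR . S^-1 . -SR . -R . -C . -T
              //
              // mirrors the forward order used above when mapping the hit
              // point.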
01777         CoordSystem3x3 m = CoordSystem3x3::identity();
01778         for(unsigned i = sensorIndex, j = sensorIndexInBillboardRotations; i<hitGroupThread.size(); ++i)
01779         {
01780           const GroupingNode *g = hitGroupThread[i];
01781 
01782           if(const Transform *t = dynamic_cast<const Transform *>(g))
01783           {
01784             m.translate(t->getCenter());
01785             m.basis.rotate(t->getScaleOrientation());
01786             m.basis.invScale(t->getScale());
01787             m.basis.rotate(-t->getScaleOrientation());
01788             m.basis.rotate(-t->getRotation());
01789             m.translate(-t->getCenter());
01790             m.translate(-t->getTranslation());
01791           }
01792           else if(dynamic_cast<const Billboard *>(g))
01793             m.basis.rotate(-hitGroupThreadBillboardRotations[j++]);
01794         }
01795 
01796         m *= viewCoordSystem;
01797 
01798         eyeToSensorMap = m;
01799 
01800         // Save the initial hit point of the drag operation in the
01801         // Viewer object, for use during the remainder of the drag
01802         // operation.
01803         initialDragHitPoint = hitPoint;
01804       }
01805 
01806       // Map the pointing ray into the sensor coordinate system during
01807       // a drag.
01808       Ray3 sensorRay;
01809       if(anyActiveDragSensors)
01810       {
01811         // Note: The pointing ray is represented by a single vector
01812         // since its origin vector and direction vector are always
01813         // identical.
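              // Mapping the ray into sensor space is then the usual affine
              // mapping: the direction, being a vector, is mapped by the basis
              // alone, while the origin, being a point, additionally has the
              // map's origin added; that is what the three statements below do.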
01814         sensorRay.direction = pointingRay;
01815         eyeToSensorMap.basis.map(sensorRay.direction);
01816         sensorRay.origin = sensorRay.direction;
01817         sensorRay.origin += eyeToSensorMap.origin;
01818       }
01819 
01820       // Calculate and send out TouchSensor tracking events
01821       {
01822         set<Ref<PointingDeviceSensorNode> >::iterator i = pointingDeviceOverSet.begin();
01823         while(i != pointingDeviceOverSet.end())
01824         {
01825           PointingDeviceSensorNode *s = i->get();
01826           if(TouchSensor *t = dynamic_cast<TouchSensor *>(s))
01827           {
01828             // hitPoint
01829             {
01830               t->hitPoint = hitPoint;
01831               Event e(new SimpleValue<Vector3>(SFVec3f::type, t->hitPoint), timeStamp);
01832               t->hitPointChanged.cascadeEvent(&e);
01833             }
01834 
01835             // hitNormal
01836             {
01837               t->hitNormal = hitNormal;
01838               Event e(new SimpleValue<Vector3>(SFVec3f::type, t->hitNormal), timeStamp);
01839               t->hitNormalChanged.cascadeEvent(&e);
01840             }
01841 
01842             // hitTexCoord
01843             {
01844               t->hitTexCoord = hitTexCoord;
01845               Event e(new SimpleValue<Vector2>(SFVec2f::type, t->hitTexCoord), timeStamp);
01846               t->hitTexCoordChanged.cascadeEvent(&e);
01847             }
01848           }
01849           ++i;
01850         }
01851       }
01852 
01853       // Calculate and send out DragSensor tracking events
01854       {
01855         set<Ref<PointingDeviceSensorNode> >::iterator i = pointingDeviceActiveSet.begin();
01856         while(i != pointingDeviceActiveSet.end())
01857         {
01858           PointingDeviceSensorNode *s = i->get();
01859           ++i;
01860           if(PlaneSensor *t = dynamic_cast<PlaneSensor *>(s))
01861           {
01862             Vector3 p;
01863             if(startDrag) p = initialDragHitPoint;
01864             else
01865             {
01866               // Calculate intersection with virtual plane
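                    // A short derivation: the ray is p(k) = origin + k*direction
                    // and the virtual plane is z = initialDragHitPoint[2] in
                    // sensor space, so
                    //   k = (initialDragHitPoint[2] - origin[2]) / direction[2].
                    // direction[2] == 0 means the ray is parallel to the plane,
                    // and k <= 0 means the plane lies behind the viewer; both
                    // cases are skipped.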
01867               double k = sensorRay.direction[2];
01868               if(k == 0) continue;
01869               k = (initialDragHitPoint[2] - sensorRay.origin[2]) / k;
01870               if(k <= 0) continue;
01871               p[0] = sensorRay.origin[0] + k * sensorRay.direction[0];
01872               p[1] = sensorRay.origin[1] + k * sensorRay.direction[1];
01873               p[2] = initialDragHitPoint[2];
01874             }
01875 
01876             // Send out tracking events
01877             {
01878               t->trackPoint = p;
01879               Event e(new SimpleValue<Vector3>(SFVec3f::type, t->trackPoint), timeStamp);
01880               t->trackPointChanged.cascadeEvent(&e);
01881             }
01882 
01883             p -= initialDragHitPoint;
01884             p += t->offset;
01885 
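                  // Clamping follows the usual VRML/X3D convention: a component
                  // is constrained only when minPosition <= maxPosition on that
                  // axis; otherwise it is left unclamped.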
01886             // Clamp x-coordinate
01887             if(t->minPosition[0] <= t->maxPosition[0])
01888             {
01889               if(p[0] < t->minPosition[0]) p[0] = t->minPosition[0];
01890               else if(p[0] > t->maxPosition[0]) p[0] = t->maxPosition[0];
01891             }
01892 
01893             // Clamp y-coordinate
01894             if(t->minPosition[1] <= t->maxPosition[1])
01895             {
01896               if(p[1] < t->minPosition[1]) p[1] = t->minPosition[1];
01897               else if(p[1] > t->maxPosition[1]) p[1] = t->maxPosition[1];
01898             }
01899 
01900             {
01901               t->translation = p;
01902               Event e(new SimpleValue<Vector3>(SFVec3f::type, t->translation), timeStamp);
01903               t->translationChanged.cascadeEvent(&e);
01904             }
01905           }
01906           else if(SphereSensor *t = dynamic_cast<SphereSensor *>(s))
01907           {
01908             Vector3 p;
01909             if(startDrag)
01910             {
01911               p = initialDragHitPoint;
01912               virtualSphereSensor = Math::Sphere3(initialDragHitPoint.length());
01913             }
01914             else
01915             {
01916               // Calculate intersection with virtual sphere
01917               double dist;
01918               if(!Math::intersect(sensorRay, virtualSphereSensor, dist)) return;
01919               p = sensorRay.direction;
01920               p *= dist;
01921               p += sensorRay.origin;
01922             }
01923 
01924             // Send out tracking events
01925             {
01926               t->trackPoint = p;
01927               Event e(new SimpleValue<Vector3>(SFVec3f::type, t->trackPoint), timeStamp);
01928               t->trackPointChanged.cascadeEvent(&e);
01929             }
01930 
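                  // A sketch of the rotation derivation (assuming that
                  // Vector3::operator*= below is the cross product): both hit
                  // points lie on the virtual sphere of radius r, so
                  //   dot(p, initialDragHitPoint) = r^2 * cos(angle)
                  // makes 'ca' the cosine of the drag angle, and the negated,
                  // normalized product supplies the rotation axis. The drag
                  // rotation is then composed with the stored offset via
                  // quaternions. Coincident or collinear hit points fall back
                  // to the offset rotation.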
01931             {
01932               double ca = dot(p, initialDragHitPoint) /
01933                 (virtualSphereSensor.radius*virtualSphereSensor.radius);
01934               if(ca >= 1) t->rotation = t->offset;
01935               else
01936               {
01937                 p *= initialDragHitPoint;
01938                 double s = p.squareSum();
01939                 if(s==0) t->rotation = t->offset;
01940                 else
01941                 {
01942                   p /= sqrt(s); // Normalize
01943                   p.negate();
01944 
01945                   // Combine the two rotations through the use of quaternions
01946                   Quaternion q1, q2;
01947                   q1.setRotation(t->offset);
01948                   q2.setRotation(p, ca);
01949                   q2 *= q1;
01950                   q2.getRotation(t->rotation);
01951                 }
01952               }
01953 
01954               Event e(new SimpleValue<Rotation3>(SFRotation::type, t->rotation), timeStamp);
01955               t->rotationChanged.cascadeEvent(&e);
01956             }
01957           }
01958           else if(CylinderSensor *t = dynamic_cast<CylinderSensor *>(s))
01959           {
01960             Vector3 p;
01961             if(startDrag)
01962             {
01963               p = initialDragHitPoint;
01964 
01965               // Determine whether this is a disc or a cylinder rotation
01966               double a = acos(sensorRay.direction[1]/sensorRay.direction.length());
01967               if(a > M_PI/2) a = M_PI - a; // Get acute angle
01968 
01969               // Get a polar representation of initialDragHitPoint
01970               virtualCylinderSensorRadius = a < t->diskAngle ? -1 : Math::length(p[0], p[2]);
01971               virtualCylinderSensorAngle = Math::angle(p[0], -p[2]);
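                    // A negative virtualCylinderSensorRadius acts as a flag for
                    // the rest of the drag: the pointing ray is then intersected
                    // with the virtual disc y = initialDragHitPoint[1], otherwise
                    // with the virtual cylinder of that radius.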
01972             }
01973             else if(virtualCylinderSensorRadius < 0)
01974             {
01975               // Calculate intersection with virtual disc
01976               double k = sensorRay.direction[1];
01977               if(k == 0) continue;
01978               k = (initialDragHitPoint[1] - sensorRay.origin[1]) / k;
01979               p[0] = sensorRay.origin[0] + k * sensorRay.direction[0];
01980               p[1] = initialDragHitPoint[1];
01981               p[2] = sensorRay.origin[2] + k * sensorRay.direction[2];
01982             }
01983             else
01984             {
01985               // Calculate intersection with virtual cylinder
01986               double dist;
01987               if(!Math::intersectCylinder(sensorRay, -1, virtualCylinderSensorRadius, dist)) return;
01988               p = sensorRay.direction;
01989               p *= dist;
01990               p += sensorRay.origin;
01991             }
01992 
01993             // Send out tracking events
01994             {
01995               t->trackPoint = p;
01996               Event e(new SimpleValue<Vector3>(SFVec3f::type, t->trackPoint), timeStamp);
01997               t->trackPointChanged.cascadeEvent(&e);
01998             }
01999 
02000             double a = Math::angle(p[0], -p[2]) - virtualCylinderSensorAngle;
02001 
02002             // Fix range of difference to [-pi;pi)
02003             if(a < -M_PI) a += 2*M_PI;
02004             else if(a >= M_PI) a -= 2*M_PI;
02005 
02006             a += t->offset;
02007 
02008             // Clamp effective angle
02009             if(t->minAngle <= t->maxAngle)
02010             {
02011               if(a < t->minAngle) a = t->minAngle;
02012               else if(a > t->maxAngle) a = t->maxAngle;
02013             }
02014 
02015             {
02016               t->rotation.angle = a;
02017               Event e(new SimpleValue<Rotation3>(SFRotation::type, t->rotation), timeStamp);
02018               t->rotationChanged.cascadeEvent(&e);
02019             }
02020           }
02021         }
02022       }
02023     }
02024   }
02025 }
