/*
 ************************************************************************
 *  
 * Java 3D configuration file for a cave environment with head tracking and
 * stereo viewing.  This cave consists of 3 projectors with 3 screens to the
 * left, front, and right of the user, all at 90 degrees to each other.
 * 
 * The projectors in Sun's VirtualPortal sample site are actually turned
 * on their sides to get more height.  Screen 0 is rotated 90 degrees
 * counter-clockwise, while screens 1 and 2 are rotated 90 degrees
 * clockwise.
 * 
 ************************************************************************
 */

// Configure the head tracker.
// 
(NewDevice      tracker1 org.scijava.java3d.input.LogitechTracker)
(DeviceProperty tracker1 SerialPort "/dev/ttya") // String values must be quoted.
(DeviceProperty tracker1 TransmitterBaseline            0.4600)
(DeviceProperty tracker1 TransmitterLeftLeg             0.4400)
(DeviceProperty tracker1 TransmitterCalibrationDistance 0.4120)

// Configure an InputDevice to use for a 6-degree-of-freedom (6DOF) wand.
// 
(NewDevice      tracker2 org.scijava.java3d.input.LogitechTracker)
(DeviceProperty tracker2 SerialPort "/dev/ttyb")
(DeviceProperty tracker2 ReceiverBaseline     0.0700)
(DeviceProperty tracker2 ReceiverLeftLeg      0.0625)
(DeviceProperty tracker2 ReceiverHeight       0.0510)
(DeviceProperty tracker2 ReceiverTopOffset    0.0000)

// Make the tracker2 device a slave of the tracker1 device.
(DeviceProperty tracker1 Slave (Device tracker2))

// Create logical names for the head tracker and wand sensors.  The last
// argument is the sensor's index in the input device.
// 
(NewSensor head     tracker1 0)
(NewSensor sensor6d tracker2 0)
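
// These logical names can also be looked up from application code once the
// universe is built; for example, ConfiguredUniverse exposes a map from sensor
// names to Sensor objects (see getNamedSensors() in the javadoc for your build).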

// Create new screen objects and associate them with logical names and numbers.
// These numbers are used as indices to retrieve the AWT GraphicsDevice from
// the array that GraphicsEnvironment.getScreenDevices() returns.
// 
// NOTE: The GraphicsDevice order in the array is specific to the local
// site and display system.
// 
(NewScreen left    0)
(NewScreen center  1)
(NewScreen right   2)
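
// To find the right order for a new site, the devices can be listed with a
// short AWT sketch like the following (illustration only, not part of this
// configuration file):
//
//   GraphicsDevice[] devices =                     // java.awt.GraphicsDevice
//       GraphicsEnvironment.getLocalGraphicsEnvironment().getScreenDevices();
//   for (int i = 0; i < devices.length; i++)
//       System.out.println(i + ": " + devices[i].getIDstring());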


// Set the available image areas as well as their position and orientation
// relative to the tracker base.  From the viewpoint of a user standing
// within this VirtualPortal site and facing the center screen, the tracker
// base is along the vertical midline of the screen, 0.248 meters down from
// the top edge, and 1.340 meters in front of it.  The tracker base is
// oriented so that its +X axis points to the left, its +Y axis points toward
// the screen, and its +Z axis points toward the floor.
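// 
// Each TrackerBaseToImagePlate value below is the top three rows of a 4x4
// homogeneous transform (with an implied bottom row of 0 0 0 1): the first
// three columns give the rotation from tracker base coordinates to image
// plate coordinates, and the last column gives the translation in meters.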
// 
(ScreenProperty        left    PhysicalScreenWidth     2.480)
(ScreenProperty        left    PhysicalScreenHeight    1.705)
(ScreenProperty        left    WindowSize              NoBorderFullScreen)
(ScreenProperty        left    TrackerBaseToImagePlate
                                (( 0.0  0.0 -1.0  2.230)
                                 ( 0.0 -1.0  0.0  1.340)
                                 (-1.0  0.0  0.0  0.885)))

(ScreenProperty        center  PhysicalScreenWidth     2.485)
(ScreenProperty        center  PhysicalScreenHeight    1.745)
(ScreenProperty        center  WindowSize              NoBorderFullScreen)
(ScreenProperty        center  TrackerBaseToImagePlate
                                (( 0.0  0.0  1.0  0.248)
                                 (-1.0  0.0  0.0  0.885)
                                 ( 0.0 -1.0  0.0  1.340)))

(ScreenProperty        right   PhysicalScreenWidth     2.480)
(ScreenProperty        right   PhysicalScreenHeight    1.775)
(ScreenProperty        right   WindowSize              NoBorderFullScreen)
(ScreenProperty        right   TrackerBaseToImagePlate
                                (( 0.0  0.0  1.0  0.2488)
                                 ( 0.0 -1.0  0.0  1.340)
                                 ( 1.0  0.0  0.0  0.860)))

// Create a physical environment.  This contains the available input devices,
// audio devices, and sensors, and defines the coexistence coordinate system
// for mapping between the virtual and physical worlds.
// 
(NewPhysicalEnvironment VirtualPortal)

// Register the input devices defined in this file and the sensor that will
// drive head tracking.
// 
(PhysicalEnvironmentProperty VirtualPortal InputDevice tracker1)
(PhysicalEnvironmentProperty VirtualPortal InputDevice tracker2)
(PhysicalEnvironmentProperty VirtualPortal HeadTracker head)

// Set the location of the center of coexistence relative to the tracker base.
// Here it is set to the center of the center screen.  The default view attach
// policy of NOMINAL_SCREEN used by ConfiguredUniverse will place the origin of
// the view platform in coexistence coordinates at the center of coexistence.
// 
(PhysicalEnvironmentProperty VirtualPortal
                             CoexistenceToTrackerBase
                                ((-1.0  0.0  0.0  0.000)
                                 ( 0.0  0.0 -1.0  1.340)
                                 ( 0.0 -1.0  0.0  0.994)))
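
// The translation column above places coexistence at the center of the center
// screen: 1.340 meters along tracker base +Y reaches the screen plane, and,
// since the projector is on its side and the 2.485 meter image plate width
// runs vertically, 2.485/2 - 0.248 ≈ 0.994 meters along +Z (toward the floor)
// reaches the screen's vertical center.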

// Define the physical body.  The head origin is halfway between the eyes, with
// X extending to the right, Y up, and positive Z extending into the skull.
// 
(NewPhysicalBody      LabRat)       
(PhysicalBodyProperty LabRat StereoEyeSeparation .07)
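
// Other body calibration values can be supplied the same way; for example,
// nominal eye height could be set as below (property name assumed here; check
// the configuration documentation for your build):
// 
//(PhysicalBodyProperty LabRat NominalEyeHeightFromGround 1.68)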

// Define the position and orientation of the head relative to the tracker
// mounted on the head.
// 
(PhysicalBodyProperty LabRat HeadToHeadTracker 
                                 ((-1.0  0.0  0.0 0.00)
                                  ( 0.0  0.0 -1.0 0.05)
                                  ( 0.0 -1.0  0.0 0.11)))

// Create a view platform behavior for the 6DOF sensor.
// 
(NewViewPlatformBehavior vpb org.scijava.java3d.utils.behaviors.vp.WandViewBehavior)

(ViewPlatformBehaviorProperty vpb Sensor6D sensor6d)
(ViewPlatformBehaviorProperty vpb ButtonAction6D 2 GrabView)
(ViewPlatformBehaviorProperty vpb ButtonAction6D 1 TranslateForward)
(ViewPlatformBehaviorProperty vpb ButtonAction6D 0 TranslateBackward)

// Default normal translation speed is 0.1 physical meters per second.
(ViewPlatformBehaviorProperty vpb TranslationSpeed
                              1.0 PhysicalMeters PerSecond)
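
// The speed can also be given in other units and time bases if the behavior
// supports them (token names assumed from the WandViewBehavior documentation):
// 
//(ViewPlatformBehaviorProperty vpb TranslationSpeed 2.0 VirtualUnits PerSecond)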

// Default rotation coordinates are Sensor.
(ViewPlatformBehaviorProperty vpb RotationCoords Head)

// Hotspot and nominal sensor rotation for the modified RedBarron joystick.
(SensorProperty sensor6d Hotspot (0.00 0.6 0.00))
(ViewPlatformBehaviorProperty vpb NominalSensorRotation
                                  ((-1.0  0.0  0.0)
                                   ( 0.0  0.0 -1.0)
                                   ( 0.0 -1.0  0.0)))

// Default 6DOF sensor echo is Gnomon
(ViewPlatformBehaviorProperty vpb EchoSize 0.015) 
(ViewPlatformBehaviorProperty vpb EchoType Beam) 
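
// The echo can be suppressed entirely if the behavior supports it (value name
// assumed):
// 
//(ViewPlatformBehaviorProperty vpb EchoType None)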

// Default 6DOF sensor echo color is white
(ViewPlatformBehaviorProperty vpb EchoColor 1.0 0.7 0.0)

// Default 6DOF sensor echo transparency is 0.0 (opaque)
(ViewPlatformBehaviorProperty vpb EchoTransparency 0.4)

// Create a new view platform and set the view platform behavior.
// 
(NewViewPlatform vp)
(ViewPlatformProperty vp ViewPlatformBehavior vpb)

// Now define the view.
// 
(NewView       view0)
(ViewProperty  view0   Screen                  left)
(ViewProperty  view0   Screen                  center)
(ViewProperty  view0   Screen                  right)
(ViewProperty  view0   PhysicalBody            LabRat)
(ViewProperty  view0   PhysicalEnvironment     VirtualPortal)
(ViewProperty  view0   ViewPlatform            vp)

// Set the screen scale.  This is the scale factor from virtual to physical
// coordinates.
// 
(ViewProperty  view0   ScreenScalePolicy       SCALE_SCREEN_SIZE)
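
// With SCALE_SCREEN_SIZE the scale is derived from the physical screen width
// (physicalScreenWidth / 2), so roughly 1.24 meters per virtual unit for the
// screens defined above.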

// Alternative for explicit scaling.
// 
//(ViewProperty  view0   ScreenScalePolicy       SCALE_EXPLICIT)
//(ViewProperty  view0   ScreenScale             5.00)

// Enable stereo viewing.  Enable head tracking to get the position of the eyes
// with respect to coexistence.  Boolean values may be specified as either
// true, True, false, or False.
// 
(ViewProperty    view0   StereoEnable            true)
(ViewProperty    view0   TrackingEnable          True)
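
/*
 * Loading sketch (illustration only; assumes the org.scijava.java3d packaging
 * used by the classes above and a hypothetical createSceneGraph() method
 * supplied by the application):
 *
 *   import java.net.URL;
 *   import org.scijava.java3d.BranchGroup;
 *   import org.scijava.java3d.utils.universe.ConfiguredUniverse;
 *
 *   public class CaveDemo {
 *       public static void main(String[] args) {
 *           // Run with -Dj3d.configURL=file:cave.cfg so that getConfigURL()
 *           // resolves to this configuration file.
 *           URL configURL = ConfiguredUniverse.getConfigURL();
 *           ConfiguredUniverse universe = new ConfiguredUniverse(configURL);
 *           BranchGroup scene = createSceneGraph();  // hypothetical builder
 *           universe.addBranchGraph(scene);
 *       }
 *   }
 */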