DIRECTORY AIS, CastRays, CoordSys, CSG, CSGGraphics, Graphics, GraphicsColor, IO, Matrix3d, Preprocess3d, Real, Rope, Shading, SV2d, SV3d, SVArtwork, SVBoundBox, SVError, SVFancyRays, SVImage, SVModelTypes, SVRayTypes, SVVector3d; CastRaysImplA: PROGRAM IMPORTS CastRays, CoordSys, CSG, GraphicsColor, IO, Matrix3d, Preprocess3d, Real, Rope, Shading, SVArtwork, SVBoundBox, SVError, SVFancyRays, SVImage, SVVector3d EXPORTS CastRays = BEGIN Artwork: TYPE = SVModelTypes.Artwork; BoundBox: TYPE = SVModelTypes.BoundBox; BoundSphere: TYPE = SVModelTypes.BoundSphere; Camera: TYPE = SVModelTypes.Camera; Color: TYPE = GraphicsColor.Color; Composite: TYPE = SVRayTypes.Composite; CoordSystem: TYPE = SVModelTypes.CoordSystem; CSGTree: TYPE = SVRayTypes.CSGTree; LightSourceList: TYPE = SVModelTypes.LightSourceList; NotifyOfProgressProc: TYPE = CastRays.NotifyOfProgressProc; Point3d: TYPE = SV3d.Point3d; Point2d: TYPE = SV2d.Point2d; PointSetOp: TYPE = SVRayTypes.PointSetOp; Primitive: TYPE = SVRayTypes.Primitive; Matrix4by4: TYPE = SV3d.Matrix4by4; Ray: TYPE = SVRayTypes.Ray; Surface: TYPE = REF ANY; Vector: TYPE = SV3d.Vector; Classification: TYPE = REF ClassificationObj; ClassificationObj: TYPE = SVRayTypes.ClassificationObj; SurfaceArray: TYPE = REF SurfaceArrayObj; SurfaceArrayObj: TYPE = SVRayTypes.SurfaceArrayObj; ParameterArray: TYPE = SVRayTypes.ParameterArray; InOutArray: TYPE = SVRayTypes.InOutArray; NormalArray: TYPE = SVRayTypes.NormalArray; PrimitiveArray: TYPE = SVRayTypes.PrimitiveArray; CompactArray: TYPE = REF CompactArrayObj; CompactArrayObj: TYPE = ARRAY [1..SVRayTypes.maxSceneDepth] OF BOOL; Image: TYPE = REF ImageObj; ImageObj: TYPE = SVImage.ImageObj; globalPoolCount: NAT = 10; globalPoolPointer: NAT; Pool: TYPE = REF PoolObj; PoolObj: TYPE = RECORD [seq: SEQUENCE maxClasses: NAT OF Classification]; globalPool: Pool; globalCompactPoolCount: NAT = 10; globalCompactPoolPointer: NAT; CompactPool: TYPE = REF CompactPoolObj; CompactPoolObj: TYPE = ARRAY[1..globalCompactPoolCount] OF CompactArray; globalCompactPool: CompactPool; WriteStreamComp: PUBLIC PROC [comp: Composite, class: Classification, makeStream: BOOL, f: IO.STREAM, indent: NAT] = { opname: Rope.ROPE; IF NOT makeStream THEN RETURN; Indent[f, indent]; SELECT comp.operation FROM union => opname _ "union"; intersection => opname _ "intersection"; difference => opname _ "difference"; ENDCASE => ERROR; f.PutF["Composite %g [op: %g] returns class: [count: %g]\n", [rope[comp.name]],[rope[opname]], [integer[class.count]]]; WritePrimNames[class, f, indent]; }; -- end of WriteStreamComp Indent: PRIVATE PROC [f: IO.STREAM, indent: NAT] = { FOR i: NAT IN[1..indent] DO f.PutChar[IO.TAB]; ENDLOOP; }; WritePrimNames: PRIVATE PROC [class: Classification, f: IO.STREAM, indent: NAT] = { FOR i: NAT IN[1..class.count] DO Indent[f, indent+1]; f.PutF["%g) %g at t = %g\n", [integer[i]], [rope[class.primitives[i].name]], [real[class.params[i]]]]; ENDLOOP; }; -- end of WritePrimNames WriteStreamPrim: PUBLIC PROC [prim: Primitive, class: Classification, makeStream: BOOL, f: IO.STREAM, indent: NAT] = { IF NOT makeStream THEN RETURN; Indent[f, indent]; f.PutF["Primitive %g returns class: [count: %g]\n", [rope[prim.name]], [integer[class.count]]]; WriteParams[class, f, indent]; }; -- end of WriteStreamPrim WriteParams: PRIVATE PROC [class: Classification, f: IO.STREAM, indent: NAT] = { FOR i: NAT IN[1..class.count] DO Indent[f, indent+1]; f.PutF["%g) %g at t = %g\n", [integer[i]], [rope[class.primitives[i].name]], [real[class.params[i]]]]; ENDLOOP; }; -- 
end of WritePrimNames DoesHit: PROC [class: Classification] RETURNS [BOOL] = { RETURN[class.count > 0 OR class.classifs[1] = TRUE]; }; RayCast: PUBLIC PROC [cameraPoint: Point2d, worldRay: Ray, node: REF ANY, consolidate: BOOL _ TRUE, makeStream: BOOL _ FALSE, f: IO.STREAM _ NIL, indent: NAT _ 0] RETURNS [class: Classification] = { IF node = NIL THEN {class _ EmptyClass[]; RETURN}; WITH node SELECT FROM comp: Composite => { leftClass, rightClass: Classification; leftBoxHit, leftHit, rightBoxHit, rightHit: BOOL; totalMiss: BOOL _ FALSE; boundBox: BoundBox; WITH comp.leftSolid SELECT FROM p: Primitive => boundBox _ p.boundBox; c: Composite => boundBox _ c.boundBox; ENDCASE => ERROR; leftBoxHit _ SVBoundBox.PointInBoundBox[cameraPoint, boundBox]; IF leftBoxHit THEN { leftClass _ RayCast[cameraPoint, worldRay, comp.leftSolid, consolidate, makeStream, f, indent]; leftHit _ DoesHit[leftClass]; } ELSE {leftHit _ FALSE; leftClass _ EmptyClass[]}; IF NOT leftHit THEN IF comp.operation = intersection OR comp.operation = difference THEN { class _ leftClass; WriteStreamComp[comp, class, makeStream, f, indent]; RETURN}; WITH comp.rightSolid SELECT FROM p: Primitive => boundBox _ p.boundBox; c: Composite => boundBox _ c.boundBox; ENDCASE => ERROR; rightBoxHit _ SVBoundBox.PointInBoundBox[cameraPoint, boundBox]; IF NOT rightBoxHit THEN SELECT comp.operation FROM union => {class _ leftClass; WriteStreamComp[comp, class, makeStream, f, indent]; RETURN}; intersection => IF NOT leftHit THEN RETURN[leftClass] ELSE { ReturnClassToPool[leftClass]; class _ EmptyClass[]; WriteStreamComp[comp, class, makeStream, f, indent]; RETURN}; difference => {class _ leftClass; WriteStreamComp[comp, class, makeStream, f, indent]; RETURN}; ENDCASE => ERROR; rightClass _ RayCast[cameraPoint, worldRay, comp.rightSolid, consolidate, makeStream, f, indent]; rightHit _ DoesHit[rightClass]; SELECT comp.operation FROM union => IF rightHit THEN { IF leftHit THEN class _ UnionCombine[leftClass, rightClass, consolidate] ELSE {ReturnClassToPool[leftClass]; class _ rightClass} } ELSE { ReturnClassToPool[rightClass]; class _ leftClass}; intersection => IF rightHit THEN { IF leftHit THEN class _ IntersectionCombine[leftClass, rightClass, consolidate] ELSE {ReturnClassToPool[rightClass]; class _ leftClass;} } ELSE IF leftHit THEN {ReturnClassToPool[leftClass]; class _ rightClass} ELSE {ReturnClassToPool[rightClass]; class _ leftClass}; difference => IF rightHit THEN { IF leftHit THEN class _ DifferenceCombine[leftClass, rightClass, consolidate] ELSE {ReturnClassToPool[rightClass]; class _ leftClass} -- leftClass null } ELSE {ReturnClassToPool[rightClass]; class _ leftClass}; ENDCASE => ERROR; WriteStreamComp[comp, class, makeStream, f, indent]; RETURN}; prim: Primitive => { localRay: Ray; localRay _ CSG.TransformRay[worldRay, prim.worldWRTPrim]; -- (takes a new ray from the pool) class _ prim.rayCast[cameraPoint, localRay, prim.mo, prim]; WriteStreamPrim[prim, class, makeStream, f, 0]; CSG.ReturnRayToPool[localRay]; -- returns ray to pool RETURN}; ENDCASE => ERROR; }; -- end of RayCast RayCastNoBBoxes: PUBLIC PROC [worldRay: Ray, node: REF ANY, consolidate: BOOL _ TRUE, makeStream: BOOL _ FALSE, f: IO.STREAM _ NIL, indent: NAT _ 0] RETURNS [class: Classification] = { IF node = NIL THEN {class _ EmptyClass[]; RETURN}; WITH node SELECT FROM comp: Composite => { leftClass, rightClass: Classification; leftHit, rightHit: BOOL; totalMiss: BOOL _ FALSE; leftClass _ RayCastNoBBoxes[worldRay, comp.leftSolid, consolidate, makeStream, f, indent]; 
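-- A minimal sketch (not from the original file) of how a client might use RayCast. The procedure
-- name ClientTrace and the variables screenPt, worldRay, and root are hypothetical; the calls to
-- RayCast, DoesHit, and ReturnClassToPool follow the signatures defined above.
-- ClientTrace: PROC [screenPt: Point2d, worldRay: Ray, root: REF ANY] RETURNS [hit: BOOL] = {
--   class: Classification;
--   class _ RayCast[screenPt, worldRay, root];   -- defaults: consolidate = TRUE, no stream output
--   hit _ DoesHit[class];                        -- TRUE if any interval of the ray is inside the solid
--   ReturnClassToPool[class];                    -- every Classification must go back to the pool
--   };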
leftHit _ DoesHit[leftClass]; IF NOT leftHit THEN IF comp.operation = intersection OR comp.operation = difference THEN { class _ leftClass; WriteStreamComp[comp, class, makeStream, f, indent]; RETURN}; rightClass _ RayCastNoBBoxes[worldRay, comp.rightSolid, consolidate, makeStream, f, indent]; rightHit _ DoesHit[rightClass]; SELECT comp.operation FROM union => IF rightHit THEN { IF leftHit THEN class _ UnionCombine[leftClass, rightClass, consolidate] ELSE {ReturnClassToPool[leftClass]; class _ rightClass} } ELSE { ReturnClassToPool[rightClass]; class _ leftClass}; intersection => IF rightHit THEN { IF leftHit THEN class _ IntersectionCombine[leftClass, rightClass, consolidate] ELSE {ReturnClassToPool[rightClass]; class _ leftClass;} } ELSE IF leftHit THEN {ReturnClassToPool[leftClass]; class _ rightClass} ELSE {ReturnClassToPool[rightClass]; class _ leftClass}; difference => IF rightHit THEN { IF leftHit THEN class _ DifferenceCombine[leftClass, rightClass, consolidate] ELSE {ReturnClassToPool[rightClass]; class _ leftClass} -- leftClass null } ELSE {ReturnClassToPool[rightClass]; class _ leftClass}; ENDCASE => ERROR; WriteStreamComp[comp, class, makeStream, f, indent]; RETURN}; prim: Primitive => { localRay: Ray; localRay _ CSG.TransformRay[worldRay, prim.worldWRTPrim]; -- (takes a new ray from the pool) class _ prim.rayCastNoBBoxes[localRay, prim.mo, prim]; WriteStreamPrim[prim, class, makeStream, f, 0]; CSG.ReturnRayToPool[localRay]; -- returns ray to pool RETURN}; ENDCASE => ERROR; }; -- end of RayCastNoBboxes HitsTree: PUBLIC PROC [worldRay: Ray, tree: CSGTree] RETURNS [BOOL] = { node: REF ANY _ tree.son; class: Classification; hits: BOOL; class _ RayCastNoBBoxes [worldRay: worldRay, node: node, makeStream: FALSE]; hits _ DoesHit[class]; ReturnClassToPool[class]; RETURN[hits]; }; FirstHit: PUBLIC PROC [worldRay: Ray, tree: CSGTree, useBoundSpheres: BOOL, makeStream: BOOL _ FALSE, f: IO.STREAM _ NIL, indent: NAT _ 0] RETURNS [hits: BOOL, t: REAL] = { node: REF ANY _ tree.son; class: Classification; IF NOT useBoundSpheres THEN class _ RayCastNoBBoxes [worldRay: worldRay, node: node, makeStream: makeStream, f: f, indent: indent] ELSE class _ CastRays.RayCastBoundingSpheres [worldRay: worldRay, node: node, makeStream: makeStream, f: f, indent: indent]; hits _ FALSE; t _ 0.0; FOR i: NAT IN [1..class.count] DO IF NOT class.classifs[i] THEN GOTO BeenOut; REPEAT BeenOut => { hits _ FALSE; FOR j: NAT IN [i+1..class.count+1] DO IF class.classifs[j] THEN { hits _ TRUE; t _ class.params[j-1]; ReturnClassToPool[class]; RETURN; }; ENDLOOP; ReturnClassToPool[class]; RETURN; }; FINISHED => { ReturnClassToPool[class]; RETURN; }; ENDLOOP; }; EmptyClass: PUBLIC PROC RETURNS [class: Classification] = { class _ GetClassFromPool[]; class.count _ 0; class.classifs[1] _ FALSE; }; -- end of EmptyClass SceneExceedsMaximumDepth: SIGNAL = CODE; UnionCombine: PUBLIC PROC [leftClass, rightClass: Classification, consolidate: BOOL] RETURNS [combinedClass: Classification] = { lPtr, rPtr: NAT; combinedClass _ GetClassFromPool[]; lPtr _ rPtr _ 1; combinedClass.count _ leftClass.count + rightClass.count; IF combinedClass.count > SVRayTypes.maxSceneDepth THEN SIGNAL SceneExceedsMaximumDepth; FOR i: NAT IN[1..combinedClass.count] DO IF rPtr > rightClass.count THEN GOTO RPtrWentOver; IF lPtr > leftClass.count THEN GOTO LPtrWentOver; IF leftClass.params[lPtr] < rightClass.params[rPtr] THEN { combinedClass.normals[i] _ leftClass.normals[lPtr]; combinedClass.params[i] _ leftClass.params[lPtr]; combinedClass.surfaces[i] _ 
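-- A hypothetical usage sketch for FirstHit (names sceneTree and eyeRay are assumptions); it shows the
-- intended contract: t is the parameter of the first inward-going surface crossing when hits is TRUE.
-- [hits, t] _ FirstHit[worldRay: eyeRay, tree: sceneTree, useBoundSpheres: TRUE];
-- IF hits THEN surfacePoint _ CSG.EvaluateLocalRay[eyeRay, t];  -- point where the ray first enters the solid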
leftClass.surfaces[lPtr]; combinedClass.primitives[i] _ leftClass.primitives[lPtr]; combinedClass.classifs[i] _ leftClass.classifs[lPtr] OR rightClass.classifs[rPtr]; lPtr _ lPtr + 1; } ELSE { combinedClass.normals[i] _ rightClass.normals[rPtr]; combinedClass.params[i] _ rightClass.params[rPtr]; combinedClass.surfaces[i] _ rightClass.surfaces[rPtr]; combinedClass.primitives[i] _ rightClass.primitives[rPtr]; combinedClass.classifs[i] _ leftClass.classifs[lPtr] OR rightClass.classifs[rPtr]; rPtr _ rPtr + 1; }; REPEAT RPtrWentOver => { -- finish up with lPtr data FOR k: NAT _ i, k+1 UNTIL k > combinedClass.count DO combinedClass.normals[k] _ leftClass.normals[lPtr]; combinedClass.params[k] _ leftClass.params[lPtr]; combinedClass.surfaces[k] _ leftClass.surfaces[lPtr]; combinedClass.primitives[k] _ leftClass.primitives[lPtr]; combinedClass.classifs[k] _ leftClass.classifs[lPtr] OR rightClass.classifs[rPtr]; lPtr _ lPtr + 1; ENDLOOP}; LPtrWentOver => { -- finish up with rPtr data FOR k: NAT _ i, k+1 UNTIL k > combinedClass.count DO combinedClass.normals[k] _ rightClass.normals[rPtr]; combinedClass.params[k] _ rightClass.params[rPtr]; combinedClass.surfaces[k] _ rightClass.surfaces[rPtr]; combinedClass.primitives[k] _ rightClass.primitives[rPtr]; combinedClass.classifs[k] _ leftClass.classifs[lPtr] OR rightClass.classifs[rPtr]; rPtr _ rPtr + 1; ENDLOOP}; ENDLOOP; combinedClass.classifs[combinedClass.count+1] _ leftClass.classifs[lPtr] OR rightClass.classifs[rPtr]; IF consolidate THEN ConsolidateClassification[combinedClass]; ReturnClassToPool[leftClass]; ReturnClassToPool[rightClass]; }; -- end of UnionCombine IntersectionCombine: PUBLIC PROC [leftClass, rightClass: Classification, consolidate: BOOL] RETURNS [combinedClass: Classification] = { lPtr, rPtr: NAT; combinedClass _ GetClassFromPool[]; lPtr _ rPtr _ 1; combinedClass.count _ leftClass.count + rightClass.count; IF combinedClass.count > SVRayTypes.maxSceneDepth THEN SIGNAL SceneExceedsMaximumDepth; FOR i: NAT IN[1..combinedClass.count] DO IF rPtr > rightClass.count THEN GOTO RPtrWentOver; IF lPtr > leftClass.count THEN GOTO LPtrWentOver; IF leftClass.params[lPtr] < rightClass.params[rPtr] THEN { combinedClass.normals[i] _ leftClass.normals[lPtr]; combinedClass.params[i] _ leftClass.params[lPtr]; combinedClass.surfaces[i] _ leftClass.surfaces[lPtr]; combinedClass.primitives[i] _ leftClass.primitives[lPtr]; combinedClass.classifs[i] _ leftClass.classifs[lPtr] AND rightClass.classifs[rPtr]; lPtr _ lPtr + 1; } ELSE { combinedClass.normals[i] _ rightClass.normals[rPtr]; combinedClass.params[i] _ rightClass.params[rPtr]; combinedClass.surfaces[i] _ rightClass.surfaces[rPtr]; combinedClass.primitives[i] _ rightClass.primitives[rPtr]; combinedClass.classifs[i] _ leftClass.classifs[lPtr] AND rightClass.classifs[rPtr]; rPtr _ rPtr + 1; }; REPEAT RPtrWentOver => { -- finish up with lPtr data FOR k: NAT _ i, k+1 UNTIL k > combinedClass.count DO combinedClass.normals[k] _ leftClass.normals[lPtr]; combinedClass.params[k] _ leftClass.params[lPtr]; combinedClass.surfaces[k] _ leftClass.surfaces[lPtr]; combinedClass.primitives[k] _ leftClass.primitives[lPtr]; combinedClass.classifs[k] _ leftClass.classifs[lPtr] AND rightClass.classifs[rPtr]; lPtr _ lPtr + 1; ENDLOOP}; LPtrWentOver => { -- finish up with rPtr data FOR k: NAT _ i, k+1 UNTIL k > combinedClass.count DO combinedClass.normals[k] _ rightClass.normals[rPtr]; combinedClass.params[k] _ rightClass.params[rPtr]; combinedClass.surfaces[k] _ rightClass.surfaces[rPtr]; combinedClass.primitives[k] _ 
rightClass.primitives[rPtr]; combinedClass.classifs[k] _ leftClass.classifs[lPtr] AND rightClass.classifs[rPtr]; rPtr _ rPtr + 1; ENDLOOP}; ENDLOOP; combinedClass.classifs[combinedClass.count+1] _ leftClass.classifs[lPtr] AND rightClass.classifs[rPtr]; IF consolidate THEN ConsolidateClassification[combinedClass]; ReturnClassToPool[leftClass]; ReturnClassToPool[rightClass]; }; -- end of IntersectionCombine DifferenceCombine: PUBLIC PROC [leftClass, rightClass: Classification, consolidate: BOOL] RETURNS [combinedClass: Classification] = { lPtr, rPtr: NAT; combinedClass _ GetClassFromPool[]; IF combinedClass.count > SVRayTypes.maxSceneDepth THEN SIGNAL SceneExceedsMaximumDepth; lPtr _ rPtr _ 1; combinedClass.count _ leftClass.count + rightClass.count; FOR i: NAT IN[1..combinedClass.count] DO IF rPtr > rightClass.count THEN GOTO RPtrWentOver; IF lPtr > leftClass.count THEN GOTO LPtrWentOver; IF leftClass.params[lPtr] < rightClass.params[rPtr] THEN { combinedClass.normals[i] _ leftClass.normals[lPtr]; combinedClass.params[i] _ leftClass.params[lPtr]; combinedClass.surfaces[i] _ leftClass.surfaces[lPtr]; combinedClass.primitives[i] _ leftClass.primitives[lPtr]; combinedClass.classifs[i] _ leftClass.classifs[lPtr] AND NOT rightClass.classifs[rPtr]; lPtr _ lPtr + 1; } ELSE { combinedClass.normals[i] _ SVVector3d.Negate[rightClass.normals[rPtr]]; combinedClass.params[i] _ rightClass.params[rPtr]; combinedClass.surfaces[i] _ rightClass.surfaces[rPtr]; combinedClass.primitives[i] _ rightClass.primitives[rPtr]; combinedClass.classifs[i] _ leftClass.classifs[lPtr] AND NOT rightClass.classifs[rPtr]; rPtr _ rPtr + 1; }; REPEAT RPtrWentOver => { -- finish up with lPtr data FOR k: NAT _ i, k+1 UNTIL k > combinedClass.count DO combinedClass.normals[k] _ leftClass.normals[lPtr]; combinedClass.params[k] _ leftClass.params[lPtr]; combinedClass.surfaces[k] _ leftClass.surfaces[lPtr]; combinedClass.primitives[k] _ leftClass.primitives[lPtr]; combinedClass.classifs[k] _ leftClass.classifs[lPtr] AND NOT rightClass.classifs[rPtr]; lPtr _ lPtr + 1; ENDLOOP}; LPtrWentOver => { -- finish up with rPtr data FOR k: NAT _ i, k+1 UNTIL k > combinedClass.count DO combinedClass.normals[k] _ SVVector3d.Negate[rightClass.normals[rPtr]]; combinedClass.params[k] _ rightClass.params[rPtr]; combinedClass.surfaces[k] _ rightClass.surfaces[rPtr]; combinedClass.primitives[k] _ rightClass.primitives[rPtr]; combinedClass.classifs[k] _ leftClass.classifs[lPtr] AND NOT rightClass.classifs[rPtr]; rPtr _ rPtr + 1; ENDLOOP}; ENDLOOP; combinedClass.classifs[combinedClass.count+1] _ leftClass.classifs[lPtr] AND NOT rightClass.classifs[rPtr]; IF consolidate THEN ConsolidateClassification[combinedClass]; ReturnClassToPool[leftClass]; ReturnClassToPool[rightClass]; }; -- end of DifferenceCombine ConsolidateClassification: PROC [class: Classification] = { currentlyWorkingOn: BOOL; compact: CompactArray _ GetCompactFromPool[]; currentlyWorkingOn _ class.classifs[1]; FOR i: NAT IN[2..class.count+1] DO IF class.classifs[i] = currentlyWorkingOn THEN -- this is not a transition so throw it out compact[i-1] _ FALSE -- don't keep it ELSE {compact[i-1] _ TRUE; currentlyWorkingOn _ class.classifs[i];}; ENDLOOP; CompactClassification[class, compact]; ReturnCompactToPool[compact]; }; -- end of ConsolidateClassification CompactClassification: PROC [class: Classification, compact: CompactArray] = { newCount: NAT; newCount _ 0; FOR i: NAT IN[1..class.count] DO IF compact[i] THEN { newCount _ newCount + 1; class.params[newCount] _ class.params[i]; 
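-- Sketch of the merge invariant shared by UnionCombine, IntersectionCombine, and DifferenceCombine
-- (illustrative only; the two classifications below are hypothetical). Each classification is a sorted
-- list of ray parameters with in/out flags between them; the combines merge the parameter lists and
-- apply OR, AND, or (left AND NOT right) to the in/out flags of each resulting segment.
--   left:  out | t=1.0 | in  | t=3.0 | out        right: out | t=2.0 | in  | t=4.0 | out
--   union:        out | 1.0 | in | 2.0 | in | 3.0 | in | 4.0 | out   (consolidation then drops the
--   non-transitions at 2.0 and 3.0, leaving out | 1.0 | in | 4.0 | out)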
class.classifs[newCount] _ class.classifs[i]; class.normals[newCount] _ class.normals[i]; class.surfaces[newCount] _ class.surfaces[i]; class.primitives[newCount] _ class.primitives[i];}; ENDLOOP; class.classifs[newCount+1] _ class.classifs[class.count+1]; class.count _ newCount; }; SingleRay: PUBLIC PROC [x, y: INTEGER, tree: CSGTree, lightSources: LightSourceList, camera: Camera, makeStream: BOOL _ FALSE, f: IO.STREAM _ NIL] RETURNS [color: Color] = { cameraRay, worldRay: Ray; cameraWRTWorld: Matrix3d.Matrix4by4; boundBox: BoundBox; boundSphere: BoundSphere; screenPoint: Point2d; cameraRay _ CSG.CreateRay[]; [boundBox, boundSphere] _ Preprocess3d.PreprocessForImage[tree, camera]; -- must call this before casting rays screenPoint _ [x, y]; CSG.StuffCameraRay[cameraRay, screenPoint, camera]; cameraWRTWorld _ CoordSys.FindInTermsOfWorld[camera.coordSys]; worldRay _ CSG.TransformRayToWorld[cameraRay, cameraWRTWorld]; -- allocates ray from pool IF makeStream THEN f.PutChar[IO.CR]; color _ TopColorCast[[x,y], worldRay, tree, lightSources, camera, boundBox, boundSphere, makeStream, f, 0]; IF makeStream THEN f.PutChar[IO.CR]; CSG.ReturnRayToPool[worldRay]; }; -- end of SingleRay SingleRay2: PUBLIC PROC [cameraPoint: Point2d, tree: CSGTree, camera: Camera, consolidate: BOOL _ TRUE, makeStream: BOOL _ FALSE, f: IO.STREAM _ NIL] RETURNS [class: Classification, rayWorld: Ray] = { topNode: REF ANY _ tree.son; rayCamera: Ray; cameraWRTWorld: Matrix4by4 _ CoordSys.FindInTermsOfWorld[camera.coordSys]; rayCamera _ CSG.GetRayFromPool[]; CSG.StuffCameraRay[rayCamera, cameraPoint, camera]; rayWorld _ CSG.TransformRayToWorld[rayCamera, cameraWRTWorld]; -- allocates ray from pool CSG.ReturnRayToPool[rayCamera]; class _ RayCast[cameraPoint, rayWorld, topNode, consolidate, makeStream, f, 0]; }; -- end of SingleRay2 NodeToRope: PROC [node: REF ANY, depth: NAT] RETURNS [r: Rope.ROPE] = { IF node = NIL THEN RETURN[NIL]; WITH node SELECT FROM prim: Primitive => {r _ prim.name; RETURN}; comp: Composite => {r _ comp.name; IF depth < 2 THEN RETURN ELSE {r1: Rope.ROPE; r2: Rope.ROPE; leftSon: REF ANY _ comp.leftSolid; rightSon: REF ANY _ comp.rightSolid; r1 _ NodeToRope[leftSon, depth - 1]; r2 _ NodeToRope[rightSon, depth - 1]; r _ Rope.Cat[r,": ",r1,"/",r2]; RETURN}; }; ENDCASE => ERROR; }; -- end of NodeToRope OutputTreeInfo: PRIVATE PROC [node: REF ANY, I: Image, outStream: IO.STREAM] = { debugName, debugRope: Rope.ROPE; debugName _ NodeToRope[node, 2]; debugRope _ IO.PutFR["About to Draw Tree: %g (%g by %g)...", [rope[debugName]], [integer[I.bwWindow.fref.raster.scanCount]], [integer[I.bwWindow.fref.raster.scanLength]]]; SVError.Append[debugRope, TRUE, TRUE]; SVError.Blink[]; }; -- end of OutputTreeInfo GetXStepRayInWorld: PRIVATE PROC [stepSize: REAL, cameraWRTWorld: Matrix4by4, camera: Camera] RETURNS [ray: Ray] = { cameraXStepRay1, cameraXStepRay2: Ray; worldXStepRay1, worldXStepRay2: Ray; cameraXStepRay1 _ CSG.CreateRay[]; IF camera.projection = perspective THEN { CSG.StuffCameraRay[cameraXStepRay1, [0,0], camera]; cameraXStepRay2 _ CSG.CreateRay[]; CSG.StuffCameraRay[cameraXStepRay2, [stepSize,0], camera]; worldXStepRay1 _ CSG.TransformRayToWorld[cameraXStepRay1, cameraWRTWorld]; worldXStepRay2 _ CSG.TransformRayToWorld[cameraXStepRay2, cameraWRTWorld]; ray _ CSG.SubtractRays[worldXStepRay2, worldXStepRay1]; CSG.ReturnRayToPool[worldXStepRay1]; CSG.ReturnRayToPool[worldXStepRay2]; } ELSE { -- orthographic projection CSG.StuffCameraRayLiterally[cameraXStepRay1, [stepSize,0,0], [0,0,0]]; ray _ 
CSG.TransformNewRay[cameraXStepRay1, cameraWRTWorld]; } }; -- end of GetXStepRayInWorld MasterObjectColorFromPrimitive: PRIVATE PROC [primitive: Primitive, t: REAL, worldRay: Ray, primitiveNormal: Vector] RETURNS [color: Color] = { localRay: Ray; point3d: Point3d; artwork: Artwork _ primitive.artwork; SELECT artwork.class FROM justColor => color _ artwork.color; simpleSurface => { localRay _ CSG.TransformRay[worldRay, artwork.coordSys.worldWRTlocal]; point3d _ CSG.EvaluateLocalRay[localRay, t]; CSG.ReturnRayToPool[localRay]; color _ SVArtwork.FindColorAtSurfacePoint[artwork, point3d, primitiveNormal]; }; spaceFunction => { localRay _ CSG.TransformRay[worldRay, artwork.coordSys.worldWRTlocal]; point3d _ CSG.EvaluateLocalRay[localRay, t]; CSG.ReturnRayToPool[localRay]; color _ SVArtwork.FindColorAtSpacePoint[artwork, point3d, primitiveNormal]; }; ENDCASE => ERROR; }; ColorFromClass: PRIVATE PROC [class: Classification, x, y: REAL, lightSources: LightSourceList, camera: Camera, worldRay: Ray, tree: CSGTree, makeStream: BOOL _ FALSE, f: IO.STREAM _ NIL, indent: NAT _ 0] RETURNS [color: Color] = { surf: Surface; surfColor: Color; eyePoint, surfacePt, p: Point3d; d: Vector; primitive: Primitive; visibleLights: LightSourceList; t: REAL; worldNormal, primitiveNormal: Vector; IF class.count = 0 THEN { IF NOT class.classifs[1] THEN color _ tree.backgroundColor ELSE color _ GraphicsColor.black; RETURN}; surf _ class.surfaces[1]; t _ class.params[1];-- the parameter of the ray intersection primitive _ class.primitives[1]; primitiveNormal _ class.normals[1]; surfColor _ MasterObjectColorFromPrimitive[primitive, t, worldRay, primitiveNormal]; worldNormal _ Matrix3d.UpdateVectorWithInverse[primitive.worldWRTPrim, primitiveNormal]; surfacePt _ CSG.EvaluateLocalRay[worldRay, t]; [p, d] _ CSG.GetLocalRay[worldRay]; eyePoint _ SVVector3d.Sub[p, d]; visibleLights _ IF tree.shadows THEN SVFancyRays.VisibleLights[lightSources, surfacePt, tree, camera.useBoundSpheresForShadows, makeStream, f, indent] ELSE lightSources; SELECT primitive.artwork.material FROM chalk => color _ Shading.DiffuseReflectance[worldNormal, surfacePt, surfColor, visibleLights]; plastic => color _ Shading.DiffuseAndSpecularReflectance[eyePoint, worldNormal, surfacePt, surfColor, visibleLights]; ENDCASE => ERROR; }; -- end of ColorFromClass ScanLine: TYPE = REF ScanLineObj; ScanLineObj: TYPE = RECORD [ seq: SEQUENCE lineLen: NAT OF Color]; CreateScanLine: PRIVATE PROC [len: NAT] RETURNS [scanLine: ScanLine] = { scanLine _ NEW[ScanLineObj[len]]; }; CopyScanLine: PRIVATE PROC [from: ScanLine, to: ScanLine] = { FOR i: NAT IN [0..to.lineLen) DO to[i] _ from[i]; ENDLOOP; }; PutColorInScanLine: PRIVATE PROC [scanLine: ScanLine, index: NAT, color: Color] = { scanLine[index] _ color; }; TopColorCast: PRIVATE PROC [cameraPoint: Point2d, worldRay: Ray, tree: CSGTree, lightSources: LightSourceList, camera: Camera, sceneBox: BoundBox, boundSphere: BoundSphere, makeStream: BOOL _ FALSE, f: IO.STREAM _ NIL, indent: NAT _ 0] RETURNS [color: Color] = { node: REF ANY _ tree.son; class: Classification; IF tree.son = NIL THEN RETURN[tree.backgroundColor]; IF camera.useBoundBoxes THEN { IF SVBoundBox.PointInBoundBox[cameraPoint, sceneBox] THEN { finalClassCount, firstClassCount: NAT; firstClassCount _ NumberOfClassesInPool[]; -- for debugging purposes. 
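-- Hedged sketch of the shading dispatch performed by ColorFromClass below (names surfPt, n, lights
-- are assumptions). The artwork's material selects between pure diffuse and diffuse-plus-specular:
-- SELECT prim.artwork.material FROM
--   chalk   => color _ Shading.DiffuseReflectance[n, surfPt, surfColor, lights];
--   plastic => color _ Shading.DiffuseAndSpecularReflectance[eyePoint, n, surfPt, surfColor, lights];
--   ENDCASE => ERROR;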
class _ RayCast[cameraPoint, worldRay, node, TRUE, makeStream, f, indent]; color _ ColorFromClass[class, cameraPoint[1], cameraPoint[2], lightSources, camera, worldRay, tree, makeStream, f, indent]; ReturnClassToPool[class]; finalClassCount _ NumberOfClassesInPool[]; -- for debugging purposes. IF finalClassCount < firstClassCount THEN { f.PutF["WARNING: A Classification was lost while casting a ray at [%g, %g]", [real[cameraPoint[1]]], [real[cameraPoint[2]]]]; }; } ELSE color _ tree.backgroundColor; } ELSE { -- Use Bounding Spheres IF CSG.RayHitsBoundSphere[worldRay, boundSphere] THEN { finalClassCount, firstClassCount: NAT; firstClassCount _ NumberOfClassesInPool[]; -- for debugging purposes. class _ CastRays.RayCastBoundingSpheres[worldRay, node, TRUE, makeStream, f, indent]; color _ ColorFromClass[class, cameraPoint[1], cameraPoint[2], lightSources, camera, worldRay, tree, makeStream, f, indent]; ReturnClassToPool[class]; finalClassCount _ NumberOfClassesInPool[]; -- for debugging purposes. IF finalClassCount < firstClassCount THEN { f.PutF["WARNING: A Classification was lost while casting a ray at [%g, %g]", [real[cameraPoint[1]]], [real[cameraPoint[2]]]]; }; } ELSE color _ tree.backgroundColor; }; }; SetUpRayTrace: PROC [boundBox: BoundBox, camera: Camera, aisRope: Rope.ROPE, bAndWOnly: BOOL, resolution: REAL] RETURNS [I: Image, xSamples, ySamples: NAT, stepSize, xStart, yStart: REAL] = { extentX, extentY, projectionX, projectionY, trueExtentX, trueExtentY: REAL; comment: Rope.ROPE _ IO.PutFR["res: %g dpi", [real[resolution]]]; stepSize _ 72.0/resolution; -- in screen dots per sample IF camera.frame.fullScreen THEN { [I, xSamples, ySamples] _ SVImage.OpenImage[aisRope, bAndWOnly, boundBox.minVert[1], boundBox.minVert[2], boundBox.maxVert[1], boundBox.maxVert[2], resolution, comment]; extentX _ boundBox.maxVert[1] - boundBox.minVert[1]; extentY _ boundBox.maxVert[2] - boundBox.minVert[2]; } ELSE { [I, xSamples, ySamples] _ SVImage.OpenImage[aisRope, bAndWOnly, camera.frame.downLeft[1], camera.frame.downLeft[2], camera.frame.upRight[1], camera.frame.upRight[2], resolution, comment]; extentX _ camera.frame.upRight[1] - camera.frame.downLeft[1]; extentY _ camera.frame.upRight[2] - camera.frame.downLeft[2]; }; trueExtentX _ Real.Float[xSamples-1]*stepSize; trueExtentY _ Real.Float[ySamples-1]*stepSize; projectionX _ (trueExtentX - extentX)/2.0; projectionY _ (trueExtentY - extentY)/2.0; IF camera.frame.fullScreen THEN { xStart _ boundBox.minVert[1] - projectionX; yStart _ boundBox.minVert[2] - projectionY; } ELSE { xStart _ camera.frame.downLeft[1] - projectionX; yStart _ camera.frame.downLeft[2] - projectionY; }; xStart _ xStart - stepSize/2.0; yStart _ yStart - stepSize/2.0; }; -- end of SetUpRayTrace ShutDownRayTrace: PROC [aisRope: Rope.ROPE, I: Image] = { SVImage.CloseImage[I, aisRope]; }; FillScanLine: PROC [startX, stepSize: REAL, xSamples: NAT, y: REAL, cameraXStepRayInWorld: Ray, worldRay: Ray, tree: CSGTree, lightSources: LightSourceList, camera: Camera, boundBox: BoundBox, boundSphere: BoundSphere, scanLine: ScanLine, outStream: IO.STREAM] = { color: Color; thisX: REAL; color _ TopColorCast[[startX, y], worldRay, tree, lightSources, camera, boundBox, boundSphere, FALSE, outStream]; PutColorInScanLine[scanLine, 0, color]; CSG.AddRay[cameraXStepRayInWorld, worldRay]; -- updates worldRay FOR j: INTEGER IN[1..xSamples] DO -- left to right thisX _ startX+Real.Float[j]*stepSize; color _ TopColorCast[[thisX, y], worldRay, tree, lightSources, camera, boundBox, boundSphere]; 
PutColorInScanLine[scanLine, j, color]; CSG.AddRay[cameraXStepRayInWorld, worldRay]; -- updates worldRay ENDLOOP; }; OutputCameraInfo: PRIVATE PROC [camera: Camera, outStream: IO.STREAM] = { IF camera.useBoundBoxes THEN outStream.PutF["Use Bounding Boxes.\n"] ELSE outStream.PutF["Use Bounding Spheres.\n"]; IF camera.useBoundSpheresForShadows THEN outStream.PutF["Use Bound Spheres for Shadows.\n"] ELSE outStream.PutF["Use nothing for Shadows.\n"]; }; DrawTreeWithStartLine: PUBLIC PROC [startLine: REAL, tree: CSGTree, lightSources: LightSourceList, camera: Camera, aisRope: Rope.ROPE, bAndWOnly: BOOL, notify: NotifyOfProgressProc _ NoOpNotifyOfProgress, clientData: REF ANY _ NIL, outStream: IO.STREAM] RETURNS [success: BOOL] = { topNode: REF ANY; -- tree.son. The top active node of the CSG Tree I: Image; boundBox: BoundBox; boundSphere: BoundSphere; cameraWRTWorld: Matrix4by4; cameraXStepRayInWorld, cameraRay, worldRay: Ray; stepSize, xStart, yStart, yMiddleStart, thisY: REAL; xSamples, ySamples, iStart: NAT; color: Color; scanLine1, scanLine2: ScanLine; success _ TRUE; topNode _ tree.son; camera.abort _ FALSE; -- if camera.abort becomes TRUE, close files and return. [boundBox, boundSphere] _ Preprocess3d.PreprocessForImage[tree, camera]; -- must call this before casting rays IF camera.frame.fullScreen AND boundBox = NIL THEN { SVError.Append["Infinite Scene. Please define a bounding frame.", TRUE, TRUE]; SVError.Blink[]; success _ FALSE; RETURN; }; [I, xSamples, ySamples, stepSize, xStart, yStart] _ SetUpRayTrace [boundBox, camera, aisRope, bAndWOnly, camera.resolution]; OutputTreeInfo[topNode, I, outStream]; OutputCameraInfo[camera, outStream]; scanLine1 _ CreateScanLine[xSamples+1]; scanLine2 _ CreateScanLine[xSamples+1]; cameraWRTWorld _ CoordSys.FindInTermsOfWorld[camera.coordSys]; cameraRay _ CSG.CreateRay[]; -- DrawTree recycles its own ray cameraXStepRayInWorld _ GetXStepRayInWorld[stepSize, cameraWRTWorld, camera]; -- works for perspective or orthographic projection iStart _ Real.Fix[(startLine-yStart)/stepSize]; yMiddleStart _ yStart+iStart*stepSize; CSG.StuffCameraRay[cameraRay, [xStart, yMiddleStart], camera]; worldRay _ CSG.TransformRayToWorld[cameraRay, cameraWRTWorld]; -- allocates ray from pool FillScanLine [xStart, stepSize, xSamples, yMiddleStart, cameraXStepRayInWorld, worldRay, tree, lightSources, camera, boundBox, boundSphere, scanLine1, outStream]; CSG.ReturnRayToPool[worldRay]; FOR i: INTEGER IN[iStart+1..ySamples] DO -- For each ray bottom to top. IF camera.abort = TRUE THEN { SVImage.CloseImage[I, aisRope]; SVError.Append["CastRays aborted. Partial files saved.", TRUE, TRUE]; RETURN; }; notify[yStart+i*stepSize, xStart, yStart, xStart+xSamples*stepSize, yStart+ySamples*stepSize, clientData]; -- tell the user interface that we have just cast line i - 1. 
thisY _ yStart+i*stepSize; CSG.StuffCameraRay[cameraRay, [xStart, thisY], camera]; worldRay _ CSG.TransformRayToWorld[cameraRay, cameraWRTWorld]; -- allocates ray from pool FillScanLine [xStart, stepSize, xSamples, thisY, cameraXStepRayInWorld, worldRay, tree, lightSources, camera, boundBox, boundSphere, scanLine2, outStream]; CSG.ReturnRayToPool[worldRay]; FOR k: NAT IN[0..xSamples) DO color _ ColorAverage[scanLine1[k], scanLine1[k+1], scanLine2[k], scanLine2[k+1]]; SVImage.PutImage[I, i, k, color, xSamples, ySamples]; ENDLOOP; CopyScanLine [scanLine2, scanLine1]; ENDLOOP; ShutDownRayTrace[aisRope, I]; }; -- end of DrawTreeWithStartLine DrawTree: PUBLIC PROC [tree: CSGTree, lightSources: LightSourceList, camera: Camera, aisRope: Rope.ROPE, bAndWOnly: BOOL, notify: NotifyOfProgressProc _ NoOpNotifyOfProgress, clientData: REF ANY _ NIL, outStream: IO.STREAM] RETURNS [success: BOOL] = { topNode: REF ANY; -- tree.son. The top active node of the CSG Tree I: Image; boundBox: BoundBox; boundSphere: BoundSphere; cameraWRTWorld: Matrix4by4; cameraXStepRayInWorld, cameraRay, worldRay: Ray; stepSize, xStart, yStart, thisY: REAL; xSamples, ySamples: NAT; color: Color; scanLine1, scanLine2: ScanLine; success _ TRUE; topNode _ tree.son; camera.abort _ FALSE; -- if camera.abort becomes TRUE, close files and return. [boundBox, boundSphere] _ Preprocess3d.PreprocessForImage[tree, camera]; -- must call this before casting rays IF camera.frame.fullScreen AND boundBox = NIL THEN { SVError.Append["Infinite Scene. Please define a bounding frame.", TRUE, TRUE]; SVError.Blink[]; success _ FALSE; RETURN; }; [I, xSamples, ySamples, stepSize, xStart, yStart] _ SetUpRayTrace [boundBox, camera, aisRope, bAndWOnly, camera.resolution]; OutputTreeInfo[topNode, I, outStream]; OutputCameraInfo[camera, outStream]; scanLine1 _ CreateScanLine[xSamples+1]; scanLine2 _ CreateScanLine[xSamples+1]; cameraWRTWorld _ CoordSys.FindInTermsOfWorld[camera.coordSys]; cameraRay _ CSG.CreateRay[]; -- DrawTree recycles its own ray cameraXStepRayInWorld _ GetXStepRayInWorld[stepSize, cameraWRTWorld, camera]; -- works for perspective or orthographic projection CSG.StuffCameraRay[cameraRay, [xStart, yStart], camera]; worldRay _ CSG.TransformRayToWorld[cameraRay, cameraWRTWorld]; -- allocates ray from pool FillScanLine [xStart, stepSize, xSamples, yStart, cameraXStepRayInWorld, worldRay, tree, lightSources, camera, boundBox, boundSphere, scanLine1, outStream]; CSG.ReturnRayToPool[worldRay]; FOR i: INTEGER IN[1..ySamples] DO -- For each ray bottom to top. IF camera.abort = TRUE THEN { SVImage.CloseImage[I, aisRope]; SVError.Append["CastRays aborted. Partial files saved.", TRUE, TRUE]; RETURN; }; notify[yStart+i*stepSize, xStart, yStart, xStart+xSamples*stepSize, yStart+ySamples*stepSize, clientData]; -- tell the user interface that we have just cast line i - 1. 
thisY _ yStart+i*stepSize; CSG.StuffCameraRay[cameraRay, [xStart, thisY], camera]; worldRay _ CSG.TransformRayToWorld[cameraRay, cameraWRTWorld]; -- allocates ray from pool FillScanLine [xStart, stepSize, xSamples, thisY, cameraXStepRayInWorld, worldRay, tree, lightSources, camera, boundBox, boundSphere, scanLine2, outStream]; CSG.ReturnRayToPool[worldRay]; FOR k: NAT IN[0..xSamples) DO color _ ColorAverage[scanLine1[k], scanLine1[k+1], scanLine2[k], scanLine2[k+1]]; SVImage.PutImage[I, i, k, color, xSamples, ySamples]; ENDLOOP; CopyScanLine [scanLine2, scanLine1]; ENDLOOP; ShutDownRayTrace[aisRope, I]; }; -- end of DrawTree MoreOrLessTheSame: PRIVATE PROC [a, b, c, d: REAL] RETURNS [BOOL] = { min, max: REAL; min _ max _ a; IF b < min THEN min _ b ELSE IF b > max THEN max _ b; IF c < min THEN min _ c ELSE IF c > max THEN max _ c; IF d < min THEN min _ d ELSE IF d > max THEN max _ d; IF max - min > 10 THEN RETURN[FALSE] ELSE RETURN[TRUE]; }; -- end of MoreOrLessTheSame ColorAverage: PRIVATE PROC [a, b, c, d: Color] RETURNS [avgColor: Color] = { ar, ag, ab, br, bg, bb, cr, cg, cb, dr, dg, db, red, green, blue: REAL; [ar, ag, ab] _ GraphicsColor.ColorToRGB[a]; [br, bg, bb] _ GraphicsColor.ColorToRGB[b]; [cr, cg, cb] _ GraphicsColor.ColorToRGB[c]; [dr, dg, db] _ GraphicsColor.ColorToRGB[d]; red _ (ar + br + cr + dr)/4.0; green _ (ag + bg + cg + dg)/4.0; blue _ (ab + bb + cb + db)/4.0; avgColor _ GraphicsColor.RGBToColor[red, green, blue]; }; -- end of ColorAverage NoOpNotifyOfProgress: PUBLIC NotifyOfProgressProc = {}; GetClassFromPool: PUBLIC PROC RETURNS [class: Classification] = { IF globalPoolPointer = 0 THEN AddAClass[]; class _ globalPool[globalPoolPointer - 1]; globalPoolPointer _ globalPoolPointer - 1; }; ClassPoolEmpty: SIGNAL = CODE; ReturnClassToPool: PUBLIC PROC [class: Classification] = { IF globalPoolPointer = globalPool.maxClasses THEN SIGNAL ClassPoolFull; globalPoolPointer _ globalPoolPointer + 1; globalPool[globalPoolPointer - 1] _ class; }; ClassPoolFull: SIGNAL = CODE; NumberOfClassesInPool: PUBLIC PROC RETURNS [count: NAT] = { count _ globalPoolPointer; }; AddAClass: PRIVATE PROC = { newPool: Pool _ NEW[PoolObj[globalPool.maxClasses+1]]; IF globalPool.maxClasses > 50 THEN {-- there must be a leak in the classification system SVError.Append["CastRaysImplA Warning: More than 50 Classifications!!", TRUE, TRUE]; SVError.Blink[]; }; FOR i: NAT IN [0..globalPoolPointer) DO newPool[i] _ globalPool[i]; ENDLOOP; globalPoolPointer _ globalPoolPointer + 1; globalPool _ newPool; globalPool[globalPoolPointer - 1] _ NEW[ClassificationObj]; globalPool[globalPoolPointer - 1].surfaces _ NEW[SurfaceArrayObj]; }; GetCompactFromPool: PROC RETURNS [compact: CompactArray] = { IF globalCompactPoolPointer = 0 THEN SIGNAL CompactPoolEmpty; compact _ globalCompactPool[globalCompactPoolPointer]; globalCompactPoolPointer _ globalCompactPoolPointer -1; }; CompactPoolEmpty: SIGNAL = CODE; ReturnCompactToPool: PROC [compact: CompactArray] = { IF globalCompactPoolPointer = globalCompactPoolCount THEN SIGNAL CompactPoolFull; globalCompactPoolPointer _ globalCompactPoolPointer + 1; globalCompactPool[globalCompactPoolPointer] _ compact; }; CompactPoolFull: SIGNAL = CODE; MakeClassAMiss: PUBLIC PROC [class: Classification] = { class.count _ 0; class.classifs[1] _ FALSE; }; Init: PROC = { globalPool _ NEW[PoolObj[globalPoolCount]]; FOR i: NAT IN[0..globalPoolCount) DO globalPool[i] _ NEW[ClassificationObj]; globalPool[i].surfaces _ NEW[SurfaceArrayObj]; ENDLOOP; globalPoolPointer _ globalPoolCount; 
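-- Sketch of the pool discipline used by GetClassFromPool/ReturnClassToPool (the stack picture is an
-- assumption drawn from the code below): globalPoolPointer counts the free Classifications; Get pops
-- from the top, growing the pool via AddAClass when empty, and Return pushes back, signalling
-- ClassPoolFull if a class is returned twice. Every Get must therefore be paired with exactly one Return.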
globalCompactPool _ NEW[CompactPoolObj]; FOR i: NAT IN[1..globalCompactPoolCount] DO globalCompactPool[i] _ NEW[CompactArrayObj]; ENDLOOP; globalCompactPoolPointer _ globalCompactPoolCount; }; Init[]; END. File: CastRaysImplA.mesa. Author: Eric Bier in the summer of 1982. Copyright c 1984 by Xerox Corporation. All rights reserved. Last edited by Bier on January 16, 1985 3:46:40 pm PST. Contents: The ray casting (as opposed to tree building) part of the CSG package. CSG.mesa builds the trees. WriteStreamComp: RayCast is about to return class. Write the name of comp and summarize the classification. If not makeStream, then do nothing. RayCast: The main ray casting procedure. The scene ray must be in WORLD coordinates before this procedure is called. Before casting each ray, see if the ray will be in the bounding box of the son node. For optimizing, here is the plan: 1) Check the ray against the left bound box. Set leftBoxHit if appropriate. 2) If leftBoxHit, then cast the ray. Set leftHit if appropriate. 3) If not leftHit and comp.operation is intersection or difference, return a miss; leftClass is (or is equivalent to) EmptyClass[]. 4) On a hit, or for union, do the right box test. Set rightBoxHit if appropriate (we don't have to test for any other state; it is the only one left). 5) On a right-box miss, return: leftClass for difference, EmptyClass for intersection, leftClass for union. 6) Else cast the right ray. This is either a union (with or without a left miss) or an intersection/difference with an initial left hit. 7) Return rightClass, a combination, or empty, as appropriate. One optimization: each primitive will keep track of the last ray that hit it and the vector corresponding to a unit x step in the CAMERA coordinate system. The ray will contain information about whether or not it is the first ray of a new line. RayCastNoBBoxes: Like RayCast, but ignores any bounding boxes that were computed. This is useful if the ray does not originate from the screen (as for computing shadows). Of course, bounding spheres would still be useful in this case. The plan: 1) Cast the left ray. Set leftHit if appropriate. 2) If not leftHit and comp.operation is intersection or difference, return a miss; leftClass is (or is equivalent to) EmptyClass[]. 3) On a hit, or for union, cast the right ray. 4) Return rightClass, a combination, or empty, as appropriate. FirstHit: Like HitsTree, but returns the parameter value at the first inward-going hit, if any. If we start inside of an object, wait until we are out. Each primitive shape must have a procedure here which can classify a ray with respect to it.
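-- Hedged sketch of the Init-time invariant described above (counts are the module's own constants):
-- after Init, globalPool holds globalPoolCount fresh Classifications (each with its own SurfaceArray)
-- and globalCompactPool holds globalCompactPoolCount CompactArrays, with both pool pointers set to
-- "full", so the first GetClassFromPool/GetCompactFromPool calls succeed without growing anything.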
Combine: PUBLIC PROC [leftClass, rightClass: Classification, op: PointSetOp, consolidate: BOOL] RETURNS [combinedClass: Classification] = { SELECT op FROM union => combinedClass _ UnionCombine[leftClass, rightClass, consolidate]; intersection => combinedClass _ IntersectionCombine[leftClass, rightClass, consolidate]; difference => combinedClass _ DifferenceCombine[leftClass, rightClass, consolidate]; ENDCASE => ERROR; }; UnionCombine: Merge the two sorted lists together, classifying the segments by the OR of the classifs for each segment. IntersectionCombine: Merge the two sorted lists together, classifying the segments by the AND of the classifs for each segment. DifferenceCombine: Merge the two sorted lists together, classifying the segments by (left AND NOT right) of the classifs for each segment. ConsolidateClassification: Combine adjacent regions which have the same classif and throw out the surface and parameter information at those points. Recall that ClassificationObj is RECORD [count, params, surfaces, classifs, topNormal]. CompactClassification: compact[i] is TRUE if we should keep class.*[i], FALSE otherwise. Order is preserved among the items we keep. The in-out value on the far side of the last param that changed in-out will always be the last value given in the class. SingleRay: Cast a single ray at the scene. Report the results to the output stream and find the color at that point. The camera ray is with respect to the camera (perspective); from it we find the WORLD ray. SingleRay2: Cast a single ray at the scene from the given cameraPoint (a point in CAMERA coordinates on the z = 0 plane). The client must be sure that the tree has been preprocessed so RayCast will have bounding boxes to work with (see Preprocess3d). The client must be sure to call ReturnClassToPool[class] when he is done with it. The rays here are not allocated from the pool. ColorFromClass: We are given a classification, a list of light sources, a camera, the screen point from which the ray was shot, and the ray in WORLD coordinates, from which we can derive the eyepoint. To produce an image with shadows, we proceed as follows: make a new list of light sources which includes only those light sources visible from the surface point, then proceed in the usual way. Since worldRay is in WORLD coordinates, this finds eyePoint in WORLD coordinates. ColorCast: PRIVATE PROC [cameraPoint: Point2d, worldRay: Ray, tree: CSGTree, lightSources: LightSourceList, camera: Camera, makeStream: BOOL _ FALSE, f: IO.STREAM _ NIL, indent: NAT _ 0] RETURNS [color: Color] = { class: Classification; class _ RayCast[cameraPoint, worldRay, tree.son, makeStream, f, indent]; color _ ColorFromClass[class, cameraPoint[1], cameraPoint[2], lightSources, camera, worldRay, tree]; ReturnClassToPool[class]; }; SetUpRayTrace: Look at the frame of the camera. If frame.fullScreen is TRUE, use the bounding box of the scene. If it is FALSE, use the frame parameters to determine the bounding box of our ray tracing; in this case, we should check each ray against the scene's bounding box before casting it. We know the size of the box which we wish to ray cast and the resolution of the casting in samples per inch. Our box size is in screen dots (at 72 per inch). We wish to know screen dots per sample. (Extent/72)*resolution = inches*(samples per inch) = samples, and Extent/samples = screen dots/sample, as required. Compactly, then, we need 72/resolution screen dots per sample and Extent/(screen dots per sample) for the total number of samples. Now for the hard part: boundBox tells us the outline of the initial box.
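-- A hedged usage sketch for the general Combine dispatcher above (the variables lClass, rClass are
-- hypothetical); it simply selects one of the three specific combines by point-set operation:
-- merged: Classification _ Combine[lClass, rClass, difference, TRUE];
-- -- equivalent to DifferenceCombine[lClass, rClass, TRUE]; both arguments are consumed (returned
-- -- to the pool by the combine), so only merged needs a later ReturnClassToPool.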
trueExtentX represents the actual extent from the left of the first pixel to the right of the last pixel; likewise for trueExtentY. We subtract the initial extent from the true extent and split the difference. Subtracting the result from the original bounding box origin gives the ray tracing grid outline. Now (xStart, yStart) is the center of the origin pixel; subtracting another half a pixel gives us the lower left hand corner of that pixel. FillScanLine: Cast the rays of one y scan line, starting with the first ray. DrawTreeWithStartLine: Like DrawTree, but we ignore all values of "i" until yStart+i*stepSize >= startLine, so we start with i = (startLine-yStart)/stepSize. Both DrawTree and DrawTreeWithStartLine proceed as follows: interpret the results of the cast ray; calculate the current transform matrices and bounding boxes; cast the first scan line; then, once we have two complete scan lines, average values in fours and write them to the AIS file. An earlier version tested IF MoreOrLessTheSame[scanLine1[k], scanLine1[k+1], scanLine2[k], scanLine2[k+1]] and otherwise called color _ CastMoreRays[ul: scanLine1[k], ur: scanLine1[k+1], dl: scanLine2[k], dr: scanLine2[k+1], left: k, right: k+1, top: i, bottom: i-1, topNode: topNode, focalLength: focalLength, lightSources: lightSources, cameraWRTWorld: cameraWRTWorld]; CastMoreRays: PRIVATE PROC [ul, ur, dl, dr: Color, left, right, top, bottom: REAL, tree: CSGTree, focalLength: REAL, lightSources: LightSourceList, camera: Camera] RETURNS [color: Color] = { -- Cast rays at the left, right, top, bottom, and middle of the square; use rays ul, ur, dl, and dr. This further subdivides each square for a more accurate intensity value.
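-- Illustrative grid arithmetic for the paragraph above (numbers are hypothetical): if extentX = 100
-- screen dots and trueExtentX = 100.8, then projectionX = (100.8 - 100)/2.0 = 0.4, so
-- xStart = minVert[1] - 0.4 centers the slightly-too-wide sample grid on the requested box, and the
-- final xStart _ xStart - stepSize/2.0 moves from the center of the origin pixel to its lower left corner.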
cameraRay: Ray _ CSG.GetRayFromPool[]; worldRay: Ray; cameraWRTWorld: Matrix4by4 _ camera.coordSys.mat; leftColor, rightColor, topColor, bottomColor, middleColor: Color; midLeftY, midTopX: REAL; midLeftY _ (top-bottom)/2.0; midTopX _ (right-left)/2.0; cameraRay.basePt _ [left, midLeftY, 0]; cameraRay.direction _ [left, midLeftY, focalLength]; worldRay _ CSG.TransformRay[cameraRay, cameraWRTWorld]; -- allocates ray from pool leftColor _ ColorCast[[left, midLeftY], worldRay, tree, lightSources, camera]; CSG.ReturnRayToPool[worldRay]; cameraRay.basePt _ [right, midLeftY, 0]; cameraRay.direction _ [right, midLeftY, focalLength]; worldRay _ CSG.TransformRay[cameraRay, cameraWRTWorld]; -- allocates ray from pool rightColor _ ColorCast[[right, midLeftY], worldRay, tree, lightSources, camera]; CSG.ReturnRayToPool[worldRay]; cameraRay.basePt _ [midTopX, top, 0]; cameraRay.direction _ [midTopX, top, focalLength]; worldRay _ CSG.TransformRay[cameraRay, cameraWRTWorld]; -- allocates ray from pool topColor _ ColorCast[[midTopX, top], worldRay, tree, lightSources, camera]; CSG.ReturnRayToPool[worldRay]; cameraRay.basePt _ [midTopX, bottom, 0]; cameraRay.direction _ [midTopX, bottom, focalLength]; worldRay _ CSG.TransformRay[cameraRay, cameraWRTWorld]; -- allocates ray from pool bottomColor _ ColorCast[[midTopX, bottom], worldRay, tree, lightSources, camera]; CSG.ReturnRayToPool[worldRay]; cameraRay.basePt _ [midTopX, midLeftY, 0]; cameraRay.direction _ [midTopX, midLeftY, focalLength]; worldRay _ CSG.TransformRay[cameraRay, cameraWRTWorld]; -- allocates ray from pool middleColor _ ColorCast[[midTopX, midLeftY], worldRay, tree, lightSources, camera]; CSG.ReturnRayToPool[worldRay]; color _ ColorAverage[ ColorAverage[ul, topColor, leftColor, middleColor], ColorAverage[topColor, ur, middleColor, rightColor], ColorAverage[leftColor, middleColor, dl, bottomColor], ColorAverage[middleColor, rightColor, bottomColor, dr] ]; CSG.ReturnRayToPool[cameraRay]; }; -- end of CastMoreRays AddAClass: This scene contains sections complicated enough that the original allocation of classifications does not cover the most complicated rays. Add another classification to the pool. Init: Create a Classification Pool and a Compact Pool.
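-- A hedged note on CastMoreRays above: the five extra samples (left, right, top, bottom, middle of
-- the pixel square) are averaged with the four corner colors already computed, giving a 3x3-style
-- supersample of the square. The weighting below (hypothetical arithmetic) shows the middle sample
-- contributes to all four quarter-averages, so it carries 4/16 of the final color while each corner
-- carries 1/16 and each edge sample 2/16.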
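-- End of CastRaysImplA.mesa.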