DIRECTORY AIS, AtomButtonsTypes, BasicTime, CastRays, CoordSys, CSG, CSGGraphics, Feedback, Imager, ImagerColor, IO, Matrix3d, Preprocess3d, Real, Rope, Shading, SV2d, SV3d, SVArtwork, SVBasicTypes, SVBoundBox, SVFancyRays, SVImage, SVModelTypes, SVRayTypes, SVVector3d, ViewerClasses; CastRaysImplA: CEDAR PROGRAM IMPORTS BasicTime, CastRays, CoordSys, CSG, ImagerColor, IO, Matrix3d, Preprocess3d, Real, Rope, Shading, SVArtwork, SVBoundBox, Feedback, SVFancyRays, SVImage, SVVector3d EXPORTS CastRays = BEGIN Artwork: TYPE = SVModelTypes.Artwork; BoundBox: TYPE = SVBasicTypes.BoundBox; BoundSphere: TYPE = SVBasicTypes.BoundSphere; Camera: TYPE = SVModelTypes.Camera; Color: TYPE = Imager.Color; Composite: TYPE = SVRayTypes.Composite; CoordSystem: TYPE = SVModelTypes.CoordSystem; CSGTree: TYPE = SVRayTypes.CSGTree; FeedbackData: TYPE = AtomButtonsTypes.FeedbackData; LightSourceList: TYPE = SVModelTypes.LightSourceList; NotifyOfProgressProc: TYPE = CastRays.NotifyOfProgressProc; Point3d: TYPE = SV3d.Point3d; Point2d: TYPE = SV2d.Point2d; PointSetOp: TYPE = SVRayTypes.PointSetOp; Primitive: TYPE = SVRayTypes.Primitive; Matrix4by4: TYPE = SV3d.Matrix4by4; Ray: TYPE = SVRayTypes.Ray; Surface: TYPE = REF ANY; Vector3d: TYPE = SV3d.Vector3d; Viewer: TYPE = ViewerClasses.Viewer; Classification: TYPE = REF ClassificationObj; ClassificationObj: TYPE = SVRayTypes.ClassificationObj; SurfaceArray: TYPE = REF SurfaceArrayObj; SurfaceArrayObj: TYPE = SVRayTypes.SurfaceArrayObj; ParameterArray: TYPE = SVRayTypes.ParameterArray; InOutArray: TYPE = SVRayTypes.InOutArray; NormalArray: TYPE = SVRayTypes.NormalArray; PrimitiveArray: TYPE = SVRayTypes.PrimitiveArray; CompactArray: TYPE = REF CompactArrayObj; CompactArrayObj: TYPE = ARRAY [1..SVRayTypes.maxSceneDepth] OF BOOL; Image: TYPE = REF ImageObj; ImageObj: TYPE = SVImage.ImageObj; globalPoolCount: NAT = 10; globalPoolPointer: NAT; Pool: TYPE = REF PoolObj; PoolObj: TYPE = RECORD [seq: SEQUENCE maxClasses: NAT OF Classification]; 
-- Global classification pools.  GetClassFromPool/ReturnClassToPool (elsewhere in this
-- module) hand these out; every Classification produced below must eventually go back.
globalPool: Pool;
globalCompactPoolCount: NAT = 10;
globalCompactPoolPointer: NAT;
CompactPool: TYPE = REF CompactPoolObj;
CompactPoolObj: TYPE = ARRAY[1..globalCompactPoolCount] OF CompactArray;
globalCompactPool: CompactPool;

WriteStreamComp: PUBLIC PROC [comp: Composite, class: Classification, feedback: FeedbackData, makeStream: BOOL, indent: NAT] = {
-- Debug trace: writes one indented line to the $Solidviews typescript describing
-- composite node comp and the classification it produced.  No-op unless makeStream.
opname, leftName, rightName: Rope.ROPE;
f: IO.STREAM;
IF NOT makeStream THEN RETURN;
f _ Feedback.GetTypescriptStream[$Solidviews];
Indent[f, indent];
SELECT comp.operation FROM
union => opname _ "union";
intersection => opname _ "intersection";
difference => opname _ "difference";
ENDCASE => ERROR;
WITH comp.leftSolid SELECT FROM
p: Primitive => leftName _ p.name;
c: Composite => leftName _ c.name;
ENDCASE => ERROR;
WITH comp.rightSolid SELECT FROM
p: Primitive => rightName _ p.name;
c: Composite => rightName _ c.name;
ENDCASE => ERROR;
f.PutF["Composite %g [op: %g] (%g %g) returns class: [count: %g]\n", [rope[comp.name]],[rope[opname]], [rope[leftName]], [rope[rightName]], [integer[class.count]]];
WritePrimNames[class, f, indent];
}; -- end of WriteStreamComp

Indent: PROC [f: IO.STREAM, indent: NAT] = {
-- Writes <indent> tab characters to f.
FOR i: NAT IN[1..indent] DO f.PutChar[IO.TAB]; ENDLOOP;
};

WritePrimNames: PROC [class: Classification, f: IO.STREAM, indent: NAT] = {
-- Lists each surface crossing in class: index, primitive name, and ray parameter t.
FOR i: NAT IN[1..class.count] DO
Indent[f, indent+1];
f.PutF["%g) %g at t = %g\n", [integer[i]], [rope[class.primitives[i].name]], [real[class.params[i]]]];
ENDLOOP;
}; -- end of WritePrimNames

WriteStreamPrim: PUBLIC PROC [prim: Primitive, class: Classification, feedback: FeedbackData, makeStream: BOOL, indent: NAT] = {
-- Debug trace: like WriteStreamComp but for a primitive leaf.  No-op unless makeStream.
f: IO.STREAM;
IF NOT makeStream THEN RETURN;
f _ Feedback.GetTypescriptStream[$Solidviews];
Indent[f, indent];
f.PutF["Primitive %g returns class: [count: %g]\n", [rope[prim.name]], [integer[class.count]]];
WriteParams[class, f, indent];
}; -- end of WriteStreamPrim

WriteParams: PROC [class: Classification, f: IO.STREAM, indent: NAT] = {
-- Same per-crossing listing as WritePrimNames.
FOR i: NAT
IN[1..class.count] DO
Indent[f, indent+1];
f.PutF["%g) %g at t = %g\n", [integer[i]], [rope[class.primitives[i].name]], [real[class.params[i]]]];
ENDLOOP;
}; -- end of WriteParams

DoesHit: PROC [class: Classification] RETURNS [BOOL] = {
-- A classification is a hit if the ray crosses some surface (count > 0) or
-- classifs[1] says the ray starts inside the solid.
RETURN[class.count > 0 OR class.classifs[1] = TRUE];
};

RayCast: PUBLIC PROC [cameraPoint: Point2d, worldRay: Ray, node: REF ANY, consolidate: BOOL _ TRUE, feedback: FeedbackData, makeStream: BOOL _ FALSE, indent: NAT _ 0] RETURNS [class: Classification] = {
-- Recursively classifies worldRay against the CSG tree rooted at node, pruning
-- subtrees whose 2d bound box does not contain cameraPoint.  The returned
-- Classification comes from the pool; the caller must eventually return it.
IF node = NIL THEN {class _ EmptyClass[]; RETURN};
WITH node SELECT FROM
comp: Composite => {
leftClass, rightClass: Classification;
leftBoxHit, leftHit, rightBoxHit, rightHit: BOOL;
totalMiss: BOOL _ FALSE; -- NOTE(review): totalMiss is never read; dead local.
boundBox: BoundBox;
WITH comp.leftSolid SELECT FROM
p: Primitive => boundBox _ p.boundBox;
c: Composite => boundBox _ c.boundBox;
ENDCASE => ERROR;
leftBoxHit _ SVBoundBox.PointInBoundBox[cameraPoint, boundBox];
IF leftBoxHit THEN {
leftClass _ RayCast[cameraPoint, worldRay, comp.leftSolid, consolidate, feedback, makeStream, indent];
leftHit _ DoesHit[leftClass];
}
ELSE {leftHit _ FALSE; leftClass _ EmptyClass[]};
-- If the left subtree misses, intersection and difference must miss too.
IF NOT leftHit THEN
IF comp.operation = intersection OR comp.operation = difference THEN {
class _ leftClass;
WriteStreamComp[comp, class, feedback, makeStream, indent];
RETURN};
WITH comp.rightSolid SELECT FROM
p: Primitive => boundBox _ p.boundBox;
c: Composite => boundBox _ c.boundBox;
ENDCASE => ERROR;
rightBoxHit _ SVBoundBox.PointInBoundBox[cameraPoint, boundBox];
-- Right subtree pruned: result depends only on the left classification.
IF NOT rightBoxHit THEN
SELECT comp.operation FROM
union => {class _ leftClass; WriteStreamComp[comp, class, feedback, makeStream, indent]; RETURN};
intersection => IF NOT leftHit THEN RETURN[leftClass]
ELSE {
ReturnClassToPool[leftClass];
class _ EmptyClass[];
WriteStreamComp[comp, class, feedback, makeStream, indent];
RETURN};
difference => {class _ leftClass; WriteStreamComp[comp, class, feedback, makeStream, indent]; RETURN};
ENDCASE => ERROR;
rightClass _ RayCast[cameraPoint, worldRay,
comp.rightSolid, consolidate, feedback, makeStream, indent];
rightHit _ DoesHit[rightClass];
-- Combine the subtree classifications; whichever class is not kept (or consumed
-- by a Combine) goes back to the pool.
SELECT comp.operation FROM
union => IF rightHit THEN {
IF leftHit THEN class _ UnionCombine[leftClass, rightClass, consolidate]
ELSE {ReturnClassToPool[leftClass]; class _ rightClass}
}
ELSE {
ReturnClassToPool[rightClass]; class _ leftClass};
intersection => IF rightHit THEN {
IF leftHit THEN class _ IntersectionCombine[leftClass, rightClass, consolidate]
ELSE {ReturnClassToPool[rightClass]; class _ leftClass;}
}
ELSE IF leftHit THEN {ReturnClassToPool[leftClass]; class _ rightClass}
ELSE {ReturnClassToPool[rightClass]; class _ leftClass};
difference => IF rightHit THEN {
IF leftHit THEN class _ DifferenceCombine[leftClass, rightClass, consolidate]
ELSE {ReturnClassToPool[rightClass]; class _ leftClass} -- leftClass null
}
ELSE {ReturnClassToPool[rightClass]; class _ leftClass};
ENDCASE => ERROR;
WriteStreamComp[comp, class, feedback, makeStream, indent];
RETURN};
prim: Primitive => {
localRay: Ray;
IF prim.ignoreMe THEN {class _ CastRays.GetClassFromPool[]; CastRays.MakeClassAMiss[class]; RETURN};
localRay _ CSG.TransformRay[worldRay, prim.worldWRTPrim]; -- (takes a new ray from the pool)
class _ prim.rayCast[cameraPoint, localRay, prim.mo, prim];
WriteStreamPrim[prim, class, feedback, makeStream, 0];
CSG.ReturnRayToPool[localRay]; -- returns ray to pool
RETURN};
ENDCASE => ERROR;
}; -- end of RayCast

RayCastNoBBoxes: PUBLIC PROC [worldRay: Ray, node: REF ANY, consolidate: BOOL _ TRUE, feedback: FeedbackData, makeStream: BOOL _ FALSE, indent: NAT _ 0] RETURNS [class: Classification] = {
-- Like RayCast but with no bound-box pruning (no cameraPoint applies; used for
-- arbitrary rays such as shadow and visibility probes).
IF node = NIL THEN {class _ EmptyClass[]; RETURN};
WITH node SELECT FROM
comp: Composite => {
leftClass, rightClass: Classification;
leftHit, rightHit: BOOL;
totalMiss: BOOL _ FALSE; -- NOTE(review): totalMiss is never read; dead local.
leftClass _ RayCastNoBBoxes[worldRay, comp.leftSolid, consolidate, feedback, makeStream, indent];
leftHit _ DoesHit[leftClass];
-- If the left subtree misses, intersection and difference must miss too.
IF NOT leftHit THEN
IF comp.operation = intersection OR comp.operation =
difference THEN {
class _ leftClass;
WriteStreamComp[comp, class, feedback, makeStream, indent];
RETURN};
rightClass _ RayCastNoBBoxes[worldRay, comp.rightSolid, consolidate, feedback, makeStream, indent];
rightHit _ DoesHit[rightClass];
-- Combine the subtree classifications; the class not kept goes back to the pool.
SELECT comp.operation FROM
union => IF rightHit THEN {
IF leftHit THEN class _ UnionCombine[leftClass, rightClass, consolidate]
ELSE {ReturnClassToPool[leftClass]; class _ rightClass}
}
ELSE {
ReturnClassToPool[rightClass]; class _ leftClass};
intersection => IF rightHit THEN {
IF leftHit THEN class _ IntersectionCombine[leftClass, rightClass, consolidate]
ELSE {ReturnClassToPool[rightClass]; class _ leftClass;}
}
ELSE IF leftHit THEN {ReturnClassToPool[leftClass]; class _ rightClass}
ELSE {ReturnClassToPool[rightClass]; class _ leftClass};
difference => IF rightHit THEN {
IF leftHit THEN class _ DifferenceCombine[leftClass, rightClass, consolidate]
ELSE {ReturnClassToPool[rightClass]; class _ leftClass} -- leftClass null
}
ELSE {ReturnClassToPool[rightClass]; class _ leftClass};
ENDCASE => ERROR;
WriteStreamComp[comp, class, feedback, makeStream, indent];
RETURN};
prim: Primitive => {
localRay: Ray;
IF prim.ignoreMe THEN {class _ CastRays.GetClassFromPool[]; CastRays.MakeClassAMiss[class]; RETURN};
localRay _ CSG.TransformRay[worldRay, prim.worldWRTPrim]; -- (takes a new ray from the pool)
class _ prim.rayCastNoBBoxes[localRay, prim.mo, prim];
WriteStreamPrim[prim, class, feedback, makeStream, 0];
CSG.ReturnRayToPool[localRay]; -- returns ray to pool
RETURN};
ENDCASE => ERROR;
}; -- end of RayCastNoBboxes

HitsTree: PUBLIC PROC [worldRay: Ray, tree: CSGTree] RETURNS [BOOL] = {
-- TRUE iff worldRay hits any solid in tree.  The classification is recycled here.
node: REF ANY _ tree.son;
class: Classification;
hits: BOOL;
class _ RayCastNoBBoxes [worldRay: worldRay, node: node, feedback: NIL, makeStream: FALSE];
hits _ DoesHit[class];
ReturnClassToPool[class];
RETURN[hits];
};

FirstHit: PUBLIC PROC [worldRay: Ray, tree: CSGTree, useBoundSpheres: BOOL, feedback: FeedbackData, makeStream: BOOL _ FALSE, indent: NAT _ 0]
RETURNS [hits: BOOL, t: REAL] = {
-- Finds the first point where worldRay enters the tree's solid: skips the leading
-- run of "inside" intervals, then returns the parameter of the first crossing
-- that re-enters the solid.  hits=FALSE, t=0.0 when there is none.
node: REF ANY _ tree.son;
class: Classification;
IF NOT useBoundSpheres THEN class _ RayCastNoBBoxes [worldRay: worldRay, node: node, makeStream: makeStream, feedback: feedback, indent: indent]
ELSE class _ CastRays.RayCastBoundingSpheres [worldRay: worldRay, node: node, feedback: feedback, makeStream: makeStream, indent: indent];
hits _ FALSE;
t _ 0.0;
FOR i: NAT IN [1..class.count] DO
IF NOT class.classifs[i] THEN GOTO BeenOut;
REPEAT
BeenOut => {
hits _ FALSE;
-- Interval i is the first one outside the solid; the next TRUE interval (if
-- any) begins at the crossing whose parameter is params[j-1].
FOR j: NAT IN [i+1..class.count+1] DO
IF class.classifs[j] THEN {
hits _ TRUE;
t _ class.params[j-1];
ReturnClassToPool[class];
RETURN;
};
ENDLOOP;
ReturnClassToPool[class];
RETURN;
};
FINISHED => {
ReturnClassToPool[class];
RETURN;
};
ENDLOOP;
};

EmptyClass: PUBLIC PROC RETURNS [class: Classification] = {
-- A pool classification representing a total miss: no crossings, not inside.
class _ GetClassFromPool[];
class.count _ 0;
class.classifs[1] _ FALSE;
}; -- end of EmptyClass

SceneExceedsMaximumDepth: SIGNAL = CODE;

UnionCombine: PUBLIC PROC [leftClass, rightClass: Classification, consolidate: BOOL] RETURNS [combinedClass: Classification] = {
-- Merge-sorts the two crossing lists by ray parameter; the in/out state of the
-- union is the OR of the operand states.  Both operand classes return to the pool.
lPtr, rPtr: NAT;
combinedClass _ GetClassFromPool[];
lPtr _ rPtr _ 1;
combinedClass.count _ leftClass.count + rightClass.count;
IF combinedClass.count > SVRayTypes.maxSceneDepth THEN SIGNAL SceneExceedsMaximumDepth;
FOR i: NAT IN[1..combinedClass.count] DO
IF rPtr > rightClass.count THEN GOTO RPtrWentOver;
IF lPtr > leftClass.count THEN GOTO LPtrWentOver;
IF leftClass.params[lPtr] < rightClass.params[rPtr] THEN {
combinedClass.normals[i] _ leftClass.normals[lPtr];
combinedClass.params[i] _ leftClass.params[lPtr];
combinedClass.surfaces[i] _ leftClass.surfaces[lPtr];
combinedClass.primitives[i] _ leftClass.primitives[lPtr];
combinedClass.classifs[i] _ leftClass.classifs[lPtr] OR rightClass.classifs[rPtr];
lPtr _ lPtr + 1;
}
ELSE {
combinedClass.normals[i] _ rightClass.normals[rPtr];
combinedClass.params[i] _ rightClass.params[rPtr];
combinedClass.surfaces[i] _ rightClass.surfaces[rPtr];
combinedClass.primitives[i] _ rightClass.primitives[rPtr];
combinedClass.classifs[i] _ leftClass.classifs[lPtr] OR rightClass.classifs[rPtr];
rPtr _ rPtr + 1;
};
REPEAT
RPtrWentOver => { -- finish up with lPtr data
FOR k: NAT _ i, k+1 UNTIL k > combinedClass.count DO
combinedClass.normals[k] _ leftClass.normals[lPtr];
combinedClass.params[k] _ leftClass.params[lPtr];
combinedClass.surfaces[k] _ leftClass.surfaces[lPtr];
combinedClass.primitives[k] _ leftClass.primitives[lPtr];
combinedClass.classifs[k] _ leftClass.classifs[lPtr] OR rightClass.classifs[rPtr];
lPtr _ lPtr + 1;
ENDLOOP};
LPtrWentOver => { -- finish up with rPtr data
FOR k: NAT _ i, k+1 UNTIL k > combinedClass.count DO
combinedClass.normals[k] _ rightClass.normals[rPtr];
combinedClass.params[k] _ rightClass.params[rPtr];
combinedClass.surfaces[k] _ rightClass.surfaces[rPtr];
combinedClass.primitives[k] _ rightClass.primitives[rPtr];
combinedClass.classifs[k] _ leftClass.classifs[lPtr] OR rightClass.classifs[rPtr];
rPtr _ rPtr + 1;
ENDLOOP};
ENDLOOP;
-- In/out state beyond the last crossing.
combinedClass.classifs[combinedClass.count+1] _ leftClass.classifs[lPtr] OR rightClass.classifs[rPtr];
IF consolidate THEN ConsolidateClassification[combinedClass];
ReturnClassToPool[leftClass];
ReturnClassToPool[rightClass];
}; -- end of UnionCombine

IntersectionCombine: PUBLIC PROC [leftClass, rightClass: Classification, consolidate: BOOL] RETURNS [combinedClass: Classification] = {
-- Like UnionCombine, but the in/out state is the AND of the operand states.
lPtr, rPtr: NAT;
combinedClass _ GetClassFromPool[];
lPtr _ rPtr _ 1;
combinedClass.count _ leftClass.count + rightClass.count;
IF combinedClass.count > SVRayTypes.maxSceneDepth THEN SIGNAL SceneExceedsMaximumDepth;
FOR i: NAT IN[1..combinedClass.count] DO
IF rPtr > rightClass.count THEN GOTO RPtrWentOver;
IF lPtr > leftClass.count THEN GOTO LPtrWentOver;
IF leftClass.params[lPtr] < rightClass.params[rPtr] THEN {
combinedClass.normals[i] _ leftClass.normals[lPtr];
combinedClass.params[i] _ leftClass.params[lPtr];
combinedClass.surfaces[i] _ leftClass.surfaces[lPtr];
combinedClass.primitives[i] _ leftClass.primitives[lPtr];
combinedClass.classifs[i] _ leftClass.classifs[lPtr] AND rightClass.classifs[rPtr];
lPtr _ lPtr + 1;
}
ELSE {
combinedClass.normals[i] _ rightClass.normals[rPtr];
combinedClass.params[i] _ rightClass.params[rPtr];
combinedClass.surfaces[i] _ rightClass.surfaces[rPtr];
combinedClass.primitives[i] _ rightClass.primitives[rPtr];
combinedClass.classifs[i] _ leftClass.classifs[lPtr] AND rightClass.classifs[rPtr];
rPtr _ rPtr + 1;
};
REPEAT
RPtrWentOver => { -- finish up with lPtr data
FOR k: NAT _ i, k+1 UNTIL k > combinedClass.count DO
combinedClass.normals[k] _ leftClass.normals[lPtr];
combinedClass.params[k] _ leftClass.params[lPtr];
combinedClass.surfaces[k] _ leftClass.surfaces[lPtr];
combinedClass.primitives[k] _ leftClass.primitives[lPtr];
combinedClass.classifs[k] _ leftClass.classifs[lPtr] AND rightClass.classifs[rPtr];
lPtr _ lPtr + 1;
ENDLOOP};
LPtrWentOver => { -- finish up with rPtr data
FOR k: NAT _ i, k+1 UNTIL k > combinedClass.count DO
combinedClass.normals[k] _ rightClass.normals[rPtr];
combinedClass.params[k] _ rightClass.params[rPtr];
combinedClass.surfaces[k] _ rightClass.surfaces[rPtr];
combinedClass.primitives[k] _ rightClass.primitives[rPtr];
combinedClass.classifs[k] _ leftClass.classifs[lPtr] AND rightClass.classifs[rPtr];
rPtr _ rPtr + 1;
ENDLOOP};
ENDLOOP;
-- In/out state beyond the last crossing.
combinedClass.classifs[combinedClass.count+1] _ leftClass.classifs[lPtr] AND rightClass.classifs[rPtr];
IF consolidate THEN ConsolidateClassification[combinedClass];
ReturnClassToPool[leftClass];
ReturnClassToPool[rightClass];
}; -- end of IntersectionCombine

DifferenceCombine: PUBLIC PROC [leftClass, rightClass: Classification, consolidate: BOOL] RETURNS [combinedClass: Classification] = {
-- Like UnionCombine, but the in/out state is left AND NOT right, and normals
-- taken from right-operand crossings are negated (the subtracted surface faces
-- the other way).  Both operand classes return to the pool.
lPtr, rPtr: NAT;
combinedClass _ GetClassFromPool[];
lPtr _ rPtr _ 1;
combinedClass.count _ leftClass.count + rightClass.count;
-- BUG FIX: this depth check formerly preceded the assignment of
-- combinedClass.count, so it tested whatever stale count the pooled object
-- still carried.  Check the freshly computed count, as UnionCombine and
-- IntersectionCombine do.
IF combinedClass.count > SVRayTypes.maxSceneDepth THEN SIGNAL SceneExceedsMaximumDepth;
FOR i: NAT
IN[1..combinedClass.count] DO
IF rPtr > rightClass.count THEN GOTO RPtrWentOver;
IF lPtr > leftClass.count THEN GOTO LPtrWentOver;
IF leftClass.params[lPtr] < rightClass.params[rPtr] THEN {
combinedClass.normals[i] _ leftClass.normals[lPtr];
combinedClass.params[i] _ leftClass.params[lPtr];
combinedClass.surfaces[i] _ leftClass.surfaces[lPtr];
combinedClass.primitives[i] _ leftClass.primitives[lPtr];
combinedClass.classifs[i] _ leftClass.classifs[lPtr] AND NOT rightClass.classifs[rPtr];
lPtr _ lPtr + 1;
}
ELSE {
combinedClass.normals[i] _ SVVector3d.Negate[rightClass.normals[rPtr]]; -- difference flips right-operand normals
combinedClass.params[i] _ rightClass.params[rPtr];
combinedClass.surfaces[i] _ rightClass.surfaces[rPtr];
combinedClass.primitives[i] _ rightClass.primitives[rPtr];
combinedClass.classifs[i] _ leftClass.classifs[lPtr] AND NOT rightClass.classifs[rPtr];
rPtr _ rPtr + 1;
};
REPEAT
RPtrWentOver => { -- finish up with lPtr data
FOR k: NAT _ i, k+1 UNTIL k > combinedClass.count DO
combinedClass.normals[k] _ leftClass.normals[lPtr];
combinedClass.params[k] _ leftClass.params[lPtr];
combinedClass.surfaces[k] _ leftClass.surfaces[lPtr];
combinedClass.primitives[k] _ leftClass.primitives[lPtr];
combinedClass.classifs[k] _ leftClass.classifs[lPtr] AND NOT rightClass.classifs[rPtr];
lPtr _ lPtr + 1;
ENDLOOP};
LPtrWentOver => { -- finish up with rPtr data
FOR k: NAT _ i, k+1 UNTIL k > combinedClass.count DO
combinedClass.normals[k] _ SVVector3d.Negate[rightClass.normals[rPtr]];
combinedClass.params[k] _ rightClass.params[rPtr];
combinedClass.surfaces[k] _ rightClass.surfaces[rPtr];
combinedClass.primitives[k] _ rightClass.primitives[rPtr];
combinedClass.classifs[k] _ leftClass.classifs[lPtr] AND NOT rightClass.classifs[rPtr];
rPtr _ rPtr + 1;
ENDLOOP};
ENDLOOP;
-- In/out state beyond the last crossing.
combinedClass.classifs[combinedClass.count+1] _ leftClass.classifs[lPtr] AND NOT rightClass.classifs[rPtr];
IF consolidate THEN ConsolidateClassification[combinedClass];
ReturnClassToPool[leftClass];
ReturnClassToPool[rightClass];
}; -- end of DifferenceCombine

ConsolidateClassification: PROC [class: Classification] = {
-- Removes crossings that do not change the in/out state (internal boundaries
-- produced by the combines), compacting the classification in place.
currentlyWorkingOn: BOOL;
compact: CompactArray _ GetCompactFromPool[];
currentlyWorkingOn _ class.classifs[1];
FOR i: NAT IN[2..class.count+1] DO
IF class.classifs[i] = currentlyWorkingOn THEN -- this is not a transition so throw it out
compact[i-1] _ FALSE -- don't keep it
ELSE {compact[i-1] _ TRUE; currentlyWorkingOn _ class.classifs[i];};
ENDLOOP;
CompactClassification[class, compact];
ReturnCompactToPool[compact];
}; -- end of ConsolidateClassification

CompactClassification: PROC [class: Classification, compact: CompactArray] = {
-- Keeps only the entries flagged TRUE in compact, updating class.count.
newCount: NAT;
newCount _ 0;
FOR i: NAT IN[1..class.count] DO
IF compact[i] THEN {
newCount _ newCount + 1;
class.params[newCount] _ class.params[i];
class.classifs[newCount] _ class.classifs[i];
class.normals[newCount] _ class.normals[i];
class.surfaces[newCount] _ class.surfaces[i];
class.primitives[newCount] _ class.primitives[i];};
ENDLOOP;
class.classifs[newCount+1] _ class.classifs[class.count+1];
class.count _ newCount;
};

SingleRay: PUBLIC PROC [cameraPoint: Point2d, tree: CSGTree, lightSources: LightSourceList, camera: Camera, feedback: FeedbackData, makeStream: BOOL _ FALSE] RETURNS [color: Color] = {
-- Casts one ray through cameraPoint and returns the fully shaded color.
cameraRay, worldRay: Ray;
cameraWRTWorld: Matrix3d.Matrix4by4;
boundBox: BoundBox;
boundSphere: BoundSphere;
r,g,b: REAL;
cameraRay _ CSG.CreateRay[];
[boundBox, boundSphere] _ Preprocess3d.PreprocessForImage[tree, camera]; -- must call this before casting rays
CSG.StuffCameraRay[cameraRay, cameraPoint, camera];
cameraWRTWorld _ CoordSys.FindInTermsOfWorld[camera.coordSys];
worldRay _ CSG.TransformRayToWorld[cameraRay, cameraWRTWorld]; -- allocates ray from pool
[r,g,b] _ TopColorCast[cameraPoint, worldRay, tree, lightSources, camera, boundBox, boundSphere, feedback, makeStream, 0];
color _ Shading.NormalizeRGB[r,g,b];
CSG.ReturnRayToPool[worldRay];
}; -- end of SingleRay

SingleRay2: PUBLIC PROC [cameraPoint: Point2d, tree: CSGTree, camera: Camera,
consolidate: BOOL _ TRUE, feedback: FeedbackData, makeStream: BOOL _ FALSE] RETURNS [class: Classification, rayWorld: Ray] = {
-- Casts one ray and returns the raw classification plus the world-space ray.
-- The caller owns both and must return them to their pools.
topNode: REF ANY _ tree.son;
rayCamera: Ray;
cameraWRTWorld: Matrix4by4 _ CoordSys.FindInTermsOfWorld[camera.coordSys];
rayCamera _ CSG.GetRayFromPool[];
CSG.StuffCameraRay[rayCamera, cameraPoint, camera];
rayWorld _ CSG.TransformRayToWorld[rayCamera, cameraWRTWorld]; -- allocates ray from pool
CSG.ReturnRayToPool[rayCamera];
class _ RayCast[cameraPoint, rayWorld, topNode, consolidate, feedback, makeStream, 0];
}; -- end of SingleRay2

NodeToRope: PROC [node: REF ANY, depth: NAT] RETURNS [r: Rope.ROPE] = {
-- Debug name for a subtree, descending at most <depth> levels.
IF node = NIL THEN RETURN[NIL];
WITH node SELECT FROM
prim: Primitive => {r _ prim.name; RETURN};
comp: Composite => {r _ comp.name;
IF depth < 2 THEN RETURN
ELSE {r1: Rope.ROPE;
r2: Rope.ROPE;
leftSon: REF ANY _ comp.leftSolid;
rightSon: REF ANY _ comp.rightSolid;
r1 _ NodeToRope[leftSon, depth - 1];
r2 _ NodeToRope[rightSon, depth - 1];
r _ Rope.Cat[r,": ",r1,"/",r2];
RETURN};
};
ENDCASE => ERROR;
}; -- end of NodeToRope

OutputTreeInfo: PROC [node: REF ANY, I: Image, feedback: FeedbackData] = {
-- Announces the tree about to be drawn and its raster dimensions.
debugName: Rope.ROPE;
debugName _ NodeToRope[node, 2];
Feedback.PutF[feedback, oneLiner, "About to Draw Tree: %g (%g by %g)...", [rope[debugName]], [integer[I.bwWindow.fref.raster.scanCount]], [integer[I.bwWindow.fref.raster.scanLength]]];
Feedback.Blink[feedback];
}; -- end of OutputTreeInfo

GetXStepRayInWorld: PROC [stepSize: REAL, cameraWRTWorld: Matrix4by4, camera: Camera] RETURNS [ray: Ray] = {
-- Returns the world-space ray increment for one horizontal sample step, so
-- scanline rays can be advanced by addition (CSG.AddRay) rather than re-stuffed.
cameraXStepRay1, cameraXStepRay2: Ray;
worldXStepRay1, worldXStepRay2: Ray;
cameraXStepRay1 _ CSG.CreateRay[];
IF camera.projection = perspective THEN {
-- Perspective: difference of two transformed rays one step apart.
CSG.StuffCameraRay[cameraXStepRay1, [0,0], camera];
cameraXStepRay2 _ CSG.CreateRay[];
CSG.StuffCameraRay[cameraXStepRay2, [stepSize,0], camera];
worldXStepRay1 _ CSG.TransformRayToWorld[cameraXStepRay1, cameraWRTWorld];
worldXStepRay2 _ CSG.TransformRayToWorld[cameraXStepRay2, cameraWRTWorld];
ray _
CSG.SubtractRays[worldXStepRay2, worldXStepRay1];
CSG.ReturnRayToPool[worldXStepRay1];
CSG.ReturnRayToPool[worldXStepRay2];
}
ELSE { -- orthographic projection
CSG.StuffCameraRayLiterally[cameraXStepRay1, [stepSize,0,0], [0,0,0]];
ray _ CSG.TransformNewRay[cameraXStepRay1, cameraWRTWorld];
}
}; -- end of GetXStepRayInWorld

MasterObjectColorFromPrimitive: PROC [primitive: Primitive, t: REAL, worldRay: Ray, primitiveNormal: Vector3d] RETURNS [color: Color] = {
-- Evaluates the primitive's artwork at the hit point (parameter t along worldRay).
localRay: Ray;
point3d: Point3d;
artwork: Artwork _ primitive.artwork;
SELECT artwork.class FROM
justColor => color _ artwork.color;
simpleSurface => {
localRay _ CSG.TransformRay[worldRay, artwork.coordSys.worldWRTlocal];
point3d _ CSG.EvaluateLocalRay[localRay, t];
CSG.ReturnRayToPool[localRay];
color _ SVArtwork.FindColorAtSurfacePoint[artwork, point3d, primitiveNormal];
};
spaceFunction => {
localRay _ CSG.TransformRay[worldRay, artwork.coordSys.worldWRTlocal];
point3d _ CSG.EvaluateLocalRay[localRay, t];
CSG.ReturnRayToPool[localRay];
color _ SVArtwork.FindColorAtSpacePoint[artwork, point3d, primitiveNormal];
};
ENDCASE => ERROR;
};

ColorFromClass: PROC [class: Classification, x, y: REAL, lightSources: LightSourceList, camera: Camera, worldRay: Ray, tree: CSGTree, feedback: FeedbackData, makeStream: BOOL _ FALSE, indent: NAT _ 0] RETURNS [r,g,b: REAL] = {
-- Shades the nearest surface crossing in class.  A total miss yields the tree's
-- background color; a ray starting inside a solid yields black.
-- NOTE(review): parameters x, y and local surf are set but otherwise unused here.
surf: Surface;
surfColor: Color;
eyePoint, surfacePt, p: Point3d;
d: Vector3d;
primitive: Primitive;
visibleLights: LightSourceList;
t: REAL;
worldNormal, primitiveNormal: Vector3d;
IF class.count = 0 THEN {
IF NOT class.classifs[1] THEN [r,g,b] _ Shading.ExtractRGB[tree.backgroundColor]
ELSE r _ g _ b _ 0.0;
RETURN};
surf _ class.surfaces[1];
t _ class.params[1]; -- the parameter of the ray intersection
primitive _ class.primitives[1];
primitiveNormal _ class.normals[1];
surfColor _ MasterObjectColorFromPrimitive[primitive, t, worldRay, primitiveNormal];
worldNormal _ Matrix3d.UpdateVectorWithInverse[primitive.worldWRTPrim, primitiveNormal];
surfacePt _
CSG.EvaluateLocalRay[worldRay, t];
[p, d] _ CSG.GetLocalRay[worldRay];
eyePoint _ SVVector3d.Sub[p, d];
-- Shadow rays (if enabled) cull the light list down to the visible lights.
visibleLights _ IF tree.shadows THEN SVFancyRays.VisibleLights[lightSources, surfacePt, tree, camera.useBoundSpheresForShadows, feedback, makeStream, indent] ELSE lightSources;
SELECT primitive.artwork.material FROM
chalk => [r,g,b] _ Shading.DiffuseReflectance[worldNormal, surfacePt, surfColor, visibleLights];
plastic => [r,g,b] _ Shading.DiffuseAndSpecularReflectance[eyePoint, worldNormal, surfacePt, surfColor, visibleLights];
ENDCASE => ERROR;
}; -- end of ColorFromClass

-- One row of sample colors; two rows are kept so output pixels can be box-filtered.
ScanLine: TYPE = REF ScanLineObj;
ScanLineObj: TYPE = RECORD [
seq: SEQUENCE lineLen: NAT OF Color];

CreateScanLine: PROC [len: NAT] RETURNS [scanLine: ScanLine] = {
scanLine _ NEW[ScanLineObj[len]];
};

CopyScanLine: PROC [from: ScanLine, to: ScanLine] = {
-- Copies to.lineLen colors from <from> to <to>.
FOR i: NAT IN [0..to.lineLen) DO
to[i] _ from[i];
ENDLOOP;
};

PutColorInScanLine: PROC [scanLine: ScanLine, index: NAT, color: Color] = {
scanLine[index] _ color;
};

TopColorCast: PROC [cameraPoint: Point2d, worldRay: Ray, tree: CSGTree, lightSources: LightSourceList, camera: Camera, sceneBox: BoundBox, boundSphere: BoundSphere, feedback: FeedbackData, makeStream: BOOL _ FALSE, indent: NAT _ 0] RETURNS [r,g,b: REAL] = {
-- Top-level per-sample cast: prunes against the whole-scene bound box or bound
-- sphere, classifies, shades, and recycles the classification.  Warns if the
-- class pool shrank during the sample (a Classification leaked).
node: REF ANY _ tree.son;
class: Classification;
IF tree.son = NIL THEN {
[r,g,b] _ Shading.ExtractRGB[tree.backgroundColor];
RETURN;
};
IF camera.useBoundBoxes THEN {
IF SVBoundBox.PointInBoundBox[cameraPoint, sceneBox] THEN {
finalClassCount, firstClassCount: NAT;
firstClassCount _ NumberOfClassesInPool[]; -- for debugging purposes.
class _ RayCast[cameraPoint, worldRay, node, TRUE, feedback, makeStream, indent];
[r,g,b] _ ColorFromClass[class, cameraPoint[1], cameraPoint[2], lightSources, camera, worldRay, tree, feedback, makeStream, indent];
ReturnClassToPool[class];
finalClassCount _ NumberOfClassesInPool[]; -- for debugging purposes.
IF finalClassCount < firstClassCount THEN {
Feedback.PutF[feedback, oneLiner, "WARNING: A Classification was lost while casting a ray at [%g, %g]", [real[cameraPoint[1]]], [real[cameraPoint[2]]]];
};
}
ELSE [r,g,b] _ Shading.ExtractRGB[tree.backgroundColor];
}
ELSE { -- Use Bounding Spheres
IF CSG.RayHitsBoundSphere[worldRay, boundSphere] THEN {
finalClassCount, firstClassCount: NAT;
firstClassCount _ NumberOfClassesInPool[]; -- for debugging purposes.
class _ CastRays.RayCastBoundingSpheres[worldRay, node, TRUE, feedback, makeStream, indent];
[r,g,b] _ ColorFromClass[class, cameraPoint[1], cameraPoint[2], lightSources, camera, worldRay, tree, feedback, makeStream, indent];
ReturnClassToPool[class];
finalClassCount _ NumberOfClassesInPool[]; -- for debugging purposes.
IF finalClassCount < firstClassCount THEN {
Feedback.PutF[feedback, oneLiner, "WARNING: A Classification was lost while casting a ray at [%g, %g]", [real[cameraPoint[1]]], [real[cameraPoint[2]]]];
};
}
ELSE [r,g,b] _ Shading.ExtractRGB[tree.backgroundColor];
};
};

SetUpRayTrace: PROC [boundBox: BoundBox, camera: Camera, aisRope: Rope.ROPE, bAndWOnly: BOOL, resolution: REAL, feedback: FeedbackData] RETURNS [I: Image, xSamples, ySamples: NAT, stepSize, xStart, yStart: REAL] = {
-- Opens the output image and computes the sampling grid: step size, sample
-- counts, and world coordinates of the first sample.  The sampled region is the
-- scene's bound box (fullScreen) or the camera frame.
extentX, extentY, projectionX, projectionY, trueExtentX, trueExtentY: REAL;
stepSize _ 72.0/resolution; -- in screen dots per sample
IF camera.frame.fullScreen THEN {
[I, xSamples, ySamples] _ SVImage.OpenImage[aisRope, bAndWOnly, boundBox.minVert[1], boundBox.minVert[2], boundBox.maxVert[1], boundBox.maxVert[2], resolution, feedback];
extentX _ boundBox.maxVert[1] - boundBox.minVert[1];
extentY _ boundBox.maxVert[2] - boundBox.minVert[2];
}
ELSE {
[I, xSamples, ySamples] _ SVImage.OpenImage[aisRope, bAndWOnly, camera.frame.downLeft[1], camera.frame.downLeft[2], camera.frame.upRight[1], camera.frame.upRight[2], resolution, feedback];
extentX _ camera.frame.upRight[1] - camera.frame.downLeft[1];
extentY _ camera.frame.upRight[2] -
camera.frame.downLeft[2];
};
-- Center the actual sampled extent on the requested extent, then back off by
-- half a step so samples straddle pixel centers.
trueExtentX _ Real.Float[xSamples-1]*stepSize;
trueExtentY _ Real.Float[ySamples-1]*stepSize;
projectionX _ (trueExtentX - extentX)/2.0;
projectionY _ (trueExtentY - extentY)/2.0;
IF camera.frame.fullScreen THEN {
xStart _ boundBox.minVert[1] - projectionX;
yStart _ boundBox.minVert[2] - projectionY;
}
ELSE {
xStart _ camera.frame.downLeft[1] - projectionX;
yStart _ camera.frame.downLeft[2] - projectionY;
};
xStart _ xStart - stepSize/2.0;
yStart _ yStart - stepSize/2.0;
}; -- end of SetUpRayTrace

ShutDownRayTrace: PROC [aisRope: Rope.ROPE, I: Image, camera: Camera, startTime: BasicTime.GMT, feedback: FeedbackData] = {
-- Closes the image, recording resolution and elapsed ray-trace time in its comment.
comment: Rope.ROPE;
totalTime: INT;
endTime: BasicTime.GMT;
endTime _ BasicTime.Now[];
totalTime _ BasicTime.Period[from: startTime, to: endTime];
comment _ IO.PutFR["res: %g dpi, rayTraceTime: (%r)", [real[camera.resolution]], [integer[totalTime]]];
SVImage.CloseImage[I, aisRope, comment, feedback];
};

AbortDrawTree: PROC [I: Image, aisRope: Rope.ROPE, camera: Camera, startTime: BasicTime.GMT, feedback: FeedbackData] = {
-- Like ShutDownRayTrace, but also tells the user the trace was aborted.
comment: Rope.ROPE;
totalTime: INT;
endTime: BasicTime.GMT;
endTime _ BasicTime.Now[];
totalTime _ BasicTime.Period[from: startTime, to: endTime];
comment _ IO.PutFR["res: %g dpi, rayTraceTime: (%r)", [real[camera.resolution]], [integer[totalTime]]];
SVImage.CloseImage[I, aisRope, comment, feedback];
Feedback.Append[feedback, "CastRays aborted. 
Partial files saved.", oneLiner];
};

UpdateMaxSamples: PROC [maxSamples: MaxSamples, r,g,b: REAL] = {
-- Tracks the maximum red, green, and blue sample values seen so far (used at
-- shutdown by SVImage.RGBTo8Bits).
-- BUG FIX: maxGreen and maxBlue were formerly computed from maxSamples.maxRed
-- (copy-paste slip), so the green and blue maxima tracked the red channel.
maxSamples.maxRed _ MAX[maxSamples.maxRed, r];
maxSamples.maxGreen _ MAX[maxSamples.maxGreen, g];
maxSamples.maxBlue _ MAX[maxSamples.maxBlue, b];
};

FillScanLine: PROC [startX, stepSize: REAL, xSamples: NAT, y: REAL, cameraXStepRayInWorld: Ray, worldRay: Ray, tree: CSGTree, lightSources: LightSourceList, camera: Camera, boundBox: BoundBox, boundSphere: BoundSphere, scanLine: ScanLine, feedback: FeedbackData, maxSamples: MaxSamples] = {
-- Casts xSamples+1 rays along one scanline at height y, filling scanLine and
-- updating the channel maxima.  worldRay is advanced in place by the x-step ray.
color: Color;
r,g,b: REAL;
thisX: REAL;
[r,g,b] _ TopColorCast[[startX, y], worldRay, tree, lightSources, camera, boundBox, boundSphere, feedback];
UpdateMaxSamples[maxSamples, r, g, b];
color _ Shading.NormalizeRGB[r,g,b];
PutColorInScanLine[scanLine, 0, color];
CSG.AddRay[cameraXStepRayInWorld, worldRay]; -- updates worldRay
FOR j: INTEGER IN[1..xSamples] DO -- left to right
thisX _ startX+Real.Float[j]*stepSize;
[r,g,b] _ TopColorCast[[thisX, y], worldRay, tree, lightSources, camera, boundBox, boundSphere, feedback];
UpdateMaxSamples[maxSamples, r, g, b];
color _ Shading.NormalizeRGB[r,g,b];
PutColorInScanLine[scanLine, j, color];
CSG.AddRay[cameraXStepRayInWorld, worldRay]; -- updates worldRay
ENDLOOP;
};

OutputCameraInfo: PROC [camera: Camera, feedback: FeedbackData] = {
-- Reports which pruning strategies are in effect for this trace.
IF camera.useBoundBoxes THEN Feedback.PutF[feedback, oneLiner, "Use Bounding Boxes.\n"]
ELSE Feedback.PutF[feedback, oneLiner, "Use Bounding Spheres.\n"];
IF camera.useBoundSpheresForShadows THEN Feedback.PutF[feedback, oneLiner, "Use Bound Spheres for Shadows.\n"]
ELSE Feedback.PutF[feedback, oneLiner, "Use nothing for Shadows.\n"];
};

DrawTreeWithStartLine: PUBLIC PROC [startLine: REAL, tree: CSGTree, lightSources: LightSourceList, camera: Camera, aisRope: Rope.ROPE, bAndWOnly: BOOL, notify: NotifyOfProgressProc _ NoOpNotifyOfProgress, clientData: REF ANY _ NIL, feedback: FeedbackData] RETURNS [success: BOOL, maxRed, maxGreen, maxBlue, maxBlack: NAT] = {
topNode: REF ANY; -- tree.son.  The top active node of the CSG Tree
I: Image;
boundBox: BoundBox;
boundSphere: BoundSphere;
cameraWRTWorld: Matrix4by4;
cameraXStepRayInWorld, cameraRay, worldRay: Ray;
stepSize, xStart, yStart, yMiddleStart, thisY: REAL;
maxSamples: MaxSamples _ NEW[MaxSamplesObj];
xSamples, ySamples, iStart: NAT;
color: Color;
scanLine1, scanLine2: ScanLine;
startTime: BasicTime.GMT;
startTime _ BasicTime.Now[];
success _ TRUE;
topNode _ tree.son;
camera.abort _ FALSE; -- if camera.abort becomes TRUE, close files and return.
[boundBox, boundSphere] _ Preprocess3d.PreprocessForImage[tree, camera];
IF camera.frame.fullScreen AND boundBox = NIL THEN {
ComplainInfiniteScene[feedback];
success _ FALSE;
RETURN};
[I, xSamples, ySamples, stepSize, xStart, yStart] _ SetUpRayTrace [boundBox, camera, aisRope, bAndWOnly, camera.resolution, feedback];
OutputTreeInfo[topNode, I, feedback];
OutputCameraInfo[camera, feedback];
cameraRay _ CSG.CreateRay[]; -- DrawTree recycles its own ray
scanLine1 _ CreateScanLine[xSamples+1];
scanLine2 _ CreateScanLine[xSamples+1];
cameraWRTWorld _ CoordSys.FindInTermsOfWorld[camera.coordSys];
cameraXStepRayInWorld _ GetXStepRayInWorld[stepSize, cameraWRTWorld, camera];
-- Convert the requested starting scanline into a sample index and y value.
iStart _ Real.Fix[(startLine-yStart)/stepSize];
yMiddleStart _ yStart+iStart*stepSize;
CSG.StuffCameraRay[cameraRay, [xStart, yMiddleStart], camera];
worldRay _ CSG.TransformRayToWorld[cameraRay, cameraWRTWorld]; -- allocates ray from pool
FillScanLine [xStart, stepSize, xSamples, yMiddleStart, cameraXStepRayInWorld, worldRay, tree, lightSources, camera, boundBox, boundSphere, scanLine1, feedback, maxSamples];
CSG.ReturnRayToPool[worldRay];
FOR i: INTEGER IN[iStart+1..ySamples] DO -- For each ray bottom to top.
IF camera.abort = TRUE THEN {AbortDrawTree[I, aisRope, camera, startTime, feedback]; RETURN};
notify[yStart+i*stepSize, xStart, yStart, xStart+xSamples*stepSize, yStart+ySamples*stepSize, clientData]; -- tell the user interface that we have just cast line i - 1.
thisY _ yStart+i*stepSize;
CSG.StuffCameraRay[cameraRay, [xStart, thisY], camera];
worldRay _ CSG.TransformRayToWorld[cameraRay, cameraWRTWorld]; -- from pool
FillScanLine [xStart, stepSize, xSamples, thisY, cameraXStepRayInWorld, worldRay, tree, lightSources, camera, boundBox, boundSphere, scanLine2, feedback, maxSamples];
CSG.ReturnRayToPool[worldRay];
-- Box-filter the four surrounding samples into each output pixel.
FOR k: NAT IN[0..xSamples) DO
color _ ColorAverage[scanLine1[k], scanLine1[k+1], scanLine2[k], scanLine2[k+1]];
SVImage.PutImage[I, i, k, color, xSamples, ySamples];
ENDLOOP;
CopyScanLine [scanLine2, scanLine1];
ENDLOOP;
ShutDownRayTrace[aisRope, I, camera, startTime, feedback];
[maxRed, maxGreen, maxBlue, maxBlack] _ SVImage.RGBTo8Bits[maxSamples.maxRed, maxSamples.maxGreen, maxSamples.maxBlue];
}; -- end of DrawTreeWithStartLine

ComplainInfiniteScene: PROC [feedback: FeedbackData] = {
Feedback.Append[feedback, "Infinite Scene. Please define a bounding frame.", oneLiner];
Feedback.Blink[feedback];
};

-- Running per-channel maxima accumulated over a whole trace.
MaxSamples: TYPE = REF MaxSamplesObj;
MaxSamplesObj: TYPE = RECORD [maxRed, maxGreen, maxBlue: REAL _ 0];

DrawTree: PUBLIC PROC [tree: CSGTree, lightSources: LightSourceList, camera: Camera, aisRope: Rope.ROPE, bAndWOnly: BOOL, notify: NotifyOfProgressProc _ NoOpNotifyOfProgress, clientData: REF ANY _ NIL, feedback: FeedbackData] RETURNS [success: BOOL, maxRed, maxGreen, maxBlue, maxBlack: NAT] = {
topNode: REF ANY; -- tree.son.  The top active node of the CSG Tree
I: Image;
boundBox: BoundBox;
boundSphere: BoundSphere;
cameraWRTWorld: Matrix4by4;
cameraXStepRayInWorld, cameraRay, worldRay: Ray;
stepSize, xStart, yStart, thisY: REAL;
xSamples, ySamples: NAT;
maxSamples: MaxSamples _ NEW[MaxSamplesObj];
color: Color;
scanLine1, scanLine2: ScanLine;
startTime: BasicTime.GMT;
startTime _ BasicTime.Now[];
success _ TRUE;
topNode _ tree.son;
camera.abort _ FALSE; -- if camera.abort becomes TRUE, close files and return.
[boundBox, boundSphere] _ Preprocess3d.PreprocessForImage[tree, camera];
IF camera.frame.fullScreen AND boundBox = NIL THEN {
ComplainInfiniteScene[feedback];
success _ FALSE;
RETURN};
[I, xSamples, ySamples, stepSize, xStart, yStart] _ SetUpRayTrace [boundBox, camera, aisRope, bAndWOnly, camera.resolution, feedback];
OutputTreeInfo[topNode, I, feedback];
OutputCameraInfo[camera, feedback];
scanLine1 _ CreateScanLine[xSamples+1];
scanLine2 _ CreateScanLine[xSamples+1];
cameraRay _ CSG.CreateRay[]; -- DrawTree recycles its own ray
cameraWRTWorld _ CoordSys.FindInTermsOfWorld[camera.coordSys];
cameraXStepRayInWorld _ GetXStepRayInWorld[stepSize, cameraWRTWorld, camera];
CSG.StuffCameraRay[cameraRay, [xStart, yStart], camera];
worldRay _ CSG.TransformRayToWorld[cameraRay, cameraWRTWorld]; -- allocates ray from pool
FillScanLine [xStart, stepSize, xSamples, yStart, cameraXStepRayInWorld, worldRay, tree, lightSources, camera, boundBox, boundSphere, scanLine1, feedback, maxSamples];
CSG.ReturnRayToPool[worldRay];
FOR i: INTEGER IN[1..ySamples] DO -- For each ray bottom to top.
IF camera.abort = TRUE THEN {AbortDrawTree[I, aisRope, camera, startTime, feedback]; RETURN};
notify[yStart+i*stepSize, xStart, yStart, xStart+xSamples*stepSize, yStart+ySamples*stepSize, clientData]; -- tell the user interface that we have just cast line i - 1.
thisY _ yStart+i*stepSize; CSG.StuffCameraRay[cameraRay, [xStart, thisY], camera]; worldRay _ CSG.TransformRayToWorld[cameraRay, cameraWRTWorld]; -- allocates ray from pool FillScanLine [xStart, stepSize, xSamples, thisY, cameraXStepRayInWorld, worldRay, tree, lightSources, camera, boundBox, boundSphere, scanLine2, feedback, maxSamples]; CSG.ReturnRayToPool[worldRay]; FOR k: NAT IN[0..xSamples) DO color _ ColorAverage[scanLine1[k], scanLine1[k+1], scanLine2[k], scanLine2[k+1]]; SVImage.PutImage[I, i, k, color, xSamples, ySamples]; ENDLOOP; CopyScanLine [scanLine2, scanLine1]; ENDLOOP; ShutDownRayTrace[aisRope, I, camera, startTime, feedback]; [maxRed, maxGreen, maxBlue, maxBlack] _ SVImage.RGBTo8Bits[maxSamples.maxRed, maxSamples.maxGreen, maxSamples.maxBlue]; }; -- end of DrawTree MoreOrLessTheSame: PROC [a, b, c, d: REAL] RETURNS [BOOL] = { min, max: REAL; min _ max _ a; IF b < min THEN min _ b ELSE IF b > max THEN max _ b; IF c < min THEN min _ c ELSE IF c > max THEN max _ c; IF d < min THEN min _ d ELSE IF d > max THEN max _ d; IF max - min > 10 THEN RETURN[FALSE] ELSE RETURN[TRUE]; }; -- end of MoreOrLessTheSame ColorAverage: PROC [a, b, c, d: Color] RETURNS [avgColor: Color] = { ar, ag, ab, br, bg, bb, cr, cg, cb, dr, dg, db, red, green, blue: REAL; [ar, ag, ab] _ Shading.ExtractRGB[a]; [br, bg, bb] _ Shading.ExtractRGB[b]; [cr, cg, cb] _ Shading.ExtractRGB[c]; [dr, dg, db] _ Shading.ExtractRGB[d]; red _ (ar + br + cr + dr)/4.0; green _ (ag + bg + cg + dg)/4.0; blue _ (ab + bb + cb + db)/4.0; avgColor _ ImagerColor.ColorFromRGB[[red, green, blue]]; }; -- end of ColorAverage NoOpNotifyOfProgress: PUBLIC NotifyOfProgressProc = {}; GetClassFromPool: PUBLIC PROC RETURNS [class: Classification] = { IF globalPoolPointer = 0 THEN AddAClass[]; class _ globalPool[globalPoolPointer - 1]; globalPoolPointer _ globalPoolPointer - 1; }; ClassPoolEmpty: SIGNAL = CODE; ReturnClassToPool: PUBLIC PROC [class: Classification] = { IF globalPoolPointer = globalPool.maxClasses 
THEN SIGNAL ClassPoolFull; globalPoolPointer _ globalPoolPointer + 1; globalPool[globalPoolPointer - 1] _ class; }; ClassPoolFull: SIGNAL = CODE; NumberOfClassesInPool: PUBLIC PROC RETURNS [count: NAT] = { count _ globalPoolPointer; }; AddAClass: PROC = { newPool: Pool _ NEW[PoolObj[globalPool.maxClasses+1]]; IF globalPool.maxClasses > 50 THEN {-- there must be a leak in the classification system Feedback.AppendTypescriptRaw[$Solidviews, "CastRaysImplA Warning: More than 50 Classifications!!", oneLiner]; }; FOR i: NAT IN [0..globalPoolPointer) DO newPool[i] _ globalPool[i]; ENDLOOP; globalPoolPointer _ globalPoolPointer + 1; globalPool _ newPool; globalPool[globalPoolPointer - 1] _ NEW[ClassificationObj]; globalPool[globalPoolPointer - 1].surfaces _ NEW[SurfaceArrayObj]; }; GetCompactFromPool: PROC RETURNS [compact: CompactArray] = { IF globalCompactPoolPointer = 0 THEN SIGNAL CompactPoolEmpty; compact _ globalCompactPool[globalCompactPoolPointer]; globalCompactPoolPointer _ globalCompactPoolPointer -1; }; CompactPoolEmpty: SIGNAL = CODE; ReturnCompactToPool: PROC [compact: CompactArray] = { IF globalCompactPoolPointer = globalCompactPoolCount THEN SIGNAL CompactPoolFull; globalCompactPoolPointer _ globalCompactPoolPointer + 1; globalCompactPool[globalCompactPoolPointer] _ compact; }; CompactPoolFull: SIGNAL = CODE; MakeClassAMiss: PUBLIC PROC [class: Classification] = { class.count _ 0; class.classifs[1] _ FALSE; }; Init: PROC = { globalPool _ NEW[PoolObj[globalPoolCount]]; FOR i: NAT IN[0..globalPoolCount) DO globalPool[i] _ NEW[ClassificationObj]; globalPool[i].surfaces _ NEW[SurfaceArrayObj]; ENDLOOP; globalPoolPointer _ globalPoolCount; globalCompactPool _ NEW[CompactPoolObj]; FOR i: NAT IN[1..globalCompactPoolCount] DO globalCompactPool[i] _ NEW[CompactArrayObj]; ENDLOOP; globalCompactPoolPointer _ globalCompactPoolCount; }; Init[]; END. 'ψFile: CastRaysImplA.mesa Author: Eric Bier in the summer of 1982 Copyright c 1984 by Xerox Corporation. 
All rights reserved. Last edited by Bier on March 13, 1987 0:16:12 am PST Contents: The ray casting (as opposed to tree building) part of the CSG package. CSG.mesa builds the trees RayCast is about to return class. Write the name of comp and summarize the classification. The main ray casting procedure. Scene Ray must be in WORLD coordinates before this procedure is called. Before casting each ray, see if the ray will be in the bounding box of the son node. For optimizing, here is the plan: 1) Check ray for left bound box. Set leftBoxHit if appropriate. 2) If leftBoxHit then cast the ray. Set leftHit if appropriate. 3) If not leftHit then if comp.operation = intersection or difference, return miss. 4) If hit, or union, then right box test. Set RightBoxMiss if appropriate. 5) If miss then return: leftclass for difference, empty for intersection, leftClass for union. 6) Else cast ray. 7) Return rightclass or combination if appropriate 1) Check ray for left bound box. Set leftBoxHit if appropriate. 2) If leftBoxHit then cast the ray. Set leftHit if appropriate. 3) If not leftHit then if comp.operation = intersection or difference, return miss. leftClass is (or is equivalent to) EmptyClass[]; 4) If hit, or union, then right box test. Set RightBoxMiss if appropriate. (we don't have to test for this state. It is the only one left.) 5) If miss then return EmptyClass. Else cast ray. This could be a union with or without a left miss or (intersection/difference) with an initial hit. 6) Else cast ray. We have Union, or (intersection/difference) with left hit. Ray hits box. 7) Return rightclass, combination or empty if appropriate Ignore any bounding boxes which were computed. This is useful if the ray does not originate from the screen (as for computing shadows). Of course, bounding spheres would be useful in this case. The main ray casting procedure. Scene Ray must be in WORLD coordinates before this procedure is called. For optimizing, here is the plan: 1) Cast the left ray. 
Set leftHit if appropriate. 2) If not leftHit then if comp.operation = intersection or difference, return miss. 3) If hit, or union, then cast right ray. 4) Return rightclass or combination if appropriate 1) Cast the left ray. Set leftHit if appropriate. 2) If not leftHit then if comp.operation = intersection or difference, return miss. leftClass is (or is equivalent to) EmptyClass[]; 3) If hit, or union, then cast right ray. 4) Return rightclass, combination or empty if appropriate Like HitsTree but returns the parameter value at the first inward-going hit, if any. If we start inside of an object, wait until we are out. Each primitive shape must have a procedure here which can classify a ray with respect to it. Merge the two sorted lists together classifying the segments by the OR of the Classifs for each segment Merge the two sorted lists together classifying the segments by the AND of the Classifs for each segment Merge the two sorted lists together classifying the segments by the (left AND NOT right) of the Classifs for each segment Combine adjacent regions which have the same classif and throw out the surface and parameter information at those points recall ClassificationObj is RECORD [count, params, surfaces, classifs, topNormal]; Compact[i] is TRUE if we should keep class.*[i], FALSE otherwise. Order is preserved among the items we keep. The in-out value on the far side of the last param that changed in-out will always be the last value given in the class. Cast a single ray at the scene. Report the results to the output stream and find the color at that point. ray with respect to Camera (perspective) Find WORLD ray. IF makeStream THEN Feedback.PutFTypescript[feedback, oneLiner, "\n"]; IF makeStream THEN Feedback.PutFTypescript[feedback, oneLiner, "\n"]; Cast a single ray at the scene from the given cameraPoint (point in CAMERA coords on the z = 0 plane). 
The client must be sure that the tree has been preprocessed so RayCast will have bounding boxes to work with. (See Preprocess3d). The client must be sure to call ReturnClassToPool[class] when he is done with it. Ray not allocated from the pool. Ray not allocated from the pool. We are given a classification, a list of lightsources, a camera, the screen point from which the ray was shot, and the ray in WORLD coordinates from which we can derive the eyepoint. To produce an image with shadows, we proceed as follows: Make a new list of lightsources which includes only those lightsources visible from the surface point then proceed in the usual way. Since worldRay is in WORLD coordinates, this finds eyePoint in WORLD coordinates ColorCast: PROC [cameraPoint: Point2d, worldRay: Ray, tree: CSGTree, lightSources: LightSourceList, camera: Camera, makeStream: BOOL _ FALSE, f: IO.STREAM _ NIL, indent: NAT _ 0] RETURNS [color: Color] = { class: Classification; class _ RayCast[cameraPoint, worldRay, tree.son, makeStream, f, indent]; color _ ColorFromClass[class, cameraPoint[1], cameraPoint[2], lightSources, camera, worldRay, tree]; ReturnClassToPool[class]; }; Look at the frame of the camera. If frame.fullscreen is TRUE then use the bounding box of the scene. If it is FALSE, then use the frame parameters to determine the bounding box of our ray tracing. In this case, we should check before casting each ray to see if it is in the scene's bounding box before casting it. We know the size of the box which we wish to raycast and the resolution of the casting in samples per inch. Our box size is in screen dots (at 72 per inch). We wish to know screen dots per sample. (Extent/72)*resolution = inches*(samples per inch) = samples. Extent/samples = screen dots/sample as required. Compactly, then, we need 72/resolution screen dots per sample and Extent/(screen dots per sample) for total number of samples. Now for the hard part. boundBox tells us the outline of the initial box. 
trueExtentX represents the actual extent from the left of the first pixel to the right of the last pixel. Likewise for trueExtentY. We subtract the initial extent from the true extent and split the difference. Subtracting the result from the original bounding box origin gives the ray tracing grid outline. Now (xStart, yStart) is the center of the origin pixel. Subtracting another half a pixel will give us the lower left hand corner of the pixel. Cast the first ray of the y scan line Like DrawTree in CastRaysImplA, but we ignore all values of "i" until yStart+i*stepSize >= startCameraPoint[2]. So we start with i = (startCameraPoint[2]-yStart)/stepSize. Must preprocess before casting rays. Calculates current transform matrices and bounding boxes. Allocate the scan line and ray storage. Compute the ray step. Cast the first scan line. Cast the next ray. We have two complete scan lines. Average values in fours and write to ais. IF MoreOrLessTheSame[scanLine1[k], scanLine1[k+1], scanLine2[k], scanLine2[k+1]] THEN Must preprocess before casting rays. Calculate current transform matrices and bounding boxes. Allocate Storage for Scan Lines and Ray. Compute the Ray increment. Cast the first scan line. Cast the next scan line. We now have two complete scan lines. Average values in fours and write to ais. IF MoreOrLessTheSame[scanLine1[k], scanLine1[k+1], scanLine2[k], scanLine2[k+1]] THEN ELSE color _ CastMoreRays[ul: scanLine1[k], ur: scanLine1[k+1], dl: scanLine2[k], dr: scanLine2[k+1], left: k, right: k+1, top: i, bottom: i-1, topNode: topNode, focalLength: focalLength, lightSources: lightSources, cameraWRTWorld: cameraWRTWorld]; CastMoreRays: PROC [ul, ur, dl, dr: Color, left, right, top, bottom: REAL, tree: CSGTree, focalLength: REAL, lightSources: LightSourceList, camera: Camera] RETURNS [color: Color] = { Cast rays left, right, top, bottom, and middle. Use rays ul, ur, dl, and dr. This further subdivides each square for a more accurate intensity value.
cameraRay: Ray _ GetRayFromPool[]; worldRay: Ray; cameraWRTWorld: Matrix4by4 _ camera.coordSys.mat; leftColor, rightColor, topColor, bottomColor, middleColor: Color; midLeftY, midTopX: REAL; midLeftY _ (top-bottom)/2.0; midTopX _ (right-left)/2.0; cameraRay.basePt _ [left, midLeftY, 0];cameraRay.direction _ [left, midLeftY, focalLength]; worldRay _ TransformRay[cameraRay, cameraWRTWorld]; -- allocates ray from pool leftColor _ ColorCast[[left, midLeftY], worldRay, tree, lightSources, camera]; ReturnRayToPool[worldRay]; cameraRay.basePt _ [right, midLeftY, 0];cameraRay.direction _ [right, midLeftY, focalLength]; worldRay _ TransformRay[cameraRay, cameraWRTWorld]; -- allocates ray from pool rightColor _ ColorCast[[right, midLeftY], worldRay, tree, lightSources, camera]; ReturnRayToPool[worldRay]; cameraRay.basePt _ [midTopX, top, 0];cameraRay.direction _ [midTopX, top, focalLength]; worldRay _ TransformRay[cameraRay, cameraWRTWorld]; -- allocates ray from pool topColor _ ColorCast[[midTopX, top], worldRay, tree, lightSources, camera]; ReturnRayToPool[worldRay]; cameraRay.basePt _ [midTopX, bottom, 0];cameraRay.direction _ [midTopX, bottom, focalLength]; worldRay _ TransformRay[cameraRay, cameraWRTWorld]; -- allocates ray from pool bottomColor _ ColorCast[[midTopX, bottom], worldRay, tree, lightSources, camera]; ReturnRayToPool[worldRay]; cameraRay.basePt _ [midTopX, midLeftY, 0];cameraRay.direction _ [midTopX, midLeftY, focalLength]; worldRay _ TransformRay[cameraRay, cameraWRTWorld]; -- allocates ray from pool middleColor _ ColorCast[[midTopX, midLeftY], worldRay, tree, lightSources, camera]; ReturnRayToPool[worldRay]; color _ ColorAverage[ ColorAverage[ul, topColor, leftColor, middleColor], ColorAverage[topColor, ur, middleColor, rightColor], ColorAverage[leftColor, middleColor, dl, bottomColor], ColorAverage[middleColor, rightColor, bottomColor, dr] ]; ReturnRayToPool[cameraRay]; }; -- end of CastMoreRays This scene contains sections complicated enough that the 
original allocation of classifications does not cover the most complicated rays. Add another classification to the pool. Create a Classification Pool Create a Compact Pool Κ*>– "cedar" style˜Ihead1šœ™Iprocšœ'™'Jšœ Οmœ1™˜>Lšœ? ˜YLšžœ žœ3™EL˜Lšœ ‘ œd˜zLšœ$˜$Lšžœ žœ3™ELšœ˜Lšœ ˜L˜—šŸ œžœžœDžœžœ&žœžœžœΟuœ ˜ΜLšœκ™κLšœQ™QLšœ žœžœ ˜Lšœ’œ˜LšœJ˜JL˜Lšœ’œžœ˜!Lšžœ’œ˜3Lšœ’œžœ’œ ˜YLšžœ’œ˜Lšœ‘œ’œ1˜VLšœ ˜—L˜šŸ œžœžœžœ žœžœ žœ˜HLš žœžœžœžœžœ˜šžœžœž˜Lšœ#žœ˜+šœ"˜"Lšžœ žœž˜Lšžœ žœ˜Lšœ žœ˜Lšœ žœžœ˜"Lšœ žœžœ˜$Lšœ$˜$Lšœ%˜%Lšœ˜Lšžœ˜Lšœ˜—Lšžœžœ˜—Lšœ ˜—š ŸœžœžœžœŸœ$˜JLšœžœ˜L˜Lšœ ˜ Lšœfžœ,žœ$˜ΈLšœ˜Lšœ ˜—L˜šŸœžœ žœ.žœ˜lLšœ&˜&Lšœ$˜$L˜Lšœ"˜"šžœ!žœ˜)Lšœ3˜3Lšœ"˜"Lšœ:˜:LšœJ˜JLšœJ˜Jšœ7˜7L™ —Lšœ$˜$Lšœ$˜$L˜—šžœ ˜!LšœF˜Fšœ;˜;L™ —L˜—Lšœ ˜—L˜šŸœžœžœ,žœ˜‰Lšœ˜Lšœ˜Lšœ%˜%Lšžœž˜Lšœ#˜#šœ˜Lšœ žœ‘œ˜FLšœ žœ˜,Lšžœ˜Lšœ‘œ$˜MLšœ˜—˜Lšœ žœ‘œ˜FLšœ žœ˜,Lšžœ˜Lšœ‘œ$˜KL˜—Lšžœžœ˜Lšœ˜L˜—šŸœžœžœsžœžœ žœžœ˜βL™πL™„Lšœ˜Lšœ˜Lšœ ˜ L˜ Lšœ˜Lšœ˜Lšœžœ˜Lšœ'˜'šžœžœ˜Lšžœžœžœ3˜PLšžœ˜Lšžœ˜—Lšœ˜Lšœ (˜˜BLšžœ"žœF˜nLšžœA˜EL˜L˜—šŸœžœžœ žœNžœ žœCžœžœžœžœ žœ%žœ˜ΕLšœ¬™¬Lšœ žœžœ 1˜CLšžœ ˜ Lšœ˜L˜Lšœ˜Lšœ0˜0Lšœ/žœ˜4Lšœžœ˜,Lšœžœ˜ Lšœ-˜-Jšœžœ˜L˜Lšœ˜Lšœ žœ˜Lšœ˜šœžœ 8˜NLš‘$™$—LšœH˜Hšžœžœ žœžœ˜4Lšœ+žœžœ˜:Lš‘9™9—Lšœžœ„˜†šœžœ0˜ILš‘'™'—Lšœ  ˜=šœO˜OLš‘™—Lšœ>˜>šœM˜MLš‘™—Lšœ/˜/Lšœ&˜&Lšœ>˜>Lšœ? ˜YLšœ­˜­Lšœ˜L˜š žœžœžœžœ ˜GLšžœžœžœ:žœ˜]šœk =˜¨Lš‘™—Lšœ˜Lšœ7˜7Lšœ?  ˜KLšœ¦˜¦šœ˜Lš‘I™I—šžœžœžœž˜Lšœ2™2Lšœ"™"LšœQ˜QLšœžœ#˜5—Lšžœ˜Lšœ$˜$—Lšžœ˜L˜Lšœžœ˜:Lšœw˜wLšœ ˜"L˜—šŸœžœ˜8LšœLžœ ˜XLšœ˜L˜L˜—Lšœ žœžœ˜%Lšœžœžœžœ˜CL˜šŸœžœžœNžœ žœCžœžœžœžœ žœ%žœ˜§Lšœ žœžœ 1˜CLšžœ ˜ Lšœ˜L˜Lšœ˜Lšœ0˜0Lšœ!žœ˜&Lšœžœ˜Lšœžœ˜,Lšœ-˜-Jšœžœ˜L˜Lšœ˜Lšœ žœ˜Lšœ˜šœžœ 8˜NLš‘$™$—LšœH˜Hšžœžœ žœžœ˜4Lšœ+žœžœ˜:Lš‘8™8—Lšœžœ„˜†šœžœ0˜ILš‘(™(—LšœO˜Ošœ  ˜=Lš‘™—Lšœ>˜>šœM˜MLš‘™—Lšœ8˜8Lšœ? ˜YLšœ§˜§Lšœ˜L˜š žœžœžœžœ ˜@Lšžœžœžœžœ1˜]šœk =˜¨Lš‘™—Lšœ˜Lšœ7˜7Lšœ? 
˜YLšœ¦˜¦Lšœ˜L˜LšœO™Ošžœžœžœž˜Lšœ2™2Lšœ"™"LšœQ˜QLšœžœ#˜5—Lšžœ˜Lšœ$˜$—Lšžœ˜L˜Lšœžœ˜:Lšœw˜wLšœ ˜—L˜š Ÿœžœžœžœžœ˜=Lšœ žœ˜Lšœ˜Lš žœ žœ žœžœ žœ ˜5Lš žœ žœ žœžœ žœ ˜5Lš žœ žœ žœžœ žœ ˜5Lšžœžœžœžœžœžœžœ˜7Lšœ ˜—L˜šŸ œžœžœ˜DLšœBžœ˜GLšœ%˜%Lšœ%˜%Lšœ%˜%Lšœ%˜%Lšœ˜Lšœ ˜ Lšœ˜Lšœ8˜8Lšœ ˜—˜Lšœψ™ψ—š Ÿ œžœ3žœžœ1žœ™ΆLšœ—™—L™Lšœ"™"Lšœ™Lšœ1™1LšœA™ALšœžœ™Lšœ™Lšœ™L™Lšœ[™[Lšœ4 ™NLšœN™NLšœ™L™Lšœ]™]Lšœ4 ™NLšœP™PLšœ™L™LšœW™WLšœ4 ™NLšœK™KLšœ™L™Lšœ]™]Lšœ4 ™NLšœQ™QLšœ™L™Lšœa™aLšœ4 ™NLšœS™SLšœ™L™LšœI™ILšœ4™4Lšœ6™6Lšœ9™9L™Lšœ™L™Lšœ ™—L˜Lšœžœ˜7L˜šŸœžœžœžœ˜ALšžœžœ ˜*Lšœ*˜*Lšœ*˜*Lšœ˜—Lšœžœžœ˜šŸœžœžœ˜:Lšžœ+žœžœ˜GLšœ*˜*Lšœ*˜*Lšœ˜—Lšœžœžœ˜š Ÿœžœžœžœ žœ˜;Lšœ˜L˜L˜—šŸ œžœ˜L™²Lšœžœ#˜6šžœžœž 4˜XLšœm˜mL˜—šžœžœžœž˜'Lšœ˜Lšžœ˜—Lšœ*˜*Lšœ˜Lšœ$žœ˜;Lšœ-žœ˜BL˜—L˜šŸœžœžœ˜