File: SVGraphicsImpl.mesa
Last edited by Bier on September 23, 1987 7:40:07 pm PDT
Contents: Implementation of a simple graphics package for 3d renderings.
DIRECTORY
SVCoordSys, SVGraphics, FunctionCache, Imager, ImagerColor, ImagerColorPrivate, ImagerFont, ImagerPath, SVMatrix3d, SVShading, SVPolygon3d, Real, Rope, SV2d, SV3d, SVDraw, SVLines2d, SVVector3d, SVModelTypes, Vectors2d;
SVGraphicsImpl: CEDAR PROGRAM
IMPORTS SVCoordSys, FunctionCache, Imager, ImagerColor, ImagerColorPrivate, ImagerFont, ImagerPath, SVMatrix3d, Rope, SVShading, SVDraw, SVLines2d, SVPolygon3d, SVVector3d, Vectors2d
EXPORTS SVGraphics =
BEGIN
Artwork: TYPE = SVModelTypes.Artwork;
Camera: TYPE = REF CameraObj;
CameraObj: TYPE = SVModelTypes.CameraObj;
Color: TYPE = Imager.Color;
CoordSystem: TYPE = SVModelTypes.CoordSystem;
DisplayStyle: TYPE = SVModelTypes.DisplayStyle;
DrawStyle: TYPE = SVModelTypes.DrawStyle;
FrameBox: TYPE = SVModelTypes.FrameBox;
LightSource: TYPE = SVModelTypes.LightSource;
LightSourceList: TYPE = SVShading.LightSourceList;
Matrix4by4: TYPE = SV3d.Matrix4by4;
Plane: TYPE = SV3d.Plane;
Point2d: TYPE = SV2d.Point2d;
Point3d: TYPE = SV3d.Point3d;
Poly3d: TYPE = SV3d.Poly3d;
Projection: TYPE = SVModelTypes.Projection; -- {perspective, orthogonal}
QualityMode: TYPE = SVModelTypes.QualityMode;
Ray2d: TYPE = SV2d.Ray2d;
StrokeEnd: TYPE = Imager.StrokeEnd;
Vector3d: TYPE = SV3d.Vector3d;
CreateCamera: PUBLIC PROC [viewName: Rope.ROPE, coordSys: CoordSystem, screenCS: CoordSystem, resolution: REAL, focalLength: REAL, projection: Projection, frame: FrameBox, clippingPlanes: LIST OF Plane, visibleAssemblies: LIST OF Rope.ROPE, style: DrawStyle, colorFilm: BOOL, useBoundBoxes: BOOL, useBoundSpheresForShadows: BOOL] RETURNS [camera: Camera] = {
displayStyle: DisplayStyle ← print;
camera ← NEW[CameraObj ← [viewName, coordSys, screenCS, resolution, focalLength, projection, frame, clippingPlanes, visibleAssemblies, style, colorFilm, fast, displayStyle, FALSE, useBoundBoxes, useBoundSpheresForShadows, [0,0,0]]];
}; -- end of CreateCamera
PlaceCamera: PUBLIC PROC [camera: Camera, focus: Point3d, origin: Point3d, slant: REAL] = {
zAxis: Vector3d ← SVVector3d.Sub[origin, focus];
-- xAxis should be normal to zAxis, parallel to the xz plane, and counter-clockwise from the projection of z onto the xz plane. If z is vertical, I arbitrarily chose the x axis aligned with the world x axis.
SVCoordSys.SetMat[camera.coordSys, SVMatrix3d.MakeHorizontalMatFromZAxis[zAxis, origin]];
-- Now, rotate the coordSys counter-clockwise slant degrees around its z axis.
SVCoordSys.SetMat[camera.coordSys, SVMatrix3d.LocalRotateZ[SVCoordSys.GetMat[camera.coordSys], slant]];
};
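-- A hedged usage sketch (illustration only, not part of this module): aim an already-created camera at the world origin from the point [0,10,20] with no slant, then set its focal length. The numeric values are arbitrary.
-- PlaceCamera[camera, [0.0, 0.0, 0.0], [0.0, 10.0, 20.0], 0.0]; -- camera, focus, origin, slant
-- SetFocalLengthCamera[camera, 100.0];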
SetFocalLengthCamera: PUBLIC PROC [camera: Camera, focalLength: REAL] = {
camera.focalLength ← focalLength;
};
SetQualityCamera: PUBLIC PROC [camera: Camera, qual: QualityMode] = {
-- Chooses how the speed versus print-quality trade-off should be made.
camera.quality ← qual;
};
ColorFilmCamera: PUBLIC PROC [camera: Camera, colorFilm: BOOL] = {
camera.colorFilm ← colorFilm;
};
Clip:
PUBLIC
PROC [dc: Imager.Context, camera: Camera] = {
downLeft, upRight: Point2d;
IF camera.frame.fullScreen THEN RETURN;
downLeft ← SVCoordSys.CameraToScreen[camera.frame.downLeft, camera.screenCS]; -- puts into SCREEN coords
upRight ← SVCoordSys.CameraToScreen[camera.frame.upRight, camera.screenCS];
Imager.ClipRectangle[dc, [downLeft[1], downLeft[2], upRight[1] - downLeft[1], upRight[2] - downLeft[2]]];
};
DrawFrame:
PUBLIC
PROC [dc: Imager.Context, camera: Camera] = {
downLeft, upRight: Point2d;
IF camera.frame.fullScreen THEN RETURN;
downLeft ← SVCoordSys.CameraToScreen[camera.frame.downLeft, camera.screenCS];
upRight ← SVCoordSys.CameraToScreen[camera.frame.upRight, camera.screenCS];
SVDraw.LineSandwich[dc, downLeft[1], downLeft[2], downLeft[1], upRight[2]];
SVDraw.LineSandwich[dc, downLeft[1], upRight[2], upRight[1], upRight[2]];
SVDraw.LineSandwich[dc, upRight[1], upRight[2], upRight[1], downLeft[2]];
SVDraw.LineSandwich[dc, upRight[1], downLeft[2], downLeft[1], downLeft[2]];
}; -- fast, low quality
DoProjection:
PUBLIC
PROC [point3d: Point3d, camera: Camera]
RETURNS [newPoint: Point2d] = {
-- Perform a perspective or orthogonal projection on point3d.
IF camera.projection = orthogonal
THEN {
newPoint[1] ← point3d[1];
newPoint[2] ← point3d[2];
RETURN;
}
ELSE RETURN[SVMatrix3d.PerspectiveTrans[point3d, camera.focalLength]];
};
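-- Illustrative note (an assumption, not taken from SVMatrix3d): with the eyepoint at [0,0,focalLength] and the image plane at z = 0, the model used by DrawInfiniteLine below, the perspective image of a CAMERA point [x, y, z] is
--   [focalLength*x/(focalLength - z), focalLength*y/(focalLength - z)],
-- and PerspectiveTrans is assumed to compute something of this form; the orthogonal case simply drops the z coordinate, as the code above shows.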
LocalToCamera:
PUBLIC
PROC [localPoint: Point3d, localCS: CoordSystem, cameraCS: CoordSystem]
RETURNS [cameraPoint: Point3d] = {
cameraPoint ← SVMatrix3d.Update[localPoint, SVCoordSys.WRTCamera[localCS, cameraCS]];
};
LocalToCameraInternal: PROC [localPoint: Point3d, localCamera: Matrix4by4] RETURNS [cameraPoint: Point3d] = {
cameraPoint ← SVMatrix3d.Update[localPoint, localCamera];
};
LocalToWorld:
PUBLIC
PROC [localPt: Point3d, localCS: CoordSystem]
RETURNS [worldPt: Point3d] = {
worldPt ← SVMatrix3d.Update[localPt, SVCoordSys.WRTWorld[localCS]];
};
VectorToWorld:
PUBLIC
PROC [vector: Vector3d, localCS: CoordSystem]
RETURNS [worldVector: Vector3d] = {
worldVector ← SVMatrix3d.UpdateVectorWithInverse[SVCoordSys.FindWorldInTermsOf[localCS], vector];
};
SetCP: PUBLIC PROC [dc: Imager.Context, point3d: Point3d, camera: Camera, localCamera: Matrix4by4] = {
camera.lastPoint ← SVMatrix3d.Update[point3d, localCamera];
};
SetCPAbsolute:
PUBLIC
PROC [dc: Imager.Context, point3d: Point3d, camera: Camera] = {
camera.lastPoint ← point3d;
};
DrawTo: PUBLIC PROC [dc: Imager.Context, point3d: Point3d, camera: Camera, localCamera: Matrix4by4, strokeWidth: REAL ← 1.0] = {
-- Find out how much of the line defined by point3d (taken into CAMERA coordinates) and camera.lastPoint is visible after clipping against the camera's clipping planes. Project the visible portion to SCREEN coordinates and stroke it (round ends for a quality camera, butt ends otherwise). camera.lastPoint is then advanced to point3d in CAMERA coordinates.
newP1, newP2, thisCameraPoint: Point3d;
lastScreenPoint, thisScreenPoint: Point2d;
nullSegment: BOOL;
thisCameraPoint ← SVMatrix3d.Update[point3d, localCamera];
[newP1, newP2, ----, ----, nullSegment] ←
SVPolygon3d.ClipLineSegmentToPlanes[camera.lastPoint, thisCameraPoint, camera.clippingPlanes];
IF nullSegment THEN {camera.lastPoint ← thisCameraPoint; RETURN};
lastScreenPoint ← DoProjection[newP1, camera];
lastScreenPoint ← SVCoordSys.CameraToScreen[lastScreenPoint, camera.screenCS];
thisScreenPoint ← DoProjection[newP2, camera];
thisScreenPoint ← SVCoordSys.CameraToScreen[thisScreenPoint, camera.screenCS];
IF camera.quality = quality
THEN {
-- use round-ended strokes
Imager.SetStrokeWidth[dc, strokeWidth];
Imager.SetStrokeEnd[dc, round];
}
ELSE {
-- not a quality camera. Draw it fast.
Imager.SetStrokeWidth[dc, strokeWidth];
Imager.SetStrokeEnd[dc, butt];
};
Imager.MaskVector[dc, [lastScreenPoint[1], lastScreenPoint[2]], [thisScreenPoint[1], thisScreenPoint[2]]];
camera.lastPoint ← thisCameraPoint;
}; -- end of DrawTo
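-- A hedged usage sketch (illustration only): draw one wireframe edge of a shape whose geometry is given in some localCS, for a previously created camera and Imager context dc.
-- localCamera: Matrix4by4 ← SVCoordSys.WRTCamera[localCS, camera.coordSys];
-- SetCP[dc, [0.0, 0.0, 0.0], camera, localCamera];
-- DrawTo[dc, [1.0, 0.0, 0.0], camera, localCamera, 1.0];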
DrawToAbsolute: PUBLIC PROC [dc: Imager.Context, point3d: Point3d, camera: Camera, strokeWidth: REAL ← 1.0] = {
-- Like DrawTo, except that point3d is assumed to be already in CAMERA coordinates.
newP1, newP2, thisCameraPoint: Point3d;
lastScreenPoint, thisScreenPoint: Point2d;
newP1isP1, newP2isP2, nullSegment: BOOL;
thisCameraPoint ← point3d;
[newP1, newP2, newP1isP1, newP2isP2, nullSegment] ← SVPolygon3d.ClipLineSegmentToPlanes[camera.lastPoint, thisCameraPoint, camera.clippingPlanes];
IF nullSegment THEN {camera.lastPoint ← thisCameraPoint; RETURN};
lastScreenPoint ← DoProjection[newP1, camera];
lastScreenPoint ← SVCoordSys.CameraToScreen[lastScreenPoint, camera.screenCS];
thisScreenPoint ← DoProjection[newP2, camera];
thisScreenPoint ← SVCoordSys.CameraToScreen[thisScreenPoint, camera.screenCS];
IF camera.quality = quality
THEN {
-- use round-ended strokes
Imager.SetStrokeWidth[dc, strokeWidth];
Imager.SetStrokeEnd[dc, round];
}
ELSE {
-- not a quality camera. Use butt-ended strokes
Imager.SetStrokeWidth[dc, strokeWidth];
Imager.SetStrokeEnd[dc, butt];
};
Imager.MaskVector[dc, [lastScreenPoint[1], lastScreenPoint[2]], [thisScreenPoint[1], thisScreenPoint[2]]];
camera.lastPoint ← thisCameraPoint;
}; -- end of DrawToAbsolute
MoveTo:
PUBLIC
PROC [point3d: Point3d, camera: Camera, localCS: CoordSystem]
RETURNS [path: ImagerPath.Trajectory] = {
objPoint: Point2d;
point3d ← LocalToCamera[point3d, localCS, camera.coordSys];
objPoint ← DoProjection[point3d, camera];
objPoint ← SVCoordSys.CameraToScreen[objPoint, camera.screenCS]; -- puts into SCREEN coords
path ← ImagerPath.MoveTo[[objPoint[1], objPoint[2]]];
};
MoveToAbsolute:
PUBLIC
PROC [point3d: Point3d, camera: Camera]
RETURNS [path: ImagerPath.Trajectory] = {
objPoint: Point2d;
objPoint ← DoProjection[point3d, camera];
objPoint ← SVCoordSys.CameraToScreen[objPoint, camera.screenCS]; -- puts into SCREEN coords
path ← ImagerPath.MoveTo[[objPoint[1], objPoint[2]]];
};
-- Assumes point3d is a point in the CAMERA coordinate system.
LineTo:
PUBLIC
PROC [path: ImagerPath.Trajectory, point3d: Point3d, camera: Camera, localCS: CoordSystem]
RETURNS [newPath: ImagerPath.Trajectory] = {
objPoint: Point2d;
point3d ← LocalToCamera[point3d, localCS, camera.coordSys]; -- puts in CAMERA
objPoint ← DoProjection[point3d, camera];
objPoint ← SVCoordSys.CameraToScreen[objPoint, camera.screenCS]; -- puts into SCREEN coords
newPath ← ImagerPath.LineTo[path, [objPoint[1], objPoint[2]]];
};
LineToAbsolute:
PUBLIC
PROC [path: ImagerPath.Trajectory, point3d: Point3d, camera: Camera]
RETURNS [newPath: ImagerPath.Trajectory] = {
objPoint: Point2d;
objPoint ← DoProjection[point3d, camera];
objPoint ← SVCoordSys.CameraToScreen[objPoint, camera.screenCS]; -- puts into SCREEN coords
newPath ← ImagerPath.LineTo[path, [objPoint[1], objPoint[2]]];
};
DrawStroke: PUBLIC PROC [dc: Imager.Context, path: ImagerPath.Trajectory, width: REAL ← 1, closed: BOOLEAN ← FALSE, ends: StrokeEnd ← butt] = {
Imager.SetStrokeWidth[dc, width];
Imager.SetStrokeEnd[dc, ends];
Imager.MaskStrokeTrajectory[dc, path, closed];
};
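-- A hedged usage sketch (illustration only): build a screen trajectory for a triangle given in localCS and stroke it as a closed path with round ends.
-- path: ImagerPath.Trajectory ← MoveTo[[0.0, 0.0, 0.0], camera, localCS];
-- path ← LineTo[path, [1.0, 0.0, 0.0], camera, localCS];
-- path ← LineTo[path, [0.0, 1.0, 0.0], camera, localCS];
-- DrawStroke[dc, path, 1.0, TRUE, round];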
DrawFilled: PUBLIC PROC [dc: Imager.Context, path: ImagerPath.Trajectory, parityFill: BOOLEAN ← FALSE] = {
Imager.MaskFillTrajectory[dc, path, parityFill];
};
DrawPolygonAbsolute: PUBLIC PROC [dc: Imager.Context, poly: Poly3d, width: REAL ← 1, ends: StrokeEnd ← butt, camera: Camera] = {
-- Draws the edges of poly, assuming that the vertex coordinates are already CAMERA coordinates.
outline: ImagerPath.Trajectory;
objPoint: Point2d;
-- Clip the polygon against the camera's clipping planes to produce a new clipped polygon.
poly ← SVPolygon3d.ClipPolyToPlanes[poly, camera.clippingPlanes];
objPoint ← DoProjection[poly[0], camera];
objPoint ← SVCoordSys.CameraToScreen[objPoint, camera.screenCS]; -- puts into SCREEN coords
outline ← ImagerPath.MoveTo[[objPoint[1], objPoint[2]]];
FOR i: NAT IN [1..poly.len) DO
objPoint ← DoProjection[poly[i], camera];
objPoint ← SVCoordSys.CameraToScreen[objPoint, camera.screenCS]; -- puts into SCREEN coords
outline ← ImagerPath.LineTo[outline, [objPoint[1], objPoint[2]]];
ENDLOOP;
Imager.SetStrokeWidth[dc, width];
Imager.SetStrokeEnd[dc, ends];
Imager.MaskStrokeTrajectory[dc, outline, TRUE];
};
DrawLine:
PUBLIC
PROC [dc: Imager.Context, start: Point3d, end: Point3d, camera: Camera, localCS: CoordSystem] = {
-- Like DrawTo, except that the starting and ending points are given at the same time.
lastScreenPoint, thisScreenPoint: Point2d;
newP1isP1, newP2isP2, nullSegment: BOOL;
newP1, newP2: Point3d;
start ← LocalToCamera[start, localCS, camera.coordSys]; -- puts in CAMERA
end ← LocalToCamera[end, localCS, camera.coordSys]; -- puts in CAMERA
[newP1, newP2, newP1isP1, newP2isP2, nullSegment] ← SVPolygon3d.ClipLineSegmentToPlanes[start, end, camera.clippingPlanes];
IF nullSegment THEN RETURN;
lastScreenPoint ← DoProjection[newP1, camera];
lastScreenPoint ← SVCoordSys.CameraToScreen[lastScreenPoint, camera.screenCS]; -- puts into SCREEN coords
thisScreenPoint ← DoProjection[newP2, camera];
thisScreenPoint ← SVCoordSys.CameraToScreen[thisScreenPoint, camera.screenCS]; -- puts into SCREEN coords
IF camera.quality = quality
THEN {
-- use round-ended strokes
line: ImagerPath.Trajectory;
line ← ImagerPath.MoveTo[[lastScreenPoint[1], lastScreenPoint[2]]];
line ← ImagerPath.LineTo[line, [thisScreenPoint[1], thisScreenPoint[2]]];
Imager.SetStrokeWidth[dc, 1];
Imager.SetStrokeEnd[dc, round];
Imager.MaskStrokeTrajectory[dc, line, FALSE];
}
ELSE {
-- not a quality camera. Just draw line segment.
Imager.MaskVector[dc, [lastScreenPoint[1], lastScreenPoint[2]], [thisScreenPoint[1], thisScreenPoint[2]]];
};
}; -- end of DrawLine
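-- A hedged usage sketch (illustration only): draw the three axes of localCS as unit-length segments.
-- DrawLine[dc, [0.0, 0.0, 0.0], [1.0, 0.0, 0.0], camera, localCS];
-- DrawLine[dc, [0.0, 0.0, 0.0], [0.0, 1.0, 0.0], camera, localCS];
-- DrawLine[dc, [0.0, 0.0, 0.0], [0.0, 0.0, 1.0], camera, localCS];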
DrawLineOnScreen:
PUBLIC
PROC [dc: Imager.Context, screenPoint1, screenPoint2: Point2d, camera: Camera] = {
IF camera.quality = quality
THEN {
-- use round-ended strokes
line: ImagerPath.Trajectory;
line ← ImagerPath.MoveTo[[screenPoint1[1], screenPoint1[2]]];
line ← ImagerPath.LineTo[line, [screenPoint2[1], screenPoint2[2]]];
Imager.SetStrokeWidth[dc, 1];
Imager.SetStrokeEnd[dc, round];
Imager.MaskStrokeTrajectory[dc, line, FALSE];
}
ELSE {
-- not a quality camera. Just draw line segment.
Imager.MaskVector[dc, [screenPoint1[1], screenPoint1[2]], [screenPoint2[1], screenPoint2[2]]];
};
};
DrawChar: PUBLIC PROC [dc: Imager.Context, c: CHARACTER, camera: Camera] = {
screenPoint: Point2d;
screenPoint ← DoProjection[camera.lastPoint, camera];
screenPoint ← SVCoordSys.CameraToScreen[screenPoint, camera.screenCS]; -- puts into SCREEN coords
Imager.SetXY[dc, [screenPoint[1], screenPoint[2]]];
Imager.SetFont[dc, coordFont];
Imager.ShowChar[dc, c];
}; -- end of DrawChar
CameraPolygon: PROC [poly3d: Poly3d, localCamera: Matrix4by4] RETURNS [cameraPoly: Poly3d] = {
-- Converts a polygon from local to CAMERA coordinates.
IF poly3d.len = 4 THEN {cameraPoly ← quadPoly; SVPolygon3d.ClearPoly[cameraPoly]}
ELSE cameraPoly ← SVPolygon3d.CreatePoly[poly3d.len];
FOR i: NAT IN [0..poly3d.len) DO
cameraPoly ← SVPolygon3d.AddPolyPoint[cameraPoly, LocalToCameraInternal[poly3d[i], localCamera]];
ENDLOOP;
}; -- end of CameraPolygon
DrawArea:
PUBLIC
PROC [dc: Imager.Context, localNormal: Vector3d, poly3d: Poly3d, artwork: Artwork, lightSources: LightSourceList, camera: Camera, localCS: CoordSystem] = {
-- Given an ordered list (array) of Point3d's, defined in the current local coordinate system, find the corresponding WORLD points for shading, and project the polygon they define onto the image plane with the camera's projection.
worldPoint3d: Point3d;
objPoint: Point2d;
colorshade: Imager.Color;
r, g, b, scaleFactor: REAL;
cameraNormal: Vector3d;
worldNormal: Vector3d;
eyepoint: Point3d;
cameraPolygon: Poly3d;
outline: ImagerPath.Trajectory;
-- Find the normal in CAMERA and in WORLD coordinates.
cameraNormal ← SVMatrix3d.UpdateVectorWithInverse[SVCoordSys.FindCameraInTermsOf[localCS, camera.coordSys], localNormal];
worldNormal ← SVMatrix3d.UpdateVectorWithInverse[SVCoordSys.FindWorldInTermsOf[localCS], localNormal];
-- A reverse-facing surface is one which is not visible to the camera. In our current world of opaque surfaces, we need not draw reverse-facing surfaces. To detect a reverse-facing surface, take its normal N and form the dot product of N with the vector from a point on the surface to the camera lens. If the result is negative, the surface is reverse-facing.
-- A simpler test, which removes most reverse-facing surfaces, is this one:
IF cameraNormal[3] <= 0 THEN RETURN; -- don't draw reverse-facing surfaces
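-- The full test, written as a hedged sketch using worldNormal from above and the eyepoint and worldPoint3d computed just below (ComputeShading performs essentially this computation):
--   backfacing: BOOL ← SVVector3d.DotProduct[SVVector3d.Sub[eyepoint, worldPoint3d], worldNormal] < 0;
-- The simpler cameraNormal[3] <= 0 test above treats the CAMERA z axis as the viewing direction, which is exact for an orthogonal projection and only approximate for a perspective one.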
-- Look at the first point.
worldPoint3d ← SVMatrix3d.Update[poly3d[0], SVCoordSys.WRTWorld[localCS]];
-- Use this point to estimate the surface color.
eyepoint ← LocalToWorld[[0,0,camera.focalLength], camera.coordSys];
SELECT artwork.material
FROM
plastic => [r,g,b] ← SVShading.DiffuseAndSpecularReflectance[eyepoint, worldNormal, worldPoint3d, artwork.color, lightSources];
chalk => [r,g,b] ← SVShading.DiffuseReflectance[worldNormal, worldPoint3d, artwork.color, lightSources];
ENDCASE => ERROR;
-- Find the polygon in camera coordinates.
cameraPolygon ← CameraPolygon[poly3d, SVCoordSys.WRTCamera[localCS, camera.coordSys]];
-- Clip this polygon against the camera's clipping planes.
cameraPolygon ← SVPolygon3d.ClipPolyToPlanes[cameraPolygon, camera.clippingPlanes];
-- Move to the first point.
objPoint ← DoProjection[cameraPolygon[0], camera];
objPoint ← SVCoordSys.CameraToScreen[objPoint, camera.screenCS]; -- puts into SCREEN coords
outline ← ImagerPath.MoveTo[[objPoint[1], objPoint[2]]];
-- Create a path which forms the edges of this area.
FOR i: NAT IN [1..cameraPolygon.len) DO
objPoint ← DoProjection[cameraPolygon[i], camera];
objPoint ← SVCoordSys.CameraToScreen[objPoint, camera.screenCS]; -- puts into SCREEN coords
outline ← ImagerPath.LineTo[outline, [objPoint[1], objPoint[2]]];
ENDLOOP;
scaleFactor ← MAX[r,g,b];
IF scaleFactor > 1.0
THEN {
r ← r/scaleFactor; g ← g/scaleFactor; b ← b/scaleFactor;
};
colorshade ← ImagerColor.ColorFromRGB[[r,g,b]];
SetColor[dc, camera, colorshade];
Imager.MaskFillTrajectory[dc, outline];
};
quadPoly: Poly3d ← SVPolygon3d.CreatePoly[4];
ComputeShading: PROC [cameraNormal: Vector3d, cameraPolygon: Poly3d, artwork: Artwork, lightSources: LightSourceList, camera: Camera, hiddenLine: BOOL] RETURNS [colorshade: Imager.Color, backfacing: BOOL ← FALSE] = {
IF NOT hiddenLine THEN {
-- Use the first point to estimate the surface color.
r, g, b, scaleFactor: REAL;
worldNormal: Vector3d;
eyepoint, worldPoint3d: Point3d;
worldNormal ← SVMatrix3d.UpdateVectorWithInverse[SVCoordSys.FindWorldInTermsOf[camera.coordSys], cameraNormal];
worldPoint3d ← LocalToWorld[cameraPolygon[0], camera.coordSys]; -- poly[0] in World
eyepoint ← LocalToWorld[[0,0,camera.focalLength], camera.coordSys]; -- eyepoint in World
BEGIN
OPEN SVVector3d;
IF DotProduct[Sub[worldPoint3d, eyepoint], worldNormal] >=0 THEN RETURN[NIL, TRUE];
END;
IF artwork = NIL THEN RETURN[NIL, FALSE];
SELECT artwork.material
FROM
plastic => [r,g,b] ← SVShading.DiffuseAndSpecularReflectance[eyepoint, worldNormal, worldPoint3d, artwork.color, lightSources];
chalk => [r,g,b] ← SVShading.DiffuseReflectance[worldNormal, worldPoint3d, artwork.color, lightSources];
ENDCASE => ERROR;
scaleFactor ← MAX[r,g,b];
IF scaleFactor > 1.0
THEN {
r ← r/scaleFactor; g ← g/scaleFactor; b ← b/scaleFactor;
};
colorshade ← ImagerColor.ColorFromRGB[[r,g,b]];
}
ELSE colorshade ← Imager.black;
};
DrawAreaNormalAbsolute: PUBLIC PROC [dc: Imager.Context, cameraNormal: Vector3d, poly3d: Poly3d, artwork: Artwork, lightSources: LightSourceList, camera: Camera, localCamera: Matrix4by4, hiddenLine: BOOL, strokeColor: Color ← NIL] = {
-- Given a Poly3d, defined in local coordinates, find the corresponding CAMERA points, shade the polygon, and project it onto the image plane with the camera's projection.
objPoint: Point2d;
colorshade: Imager.Color;
cameraPolygon: Poly3d;
outline: ImagerPath.Trajectory;
backfacing: BOOL ← FALSE;
cameraPolygon ← CameraPolygon[poly3d, localCamera];
[colorshade, backfacing] ← ComputeShading[cameraNormal, cameraPolygon, artwork, lightSources, camera, hiddenLine];
IF backfacing THEN RETURN;
cameraPolygon ← SVPolygon3d.ClipPolyToPlanes[cameraPolygon, camera.clippingPlanes]; -- for now, suppress clipping
-- Build an Imager Trajectory.
objPoint ← DoProjection[cameraPolygon[0], camera];
objPoint ← SVCoordSys.CameraToScreen[objPoint, camera.screenCS]; -- puts into SCREEN coords
outline ← ImagerPath.MoveTo[[objPoint[1], objPoint[2]]];
-- Create a path which forms the edges of this area.
FOR i: NAT IN [1..cameraPolygon.len) DO
objPoint ← DoProjection[cameraPolygon[i], camera];
objPoint ← SVCoordSys.CameraToScreen[objPoint, camera.screenCS]; -- puts into SCREEN coords
outline ← ImagerPath.LineTo[outline, [objPoint[1], objPoint[2]]];
ENDLOOP;
DrawOutlineCached[dc, outline, colorshade, camera, hiddenLine, strokeColor];
};
DrawAreaAbsolute: PUBLIC PROC [dc: Imager.Context, cameraNormal: Vector3d, poly3d: Poly3d, artwork: Artwork, lightSources: LightSourceList, camera: Camera, hiddenLine: BOOL, strokeColor: Color ← NIL] = {
-- Assumes poly3d is already in CAMERA coordinates.
outline: ImagerPath.Trajectory;
objPoint: Point2d;
colorshade: Color;
backfacing: BOOL ← FALSE;
[colorshade, backfacing] ← ComputeShading[cameraNormal, poly3d, artwork, lightSources, camera, hiddenLine];
IF backfacing THEN RETURN;
poly3d ← SVPolygon3d.ClipPolyToPlanes[poly3d, camera.clippingPlanes]; -- for now, suppress clipping
-- Build an Imager Trajectory.
objPoint ← DoProjection[poly3d[0], camera];
objPoint ← SVCoordSys.CameraToScreen[objPoint, camera.screenCS]; -- puts into SCREEN coords
outline ← ImagerPath.MoveTo[[objPoint[1], objPoint[2]]];
FOR i: NAT IN [1..poly3d.len) DO
objPoint ← DoProjection[poly3d[i], camera];
objPoint ← SVCoordSys.CameraToScreen[objPoint, camera.screenCS]; -- puts into SCREEN coords
outline ← ImagerPath.LineTo[outline, [objPoint[1], objPoint[2]]];
ENDLOOP;
Imager.MaskFillTrajectory[dc, outline];
DrawOutlineCached[dc, outline, colorshade, camera, hiddenLine, strokeColor];
}; -- end of DrawAreaAbsolute
CacheContents: TYPE = REF CacheContentsObj;
CacheContentsObj: TYPE = RECORD [
lineObject: Imager.Object,
fillObject: Imager.Object
];
AlmostZeroVec: PROC [vec: Imager.VEC] RETURNS [BOOL] = {
epsilon: REAL = 0.5; -- half a pixel
RETURN[ABS[vec.x] < epsilon AND ABS[vec.y] < epsilon];
};
LookUpOutline:
PROC [a: ImagerPath.Trajectory]
RETURNS [lineObject: Imager.Object, fillObject: Imager.Object] = {
CompareShape: FunctionCache.CompareProc = {
-- CompareProc: TYPE ~ PROC [argument: Domain] RETURNS [good: BOOL];
OPEN Vectors2d;
b: ImagerPath.Trajectory ← NARROW[argument];
aPt, bPt, diff, thisDiff: Imager.VEC;
trajA, trajB: ImagerPath.Trajectory;
IF a.length # b.length THEN RETURN[FALSE];
trajA ← a; trajB ← b;
aPt ← trajA.lp;
bPt ← trajB.lp;
diff ← Sub[aPt, bPt];
trajA ← trajA.prev; trajB ← trajB.prev;
FOR i: INT IN [1..(a.length-1)] DO
thisDiff ← Add[Sub[trajB.lp, trajA.lp], diff];
IF NOT AlmostZeroVec[thisDiff] THEN RETURN[FALSE];
trajA ← trajA.prev; trajB ← trajB.prev;
ENDLOOP;
RETURN[TRUE];
};
value: FunctionCache.Range;
cacheContents: CacheContents;
maxPixels: REAL ← 10000.0;
ok: BOOL;
[value, ok] ← FunctionCache.Lookup[x: cache, compare: CompareShape, clientID: $SVPolygonObject];
IF ok THEN cacheContents ← NARROW[value] ELSE RETURN[NIL, NIL];
lineObject ← cacheContents.lineObject;
fillObject ← cacheContents.fillObject;
};
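-- Note on CompareShape above (illustration only): two trajectories compare equal when every corresponding pair of vertices differs by (nearly) the same screen vector, i.e. when one outline is a translate of the other to within about half a pixel per vertex. For example, a unit square whose last point is at [0,0] matches the same square translated to [100,50], but does not match a square of a different size.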
showBox: BOOL ← FALSE;
cache: FunctionCache.Cache;
OutlineBox:
PROC [outline: ImagerPath.Trajectory]
RETURNS [rect: Imager.Rectangle] = {
loX, loY, hiX, hiY: REAL;
loX ← hiX ← outline.lp.x;
loY ← hiY ← outline.lp.y;
FOR thisTraj: ImagerPath.Trajectory ← outline.prev, thisTraj.prev UNTIL thisTraj = NIL DO
loX ← MIN[thisTraj.lp.x, loX];
loY ← MIN[thisTraj.lp.y, loY];
hiX ← MAX[thisTraj.lp.x, hiX];
hiY ← MAX[thisTraj.lp.y, hiY];
ENDLOOP;
-- Allow for stroke width.
loX ← loX - 2.0;
loY ← loY - 2.0;
hiX ← hiX + 2.0;
hiY ← hiY + 2.0;
rect ← [loX, loY, hiX-loX, hiY-loY];
};
DrawOutlineCached: PROC [dc: Imager.Context, outline: ImagerPath.Trajectory, color: Imager.Color, camera: Camera, hiddenLine: BOOL, strokeColor: Color ← NIL] = {
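-- Descriptive note: for hiddenLine rendering the outline is drawn from cached Imager objects, a white fill followed by a black stroke, looked up by shape (see LookUpOutline) so that congruent polygons reuse the same objects and are merely repositioned; when hiddenLine is FALSE, OldDrawOutline is called directly.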
lineObject, fillObject: Imager.Object;
cacheContents: CacheContents;
IF NOT hiddenLine THEN {
OldDrawOutline[dc, outline, color, camera, strokeColor];
RETURN;
};
[lineObject, fillObject] ← LookUpOutline[outline];
IF lineObject = NIL THEN {
-- put it in the cache
clipR: Imager.Rectangle ← OutlineBox[outline];
IF showBox THEN {
Imager.SetGray[dc, 0.2];
Imager.MaskRectangle[dc, clipR];
};
lineObject ← NEW[Imager.ObjectRep ← [draw: OutlineDrawLines, clip: clipR, data: outline]];
fillObject ← NEW[Imager.ObjectRep ← [draw: OutlineDrawFill, clip: clipR, data: outline]];
cacheContents ← NEW[CacheContentsObj ← [lineObject: lineObject, fillObject: fillObject]];
FunctionCache.Insert[cache, outline, cacheContents, 60, $SVPolygonObject];
};
BEGIN
traj: ImagerPath.Trajectory ← NARROW[lineObject.data];
interactive: BOOL ← camera.quality = fast;
position: Imager.VEC ← Vectors2d.Sub[outline.lp, traj.lp];
Imager.SetColor[dc, Imager.white];
Imager.DrawObject[context: dc, object: fillObject, interactive: interactive, position: position];
Imager.SetColor[dc, Imager.black];
Imager.DrawObject[context: dc, object: lineObject, interactive: interactive, position: position];
END;
};
OutlineDrawFill:
PROC [self: Imager.Object, context: Imager.Context] = {
traj: ImagerPath.Trajectory ← NARROW[self.data];
Imager.SetStrokeWidth[context, 2.0];
Imager.SetStrokeEnd[context, round]; -- needed to make object caching work
Imager.SetStrokeJoint[context, round]; -- needed to make object caching work
Imager.MaskFillTrajectory[context, traj];
};
OutlineDrawLines:
PROC [self: Imager.Object, context: Imager.Context] = {
traj: ImagerPath.Trajectory ← NARROW[self.data];
Imager.SetStrokeWidth[context, 2.0];
Imager.SetStrokeEnd[context, round]; -- needed to make object caching work
Imager.SetStrokeJoint[context, round]; -- needed to make object caching work
Imager.MaskStrokeTrajectory[context, traj, TRUE];
};
OldDrawOutline: PROC [dc: Imager.Context, outline: ImagerPath.Trajectory, color: Imager.Color, camera: Camera, strokeColor: Color ← NIL] = {
IF color # NIL THEN {
SetColor[dc, camera, color];
Imager.MaskFillTrajectory[dc, outline];
};
IF strokeColor # NIL THEN {
Imager.SetStrokeWidth[dc, 2.0];
SetColor[dc, camera, strokeColor];
Imager.SetStrokeJoint[dc, bevel];
Imager.MaskStrokeTrajectory[dc, outline, TRUE];
};
};
DrawHorizonOfPlane:
PUBLIC
PROC [dc: Imager.Context, plane: Plane, camera: Camera, localCS: CoordSystem] = {
-- Given a plane, in local coordinates, convert the plane to CAMERA coordinates and then use DrawHorizonOfPlaneAbsolute.
plane ← SVMatrix3d.UpdatePlaneWithInverse[plane, SVCoordSys.FindCameraInTermsOf[localCS, camera.coordSys]];
DrawHorizonOfPlaneAbsolute[dc, plane, camera];
}; -- end of DrawHorizonOfPlane
DrawHorizonOfPlaneAbsolute:
PUBLIC
PROC [dc: Imager.Context, plane: Plane, camera: Camera] = {
-- Given a plane, in CAMERA coordinates, find two points, in CAMERA coordinates, which are on the projection of the horizon onto the screen. Convert to SCREEN coordinates and draw as an infinite line.
-- If the plane is parallel to the screen, then there is no horizon.
almostZero: REAL ← 1.0e-12;
t: REAL;
vanishCamera1, vanishCamera2, vanishScreen1, vanishScreen2: Point2d;
IF Abs[plane.A] <= almostZero AND Abs[plane.B] <= almostZero THEN RETURN;
IF Abs[plane.B] > Abs[plane.A]
THEN {
y1, y2: REAL;
-- The plane is more nearly horizontal than vertical.
-- Find two direction vectors which are parallel to the plane. Choose the first to be [-1,y,-1] and the second to be [1,y,-1]. If the plane is [A, B, C, D], then the first vector has the property that [A, B, C] dot [-1, y, -1] = 0. That is:
-- -A + B*y - C = 0, so y1 = (A+C)/B.
-- Likewise, A + B*y - C = 0, so y2 = (C-A)/B.
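-- Worked check (illustration only): for the ground plane y = 0, i.e. [A,B,C,D] = [0,1,0,0], y1 = y2 = 0, so both vanishing points have CAMERA y = 0 and the horizon is drawn at eye level across the screen, as expected.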
y1 ← (plane.A+plane.C)/plane.B;
y2 ← (plane.C-plane.A)/plane.B;
-- Find the first vanishing point.
t ← camera.focalLength; -- since we have chosen direction[3] to be -1
vanishCamera1[1] ← -t;
vanishCamera1[2] ← t*y1;
vanishScreen1 ← SVCoordSys.CameraToScreen[vanishCamera1, camera.screenCS];
-- Find the second vanishing point.
vanishCamera2[1] ← t;
vanishCamera2[2] ← t*y2;
vanishScreen2 ← SVCoordSys.CameraToScreen[vanishCamera2, camera.screenCS];
}
ELSE {
x1, x2: REAL;
-- The plane is more nearly vertical than horizontal. Proceed as above, but use the vectors [x, -1, -1] and [x, 1, -1]. The two equations are:
-- A*x - B - C = 0, so x1 = (B+C)/A, and
-- A*x + B - C = 0, so x2 = (C-B)/A.
x1 ← (plane.B+plane.C)/plane.A;
x2 ← (plane.C-plane.B)/plane.A;
-- Find the first vanishing point.
t ← camera.focalLength; -- since we have chosen direction[3] to be -1
vanishCamera1[1] ← t*x1;
vanishCamera1[2] ← -t;
vanishScreen1 ← SVCoordSys.CameraToScreen[vanishCamera1, camera.screenCS];
-- Find the second vanishing point.
vanishCamera2[1] ← t*x2;
vanishCamera2[2] ← t;
vanishScreen2 ← SVCoordSys.CameraToScreen[vanishCamera2, camera.screenCS];
};
DrawLineOnScreen[dc, vanishScreen1, vanishScreen2, camera];
}; -- end of DrawHorizonOfPlaneAbsolute
DrawInfiniteLine:
PUBLIC
PROC [dc: Imager.Context, p1, p2: Point3d, camera: Camera, localCS: CoordSystem, clippedBy: Imager.Rectangle] = {
-- p1 and p2 are assumed to be in local coordinates.
-- First find p1 and p2 in CAMERA coordinates:
cameraP1, cameraP2: Point3d;
screenP1, screenP2, vanishCamera, vanishScreen: Point2d;
almostZero: REAL ← 1.0e-12;
count: NAT;
ray: Ray2d;
params: ARRAY[1..2] OF REAL;
cameraP1 ← LocalToCamera[p1, localCS, camera.coordSys];
cameraP2 ← LocalToCamera[p2, localCS, camera.coordSys];
screenP1 ← DoProjection[cameraP1, camera];
screenP1 ← SVCoordSys.CameraToScreen[screenP1, camera.screenCS];
screenP2 ← DoProjection[cameraP2, camera];
screenP2 ← SVCoordSys.CameraToScreen[screenP2, camera.screenCS];
-- Next find the vanishing point. "Shoot a ray" with direction p2-p1 from the eyepoint and find its intersection with the screen. In CAMERA coordinates, the screen has equation z = 0 and the eyepoint is at [0,0,focalLength]. Hence, our ray has equation R[t] = [0,0,focalLength] + t*(p2-p1), so z(t) = focalLength + t*(p2[3]-p1[3]). The intersection occurs where z = 0, i.e. at t = -focalLength/(p2[3]-p1[3]). If p2[3] = p1[3], then there is no vanishing point.
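-- Worked check (illustration only): for a line receding straight away from the camera, p2-p1 = [0,0,-1] in CAMERA, so t = focalLength and the vanishing point is [0,0], where the camera axis meets the screen; for a line parallel to the screen, p2[3] = p1[3] and the ELSE arm below is taken instead.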
IF camera.projection = perspective
AND Abs[cameraP2[3]-cameraP1[3]] > almostZero
THEN {
-- there is a vanishing point
t: REAL;
t ← -camera.focalLength/(cameraP2[3]-cameraP1[3]);
vanishCamera[1] ← t*(cameraP2[1]-cameraP1[1]);
vanishCamera[2] ← t*(cameraP2[2]-cameraP1[2]);
vanishScreen ← SVCoordSys.CameraToScreen[vanishCamera, camera.screenCS];
-- The other point can be found by projecting p1 and p2 onto the screen and taking the ray from the vanishing point through whichever projection has the larger (less negative) z component in CAMERA; intersect that ray with the bounding box of our display context.
IF cameraP2[3] > cameraP1[3] THEN ray ← SVLines2d.CreateRayFromPoints[vanishScreen, screenP2]
ELSE ray ← SVLines2d.CreateRayFromPoints[vanishScreen, screenP1];
[count, params] ← SVLines2d.RayMeetsBox[ray, clippedBy.x, clippedBy.y, clippedBy.x + clippedBy.w, clippedBy.y + clippedBy.h];
IF count = 0 THEN RETURN;
t ← params[count]; -- use the highest value of t.
screenP2 ← SVLines2d.EvalRay[ray, t];
IF camera.quality = quality
THEN {
-- use round-ended strokes
line: ImagerPath.Trajectory;
line ← ImagerPath.MoveTo[[vanishScreen[1], vanishScreen[2]]];
line ← ImagerPath.LineTo[line, [screenP2[1], screenP2[2]]];
Imager.SetStrokeWidth[dc, 1];
Imager.SetStrokeEnd[dc, round];
Imager.MaskStrokeTrajectory[dc, line, FALSE];
}
ELSE {
-- not a quality camera. Just draw line segment.
Imager.MaskVector[dc, [vanishScreen[1], vanishScreen[2]], [screenP2[1], screenP2[2]]];
};
}
ELSE {
-- There is no vanishing point. Find the projections of p1 and p2 onto the screen. Call the line determined by the resulting points L. Find the intersection of L with the bounding rectangle of our display context.
-- Consider the ray r(t) = screenP1 + t*(screenP2-screenP1).
ray ← SVLines2d.CreateRayFromPoints[screenP1, screenP2];
[count, params] ← SVLines2d.LineRayMeetsBox[ray, clippedBy.x, clippedBy.y, clippedBy.x + clippedBy.w, clippedBy.y + clippedBy.h];
IF count = 2
THEN {
screenP1 ← SVLines2d.EvalRay[ray, params[1]];
screenP2 ← SVLines2d.EvalRay[ray, params[2]];
IF camera.quality = quality
THEN {
-- use round-ended strokes
line: ImagerPath.Trajectory;
line ← ImagerPath.MoveTo[[screenP1[1], screenP1[2]]];
line ← ImagerPath.LineTo[line, [screenP2[1], screenP2[2]]];
Imager.SetStrokeWidth[dc, 1];
Imager.SetStrokeEnd[dc, round];
Imager.MaskStrokeTrajectory[dc, line, FALSE];
}
ELSE {
-- not a quality camera. Just draw line segment.
Imager.MaskVector[dc, [screenP1[1], screenP1[2]], [screenP2[1], screenP2[2]]];
};
};
};
}; -- end of DrawInfiniteLine
Abs: PROC [r: REAL] RETURNS [REAL] = {
RETURN[IF r >= 0 THEN r ELSE -r];
};
DrawInfiniteLineAbsolute:
PUBLIC PROC [dc: Imager.Context, p1, p2: Point3d, camera: Camera, localCS: CoordSystem, clippedBy: Imager.Rectangle] = {};
-- p1 and p2 are assumed to be in CAMERA coordinates.
DrawInfinitePlaneWireFrame:
PUBLIC PROC [dc: Imager.Context, plane: Plane, camera: Camera, localCS: CoordSystem] = {
-- plane is assumed to be in local coordinates.
};
DrawInfinitePlaneWireFrameAbsolute:
PUBLIC PROC [dc: Imager.Context, plane: Plane, camera: Camera, localCS: CoordSystem] = {};
-- plane is assumed to be in CAMERA coordinates.
DrawInfinitePlaneShaded:
PUBLIC PROC [dc: Imager.Context, plane: Plane, artwork: Artwork, lightSources: LightSourceList, camera: Camera, localCS: CoordSystem] = {};
-- plane is assumed to be in local coordinates.
DrawInfinitePlaneShadedAbsolute:
PUBLIC PROC [dc: Imager.Context, plane: Plane, artwork: Artwork, lightSources: LightSourceList, camera: Camera] = {};
-- plane is assumed to be in CAMERA coordinates.
SetColor:
PRIVATE
PROC [dc: Imager.Context, camera: Camera, color: Color] = {
IF camera.colorFilm THEN Imager.SetColor [dc, color]
ELSE {
intensity: REAL ← ImagerColorPrivate.IntensityFromColor[NARROW[color]];
Imager.SetColor[dc, ImagerColor.ColorFromGray[1.0-intensity]]
};
};
DrawStyleToRope: PUBLIC PROC [drawStyle: DrawStyle] RETURNS [rope: Rope.ROPE] = {
SELECT drawStyle
FROM
wire => rope ← "wireframe";
shaded => rope ← "shaded";
hiddenLine => rope ← "hiddenLine";
rayCast => rope ← "rayCast";
normals => rope ← "normals";
ENDCASE => ERROR UpdateThisCode;
};
RopeToDrawStyle: PUBLIC PROC [rope: Rope.ROPE] RETURNS [drawStyle: DrawStyle, success: BOOL] = {
-- Does the inverse of DrawStyleToRope.
success ← TRUE;
SELECT TRUE FROM
Rope.Equal[rope, "wireframe", FALSE] => drawStyle ← wire;
Rope.Equal[rope, "shaded", FALSE] => drawStyle ← shaded;
Rope.Equal[rope, "hiddenLine", FALSE] => drawStyle ← hiddenLine;
Rope.Equal[rope, "rayCast", FALSE] => drawStyle ← rayCast;
Rope.Equal[rope, "normals", FALSE] => drawStyle ← normals;
ENDCASE => success ← FALSE;
};
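-- A hedged usage note (illustration only): RopeToDrawStyle["wireframe"] yields [wire, TRUE], while an unrecognized rope such as "solid" yields success = FALSE; DrawStyleToRope maps each DrawStyle back to the corresponding rope.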
ProjectionToRope: PUBLIC PROC [projection: Projection] RETURNS [rope: Rope.ROPE] = {
SELECT projection
FROM
perspective => rope ← "perspect";
orthogonal => rope ← "ortho";
ENDCASE => ERROR UpdateThisCode;
};
RopeToProjection: PUBLIC PROC [rope: Rope.ROPE] RETURNS [projection: Projection, success: BOOL] = {
-- Does the inverse of ProjectionToRope.
success ← TRUE;
SELECT TRUE FROM
Rope.Equal[rope, "perspect", FALSE] => projection ← perspective;
Rope.Equal[rope, "ortho", FALSE] => projection ← orthogonal;
ENDCASE => success ← FALSE;
};
UpdateThisCode: PUBLIC ERROR = CODE;
coordFont: ImagerFont.Font;
Init:
PROC [] = {
coordFont ← ImagerFont.Scale[ImagerFont.Find["xerox/TiogaFonts/Helvetica14B"], 1.0];
cache ← FunctionCache.Create[maxEntries: 100];
};
Init[];
END.