AnalyzeDumpLogsImpl.mesa
Copyright © 1992 by Xerox Corporation. All rights reserved.
Chauser, April 9, 1992 3:45 pm PDT
This program analyzes Unix dump logs to produce the statistics about incremental dumps used by the dump schedule generator. The current model for the size of an incremental dump is FSSize*a0 + FSSize*a1*(numberOfDaysSinceFullDump). The coefficients a0 and a1 are determined by linear programming from a set of constraints generated from an existing set of dumps, with each constraint expressed as a percentage of the actual size of the level 0 (full) dump.
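For illustration (made-up coefficients, not measured data): with a0 = 0.02 and a1 = 0.005 per day, a file system whose full dump occupies 1 GB would be predicted to produce an incremental of 1 GB * (0.02 + 0.005*10) = 70 MB ten days after its full dump.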
DIRECTORY
IO, Rope, RedBlackTree, BasicTime, PFS, Commander, CommanderOps, QPSolve;
AnalyzeDumpLogsImpl: CEDAR PROGRAM
IMPORTS IO, Rope, RedBlackTree, BasicTime, PFS, Commander, CommanderOps, QPSolve
~ BEGIN
ROPE: TYPE ~ Rope.ROPE;
STREAM: TYPE ~ IO.STREAM;
nullGMT: BasicTime.GMT ~ BasicTime.nullGMT;
LogContents: TYPE ~ REF LogContentsRep;
LogContentsRep: TYPE ~ RECORD [
tree: RedBlackTree.Table -- one FS per file system, keyed by name
];
FS: TYPE ~ REF FSRep;
FSRep: TYPE ~ RECORD [
name: ROPE, -- file system (drive) name
level0Size: INT ¬ 0, -- size of the most recent full (level 0) dump
nIncrs: INT ¬ 0, -- number of incremental dumps recorded
incrs: LIST OF Incr ¬ NIL -- one entry per incremental dump
];
Incr: TYPE ~ RECORD [
days: INT, -- days between the full dump and this incremental
size: INT -- size of the incremental dump
];
halfDay: INT ~ 12*60*60; -- 12 hours of seconds
wholeDay: INT ~ 2*halfDay; -- 24 hours of seconds
GetKey: RedBlackTree.GetKey ~ {
fs: FS ~ NARROW[data];
RETURN[fs.name];
};
Compare: RedBlackTree.Compare ~ {
key: ROPE ¬ NARROW[k];
fs: FS ~ NARROW[data];
RETURN [ Rope.Compare[key, fs.name] ];
};
ReadFilteredLog: PROC [in: STREAM, msgs: STREAM] RETURNS [lc: LogContents] ~ {
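-- The filtered log format, as assumed by the parsers below: each relevant line
-- begins with one of the keywords "drive", "level5", "level0", or "size",
-- followed respectively by a file system name, the time of an incremental
-- (level 5) dump, the time of the most recent full (level 0) dump, or an
-- integer dump size. Lines beginning with any other token are ignored.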
lineSelect: ROPE ¬ in.GetTokenRope[].token;
currentFS: FS;
currLine: INT ¬ 0;
lev5Date: BasicTime.GMT ¬ nullGMT; -- initialized so a size line before the first drive line is harmless
lev0Date: BasicTime.GMT ¬ nullGMT;
DoDriveLine: PROC ~ {
ExceptWhitespace: IO.BreakProc ~ {
RETURN[SELECT char FROM
IN [IO.NUL .. IO.SP] => sepr,
ENDCASE => other]
};
drive: ROPE ¬ in.GetTokenRope[ExceptWhitespace].token;
IF (currentFS ¬ NARROW[lc.tree.Lookup[drive]]) = NIL THEN {
currentFS ¬ NEW[FSRep ¬ [name: drive]];
lc.tree.Insert[currentFS, drive];
};
lev5Date ¬ nullGMT;
lev0Date ¬ nullGMT;
};
DoLev5Line: PROC ~ {
lev5Date ¬ in.GetTime[];
};
DoLev0Line: PROC ~ {
lev0Date ¬ in.GetTime[];
};
DoSizeLine: PROC ~ {
dumpSize: INT ¬ in.GetInt[];
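-- Attribute this size to whichever dump the preceding date lines describe:
-- if both a level 5 and a level 0 date have been seen since the last drive
-- line, it is an incremental's size; if only a level 0 date, it is the size
-- of the full dump itself.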
SELECT TRUE FROM
lev5Date # nullGMT AND lev0Date # nullGMT => {
daysSinceLev0: INT ¬ (BasicTime.Period[from~lev0Date, to~lev5Date]+halfDay)/wholeDay; -- rounded to the nearest whole day
currentFS.incrs ¬ CONS[[size~dumpSize, days~daysSinceLev0], currentFS.incrs];
currentFS.nIncrs ¬ currentFS.nIncrs+1;
};
lev0Date # nullGMT => {
currentFS.level0Size ¬ dumpSize;
};
ENDCASE => NULL;
};
{
ENABLE {
IO.EndOfStream => GOTO done;
};
lc ¬ NEW[LogContentsRep];
lc.tree ¬ RedBlackTree.Create[GetKey, Compare];
currLine ¬ 0;
DO
currLine ¬ currLine+1;
{
ENABLE {
IO.Error => {
msgs.PutF["Parsing error in line %g at position %g of input file.\n",
[integer[currLine]], [integer[in.GetIndex[]]]];
CONTINUE
};
};
SELECT TRUE FROM
Rope.Equal[lineSelect, "drive"] => DoDriveLine[];
Rope.Equal[lineSelect, "level5"] => DoLev5Line[];
Rope.Equal[lineSelect, "level0"] => DoLev0Line[];
Rope.Equal[lineSelect, "size"] => DoSizeLine[];
ENDCASE => NULL;
};
[] ¬ in.GetLineRope[]; -- discard the remainder of the current line
lineSelect ¬ in.GetTokenRope[].token;
ENDLOOP;
EXITS done => RETURN;
}
};
AnalyzeOneFS: PROC [fs: FS, out: STREAM] ~ {
IF fs.level0Size = 0 THEN {
out.PutF1["No full dump data for %g. Can't compute percentages for incrementals.\n", [rope[fs.name]]];
RETURN;
};
SELECT fs.nIncrs FROM
0 => {
out.PutF1["No incremental data for %g.\n", [rope[fs.name]]];
RETURN;
};
1 => {
out.PutF["%g %g (based on one incremental dump).\n", [rope[fs.name]],
[real[(100.0*fs.incrs.first.size)/fs.level0Size]] ];
RETURN;
};
ENDCASE => NULL;
{
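-- Cast the fit as a constrained optimization for QPSolve. Variables:
-- x[0] = a0, x[1] = a1, and one slack x[2+i] per incremental. Row i of A
-- encodes a0 + a1*days[i] - x[2+i] = 0, and lobd[2+i], the observed size as
-- a percentage of the level 0 dump, bounds x[2+i] from below, so the model
-- may overestimate but never underestimate an observed incremental.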
l: LIST OF Incr ¬ fs.incrs;
c: QPSolve.RVector ~ NEW[QPSolve.RVectorRep[2+fs.nIncrs] ];
A: QPSolve.Matrix ~ QPSolve.NewMatrix[fs.nIncrs, 2+fs.nIncrs];
lobd: QPSolve.RVector ~ NEW[QPSolve.RVectorRep[2+fs.nIncrs]];
x: QPSolve.RVector ~ NEW[QPSolve.RVectorRep[2+fs.nIncrs]];
iVar: QPSolve.IVector ~ NEW[QPSolve.IVectorRep[2+fs.nIncrs]];
minMaxDay: INT ¬ INT.LAST;
maxbd: REAL ¬ 0;
nFR: NAT ¬ 0;
nFX: NAT ¬ 2+fs.nIncrs-1;
sum: REAL ¬ 0;
c.n ¬ 2+fs.nIncrs;
lobd.n ¬ 2+fs.nIncrs;
x.n ¬ 2+fs.nIncrs;
iVar.n ¬ 2+fs.nIncrs;
FOR i: INT IN [0..fs.nIncrs) DO
A[i][0] ¬ 1;
A[i][1] ¬ l.first.days;
sum ¬ sum + l.first.days;
A[i][2+i] ¬ -1;
lobd[2+i] ¬ (100.0*l.first.size)/fs.level0Size;
IF lobd[2+i] >= maxbd THEN {
maxbd ¬ lobd[2+i];
IF l.first.days < minMaxDay THEN minMaxDay ¬ l.first.days;
};
c[2+i] ¬ 0;
l ¬ l.rest;
ENDLOOP;
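-- Build the initial point and free/fixed partition for the solver: every
-- slack starts at maxbd; the slack attaining the largest observed percentage
-- at the earliest such day starts in the fixed set (at its lower bound), and
-- the rest start free. iVar lists the free variables first, then the fixed.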
FOR i: INT IN [0..fs.nIncrs) DO
x[2+i] ¬ maxbd;
IF lobd[2+i] = maxbd AND A[i][1] = minMaxDay THEN {
iVar[nFX] ¬ 2+i;
nFX ¬ nFX-1;
}
ELSE {
IF lobd[2+i] = maxbd THEN lobd[2+i] ¬ 0.9999*lobd[2+i]; -- hack: perturb ties at the maximum to preserve the dimension of the problem
iVar[nFR] ¬ 2+i;
nFR ¬ nFR+1;
};
ENDLOOP;
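-- Start from the flat model a0 = maxbd, a1 = 0, with a0 free and a1 fixed.
-- The objective weights make c.x proportional to the total predicted
-- percentage SUM[i] (a0 + a1*days[i]), so the solution is the cheapest
-- linear envelope lying above all the observations.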
x[0] ¬ maxbd;
iVar[nFR] ¬ 0;
x[1] ¬ 0;
iVar[nFX] ¬ 1;
nFR ¬ nFX;
c[0] ¬ fs.nIncrs*1.0e3;
c[1] ¬ sum*1.0e3;
[] ¬ QPSolve.QPSolve[c, A, lobd, x, iVar, nFR];
out.PutFL["%g %4g %4g (based on %g incrementals).\n", LIST[[rope[fs.name]], [real[x[0]]], [real[x[1]]], [integer[fs.nIncrs]]]];
};
};
AnalyzeDumpLog: Commander.CommandProc ~ {
fileName: ROPE ~ CommanderOps.NextArgument[cmd];
IF fileName = NIL THEN RETURN [$Failure, "Usage: AnalyzeDumpLog <filteredLogFileName>"];
{
inStream: STREAM ¬ PFS.StreamOpen[PFS.PathFromRope[fileName]];
lc: LogContents ¬ ReadFilteredLog[inStream, cmd.out];
EachFS: RedBlackTree.EachNode ~ {
fs: FS ¬ NARROW[data];
AnalyzeOneFS[fs, cmd.out];
};
RedBlackTree.EnumerateIncreasing[lc.tree, EachFS];
IO.Close[inStream];
};
};
Commander.Register["AnalyzeDumpLog", AnalyzeDumpLog, "Analyze a filtered dump log to estimate incremental dump sizes"];
END.