%!
%%BoundingBox: (atend)
%%Pages: (atend)
%%DocumentFonts: (atend)
%%EndComments
%%BeginProlog
%
% FrameMaker postscript_prolog 3.0, for use with FrameMaker 3.0
% This postscript_prolog file is Copyright (c) 1986-1991 Frame Technology
% Corporation. All rights reserved. This postscript_prolog file may be
% freely copied and distributed in conjunction with documents created using
% FrameMaker.
%
% Known Problems:
% Due to bugs in Transcript, the 'PS-Adobe-' is omitted from line 1
/FMversion (3.0) def
% Set up Color vs. Black-and-White
% Probe the interpreter: FMPrintInColor ends up true only on a device that
% supports both colorimage and currentcolortransfer and that reports more
% than one process color.
/FMPrintInColor false def
/colorimage where { pop
/currentcolortransfer where { pop
/FMPrintInColor true def
% Ask statusdict how many process colors the device actually has; fall back
% to deviceinfo /Colors where processcolors is unavailable.
statusdict begin
/processcolors where {
pop processcolors 1 le {
/FMPrintInColor false def
} if
}{
/deviceinfo where {
pop deviceinfo /Colors known {
deviceinfo /Colors get 1 le {
/FMPrintInColor false def
} if
} if
} if
} ifelse
end
/currentcanvas where { % NeWSprint?
pop systemdict /separationdict known not {
/FMPrintInColor false def
} if
} if
} if
} if
% Uncomment this line to force b&w on color printer
% /FMPrintInColor false def
/FrameDict 195 dict def
% Very old interpreters may lack errordict; fake one so the probe below works.
systemdict /errordict known not {/errordict 10 dict def
errordict /rangecheck {stop} put} if
% The readline in 23.0 doesn't recognize cr's as nl's on AppleTalk
% Probe for the bug: temporarily trap rangecheck. A broken readline reads
% past the newline into the two data lines below, overruns the 5-byte
% string, and raises rangecheck, which sets FrameDict /bug true.
FrameDict /tmprangecheck errordict /rangecheck get put
errordict /rangecheck {FrameDict /bug true put} put
FrameDict /bug false put
mark
% Some PS machines read past the CR, so keep the following 3 lines together!
currentfile 5 string readline
00
0000000000
cleartomark
errordict /rangecheck FrameDict /tmprangecheck get put
% If readline is broken, replace it with a byte-at-a-time version that
% accepts both LF (10) and CR (13) as end-of-line.
FrameDict /bug get {
/readline {
/gstring exch def
/gfile exch def
/gindex 0 def
{
gfile read pop
dup 10 eq {exit} if
dup 13 eq {exit} if
gstring exch gindex exch put
/gindex gindex 1 add def
} loop
pop
gstring 0 gindex getinterval true
} def
} if
% FMVERSION: versionstring => -
% Warns (on the page and on the console via =) when the document's version
% string does not match this prolog's FMversion.
/FMVERSION {
FMversion ne {
/Times-Roman findfont 18 scalefont setfont
100 100 moveto
(FrameMaker version does not match postscript_prolog!)
dup =
show showpage
} if
} def
% FMLOCAL: name => -
% Declares a working variable inside FrameDict (initialized to 0).
/FMLOCAL {
FrameDict begin
0 def
end
} def
% Scratch variables used by the procedures that follow.
/gstring FMLOCAL
/gfile FMLOCAL
/gindex FMLOCAL
/orgxfer FMLOCAL
/orgproc FMLOCAL
/organgle FMLOCAL
/orgfreq FMLOCAL
/yscale FMLOCAL
/xscale FMLOCAL
/manualfeed FMLOCAL
/paperheight FMLOCAL
/paperwidth FMLOCAL
% FMDOCUMENT: xscale yscale paperwidth paperheight manualfeed #copies nfonts => -
% Per-document setup: allocates the font table, records page metrics, saves
% the device's original transfer function and halftone screen, then tries
% successively cruder ways to select the paper size.
/FMDOCUMENT {
array /FMfonts exch def
/#copies exch def
FrameDict begin
0 ne dup {setmanualfeed} if
/manualfeed exch def
/paperheight exch def
/paperwidth exch def
/yscale exch def
/xscale exch def
currenttransfer cvlit /orgxfer exch def
currentscreen cvlit /orgproc exch def
/organgle exch def /orgfreq exch def
setpapername
% Try tray selection first, then manual sizing, then setpageparams.
manualfeed {true} {papersize} ifelse
{manualpapersize} {false} ifelse
{desperatepapersize} if
end
} def
/pagesave FMLOCAL
/orgmatrix FMLOCAL
/landscape FMLOCAL
% FMBEGINPAGE: paperwidth paperheight landscapeflag => -
% Starts a page: snapshots VM, rotates for landscape, applies document scale.
/FMBEGINPAGE {
FrameDict begin
/pagesave save def
3.86 setmiterlimit
/landscape exch 0 ne def
landscape {
90 rotate 0 exch neg translate pop
}
{pop pop}
ifelse
xscale yscale scale
/orgmatrix matrix def
gsave
} def
% FMENDPAGE: - => -
% Undoes FMBEGINPAGE's save, then emits the page.
/FMENDPAGE {
grestore
pagesave restore
end
showpage
} def
% FMFONTDEFINE: index encodeflag fontname => -
% Re-encodes the font (DiacriticEncoding when flag is 0) and stores the
% result in the FMfonts array at the given index.
/FMFONTDEFINE {
FrameDict begin
findfont
ReEncode
1 index exch
definefont
FMfonts 3 1 roll
put
end
} def
% FMFILLS: count => -   (allocate the fill-value table)
/FMFILLS {
FrameDict begin
array /fillvals exch def
end
} def
% FMFILL: index value => -
% value is either a gray level or an 8x8 bit-pattern string (see X).
/FMFILL {
FrameDict begin
fillvals 3 1 roll put
end
} def
% FMNORMALIZEGRAPHICS: reset the graphics state to known defaults before
% imported (EPS / print-code) material is executed.
/FMNORMALIZEGRAPHICS {
newpath
0.0 0.0 moveto
1 setlinewidth
0 setlinecap
0 0 0 sethsbcolor
0 setgray
} bind def
/fx FMLOCAL
/fy FMLOCAL
/fh FMLOCAL
/fw FMLOCAL
/llx FMLOCAL
/lly FMLOCAL
/urx FMLOCAL
/ury FMLOCAL
% FMBEGINEPSF: angle llx lly urx ury fw fh fx fy => -
% Brackets an imported EPS: saves VM, disables showpage, then maps the EPS
% bounding box (llx,lly)-(urx,ury) onto the fw x fh frame at (fx, fy),
% rotated by angle.
/FMBEGINEPSF {
end
/FMEPSF save def
/showpage {} def
FMNORMALIZEGRAPHICS
[/fy /fx /fh /fw /ury /urx /lly /llx] {exch def} forall
fx fy translate
rotate
fw urx llx sub div fh ury lly sub div scale
llx neg lly neg translate
} bind def
% FMENDEPSF: restore VM and re-enter FrameDict after the EPS.
/FMENDEPSF {
FMEPSF restore
FrameDict begin
} bind def
FrameDict begin
% setmanualfeed: select manual feed via statusdict.
/setmanualfeed {
%%BeginFeature *ManualFeed True
statusdict /manualfeed true put
%%EndFeature
} def
% max / min: a b => max(a,b) / min(a,b)
/max {2 copy lt {exch} if pop} bind def
/min {2 copy gt {exch} if pop} bind def
% inch: n => n*72   (inches to points)
/inch {72 mul} def
% pagedimen: name width height => -
% Registers /papername if the document page is within 16 points of this size.
/pagedimen {
paperheight sub abs 16 lt exch
paperwidth sub abs 16 lt and
{/papername exch def} {pop} ifelse
} def
/papersizedict FMLOCAL
% setpapername: classify paperwidth x paperheight as a standard paper name
% (left as /unknown if nothing matches).
/setpapername {
/papersizedict 14 dict def
papersizedict begin
/papername /unknown def
/Letter 8.5 inch 11.0 inch pagedimen
/LetterSmall 7.68 inch 10.16 inch pagedimen
/Tabloid 11.0 inch 17.0 inch pagedimen
/Ledger 17.0 inch 11.0 inch pagedimen
/Legal 8.5 inch 14.0 inch pagedimen
/Statement 5.5 inch 8.5 inch pagedimen
/Executive 7.5 inch 10.0 inch pagedimen
/A3 11.69 inch 16.5 inch pagedimen
/A4 8.26 inch 11.69 inch pagedimen
/A4Small 7.47 inch 10.85 inch pagedimen
/B4 10.125 inch 14.33 inch pagedimen
/B5 7.16 inch 10.125 inch pagedimen
end
} def
% papersize: - => failedflag
% Invokes the tray+pagesize operator matching papername, guarded by stopped
% so an unsupported size on this device reports failure (true).
/papersize {
papersizedict begin
/Letter {lettertray letter} def
/LetterSmall {lettertray lettersmall} def
/Tabloid {11x17tray 11x17} def
/Ledger {ledgertray ledger} def
/Legal {legaltray legal} def
/Statement {statementtray statement} def
/Executive {executivetray executive} def
/A3 {a3tray a3} def
/A4 {a4tray a4} def
/A4Small {a4tray a4small} def
/B4 {b4tray b4} def
/B5 {b5tray b5} def
/unknown {unknown} def
papersizedict dup papername known {papername} {/unknown} ifelse get
end
% Execute inside statusdict under stopped; then pop any dictionaries the
% failed operator may have left on the dict stack.
/FMdicttop countdictstack 1 add def
statusdict begin stopped end
countdictstack -1 FMdicttop {pop end} for
} def
% manualpapersize: - => failedflag   (like papersize, without tray selection)
/manualpapersize {
papersizedict begin
/Letter {letter} def
/LetterSmall {lettersmall} def
/Tabloid {11x17} def
/Ledger {ledger} def
/Legal {legal} def
/Statement {statement} def
/Executive {executive} def
/A3 {a3} def
/A4 {a4} def
/A4Small {a4small} def
/B4 {b4} def
/B5 {b5} def
/unknown {unknown} def
papersizedict dup papername known {papername} {/unknown} ifelse get
end
stopped
} def
% desperatepapersize: last resort -- force page dimensions via setpageparams.
/desperatepapersize {
statusdict /setpageparams known
{
paperwidth paperheight 0 1
statusdict begin
{setpageparams} stopped pop
end
} if
} def
% savematrix / restorematrix: stash and restore the page CTM in orgmatrix.
/savematrix {
orgmatrix currentmatrix pop
} bind def
/restorematrix {
orgmatrix setmatrix
} bind def
/dmatrix matrix def
% dpi: device resolution in dots/inch, derived from the default CTM.
/dpi 72 0 dmatrix defaultmatrix dtransform
dup mul exch dup mul add sqrt def
% freq: halftone frequency -- nearest cell size that is a multiple of 8
% device pixels, targeting roughly dpi/18.75 lines per inch.
/freq dpi 18.75 div 8 div round dup 0 eq {pop 1} if 8 mul dpi exch div def
% sangle: screen angle compensating for the device's default rotation.
/sangle 1 0 dmatrix defaultmatrix dtransform exch atan def
% DiacriticEncoding: Macintosh-style text encoding used by FrameMaker
% documents (ASCII in 0-127, accented/typographic characters above).
/DiacriticEncoding [
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /space /exclam /quotedbl
/numbersign /dollar /percent /ampersand /quotesingle /parenleft
/parenright /asterisk /plus /comma /hyphen /period /slash /zero /one
/two /three /four /five /six /seven /eight /nine /colon /semicolon
/less /equal /greater /question /at /A /B /C /D /E /F /G /H /I /J /K
/L /M /N /O /P /Q /R /S /T /U /V /W /X /Y /Z /bracketleft /backslash
/bracketright /asciicircum /underscore /grave /a /b /c /d /e /f /g /h
/i /j /k /l /m /n /o /p /q /r /s /t /u /v /w /x /y /z /braceleft /bar
/braceright /asciitilde /.notdef /Adieresis /Aring /Ccedilla /Eacute
/Ntilde /Odieresis /Udieresis /aacute /agrave /acircumflex /adieresis
/atilde /aring /ccedilla /eacute /egrave /ecircumflex /edieresis
/iacute /igrave /icircumflex /idieresis /ntilde /oacute /ograve
/ocircumflex /odieresis /otilde /uacute /ugrave /ucircumflex
/udieresis /dagger /.notdef /cent /sterling /section /bullet
/paragraph /germandbls /registered /copyright /trademark /acute
/dieresis /.notdef /AE /Oslash /.notdef /.notdef /.notdef /.notdef
/yen /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/ordfeminine /ordmasculine /.notdef /ae /oslash /questiondown
/exclamdown /logicalnot /.notdef /florin /.notdef /.notdef
/guillemotleft /guillemotright /ellipsis /.notdef /Agrave /Atilde
/Otilde /OE /oe /endash /emdash /quotedblleft /quotedblright
/quoteleft /quoteright /.notdef /.notdef /ydieresis /Ydieresis
/fraction /currency /guilsinglleft /guilsinglright /fi /fl /daggerdbl
/periodcentered /quotesinglbase /quotedblbase /perthousand
/Acircumflex /Ecircumflex /Aacute /Edieresis /Egrave /Iacute
/Icircumflex /Idieresis /Igrave /Oacute /Ocircumflex /.notdef /Ograve
/Uacute /Ucircumflex /Ugrave /dotlessi /circumflex /tilde /macron
/breve /dotaccent /ring /cedilla /hungarumlaut /ogonek /caron
] def
% ReEncode: encodeflag font => font'
% Copies the font dict (omitting FID); when encodeflag is 0, installs
% DiacriticEncoding as the copy's Encoding. Called from FMFONTDEFINE.
/ReEncode {
dup
length
dict begin
{
1 index /FID ne
{def}
{pop pop} ifelse
} forall
0 eq {/Encoding DiacriticEncoding def} if
currentdict
end
} bind def
% graymode: true while the device's original halftone screen is installed.
/graymode true def
/bwidth FMLOCAL
/bpside FMLOCAL
/bstring FMLOCAL
/onbits FMLOCAL
/offbits FMLOCAL
/xindex FMLOCAL
/yindex FMLOCAL
/x FMLOCAL
/y FMLOCAL
% setpattern: patternstring bpside bwidth => -
% Installs a bit pattern as the halftone screen's spot function, then sets
% the gray so the pattern's off-bit fraction is painted (see X).
/setpattern {
/bwidth exch def
/bpside exch def
/bstring exch def
/onbits 0 def /offbits 0 def
freq sangle landscape {90 add} if
{/y exch def
/x exch def
% Map spot-function coordinates (-1..1) onto the pattern bitmap.
/xindex x 1 add 2 div bpside mul cvi def
/yindex y 1 add 2 div bpside mul cvi def
bstring yindex bwidth mul xindex 8 idiv add get
1 7 xindex 8 mod sub bitshift and 0 ne
{/onbits onbits 1 add def 1}
{/offbits offbits 1 add def 0}
ifelse
}
setscreen
{} settransfer
offbits offbits onbits add div FMsetgray
/graymode false def
} bind def
% grayness: gray => -
% Sets a flat gray; if a pattern screen is installed, first restores the
% device's original transfer function and screen.
/grayness {
FMsetgray
graymode not {
/graymode true def
orgxfer cvx settransfer
orgfreq organgle orgproc cvx setscreen
} if
} bind def
/HUE FMLOCAL
/SAT FMLOCAL
/BRIGHT FMLOCAL
/Colors FMLOCAL
% Define the color-dependent operators one way for color devices and a
% simpler grayscale way for black-and-white devices.
FMPrintInColor
{
/HUE 0 def
/SAT 0 def
/BRIGHT 0 def
% array of arrays Hue and Sat values for the separations [HUE BRIGHT]
/Colors
[[0 0 ] % black
[0 0 ] % white
[0.00 1.0] % red
[0.37 1.0] % green
[0.60 1.0] % blue
[0.50 1.0] % cyan
[0.83 1.0] % magenta
[0.16 1.0] % comment / yellow
] def
/BEGINBITMAPCOLOR {
BITMAPCOLOR} def
/BEGINBITMAPCOLORc {
BITMAPCOLORc} def
/BEGINBITMAPTRUECOLOR {
BITMAPTRUECOLOR } def
/BEGINBITMAPTRUECOLORc {
BITMAPTRUECOLORc } def
% K: colorindex => -   (select separation color; index 0 = black)
/K {
Colors exch get dup
0 get /HUE exch store
1 get /BRIGHT exch store
HUE 0 eq BRIGHT 0 eq and
{1.0 SAT sub setgray}
{HUE SAT BRIGHT sethsbcolor}
ifelse
} def
% FMsetgray: gray => -
% The gray level drives the saturation of the current separation color.
/FMsetgray {
/SAT exch 1.0 exch sub store
HUE 0 eq BRIGHT 0 eq and
{1.0 SAT sub setgray}
{HUE SAT BRIGHT sethsbcolor}
ifelse
} bind def
}
{
% Black-and-white versions: bitmaps render as gray, K is a no-op.
/BEGINBITMAPCOLOR {
BITMAPGRAY} def
/BEGINBITMAPCOLORc {
BITMAPGRAYc} def
/BEGINBITMAPTRUECOLOR {
BITMAPTRUEGRAY } def
/BEGINBITMAPTRUECOLORc {
BITMAPTRUEGRAYc } def
/FMsetgray {setgray} bind def
/K {
pop
} def
}
ifelse
% normalize: x y => x' y'   (snap a point to device pixel boundaries)
/normalize {
transform round exch round exch itransform
} bind def
% dnormalize: dx dy => dx' dy'   (snap a distance to device pixels)
/dnormalize {
dtransform round exch round exch idtransform
} bind def
% lnormalize: w => w'   (force an odd number of device pixels for line width)
/lnormalize {
0 dtransform exch cvi 2 idiv 2 mul 1 add exch idtransform pop
} bind def
% H: linewidth => -
/H {
lnormalize setlinewidth
} bind def
% Z: linecap => -
/Z {
setlinecap
} bind def
/fillvals FMLOCAL
% X: fillindex => -
% Applies a fill value from fillvals: a string becomes an 8x8 pattern
% screen (setpattern), a number a flat gray (grayness).
/X {
fillvals exch get
dup type /stringtype eq
{8 1 setpattern}
{grayness}
ifelse
} bind def
% V: even-odd fill of the current path, preserving the path.
/V {
gsave eofill grestore
} bind def
% N: stroke the current path.
/N {
stroke
} bind def
% Short path operators used by the page bodies.
/M {newpath moveto} bind def
/E {lineto} bind def
/D {curveto} bind def
/O {closepath} bind def
/n FMLOCAL
% L: xn yn ... x1 y1 n => -   (open polyline of n pixel-aligned points)
/L {
/n exch def
newpath
normalize
moveto
2 1 n {pop normalize lineto} for
} bind def
% Y: closed polyline (polygon).
/Y {
L
closepath
} bind def
/x1 FMLOCAL
/x2 FMLOCAL
/y1 FMLOCAL
/y2 FMLOCAL
/rad FMLOCAL
% R: x1 y1 x2 y2 => -   (build a rectangular path via Y)
/R {
/y2 exch def
/x2 exch def
/y1 exch def
/x1 exch def
x1 y1
x2 y1
x2 y2
x1 y2
4 Y
} bind def
% RR: x1 y1 x2 y2 rad => -   (rounded-rectangle path with corner radius rad)
/RR {
/rad exch def
normalize
/y2 exch def
/x2 exch def
normalize
/y1 exch def
/x1 exch def
newpath
x1 y1 rad add moveto
x1 y2 x2 y2 rad arcto
x2 y2 x2 y1 rad arcto
x2 y1 x1 y1 rad arcto
x1 y1 x1 y2 rad arcto
closepath
% Each arcto leaves four tangent coordinates; discard all 16.
16 {pop} repeat
} bind def
% C: x1 y1 x2 y2 => -
% Pops the clip back to the page (grestore/gsave pair from FMBEGINPAGE),
% then clips to the given rectangle.
/C {
grestore
gsave
R
clip
} bind def
/FMpointsize FMLOCAL
% F: fontindex => -   (select a font from FMfonts at FMpointsize)
/F {
FMfonts exch get
FMpointsize scalefont
setfont
} bind def
% Q: pointsize fontindex => -   (set size, then select font)
/Q {
/FMpointsize exch def
F
} bind def
% T: string x y => -   (plain show)
/T {
moveto show
} bind def
% RF: flipflag angle => -   (rotate, optionally mirror, for rotated text)
/RF {
rotate
0 ne {-1 1 scale} if
} bind def
% TF: rotated variant of T.
/TF {
gsave
moveto
RF
show
grestore
} bind def
% P: pad string x y => -   (show with pad added to each space, char 32)
/P {
moveto
0 32 3 2 roll widthshow
} bind def
% PF: rotated variant of P.
/PF {
gsave
moveto
RF
0 32 3 2 roll widthshow
grestore
} bind def
% S: track string x y => -   (show with track added after every character)
/S {
moveto
0 exch ashow
} bind def
% SF: rotated variant of S.
/SF {
gsave
moveto
RF
0 exch ashow
grestore
} bind def
% B: pad track string x y => -   (both space padding and tracking)
/B {
moveto
0 32 4 2 roll 0 exch awidthshow
} bind def
% BF: rotated variant of B.
/BF {
gsave
moveto
RF
0 32 4 2 roll 0 exch awidthshow
grestore
} bind def
% G: startangle endangle w h x y => -
% Filled elliptical arc: a unit-circle arc scaled to w x h at (x, y).
/G {
gsave
newpath
normalize translate 0.0 0.0 moveto
dnormalize scale
0.0 0.0 1.0 5 3 roll arc
closepath fill
grestore
} bind def
% A: stroked elliptical arc. The center is offset by half the scale values
% and the CTM is restored (restorematrix) before stroking so the pen is not
% distorted by the arc's scale.
/A {
gsave
savematrix
newpath
2 index 2 div add exch 3 index 2 div sub exch
normalize 2 index 2 div sub exch 3 index 2 div add exch
translate
scale
0.0 0.0 1.0 5 3 roll arc
restorematrix
stroke
grestore
} bind def
% Scratch variables for the bitmap and print-code machinery below.
/x FMLOCAL
/y FMLOCAL
/w FMLOCAL
/h FMLOCAL
/xx FMLOCAL
/yy FMLOCAL
/ww FMLOCAL
/hh FMLOCAL
/FMsaveobject FMLOCAL
/FMoptop FMLOCAL
/FMdicttop FMLOCAL
% BEGINPRINTCODE: bracket imported "live" PostScript code. Records the
% operand- and dict-stack depths plus a save object so ENDPRINTCODE can
% discard whatever the imported code leaves behind.
/BEGINPRINTCODE {
/FMdicttop countdictstack 1 add def
/FMoptop count 4 sub def
/FMsaveobject save def
userdict begin
/showpage {} def
FMNORMALIZEGRAPHICS
3 index neg 3 index neg translate
} bind def
% ENDPRINTCODE: pop stacks back to the recorded depths, restore VM.
/ENDPRINTCODE {
count -1 FMoptop {pop pop} for
countdictstack -1 FMdicttop {pop end} for
FMsaveobject restore
} bind def
% gn: - => n
% Reads a variable-length base-46 number from the data stream cf; bytes
% are offset by 32, and a byte below 46 terminates the number.
/gn {
0
{ 46 mul
cf read pop
32 sub
dup 46 lt {exit} if
46 sub add
} loop
add
} bind def
/str FMLOCAL
% cfs: - => str   (allocate an sl-byte string filled with val)
/cfs {
/str sl string def
0 1 sl 1 sub {str exch val put} for
str def
} bind def
% ic: decode table for FrameMaker's run-length image encoding. Each stream
% byte (offset by 32) indexes one procedure: raw hex runs (hx), literal
% run lengths, white runs (wh), black runs (bl) and fill-byte runs (fl);
% gn reads extended counts for long runs.
/ic [
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0223
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0223
0
{0 hx} {1 hx} {2 hx} {3 hx} {4 hx} {5 hx} {6 hx} {7 hx} {8 hx} {9 hx}
{10 hx} {11 hx} {12 hx} {13 hx} {14 hx} {15 hx} {16 hx} {17 hx} {18 hx}
{19 hx} {gn hx} {0} {1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12}
{13} {14} {15} {16} {17} {18} {19} {gn} {0 wh} {1 wh} {2 wh} {3 wh}
{4 wh} {5 wh} {6 wh} {7 wh} {8 wh} {9 wh} {10 wh} {11 wh} {12 wh}
{13 wh} {14 wh} {gn wh} {0 bl} {1 bl} {2 bl} {3 bl} {4 bl} {5 bl} {6 bl}
{7 bl} {8 bl} {9 bl} {10 bl} {11 bl} {12 bl} {13 bl} {14 bl} {gn bl}
{0 fl} {1 fl} {2 fl} {3 fl} {4 fl} {5 fl} {6 fl} {7 fl} {8 fl} {9 fl}
{10 fl} {11 fl} {12 fl} {13 fl} {14 fl} {gn fl}
] def
/sl FMLOCAL
/val FMLOCAL
/ws FMLOCAL
/im FMLOCAL
/bs FMLOCAL
/cs FMLOCAL
/len FMLOCAL
/pos FMLOCAL
% ms: sl => -
% (Re)allocate the decode buffers at sl bytes each: ws all 255s (white),
% im the scanline image buffer, bs all zeros (black), cs the input line.
/ms {
/sl exch def
/val 255 def
/ws cfs
/im cfs
/val 0 def
/bs cfs
/cs cfs
} bind def
400 ms
% ip: decode one compressed scanline: read a line of run codes from cf
% into cs, dispatch each byte through the ic table, accumulating the write
% position, and return the scanline string (is).
/ip {
is
0
cf cs readline pop
{ ic exch get exec
add
} forall
pop
} bind def
% wh: pos len => pos len   (copy a run from the white buffer ws into im)
/wh {
/len exch def
/pos exch def
ws 0 len getinterval im pos len getinterval copy pop
pos len
} bind def
% bl: pos len => pos len   (copy a run from the black buffer bs into im)
/bl {
/len exch def
/pos exch def
bs 0 len getinterval im pos len getinterval copy pop
pos len
} bind def
/s1 1 string def
% fl: pos len => pos len
% Fill a run of im with the next hex byte read from cf.
/fl {
/len exch def
/pos exch def
/val cf s1 readhexstring pop 0 get def
pos 1 pos len add 1 sub {im exch val put} for
pos len
} bind def
% hx: read a run of raw hex bytes from cf directly into the scanline.
/hx {
3 copy getinterval
cf exch readhexstring pop pop
} bind def
/h FMLOCAL
/w FMLOCAL
/d FMLOCAL
/lb FMLOCAL
/bitmapsave FMLOCAL
/is FMLOCAL
/cf FMLOCAL
% wbytes: w d => bytes   (bytes per row of w pixels at depth d = 1, 2 or 8)
/wbytes {
dup
8 eq {pop} {1 eq {7 add 8 idiv} {3 add 4 idiv} ifelse} ifelse
} bind def
% BEGINBITMAP*c: compressed bitmaps at depth 1, 8 and 2 with no extra setup.
/BEGINBITMAPBWc {
1 {} COMMONBITMAPc
} bind def
/BEGINBITMAPGRAYc {
8 {} COMMONBITMAPc
} bind def
/BEGINBITMAP2BITc {
2 {} COMMONBITMAPc
} bind def
% COMMONBITMAPc: render a run-length-compressed bitmap via the ip decoder.
% Transform operands come from the document stream; depth d and setup
% procedure r are pushed by the BEGINBITMAP*c wrapper.
/COMMONBITMAPc {
/r exch def
/d exch def
gsave
translate rotate scale /h exch def /w exch def
/lb w d wbytes def
% Grow the decode buffers if this image row is wider than the current sl.
sl lb lt {lb ms} if
/bitmapsave save def
r
/is im 0 lb getinterval def
ws 0 lb getinterval is copy pop
/cf currentfile def
w h d [w 0 0 h neg 0 h]
{ip} image
bitmapsave restore
grestore
} bind def
% BEGINBITMAP*: uncompressed (hex) variants.
/BEGINBITMAPBW {
1 {} COMMONBITMAP
} bind def
/BEGINBITMAPGRAY {
8 {} COMMONBITMAP
} bind def
/BEGINBITMAP2BIT {
2 {} COMMONBITMAP
} bind def
% COMMONBITMAP: render an uncompressed bitmap read as hex from the file.
/COMMONBITMAP {
/r exch def
/d exch def
gsave
translate rotate scale /h exch def /w exch def
/bitmapsave save def
r
/is w d wbytes string def
/cf currentfile def
w h d [w 0 0 h neg 0 h]
{cf is readhexstring pop} image
bitmapsave restore
grestore
} bind def
/proc1 FMLOCAL
/proc2 FMLOCAL
/newproc FMLOCAL
% Fmcc: proc1 proc2 => proc   (concatenate two procedure bodies)
/Fmcc {
/proc2 exch cvlit def
/proc1 exch cvlit def
/newproc proc1 length proc2 length add array def
newproc 0 proc1 putinterval
newproc proc1 length proc2 putinterval
newproc cvx
} bind def
% Remapped transfer tables for indexed-color images.
/ngrayt 256 array def
/nredt 256 array def
/nbluet 256 array def
/ngreent 256 array def
/gryt FMLOCAL
/blut FMLOCAL
/grnt FMLOCAL
/redt FMLOCAL
/indx FMLOCAL
/cynu FMLOCAL
/magu FMLOCAL
/yelu FMLOCAL
/k FMLOCAL
/u FMLOCAL
% colorsetup: build per-component transfer tables from the image palette
% arrays red/green/blue (defined by the document stream, not this prolog),
% applying the device's undercolor removal and black generation, then
% install them with setcolortransfer.
/colorsetup {
currentcolortransfer
/gryt exch def
/blut exch def
/grnt exch def
/redt exch def
0 1 255 {
/indx exch def
% Convert the palette entry to CMY, derive black (k) and undercolor (u).
/cynu 1 red indx get 255 div sub def
/magu 1 green indx get 255 div sub def
/yelu 1 blue indx get 255 div sub def
/k cynu magu min yelu min def
/u k currentundercolorremoval exec def
nredt indx 1 0 cynu u sub max sub redt exec put
ngreent indx 1 0 magu u sub max sub grnt exec put
nbluet indx 1 0 yelu u sub max sub blut exec put
ngrayt indx 1 k currentblackgeneration exec sub gryt exec put
} for
{255 mul cvi nredt exch get}
{255 mul cvi ngreent exch get}
{255 mul cvi nbluet exch get}
{255 mul cvi ngrayt exch get}
setcolortransfer
{pop 0} setundercolorremoval
{} setblackgeneration
} bind def
/tran FMLOCAL
% fakecolorsetup: monochrome fallback -- map the palette to luminance
% (integer weights 77/151/28, roughly .30R + .59G + .11B) and splice the
% lookup in front of the current transfer function.
/fakecolorsetup {
/tran 256 string def
0 1 255 {/indx exch def
tran indx
red indx get 77 mul
green indx get 151 mul
blue indx get 28 mul
add add 256 idiv put} for
currenttransfer
{255 mul cvi tran exch get 255.0 div}
exch Fmcc settransfer
} bind def
% BITMAPCOLOR: 8-bit indexed color image, hex data; palette applied via
% colorsetup's transfer tables (single component expanded to RGB).
/BITMAPCOLOR {
/d 8 def
gsave
translate rotate scale /h exch def /w exch def
/bitmapsave save def
colorsetup
/is w d wbytes string def
/cf currentfile def
w h d [w 0 0 h neg 0 h]
{cf is readhexstring pop} {is} {is} true 3 colorimage
bitmapsave restore
grestore
} bind def
% BITMAPCOLORc: as BITMAPCOLOR but with run-length-compressed data (ip).
/BITMAPCOLORc {
/d 8 def
gsave
translate rotate scale /h exch def /w exch def
/lb w d wbytes def
sl lb lt {lb ms} if
/bitmapsave save def
colorsetup
/is im 0 lb getinterval def
ws 0 lb getinterval is copy pop
/cf currentfile def
w h d [w 0 0 h neg 0 h]
{ip} {is} {is} true 3 colorimage
bitmapsave restore
grestore
} bind def
% BITMAPTRUECOLORc: 24-bit compressed image. gip/bip decode the green and
% blue planes -- assumed defined by the document stream; not in this prolog.
/BITMAPTRUECOLORc {
gsave
translate rotate scale /h exch def /w exch def
/bitmapsave save def
/is w string def
ws 0 w getinterval is copy pop
/cf currentfile def
w h 8 [w 0 0 h neg 0 h]
{ip} {gip} {bip} true 3 colorimage
bitmapsave restore
grestore
} bind def
% BITMAPTRUECOLOR: 24-bit image, three hex planes read per scanline.
/BITMAPTRUECOLOR {
gsave
translate rotate scale /h exch def /w exch def
/bitmapsave save def
/is w string def
/gis w string def
/bis w string def
/cf currentfile def
w h 8 [w 0 0 h neg 0 h]
{ cf is readhexstring pop }
{ cf gis readhexstring pop }
{ cf bis readhexstring pop }
true 3 colorimage
bitmapsave restore
grestore
} bind def
% BITMAPTRUEGRAYc: 24-bit compressed image rendered as gray via gray.
/BITMAPTRUEGRAYc {
gsave
translate rotate scale /h exch def /w exch def
/bitmapsave save def
/is w string def
ws 0 w getinterval is copy pop
/cf currentfile def
w h 8 [w 0 0 h neg 0 h]
{ip gip bip w gray} image
bitmapsave restore
grestore
} bind def
/ww FMLOCAL
/r FMLOCAL
/g FMLOCAL
/b FMLOCAL
/i FMLOCAL
% gray: r g b ww => r'
% Combines RGB row strings into luminance, in place in r, using NTSC
% weights .299/.587/.114.
/gray {
/ww exch def
/b exch def
/g exch def
/r exch def
0 1 ww 1 sub { /i exch def r i get .299 mul g i get .587 mul
b i get .114 mul add add r i 3 -1 roll floor cvi put } for
r
} bind def
% BITMAPTRUEGRAY: 24-bit hex image rendered as gray.
/BITMAPTRUEGRAY {
gsave
translate rotate scale /h exch def /w exch def
/bitmapsave save def
/is w string def
/gis w string def
/bis w string def
/cf currentfile def
w h 8 [w 0 0 h neg 0 h]
{ cf is readhexstring pop
cf gis readhexstring pop
cf bis readhexstring pop w gray} image
bitmapsave restore
grestore
} bind def
% Indexed-color images on monochrome devices: luminance via fakecolorsetup.
/BITMAPGRAY {
8 {fakecolorsetup} COMMONBITMAP
} bind def
/BITMAPGRAYc {
8 {fakecolorsetup} COMMONBITMAPc
} bind def
/ENDBITMAP {
} bind def
end
/ALDsave FMLOCAL
% ALDmatrix: the CTM captured when this prolog was executed; used to run
% imported ("ALD") material in that original coordinate system.
/ALDmatrix matrix def ALDmatrix currentmatrix pop
% StartALD / InALD / DoneALD bracket imported material: save VM and the
% current matrix, switch to the prolog-time matrix, later restore both.
/StartALD {
/ALDsave save def
savematrix
ALDmatrix setmatrix
} bind def
/InALD {
restorematrix
} bind def
/DoneALD {
ALDsave restore
} bind def
%%EndProlog
%%BeginSetup
(3.0) FMVERSION
% FMDOCUMENT operands: xscale yscale paperwidth paperheight manualfeed
% #copies fontcount -- Letter portrait, one copy, three fonts.
1 1 612 792 0 1 8 FMDOCUMENT
0 0 /Times-Bold FMFONTDEFINE
1 0 /Times-Roman FMFONTDEFINE
2 0 /Times-Italic FMFONTDEFINE
% Fill table (used by X): numeric entries are gray levels, hex strings are
% 8x8 bit patterns; unlisted indices are left undefined.
32 FMFILLS
0 0 FMFILL
1 0.1 FMFILL
2 0.3 FMFILL
3 0.5 FMFILL
4 0.7 FMFILL
5 0.9 FMFILL
6 0.97 FMFILL
7 1 FMFILL
8 <0f1e3c78f0e1c387> FMFILL
9 <0f87c3e1f0783c1e> FMFILL
10 FMFILL
11 FMFILL
12 <8142241818244281> FMFILL
13 <03060c183060c081> FMFILL
14 <8040201008040201> FMFILL
16 1 FMFILL
17 0.9 FMFILL
18 0.7 FMFILL
19 0.5 FMFILL
20 0.3 FMFILL
21 0.1 FMFILL
22 0.03 FMFILL
23 0 FMFILL
24 FMFILL
25 FMFILL
26 <3333333333333333> FMFILL
27 <0000ffff0000ffff> FMFILL
28 <7ebddbe7e7dbbd7e> FMFILL
29 FMFILL
30 <7fbfdfeff7fbfdfe> FMFILL
%%EndSetup
%%Page: "1" 1
%%BeginPaperSize: Letter
%%EndPaperSize
% Page body generated by FrameMaker. Operators: Q/F select font+size,
% T/P place text, R builds a rectangle, X selects a fill, K a color, V fills.
612 792 0 FMBEGINPAGE
72 18 540 720 R
7 X
0 K
V
0 24 Q
0 X
(INFORMATION COMPRESSION AND) 95.34 474 T
(LOGIC) 265.99 448 T
0 18 Q
(J Gerard Wolff) 246.51 406 T
1 12 Q
(October 1995) 273.17 380 T
1 14 Q
-1.11 (School of Electronic Engineering and Computer Systems, University of Wales, Dean) 72 81.67 P
(Street, Bangor, Gwynedd, LL57 1UT, UK. Tel: +44 1248 382691. E-mail:) 72 65.67 T
(gerry@sees.bangor.ac.uk. Fax: +44 1248 361429. Web: http://) 72 49.67 T
(www.sees.bangor.ac.uk/~gerry.) 72 33.67 T
FMENDPAGE
%%EndPage: "1" 2
%%Page: "2" 2
% Generated page body: header rectangle with page number, then the
% abstract text (T = plain show, P = show with space padding).
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
1 12 Q
0 X
(- 2 -) 296 757 T
72 18 540 720 R
7 X
V
0 F
0 X
(Abstract) 72 712 T
1 F
0.77 (The purpose of this article is to present and discuss some of the meta-theoretical considerations) 72 688 P
0.81 (which provide some rationale for the idea that logic may usefully be understood as information) 72 672 P
-0.68 (compression \050IC\051. This article may be seen as an extended introduction to the accompanying article) 72 656 P
([23] which presents some relatively concrete proposals in this area.) 72 640 T
2.23 (This article first reviews some concepts associated with information and the compression of) 72 614 P
1.18 (information. Then it considers some aspects of human perception and cognition which may be) 72 598 P
1.03 (understood as IC and which appear to be relevant to understanding the nature of logic. Next is) 72 582 P
0.71 (considered some interconnections amongst concepts of randomness, redundancy, \050inductive and) 72 566 P
0.67 (deductive\051 inference and IC. Finally, some features of logic are considered which seem to point) 72 550 P
(more directly to the idea that IC may provide a unifying theme for understanding logic.) 72 534 T
FMENDPAGE
%%EndPage: "2" 3
%%Page: "3" 3
% Generated page body: section 1 text; near the bottom, C clips to the
% footnote area, L/N draw the footnote separator rule, then footnote text.
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
1 12 Q
0 X
(- 3 -) 296 757 T
72 18 540 720 R
7 X
V
0 F
0 X
(1) 72 712 T
(Introduction) 108 712 T
1 F
-0.44 (In every field of enquiry, there is a continuing need to integrate and rationalise concepts developed) 72 683 P
-0.7 (in the field or its sub-fields, to create and refine one or more theories aiming to maximise coherence) 72 667 P
(and simplicity in the field as a whole.) 72 651 T
2.49 (In logic and mathematics this tradition is exemplified classically by work aiming to reduce) 72 625 P
0.77 (mathematics to logic \050Frege, Hilbert, Russell and Whitehead\051, to reduce logic to number theory) 72 609 P
(\050Skolem, G\232del\051 or to model logic within set theory \050Tarski, Bourbaki school\051.) 72 593 T
0.6 (This article and the accompanying article [23] continue in this tradition, presenting the idea that) 72 567 P
1.25 (information compression \050IC\051 and associated concepts may provide a framework within which) 72 551 P
0.54 (concepts in \324logic\325) 72 535 P
1 10 Q
0.45 (1) 162.4 539.8 P
1 12 Q
0.54 ( may usefully be understood. Given the close relationship between logic and) 167.4 535 P
(mathematics, it appears that much of this thinking may also be applied to mathematics.) 72 519 T
0.34 (Although concepts of compression and simplification are recognised, for example, in connection) 72 493 P
-0.22 (with the need to avoid replication of steps in the proving of theorems [4, Section 4.5] and the need) 72 477 P
0.04 (to simplify intermediate structures created as a proof proceeds [4, Section 4.4; 9, Section 4.1], the) 72 461 P
0.24 (possibility that IC might have fundamental significance as a unifying concept in a general theory) 72 445 P
(of logic seems to be not widely recognised.) 72 429 T
0.77 (The accompanying article presents some examples intended to illustrate the potential of IC as a) 72 403 P
0.02 (unifying theme by showing in a relatively concrete way how some familiar concepts in logic may) 72 387 P
(be understood in those terms.) 72 371 T
0.77 (The purpose of this article is to present and discuss some of the meta-theoretical considerations) 72 345 P
1.62 (which provide some rationale for the proposals and some justification for pursuing what may) 72 329 P
(otherwise seem an outlandish idea.) 72 313 T
2 F
(1.1) 72 287 T
(Background) 108 287 T
1 F
(This work is part of a programme of research [24-36] developing the \324SP\325 conjecture that:) 72 263 T
2 F
4.58 (All kinds of computing and formal reasoning may usefully be understood as) 108 237 P
(information compression by pattern matching, unification) 108 221 T
2 10 Q
(2) 386 225.8 T
2 12 Q
( and search) 391 221 T
1 F
0.85 (This research is motivated partly by the perceived need for rationalisation and simplification of) 72 195 P
-0.42 (concepts in computing and partly by the more practical consideration that this line of thinking may) 72 179 P
0.73 (provide a foundation for developing a \324new generation\325 computing system with more flexibility) 72 163 P
72 130 540 144.98 C
72 130 540 144.98 R
7 X
0 K
V
81 142.96 225 142.96 2 L
V
0.5 H
2 Z
0 X
N
0 0 612 792 C
1 12 Q
0 X
0 K
-0.2 (1. In these two articles, the term \324logic\325 is taken to cover the kinds of ideas discussed in [1,) 90 122 P
(8, 10].) 90 106 T
1.26 (2.) 90 90 P
1.26 (Unless otherwise speci\336ed, the term) 103.26 90 P
2 F
1.26 (uni\336cation) 286.2 90 P
1 F
1.26 ( in these two articles means a simple) 337.54 90 P
0.63 (mer) 90 74 P
0.63 (ging of matching patterns, close to the non-technical meaning of the word. Although) 108.44 74 P
-0.35 (there is some risk of confusion with the more complicated concept of \324uni\336cation\325 in logic,) 90 58 P
2.15 (the term has been adopted in the SP programme because no other word captures the) 90 42 P
(intended meaning so well.) 90 26 T
FMENDPAGE
%%EndPage: "3" 4
%%Page: "4" 4
% Generated page body: sections 1.2 and 2 text (Q/F fonts, T/P text shows,
% R/X/V background fills).
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
1 12 Q
0 X
(- 4 -) 296 757 T
72 18 540 720 R
7 X
V
0 X
(and \324intelligence\325 than conventional systems.) 72 712 T
-0.17 (Until recently, the main focus of the programme has been developing this conjecture in relation to) 72 686 P
1.74 (aspects of computing such as unsupervised learning, best-match information retrieval, \324fuzzy\325) 72 670 P
1.19 (pattern recognition, and others. The possibility that \324logic\325 in some sense might fall within the) 72 654 P
-0.22 (scope of the conjecture has been kept clearly in view but only a few tentative steps had been taken) 72 638 P
(in that area \050see, for example, [35, Chapter 5]\051.) 72 622 T
-0.53 (Recent insights [24-28] have now clarified a number of relevant issues and a much clearer path can) 72 596 P
-0.38 (now be seen towards the assimilation and integration of concepts from logic within the developing) 72 580 P
(framework of ideas.) 72 564 T
2 F
(1.2) 72 538 T
(Why IC?) 108 538 T
1 F
3.54 (The term \324information compression\325 \050or \324data compression\325\051 is commonly associated with) 72 514 P
-0.47 (computer programs designed to facilitate the transmission, processing or storage of information by) 72 498 P
(reducing its size. But IC is significant in \050at least\051 two other areas:) 72 482 T
(\245) 72 456 T
(The workings of brains and nervous systems.) 108 456 T
(\245) 72 430 T
(Inductive inference and the philosophy of science.) 108 430 T
1.55 (And of course, since the concept of IC includes the concept of \324information\325, there is a close) 72 404 P
(relation between IC and theories of information.) 72 388 T
0.08 (Part of the motivation for exploring the idea that IC may provide a unifying theme in logic is that) 72 362 P
0.39 (this theme suggests the additional possibility of integrating concepts from logic within a broader) 72 346 P
0.21 (theoretical framework which includes theories of information, inductive inference, philosophy of) 72 330 P
(science, and brain science. Some of these considerations are discussed in Sections 3, 4 and 5.) 72 314 T
0 F
(2) 72 278 T
(Information and Compression of Information) 108 278 T
1 F
0.8 (As an introduction to what follows, this section presents, in outline, some basic ideas related to) 72 249 P
-0.29 (concepts of information and IC. In this section and later, the implicit subject of much discussion is) 72 233 P
(some given body of information which, for the sake of brevity, will be referred to as \324) 72 217 T
2 F
(I) 483.25 217 T
1 F
(\325.) 487.25 217 T
2 F
(2.1) 72 191 T
(Shannon\325s Information Theory) 108 191 T
1 F
2.76 (In Shannon\325s Information Theory [17], information is viewed as a pattern of discriminable) 72 167 P
0.62 (differences distributed in one or more dimensions. One form of information which accords with) 72 151 P
-0.71 (this view is any kind of continuously-varying wave. But the advent of digital communications, CD-) 72 135 P
0.57 (ROMS and so on has now made familiar the idea that any wave can be translated with arbitrary) 72 119 P
1.39 (precision into a pattern of atomic symbols drawn with replacement from a finite \324alphabet\325 of) 72 103 P
-0.58 (symbol types \050often \3240\325 and \3241\325\051, and) 72 87 P
2 F
-0.58 (vice versa) 247.87 87 P
1 F
-0.58 (. In this discussion, it will be convenient to consider) 295.6 87 P
-0.52 (cases where) 72 71 P
2 F
-0.52 (I) 131.6 71 P
1 F
-0.52 ( is a one-dimensional sequence of symbols. Unless otherwise stated, the alphabet will) 135.6 71 P
(be assumed to be \3240\325 and \3241\325.) 72 55 T
3.36 (If, at a given location in) 72 29 P
2 F
3.36 (I) 209.8 29 P
1 F
3.36 (, the symbol types in the alphabet are equi-probable, then the) 213.79 29 P
FMENDPAGE
%%EndPage: "4" 5
%%Page: "5" 5
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
1 12 Q
0 X
(- 5 -) 296 757 T
72 18 540 720 R
7 X
V
0 X
1.67 (communicative value of the symbol which actually appears in that location is at a theoretical) 72 712 P
-0.45 (maximum. If symbol types are equi-probable at all locations in) 72 696 P
2 F
-0.45 (I) 372.49 696 P
1 F
-0.45 ( then) 376.49 696 P
2 F
-0.45 (I) 402.26 696 P
1 F
-0.45 ( is considered to be) 406.25 696 P
2 F
-0.45 (random) 499.67 696 P
1 F
-0.45 (.) 537 696 P
(In terms of the theory, its communicative value is maximal.) 72 680 T
0.12 (If symbol types are not equi-probable in any part of) 72 654 P
2 F
0.12 (I) 323.77 654 P
1 F
0.12 ( then) 327.76 654 P
2 F
0.12 (I) 354.66 654 P
1 F
0.12 ( is not random and its communicative) 358.65 654 P
1.48 (value is less than the theoretical maximum. The difference between the actual communicative) 72 638 P
-0.18 (value of) 72 622 P
2 F
-0.18 (I) 113.63 622 P
1 F
-0.18 ( and the theoretical maximum is the) 117.62 622 P
2 F
-0.18 (redundancy) 291.34 622 P
1 F
-0.18 ( in) 347.99 622 P
2 F
-0.18 (I) 362.97 622 P
1 F
-0.18 (. In an extreme case, all the symbols) 366.96 622 P
0.67 (are the same \050e.g., \324000000000 ...\325 or \3241111111111 ...\325\051 and, apart from the first symbol and the) 72 606 P
(length of the sequence,) 72 590 T
2 F
(I) 185.64 590 T
1 F
( is wholely redundant.) 189.64 590 T
2 F
(2.1.1) 72 564 T
(Absolute and Contextual Probabilities) 108 564 T
1 F
0.2 (In the simplest analysis, the probability of each symbol type at any location in) 72 540 P
2 F
0.2 (I) 452.43 540 P
1 F
0.2 ( can be estimated) 456.42 540 P
(by counting the number of occurrences of each of the symbol types in) 72 524 T
2 F
(I) 410.27 524 T
1 F
(.) 414.26 524 T
0.22 (But it has been recognised from the beginning of work in this area that there are often contextual) 72 498 P
2.24 (constraints amongst symbols in) 72 482 P
2 F
2.24 (I) 234.99 482 P
1 F
2.24 ( which mean that the simplest analysis is likely to give an) 238.98 482 P
0.04 (inaccurate measure of the relevant probabilities. In English, for example, it is well known that the) 72 466 P
0.11 (probability of the letter \324u\325 to the right of the letter \324q\325 is almost 1. In that context, the letter \324u\325 is) 72 450 P
(almost completely redundant.) 72 434 T
4.8 (Assessing contextual probabilities of symbols is more difficult than estimating absolute) 72 408 P
1.59 (probabilities mainly because of uncertainty about what the \324context\325 of any symbol might be.) 72 392 P
2.15 (However, as discussed elsewhere [29, 34], it is possible to get a handle on this problem by) 72 376 P
0.58 (generalising the concept of \324symbol\325 to include sequences of one) 72 360 P
2 F
0.58 (or more) 392.43 360 P
1 F
0.58 ( basic symbols and, in) 431.34 360 P
0.44 (effect, assimilating the notion of \324context\325 within this generalised concept of \324symbol\325. To avoid) 72 344 P
2.07 (confusion, the generalised concept will be referred to as a) 72 328 P
2 F
2.07 (pattern) 372.32 328 P
1 F
2.07 (. The term) 406.98 328 P
2 F
2.07 (symbol) 465.86 328 P
1 F
2.07 ( will be) 499.85 328 P
(reserved for basic or \324atomic\325 symbols as normally understood.) 72 312 T
0.37 (Given this generalisation, contextual probabilities for symbols give way to absolute probabilities) 72 286 P
1.05 (of patterns. As with symbols, these probabilities may be estimated by counting the numbers of) 72 270 P
0.59 (occurrences of patterns in) 72 254 P
2 F
0.59 (I) 200.98 254 P
1 F
0.59 (. A point which deserves emphasis in this connection is that, while a) 204.97 254 P
-0.46 (pattern is simply a sequence of symbols, the) 72 238 P
2 F
-0.46 (instances) 283.31 238 P
1 F
-0.46 ( of any pattern as they appear in) 327.97 238 P
2 F
-0.46 (I) 480.93 238 P
1 F
-0.46 ( include the) 484.92 238 P
0.71 (many cases where the symbols in the given pattern are not contiguous in) 72 222 P
2 F
0.71 (I) 432.18 222 P
1 F
0.71 (.) 436.17 222 P
1 10 Q
0.59 (3) 439.17 226.8 P
1 12 Q
0.71 ( Naturally, if one is) 444.17 222 P
1.18 (counting the number of occurrences of any pattern, one would consider only) 72 206 P
2 F
1.18 (disjoint) 455.81 206 P
1 F
1.18 ( instances) 491.82 206 P
(which do not have any symbol instances in common.) 72 190 T
0.47 (The frequency of occurrence of any pattern within a random body of information depends on its) 72 164 P
-0.45 (size: small patterns repeat more frequently than large ones. When) 72 148 P
-0.45 (frequencies and probabilities are) 385.07 148 P
0.92 (not balanced:) 72 132 P
2 F
0.92 (a pattern which repeats more frequently in I than other patterns of the same size) 141.16 132 P
72 98 540 112.98 C
72 98 540 112.98 R
7 X
0 K
V
81 110.96 225 110.96 2 L
V
0.5 H
2 Z
0 X
N
0 0 612 792 C
1 12 Q
0 X
0 K
0.47 (3. In this article and the accompanying one [23], the term) 90 90 P
2 F
0.47 (substring) 373.47 90 P
1 F
0.47 ( means a sequence of) 418.15 90 P
1.95 (symbols within a larger sequence where the symbols are contiguous within the larger) 90 74 P
0.81 (sequence. The term) 90 58 P
2 F
0.81 (subsequence) 189.08 58 P
1 F
0.81 ( means a sequence of symbols within a larger sequence) 249.73 58 P
1.26 (where the symbols are) 90 42 P
2 F
1.26 (not) 205.67 42 P
1 F
1.26 ( necessarily contiguous within the larger sequence. The term) 221 42 P
2 F
(pattern) 90 26 T
1 F
( means any sequence, string, substring or subsequence of symbols.) 124.67 26 T
FMENDPAGE
%%EndPage: "5" 6
%%Page: "6" 6
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
1 12 Q
0 X
(- 6 -) 296 757 T
72 18 540 720 R
7 X
V
2 F
0 X
1.72 (represents redundancy in I) 72 712 P
1 F
1.72 (. Equivalently,) 206.12 712 P
2 F
1.72 (a repeating pattern in I which is bigger than other) 282.89 712 P
(patterns of the same frequency represents redundancy in I) 72 696 T
1 F
(.) 351.29 696 T
(A fuller discussion of these points may be found in [29].) 72 670 T
2 F
(2.2) 72 644 T
(IC by Pattern Matching, Unification and Search) 108 644 T
1 F
-0.66 (IC is largely a matter of extracting redundancy from) 72 620 P
2 F
-0.66 (I) 319.65 620 P
1 F
-0.66 ( although it can sometimes also be convenient) 323.65 620 P
0.38 (to sacrifice some of the non-redundant information in) 72 604 P
2 F
0.38 (I) 335.03 604 P
1 F
0.38 ( \050to achieve more compaction or to speed) 339.02 604 P
(up processing\051.) 72 588 T
-0.57 (As indicated in Section 2.1, there is an intimate relation between Shannon\325s concept of redundancy) 72 562 P
1.62 (and concepts of probability estimated from frequencies of occurrence and sizes of patterns of) 72 546 P
2.03 (symbols. But \324frequency of occurrence\325 implies repetition of information: redundancy in) 72 530 P
2 F
2.03 (I) 522.97 530 P
1 F
2.03 ( is) 526.96 530 P
1.5 (information which repeats what is already present in) 72 514 P
2 F
1.5 (I) 338.65 514 P
1 F
1.5 ( and is thus unnecessary in terms of its) 342.65 514 P
2.83 (communicative value \050although redundancy has well-known uses in error correction and in) 72 498 P
(increasing the reliability of systems\051.) 72 482 T
2.26 (The idea that redundancy in) 72 456 P
2 F
2.26 (I) 220.26 456 P
1 F
2.26 ( equates with the repetition of patterns in) 224.26 456 P
2 F
2.26 (I) 441.64 456 P
1 F
2.26 ( provides a key to) 445.64 456 P
(understanding how redundancy may be detected in) 72 440 T
2 F
(I) 318.95 440 T
1 F
( and then removed:) 322.94 440 T
(\245) 72 414 T
-0.6 (To detect repetition of patterns it is necessary to make same-different comparisons between) 108 414 P
(patterns - to match patterns against each other.) 108 398 T
(\245) 72 372 T
3.38 (Where a pattern repeats in) 108 372 P
2 F
3.38 (I) 253.88 372 P
1 F
3.38 ( more often than other patterns of the same size, the) 257.87 372 P
0.84 (corresponding redundancy may be reduced by merging or \324unifying\325 all instances of the) 108 356 P
0.06 (pattern to make a single \324chunk\325. To preserve information about the locations of instances) 108 340 P
-0.15 (of the chunk that have been deleted, it may be necessary to mark each such location with a) 108 324 P
0.65 (relatively short \324code\325 or \324tag\325 pattern. An example from this article is the use of \324IC\325 as) 108 308 P
(shorthand for \324information compression\325.) 108 292 T
(\245) 72 266 T
0.52 (In any realistically large body of information, there is an astronomically large number of) 108 266 P
0.34 (alternative ways in which patterns may be matched and, typically, many alternative ways) 108 250 P
1.08 (in which patterns may be unified. Given the need to maximise compression for a given) 108 234 P
-0.11 (computational cost, some kind of) 108 218 P
2 F
-0.11 (search) 270.12 218 P
1 F
-0.11 ( is required amongst the many alternatives. Given) 302.12 218 P
1.08 (that exhaustive search of the entire abstract space is normally impossible, some kind of) 108 202 P
0.02 (\324metrics-guided\325 search is normally required \050e.g., \324hill climbing\325, \324beam search\325, \324genetic) 108 186 P
0.23 (algorithm\325, \324simulated annealing\325\051, or the search space must be constrained in some other) 108 170 P
(way.) 108 154 T
0.36 (Given heavily constrained versions of the search space, these ideas form the basis of most of the) 72 128 P
-0.47 (simpler \324standard\325 techniques for information compression \050see [19]\051. A working hypothesis in the) 72 112 P
0.38 (SP programme is that, at some stage,) 72 96 P
2 F
0.38 (all) 254.99 96 P
1 F
0.38 ( techniques for information compression may be seen to) 267.66 96 P
(rest on these basic principles.) 72 80 T
3.21 (In this article and its companion, the terms \324pattern matching, unification and search\325 are) 72 54 P
(abbreviated as \324PMUS\325.) 72 38 T
FMENDPAGE
%%EndPage: "6" 7
%%Page: "7" 7
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
1 12 Q
0 X
(- 7 -) 296 757 T
72 18 540 720 R
7 X
V
2 F
0 X
(2.3) 72 712 T
(Algorithmic Information Theory) 108 712 T
1 F
-0.33 (In Algorithmic Information Theory \050AIT\051 \050see, for example, [6, 14]\051, a key idea is that) 72 688 P
2 F
-0.33 (I) 485.32 688 P
1 F
-0.33 ( is) 489.32 688 P
2 F
-0.33 (random) 502.67 688 P
1 F
0.47 (if no computer program can be discovered or devised which can generate) 72 672 P
2 F
0.47 (I) 432.15 672 P
1 F
0.47 ( and which is smaller) 436.15 672 P
0.61 (than) 72 656 P
2 F
0.61 (I) 96.27 656 P
1 F
0.61 (. For most realistically large bodies of information, it is not possible to prove that no such) 100.27 656 P
-0.24 (computer program can be found. For any \050reasonably large\051) 72 640 P
2 F
-0.24 (I) 359.79 640 P
1 F
-0.24 ( which is apparently random, there is) 363.78 640 P
0.29 (always the possibility that, in the future, some computer program may be found which is smaller) 72 624 P
(than) 72 608 T
2 F
(I) 95.66 608 T
1 F
( but which will generate it.) 99.66 608 T
0.55 (This view of the concept of randomness may, at first sight, appear to be radically different from) 72 582 P
0.29 (the concept of randomness in Shannon\325s information theory. But, given the idea that redundancy) 72 566 P
2.6 (in the Shannon view of information may be interpreted as \324relatively frequent repetition of) 72 550 P
-0.47 (patterns\325 and given the idea that unification of repeated patterns is the key to compression, then the) 72 534 P
0.26 (two views may be seen to complement each other quite well: where) 72 518 P
2 F
0.26 (I) 403.01 518 P
1 F
0.26 ( contains redundancy in the) 407 518 P
0.24 (form of repeated patterns, it is compressible; and, where no such patterns can be found,) 72 502 P
2 F
0.24 (I) 497.53 502 P
1 F
0.24 ( may be) 501.52 502 P
(regarded as random.) 72 486 T
-0.58 (Readers may notice an apparent contradiction between the idea of \324computing as compression\325 and) 72 460 P
1.65 (the obvious fact \050which forms part of the AIT concept of randomness, as just described\051 that) 72 444 P
0.02 (computers may be used to decompress information and create redundancy. The way in which this) 72 428 P
(apparent contradiction may be resolved is discussed in [26, 29, 33].) 72 412 T
-0.41 (Within AIT, there is no suggestion that \324computing\325 may itself be understood as IC. This and other) 72 386 P
(important differences between AIT and the SP theory are described in [29, 33].) 72 370 T
0 F
(3) 72 334 T
(Information Processing in Brains and Nervous Systems) 108 334 T
1 F
0.65 (Formal logic, especially when it is embedded in a computer system, seems to have an existence) 72 305 P
0.28 (and a validity which is independent of how we think. Few people, these days, would see logic as) 72 289 P
-0.66 (part of human psychology but early workers in logic had little doubt that they were studying human) 72 273 P
0.93 (thinking, witness the title of George Boole\325s book:) 72 257 P
2 F
0.93 (An Investigation of the Laws of Thought on) 325.79 257 P
-0.45 (Which are Founded the Mathematical Theories of Logic and Probabilities) 72 241 P
1 F
-0.45 (. And there is no dispute) 424.93 241 P
-0.25 (that, for most of its history, logic has been exclusively a form of human thinking and that informal) 72 225 P
(kinds of logic are a prominent part of everyday thinking.) 72 209 T
-0.7 (Whilst it would be absurd to try to turn the clock back and make logic a mere branch of psychology,) 72 183 P
0.02 (it seems that developments in psychology since that subject split away from logic may shed some) 72 167 P
1.48 (light on the latter. If \324AI\325 is regarded as part of psychology, this is in keeping with Gabbay\325s) 72 151 P
0.06 (suggestion that: \322we should try and export from AI into logic, let logic evolve, and then reuse the) 72 135 P
0.71 (new logic in AI.\323 [10, p. vi]. More generally, IC may provide a unifying theme for a \324cognitive) 72 119 P
(science\325 which embraces both cognitive psychology and logic.) 72 103 T
2 F
(3.1) 72 77 T
(IC and the Workings of Brains and Nervous Systems) 108 77 T
1 F
0.13 (Since at least as long ago as Ernst Mach\325s writings in the last century, it has been recognised that) 72 53 P
-0.11 (IC may be an important feature of the workings of brains and nervous systems. More specifically,) 72 37 P
FMENDPAGE
%%EndPage: "7" 8
%%Page: "8" 8
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
1 12 Q
0 X
(- 8 -) 296 757 T
72 18 540 720 R
7 X
V
0 X
0.38 (it has been recognised that inhibitory processes in nervous tissue have a key role in compressing) 72 712 P
(information by the extraction of redundancy from information \050see, for example, [3, 15, 21]\051.) 72 696 T
1.24 (In any biological system for information processing which is exposed to the rigours of natural) 72 670 P
2.31 (selection, IC makes good sense from the perspective of basic engineering because it should) 72 654 P
1.88 (facilitate the transmission or storage of information. IC has the additional, and perhaps more) 72 638 P
0.03 (significant advantage, that it is closely linked to inductive inference \050as discussed in Sections 4.3,) 72 622 P
2.4 (4.4 and 4.5, below\051 which means an ability to anticipate or predict future events from past) 72 606 P
(observations - and that means an ability to evade danger and find what is needed for living.) 72 590 T
0.34 (It is not appropriate here to review writings in this area in detail since reviews and summaries of) 72 564 P
0.65 (the literature are published elsewhere [29, 34]. The purpose here is to highlight some aspects of) 72 548 P
-0.5 (this thinking which appear to be relevant to logic. Some of the ideas introduced here will be further) 72 532 P
(developed in Sections 4 and 5.) 72 516 T
2 F
(3.2) 72 490 T
(Unsupervised Learning) 108 490 T
1 F
0.36 (Of the several meanings of the word \324learning\325, the one intended here is the kind of unconscious) 72 466 P
-0.33 (and automatic assimilation of knowledge without explicit teaching which a young child appears to) 72 450 P
(do in learning his or her first language.) 72 434 T
2.47 (There is good evidence that this kind of \324unsupervised\325 learning may, to a large extent, be) 72 408 P
0.62 (understood as IC: reducing large samples of language heard to compact descriptions in terms of) 72 392 P
-0.56 (\324grammars\325. There is empirical support for this view: computer models of language learning which) 72 376 P
2.22 (perform IC by PMUS show remarkable correspondences with observed features of language) 72 360 P
(development in children [37].) 72 344 T
2.08 (The connection between learning in this sense and logic may seem remote but unsupervised) 72 318 P
0.03 (learning is closely related to inductive inference which, as suggested in Section 4, below, appears) 72 302 P
0.7 (to be more closely related to deductive inference than has traditionally been thought. In another) 72 286 P
-0.13 (paper [28], I have made some tentative proposals about possible ways in which learning and logic) 72 270 P
(might be integrated.) 72 254 T
2 F
(3.3) 72 228 T
(The Concept of an Object) 108 228 T
1 F
1.74 (The everyday concept of an \324object\325 - person, tree, car, house, and so on - is not part of the) 72 204 P
0.18 (machinery of formal logic but logic is often applied to \324real world\325 objects, and symbolic entities) 72 188 P
(such as the elements of a logical set are often equated with such objects.) 72 172 T
2.6 (It is often assumed that an object is some kind of conceptual primitive which is somehow) 72 146 P
0.4 (immediately \324given\325 by the environment in which we live. But visual recognition of an object as) 72 130 P
2.98 (an object is almost certainly the product of complex processing by our brains. The visual) 72 114 P
0.08 (information which we receive from the environment is like a series of frames of cinema film each) 72 98 P
-0.55 (one of which is merely a pattern of light, shade and colour with no differentiation of the constituent) 72 82 P
0.27 (objects from each other or from their backgrounds. Achieving that differentiation is a task which) 72 66 P
0.13 (our brains perform so effortlessly and unconsciously that normally we perceive objects as simply) 72 50 P
(\324there\325.) 72 34 T
FMENDPAGE
%%EndPage: "8" 9
%%Page: "9" 9
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
1 12 Q
0 X
(- 9 -) 296 757 T
72 18 540 720 R
7 X
V
0 X
1.29 (It appears that the process by which the brain distinguishes objects as objects is largely IC by) 72 712 P
0.06 (PMUS. Some of the thinking behind this proposition is described in [34] and will not be repeated) 72 696 P
1.29 (here. In brief, the argument is that matching and unification of patterns coupled with heuristic) 72 680 P
0.59 (search through the many alternatives can differentiate the relatively constant pattern represented) 72 664 P
(by an object from the relatively changeable patterns of the object\325s environment.) 72 648 T
2 F
(3.4) 72 622 T
(Classes and Inheritance) 108 622 T
1 F
0.37 (Psychologists, logicians and others concerned with the representation of knowledge in natural or) 72 598 P
0.07 (artificial systems recognise that objects and other entities may form groupings which are variously) 72 582 P
1.82 (called classes, sets, concepts, categories, and so on, depending on the context. It is generally) 72 566 P
0.43 (recognised that any such grouping may be described extensionally by listing the members of the) 72 550 P
0 (group or it may be described intensionally in terms of the attributes of each member of the group.) 72 534 P
-0.14 (In the latter case, it is recognised that an element of a group or a whole group may share or) 72 518 P
2 F
-0.14 (inherit) 508 518 P
1 F
1.6 (the attributes of one or more groups within which they may be included, and this recursively) 72 502 P
0.7 (through a hierarchy or heterarchy of groups and sub-groups. In addition to these characteristics,) 72 486 P
-0.28 (psychologists recognise that the kind of \324natural\325 categories which people seem to use in everyday) 72 470 P
1.05 (thinking often have the property of) 72 454 P
2 F
1.05 (polythesis) 248.93 454 P
1 F
1.05 (: that no single attribute need be present in every) 296.93 454 P
(member of the group.) 72 438 T
1.87 (In this connection, inheritance is a powerful mechanism for minimising redundancy and thus) 72 412 P
-0.19 (compressing information. For the kinds of concepts we ordinarily use, the impact of inheritance is) 72 396 P
2.23 (enormous. It is, for example, hard to imagine that anyone would store in their brains a full) 72 380 P
1.43 (description of a human being for every one of the people that they know. It is so much more) 72 364 P
0.65 (economical to store the description of a human being once and then retrieve that description for) 72 348 P
(each individual person as and when required.) 72 332 T
-0.2 (This kind of thinking has led psychologists to believe, not unreasonably, that inheritance is part of) 72 306 P
1.12 (our cognitive machinery. Attempts to demonstrate this experimentally \050e.g., [7]\051 have not been) 72 290 P
1.48 (entirely successful but this may say more about the limitations of the experimental method in) 72 274 P
0.43 (cognitive psychology than about the role of inheritance in cognition. In general, it seems hard to) 72 258 P
0.22 (resist the conclusion that inheritance and the associated mechanisms for IC are a key part of how) 72 242 P
(we store and use information in our brains.) 72 226 T
0.34 (Inheritance is clearly a form of inference and, as suggested in [23], it seems to be closely related) 72 200 P
2.2 (to) 72 184 P
2 F
2.2 (modus ponens) 86.53 184 P
1 F
2.2 ( reasoning. The possible roles of inheritance in logic are considered in, for) 157.06 184 P
(example, [12].) 72 168 T
2 F
(3.5) 72 142 T
(Pattern Recognition) 108 142 T
1 F
0.03 (One of the most prominent features of human \050and animal\051 perception and cognition is the ability) 72 118 P
0.53 (to recognise objects and, more generally, patterns despite distortions of various kinds: omission,) 72 102 P
(addition or substitution of elements relative to the pattern that is recognised.) 72 86 T
3.45 (Establishing in broad terms what people can do in this area does not require any clever) 72 60 P
2.22 (experimental method or sophisticated analysis. One needs only to look at a typical scene to) 72 44 P
0.39 (understand that one object frequently occludes another and that we recognise many of the things) 72 28 P
FMENDPAGE
%%EndPage: "9" 10
%%Page: "10" 10
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
1 12 Q
0 X
(- 10 -) 293 757 T
72 18 540 720 R
7 X
V
0 X
1.73 (in the scene each from only a subset of its parts. Consider, for example, how easily we may) 72 712 P
0.09 (recognise a building as a building despite the intervening trunk and branches of a tree which may) 72 696 P
(obscure quite large parts of the building.) 72 680 T
-0.6 (That pattern recognition may be interpreted as IC has been recognised for some time [22]. It should) 72 654 P
0.14 (in any case be clear that, since pattern recognition is largely a process of unifying a pattern in the) 72 638 P
0.09 (incoming information with a pattern stored in memory, it can be seen as IC by PMUS \050cf. Section) 72 622 P
(2.2\051.) 72 606 T
-0.42 (As described in Section 4.5, this capability which we have for recognising patterns from subsets of) 72 580 P
0.38 (their parts seems to provide a basis for both inductive and deductive reasoning of a \324naturalistic\325) 72 564 P
(kind. This thinking is further developed in [23, Sections 5 and 6].) 72 548 T
0 F
(4) 72 512 T
(Randomness, Redundancy, Inference and IC) 108 512 T
1 F
0.05 (This section traces some connections which suggest that inductive and probabilistic reasoning are) 72 483 P
1.29 (more closely related to deductive reasoning than has traditionally been thought and that IC by) 72 467 P
2.47 (PMUS may provide a unifying theme. In this area of thinking there seems to be a web of) 72 451 P
(interconnections which suggests the possibility of integration into a coherent whole.) 72 435 T
2 F
(4.1) 72 409 T
(Deduction Rests on the Inductive Principle?) 108 409 T
1 F
1.67 (Concepts of randomness and redundancy in Shannon\325s information theory and AIT provide a) 72 385 P
(useful perspective on logic and, indeed, on information processing of any kind.) 72 369 T
0.71 (Consider, for example, a world which is) 72 343 P
2 F
0.71 (totally) 272.64 343 P
1 F
0.71 ( random. Such a world would consist entirely of) 303.31 343 P
-0.19 (variation, something like a combination of white light with \324white\325 hissing sound. There would be) 72 327 P
0.65 (no persistent or repeating patterns, no regularities and no structure of any kind. That means that) 72 311 P
0.17 (there would be none of the persistent structured entities that we are familiar with in everyday life) 72 295 P
1.36 (and that includes those persistent structured entities - brains and computers - which we use to) 72 279 P
(process information, including logic.) 72 263 T
2.32 (In a world that was totally random, the absence of brains and computers would be of little) 72 237 P
-0.45 (consequence because there would be nothing to be achieved by the use of such mechanisms for the) 72 221 P
0.09 (storage or processing of information. Data stored at one moment would probably be invalid in the) 72 205 P
1.39 (next nano-second. An algorithm which gives one result today would probably give a different) 72 189 P
1.34 (result tomorrow. A logical deduction made on one occasion is unlikely to be valid on another) 72 173 P
(occasion.) 72 157 T
0.19 (In short, concepts of calculation and logical deduction are predicated on the belief that we live in) 72 131 P
0.15 (a world which is) 72 115 P
2 F
0.15 (not) 155.26 115 P
1 F
0.15 ( random, in which certain entities and regularities are persistent from time to) 170.59 115 P
(time and from place to place.) 72 99 T
-0.53 (If this is accepted, then another conclusion, which is perhaps surprising, seems to follow: that logic) 72 73 P
-0.19 (and mathematics are founded on the inductive principle that the past is a guide to the future. Since) 72 57 P
0.69 (inductive inferences can never be proved, we are led to the conclusion that the whole edifice of) 72 41 P
(proof and calculation rests on an unprovable hypothesis or conjecture!) 72 25 T
FMENDPAGE
%%EndPage: "10" 11
%%Page: "11" 11
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
1 12 Q
0 X
(- 11 -) 293 757 T
72 18 540 720 R
7 X
V
0 X
2.01 (If logic has uncertain foundations, any inference made within logic can never be completely) 72 712 P
(certain.) 72 696 T
2 F
(4.2) 72 670 T
(Uncertainty in Arithmetic) 108 670 T
1 F
-0.22 (In relation to the inter-linked themes of compression and uncertainty in inference, it is pertinent to) 72 646 P
0.05 (mention Chaitin\325s work in the framework of AIT: \322I have recently been able to take a further step) 72 630 P
-0.1 (along the path laid out by G\232del and Turing. By translating a particular computer program into an) 72 614 P
0.16 (algebraic equation of a type that was familiar even to the ancient Greeks, I have shown that there) 72 598 P
-0.4 (is randomness in the branch of pure mathematics known as number theory. My work indicates that) 72 582 P
(- to borrow Einstein\325s metaphor - God sometimes plays dice with whole numbers!\323 [5].) 72 566 T
-0.2 (Number theory is not the same as logic but logic and mathematics are sufficiently close to suggest) 72 540 P
(that results in one area may generalise to the other.) 72 524 T
2 F
(4.3) 72 498 T
(Inductive Inference and IC) 108 498 T
1 F
0.01 (The connection between inductive inference and IC has been recognised from at least as long ago) 72 474 P
0.17 (as Solomonoff\325s classic papers [18] which considered the problem of estimating the probabilities) 72 458 P
1.25 (of the strings of unobserved symbols which might follow the end of a long string of observed) 72 442 P
(symbols.) 72 426 T
0.67 (In broad terms, the solutions which Solomonoff proposed amounted to finding an \324encoding\325 of) 72 400 P
-0.7 (the observed symbols which was as small as possible, where the size of the encoding was to include) 72 384 P
0.6 (both the encoded sequence of symbols and the definition of the code. The required probabilities) 72 368 P
(could then be derived from these encodings.) 72 352 T
1.71 (This and later research on \324Minimum Length Encoding\325, grammatical inference and language) 72 326 P
0.07 (learning \050see [29] for more references\051 is clearly related to the principle of simplicity in scientific) 72 310 P
1.34 (explanation which is itself a version of Occam\325s Razor: that \324entities should not be multiplied) 72 294 P
(without necessity\325.) 72 278 T
1.5 (If it is accepted that the primary goal of science is to find compressed descriptions of natural) 72 252 P
1.27 (phenomena then an implication is that there is no absolute truth or absolute proof in scientific) 72 236 P
-0.16 (explanation. This is because it is never possible to know whether the most economical description) 72 220 P
-0.13 (has been found \050cf. Section 2.3\051. This was partially recognised by Popper [16] who suggested that) 72 204 P
0.6 (scientific theories could never be proved, only disproved, and more fully recognised by Lakatos) 72 188 P
-0.35 ([13] who suggested that scientific theories could be neither proved nor disproved, only weighed as) 72 172 P
(more or less probable.) 72 156 T
1.79 (In general, there is wide consensus that inductive inference is a probabilistic matter which is) 72 130 P
(closely related to IC.) 72 114 T
2 F
(4.4) 72 88 T
(PMUS and the Discovery of Inductive Regularities) 108 88 T
1 F
0.02 (The inductive principle that the past is a guide to the future is, more precisely stated, that patterns) 72 64 P
-0.33 (which have occurred relatively frequently in the past are relatively likely to occur in the future: we) 72 48 P
0.3 (expect the sun to rise each morning because it always has in the past; we discount the possibility) 72 32 P
FMENDPAGE
%%EndPage: "11" 12
%%Page: "12" 12
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
1 12 Q
0 X
(- 12 -) 293 757 T
72 18 540 720 R
7 X
V
0 X
0.07 (that the next paving stone in a path will drop us into a hole because this has never happened to us) 72 712 P
(in the past.) 72 696 T
0.12 (But, following the arguments in Section 2.2, relatively frequent occurrence of patterns in the past) 72 670 P
0.05 (means redundancy. And we can only know that a pattern has occurred relatively frequently in the) 72 654 P
2.17 (past by the matching and unification of the several instances of that pattern. Thus inductive) 72 638 P
(reasoning depends on IC by PMUS.) 72 622 T
2 F
(4.5) 72 596 T
(Inductive and Deductive Reasoning as Partial Pattern Recognition) 108 596 T
1 F
1.65 (In order to discover an inductive regularity like the ones just considered, we need to observe) 72 572 P
1.11 (several complete instances of the relevant pattern. But when we use such a pattern to make an) 72 556 P
-0.22 (inductive inference, our inference is based on observing only part of the pattern and predicting the) 72 540 P
0.03 (rest. Inductive inference may be seen as a process of recognising a whole pattern from a subset of) 72 524 P
0.11 (its parts. As discussed in Section 3.5, this kind of capability for partial pattern recognition is very) 72 508 P
(well developed in people and in animals too.) 72 492 T
0.69 (Partial pattern recognition is itself yet another example of IC by PMUS, except that in this case) 72 466 P
(only part of a given pattern is matched and unified with another pattern.) 72 450 T
0.06 (Most of the examples that we might give of inductive inference as partial pattern recognition may) 72 424 P
0.27 (also be seen as deduction. If we see blue litmus paper being placed in acid, we expect that it will) 72 408 P
-0.64 (turn red. This is inductive reasoning based on the normal behaviour of litmus paper. But it may also) 72 392 P
(be presented as a kind of) 72 376 T
2 F
(modus ponens) 193.97 376 T
1 F
( reasoning like this:) 260.3 376 T
2 F
(If blue litmus paper is placed in acid, then it will turn red.) 108 350 T
(This piece of litmus paper is being placed in acid.) 108 332 T
(Therefore, this piece of litmus paper will turn red.) 108 314 T
1 F
0.34 (In a similar way, we may reason that, since Wednesday always follows Tuesday and since today) 72 288 P
-0.19 (is Tuesday, that tomorrow will be Wednesday. This reasoning may be seen as inductive reasoning) 72 272 P
0.01 (based on the regular pattern of Wednesday always following Tuesday or it may be seen as simple) 72 256 P
0.05 (syllogistic) 72 240 P
0.05 (reasoning with two premises and a conclusion. Whichever way it is seen, any example) 124.4 240 P
0.05 (like this may be interpreted as partial pattern recognition which itself may be interpreted as IC by) 72 224 P
(PMUS.) 72 208 T
1.89 (The interpretation of deduction as partial pattern matching, just described, does not precisely) 72 182 P
0.55 (capture the formal view of) 72 166 P
2 F
0.55 (modus ponens) 205.03 166 P
1 F
0.55 ( deduction which incorporates the concept of \324material) 273.9 166 P
-0.25 (implication\325 which itself incorporates a concept of negation. The way in which formal concepts of) 72 150 P
-0.39 (deduction may be accommodated in a framework of IC by PMUS is discussed in [23, Section 5.1].) 72 134 P
0 F
(5) 72 98 T
(IC and the Elements of Logic) 108 98 T
1 F
-0.25 (This section considers in relatively concrete terms how IC may be seen at work in some aspects of) 72 69 P
1.89 (current formalisms and mechanisms of logic. Some of the ideas described in this section are) 72 53 P
(developed more fully in the accompanying article [23].) 72 37 T
FMENDPAGE
%%EndPage: "12" 13
%%Page: "13" 13
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
1 12 Q
0 X
(- 13 -) 293 757 T
72 18 540 720 R
7 X
V
2 F
0 X
(5.1) 72 712 T
(Unification in Logic) 108 712 T
1 F
0.32 (Unification as it is being used in these two articles means a simple merging of identical patterns.) 72 688 P
0.41 (This has an obvious role in the reduction of redundancy in any body of information and thus the) 72 672 P
(compression of that information \050as described in Section 2.2\051.) 72 656 T
0.72 (\324Unification\325 in the sense of logic means making substitutions for variables in two structures so) 72 630 P
-0.6 (that the two structures become the same. If the two identical structures are merged, which is at least) 72 614 P
-0.4 (implicit in logical systems, this will also clearly have the effect of reducing redundancy in relevant) 72 598 P
(structures and thus compressing them.) 72 582 T
2.46 (\322All present day deduction systems - whether they are based on resolution or not - have a) 72 556 P
-0.66 (unification algorithm for first- or higher-order terms as their essential component: it is the \324addition) 72 540 P
2.44 (and multiplication\325 of deduction work.\323 [2, p. 58]. The importance in deduction systems of) 72 524 P
1.36 (unification in the sense of logic, and its obvious relevance to IC, is one of the strongest clues) 72 508 P
(suggesting that logic in its entirety might usefully be understood as IC.) 72 492 T
2 F
(5.2) 72 466 T
(Sets and the Union and Intersection of Sets.) 108 466 T
1 F
0.12 (IC is implicit in the concept of a set because of the rule that no element of a set may appear more) 72 442 P
-0.32 (than once in the set. In other words, any redundancy represented by the appearance of two or more) 72 426 P
2.67 (instances of any element in a collection of elements must have been eliminated before the) 72 410 P
(collection can be called a set.) 72 394 T
0.98 (IC is also implicit in the concepts of \324union\325 and \324intersection\325 of sets. The \324union\325 of two sets) 72 368 P
0.09 (means forming their elements into a single collection and then applying IC to eliminate replicated) 72 352 P
0.13 (instances of any element. If each set of identical instances is unified in the sense that it is merged) 72 336 P
0.06 (into one instance, then, within the union of the two original sets, the set of unified elements is the) 72 320 P
(\324intersection\325 of those two sets.) 72 304 T
(In short, IC is intimately related to these three basic ideas in logic.) 72 278 T
2 F
(5.3) 72 252 T
(Chunking with Tags) 108 252 T
1 F
1.45 (As described in Section 2.2, a basic idea in most of the \324standard\325 methods for IC is to unify) 72 228 P
1.11 (repeated instances of a pattern into a single \324chunk\325, to label that chunk with a relatively short) 72 212 P
0.06 (\324code\325 or \324tag\325 and to use that tag as a marker for the positions of all instances of the chunk which) 72 196 P
0.09 (have been deleted. As previously mentioned, this is a common device in ordinary language where) 72 180 P
1.69 (a relatively long phrase may be shortened to a few letters like \324IC\325 or \324PMUS\325. Many of the) 72 164 P
0.43 (ordinary words of natural language may be regarded as tags for the relatively complex bodies of) 72 148 P
(information which are their meanings.) 72 132 T
0.09 (This idea can be seen at work in rewrite rules, at least when they are used so that there is always a) 72 106 P
-0.33 (single symbol on one side of each rule \050usually the left-hand side\051 and two or more symbols on the) 72 90 P
0.08 (other side. When rewrite rules are used in this way, the symbol on the \324small\325 side may be seen as) 72 74 P
0.06 (a tag and the symbols on the \324large\325 side may be seen as a chunk of information which is labelled) 72 58 P
1.38 (by the tag. Rewrite rules like this can provide an effective means of reducing a large body of) 72 42 P
(information to a compressed version expressed in a \324grammar\325.) 72 26 T
FMENDPAGE
%%EndPage: "13" 14
%%Page: "14" 14
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
1 12 Q
0 X
(- 14 -) 293 757 T
72 18 540 720 R
7 X
V
0 X
-0.59 (Another example of the chunking-with-tags idea is the relationship between a function and its name.) 72 712 P
0.8 (The former is normally a relatively large body of information and the latter is a relatively short) 72 696 P
-0.41 (identifier which, with or without associated arguments, may be used to mark the use of a function) 72 680 P
(in each context where it is required without the necessity of writing out the whole function.) 72 664 T
0.16 (Further discussion of the way formal notations provide mechanisms for compressing information) 72 638 P
(may be found in [34, 32].) 72 622 T
2 F
(5.4) 72 596 T
(TRUE, FALSE and IC) 108 596 T
1 F
1.9 (In propositional logic and other standard forms of logic, TRUE and FALSE are symmetrical) 72 572 P
-0.43 (primitive concepts. This kind of symmetry is also found in databases where there is a roughly even) 72 556 P
0.69 (distribution of some two-valued attribute such as whether seats in a train are facing forwards or) 72 540 P
(backwards or whether a railway compartment is smoking or non-smoking.) 72 524 T
-0.26 (But in systems like Prolog and in ordinary language, there is a striking asymmetry between TRUE) 72 498 P
1.11 (and FALSE: everything is assumed to be true unless it is explicitly marked as false. The same) 72 482 P
0.27 (asymmetry is found in databases where there is a very uneven distribution of propositions as, for) 72 466 P
1.14 (example, a travel agent\325s database recording direct flights between pairs of cities: the database) 72 450 P
0.29 (records the flights that are available and says nothing about the much greater number of possible) 72 434 P
(flights which are not available \050cf. [11]\051.) 72 418 T
0.15 (The asymmetry between TRUE and FALSE, where it exists, seems to be closely related to IC. In) 72 392 P
2.13 (the case of the travel agent\325s database, it would be extremely uneconomical to record every) 72 376 P
-0.2 (possible flight which is available with an explicit marker as TRUE with a record of every possible) 72 360 P
1.93 (flight which is not available with an explicit marker as FALSE. The default assumption that) 72 344 P
(everything in the database is true, saves a lot of space.) 72 328 T
0.06 (This default assumption may be seen as a simple example of inheritance \050cf. Section 3.4. See also) 72 302 P
0.84 ([23, Section 6]\051. This saves much space but requires explicit denial where there are exceptions.) 72 286 P
0.51 (When Nixon said \322I am not a crook,\323 he was acknowledging the prior expectation that he) 72 270 P
2 F
0.51 (was) 512.48 270 P
1 F
0.51 ( a) 531.16 270 P
(crook.) 72 254 T
0 F
(6) 72 218 T
(Conclusions) 108 218 T
1 F
0.17 (This article has reviewed some of the considerations relating to the nature of information, human) 72 189 P
0.72 (thinking, inductive inference, and aspects of logic which seem to support the SP conjecture and) 72 173 P
(suggest that information compression may provide a unifying theme for understanding logic.) 72 157 T
0.77 (In the accompanying article [23], this thinking is developed into more concrete proposals about) 72 131 P
0.07 (how logic may be understood as information compression by) 72 115 P
2 F
0.07 (multiple alignment) 367.64 115 P
1 F
0.07 ( of patterns \050with) 457.9 115 P
0.27 (unification and search\051, where \324multiple alignment\325 has a meaning which is close to the meaning) 72 99 P
(of that term in bio-informatics.) 72 83 T
0.55 (I hope that readers will feel that there is sufficient substance in these proposals to justify further) 72 57 P
(investigation and development.) 72 41 T
FMENDPAGE
%%EndPage: "14" 15
%%Page: "15" 15
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
1 12 Q
0 X
(- 15 -) 293 757 T
72 18 540 720 R
7 X
V
0 F
0 X
(References) 72 712 T
1 F
(1) 72 688 T
2.43 (Abramsky, S., Gabbay, D. M. and Maibaum, T. S. E. \050Eds.\051,) 108 688 P
2 F
2.43 (Handbook of Logic in) 427.71 688 P
(Computer Science) 108 674 T
1 F
(, Oxford: Clarendon Press, Vols. 1-4, 1992.) 195.65 674 T
(2) 72 650 T
(Baader, F. and Siekmann, J. H. \324Unification theory,\325 in [10], Vol. 2, pp. 41-125, 1994.) 108 650 T
(3) 72 626 T
0.08 (Barlow, H. B., \324Trigger features, adaptation and economy of impulses.\325 In K. N. Leibovic) 108 626 P
0.97 (\050Ed.\051,) 108 612 P
2 F
0.97 (Information Processing in the Nervous System) 139.3 612 P
1 F
0.97 (. New York: Springer, pp 209-230,) 367.15 612 P
(1969.) 108 598 T
(4) 72 574 T
(Bibel, W. and Eder, E. \324Methods and calculi for deduction,\325 in [10], Vol. 1, pp. 68-169.) 108 574 T
(5) 72 550 T
(Chaitin, G. J. \324Randomness in Arithmetic,\325) 108 550 T
2 F
(Scientific American) 317.33 550 T
0 F
(259) 414.65 550 T
1 F
(\0501\051, 80-85, 1988.) 432.65 550 T
(6) 72 526 T
0.74 (Chaitin, G. J.) 108 526 P
2 F
0.74 (Algorithmic Information Theory) 176.9 526 P
1 F
0.74 (, Cambridge: Cambridge University Press,) 333.05 526 P
(1987.) 108 512 T
(7) 72 488 T
2.39 (Collins, A. M. and Quillian, M. R. \324Experiments on semantic memory and language) 108 488 P
1.34 (comprehension,\325 in L. W. Gregg \050Ed.\051,) 108 474 P
2 F
1.34 (Cognition in learning and memory) 305.65 474 P
1 F
1.34 (. New York:) 477.67 474 P
(Wiley, pp. 117-147, 1972.) 108 460 T
(8) 72 436 T
(Copi, I. M.) 108 436 T
2 F
(Introduction to Logic) 164 436 T
1 F
(, New York: MacMillan, 1986.) 266.68 436 T
(9) 72 412 T
0.46 (Eisinger, N. and Ohlbach, H. J. \324Deduction systems based on resolution,\325 in [10], Vol. 1,) 108 412 P
(pp. 183-271.) 108 398 T
(10) 72 374 T
0.25 (Gabbay, D. M., Hogger, C. J. and Robinson, J. A. \050Eds.\051,) 108 374 P
2 F
0.25 (Handbook of Logic in Artificial) 387.67 374 P
(Intelligence and Logic Programming) 108 360 T
1 F
(, Vols. 1-3, Oxford: Clarendon Press, 1993-4.) 286.32 360 T
(11) 72 336 T
(Ginsberg, M. L. \324AI and nonmonotonic reasoning,\325 in [10], Vol. 3, pp. 1-33, 1994.) 108 336 T
(12) 72 312 T
0.43 (Horty, J. F. \324Some direct theories of nonmonotonic inheritance,\325 in [10], Vol. 3, pp. 111-) 108 312 P
(187, 1994.) 108 298 T
(13) 72 274 T
1.31 (Lakatos, I. \324Falsification and the methodology of scientific research programmes,\325 in I.) 108 274 P
0.14 (Lakatos and A. E. Musgrave \050Eds.\051,) 108 260 P
2 F
0.14 (Criticism and the Growth of Knowledge) 284.14 260 P
1 F
0.14 (, Cambridge:) 477.2 260 P
(Cambridge University Press, pp. 91-196, 1970.) 108 246 T
(14) 72 222 T
0.97 (Li, M. and Vit\207nyi, P.) 108 222 P
2 F
0.97 (An Introduction to Kolmogorov Complexity and Its Applications) 221.2 222 P
1 F
0.97 (,) 537 222 P
(New York: Springer-Verlag, 1993.) 108 208 T
(15) 72 184 T
0.7 (Mahowald, M. A. and Mead, C., \324The silicon retina,\325) 108 184 P
2 F
0.7 (Scientific American) 371.59 184 P
0 F
0.7 (264) 470.31 184 P
1 F
0.7 (\0505\051, 40-47,) 488.31 184 P
(1991.) 108 170 T
(16) 72 146 T
0.15 (Popper, K. R.) 108 146 P
2 F
0.15 (Conjectures and Refutations: the Growth of Scientific Knowledge) 177.12 146 P
1 F
0.15 (, London:) 493.18 146 P
(Routledge and Paul, 1962.) 108 132 T
(17) 72 108 T
1.31 (Shannon, C. E. and Weaver, W.) 108 108 P
2 F
1.31 (The Mathematical Theory of Communication) 272.15 108 P
1 F
1.31 (, Urbana:) 494.04 108 P
(University of Illinois Press, 1949.) 108 94 T
(18) 72 70 T
-0.38 (Solomonoff, R. J., \324A formal theory of inductive inference. Parts I and II,\325) 108 70 P
2 F
-0.38 (Information and) 462.04 70 P
(Control) 108 56 T
0 F
(7) 148.34 56 T
1 F
(, 1-22 and 224-254, 1964.) 154.34 56 T
(19) 72 32 T
1.5 (Storer, J. A.) 108 32 P
2 F
1.5 (Data Compression: Methods and Theory) 173.17 32 P
1 F
1.5 (, Rockville, Maryland: Computer) 375.83 32 P
FMENDPAGE
%%EndPage: "15" 16
%%Page: "16" 16
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
1 12 Q
0 X
(- 16 -) 293 757 T
72 18 540 720 R
7 X
V
0 X
(Science Press, 1988.) 108 712 T
(20) 72 688 T
0.03 (Uspensky, V. A. \324Kolmogorov and mathematical logic,\325) 108 688 P
2 F
0.03 (Journal of Symbolic Logic) 380.78 688 P
0 F
0.03 (57) 510.67 688 P
1 F
0.03 (\0502\051:) 522.67 688 P
(385-412, 1992.) 108 674 T
(21) 72 650 T
(Von B\216k\216sy, G.,) 108 650 T
2 F
(Sensory Inhibition) 190.66 650 T
1 F
(, Princeton, NJ: Princeton University Press, 1967.) 278.99 650 T
(22) 72 626 T
1.71 (Watanabe, S., \324Pattern recognition as information compression,\325 in S. Watanabe \050Ed.\051,) 108 626 P
2 F
(Frontiers of Pattern Recognition) 108 612 T
1 F
(, New York: Academic Press, 1972.) 265.67 612 T
(23) 72 588 T
(Wolff, J. G. \324Multiple alignment and logic,\325) 108 588 T
2 F
(Bulletin of the IGPL) 321.65 588 T
1 F
(, this volume.) 419.32 588 T
(24) 72 564 T
(Wolff, J. G. \324Multiple alignment and computing\325 \050submitted\051.) 108 564 T
(25) 72 540 T
-0.63 (Wolff, J. G. \324Computing as compression by multiple alignment, unification and search \0501\051,\325) 108 540 P
(\050submitted\051.) 108 526 T
(26) 72 502 T
-0.63 (Wolff, J. G. \324Computing as compression by multiple alignment, unification and search \0502\051,\325) 108 502 P
(\050submitted\051.) 108 488 T
(27) 72 464 T
0.39 (Wolff, J. G. \324Information compression by multiple alignment, unification and search as a) 108 464 P
(general theory of computing,\325 \050in preparation\051.) 108 450 T
(28) 72 426 T
0.58 (Wolff, J. G. \324Learning and reasoning as information compression by multiple alignment,) 108 426 P
5.19 (unification and search,\325 To appear in) 108 412 P
2 F
5.19 (Computational Learning and Probabilistic) 318.74 412 P
-0.56 (Reasoning) 108 398 P
1 F
-0.56 (, A. Gammerman \050Ed.\051, New York: Wiley. This is a revised version of \324Learning) 158.66 398 P
-0.21 (and reasoning as compression by multiple alignment and unification\325 presented at Applied) 108 384 P
0.46 (Decision Technologies \32495, Brunel University, April 1995. A copy of that paper is in the) 108 370 P
-0.2 (Proceedings for Stream 1 \050Computational Learning and Probabilistic Reasoning\051, pp. 223-) 108 356 P
(236.) 108 342 T
(29) 72 318 T
0.24 (Wolff, J. G. \324Computing as compression: an overview of the SP theory and system,\325) 108 318 P
2 F
0.24 (New) 518.66 318 P
(Generation Computing) 108 304 T
0 F
(13) 222 304 T
1 F
(: 187-214, 1995.) 234 304 T
(30) 72 280 T
-0.36 (Wolff, J. G. \324Computing as compression: SP20\325,) 108 280 P
2 F
-0.36 (New Generation Computing) 341.79 280 P
0 F
-0.36 (13) 479.03 280 P
1 F
-0.36 (: 215-241,) 491.03 280 P
(1995.) 108 266 T
(31) 72 242 T
-0.52 (Wolff, J. G. \324A scaleable technique for best-match retrieval of sequential information using) 108 242 P
(metrics-guided search,\325) 108 228 T
2 F
(Journal of Information Science) 223.63 228 T
0 F
(20) 376.28 228 T
1 F
( \0501\051: 16-28, 1994.) 388.28 228 T
(32) 72 204 T
-0.47 (Wolff, J. G. \324Towards a new concept of software,\325) 108 204 P
2 F
-0.47 (Software Engineering Journal) 348.69 204 P
0 F
-0.47 (9) 495.61 204 P
1 F
-0.47 ( \0501\051: 27-) 501.61 204 P
(38, 1994.) 108 190 T
(33) 72 166 T
0 (Wolff, J. G. \324Computing and information compression: a reply,\325) 108 166 P
2 F
0 (AI Communications) 418.99 166 P
0 F
0 (7) 517.66 166 P
2 F
0 (\050) 526.67 166 P
1 F
0 (3/) 530.66 166 P
(4\051: 203-219, 1994.) 108 152 T
(34) 72 128 T
0.44 (Wolff, J. G. \324Computing, cognition and information compression,\325) 108 128 P
2 F
0.44 (AI Communications) 434.46 128 P
0 F
0.44 (6) 534 128 P
2 F
(\050) 108 114 T
1 F
(2\051: 107-127, 1993.) 112 114 T
(35) 72 90 T
0.53 (Wolff, J. G.) 108 90 P
2 F
0.53 (Towards a Theory of Cognition and Computing) 169.57 90 P
1 F
0.53 (, Chichester: Ellis Horwood,) 401.43 90 P
(1991.) 108 76 T
(36) 72 52 T
-0.54 (Wolff, J. G. \324Simplicity and power - some unifying ideas in computing) 108 52 P
2 F
-0.54 (,\325 Computer Journal) 442.74 52 P
0 F
(33) 108 38 T
2 F
( \050) 120 38 T
1 F
(6\051: 518-534, 1990.) 127 38 T
FMENDPAGE
%%EndPage: "16" 17
%%Page: "17" 17
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
1 12 Q
0 X
(- 17 -) 293 757 T
72 18 540 720 R
7 X
V
0 X
(37) 72 712 T
2.67 (Wolff, J. G., \324Learning syntax and meanings through optimization and distributional) 108 712 P
2.29 (analysis,\325 in Y. Levy, I. M. Schlesinger and M. D. S. Braine \050Eds.\051,) 108 698 P
2 F
2.29 (Categories and) 464.04 698 P
2.26 (Processes in Language Acquisition) 108 684 P
1 F
2.26 (, Hillsdale, N.J.: Lawrence, Erlbaum, pp. 179-215,) 283.79 684 P
(1988.) 108 670 T
FMENDPAGE
%%EndPage: "17" 18
%%Trailer
%%BoundingBox: 0 0 612 792
%%Pages: 17 1
%%DocumentFonts: Times-Bold
%%+ Times-Roman
%%+ Times-Italic