%!
%%BoundingBox: (atend)
%%Pages: (atend)
%%DocumentFonts: (atend)
%%EndComments
%%BeginProlog
%
% FrameMaker postscript_prolog 3.0, for use with FrameMaker 3.0
% This postscript_prolog file is Copyright (c) 1986-1991 Frame Technology
% Corporation. All rights reserved. This postscript_prolog file may be
% freely copied and distributed in conjunction with documents created using
% FrameMaker.
%
% Known Problems:
% Due to bugs in Transcript, the 'PS-Adobe-' is omitted from line 1
/FMversion (3.0) def
% Set up Color vs. Black-and-White
% Probe the interpreter for real color support. FMPrintInColor ends up true
% only when 'colorimage' and 'currentcolortransfer' both exist AND the device
% reports more than one color (processcolors / deviceinfo), and the printer is
% not a NeWSprint device lacking a separationdict.
/FMPrintInColor false def
/colorimage where { pop
/currentcolortransfer where { pop
/FMPrintInColor true def
statusdict begin
/processcolors where {
pop processcolors 1 le {
/FMPrintInColor false def
} if
}{
% No processcolors entry: fall back to deviceinfo /Colors, if present.
/deviceinfo where {
pop deviceinfo /Colors known {
deviceinfo /Colors get 1 le {
/FMPrintInColor false def
} if
} if
} if
} ifelse
end
/currentcanvas where { % NeWSprint?
pop systemdict /separationdict known not {
/FMPrintInColor false def
} if
} if
} if
} if
% Uncomment this line to force b&w on color printer
% /FMPrintInColor false def
/FrameDict 195 dict def
systemdict /errordict known not {/errordict 10 dict def
errordict /rangecheck {stop} put} if
% The readline in 23.0 doesn't recognize cr's as nl's on AppleTalk
% Detect the broken readline by reading the probe lines below into a 5-byte
% string: an interpreter that overruns raises rangecheck, setting /bug.
FrameDict /tmprangecheck errordict /rangecheck get put
errordict /rangecheck {FrameDict /bug true put} put
FrameDict /bug false put
mark
% Some PS machines read past the CR, so keep the following 3 lines together!
% (These lines are consumed from currentfile at run time - do not insert
% anything between them.)
currentfile 5 string readline
00
0000000000
cleartomark
errordict /rangecheck FrameDict /tmprangecheck get put
% If the interpreter's readline is broken, replace it with a byte-at-a-time
% reimplementation that stops at LF (10) or CR (13).
FrameDict /bug get {
/readline {
/gstring exch def
/gfile exch def
/gindex 0 def
{
gfile read pop
dup 10 eq {exit} if
dup 13 eq {exit} if
gstring exch gindex exch put
/gindex gindex 1 add def
} loop
pop
gstring 0 gindex getinterval true
} def
} if
% FMVERSION: version-string -> --
% Prints a visible warning on the page if the document's version string does
% not match the prolog's FMversion.
/FMVERSION {
FMversion ne {
/Times-Roman findfont 18 scalefont setfont
100 100 moveto
(FrameMaker version does not match postscript_prolog!)
dup =
show showpage
} if
} def
% FMLOCAL: name -> --
% Declares a name as a FrameDict-local variable (initialized to 0) so later
% 'def's bind inside FrameDict rather than userdict.
/FMLOCAL {
FrameDict begin
0 def
end
} def
% Pre-declare the prolog's working variables as FrameDict locals.
/gstring FMLOCAL
/gfile FMLOCAL
/gindex FMLOCAL
/orgxfer FMLOCAL
/orgproc FMLOCAL
/organgle FMLOCAL
/orgfreq FMLOCAL
/yscale FMLOCAL
/xscale FMLOCAL
/manualfeed FMLOCAL
/paperheight FMLOCAL
/paperwidth FMLOCAL
% FMDOCUMENT: xscale yscale paperwidth paperheight manualfeed #copies nfonts -> --
% Per-document setup: allocates the font table, records page geometry and
% scaling, saves the original transfer/screen, then tries paper selection in
% order: tray (papersize), manual (manualpapersize), setpageparams
% (desperatepapersize).
/FMDOCUMENT {
array /FMfonts exch def
/#copies exch def
FrameDict begin
0 ne dup {setmanualfeed} if
/manualfeed exch def
/paperheight exch def
/paperwidth exch def
/yscale exch def
/xscale exch def
currenttransfer cvlit /orgxfer exch def
currentscreen cvlit /orgproc exch def
/organgle exch def /orgfreq exch def
setpapername
% Each selection proc leaves a bool (stopped/failure); fall through the chain.
manualfeed {true} {papersize} ifelse
{manualpapersize} {false} ifelse
{desperatepapersize} if
end
} def
/pagesave FMLOCAL
/orgmatrix FMLOCAL
/landscape FMLOCAL
% FMBEGINPAGE: width height landscape-flag -> --
% Starts a page: saves VM state, rotates for landscape (consuming the page
% size from the stack), applies the document scale, and opens a gsave that
% FMENDPAGE closes.
/FMBEGINPAGE {
FrameDict begin
/pagesave save def
3.86 setmiterlimit
/landscape exch 0 ne def
landscape {
90 rotate 0 exch neg translate pop
}
{pop pop}
ifelse
xscale yscale scale
/orgmatrix matrix def
gsave
} def
% FMENDPAGE: -- -> --
% Ends a page: restores the state saved by FMBEGINPAGE and emits the page.
/FMENDPAGE {
grestore
pagesave restore
end
showpage
} def
% FMFONTDEFINE: index encoding-flag fontname -> --
% Finds a font, re-encodes it (ReEncode), redefines it and stores it in the
% FMfonts table at the given index.
/FMFONTDEFINE {
FrameDict begin
findfont
ReEncode
1 index exch
definefont
FMfonts 3 1 roll
put
end
} def
% FMFILLS: count -> --  Allocates the fill-value table used by X.
/FMFILLS {
FrameDict begin
array /fillvals exch def
end
} def
% FMFILL: index value -> --  Registers one fill entry (gray number or an
% 8x8 pattern hex string).
/FMFILL {
FrameDict begin
fillvals 3 1 roll put
end
} def
% FMNORMALIZEGRAPHICS: reset path and graphics state to known defaults
% before imported code runs.
/FMNORMALIZEGRAPHICS {
newpath
0.0 0.0 moveto
1 setlinewidth
0 setlinecap
0 0 0 sethsbcolor
0 setgray
} bind def
/fx FMLOCAL
/fy FMLOCAL
/fh FMLOCAL
/fw FMLOCAL
/llx FMLOCAL
/lly FMLOCAL
/urx FMLOCAL
/ury FMLOCAL
% FMBEGINEPSF: llx lly urx ury fw fh fx fy angle -> --
% Brackets an imported EPS file: saves VM, disables showpage, and maps the
% EPS bounding box (llx..urx, lly..ury) onto the frame rectangle (fx fy fw fh)
% with the given rotation.
/FMBEGINEPSF {
end
/FMEPSF save def
/showpage {} def
FMNORMALIZEGRAPHICS
[/fy /fx /fh /fw /ury /urx /lly /llx] {exch def} forall
fx fy translate
rotate
fw urx llx sub div fh ury lly sub div scale
llx neg lly neg translate
} bind def
% FMENDEPSF: undo FMBEGINEPSF's save and re-enter FrameDict.
/FMENDEPSF {
FMEPSF restore
FrameDict begin
} bind def
FrameDict begin
% setmanualfeed: enable the printer's manual feed via statusdict.
/setmanualfeed {
%%BeginFeature *ManualFeed True
statusdict /manualfeed true put
%%EndFeature
} def
% max, min: a b -> max(a,b) / min(a,b)
/max {2 copy lt {exch} if pop} bind def
/min {2 copy gt {exch} if pop} bind def
% inch: n -> n*72 (points)
/inch {72 mul} def
% pagedimen: name w h -> --
% Sets /papername to name if (w,h) matches the requested paper size within
% 16 points in each dimension; otherwise discards the name.
/pagedimen {
paperheight sub abs 16 lt exch
paperwidth sub abs 16 lt and
{/papername exch def} {pop} ifelse
} def
/papersizedict FMLOCAL
% setpapername: classify (paperwidth, paperheight) into a known paper name,
% defaulting to /unknown.
/setpapername {
/papersizedict 14 dict def
papersizedict begin
/papername /unknown def
/Letter 8.5 inch 11.0 inch pagedimen
/LetterSmall 7.68 inch 10.16 inch pagedimen
/Tabloid 11.0 inch 17.0 inch pagedimen
/Ledger 17.0 inch 11.0 inch pagedimen
/Legal 8.5 inch 14.0 inch pagedimen
/Statement 5.5 inch 8.5 inch pagedimen
/Executive 7.5 inch 10.0 inch pagedimen
/A3 11.69 inch 16.5 inch pagedimen
/A4 8.26 inch 11.69 inch pagedimen
/A4Small 7.47 inch 10.85 inch pagedimen
/B4 10.125 inch 14.33 inch pagedimen
/B5 7.16 inch 10.125 inch pagedimen
end
} def
% papersize: -- -> failed?
% Invokes the tray+page operator for the detected paper name inside
% statusdict under 'stopped'; unwinds any dictionaries the device procs left
% behind. Leaves true on failure.
/papersize {
papersizedict begin
/Letter {lettertray letter} def
/LetterSmall {lettertray lettersmall} def
/Tabloid {11x17tray 11x17} def
/Ledger {ledgertray ledger} def
/Legal {legaltray legal} def
/Statement {statementtray statement} def
/Executive {executivetray executive} def
/A3 {a3tray a3} def
/A4 {a4tray a4} def
/A4Small {a4tray a4small} def
/B4 {b4tray b4} def
/B5 {b5tray b5} def
/unknown {unknown} def
papersizedict dup papername known {papername} {/unknown} ifelse get
end
/FMdicttop countdictstack 1 add def
statusdict begin stopped end
countdictstack -1 FMdicttop {pop end} for
} def
% manualpapersize: -- -> failed?
% Same as papersize but without tray selection (manual feed path).
/manualpapersize {
papersizedict begin
/Letter {letter} def
/LetterSmall {lettersmall} def
/Tabloid {11x17} def
/Ledger {ledger} def
/Legal {legal} def
/Statement {statement} def
/Executive {executive} def
/A3 {a3} def
/A4 {a4} def
/A4Small {a4small} def
/B4 {b4} def
/B5 {b5} def
/unknown {unknown} def
papersizedict dup papername known {papername} {/unknown} ifelse get
end
stopped
} def
% desperatepapersize: last resort - set page dimensions directly via
% statusdict setpageparams, ignoring any error.
/desperatepapersize {
statusdict /setpageparams known
{
paperwidth paperheight 0 1
statusdict begin
{setpageparams} stopped pop
end
} if
} def
% savematrix / restorematrix: stash and recall the CTM in orgmatrix
% (allocated per page by FMBEGINPAGE).
/savematrix {
orgmatrix currentmatrix pop
} bind def
/restorematrix {
orgmatrix setmatrix
} bind def
/dmatrix matrix def
% dpi: effective device resolution, from the length of (72,0) transformed by
% the default matrix.
/dpi 72 0 dmatrix defaultmatrix dtransform
dup mul exch dup mul add sqrt def
% freq: halftone frequency snapped so screen cells land on device pixels.
/freq dpi 18.75 div 8 div round dup 0 eq {pop 1} if 8 mul dpi exch div def
% sangle: device rotation angle of the default matrix.
/sangle 1 0 dmatrix defaultmatrix dtransform exch atan def
% DiacriticEncoding: 256-entry encoding vector mapping the Macintosh
% character set (as used by FrameMaker) onto standard glyph names.
/DiacriticEncoding [
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/.notdef /.notdef /.notdef /.notdef /space /exclam /quotedbl
/numbersign /dollar /percent /ampersand /quotesingle /parenleft
/parenright /asterisk /plus /comma /hyphen /period /slash /zero /one
/two /three /four /five /six /seven /eight /nine /colon /semicolon
/less /equal /greater /question /at /A /B /C /D /E /F /G /H /I /J /K
/L /M /N /O /P /Q /R /S /T /U /V /W /X /Y /Z /bracketleft /backslash
/bracketright /asciicircum /underscore /grave /a /b /c /d /e /f /g /h
/i /j /k /l /m /n /o /p /q /r /s /t /u /v /w /x /y /z /braceleft /bar
/braceright /asciitilde /.notdef /Adieresis /Aring /Ccedilla /Eacute
/Ntilde /Odieresis /Udieresis /aacute /agrave /acircumflex /adieresis
/atilde /aring /ccedilla /eacute /egrave /ecircumflex /edieresis
/iacute /igrave /icircumflex /idieresis /ntilde /oacute /ograve
/ocircumflex /odieresis /otilde /uacute /ugrave /ucircumflex
/udieresis /dagger /.notdef /cent /sterling /section /bullet
/paragraph /germandbls /registered /copyright /trademark /acute
/dieresis /.notdef /AE /Oslash /.notdef /.notdef /.notdef /.notdef
/yen /.notdef /.notdef /.notdef /.notdef /.notdef /.notdef
/ordfeminine /ordmasculine /.notdef /ae /oslash /questiondown
/exclamdown /logicalnot /.notdef /florin /.notdef /.notdef
/guillemotleft /guillemotright /ellipsis /.notdef /Agrave /Atilde
/Otilde /OE /oe /endash /emdash /quotedblleft /quotedblright
/quoteleft /quoteright /.notdef /.notdef /ydieresis /Ydieresis
/fraction /currency /guilsinglleft /guilsinglright /fi /fl /daggerdbl
/periodcentered /quotesinglbase /quotedblbase /perthousand
/Acircumflex /Ecircumflex /Aacute /Edieresis /Egrave /Iacute
/Icircumflex /Idieresis /Igrave /Oacute /Ocircumflex /.notdef /Ograve
/Uacute /Ucircumflex /Ugrave /dotlessi /circumflex /tilde /macron
/breve /dotaccent /ring /cedilla /hungarumlaut /ogonek /caron
] def
% ReEncode: font encoding-flag -> font'
% Copies a font dict (dropping FID) and, when the flag is 0, installs
% DiacriticEncoding as its Encoding. Leaves the new dict for definefont.
/ReEncode {
dup
length
dict begin
{
1 index /FID ne
{def}
{pop pop} ifelse
} forall
0 eq {/Encoding DiacriticEncoding def} if
currentdict
end
} bind def
/graymode true def
/bwidth FMLOCAL
/bpside FMLOCAL
/bstring FMLOCAL
/onbits FMLOCAL
/offbits FMLOCAL
/xindex FMLOCAL
/yindex FMLOCAL
/x FMLOCAL
/y FMLOCAL
% setpattern: patstring side bwidth -> --
% Installs the pattern bitmap as a halftone spot function, counts on/off
% bits while the screen is built, then sets the gray level to the pattern's
% on/off ratio. Clears graymode so grayness can restore the original screen.
/setpattern {
/bwidth exch def
/bpside exch def
/bstring exch def
/onbits 0 def /offbits 0 def
freq sangle landscape {90 add} if
{/y exch def
/x exch def
% Map spot-function coords (-1..1) to a bit position in the pattern.
/xindex x 1 add 2 div bpside mul cvi def
/yindex y 1 add 2 div bpside mul cvi def
bstring yindex bwidth mul xindex 8 idiv add get
1 7 xindex 8 mod sub bitshift and 0 ne
{/onbits onbits 1 add def 1}
{/offbits offbits 1 add def 0}
ifelse
}
setscreen
{} settransfer
offbits offbits onbits add div FMsetgray
/graymode false def
} bind def
% grayness: gray -> --
% Sets a plain gray level, restoring the original transfer and screen if a
% pattern screen is currently installed.
/grayness {
FMsetgray
graymode not {
/graymode true def
orgxfer cvx settransfer
orgfreq organgle orgproc cvx setscreen
} if
} bind def
/HUE FMLOCAL
/SAT FMLOCAL
/BRIGHT FMLOCAL
/Colors FMLOCAL
% Define the color operators (K, FMsetgray, BEGINBITMAP*) differently for
% color vs. black-and-white devices, based on FMPrintInColor.
FMPrintInColor
{
/HUE 0 def
/SAT 0 def
/BRIGHT 0 def
% array of arrays Hue and Sat values for the separations [HUE BRIGHT]
/Colors
[[0 0 ] % black
[0 0 ] % white
[0.00 1.0] % red
[0.37 1.0] % green
[0.60 1.0] % blue
[0.50 1.0] % cyan
[0.83 1.0] % magenta
[0.16 1.0] % comment / yellow
] def
/BEGINBITMAPCOLOR {
BITMAPCOLOR} def
/BEGINBITMAPCOLORc {
BITMAPCOLORc} def
/BEGINBITMAPTRUECOLOR {
BITMAPTRUECOLOR } def
/BEGINBITMAPTRUECOLORc {
BITMAPTRUECOLORc } def
% K: color-index -> --  Selects a separation color from Colors.
/K {
Colors exch get dup
0 get /HUE exch store
1 get /BRIGHT exch store
HUE 0 eq BRIGHT 0 eq and
{1.0 SAT sub setgray}
{HUE SAT BRIGHT sethsbcolor}
ifelse
} def
% FMsetgray: gray -> --  Treats gray as saturation of the current color.
/FMsetgray {
/SAT exch 1.0 exch sub store
HUE 0 eq BRIGHT 0 eq and
{1.0 SAT sub setgray}
{HUE SAT BRIGHT sethsbcolor}
ifelse
} bind def
}
{
% Black-and-white device: color bitmaps degrade to gray, K is a no-op.
/BEGINBITMAPCOLOR {
BITMAPGRAY} def
/BEGINBITMAPCOLORc {
BITMAPGRAYc} def
/BEGINBITMAPTRUECOLOR {
BITMAPTRUEGRAY } def
/BEGINBITMAPTRUECOLORc {
BITMAPTRUEGRAYc } def
/FMsetgray {setgray} bind def
/K {
pop
} def
}
ifelse
% normalize: x y -> x' y'  Snap a point to device-pixel boundaries.
/normalize {
transform round exch round exch itransform
} bind def
% dnormalize: dx dy -> dx' dy'  Snap a distance to device pixels.
/dnormalize {
dtransform round exch round exch idtransform
} bind def
% lnormalize: w -> w'  Snap a line width to an odd device-pixel count.
/lnormalize {
0 dtransform exch cvi 2 idiv 2 mul 1 add exch idtransform pop
} bind def
% H: linewidth -> --
/H {
lnormalize setlinewidth
} bind def
% Z: linecap -> --
/Z {
setlinecap
} bind def
/fillvals FMLOCAL
% X: fill-index -> --
% Applies a registered fill: pattern string -> setpattern, number -> grayness.
/X {
fillvals exch get
dup type /stringtype eq
{8 1 setpattern}
{grayness}
ifelse
} bind def
% V: fill current path (even-odd), preserving the path.
/V {
gsave eofill grestore
} bind def
% N: stroke current path.
/N {
stroke
} bind def
/M {newpath moveto} bind def
/E {lineto} bind def
/D {curveto} bind def
/O {closepath} bind def
/n FMLOCAL
% L: x1 y1 ... xn yn n -> --  Build an n-point polyline (pixel-snapped).
/L {
/n exch def
newpath
normalize
moveto
2 1 n {pop normalize lineto} for
} bind def
% Y: like L but closes the path (polygon).
/Y {
L
closepath
} bind def
/x1 FMLOCAL
/x2 FMLOCAL
/y1 FMLOCAL
/y2 FMLOCAL
/rad FMLOCAL
% R: x1 y1 x2 y2 -> --  Build a rectangle path via Y.
/R {
/y2 exch def
/x2 exch def
/y1 exch def
/x1 exch def
x1 y1
x2 y1
x2 y2
x1 y2
4 Y
} bind def
% RR: x1 y1 x2 y2 rad -> --  Rounded rectangle; the final repeat clears the
% tangent-point coordinates left on the stack by the four arctos.
/RR {
/rad exch def
normalize
/y2 exch def
/x2 exch def
normalize
/y1 exch def
/x1 exch def
newpath
x1 y1 rad add moveto
x1 y2 x2 y2 rad arcto
x2 y2 x2 y1 rad arcto
x2 y1 x1 y1 rad arcto
x1 y1 x1 y2 rad arcto
closepath
16 {pop} repeat
} bind def
% C: x1 y1 x2 y2 -> --  Replace the current clip with this rectangle
% (grestore/gsave pops the previous page-level clip first).
/C {
grestore
gsave
R
clip
} bind def
/FMpointsize FMLOCAL
% F: font-index -> --  Select a font from FMfonts at FMpointsize.
/F {
FMfonts exch get
FMpointsize scalefont
setfont
} bind def
% Q: font-index pointsize -> --  Set point size, then select font.
/Q {
/FMpointsize exch def
F
} bind def
% T: string x y -> --  Plain show at (x,y).
/T {
moveto show
} bind def
% RF: flip-flag angle -> --  Rotate, optionally mirroring horizontally.
/RF {
rotate
0 ne {-1 1 scale} if
} bind def
% TF: rotated/flipped variant of T.
/TF {
gsave
moveto
RF
show
grestore
} bind def
% P: string pad x y -> --  Show with extra width on spaces (char 32).
/P {
moveto
0 32 3 2 roll widthshow
} bind def
/PF {
gsave
moveto
RF
0 32 3 2 roll widthshow
grestore
} bind def
% S: string track x y -> --  Show with per-character tracking (ashow).
/S {
moveto
0 exch ashow
} bind def
/SF {
gsave
moveto
RF
0 exch ashow
grestore
} bind def
% B: string track pad x y -> --  Combined tracking + space padding.
/B {
moveto
0 32 4 2 roll 0 exch awidthshow
} bind def
/BF {
gsave
moveto
RF
0 32 4 2 roll 0 exch awidthshow
grestore
} bind def
% G: x y w h -> --  Filled ellipse: unit circle scaled to (w,h) at (x,y).
/G {
gsave
newpath
normalize translate 0.0 0.0 moveto
dnormalize scale
0.0 0.0 1.0 5 3 roll arc
closepath fill
grestore
} bind def
% A: x y w h startangle endangle -> --  Stroked elliptical arc; the CTM is
% restored before stroking so the pen is not distorted by the scale.
/A {
gsave
savematrix
newpath
2 index 2 div add exch 3 index 2 div sub exch
normalize 2 index 2 div sub exch 3 index 2 div add exch
translate
scale
0.0 0.0 1.0 5 3 roll arc
restorematrix
stroke
grestore
} bind def
/x FMLOCAL
/y FMLOCAL
/w FMLOCAL
/h FMLOCAL
/xx FMLOCAL
/yy FMLOCAL
/ww FMLOCAL
/hh FMLOCAL
/FMsaveobject FMLOCAL
/FMoptop FMLOCAL
/FMdicttop FMLOCAL
% BEGINPRINTCODE: brackets embedded raw PostScript: records operand- and
% dict-stack depths plus a save object so ENDPRINTCODE can clean up whatever
% the embedded code leaves behind.
/BEGINPRINTCODE {
/FMdicttop countdictstack 1 add def
/FMoptop count 4 sub def
/FMsaveobject save def
userdict begin
/showpage {} def
FMNORMALIZEGRAPHICS
3 index neg 3 index neg translate
} bind def
% ENDPRINTCODE: unwind stacks to the recorded depths and restore VM.
/ENDPRINTCODE {
count -1 FMoptop {pop pop} for
countdictstack -1 FMdicttop {pop end} for
FMsaveobject restore
} bind def
% gn: -> n
% Reads a base-46 variable-length count from cf (bytes offset by 32; a byte
% < 46 terminates). Used by the run-length decoder for long runs.
/gn {
0
{ 46 mul
cf read pop
32 sub
dup 46 lt {exit} if
46 sub add
} loop
add
} bind def
/str FMLOCAL
% cfs: -> string  Allocate an sl-byte string filled with val.
/cfs {
/str sl string def
0 1 sl 1 sub {str exch val put} for
str def
} bind def
% ic: 256-entry opcode table for the compressed-bitmap decoder (ip).
% Entries dispatch on the input byte: hex-copy (hx), literal counts,
% white runs (wh), black runs (bl), and fill runs (fl); gn variants take an
% extended count from the stream.
/ic [
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0223
0 0 0 0 0 0 0 0 0 0 0 0 0 0 0 0223
0
{0 hx} {1 hx} {2 hx} {3 hx} {4 hx} {5 hx} {6 hx} {7 hx} {8 hx} {9 hx}
{10 hx} {11 hx} {12 hx} {13 hx} {14 hx} {15 hx} {16 hx} {17 hx} {18 hx}
{19 hx} {gn hx} {0} {1} {2} {3} {4} {5} {6} {7} {8} {9} {10} {11} {12}
{13} {14} {15} {16} {17} {18} {19} {gn} {0 wh} {1 wh} {2 wh} {3 wh}
{4 wh} {5 wh} {6 wh} {7 wh} {8 wh} {9 wh} {10 wh} {11 wh} {12 wh}
{13 wh} {14 wh} {gn wh} {0 bl} {1 bl} {2 bl} {3 bl} {4 bl} {5 bl} {6 bl}
{7 bl} {8 bl} {9 bl} {10 bl} {11 bl} {12 bl} {13 bl} {14 bl} {gn bl}
{0 fl} {1 fl} {2 fl} {3 fl} {4 fl} {5 fl} {6 fl} {7 fl} {8 fl} {9 fl}
{10 fl} {11 fl} {12 fl} {13 fl} {14 fl} {gn fl}
] def
/sl FMLOCAL
/val FMLOCAL
/ws FMLOCAL
/im FMLOCAL
/bs FMLOCAL
/cs FMLOCAL
/len FMLOCAL
/pos FMLOCAL
% ms: size -> --
% (Re)allocate the decoder's working strings: ws (all-white), im (image row),
% bs (all-black), cs (codes line buffer).
/ms {
/sl exch def
/val 255 def
/ws cfs
/im cfs
/val 0 def
/bs cfs
/cs cfs
} bind def
400 ms
% ip: -> row
% Image-data proc for compressed bitmaps: runs the saved row setup (is),
% reads one code line from cf and executes each opcode via the ic table,
% accumulating the output position.
/ip {
is
0
cf cs readline pop
{ ic exch get exec
add
} forall
pop
} bind def
% wh: pos len -> pos len  Copy a white run into the image row.
/wh {
/len exch def
/pos exch def
ws 0 len getinterval im pos len getinterval copy pop
pos len
} bind def
% bl: pos len -> pos len  Copy a black run into the image row.
/bl {
/len exch def
/pos exch def
bs 0 len getinterval im pos len getinterval copy pop
pos len
} bind def
/s1 1 string def
% fl: pos len -> pos len  Fill a run with the next hex byte from cf.
/fl {
/len exch def
/pos exch def
/val cf s1 readhexstring pop 0 get def
pos 1 pos len add 1 sub {im exch val put} for
pos len
} bind def
% hx: pos len -> --  Read len literal hex bytes into the row at pos.
/hx {
3 copy getinterval
cf exch readhexstring pop pop
} bind def
/h FMLOCAL
/w FMLOCAL
/d FMLOCAL
/lb FMLOCAL
/bitmapsave FMLOCAL
/is FMLOCAL
/cf FMLOCAL
% wbytes: width depth -> bytes-per-row  (depth 1, 2 or 8 bits per sample).
/wbytes {
dup
8 eq {pop} {1 eq {7 add 8 idiv} {3 add 4 idiv} ifelse} ifelse
} bind def
% BEGINBITMAP*c: compressed bitmaps at 1, 8 and 2 bits per sample.
/BEGINBITMAPBWc {
1 {} COMMONBITMAPc
} bind def
/BEGINBITMAPGRAYc {
8 {} COMMONBITMAPc
} bind def
/BEGINBITMAP2BITc {
2 {} COMMONBITMAPc
} bind def
% COMMONBITMAPc: x y angle sx sy w h depth setupproc -> --
% Renders a run-length-compressed bitmap; inline code lines follow in the
% file and are consumed by ip via cf.
/COMMONBITMAPc {
/r exch def
/d exch def
gsave
translate rotate scale /h exch def /w exch def
/lb w d wbytes def
% Grow the decoder buffers if this row is wider than the current allocation.
sl lb lt {lb ms} if
/bitmapsave save def
r
/is im 0 lb getinterval def
ws 0 lb getinterval is copy pop
/cf currentfile def
w h d [w 0 0 h neg 0 h]
{ip} image
bitmapsave restore
grestore
} bind def
% BEGINBITMAP*: uncompressed (plain hex) bitmap variants.
/BEGINBITMAPBW {
1 {} COMMONBITMAP
} bind def
/BEGINBITMAPGRAY {
8 {} COMMONBITMAP
} bind def
/BEGINBITMAP2BIT {
2 {} COMMONBITMAP
} bind def
% COMMONBITMAP: same operands as COMMONBITMAPc but reads raw hex rows.
/COMMONBITMAP {
/r exch def
/d exch def
gsave
translate rotate scale /h exch def /w exch def
/bitmapsave save def
r
/is w d wbytes string def
/cf currentfile def
w h d [w 0 0 h neg 0 h]
{cf is readhexstring pop} image
bitmapsave restore
grestore
} bind def
/proc1 FMLOCAL
/proc2 FMLOCAL
/newproc FMLOCAL
% Fmcc: proc1 proc2 -> proc
% Concatenate two procedures into one executable array.
/Fmcc {
/proc2 exch cvlit def
/proc1 exch cvlit def
/newproc proc1 length proc2 length add array def
newproc 0 proc1 putinterval
newproc proc1 length proc2 putinterval
newproc cvx
} bind def
% Lookup tables for the modified color transfer functions built below.
/ngrayt 256 array def
/nredt 256 array def
/nbluet 256 array def
/ngreent 256 array def
/gryt FMLOCAL
/blut FMLOCAL
/grnt FMLOCAL
/redt FMLOCAL
/indx FMLOCAL
/cynu FMLOCAL
/magu FMLOCAL
/yelu FMLOCAL
/k FMLOCAL
/u FMLOCAL
% colorsetup: build color-transfer lookup tables that map palette indices
% (from the red/green/blue arrays supplied by the document) through
% undercolor removal and black generation, then install them.
/colorsetup {
currentcolortransfer
/gryt exch def
/blut exch def
/grnt exch def
/redt exch def
0 1 255 {
/indx exch def
/cynu 1 red indx get 255 div sub def
/magu 1 green indx get 255 div sub def
/yelu 1 blue indx get 255 div sub def
/k cynu magu min yelu min def
/u k currentundercolorremoval exec def
nredt indx 1 0 cynu u sub max sub redt exec put
ngreent indx 1 0 magu u sub max sub grnt exec put
nbluet indx 1 0 yelu u sub max sub blut exec put
ngrayt indx 1 k currentblackgeneration exec sub gryt exec put
} for
{255 mul cvi nredt exch get}
{255 mul cvi ngreent exch get}
{255 mul cvi nbluet exch get}
{255 mul cvi ngrayt exch get}
setcolortransfer
{pop 0} setundercolorremoval
{} setblackgeneration
} bind def
/tran FMLOCAL
% fakecolorsetup: on b&w devices, map palette indices to luminance
% (77R+151G+28B)/256 via a transfer-function lookup string.
/fakecolorsetup {
/tran 256 string def
0 1 255 {/indx exch def
tran indx
red indx get 77 mul
green indx get 151 mul
blue indx get 28 mul
add add 256 idiv put} for
currenttransfer
{255 mul cvi tran exch get 255.0 div}
exch Fmcc settransfer
} bind def
% BITMAPCOLOR: uncompressed 8-bit palette-indexed image rendered with
% colorimage; the single-source proc is reused for all three components
% because the transfer tables (colorsetup) do the separation.
/BITMAPCOLOR {
/d 8 def
gsave
translate rotate scale /h exch def /w exch def
/bitmapsave save def
colorsetup
/is w d wbytes string def
/cf currentfile def
w h d [w 0 0 h neg 0 h]
{cf is readhexstring pop} {is} {is} true 3 colorimage
bitmapsave restore
grestore
} bind def
% BITMAPCOLORc: compressed variant of BITMAPCOLOR (rows decoded by ip).
/BITMAPCOLORc {
/d 8 def
gsave
translate rotate scale /h exch def /w exch def
/lb w d wbytes def
sl lb lt {lb ms} if
/bitmapsave save def
colorsetup
/is im 0 lb getinterval def
ws 0 lb getinterval is copy pop
/cf currentfile def
w h d [w 0 0 h neg 0 h]
{ip} {is} {is} true 3 colorimage
bitmapsave restore
grestore
} bind def
% BITMAPTRUECOLORc: compressed 24-bit RGB image; per-component decode procs
% (ip/gip/bip) feed colorimage.
/BITMAPTRUECOLORc {
gsave
translate rotate scale /h exch def /w exch def
/bitmapsave save def
/is w string def
ws 0 w getinterval is copy pop
/cf currentfile def
w h 8 [w 0 0 h neg 0 h]
{ip} {gip} {bip} true 3 colorimage
bitmapsave restore
grestore
} bind def
% BITMAPTRUECOLOR: uncompressed 24-bit RGB, one hex row per component.
/BITMAPTRUECOLOR {
gsave
translate rotate scale /h exch def /w exch def
/bitmapsave save def
/is w string def
/gis w string def
/bis w string def
/cf currentfile def
w h 8 [w 0 0 h neg 0 h]
{ cf is readhexstring pop }
{ cf gis readhexstring pop }
{ cf bis readhexstring pop }
true 3 colorimage
bitmapsave restore
grestore
} bind def
% BITMAPTRUEGRAYc: compressed 24-bit RGB collapsed to gray via 'gray'.
/BITMAPTRUEGRAYc {
gsave
translate rotate scale /h exch def /w exch def
/bitmapsave save def
/is w string def
ws 0 w getinterval is copy pop
/cf currentfile def
w h 8 [w 0 0 h neg 0 h]
{ip gip bip w gray} image
bitmapsave restore
grestore
} bind def
/ww FMLOCAL
/r FMLOCAL
/g FMLOCAL
/b FMLOCAL
/i FMLOCAL
% gray: r g b w -> r'
% Convert parallel R/G/B row strings to luminance in place in r
% (0.299R + 0.587G + 0.114B).
/gray {
/ww exch def
/b exch def
/g exch def
/r exch def
0 1 ww 1 sub { /i exch def r i get .299 mul g i get .587 mul
b i get .114 mul add add r i 3 -1 roll floor cvi put } for
r
} bind def
% BITMAPTRUEGRAY: uncompressed 24-bit RGB collapsed to gray via 'gray'.
/BITMAPTRUEGRAY {
gsave
translate rotate scale /h exch def /w exch def
/bitmapsave save def
/is w string def
/gis w string def
/bis w string def
/cf currentfile def
w h 8 [w 0 0 h neg 0 h]
{ cf is readhexstring pop
cf gis readhexstring pop
cf bis readhexstring pop w gray} image
bitmapsave restore
grestore
} bind def
% BITMAPGRAY/BITMAPGRAYc: palette-indexed images on b&w devices; the
% palette is folded into the transfer function by fakecolorsetup.
/BITMAPGRAY {
8 {fakecolorsetup} COMMONBITMAP
} bind def
/BITMAPGRAYc {
8 {fakecolorsetup} COMMONBITMAPc
} bind def
/ENDBITMAP {
} bind def
end
/ALDsave FMLOCAL
% ALDmatrix: the page CTM captured at prolog time, used to run Adobe
% Legal Department (ALD) / imported-code fragments in a known coordinate
% system.
/ALDmatrix matrix def ALDmatrix currentmatrix pop
% StartALD / InALD / DoneALD: bracket such a fragment with a VM save,
% switching to ALDmatrix and back.
/StartALD {
/ALDsave save def
savematrix
ALDmatrix setmatrix
} bind def
/InALD {
restorematrix
} bind def
/DoneALD {
ALDsave restore
} bind def
%%EndProlog
%%BeginSetup
% Document setup: version check, page geometry, fonts, and the fill table
% (gray levels and 8x8 pattern strings) referenced by the X operator.
(3.0) FMVERSION
1 1 612 792 0 1 12 FMDOCUMENT
0 0 /Times-Roman FMFONTDEFINE
1 0 /Times-Italic FMFONTDEFINE
2 0 /Times-Bold FMFONTDEFINE
3 0 /Helvetica FMFONTDEFINE
4 0 /Helvetica-Bold FMFONTDEFINE
5 0 /Times-BoldItalic FMFONTDEFINE
32 FMFILLS
0 0 FMFILL
1 0.1 FMFILL
2 0.3 FMFILL
3 0.5 FMFILL
4 0.7 FMFILL
5 0.9 FMFILL
6 0.97 FMFILL
7 1 FMFILL
8 <0f1e3c78f0e1c387> FMFILL
9 <0f87c3e1f0783c1e> FMFILL
% NOTE(review): the following two FMFILL calls (and those for 24, 25, 29
% below) carry only an index with no gray/pattern operand, yet FMFILL's
% 'fillvals 3 1 roll put' consumes two operands. This looks like lost
% pattern data from the original file - verify against an intact
% FrameMaker 3.0 output before relying on these fill indices.
10 FMFILL
11 FMFILL
12 <8142241818244281> FMFILL
13 <03060c183060c081> FMFILL
14 <8040201008040201> FMFILL
16 1 FMFILL
17 0.9 FMFILL
18 0.7 FMFILL
19 0.5 FMFILL
20 0.3 FMFILL
21 0.1 FMFILL
22 0.03 FMFILL
23 0 FMFILL
24 FMFILL
25 FMFILL
26 <3333333333333333> FMFILL
27 <0000ffff0000ffff> FMFILL
28 <7ebddbe7e7dbbd7e> FMFILL
29 FMFILL
30 <7fbfdfeff7fbfdfe> FMFILL
%%EndSetup
%%Page: "26" 26
%%BeginPaperSize: Letter
%%EndPaperSize
612 792 0 FMBEGINPAGE
% Generated page body (FrameMaker output): rectangles (R/V), fill and color
% selection (X/K), font changes (F/Q) and positioned text (T/P).
% Coordinates and string data are machine-generated - do not hand-edit.
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 26 -) 293 757 T
72 27 540 720 R
7 X
V
0 X
([14]) 72 712 T
2.88 (Solomonoff, R. J. \0501986\051. The application of algorithmic probability to problems in) 108 712 P
0.88 (artificial intelligence. In L. N. Karnal and J. F. Lemmer \050Eds.\051,) 108 698 P
1 F
0.88 (Uncertainty in Artificial) 422.24 698 P
(Intelligence) 108 684 T
0 F
(, Elsevier Science, pp. 473-491.) 164.65 684 T
([15]) 72 665 T
-0.14 (Stephen, G. A. and Mather, P. \0501993\051. Sweeping away the problems that dog the industry?) 108 665 P
1 F
(AI Communications) 108 651 T
0 F
( 6\0503/4\051: 213-218.) 203.66 651 T
([16]) 72 632 T
(Sudkamp, T. A. \0501988\051.) 108 632 T
1 F
(Languages and Machines) 225.32 632 T
0 F
(, Reading, Mass.: Addison-Wesley.) 348.65 632 T
([17]) 72 613 T
0.19 (Uspensky, V. A. \0501992\051. Kolmogorov and mathematical logic.) 108 613 P
1 F
0.19 (Journal of Symbolic Logic) 412.45 613 P
0 F
(57\0502\051: 385-412.) 108 599 T
([18]) 72 580 T
0.75 (Wallace, C. S. and Freeman, P. R. \0501987\051. Estimation and inference by compact coding.) 108 580 P
1 F
(Journal of the Royal Statistical Society B) 108 566 T
0 F
(49\0503\051: 240-252.) 308.33 566 T
([19]) 72 547 T
1.44 (Wolff, J. G. \0501977\051. The discovery of segments in natural language.) 108 547 P
1 F
1.44 (British Journal of) 451.78 547 P
(Psychology) 108 533 T
0 F
( 67: 377-390.) 163.32 533 T
([20]) 72 514 T
-0.39 (Wolff, J. G. \0501982\051. Language acquisition, data compression and generalization.) 108 514 P
1 F
-0.39 (Language) 492 514 P
(& Communication) 108 500 T
0 F
( 2: 57-89. \050Reprinted in Chapter 3 in [23]\051.) 197 500 T
([21]) 72 481 T
5.49 (Wolff, J. G. \0501987\051. Cognitive development as optimisation. In L. Bolc \050Ed.\051,) 108 481 P
1 F
(Computational Models of Learning) 108 467 T
0 F
(, Berlin: Springer-Verlag, pp. 161-205.) 277.68 467 T
([22]) 72 448 T
0.26 (Wolff, J. G. \0501990\051. Simplicity and power - some unifying ideas in computing.) 108 448 P
1 F
0.26 (Computer) 492 448 P
(Journal) 108 434 T
0 F
( 33\0506\051: 518-534. \050Reprinted in Chapter 4 in [23]\051.) 145.33 434 T
([23]) 72 415 T
1.19 (Wolff, J. G. \0501991a\051.) 108 415 P
1 F
1.19 (Towards a Theory of Cognition and Computing) 216.08 415 P
0 F
1.19 (, Chichester: Ellis) 451.94 415 P
(Horwood.) 108 401 T
([24]) 72 382 T
1.84 (Wolff J G \0501991b\051. On the integration of learning, logical deduction and probabilistic) 108 382 P
0.64 (inductive inference.) 108 368 P
1 F
0.64 (Proceedings of the First International Workshop on Inductive Logic) 207.58 368 P
(Programming, Vienna do Costello, Portugal, 1991) 108 354 T
0 F
(, pp. 177-191) 352.01 354 T
([25]) 72 335 T
7.52 (Wolff, J. G. \0501993\051. Computing, cognition and information compression.) 108 335 P
1 F
7.52 (AI) 528.67 335 P
(Communications) 108 321 T
0 F
( 6\0502\051: 107-127.) 189.34 321 T
([26]) 72 302 T
0.98 (Wolff, J. G. \0501994\051. Towards a new concept of software.) 108 302 P
1 F
0.98 (Software Engineering Journal) 392.71 302 P
0 F
(9\0501\051: 27-38.) 108 288 T
([27]) 72 269 T
4.4 (Wolff, J. G. \0501994\051. A scaleable technique for best-match retrieval of sequential) 108 269 P
(information using metrics-guided search.) 108 255 T
1 F
( Journal of Information Studies) 305.3 255 T
0 F
( 20\0501\051, 16-28.) 455.98 255 T
([28]) 72 236 T
-0.49 (Wolff, J. G. \050submitted for publication\051. Computing as compression: an overview of the SP) 108 236 P
(theory and system.) 108 222 T
([29]) 72 203 T
(Wolff, J. G. \050submitted for publication\051. Computing as compression: SP20.) 108 203 T
([30]) 72 184 T
0.89 (Wolff, J. G. and Chipperfield, A. J. \0501989\051. SP: a computing system for the twenty first) 108 184 P
2.04 (century. Technical Report CS-89-4.0, School of Electronic Engineering, University of) 108 170 P
(Wales, Bangor.) 108 156 T
([31]) 72 137 T
0.87 (Wolff, J. G. and Chipperfield, A. J. \0501990\051. Unifying computing: inductive learning and) 108 137 P
2.09 (logic. In the Proceedings of Expert Systems 90: T. R. Addis and R. M. Muir \050Eds.\051,) 108 123 P
1 F
2.3 (Research and Development in Expert Systems VII) 108 109 P
0 F
2.3 (, Cambridge: Cambridge University) 360.43 109 P
(Press, pp. 263-276.) 108 95 T
([32]) 72 76 T
3.12 (Zvonkin, A. K. and Levin, L. A. \0501970\051. The complexity of finite objects and the) 108 76 P
1.16 (development of the concepts of information and randomness by means of the theory of) 108 62 P
(algorithms.) 108 48 T
1 F
(Russian Mathematical Surveys) 165.34 48 T
0 F
( 25: 83-124.) 313.32 48 T
FMENDPAGE
%%EndPage: "26" 25
%%EndPage: "26" 25
%%Page: "25" 25
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 25 -) 293 757 T
72 27 540 720 R
7 X
V
2 F
0 X
(Acknowledgements) 72 712 T
0 F
1.92 (I am very grateful to Dr Tim Porter and to Dr Chris Wensley, both of the School of) 108 693 P
4.92 (Mathematics, University of Wales, Bangor, for advice and discussion about aspects of) 72 677 P
1.64 (mathematics. Detlef Morgenstern of Dresden has provided many useful observations on these) 72 661 P
(ideas and constructive comments on earlier drafts of this article.) 72 645 T
0.52 (The preparation of this article was supported in part by a grant to Dr J G Wolff from the) 108 624 P
0.17 (UK Science and Engineering Research Council for \322The Further Development of SP: Integration) 72 608 P
(of Inductive Learning, Inference and Information Retrieval\323 \050Ref. GR/G51565\051.) 72 592 T
2 F
(Correction) 72 566 T
0 F
0.11 (I would like to take this opportunity to correct a printing error in [25]. The formula which) 108 547 P
1.16 (was given in Section 3.2 \050p. 111\051 for the number of possible subsequences in a sequence of) 72 531 P
1 F
1.16 (N) 531.99 531 P
0 F
(symbols should, of course, have been) 72 515 T
1 F
( P) 251.64 515 T
0 F
( = 2) 261.97 515 T
1 10 Q
(N) 280.74 519.8 T
0 12 Q
( - 1.) 287.41 515 T
2 F
(References) 72 489 T
0 F
([1]) 72 470 T
0 (Chaitin, G. J. \0501987\051.) 108 470 P
1 F
0 (Algorithmic Information Theory) 212.67 470 P
0 F
0 (, Cambridge: Cambridge University) 367.34 470 P
(Press.) 108 456 T
([2]) 72 437 T
5.49 (Chaitin, G. J. \0501987\051.) 108 437 P
1 F
5.49 (Information, Randomness and Incompleteness: Papers on) 234.6 437 P
(Algorithmic Information Theory) 108 423 T
0 F
(, Singapore: World Scientific Press.) 262.67 423 T
([3]) 72 404 T
0.74 (Chaitin, G. J. \0501975\051. Randomness and mathematical proof.) 108 404 P
1 F
0.74 (Scientific American) 402.88 404 P
0 F
0.74 (, 232\0505\051:) 497.93 404 P
(47-52.) 108 390 T
([4]) 72 371 T
0.23 (Cook, C. M. and Rosenfeld, A. \0501976\051. Some experiments in grammatical inference. In J.) 108 371 P
-0.43 (C. Simon \050Ed.\051,) 108 357 P
1 F
-0.43 (Computer Oriented Learning Processes) 185.38 357 P
0 F
-0.43 (, Leyden: Noordhoff, pp. 157-174.) 375.75 357 P
([5]) 72 338 T
(Harris, Z. S. \0501954\051. Distributional structure.) 108 338 T
1 F
(Linguistics Today) 325.64 338 T
0 F
( 10: 146-62.) 411.32 338 T
([6]) 72 319 T
0.38 (Klop, J. W. \0501992\051. Term rewriting systems. In S. Abramsky, D. M. Gabbay and T. S. E.) 108 319 P
1.88 (Maibaum \050Eds.\051,) 108 305 P
1 F
1.88 (Handbook of Logic in Computer Science, Vol. 2) 195.75 305 P
0 F
1.88 (, Oxford: Clarendon) 439.25 305 P
(Press, pp. 1-116.) 108 291 T
([7]) 72 272 T
-0.24 (Kolmogorov, A. N. \0501965\051. Three approaches to the quantitative definition of information.) 108 272 P
1 F
(Problems of Information Transmission) 108 258 T
0 F
( 1\0501\051: 1-7.) 294.35 258 T
([8]) 72 239 T
-0.4 (Li, M. and Vitanyi, P. M. B. \0501990\051. Kolmogorov complexity and its applications. In J. van) 108 239 P
4.09 (Leeuwen \050Ed.\051,) 108 225 P
1 F
4.09 (Handbook of Theoretical Computer Science) 193.49 225 P
0 F
4.09 (, Amsterdam: Elsevier.) 421.16 225 P
(Chapter 4, pp. 188-254.) 108 211 T
([9]) 72 192 T
2.9 (Minsky, M. \0501956\051. Some universal elements for finite automata.) 108 192 P
1 F
2.9 (Automata Studies) 449.76 192 P
0 F
2.9 (,) 537 192 P
(Princeton, NJ.: Princeton University Press, pp. 117-128.) 108 178 T
([10]) 72 159 T
3.14 (Pednault, E. P. D. \0501991\051. Minimal length encoding and inductive inference. In G.) 108 159 P
4.82 (Piatetsky-Shapiro and W. J. Frawley \050Eds.\051,) 108 145 P
1 F
4.82 (Knowledge Discovery in Databases) 350.89 145 P
0 F
4.82 (,) 537 145 P
(Cambridge Mass.: MIT Press.) 108 131 T
([11]) 72 112 T
3.87 (Post, E. \0501943\051. Formal reductions of the general combinatorial decision problem.) 108 112 P
1 F
(American Journal of Mathematics) 108 98 T
0 F
( 65: 197-268.) 272.32 98 T
([12]) 72 79 T
1.64 (Shannon, C. E. and Weaver, W. \0501949\051.) 108 79 P
1 F
1.64 (The Mathematical Theory of Communication) 313.78 79 P
0 F
1.64 (,) 537 79 P
(Urbana: University of Illinois Press.) 108 65 T
([13]) 72 46 T
3.77 (Solomonoff, R. J. \0501964\051. A formal theory of inductive inference. Parts I and II.) 108 46 P
1 F
(Information and Control) 108 32 T
0 F
(7: 1-22 and 224-254.) 229.68 32 T
FMENDPAGE
%%EndPage: "25" 24
%%Page: "24" 24
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 24 -) 293 757 T
72 27 540 720 R
7 X
V
0 X
-0.33 (any body of information. The early models \050e.g., SP6\051 confine their attention to redundancy which) 72 712 P
1.1 (may be attributed to repeating substrings and largely ignore other kinds of redundancy such as) 72 696 P
4.01 (redundancy due to repeated subsequences \050where constituent symbols are not necessarily) 72 680 P
2.47 (contiguous in the data\051 or redundancy due to mirror images \050e.g., palindromes\051. Hence, the) 72 664 P
1.32 (published descriptions which were cited refer to) 72 648 P
1 F
1.32 (extractable) 314.82 648 P
0 F
1.32 ( redundancy \050[31], p. 270; [23], p.) 368.81 648 P
0.32 (147\051\051. Extractable redundancy is a very different concept from) 72 632 P
1 F
0.32 (total) 378.09 632 P
0 F
0.32 ( redundancy which would be) 400.1 632 P
0.04 (needed for optimal compression or for the extraction of redundancy \322until there remains none left) 72 616 P
(to extract\323.) 72 600 T
0.7 (Furthermore, the small examples used with the early SP models are not \322arbitrary data\323.) 108 579 P
0.99 (Since they are small, they can be searched in a way which is not feasible for realistically large) 72 563 P
-0.71 (examples. As explained in Section 6.2, the choice of simple but inefficient search methods for early) 72 547 P
0.08 (models was dictated by the focus on integration of functions and the need to avoid \324tramlining\325 of) 72 531 P
-0.13 (ideas. Later models \050SP20 and SP21\051 use much better search methods which are not exhaustive in) 72 515 P
(any sense.) 72 499 T
2 F
(7.4) 72 473 T
(Most Strings are Random) 108 473 T
0 F
-0.49 (\322... contrary to the thrust of Wolff\325s conjecture, Kolmogorov complexity theory proves that) 108 454 P
-0.3 (almost all finite strings are random \050and hence incompressible\051, but cannot be proved to be) 108 438 P
(so \050see again, e.g., [8][32]\051.\323) 108 422 T
0.67 (That most strings in the space of all possible universes are incompressible but cannot be) 108 401 P
1.41 (proved to be so is certainly interesting but has no bearing on the truth or otherwise of the SP) 72 385 P
0.54 (computational conjecture. Of course, \324incompressible\325 here means \324incompressible unless one is) 72 369 P
(prepared to lose non-redundant information\325.) 72 353 T
0.05 (Without redundancy \050and thus the possibility of compression\051, any kind of prediction by a) 108 332 P
3.61 (brain or computer becomes impossible. In a totally random \050incompressible\051 universe, an) 72 316 P
-0.48 (algorithm which gives one result today may give a different result tomorrow. Data stored now may) 72 300 P
2.41 (not be valid in the next nano-second. Indeed, all kinds of persistent, structured entities like) 72 284 P
(\324algorithm\325, \324brain\325 and \324computer\325 will have disappeared in the white hiss of randomness.) 72 268 T
-0.57 (That redundancy is a rare commodity in the space of all possible universes only emphasises) 108 247 P
0.11 (its importance. Since all kinds of prediction depend on it, we need to be able to find it. Hence the) 72 231 P
(prominence of pattern matching and search in the SP theory.) 72 215 T
2 F
(8) 72 184 T
(Conclusion) 108 184 T
0 F
-0.48 (In this article, I have tried to answer the points raised in [15], to correct the many errors and) 108 165 P
(misunderstandings, and to discuss associated issues.) 72 149 T
1.89 (That computing and formal reasoning may be seen as information compression is not) 108 128 P
1.36 (widely recognised and certainly strikes many people as an outlandish idea. A broader view of) 72 112 P
0.64 (information processing, which accommodates brains and nervous systems as well as computers,) 72 96 P
0.52 (brings information compression more clearly into view. As described in [25], there is a range of) 72 80 P
3.73 (phenomena across this broader canvas which, taken together, suggest rather strongly that) 72 64 P
-0.11 (information compression may provide a unifying theme for information storage and processing in) 72 48 P
(all kinds of system, both man-made and natural.) 72 32 T
FMENDPAGE
%%EndPage: "24" 23
%%Page: "23" 23
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 23 -) 293 757 T
72 27 540 720 R
7 X
V
0 X
(by AIT as it stands now.) 72 712 T
(What, then, is the relationship between the SP theory and AIT?) 108 691 T
-0.53 (Shannon\325s information theory was an inspiration for the SP theory but, as was noted in [25]) 108 670 P
-0.68 (\050Section 2.3 and later\051, AIT provides a view of redundancy and randomness which is different from) 72 654 P
2.29 (Shannon\325s view [12] but may be integrated with it within a framework of pattern matching) 72 638 P
(unification and search:) 72 622 T
(\245) 72 601 T
3.03 (As described in [25], Section 2.2, Shannon\325s basic measure of information may be) 108 601 P
-0.38 (generalised so that \324sequential\325 redundancy may be seen as relatively frequent repetition of) 108 585 P
(patterns, where a \324pattern\325 may be a coherent substring or a fragmented subsequence.) 108 569 T
(\245) 72 548 T
-0.46 (If PMUS is indeed a fundamental mechanism for the detection and removal of redundancy,) 108 548 P
-0.33 (then the existence of relatively frequent repetition of patterns means that the information is) 108 532 P
(compressible, in accordance with the AIT view of redundancy.) 108 516 T
2.67 (Thus the SP theory seems to rest naturally on the twin foundations of AIT and \050a modest) 72 495 P
0.7 (generalisation of\051 Shannon\325s information theory. The main differences between AIT and the SP) 72 479 P
(theory are:) 72 463 T
(\245) 72 442 T
-0.57 (In general, the main focus of interest in AIT has been on defining concepts like randomness) 108 442 P
2.45 (and in exploring associated theoretical limits. Since [13], there have been links with) 108 426 P
-0.17 (research on inductive inference. The field is well reviewed in [8]. By contrast, the focus of) 108 410 P
0.44 (interest in the \050computational part of the\051 SP programme has been to understand whether) 108 394 P
2.8 (or not all forms of computing and formal reasoning may usefully be understood as) 108 378 P
(information compression by pattern matching, unification and search.) 108 362 T
(\245) 72 341 T
1.94 (The concept of an \324algorithm\325 in AIT \050see, for example, [17]\051 seems to have little in) 108 341 P
-0.28 (common with ideas about how functions may be executed by compression \050as described in) 108 325 P
(Sections 2 and 5, above, and in [26]\051.) 108 309 T
(\245) 72 288 T
0.37 (Because of the contradiction discussed above, AIT cannot, as it stands, accommodate the) 108 288 P
(idea that computing may itself be a process of information compression.) 108 272 T
2 F
(7.3) 72 246 T
(Attempting the Impossible?) 108 246 T
0 F
1.54 (\322... it should be noted that some important results arising from this field [Kolmogorov) 108 227 P
3.78 (complexity] are completely ignored in SP. For example, it is known that optimal) 108 211 P
0.7 (compression of arbitrary data is not computable \050see, e.g., [8][32]\051. But, it would appear) 108 195 P
0.37 (that the SP compression algorithm, as presented in the literature \050e.g., [31][23]\051, attempts) 108 179 P
0.18 (just such a task by striving to extract redundancy until there remains none left to extract.\323) 108 163 P
(\050[15], p. 217\051.) 108 147 T
0.23 (That optimal compression of arbitrary data is not computable has been clearly recognised) 108 126 P
-0.13 (in the SP programme. Readers are referred again to the two quotations in Section 6.1 \050[22], p. 522) 72 110 P
2.28 (and [31], p. 265\051. Since one cannot be sure that all the redundancy in an arbitrary body of) 72 94 P
(information has been found, one cannot be sure that all the redundancy has been extracted.) 72 78 T
2.46 (Not only has this point been clearly recognised but, contrary to the quotation at the) 108 57 P
0.48 (beginning of this section, none of the SP models have attempted to extract all the redundancy in) 72 41 P
FMENDPAGE
%%EndPage: "23" 22
%%Page: "22" 22
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 22 -) 293 757 T
72 27 540 720 R
7 X
V
0 X
-0.19 (\322thoroughly investigated\323 30 years ago we would certainly expect them to be now available in the) 72 712 P
(shops.) 72 696 T
2 F
(7.1) 72 670 T
(Inductive Inference) 108 670 T
0 F
2.06 (One of Solomonoff\325s achievements \050see, for example, [13]\051 has been to highlight the) 108 651 P
0.45 (significance of information compression in the field of inductive inference \050predicting the future) 72 635 P
0.04 (from the past\051 and the related field of grammatical inference. Notions of pattern matching and the) 72 619 P
1.93 (unification of patterns were the basis of \324distributional analysis\325 as a method of grammatical) 72 603 P
1.64 (inference in the field of \324structural\325 linguistics \050e.g., [5]\051 but Solomonoff brought information) 72 587 P
(compression more sharply into focus.) 72 571 T
0.41 (With pleasing modesty, Solomonoff described his own proposals as \322rudimentary\323 \050[13],) 108 550 P
0.04 (p. 11\051, thereby acknowledging that there were many questions still to be answered. Some of these) 72 534 P
0.02 (questions have been addressed in subsequent research \050e.g., [4][10][18][20]\051 but much remains to) 72 518 P
0.09 (be done. We are, for example, still a long way from achieving unsupervised inductive learning of) 72 502 P
(a grammar for any natural language.) 72 486 T
0.34 (In later research \050e.g., [14]\051, Solomonoff has proposed a general problem solving method) 108 465 P
2.16 (based on algorithmic probability theory. This research differs from the SP programme in its) 72 449 P
0.83 (objectives and approach. But even without these differences, there was and is much work to be) 72 433 P
(done before it could be said that SP systems had been \322thoroughly investigated.\323) 72 417 T
2 F
(7.2) 72 391 T
(Algorithmic Information Theory) 108 391 T
0 F
-0.53 (\322... it seems that,) 108 372 P
1 F
-0.53 (neglecting its central conjecture) 189.2 372 P
0 F
-0.53 (, the SP theory is wholly subsumed by the) 342.58 372 P
(area of Kolmogorov complexity ...\323 \050[15], p. 217, emphasis added\051.) 108 356 T
1.67 (It is not clear how one theory may be subsumed by another if, at the same time, it is) 108 335 P
1.12 (necessary to neglect its central conjecture! Although nothing is said, one may suppose that the) 72 319 P
0.19 (authors of [15] have included this extraordinary qualification because they recognise that there is) 72 303 P
(a basic conflict between the two theories.) 72 287 T
2.64 (A central idea in Kolmogorov complexity theory, which is perhaps better known as) 108 266 P
2.72 (Algorithmic Information Theory \050AIT\051, is that randomness may be defined in terms of the) 72 250 P
0.23 (compressibility of information: \322A series of numbers is random if the smallest algorithm capable) 72 234 P
-0.53 (of specifying it to a computer has about the same number of bits of information as the series itself.\323) 72 218 P
0.83 (\050[2], p. 48\051. If an algorithm can be found which is significantly smaller than the given series of) 72 202 P
(numbers then the series is not random.) 72 186 T
0.75 (This idea, that an algorithm \050or computer program\051 may generate a series of numbers or) 108 165 P
-0.41 (other information which is bigger than the algorithm itself is closely related to the point which was) 72 149 P
1.73 (raised in [15] as a difficulty for the SP theory: \322... one wonders how such a theory could be) 72 133 P
2.34 (reconciled with the obvious counter example - the set of algorithms whose express purpose) 72 117 P
-0.2 (involves an increase in the redundancy of a set of data.\323 \050p. 214\051. As was discussed in Section 2.3,) 72 101 P
-0.38 (above, there is a possible resolution of the conflict, but this was not apparent to the authors of [15].) 72 85 P
0.37 (More to the point in this context is that, to my knowledge, AIT provides no means of explaining) 72 69 P
0.24 (how \324computing as compression\325 might be reconciled with the algorithmic creation of a string of) 72 53 P
-0.71 (symbols which is longer than the algorithm itself. Consequently, the SP theory cannot be subsumed) 72 37 P
FMENDPAGE
%%EndPage: "22" 21
%%Page: "21" 21
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 21 -) 293 757 T
72 27 540 720 R
7 X
V
0 X
0.36 (Section 5.6, memory access may be seen in those terms. The same may be true of the operations) 72 712 P
-0.62 (of an ALU. To make the core useful, there must also be programs, and programs, in turn, need data.) 72 696 P
0.58 (The PMUS mechanisms in the core of a conventional computer are very fast but heavily) 108 675 P
2.8 (constrained so that only the simplest kind of searching is performed. To make up for this) 72 659 P
1.93 (deficiency, programs frequently contain code which has the effect of supplementing the core) 72 643 P
-0.4 (mechanisms to achieve more sophisticated kinds of PMUS. This kind of augmentation of the basic) 72 627 P
0.73 (PMUS mechanisms can be seen most clearly in applications like database searching and \324grep\325.) 72 611 P
0.07 (Less obviously, it appears in the workings of compilers, in accessing the names of data structures) 72 595 P
1.34 (and functions in programs, in \324relaxation\325 techniques, and in other operations and applications) 72 579 P
0.6 (especially in AI, as discussed in [25] and [28]. These supplementary mechanisms are frequently) 72 563 P
(\324re-invented\325 in varied forms in many different programs.) 72 547 T
-0.7 (In the design of an SP system it is intended that the core should contain one general-purpose) 108 526 P
-0.4 (mechanism for PMUS \050which may be adapted for \324broad\325 or \324narrow\325 searching\051. This mechanism) 72 510 P
-0.17 (would be more sophisticated than the mechanisms in conventional computers and, for that reason,) 72 494 P
0.34 (may well be more \324costly\325. However, there should be a net saving of resources because it should) 72 478 P
0.08 (then be possible to eliminate supplementary PMUS mechanisms in \324programs\325 and, perhaps more) 72 462 P
(importantly, eliminate their frequent re-creation in different applications and systems.) 72 446 T
-0.75 (In an SP system, a \324program\325 is a body of already-compressed information and \324data\325 is new) 108 425 P
0.46 (information \050which may or may not be compressed\051 which is to be merged \050fully or partly\051 with) 72 409 P
-0.64 (the pre-established \324program\325. As described in Section 2, PMUS is seen as fundamental in) 72 393 P
1 F
-0.64 (all) 498.96 393 P
0 F
-0.64 ( kinds) 511.64 393 P
0.05 (of computing system, including conventional computers. What is envisaged for an SP system is a) 72 377 P
1 F
1.97 (re-organisation and re-distribution) 72 361 P
0 F
1.97 ( of PMUS mechanisms to eliminate unnecessary structural) 245.27 361 P
(complexity and to maximise the benefits of compression techniques.) 72 345 T
1.48 (The quotation at the beginning of this section assumes that the SP model is inherently) 108 324 P
1.27 (intractable. From the discussion in earlier parts of Section 6 it should be clear that \324broad\325 SP) 72 308 P
0.5 (should be no more power hungry than existing heuristic systems, many of which produce useful) 72 292 P
0.05 (results without undue computational demands. And the \324broad\325 model may be constrained so that) 72 276 P
(computational demands are reduced \050with a corresponding loss of flexibility and \324intelligence\325\051.) 72 260 T
-0.74 (The quotation also assumes that the computational demands of SP \324programs\325 will be added) 108 239 P
0.04 (to the demands of the core system. From the discussion in this section, it should now be clear that) 72 223 P
-0.55 (it makes little sense to regard SP \324programs\325 as having any intrinsic computational complexity. For) 72 207 P
3.1 (the PMUS core of an SP system, the \324programs\325 are, in effect, \324data\325. The computational) 72 191 P
0.89 (complexity of the SP core may be analysed with respect to these and other kinds of data in the) 72 175 P
(normal way.) 72 159 T
2 F
(7) 72 128 T
(Related Research) 108 128 T
0 F
-0.25 (In Section 9 of [15] it is suggested that systems equivalent to the proposed SP system were) 108 109 P
-0.74 (\322thoroughly investigated about 30 years ago \050e.g., [13]\051\323 and that \322neglecting its central conjecture,) 72 93 P
-0.9 (the SP theory is wholly subsumed by the area of Kolmogorov complexity \050e.g., [7][13][32][2][8]\051.\323) 72 77 P
-0.6 (If the concepts are so well established, it is not clear why they should \322defy common sense\323) 108 56 P
1.17 (\050[15], p. 214\051 or be misunderstood in the way that they have been in [15]. If SP systems were) 72 40 P
FMENDPAGE
%%EndPage: "21" 20
%%Page: "20" 20
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 20 -) 293 757 T
72 27 540 720 R
7 X
V
0 X
-0.33 (The overall effect of these design decisions is to constrain the process of matching patterns) 108 712 P
0.43 (in the source code against patterns in the grammar so that many potential comparisons are never) 72 696 P
0.31 (made. The application of this constraint has the effect of reducing the computational demands of) 72 680 P
-0.11 (the parser. More importantly in the present context, the constraint makes the search so simple that) 72 664 P
(we can be sure that the \324correct\325 comparisons and unifications are always made.) 72 648 T
0.41 (The same principles may be applied to an SP system. If the search process is constrained) 108 627 P
-0.33 (\050cf. \324narrow searching\325 described in Section 5.5, above\051, the computational demands of the system) 72 611 P
1 (will be reduced but any ability which the system might have had to find alternative answers to) 72 595 P
0.18 (problems will be progressively reduced. The system may thus be made to be deterministic and to) 72 579 P
0.29 (deliver one \324correct\325 result for any given input. However, the constrained system is also likely to) 72 563 P
(acquire the unattractive \324rigidity\325 of conventional computers.) 72 547 T
2 F
(6.4) 72 521 T
(Tractability) 108 521 T
0 F
0.84 (\322... it should be noted that any universal model of computation should have a negligible) 108 502 P
0.98 (complexity overhead in its own operation to be of use. In the case of SP the intractable) 108 486 P
0 (nature of its own operation would, in turn, dominate the complexity of any algorithms run) 108 470 P
1.72 (by it. In effect, all algorithms implemented to run under SP would take an intractable) 108 454 P
-0.14 (amount of time to do so, regardless of their own inherent complexity. They would become) 108 438 P
0.31 (swamped by the computational demands of the SP universal model of computation itself.) 108 422 P
(And this would thus make everything run under SP essentially intractable.\323 \050[15], p. 217\051.) 108 406 T
0.41 (Issues raised here may be clarified by reference to Figure 2 which suggests, in schematic) 108 385 P
(form, how the organisation of a conventional computer and an SP computer may be seen.) 72 369 T
2.47 (Figure 2. Schematic representation of a conventional computer and an SP computer.) 108.5 98 P
(\324PMUS\325 means mechanisms for pattern matching, unification and metrics-guided search.) 108.5 84 T
0.17 (In the SP view, a conventional computer contains a \324core\325 \050the CPU with mechanisms for) 108 60 P
0.23 (accessing memory\051, part of which may be seen as PMUS mechanisms. As was discussed in [25],) 72 44 P
72 27 540 720 C
72 115 540 365 C
0 12 Q
0 X
0 K
(Conventional) 290.67 347.71 T
(computer) 290.67 335.71 T
(SP computer) 290.67 211.41 T
144 246 504 327 R
7 X
V
0.5 H
2 Z
0 X
N
144 121 504 202 R
7 X
V
0 X
N
157 260 211 287 R
7 X
V
0 X
N
(PMUS) 167.66 269.41 T
(Core) 172.34 311 T
(Programs) 290 311 T
(Data) 443 311 T
(Core) 186.34 183 T
241.75 273.38 282.25 293.62 R
7 X
V
0 X
N
(PMUS) 245.66 279.73 T
349.75 274.38 390.25 294.62 R
7 X
V
0 X
N
(PMUS) 353.66 280.73 T
157.12 132.06 234.88 170.94 R
7 X
V
0 X
N
0 18 Q
(PMUS) 171.49 146.93 T
221 327 221 246 2 L
N
250 202 250 121 2 L
N
296.75 254.38 337.25 274.62 R
7 X
V
0 X
N
0 12 Q
(PMUS) 300.66 260.73 T
412 327 412 246 2 L
N
(Information \050\324programs\325 with \324data\325\051) 290 183 T
72 27 540 720 C
0 0 612 792 C
FMENDPAGE
%%EndPage: "20" 19
%%Page: "19" 19
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 19 -) 293 757 T
72 27 540 720 R
7 X
V
0 X
-0.71 (search method that was used was merely a stop-gap while the integration issue was explored. Using) 72 712 P
2.88 (the earlier method might have led to \324tramlining\325 in one\325s thinking, obscuring the kind of) 72 696 P
-0.73 (reorganisation needed to achieve integration. It was reasonably clear at those early stages \050and even) 72 680 P
(more clear now\051 that the organisation of the MK10 model was not appropriate for an SP model.) 72 664 T
-0.4 (Given previous experience with this kind of search, and given the general understanding of) 108 643 P
-0.42 (how metrics-guided search can tame the biggest search problems \050Section 6.1\051, there was no doubt) 72 627 P
0.05 (that better search methods with acceptable computational complexity could be introduced into SP) 72 611 P
-0.33 (models at a later time. This expectation has been born out in the SP20 and SP21 models, described) 72 595 P
(above \050Section 4.2\051.) 72 579 T
2 F
(6.3) 72 553 T
(Exhaustive Search) 108 553 T
0 F
-0.71 (\322Having seen that exhaustive searching in SP is not a feasible proposition, it is now relevant) 108 534 P
2.07 (to state that all of its claimed capabilities would appear to require this very mode of) 108 518 P
(execution for their correct operation ....\323 \050[15], p. 217\051.) 108 502 T
0.17 (This, of course, is nonsense. Unsupervised inductive learning and best-match information) 108 481 P
1.09 (retrieval are just two of the capabilities targeted in the SP programme which can be done very) 72 465 P
(effectively with non-exhaustive searching.) 72 449 T
0.17 (\322To take but one example, consider the case of procedural programming in SP. If it is not) 108 428 P
-0.18 (feasible to search for all the redundancy in the input, then it cannot be guaranteed than any) 108 412 P
0.49 (particular pair of equivalent substrings will in fact be merged by the system. This in turn) 108 396 P
-0.49 (means that the particular \324compression\325 of the \324program\325 necessary for its correct operation) 108 380 P
1.26 (cannot be guaranteed, which leads to serious doubts over the determinism of otherwise) 108 364 P
(correct SP \324programs\325.\323 \050[15], p. 217\051.) 108 348 T
0.48 (The logical) 108 327 P
1 F
0.48 (non sequitur) 166.29 327 P
0 F
0.48 ( here is the belief that correct operation of a procedural program) 227.11 327 P
1.12 (requires a guarantee that \322any particular pair of equivalent substrings will ... be merged by the) 72 311 P
-0.12 (system\323. What is required \050) 72 295 P
1 F
-0.12 (inter alia) 203.14 295 P
0 F
-0.12 (\051 for a procedural program to execute correctly is a guarantee) 247.36 295 P
0.46 (that) 72 279 P
1 F
0.46 (some) 93.46 279 P
0 F
0.46 ( particular pair or pairs of equivalent substrings will always be merged. The existential) 118.12 279 P
(quantifier is needed, not the universal quantifier.) 72 263 T
0.08 (Consider, for example, the front end of a compiler for an ordinary programming language) 108 242 P
-0.22 (on an ordinary computer - a typical deterministic procedural program. The front end of a compiler) 72 226 P
0.45 (must parse the source code and produce one \324correct\325 translation if the source code is correct, or) 72 210 P
0.52 (one set of error messages when there are errors. \050Of course, the back end of a compiler is also a) 72 194 P
(deterministic program with similar characteristics.\051) 72 178 T
0.01 (Parsing is an operation which is naturally non-deterministic. Grammars and languages are) 108 157 P
0.06 (frequently ambiguous so that there are often two or more \324good\325 ways of parsing a particular text.) 72 141 P
1.34 (Ambiguity is a prominent feature of natural languages and their grammars. Parsers for natural) 72 125 P
(languages are normally designed to give alternative parsings where ambiguity exists.) 72 109 T
0.58 (The challenge for the designer of a compiler for a programming language is to eliminate) 108 88 P
0.98 (ambiguity. The grammar of the programming language to be parsed is normally expressed in a) 72 72 P
1.73 (suitable form \050e.g., LL\0501\051\051 so that, with a suitable parsing technique \050e.g., recursive descent\051,) 72 56 P
(ambiguity is removed and the parser always delivers one \324best\325 parsing.) 72 40 T
FMENDPAGE
%%EndPage: "19" 18
%%Page: "18" 18
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 18 -) 293 757 T
72 27 540 720 R
7 X
V
0 X
-0.13 (search etc.\051 are taught in every AI textbook and are \324common currency\325 for researchers in AI. It is) 72 712 P
0.23 (widely recognised that, while exhaustive search is normally impossible, hill climbing and related) 72 696 P
2.09 (techniques can often produce useful results without undue computational demands. It is also) 72 680 P
-0.25 (widely recognised that, because these techniques save computation by ignoring parts of the search) 72 664 P
(space \050usually very large parts\051, they cannot guarantee that the best possible result will be found.) 72 648 T
4.04 (From the beginning of the current programme of research into the computational) 108 627 P
(conjecture, these principles have been clearly flagged:) 72 611 T
-0.23 (\322There is no algorithmic way of ensuring that C is always maximised.) 108 590 P
0 10 Q
-0.19 (5) 441.38 594.8 P
0 12 Q
-0.23 ( Except for trivially) 446.38 590 P
-0.71 (simple cases, it is always necessary to use the kinds of) 108 574 P
1 F
-0.71 (search) 364.19 574 P
0 F
-0.71 ( technique familiar in artificial) 396.19 574 P
1.74 (intelligence \050most notably,) 108 558 P
1 F
1.74 (hill climbing) 244.55 558 P
0 F
1.74 (\051. These techniques cannot guarantee a perfect) 307.3 558 P
1.26 (solution, but they provide a practical means of finding at least an approximation to the) 108 542 P
(desired goal.\323 \050[22], p. 522, emphasis in the original\051.) 108 526 T
(and) 72 505 T
1.53 (\322Finding a good set of unifications for the repeating patterns in a body of information) 108 484 P
0.21 (amongst all the many possible sets of unifications means a process of) 108 468 P
1 F
0.21 (searching) 446.48 468 P
0 F
0.21 (. Because) 493.81 468 P
0.52 (the search space is usually very large, often infinite, it is necessary to use a) 108 452 P
1 F
0.52 (hill-climbing) 478 452 P
0 F
0.01 (technique - or something equivalent - to achieve an effective search in a reasonable time.\323) 108 436 P
(\050[31], p. 265, emphasis in the original\051.) 108 420 T
2 F
(6.2) 72 394 T
(Early SP Models) 108 394 T
0 F
3 (\322Despite token recognition in the SP literature of the impracticability of attempting) 108 375 P
-0.26 (exhaustive searches in the input corpora, it is nonetheless this very activity upon which the) 108 359 P
2.38 (SP prototypes have been based. For example, the system most often cited in the SP) 108 343 P
0.28 (literature, namely SP6, attempts an exhaustive substring search and is consequently of no) 108 327 P
(practical use for all but the most trivial inputs.\323 \050[15], pp. 216-217\051.) 108 311 T
1.42 (The inefficiency of the search process in early SP models is not a secret and has been) 108 290 P
0.81 (clearly stated in published descriptions, e.g.: \322A relatively unsophisticated kind of hill climbing) 72 274 P
3.36 (technique is being used [in SP6]. In future versions of SP, there is scope for substantial) 72 258 P
0.87 (improvements in the efficiency of the searching process \050using \324efficiency\325 here in the sense of) 72 242 P
(cutting out unnecessary repetition of operations\051.\323 \050[23], p. 145\051.) 72 226 T
1.86 (Very much more efficient and scaleable methods of finding repeating substrings were) 108 205 P
-0.29 (available at the time SP6 and other early SP models were developed. For example, my own MK10) 72 189 P
1.47 (program uses a hill-climbing technique which enables it to perform very effective searches of) 72 173 P
6.34 (realistically large quantities of unsegmented natural language text with quite modest) 72 157 P
(computational resources \050see, for example, [19]\051.) 72 141 T
-0.54 (The reason that one of these more efficient and scaleable methods was not used in SP6 \050and) 108 120 P
0.58 (other early models\051 was that the primary purpose of developing these models was to understand) 72 104 P
0 (more fully how diverse computing functions might be) 72 88 P
1 F
0 (integrated) 334.01 88 P
0 F
0 (. The simple and very inefficient) 383.35 88 P
72 59 540 73.98 C
72 59 540 73.98 R
7 X
0 K
V
81 71.96 225 71.96 2 L
V
0.5 H
2 Z
0 X
N
0 0 612 792 C
0 12 Q
0 X
0 K
-0.12 (5. C is a value whose formula is given in [22] which reflects that part of the redundancy in) 90 51 P
(a body of sequential information which is due to repeated substrings.) 90 35 T
FMENDPAGE
%%EndPage: "18" 17
%%Page: "17" 17
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 17 -) 293 757 T
72 27 540 720 R
7 X
V
0 X
(1) 108 712 T
1 F
1.82 (\322Broad searching) 144 712 P
0 F
1.82 ( means searching which is relatively thorough. The cost of a) 232.82 712 P
0.72 (thorough search is that it is hungry for computing resources and is likely to need) 144 696 P
0.48 (high levels of parallelism in processing to achieve adequate speed. The benefit of) 144 680 P
0.44 (thorough search in a computer system is that the system can be relatively flexible) 144 664 P
(and intelligent.) 144 648 T
(2) 108 627 T
1 F
1.4 (\322Narrow searching) 144 627 P
0 F
1.4 (. Where computational resources are limited - as they are in) 239.74 627 P
-0.33 (ordinary computers - the search process may be constrained in various ways so that) 144 611 P
1.45 (relatively large portions of the search space can never be reached. This kind of) 144 595 P
1 (constraint reduces computational costs but means that the computing system has) 144 579 P
2.27 (less flexibility and more of that \324brittleness\325 and stupidity which characterises) 144 563 P
(current systems.) 144 547 T
-0.17 (\322Amongst existing computer languages, those classified as \324declarative\325 seem to provide a) 108 526 P
4.24 (relatively broad search compared with those classified as \324procedural\325. Procedural) 108 510 P
-0.16 (languages seem to have been developed as a response, at least in part, to the limited power) 108 494 P
0.47 (of present-day computers. The greater power of computers in the future should allow the) 108 478 P
(declarative style to flower.\323 \050[23], p. 139; see also [22], p. 526\051.) 108 462 T
2 F
(6) 72 431 T
(Computational Complexity) 108 431 T
0 F
0 (Section 8 of [15], which deals with questions of computational complexity, contains more) 108 412 P
(than its fair share of errors and misconceptions. I will take each one in turn.) 72 396 T
2 F
(6.1) 72 370 T
(Complexity Ignored?) 108 370 T
0 F
-0.33 (\322... an issue of paramount importance to the practicability of the proposed scheme, and one) 108 351 P
-0.23 (that until very recently has been completely ignored in the SP theory, is the question of the) 108 335 P
(computational complexity of the SP compression process.\323 \050p. 216\051.) 108 319 T
-0.22 (Far from having been \322until very recently ... completely ignored\323, the question was clearly) 108 298 P
2.01 (recognised in the programme of research on unsupervised inductive learning from which the) 72 282 P
(computational conjecture arose:) 72 266 T
0.24 (\322One feature of this program [MK10] \050and SNPR\051 which deserves special attention is the) 108 245 P
-0.35 (way the combinatorial explosion is handled. For most values of) 108 229 P
1 F
-0.35 (s) 412.13 229 P
0 F
-0.35 ( there is a huge number of) 416.8 229 P
0.05 (possible dictionaries) 108 213 P
0 10 Q
0.04 (4) 206.28 217.8 P
0 12 Q
0.05 (. The number of alternative groupings that have to be considered [by) 211.28 213 P
1.23 (the program] at any one time is constrained by the fact that only pairs of neighbouring) 108 197 P
3.16 (elements are recorded. This restriction does not prevent the program from building) 108 181 P
-0.24 (arbitrarily large elements by successive pairings. The number of possible pairs which have) 108 165 P
-0.43 (to be recorded at any one time is never more than) 108 149 P
1 F
-0.43 (n) 343.58 149 P
0 F
-0.43 ( - 1 \050where) 349.58 149 P
1 F
-0.43 (n) 403.18 149 P
0 F
-0.43 ( is the number of characters) 409.18 149 P
0.02 (in the sample\051 and is often less than this. There is never any risk of being swamped by the) 108 133 P
-0.52 (number of possible groupings. The technique can be characterised as a hill-climbing search) 108 117 P
(of the space of possible dictionaries.\323 \050[21], p. 169\051.) 108 101 T
0.58 (What the authors of [15] seem not to understand is that, because most AI problems have) 108 80 P
1.67 (astronomically large problem spaces, principles of metrics-guided search \050hill climbing, beam) 72 64 P
72 43 540 57.98 C
72 43 540 57.98 R
7 X
0 K
V
81 55.96 225 55.96 2 L
V
0.5 H
2 Z
0 X
N
0 0 612 792 C
0 12 Q
0 X
0 K
(4.) 90 35 T
1 F
(s) 102 35 T
0 F
( is the size of the set of patterns to be discovered by the program.) 106.67 35 T
FMENDPAGE
%%EndPage: "17" 16
%%Page: "16" 16
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 16 -) 293 757 T
72 27 540 720 R
7 X
V
0 X
0.44 (which are associated with the resulting unifications. Examples of how this may be) 140.47 712 P
-0.2 (done are given in [26]. The examples and discussion in Sections 2.3 and 5.3, above,) 140.47 696 P
(are also relevant.) 140.47 680 T
0.03 (This outline, and the fuller account in [26], provides a basis for answering points raised in) 108 659 P
([15]. The three main points are considered in the next three sub-sections.) 72 643 T
1 F
(5.5.3) 72 617 T
(Composition of Functions) 108 617 T
0 F
(\322... SP is unable to handle compositions of functions ....\323 \050[15], p. 216\051.) 108 598 T
0.26 (Taking \322compositions of functions\323 to mean the \324calling\325 of one function by another, this) 108 577 P
0.34 (seems to correspond to recursive embedding of \324tagged\325 structures which has been demonstrated) 72 561 P
-0.12 (by SP6 \050[23], p. 149; [26], Section 4.1\051. However, for essentially the same reasons that current SP) 72 545 P
1.31 (models have no more than a very restricted ability to de-reference identifiers through multiple) 72 529 P
0.09 (levels \050see Section 5.3, above\051, they cannot imitate the familiar \324cascading\325 of function calls. This) 72 513 P
1.01 (appears to be a defect in current models rather than a more general defect in the framework of) 72 497 P
(ideas.) 72 481 T
1 F
(5.5.4) 72 455 T
(Explicit I/O Values?) 108 455 T
0 F
-0.44 (\322... the idea of having to store explicitly tabulated function values for all the possible inputs) 108 436 P
0.42 (for every function that may be required hardly seems a practicable proposition.\323 \050[15], p.) 108 420 P
(216\051.) 108 404 T
0.88 (Of course, explicit values would only be used for small sets of I/O patterns. In all other) 108 383 P
(cases, as explained above, compressed representations would be used.) 72 367 T
1 F
(5.5.5) 72 341 T
(\324Algorithmic\325 Computation) 108 341 T
0 F
2.65 (\322... the proposed approach relies on the relevant values somehow being available in) 108 322 P
2.69 (advance. ... in SP there is no possibility of being able to compute a function value) 108 306 P
-0.71 (algorithmically. This means that an SP system would in fact require some alternative means) 108 290 P
0.79 (- a program written in a \324conventional\325 programming language running on an \324ordinary\325) 108 274 P
-0.28 (computer. ... The failure of SP to handle functions adds further weight to the argument that) 108 258 P
1.66 (programming in SP is not possible. ... an inability to implement functions suggests an) 108 242 P
(inability to execute programs.\323 \050[15], p. 216\051.) 108 226 T
2.89 (These quotations show that the thrust of the computational conjecture has not been) 108 205 P
0.89 (understood by the authors of [15]. It is part of the computational conjecture that computing the) 72 189 P
2.9 (value of a function \324algorithmically\325 and the execution of \324programs\325 may be achieved by) 72 173 P
-0.6 (information compression. I believe the arguments summarised above \050Section 5.5.2\051 and presented) 72 157 P
0.51 (more fully in [26], and the examples in Sections 2.3 and 5.3 above provide good support for the) 72 141 P
(hypothesis.) 72 125 T
1 F
(5.5.6) 72 99 T
(\324Broad\325 and \324Narrow\325 Searching) 108 99 T
0 F
3.25 (Before leaving the topic of procedural programming, it is pertinent to mention the) 108 80 P
(distinction between \324broad\325 and \324narrow\325 searching in SP systems:) 72 64 T
(\322Within the scope of the hill climbing concept, there is another dimension:) 108 43 T
FMENDPAGE
%%EndPage: "16" 15
%%Page: "15" 15
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 15 -) 293 757 T
72 27 540 720 R
7 X
V
0 X
-0.35 (The parsimonious design is motivated by the need for parsimony in the SP theory. The aim) 108 712 P
0.01 (of the research programme is to see whether or not it is possible for \324conventional\325 concepts to be) 72 696 P
0.45 (understood as) 72 680 P
1 F
0.45 (emergent) 142.23 680 P
0 F
0.45 ( properties of a system built from PMUS, and, if so, how. If the kinds of) 186.88 680 P
1.67 (features mentioned were simply bolted on to the SP language then the conceptual framework) 72 664 P
-0.38 (would slide back into the conventional mould and the possibility of achieving a new and improved) 72 648 P
(synthesis of ideas would quickly recede.) 72 632 T
1 F
(5.5.2) 72 606 T
(The Execution of Functions by Information Compression) 108 606 T
0 F
0.51 (The way in which software might be designed) 108 587 P
1 F
0.51 (and executed) 336.68 587 P
0 F
0.51 ( by information compression) 400.16 587 P
(has been discussed quite fully in [26]. The gist of the argument is this:) 72 571 T
(\245) 72 550 T
-0.49 (Conceptually, any function is a set of patterns which maps inputs to corresponding outputs.) 108 550 P
-0.28 (This is the \324extensional\325 view of functions which is widely recognised in computer science) 108 534 P
(\050see, for example, [16], p. 279\051.) 108 518 T
(\245) 72 497 T
1.93 (In simple cases \050e.g., a thermostat function or the \324exclusive OR\325 \050XOR\051 function\051, a) 108 497 P
0 (function may be defined by an unmodified set of I/O patterns. But in most cases, this kind) 108 481 P
-0.14 (of representation would be too large to be practical. Consequently, the set of patterns must) 108 465 P
(be compressed.) 108 449 T
(\245) 72 428 T
0.06 (The devices commonly used for representing the structure of a computer program \050named) 108 428 P
1 (functions with or without arguments;) 108 412 P
1 F
1 (while) 294.01 412 P
0 F
1 ( loops;) 320.01 412 P
1 F
1 (fo) 357.36 412 P
0 F
1 (r loops; recursion; class hierarchies) 366.69 412 P
-0.35 (with inheritance; etc.\051 may be seen as examples of devices commonly used for information) 108 396 P
0.15 (compression \050\324chunking with tags\325; \324run-length coding\325; \324schema-plus-correction\325\051. Thus,) 108 380 P
0.59 (a set of patterns which has been compressed by means of these devices will have a form) 108 364 P
3.7 (which resembles the organisation of a conventional computer program. This is an) 108 348 P
(\324intensional\325 definition of the function.) 108 332 T
(\245) 72 311 T
2.88 (If a compressed set of patterns contains one or more recursive structures, then that) 108 311 P
-0.38 (compressed structure may represent an) 108 295 P
1 F
-0.38 (infinite) 295.73 295 P
0 F
-0.38 ( range of I/O values \050cf. Section 2.3, above\051.) 329.74 295 P
1.52 (Recursion may be seen as an example of the run-length coding device for information) 108 279 P
(compression.) 108 263 T
(\245) 72 242 T
1.41 (As discussed in [26], the) 108 242 P
1 F
1.41 (execution) 236.37 242 P
0 F
1.41 ( of a function may also be achieved by information) 282.35 242 P
(compression:) 108 226 T
(\245) 104.46 205 T
0.08 (If the function is represented by an explicit \050uncompressed\051 set of patterns, then the) 140.47 205 P
-0.12 (matching and unification of an \324input\325 pattern with \050the \324input\325 side of\051 one or more) 140.47 189 P
-0.51 (entries in the set of patterns achieves the effect of \324table lookup\325. The \324output\325 in this) 140.47 173 P
-0.72 (case is the unmatched symbols in the entry or entries which have been accessed. This) 140.47 157 P
-0.55 (is similar to information retrieval by the partial matching of patterns \050cf. Section 5.3,) 140.47 141 P
(above\051.) 140.47 125 T
(\245) 104.46 104 T
0.22 (Even if the function is a) 140.47 104 P
1 F
0.22 (compressed) 259.76 104 P
0 F
0.22 ( representation of I/O patterns, \324output\325 values) 316.41 104 P
-0.6 (may be accessed in a similar way: by matching and unification of \324input\325 values with) 140.47 88 P
-0.7 (patterns within the compressed structure, and the retrieval of the unmatched symbols) 140.47 72 P
FMENDPAGE
%%EndPage: "15" 14
%%Page: "14" 14
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 14 -) 293 757 T
72 27 540 720 R
7 X
V
3 F
0 X
([\050John eats food\051) 108 712 T
(\050\050Subj _\051\050V1 _\051\050Obj _\051\051) 108 697 T
(\050\050Obj _\051 is \050V2 _\051 by \050Subj _\051\051) 108 682 T
(\050Subj John\051) 108 667 T
(\050V1 eats\051) 108 652 T
(\050V2 eaten\051) 108 637 T
(\050Obj food\051]) 108 622 T
(=>) 108 592 T
([\050\050Subj _\051\050V1 _\051\050Obj _\051\051) 108 562 T
(\050\050Obj _\051 is \050V2 _\051 by \050Subj _\051\051) 108 547 T
(\050Subj) 108 532 T
4 F
(John) 139.34 532 T
3 F
(\051) 168.01 532 T
(\050V1) 108 517 T
4 F
(eats) 130.01 517 T
3 F
(\051) 154.02 517 T
(\050V2 eaten\051) 108 502 T
(\050Obj) 108 487 T
4 F
(food) 134 487 T
3 F
(\051]) 160 487 T
(=>) 108 457 T
([\050\050Subj _\051\050V1 _\051\050Obj _\051\051) 108 427 T
(\050\050) 108 412 T
4 F
(Obj) 115.99 412 T
(food) 139.33 412 T
3 F
(\051 is \050) 165.32 412 T
4 F
(V2) 188.65 412 T
3 F
( eaten\051 by \050) 203.33 412 T
4 F
(Subj) 264.02 412 T
(John) 293.36 412 T
3 F
(\051\051) 322.03 412 T
(\050V1) 108 397 T
4 F
(eats) 130.01 397 T
3 F
(\051].) 154.02 397 T
0 F
0.47 (In this example, the order of the words in the original sentence has been lost but could, perhaps,) 72 378 P
0.61 (have been preserved by attaching appropriate tags to the) 72 362 P
3 F
0.68 (\050\050Subj _\051\050V1 _\051\050Obj _\051\051) 350.44 362 P
0 F
0.61 ( pattern in the) 471.84 362 P
0.8 (\324grammar\325. A more comprehensive grammar would, no doubt, mark the \324semantic\325 connections) 72 346 P
(between active and passive forms and between \324) 72 330 T
3 F
(eats) 303.6 330 T
0 F
(\325 and \324) 326.28 330 T
3 F
(eaten) 357.6 330 T
0 F
(\325.) 387.62 330 T
2.9 (The general point here is that an ability to transpose information is not necessarily) 108 309 P
0.19 (primitive. It may be an \324emergent\325 property of a system which is dedicated to PMUS. If it can be) 72 293 P
0.42 (shown that Post canonical systems can be imitated within the PMUS framework of an SP model) 72 277 P
-0.25 (and if it is accepted that Post canonical systems are \324universal\325 then the SP framework is universal) 72 261 P
(too.) 72 245 T
2 F
(5.5) 72 219 T
(Procedural Programming) 108 219 T
1 F
(5.5.1) 72 195 T
(A Simple Language) 108 195 T
0 F
0.05 (\322[The SP language] is a language with no reserved words; no defined data types; virtually) 108 176 P
0.14 (no data structures; no arithmetic/logical operators; no input/output primitives; no iterative) 108 160 P
0.17 (constructs; and, perhaps most importantly, no means of controlling program flow. In fact,) 108 144 P
-0.64 (it is difficult even to conceive of how an algorithm could be represented, let alone executed,) 108 128 P
(in the SP language.\323 \050[15], p. 216\051.) 108 112 T
0.39 (The SP language has been correctly characterised here as lacking in many of the features) 108 91 P
1.36 (which are commonly provided in conventional systems. And, with the possible exception of a) 72 75 P
1.47 (negation operator, there is no intention of adding features like these in the future as primitive) 72 59 P
(components of the language.) 72 43 T
FMENDPAGE
%%EndPage: "14" 13
%%Page: "13" 13
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 13 -) 293 757 T
72 27 540 720 R
7 X
V
0 X
-0.22 (this in turn may be seen as a relatively simple case of \324best-match\325 retrieval in which there may be) 72 712 P
0.26 (mis-matches between the query and the retrieved pattern. Best-match retrieval is discussed much) 72 696 P
(more fully in [27].) 72 680 T
2 F
(5.4) 72 654 T
(Transposition of Information) 108 654 T
0 F
1.74 (As described in [15], a Post canonical system [11] is one \322comprising a single axiom) 108 635 P
-0.43 (together with productions of the form:) 72 619 P
3 F
-0.47 (g$ -> $h) 256.09 619 P
0 F
-0.43 (, where) 299.5 619 P
3 F
-0.47 (g) 336.97 619 P
0 F
-0.43 ( and) 343.64 619 P
3 F
-0.47 (h) 366.11 619 P
0 F
-0.43 ( are constants and) 372.79 619 P
3 F
-0.47 ($) 459.72 619 P
0 F
-0.43 ( is a \324wild card\325) 466.39 619 P
0.61 (operator which can match zero or more symbols.\323 \050p. 216\051. The key point is that, with a system) 72 603 P
1.49 (like this, it is possible to transpose information from one part of a sequence to another. If, as) 72 587 P
-0.25 (appears to be the case, \322Post canonical systems constitute another universal model of computation) 72 571 P
-0.14 (and ...) 72 555 P
1 F
-0.14 (all) 104.04 555 P
0 F
-0.14 (formal systems may be expressed as Post canonical systems.\323 \050[15], p. 216, emphasis in) 119.57 555 P
1.49 (the original\051 and if, as may be the case, transposition cannot be accommodated within the SP) 72 539 P
-0.59 (framework, then there is a problem for the SP computational conjecture. There would be a problem) 72 523 P
2.48 (even if Post canonical systems were not \324universal\325 because they clearly support a form of) 72 507 P
1.08 (\324computing\325 or \324formal reasoning\325. There is in any case a very reasonable expectation that any) 72 491 P
0.56 (general-purpose computing system should be able to transpose information from one location to) 72 475 P
(another.) 72 459 T
-0.66 (That an SP system \050or any other computing system\051 should be able to transpose information) 108 438 P
-0.5 (does not, in itself, mean that that capability has to be provided as a) 72 422 P
1 F
-0.5 (primitive) 387.67 422 P
0 F
-0.5 ( element of the system.) 431 422 P
-0.6 (Although it cannot yet be demonstrated in any SP model \050and has, indeed, not been a primary focus) 72 406 P
0.02 (of attention in work done to date\051, it appears that transposition of information may be achieved as) 72 390 P
2.67 (a by-product of the processes of pattern matching, unification and search which have been) 72 374 P
(identified tentatively as key mechanisms for information compression.) 72 358 T
(Consider an example like this:) 108 337 T
3 F
([\050\050Subj _\051\050V1 _\051\050Obj _\051\051) 108 316 T
(\050\050Obj _\051 is \050V2 _\051 by \050Subj _\051\051) 108 301 T
(\050Subj John\051) 108 286 T
(\050V1 eats\051) 108 271 T
(\050V2 eaten\051) 108 256 T
(\050Obj food\051].) 108 241 T
0 F
0.16 (In the light of the discussion and examples in Section 2.3 and the first part of Section 5, it should) 72 222 P
1.01 (be clear that an SP system that is capable of creating alternative unifications should create two) 72 206 P
(alternative \324best\325 versions of this structure:) 72 190 T
3 F
([\050\050) 108 169 T
4 F
(Subj) 119.33 169 T
3 F
( John\051\050) 145.33 169 T
4 F
(V1) 182.68 169 T
3 F
( eats\051\050) 197.35 169 T
4 F
(Obj) 231.36 169 T
3 F
( food\051\051) 251.36 169 T
(\050\050Obj _\051 is \050V2 _\051 by \050Subj _\051\051) 108 154 T
(\050V2 eaten\051]) 108 139 T
0 F
(and) 72 120 T
3 F
([\050\050Subj _\051\050V1 _\051\050Obj _\051\051) 108 99 T
(\050\050) 108 84 T
4 F
(Obj) 115.99 84 T
3 F
( food\051 is \050) 136 84 T
4 F
(V2) 186.01 84 T
3 F
( eaten\051 by \050) 200.69 84 T
4 F
(Subj) 261.38 84 T
3 F
( John\051\051) 287.39 84 T
(\050V1 eats\051].) 108 69 T
0 F
0.38 (It is a relatively small step to see how an \324active\325 sentence may first be \324parsed\325 and then) 108 50 P
(reconstructed in the \324passive\325 form with its \324subject\325 and its \324object\325 transposed:) 72 34 T
FMENDPAGE
%%EndPage: "13" 12
%%Page: "12" 12
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 12 -) 293 757 T
72 27 540 720 R
7 X
V
3 F
0 X
(S -> N V ADV) 108 712 T
(N -> John) 108 697 T
(N -> Mary) 108 682 T
(V -> walks) 108 667 T
(V -> runs) 108 652 T
(ADV -> fast) 108 637 T
(ADV -> slowly.) 108 622 T
0 F
0.25 (With this example \050similar to the example shown in Section 2.3\051, a \324sentence\325 may be created by) 72 603 P
0.73 (re-writing) 72 587 P
3 F
0.81 (S) 123.72 587 P
0 F
0.73 ( as) 131.72 587 P
3 F
0.81 (N) 149.17 587 P
0 F
0.73 ( followed by) 157.84 587 P
3 F
0.81 (V) 223.68 587 P
0 F
0.73 ( followed by) 231.68 587 P
3 F
0.81 (ADV) 297.53 587 P
0 F
0.73 ( \050i.e., replacing) 322.2 587 P
3 F
0.81 (S) 399.69 587 P
0 F
0.73 ( by those three symbols, in) 407.7 587 P
0.47 (order\051, then re-writing) 72 571 P
3 F
0.52 (N) 183.38 571 P
0 F
0.47 ( by \324) 192.05 571 P
3 F
0.52 (John) 214.99 571 P
0 F
0.47 (\325 or \324) 241 571 P
3 F
0.52 (Mary) 265.94 571 P
0 F
0.47 (\325, re-writing) 292.6 571 P
3 F
0.52 (V) 354.53 571 P
0 F
0.47 ( by \324) 362.53 571 P
3 F
0.52 (walks) 385.47 571 P
0 F
0.47 (\325 or \324) 415.47 571 P
3 F
0.52 (runs) 440.4 571 P
0 F
0.47 (\325 and re-writing) 463.74 571 P
3 F
0.67 (ADV) 72 555 P
0 F
0.61 ( by \324) 96.67 555 P
3 F
0.67 (fast) 119.88 555 P
0 F
0.61 (\325 or \324) 139.22 555 P
3 F
0.67 (slowly) 164.42 555 P
0 F
0.61 (\325. In more realistic examples, re-writing is done through three or more) 197.08 555 P
(\324levels\325. In some applications, rules of this type may be run \324backwards\325.) 72 539 T
2.72 (In the SP framework, each rule may be seen as a sequential pattern which may be) 108 518 P
(represented in the SP notation like this:) 72 502 T
3 F
([\050\050N _\051\050V _\051\050ADV _\051\051) 108 481 T
(\050N John\051) 108 466 T
(\050N Mary\051) 108 451 T
(\050V walks\051) 108 436 T
(\050V runs\051) 108 421 T
(\050ADV fast\051) 108 406 T
(\050ADV slowly\051].) 108 391 T
0 F
0.44 (There are some differences in style between this example and the one given in Section 2.3 but it) 72 372 P
0.35 (would take us too far afield to discuss them and they are not relevant to the point in hand. When) 72 356 P
(this structure is processed by SP6, it is compressed into this form:) 72 340 T
3 F
([\050\050) 108 319 T
4 F
(N) 119.33 319 T
3 F
( John\051\050) 127.99 319 T
4 F
(V) 165.34 319 T
3 F
( walks\051\050) 173.34 319 T
4 F
(ADV) 214.67 319 T
3 F
( fast\051\051) 240 319 T
(\050N Mary\051) 108 304 T
(\050V runs\051) 108 289 T
(\050ADV slowly\051].) 108 274 T
0 F
2.03 (Bold type is used to mark symbols which are the result of unification. The effect of pattern) 72 255 P
0.06 (matching and unification in the example has been to \324pull\325 the elements) 72 239 P
3 F
0.06 (\050N John\051, \050V walks\051) 417.91 239 P
0 F
0.06 ( and) 519.73 239 P
3 F
-0.26 (\050ADV fast\051) 72 223 P
0 F
-0.23 ( into positions within the sentence framework thus creating a simple sentence. SP6 has) 127.08 223 P
0.2 (only a very restricted ability to achieve this kind of effect through two or more \324levels\325 \050see [23],) 72 207 P
0.36 (p. 158\051 and, as indicated earlier, it only finds one \324best\325 set of unifications and cannot, therefore,) 72 191 P
(create alternative sentences.) 72 175 T
-0.26 (As illustrated by this example, re-writing may be seen as a process of information retrieval) 108 154 P
0.52 (where the retrieval \324key\325 is the left side of the rule and the unmatched symbol or symbols in the) 72 138 P
-0.55 (retrieved pattern correspond to the right side of the rule. Likewise, re-writing \050and retrieval by key\051) 72 122 P
0.03 (may be seen as the \324de-referencing\325 of an identifier, where the left side of the rule is the identifier) 72 106 P
(and the right.side is the identified object.) 72 90 T
0.01 (This kind of information retrieval by key or de-referencing of an identifier may be seen as) 108 69 P
-0.18 (a simple case of a more general idea: that any sub-sequence of a sequential pattern may be used to) 72 53 P
-0.59 (retrieve that pattern \050perhaps also with other patterns in which the same sub-sequence occurs\051. And) 72 37 P
FMENDPAGE
%%EndPage: "12" 11
%%Page: "11" 11
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 11 -) 293 757 T
72 27 540 720 R
7 X
V
0 X
1.44 (abduction and others. But making such judgements is not in any way a claim that the models) 72 712 P
-0.4 (perform these functions comprehensively or perfectly. It was reasonable for the Wright brothers to) 72 696 P
-0.27 (claim that they had demonstrated powered flight, even though they were not able to carry a Jumbo) 72 680 P
(jet load of passengers at high speed across the Atlantic.) 72 664 T
0.86 (That current models have weaknesses has been made abundantly clear: \322Any one of the) 108 643 P
0.03 (functions described can be done more effectively by some existing computing system or program) 72 627 P
-0.66 (....\323 \050[23], p. 147\051; \322OAOs have been used [in this example] rather than the more appropriate UAOs) 72 611 P
-0.22 (because SP6 lacks the ability to look for patterns and sub-groups within UAOs. Future versions of) 72 595 P
0.09 (SP will plug this gap.\323 \050[31], p. 272\051; \322... this version of SP6 has actually failed to preserve one of) 72 579 P
1.09 (the premises in this example \050that \324All men are mortal\325\051 although, in designing the system, we) 72 563 P
-0.35 (intended that it should not lose power in its knowledge structures. We expect this weakness in SP6) 72 547 P
0.51 (to be cured in later versions of SP ...\323 \050[31], p. 273\051. \322Notice that this example would not give a) 72 531 P
0.55 (valid conclusion if the relationship were, say, \324not-equals\325. This and other examples point to the) 72 515 P
0.05 (conclusion that negation will require special treatment in SP, ...\323 \050[31], p. 152\051; With more space,) 72 499 P
(many other similar quotations could be given.) 72 483 T
0.54 (\322SP can imitate the effect of a re-write system\323. This quotation from [31] is presented in) 108 462 P
0.51 ([15] as an example of a proposition with little support. The quotation is actually a fragment of a) 72 446 P
0.34 (sentence which appears in a section on \324Related Work\325: \322Although SP can imitate the effect of a) 72 430 P
0.18 (re-write system it does) 72 414 P
1 F
0.18 (not) 184.71 414 P
0 F
0.18 ( use any re-writing technique in the technical sense of that term ...\323 \050p.) 200.05 414 P
-0.4 (266, emphasis in the original\051. From the context and from the fuller sentence it can be seen that the) 72 398 P
0.56 (aim at that point was to differentiate SP concepts from re-writing systems rather than justify the) 72 382 P
1.36 (first proposition in the sentence. Relevant evidence \050which could have been quoted if the first) 72 366 P
0.17 (proposition had been the focus of attention\051 was presented in [22], Sections 5.1 and 5.4. The SP6) 72 350 P
-0.64 (model \050which was the subject of [31]\051 can imitate re-writing in a restricted way, as discussed below) 72 334 P
(\050Section 5.3\051.) 72 318 T
3.64 (\322The SP prototype ... demonstrates an ability to make logical inferences which is) 108 297 P
-0.64 (comparable with the capability of systems like Prolog\323. This quotation in [15] from an unpublished) 72 281 P
0.02 (document \050[30], p. 18\051 certainly gives a misleading impression of what the SP prototype could do) 72 265 P
0.61 (if \322is comparable with\323 is taken to mean \322is as good as\323 rather than the stricter meaning \050which) 72 249 P
-0.17 (was intended\051 of \322may be compared with\323. The impression that any undue claim for the prototype) 72 233 P
2.07 (is being made is less strong if the immediately following sentence is given: \322) 72 217 P
1 F
2.07 (There are still) 467.86 217 P
0.02 (problems to be solved) 72 201 P
0 F
0.02 ( but we expect the) 176.93 201 P
1 F
0.02 (mature) 267.16 201 P
0 F
0.02 ( SP system to be able to store logical propositions) 301.15 201 P
0.38 (and to make logical inferences at least as well as existing systems for theorem proving and logic) 72 185 P
-0.61 (programming.\323 \050emphasis added\051. And the impression is even less strong when published accounts) 72 169 P
1.03 (of SP prototypes point out their shortcomings so clearly \050see above\051. It does not require \322close) 72 153 P
-0.14 (examination\323 to discover that a negation operator is missing and is probably required because this) 72 137 P
(has been clearly stated and discussed \050[31], pp. 274-275\051.) 72 121 T
2 F
(5.3) 72 95 T
(Re-write Rules) 108 95 T
0 F
(In their simplest form, re-write rules look like this:) 72 76 T
FMENDPAGE
%%EndPage: "11" 10
%%Page: "10" 10
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 10 -) 293 757 T
72 27 540 720 R
7 X
V
2 F
0 X
(5) 72 712 T
(Comments) 108 712 T
(5.1) 72 688 T
(The Development of Computer Models) 108 688 T
0 F
0.13 (It is suggested in [15] that the development of working models has been at the expense of) 108 669 P
-0.33 (\322seriously addressing important theoretical problems\323 \050p. 215\051. This observation appears to derive) 72 653 P
2.08 (from the view that theoretical work should be done exclusively by \324pure thought\325. This was) 72 637 P
2.03 (necessarily the case before computers were invented. But since that time, many people have) 72 621 P
0.02 (discovered that the process of developing computer models and running them is a powerful aid to) 72 605 P
(theorising. The technique is now widely used in many areas including mathematics \050e.g., [1]\051.) 72 589 T
0.65 (There is, of course, a risk of blind \324hacking\325. But if computer models are developed in a) 108 568 P
1.15 (disciplined way in conjunction with hard thinking about the theory which they are designed to) 72 552 P
0.16 (express, the process of creating a model can sharpen one\325s view of theoretical problems. And the) 72 536 P
1.21 (process of running the model on appropriate examples can quickly reveal weaknesses in one\325s) 72 520 P
(understanding that might otherwise never come to light.) 72 504 T
0.43 (Even with the aid of computer modelling, this kind of theory development is hard. There) 108 483 P
0.2 (are many opportunities for false moves and many blind alleys that can be entered. The goal at all) 72 467 P
0.69 (times is to preserve simplicity in the concepts and, at the same time, to accommodate as wide a) 72 451 P
2.42 (range of phenomena as possible \050\324simplicity\325 with \324power\325!\051. The temptation to add) 72 435 P
1 F
2.42 (ad hoc) 505.26 435 P
0 F
(mechanisms to meet specific problems must be resisted.) 72 419 T
1 F
(5.1.1) 72 393 T
(Rigorous Definition) 108 393 T
0 F
2.17 (Regarding the suggestion in [15] that the meaning of the \324wild card\325 object \050i.e., the) 108 374 P
0.37 (variable \050\324_\325\051\051 should, by now, have been \322rigorously\323 defined, the exact nature of this construct) 72 358 P
-0.18 (within the SP system, or whether it is needed at all, has indeed been a recurring puzzle. Of course,) 72 342 P
-0.56 (an arbitrary formal meaning could have been given to this construct at an early stage but this would) 72 326 P
0.8 (almost certainly) 72 310 P
1 F
0.8 (not) 153.6 310 P
0 F
0.8 ( have been the route to a good theory. Concepts within a developing theory) 168.94 310 P
(need to be bootstrapped with care to ensure that the overall structure is sound.) 72 294 T
1.2 (My tentative view has been that the need for the variable might disappear when search) 108 273 P
0.36 (methods were improved. But until an improved search method had been devised \050with the aid of) 72 257 P
0.04 (computer modelling\051, it was not easy to resolve this point. It turns out that, at least in applications) 72 241 P
0.03 (like best-match information retrieval, the search method in SP20 and SP21 completely eliminates) 72 225 P
(the need for any wild card object.) 72 209 T
2 F
(5.2) 72 183 T
(What Are the Models Doing?) 108 183 T
0 F
0.12 (It is correctly observed in [15] that a judgement about what the models are doing depends) 108 164 P
0.14 (on how the output is interpreted. This, of course, is true of any computer system. Also relevant is) 72 148 P
1.83 (our knowledge of the internal workings of the given system. We have few qualms about the) 72 132 P
0.49 (conclusion that a pocket calculator is \324doing\325 arithmetic or that a Prolog system is \324doing\325 logic.) 72 116 P
0.44 (But these judgements rest ultimately on the patterns we see on the LCD or computer screen, our) 72 100 P
0.56 (knowledge of how the systems work, and how these things relate to our concepts. Making these) 72 84 P
(connections is a process of perception and analogy.) 72 68 T
0.38 (I believe it is reasonable to interpret the output and the workings of current SP models in) 108 47 P
2.71 (terms of such concepts as information retrieval, unsupervised inductive learning, deduction,) 72 31 P
FMENDPAGE
%%EndPage: "10" 9
%%Page: "9" 9
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 9 -) 296 757 T
72 27 540 720 R
7 X
V
0 X
1.34 (planning within a framework of information compression by pattern matching, unification and) 72 712 P
(search. Examples from the model are presented in the sources cited above.) 72 696 T
-0.23 (The focus has been on) 108 675 P
1 F
-0.23 (integration) 217.14 675 P
0 F
-0.23 ( within one uniform framework: \322I stress that) 270.48 675 P
1 F
-0.23 (the variety) 489.24 675 P
0.54 (of capabilities illustrated in this section are the product of one unifying concept - the search for) 72 659 P
0.96 (efficiency in information.) 72 643 P
0 10 Q
0.8 (3) 194.91 647.8 P
0 12 Q
0.96 ( Any one of the functions described can be done more effectively by) 199.91 643 P
-0.68 (some existing computing system or program but, as far as I know, there is no existing system which) 72 627 P
0.44 (embraces all these capabilities as a product of one organizing principle.\323 \050[23], p. 147, emphasis) 72 611 P
(in the original\051.) 72 595 T
1.51 (Amongst the several shortcomings of SP6 there are three main weaknesses: the search) 108 574 P
-0.23 (method is inefficient and, more importantly, will not scale up to handle realistically large volumes) 72 558 P
-0.24 (of data; it only delivers one \325best\325 answer to problems instead of the alternative answers which are) 72 542 P
4.48 (often required; and it has only a very restricted ability to discover patterns which are) 72 526 P
(\324discontinuous\325 or \324fragmented\325.) 72 510 T
2 F
(4.2) 72 484 T
(SP20 and SP21) 108 484 T
0 F
0.29 (The main goal in developing SP20 has been to overcome those three main weaknesses of) 108 465 P
0.7 (SP6 whilst maintaining an ability to integrate diverse functions \324cleanly\325 within a framework of) 72 449 P
0.69 (information compression. A subsidiary goal has been to achieve these ends with a model which) 72 433 P
(would lend itself to parallel processing.) 72 417 T
0.69 (As described in [29], the search method in SP20 is) 108 396 P
1 F
0.69 (much) 360.55 396 P
0 F
0.69 ( more efficient than in SP6 and) 386.54 396 P
-0.33 (appears to be scaleable to large bodies of information. In a serial processing environment, the time) 72 380 P
0.34 (complexity of the model is O\050N) 72 364 P
0 10 Q
0.28 (2) 226.68 368.8 P
0 12 Q
0.34 (\051and the space complexity of the model is O\050N\051, where N is the) 231.68 364 P
0.18 (number of symbols in the information supplied to the model. Analysis of the model suggests that) 72 348 P
-0.64 (a parallel version \050not yet attempted\051 would have the same space complexity and a time complexity) 72 332 P
0.15 (approaching O\050N\051, depending how well the parallelism is applied. The model has a robust ability) 72 316 P
0.93 (to find recurrent patterns which are discontinuous or fragmented \050i.e., subsequences\051 as well as) 72 300 P
-0.13 (coherent substrings. It can deliver a graded series of alternative matchings for patterns but there is) 72 284 P
(more work to be done on the formation of alternative unifications.) 72 268 T
-0.73 (SP20 is a substantial advance over SP6 but in some respects the earlier model is better. Both) 108 247 P
-0.41 (models are quite restricted in what they can do and there are several significant problems still to be) 72 231 P
(solved. These problems are described in [29].) 72 215 T
1.13 (In terms of potential practical applications, SP20 is strongest in the area of information) 108 194 P
-0.29 (retrieval. With a view to possible future \324spin-off\325, SP21 is a variant of SP20 which is dedicated to) 72 178 P
0.2 (\324best-match\325 information retrieval [27]. It incorporates some refinements of the search method in) 72 162 P
(SP20 and of the metric used to guide the search.) 72 146 T
72 91 540 105.98 C
72 91 540 105.98 R
7 X
0 K
V
81 103.96 225 103.96 2 L
V
0.5 H
2 Z
0 X
N
0 0 612 792 C
0 12 Q
0 X
0 K
1 (3.) 90 83 P
1 F
1 (Efficiency) 103 83 P
0 F
1 ( is the ratio of non-redundant information to total information. This ratio is) 150.99 83 P
5.01 (increased by the extraction of redundancy from information and the consequent) 90 67 P
3.38 (compression of the information \050provided any accompanying loss of non-redundant) 90 51 P
(information is not too great\051.) 90 35 T
FMENDPAGE
%%EndPage: "9" 8
%%Page: "8" 8
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 8 -) 296 757 T
72 27 540 720 R
7 X
V
0 X
-0.59 (case is where two inputs of different sizes have the same amount of non-redundant information and) 72 712 P
(may therefore be compressed to the same size.) 72 696 T
0.05 (Although an SP system may itself perform compression which is monotonic, this does not) 108 675 P
1.08 (mean that functions executed by the system necessarily have that property. This is because the) 72 659 P
-0.35 (meaning of the terms \324input\325 and \324output\325 are different in the two cases. This may be seen from the) 72 643 P
-0.4 (outline description, below, of how SP functions may be executed \050Section 5.5\051 and the much fuller) 72 627 P
(description which is given in [26].) 72 611 T
(In brief, the argument is that:) 108 590 T
(\245) 72 569 T
0.75 (In simple cases, a function may be represented as a set of patterns of I/O values and the) 108 569 P
(effect of \324table lookup\325 may be achieved by pattern matching, unification and search.) 108 553 T
(\245) 72 532 T
0.1 (In more complex cases, a function may be represented as a compressed set of patterns but) 108 532 P
(output values may still be obtained by PMUS.) 108 516 T
0.14 (In each of the two examples shown in Figure 1, it is not hard to see that one of the two rows may) 72 495 P
0.25 (be discriminated by the matching of an \324input\325 pattern with patterns in the \324input\325 column so that) 72 479 P
0.84 (the corresponding \324output\325 may be delivered. This process is independent of the \324output\325 and is) 72 463 P
(thus indifferent to the distinction between monotonic and non-monotonic functions.) 72 447 T
2 F
(3) 72 416 T
(Information Theory) 108 416 T
0 F
-0.1 (My earlier article [25] traces a connection between Shannon\325s concept of redundancy [15]) 108 397 P
0.12 (and the concept of redundancy as relatively frequent repetition of patterns. From this, it is argued) 72 381 P
1.67 (that redundancy in information may be reduced by decreasing the repetition of those patterns) 72 365 P
1.32 (which repeat more often than other patterns of the same size. Relatively frequent repetition of) 72 349 P
4.43 (patterns may be detected by searching for matching patterns. Possible connections with) 72 333 P
(algorithmic information theory \050e.g., [1]\051 are briefly described.) 72 317 T
0.08 (If the authors of [15] had wished to demonstrate \322a confusing terminological hodgepodge,) 108 296 P
0.08 (fraught with ambiguity, inconsistency, and imprecision\323 \050p. 214\051, one would expect them to have) 72 280 P
0.45 (made their case from the discussion in [25]. Since these qualities are merely asserted rather than) 72 264 P
(demonstrated, no other response is required.) 72 248 T
2 F
(4) 72 217 T
(SP Models) 108 217 T
0 F
-0.23 (In Sections 6 and 7 of [15], existing prototypes of the SP system, and their capabilities, are) 108 198 P
0.02 (examined in general terms. Before offering responses to observations made in those two sections,) 72 182 P
(I will describe current models in outline.) 72 166 T
2 F
(4.1) 72 140 T
(SP6) 108 140 T
0 F
0.02 (SP6 is an early prototype described in [31], [23] \050Chapter 5\051 and [24]. Examples from this) 108 121 P
2.07 (prototype are presented in [26] in support of the idea that the design of functions, and their) 72 105 P
(execution, may both be achieved by information compression.) 72 89 T
0.91 (The main aim in developing this prototype was to explore whether and how it might be) 108 68 P
3.2 (possible to integrate such operations as unsupervised inductive learning, logical deduction,) 72 52 P
0.64 (probabilistic inference, information retrieval, the execution of functions, pattern recognition and) 72 36 P
FMENDPAGE
%%EndPage: "8" 7
%%Page: "7" 7
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 7 -) 296 757 T
72 27 540 720 R
7 X
V
0 X
2.14 (process may work \324backwards\325: a collection of \324sentences\325 like these may be compressed to) 72 712 P
(produce a \324grammar\325 like the one shown at the top of the example above \050see [23], pp. 148-150\051.) 72 696 T
1 F
(2.3.1) 72 670 T
(Recursion) 108 670 T
0 F
1.48 (Here is another example to show how unification of patterns can mean the creation of) 108 651 P
(redundancy. Consider this simple structure in SP notation:) 72 635 T
3 F
([Loop \050Step1 Step2 [Loop _]\051].) 108 614 T
0 F
0.53 (The variable \050\324) 72 595 P
3 F
0.59 (_) 144.36 595 P
0 F
0.53 (\325\051 in this structure represents an arbitrary amount of missing information so that) 151.03 595 P
3 F
0.23 ([Loop _]) 72 579 P
0 F
0.21 ( should unify with) 115.6 579 P
3 F
0.23 ([Loop \050Step1 Step2 [Loop _]\051]) 207.1 579 P
0 F
0.21 ( giving) 365.46 579 P
3 F
0.23 ([Loop \050Step1 Step2 [Loop) 402.55 579 P
(_]\051]) 72 563 T
0 F
( as the result.) 89.34 563 T
0.04 (Clearly, this process is recursive and may continue without limit. On each cycle, there is a) 108 542 P
0.11 (unification of patterns in accordance with the principle of compression. But because the structure) 72 526 P
-0.19 (is recursive, there is no overall reduction of the size of the structure at the end of each cycle. More) 72 510 P
0.36 (importantly in the present context, an \324audit trail\325 or \324trace\325 of the) 72 494 P
1 F
0.36 (behaviour) 393.92 494 P
0 F
0.36 ( of the program will) 442.58 494 P
(show large amounts of redundancy.) 72 478 T
2 F
(2.4) 72 452 T
(Monotonicity and Non-Monotonicity) 108 452 T
0 F
0.02 (\322... in [9] it is proven that non-monotonicity is required as an element for a universal basis) 108 433 P
0.04 (of computation. In this light it is odd that Wolff attempts to model with SP the entire class) 108 417 P
-0.55 (of both monotonic and non-monotonic computable functions using only the class of strictly) 108 401 P
(monotonic compression functions.\323 \050p. 214\051.) 108 385 T
-0.64 (In the article referred to, Minsky [9] argues that, in a network of \324elements\325 of certain kinds,) 108 364 P
0 (organised in a particular way, at least one of the elements must be non-monotonic in order for the) 72 348 P
0.86 (network as a whole to be non-monotonic. The point is not actually \322proven\323 \050it depends on the) 72 332 P
0.4 (words \322it is easy to show that ...\323\051 but it is plausible enough. However, the arguments have been) 72 316 P
(applied only to that particular type of system and may not be applicable to other models.) 72 300 T
-0.17 (In [9], a \324response function\325 is) 108 279 P
1 F
-0.17 (monotonic) 255.27 279 P
0 F
-0.17 ( if, for any two sets of input values where one set) 305.93 279 P
0.46 (is a sub-set of the other, the same relation holds for the corresponding two sets of output values.) 72 263 P
(The function is) 72 247 T
1 F
(non-monotonic) 147.66 247 T
0 F
( if the relation does not hold. Examples are shown in Figure 1.) 220.32 247 T
1.9 (Figure 1. Inputs and outputs for a monotonic function and a non-monotonic function.) 108.5 107 P
(Following the usual convention, sets have been marked with \324{\325 and \324}\325.) 108.5 93 T
4.67 (Compression in an SP system would normally be monotonic, assuming that the) 108 69 P
1.01 (effectiveness of the compression process does not vary or that the proportion of non-redundant) 72 53 P
0.75 (information which is discarded \050if any\051 does not vary. With \324lossless\325 compression, the limiting) 72 37 P
72 27 540 720 C
72 124 540 243 C
107.92 131.29 246.92 212.29 R
7 X
0 K
V
0.5 H
2 Z
0 X
N
0 12 Q
({A, B}) 118 166 T
({X, Y}) 187 166 T
({A, B, C}) 118 145 T
({X, Y, Z}) 187 145 T
(Input) 118 195 T
(Output) 187 195 T
178 212 178 132 2 L
N
108 185 247 185 2 L
N
(Monotonic) 151.13 227 T
(Non-monotonic) 348.46 226 T
316.92 131.29 455.92 212.29 R
7 X
V
0 X
N
({A, B}) 327 166 T
({X, Y, Z}) 397 166 T
({A, B, C}) 327 145 T
({X, Y}) 397 145 T
(Input) 327 195 T
(Output) 397 195 T
387 212 387 132 2 L
N
317 185 456 185 2 L
N
72 27 540 720 C
0 0 612 792 C
FMENDPAGE
%%EndPage: "7" 6
%%Page: "6" 6
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 6 -) 296 757 T
72 27 540 720 R
7 X
V
3 F
0 X
([\050[N _][V _][ADV _]\051) 144 712 T
([N john]) 144 697 T
([N mary]) 144 682 T
([V walks]) 144 667 T
([V runs]) 144 652 T
([ADV fast]) 144 637 T
([ADV slowly]]) 144 622 T
(=>) 144 592 T
([\050[N john][V walks][ADV fast]\051) 144 562 T
([N mary]) 144 547 T
([V runs]) 144 532 T
([ADV slowly]]) 144 517 T
(OR) 144 487 T
([\050[N mary][V walks][ADV fast]\051) 144 457 T
([N john]) 144 442 T
([V runs]) 144 427 T
([ADV slowly]]) 144 412 T
(OR) 144 382 T
([\050[N john][V runs][ADV fast]\051) 144 352 T
([N mary]) 144 337 T
([V walks]) 144 322 T
([ADV slowly]]) 144 307 T
(etc.\323) 144 277 T
0 F
(\050pp. 141-142\051.) 170.68 277 T
0.29 (For readers unfamiliar with the SP notation, \324\050...\051\325 represents a sequence, \324[...]\325 represents) 108 258 P
-0.14 (an unordered collection in which items may be repeated \050the same as a \324bag\325 in logic\051, and \324_\325 is a) 72 242 P
1.78 (place-marker for missing information, rather like an unnamed variable in Prolog. A \324symbol\325) 72 226 P
0.34 (\050which is an \324atom\325 of information in the notation\051 is any string of characters bounded by spaces) 72 210 P
0.43 (or brackets. The arrow \050\324=>\325\051 is not part of the notation - it merely marks the transformation \050by) 72 194 P
-0.24 (compression\051 from one structure to another. The distinction between upper- and lower-case letters) 72 178 P
(has no formal significance.) 72 162 T
-0.54 (Each of the three alternatives in this example illustrates a simple idea which lies at the heart) 108 141 P
0.16 (of the SP programme: that redundancy may be reduced by the merging or unification of identical) 72 125 P
-0.49 (patterns. In each case, two of the three instances of \324) 72 109 P
3 F
-0.55 (N) 318.34 109 P
0 F
-0.49 (\325 have been merged, and likewise for \324) 327 109 P
3 F
-0.55 (V) 508.16 109 P
0 F
-0.49 (\325 and) 516.17 109 P
1.17 (\324) 72 93 P
3 F
1.3 (ADV) 76 93 P
0 F
1.17 (\325. The effect of these unifications is to \324pull\325 structures like) 100.67 93 P
3 F
1.3 ([N john]) 399.51 93 P
0 F
1.17 ( and) 442.17 93 P
3 F
1.3 ([V walks]) 467.84 93 P
0 F
1.17 ( into) 517.16 93 P
(position within the) 72 77 T
3 F
(\050[N _][V _][ADV _]\051) 165.01 77 T
0 F
( structure.) 264.38 77 T
-0.61 (The effect of a search process which can find) 108 56 P
1 F
-0.61 (alternative) 322.76 56 P
0 F
-0.61 (unifications is to create alternative) 377.14 56 P
3.2 (three-word \324sentences\325 which, collectively, contain significant amounts of redundancy. The) 72 40 P
FMENDPAGE
%%EndPage: "6" 5
%%Page: "5" 5
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 5 -) 296 757 T
72 27 540 720 R
7 X
V
0 X
1.25 (\322a stance that seems almost to defy common sense\323 \050p. 214\051 because, in my experience, other) 72 712 P
-0.75 (people react in a similar way. But common sense is a fickle mistress in science. If we were to follow) 72 696 P
-0.42 (it slavishly we would still believe that the world is flat and that the sun moves round the earth. And) 72 680 P
3.61 (the counter-intuitive insights of relativity and quantum mechanics would never have won) 72 664 P
(acceptance.) 72 648 T
2 F
(2.2) 72 622 T
(Turing Machine) 108 622 T
0 F
-0.19 (\322It is not at all clear what could be gained from such a view over existing, widely accepted) 108 603 P
(models of computation, such as the Turing machine.\323 \050[15], p. 214\051.) 108 587 T
1.45 (It is a common misconception that alternative theories in one domain must necessarily) 108 566 P
1.15 (compete. Each theory may be \324correct\325 in its own way. Alternative theories may eventually be) 72 550 P
(integrated \050witness the wave theory and the corpuscular theory of light\051.) 72 534 T
0.28 (Turing\325s achievements are, of course, monumental. But the concept of a Turing machine,) 108 513 P
-0.59 (while it provides many insights, also leaves many questions unanswered. Since Turing\325s time there) 72 497 P
0.02 (has been much research in areas like logic programming, functional programming, object-oriented) 72 481 P
0.28 (design, expert systems, machine learning, natural language processing and many others. There is) 72 465 P
0.05 (a pressing need to integrate and simplify concepts which have been developed in areas like these.) 72 449 P
1.46 (Information compression may provide that unifying framework, either in conjunction with the) 72 433 P
(Turing model or, perhaps, independently of it.) 72 417 T
2 F
(2.3) 72 391 T
(A Contradiction) 108 391 T
0 F
0.04 (The next point in [15] \050p. 214\051 is potentially lethal for the proposition that computing may) 108 372 P
-0.59 (be understood as information compression: \322... one wonders how such a theory could be reconciled) 72 356 P
1.81 (with the obvious counter example - the set of algorithms whose express purpose involves an) 72 340 P
-0.36 (increase in the redundancy of a set of data.\323 \050[15], p. 214\051. Here is an example of a simple function) 72 324 P
(which has this effect:) 72 308 T
3 F
(silly\050\051) 108 287 T
({) 108 272 T
(print\050\324A\325\051 ;) 144 257 T
(silly\050\051 ;) 144 242 T
(}) 108 227 T
0 F
1.34 (The creation of redundancy in this way conflicts with the idea of \324computing as compression\325) 72 208 P
(because compression requires redundancy to be decreased.) 72 192 T
0.73 (Since this is such an important point, it is surprising that it is given so little prominence in) 108 171 P
0.04 ([15]. It is not clear why the authors have omitted to describe an explanation of the paradox which) 72 155 P
(appears in the very source which they have cited [23]:) 72 139 T
0.3 (\322The answer seems to lie in the idea that the search process may take more than one path) 108 118 P
0.01 (through the search space and may offer more than one solution to any problem. In general) 108 102 P
1.13 (terms, an SP system can, in effect, create redundancy by showing a variety of different) 108 86 P
0.38 (\324good\325 answers to the unification search. Here is an example, using the SP notation ..., to) 108 70 P
(show how this might happen:) 108 54 T
FMENDPAGE
%%EndPage: "5" 4
%%Page: "4" 4
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 4 -) 296 757 T
72 27 540 720 R
7 X
V
0 X
-0.33 (It is quite inaccurate to describe these and related possibilities as \324claims\325 for the SP theory) 108 712 P
1.11 ([15]. It is obvious that they are sketches of where the research may lead which are necessarily) 72 696 P
2.12 (tentative. If they were totally predictable then there would be no research to be done. Their) 72 680 P
(importance, as I have said, is to provide the motivation for the research.) 72 664 T
2 F
(2) 72 633 T
(The SP Conjectures) 108 633 T
0 F
-0.15 (Section 4 of [15] considers some aspects of the conjectures on which the SP programme is) 108 614 P
(based. In summary, these conjectures are:) 72 598 T
(\245) 72 577 T
1.62 (The) 108 577 P
1 F
1.62 (biological conjecture) 131.28 577 P
0 F
1.62 ( that the storage and processing of information in brains and) 235.22 577 P
1.58 (nervous systems may often be understood as information compression. Aspects of this) 108 561 P
0.51 (conjecture have been discussed mainly in [25] and [23], Chapters 2 and 3. Although it is) 108 545 P
2.14 (currently out of fashion, this idea has a good pedigree and may even be regarded as) 108 529 P
(established fact.) 108 513 T
(\245) 72 492 T
-0.39 (The) 108 492 P
1 F
-0.39 (computational conjecture) 129.27 492 P
0 F
-0.39 ( that \050in artificial systems\051) 251.87 492 P
1 F
-0.39 (all kinds of computing and formal) 378.93 492 P
1.09 (reasoning may usefully be seen as information compression) 108 476 P
0 F
1.09 (. A qualified version of this) 402.58 476 P
1.52 (idea was the subject of [22]: \322The organisation and use of any kind of formal system,) 108 460 P
4.5 (knowledge structure or computing system may usefully be seen in terms of the) 108 444 P
0.78 (management of redundancy in information.\323 \050pp. 518-519\051. The phrase \322management of) 108 428 P
-0.49 (redundancy\323 was used rather than \322compression\323 because of uncertainty at that stage about) 108 412 P
-0.35 (how the creation of redundancy by computers might be explained \050see Section 2.3, below\051.) 108 396 P
0.37 (In subsequent publications \050e.g., [23]\051 the conjecture has been strengthened to essentially) 108 380 P
(its present form.) 108 364 T
(\245) 72 343 T
-0.38 (Part of the computational conjecture as it is being developed in this programme of research) 108 343 P
0.28 (is the) 108 327 P
1 F
0.28 (compression conjecture) 137.23 327 P
0 F
0.28 (: the idea that information compression to support all forms) 251.82 327 P
1.06 (of computing and formal reasoning may be achieved by the comparison or matching of) 108 311 P
-0.46 (patterns, the merging or \324unification\325) 108 291.53 P
0 10 Q
-0.38 (1) 283.81 296.33 P
0 12 Q
-0.46 ( of patterns which are the same, combined with some) 288.81 291.53 P
-0.55 (kind of metrics-guided search \050hill-climbing, beam search etc.\051 to find as much redundancy) 108 275.53 P
-0.33 (as possible for a given amount of processing \050[22], Sections 2.6 and 2.7; [23], pp. 136-139,) 108 259.53 P
0.39 ([28]\051.) 108 240.07 P
0 10 Q
0.32 (2) 134.99 244.87 P
0 12 Q
0.39 ( An implication is that) 139.99 240.07 P
1 F
0.39 (all) 252.26 240.07 P
0 F
0.39 ( kinds of information compression may be understood in) 264.93 240.07 P
(these terms \050[25], p. 109\051.) 108 224.07 T
(The computational conjecture is the main focus of attention in [15].) 108 203.07 T
2 F
(2.1) 72 177.07 T
(Common Sense) 108 177.07 T
0 F
-0.1 (One can sympathise when the authors of [15] characterise the computational conjecture as) 108 158.07 P
72 139 540 153.98 C
72 139 540 153.98 R
7 X
0 K
V
81 151.96 225 151.96 2 L
V
0.5 H
2 Z
0 X
N
0 0 612 792 C
0 12 Q
0 X
0 K
0.84 (1.) 90 131 P
1 F
0.84 (Unification) 102.84 131 P
0 F
0.84 ( in this research programme means a simple merging of identical patterns,) 157.51 131 P
0.31 (close to the non-technical meaning of the word. Although there is some risk of confusion) 90 115 P
1.29 (with the more complicated concept of \324unification\325 in logic, the term has been adopted) 90 99 P
(because no other word captures the intended meaning so well.) 90 83 T
-0.3 (2. In the rest of this article, pattern matching, unification and metrics-guided search will be) 90 67 P
(abbreviated as PMUS.) 90 51 T
FMENDPAGE
%%EndPage: "4" 3
%%Page: "3" 3
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 3 -) 296 757 T
72 27 540 720 R
7 X
V
2 F
0 X
(1) 72 712 T
(Introduction) 108 712 T
0 F
0.49 (In a previous article in this journal [25], I discussed the proposition that \322the storage and) 108 693 P
1.47 (processing of information in computers and in brains may often be understood as) 72 677 P
1 F
1.47 (information) 483.32 677 P
-0.7 (compression) 72 661 P
0 F
-0.7 (.\323 \050p. 107\051. In a later article [15], two of my colleagues offer a critique of the computing) 132.66 661 P
-0.45 (aspects of [25] and research on the more specific conjecture that all kinds of computing and formal) 72 645 P
(reasoning may usefully be understood as information compression.) 72 629 T
0.27 (The present article answers the main points in [15], tries to correct the many inaccuracies) 108 608 P
-0.64 (and misconceptions in that article, and discusses related issues. It is intended that this article should) 72 592 P
(be comprehensible without recourse to the previous articles.) 72 576 T
0.44 (Information compression may be seen as a process which increases the) 108 555 P
1 F
0.44 (simplicity) 456.46 555 P
0 F
0.44 ( of data) 503.12 555 P
3.34 (\050by reduction of redundancy\051 whilst preserving as much as possible of the non-redundant) 72 539 P
1.83 (descriptive) 72 523 P
1 F
1.83 (power) 129.48 523 P
0 F
1.83 (. Hence the mnemonic \324SP\325 applied to ideas in this area. Since information) 159.48 523 P
2.18 (compression is a rather broad concept, \324SP\325 is intended as a label for the relatively specific) 72 507 P
(proposals described at the beginning of Section 2, below, and elsewhere [28].) 72 491 T
2 F
(1.1) 72 465 T
(The Development of Theory) 108 465 T
0 F
-0.52 (Within [15] there seems to be an underlying misconception about the nature of theories and) 108 446 P
0.18 (how they arise. The authors appear to believe that a theory springs into life, fully formed with all) 72 430 P
2.02 (its details in place. But a theory normally starts as the germ of an idea which must then be) 72 414 P
1.18 (developed until the theory can provide a coherent account of the phenomena which it seeks to) 72 398 P
-0.22 (address. This process of development is often compared aptly with the process of solving a jigsaw) 72 382 P
2.67 (puzzle, except that the development of theory often takes years and can require significant) 72 366 P
(backtracking when false leads have been followed.) 72 350 T
-0.37 (The SP theory is based on an idea whose tentative status is clearly marked by the use of the) 108 329 P
0.59 (word \324conjecture\325. The jigsaw puzzle has reached the stage where some of the pieces have been) 72 313 P
(put into groups and a few have been tentatively fitted together.) 72 297 T
2 F
(1.2) 72 271 T
(Motivation for Research) 108 271 T
0 F
-0.72 (This misconception about theories and how they are developed manifests itself most clearly) 108 252 P
3.08 (in observations in [15] about what the SP theory supposedly \324claims\325. Any good research) 72 236 P
-0.64 (programme should be guided by some kind of vision of where it may lead and the potential benefits) 72 220 P
0.96 (in terms of science or applications or both. Without this vision, much of the motivation for the) 72 204 P
0.67 (research is missing. There is no conflict between this idea and the undoubted fact that scientific) 72 188 P
0.07 (advances can arise from chance occurrences and unplanned insights. Without the longer view, we) 72 172 P
(would have no way of recognising the significance of chance discoveries when they occur.) 72 156 T
0.39 (The future possibilities and practical benefits that I see for this research programme have) 108 135 P
-0.54 (been described most fully in [23], Chapter 6, and more briefly elsewhere. In the broadest terms, the) 72 119 P
(two possibilities which provide the main motivation for this research programme are:) 72 103 T
(\245) 72 82 T
(Integration and simplification of concepts in computing and cognition.) 108 82 T
(\245) 72 61 T
0.16 (The development of a \324new generation\325 computing system with potential advantages over) 108 61 P
(conventional computers.) 108 45 T
FMENDPAGE
%%EndPage: "3" 2
%%Page: "2" 2
612 792 0 FMBEGINPAGE
72 747 540 765 R
7 X
0 K
V
0 12 Q
0 X
(- 2 -) 296 757 T
72 27 540 720 R
7 X
V
5 F
0 X
(Abstract) 72 712 T
0 F
3.39 (An earlier article [25] discusses the proposition that the storage and processing of) 108 691 P
0.62 (information in computers and in brains may often be understood as information compression. A) 72 675 P
0.25 (subsequent article [15] criticises the computing aspects of [25] and research on the more specific) 72 659 P
2.29 (conjecture that all forms of computing and formal reasoning may usefully be understood as) 72 643 P
(information compression.) 72 627 T
0.03 (The present article, which is intended to be intelligible without recourse to earlier articles,) 108 606 P
0.05 (answers the main points in [15], tries to correct the many inaccuracies and misconceptions in that) 72 590 P
(article, and discusses related issues.) 72 574 T
0.15 (Topics which are discussed include: the way theories are or should be developed; the role) 108 553 P
1.21 (of evidence in motivating research; apparent shortcomings in the Turing machine concept as a) 72 537 P
2.9 (reason for seeking new principles of computing; the apparent conflict between the idea of) 72 521 P
1.15 (\324computing as compression\325 and the fact that computers may create redundancy - and how the) 72 505 P
2.19 (contradiction may be resolved; monotonicity and non-monotonicity of functions; information) 72 489 P
-0.66 (theory as a basis for \324computing as compression\325; computer models of a proposed \324new generation\325) 72 473 P
1.84 (computing system dedicated to information compression by pattern matching, unification and) 72 457 P
2.5 (metrics-guided search \050and how, within this framework, the effect of re-write rules may be) 72 441 P
1.01 (imitated; how information may be transposed from one place to another; and how the effect of) 72 425 P
7.01 (procedural programming may be achieved\051; computational complexity of information) 72 409 P
3.8 (compression; the relationship of current proposals to research on inductive inference and) 72 393 P
(algorithmic information theory.) 72 377 T
FMENDPAGE
%%EndPage: "2" 1
%%Page: "1" 1
612 792 0 FMBEGINPAGE
72 27 540.46 720.46 R
7 X
0 K
V
0 12 Q
0 X
(In) 72 712.46 T
1 F
(AI Communications) 85 712.46 T
(7 \0503/4\051) 183.66 712.46 T
0 F
(, 203-219, 1994) 215.99 712.46 T
2 24 Q
(COMPUTING AND INFORMATION) 108.91 473.46 T
(COMPRESSION: A REPLY) 157.56 447.46 T
2 18 Q
(J Gerard Wolff) 246.74 410.46 T
0 12 Q
(June 1994) 281.73 384.46 T
(1.2) 298.73 365.46 T
0 14 Q
-0.83 (School of Electronic Engineering and Computer Systems, University of Wales, Dean) 72 72.12 P
0.82 (Street, Bangor, Gwynedd, LL57 1UT, UK. Telephone: +44 248 382691. Fax: +44) 72 56.12 P
(248 361429. E-mail: gerry@sees.bangor.ac.uk.) 72 40.12 T
FMENDPAGE
%%EndPage: "1" 0
%%Trailer
%%BoundingBox: 0 0 612 792
%%Pages: 26 -1
%%DocumentFonts: Times-Roman
%%+ Times-Italic
%%+ Times-Bold
%%+ Helvetica
%%+ Helvetica-Bold
%%+ Times-BoldItalic