diff --git a/books/bookvol5.pamphlet b/books/bookvol5.pamphlet
index d51b820..abc005c 100644
--- a/books/bookvol5.pamphlet
+++ b/books/bookvol5.pamphlet
@@ -498,7 +498,6 @@ information is initialized.
\end{chunk}
\defun{restart0}{Non-interactive restarts}
-\calls{restart0}{compressopen}
\calls{restart0}{interpopen}
\calls{restart0}{operationopen}
\calls{restart0}{categoryopen}
@@ -506,7 +505,6 @@ information is initialized.
\calls{restart0}{getEnv}
\begin{chunk}{defun restart0}
(defun restart0 ()
- (compressopen) ;; set up the compression tables
(interpopen) ;; open up the interpreter database
(operationopen) ;; all of the operations known to the system
(categoryopen) ;; answer hasCategory question
@@ -36684,9 +36682,8 @@ page~\pageref{TheFrameMechanism}.
\section{Database structure}
In order to understand this program you need to understand some details
-of the structure of the databases it reads. Axiom has 5 databases,
-the interp.daase, operation.daase, category.daase, compress.daase, and
-browse.daase. The compress.daase is special and does not follow the
-normal database format.
+of the structure of the databases it reads. Axiom has 4 databases:
+the interp.daase, operation.daase, category.daase, and
+browse.daase.
\subsection{kaf File Format}
This documentation refers to kaf files which are random access files.
@@ -36720,9 +36717,6 @@ data so that the very long names don't take up so much space.
-We could probably remove the compression algorithm as 64k is no
-longer considered 'huge'. The database-abbreviation routine
-handles this on read and write-compress handles this on write.
-The squeeze routine is used to compress the keys, the unsqueeze
-routine uncompresses them. Making these two routines disappear
-should remove all of the compression.
+The compression algorithm has been removed, as 64k is no
+longer considered 'huge'. The database-abbreviation routine
+handled this on read and write-compress handled this on write.
Indeed, a faster optimization is to simply read the whole database
into the image before it is saved. The system would be easier to
@@ -36943,30 +36937,6 @@ database is opened and the ..-stream-stamp* variable matches the
position information in the database then the database is NOT
read in and is assumed to match the in-core version
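The stamp is the pair printed at the top of each .daase file,
(master-index-position . write-time). As a minimal sketch (the name
open-daase-sketch and its arguments are illustrative only, not part of
the system), the pattern the *Open routines follow is roughly:
\begin{verbatim}
(defun open-daase-sketch (filename saved-stamp)
  (let* ((stream (open filename :direction :input))
         (stamp (read stream)))            ; (master-pos . write-time)
    (if (equal stamp saved-stamp)
        (values stream saved-stamp nil)    ; in-core tables still valid
        (progn                             ; re-read the master key/position index
          (file-position stream (car stamp))
          (values stream stamp (read stream))))))
\end{verbatim}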
-\defvar{*compressvector*}
-\begin{chunk}{initvars}
-(defvar *compressvector* nil "a vector of things to compress in the databases")
-
-\end{chunk}
-
-\defvar{*compressVectorLength*}
-\begin{chunk}{initvars}
-(defvar *compressVectorLength* 0 "length of the compress vector")
-
-\end{chunk}
-
-\defvar{*compress-stream*}
-\begin{chunk}{initvars}
-(defvar *compress-stream* nil "an stream containing the compress vector")
-
-\end{chunk}
-
-\defvar{*compress-stream-stamp*}
-\begin{chunk}{initvars}
-(defvar *compress-stream-stamp* 0 "*compress-stream* (position . time)")
-
-\end{chunk}
-
\defvar{*interp-stream*}
\begin{chunk}{initvars}
(defvar *interp-stream* nil "an open stream to the interpreter database")
@@ -37030,7 +37000,6 @@ This is indexed by (domain . category)
\end{chunk}
\defun{resethashtables}{Reset all hash tables before saving system}
-\calls{resethashtables}{compressopen}
\calls{resethashtables}{interpopen}
\calls{resethashtables}{operationopen}
\calls{resethashtables}{browseopen}
@@ -37044,8 +37013,6 @@ This is indexed by (domain . category)
\uses{resethashtables}{*category-stream-stamp*}
\uses{resethashtables}{*operation-stream-stamp*}
\uses{resethashtables}{*interp-stream-stamp*}
-\uses{resethashtables}{*compress-stream-stamp*}
-\uses{resethashtables}{*compressvector*}
\uses{resethashtables}{*allconstructors*}
\uses{resethashtables}{*operation-hash*}
\uses{resethashtables}{*hascategory-hash*}
@@ -37055,15 +37022,11 @@ This is indexed by (domain . category)
(declare (special *sourcefiles* *interp-stream* *operation-stream*
*category-stream* *browse-stream* *category-stream-stamp*
*operation-stream-stamp* *interp-stream-stamp*
- *compress-stream-stamp* *compressvector*
*allconstructors* *operation-hash* *hascategory-hash*))
(setq *hascategory-hash* (make-hash-table :test #'equal))
(setq *operation-hash* (make-hash-table))
(setq *allconstructors* nil)
- (setq *compressvector* nil)
(setq *sourcefiles* nil)
- (setq *compress-stream-stamp* '(0 . 0))
- (compressopen)
(setq *interp-stream-stamp* '(0 . 0))
(interpopen)
(setq *operation-stream-stamp* '(0 . 0))
@@ -37194,7 +37157,6 @@ Format of an entry in interp.daase:
ancestors -- used to compute new category updates
)
\end{verbatim}
-\calls{interpOpen}{unsqueeze}
\calls{interpOpen}{make-database}
\calls{interpOpen}{DaaseName}
\usesdollar{interpOpen}{spadroot}
@@ -37216,7 +37178,6 @@ Format of an entry in interp.daase:
(file-position *interp-stream* pos)
(setq constructors (read *interp-stream*))
(dolist (item constructors)
- (setq item (unsqueeze item))
(setq *allconstructors* (adjoin (first item) *allconstructors*))
(setq dbstruct (make-database))
(setf (get (car item) 'database) dbstruct)
@@ -37263,7 +37224,6 @@ Format of an entry in browse.daase:
predicates
)
\end{verbatim}
-\calls{browseOpen}{unsqueeze}
\usesdollar{browseOpen}{spadroot}
\uses{browseOpen}{*allconstructors*}
\uses{browseOpen}{*browse-stream*}
@@ -37283,7 +37243,6 @@ Format of an entry in browse.daase:
(file-position *browse-stream* pos)
(setq constructors (read *browse-stream*))
(dolist (item constructors)
- (setq item (unsqueeze item))
(unless (setq dbstruct (get (car item) 'database))
(format t "browseOpen:~%")
(format t "the browse database contains a contructor ~a~%" item)
@@ -37303,7 +37262,6 @@ Format of an entry in browse.daase:
\end{chunk}
\defun{categoryOpen}{Open the category database}
-\calls{categoryOpen}{unsqueeze}
\usesdollar{categoryOpen}{spadroot}
\uses{categoryOpen}{*hasCategory-hash*}
\uses{categoryOpen}{*category-stream*}
@@ -37324,14 +37282,12 @@ Format of an entry in browse.daase:
(setq keys (read *category-stream*))
(setq *hasCategory-hash* (make-hash-table :test #'equal))
(dolist (item keys)
- (setq item (unsqueeze item))
(setf (gethash (first item) *hasCategory-hash*) (second item))))
(format t "~&")))
\end{chunk}
\defun{operationOpen}{Open the operations database}
-\calls{operationOpen}{unsqueeze}
\usesdollar{operationOpen}{spadroot}
\uses{operationOpen}{*operation-hash*}
\uses{operationOpen}{*operation-stream*}
@@ -37351,7 +37307,6 @@ Format of an entry in browse.daase:
(file-position *operation-stream* pos)
(setq operations (read *operation-stream*))
(dolist (item operations)
- (setq item (unsqueeze item))
(setf (gethash (car item) *operation-hash*) (cdr item))))
(format t "~&")))
@@ -37460,7 +37415,6 @@ Format of an entry in browse.daase:
\defun{getdatabase}{Get constructor information for a database key}
\calls{getdatabase}{warn}
-\calls{getdatabase}{unsqueeze}
\usesdollar{getdatabase}{spadroot}
\uses{getdatabase}{*miss*}
\uses{getdatabase}{*hascategory-hash*}
@@ -37589,7 +37543,7 @@ Format of an entry in browse.daase:
(when (numberp data) ;fetch the real data
(when *miss* (format t "getdatabase miss: ~20a ~a~%" constructor key))
(file-position stream data)
- (setq data (unsqueeze (read stream)))
+ (setq data (read stream))
(case key ; cache the result of the database read
(operation (setf (gethash constructor *operation-hash*) data))
(hascategory (setf (gethash constructor *hascategory-hash*) data))
@@ -37882,7 +37836,6 @@ constructor abbreviation to pamphlet file name.
\calls{make-databases}{saveUsersHashTable}
\calls{make-databases}{mkDependentsHashTable}
\calls{make-databases}{saveDependentsHashTable}
-\calls{make-databases}{write-compress}
\calls{make-databases}{write-browsedb}
\calls{make-databases}{write-operationdb}
\calls{make-databases}{write-categorydb}
@@ -37894,7 +37847,6 @@ constructor abbreviation to pamphlet file name.
\calls{make-databases}{write-warmdata}
\usesdollar{make-databases}{constructorList}
\uses{make-databases}{*sourcefiles*}
-\uses{make-databases}{*compressvector*}
\uses{make-databases}{*allconstructors*}
\uses{make-databases}{*operation-hash*}
\begin{chunk}{defun make-databases}
@@ -37956,7 +37908,7 @@ constructor abbreviation to pamphlet file name.
(format nil "~a.daase~a" root ext))
)
(let (d)
- (declare (special |$constructorList| *sourcefiles* *compressvector*
+ (declare (special |$constructorList| *sourcefiles*
*allconstructors* *operation-hash*))
(do-symbols (symbol)
(when (get symbol 'database)
@@ -37964,7 +37916,6 @@ constructor abbreviation to pamphlet file name.
(setq *hascategory-hash* (make-hash-table :test #'equal))
(setq *operation-hash* (make-hash-table))
(setq *allconstructors* nil)
- (setq *compressvector* nil)
(withSpecialConstructors)
(localdatabase nil
(list (list '|dir| (namestring (truename "./")) ))
@@ -37989,7 +37940,6 @@ constructor abbreviation to pamphlet file name.
(|mkDependentsHashTable|)
(|saveDependentsHashTable|)
; (|buildGloss|)
- (write-compress)
(write-browsedb)
(write-operationdb)
; note: genCategoryTable creates a new *hascategory-hash* table
@@ -38012,9 +37962,6 @@ constructor abbreviation to pamphlet file name.
; does gethash calls into it rather than doing a getdatabase call.
(write-interpdb)
(write-warmdata)
- (when (probe-file (final-name "compress"))
- (delete-file (final-name "compress")))
- (rename-file "compress.build" (final-name "compress"))
(when (probe-file (final-name "interp"))
(delete-file (final-name "interp")))
(rename-file "interp.build" (final-name "interp"))
@@ -38093,147 +38040,6 @@ constructor abbreviation to pamphlet file name.
\end{chunk}
-\subsection{compress.daase}
-The compress database is special. It contains a list of symbols.
-The character string name of a symbol in the other databases is
-represented by a negative number. To get the real symbol back you
-take the absolute value of the number and use it as a byte index
-into the compress database. In this way long symbol names become
-short negative numbers.
-
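As a concrete illustration of this encoding (the vector contents below
are invented for the example), if the compress vector held
#(|Ring| |Integer| |coerce|) then the entry -2 found in another
database would decode back to the symbol at index 2:
\begin{verbatim}
(let ((vec #(|Ring| |Integer| |coerce|)))   ; stand-in for the compress vector
  (svref vec (- -2)))                       ; => |coerce|
\end{verbatim}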
-\defun{compressOpen}{Set up compression vectors for the databases}
-\calls{compressOpen}{DaaseName}
-\usesdollar{compressOpen}{spadroot}
-\uses{compressOpen}{*compressvector*}
-\uses{compressOpen}{*compressVectorLength*}
-\uses{compressOpen}{*compress-stream*}
-\uses{compressOpen}{*compress-stream-stamp*}
-\begin{chunk}{defun compressOpen}
-(defun compressOpen ()
- (let (lst stamp pos)
- (declare (special $spadroot *compressvector* *compressVectorLength*
- *compress-stream* *compress-stream-stamp*))
- (setq *compress-stream*
- (open (DaaseName "compress.daase" nil) :direction :input))
- (setq stamp (read *compress-stream*))
- (unless (equal stamp *compress-stream-stamp*)
- (format t " Re-reading compress.daase")
- (setq *compress-stream-stamp* stamp)
- (setq pos (car stamp))
- (file-position *compress-stream* pos)
- (setq lst (read *compress-stream*))
- (setq *compressVectorLength* (car lst))
- (setq *compressvector*
- (make-array (car lst) :initial-contents (cdr lst))))))
-
-\end{chunk}
-
-\defvar{*attributes*}
-\begin{chunk}{initvars}
-(defvar *attributes*
- '(|nil| |infinite| |arbitraryExponent| |approximate| |complex|
- |shallowMutable| |canonical| |noetherian| |central|
- |partiallyOrderedSet| |arbitraryPrecision| |canonicalsClosed|
- |noZeroDivisors| |rightUnitary| |leftUnitary|
- |additiveValuation| |unitsKnown| |canonicalUnitNormal|
- |multiplicativeValuation| |finiteAggregate| |shallowlyMutable|
- |commutative|) "The list of known algebra attributes")
-
-\end{chunk}
-
-\defun{write-compress}{Write out the compress database}
-\calls{write-compress}{allConstructors}
-\calls{write-compress}{allOperations}
-\uses{write-compress}{*compress-stream*}
-\uses{write-compress}{*attributes*}
-\uses{write-compress}{*compressVectorLength*}
-\begin{chunk}{defun write-compress}
-(defun write-compress ()
- (let (compresslist masterpos out)
- (declare (special *compress-stream* *attributes* *compressVectorLength*))
- (close *compress-stream*)
- (setq out (open "compress.build" :direction :output))
- (princ " " out)
- (finish-output out)
- (setq masterpos (file-position out))
- (setq compresslist
- (append (|allConstructors|) (|allOperations|) *attributes*))
- (push "algebra" compresslist)
- (push "failed" compresslist)
- (push 'signature compresslist)
- (push '|ofType| compresslist)
- (push '|Join| compresslist)
- (push 'and compresslist)
- (push '|nobranch| compresslist)
- (push 'category compresslist)
- (push '|category| compresslist)
- (push '|domain| compresslist)
- (push '|package| compresslist)
- (push 'attribute compresslist)
- (push '|isDomain| compresslist)
- (push '|ofCategory| compresslist)
- (push '|Union| compresslist)
- (push '|Record| compresslist)
- (push '|Mapping| compresslist)
- (push '|Enumeration| compresslist)
- (setq *compressVectorLength* (length compresslist))
- (setq *compressvector*
- (make-array *compressVectorLength* :initial-contents compresslist))
- (print (cons (length compresslist) compresslist) out)
- (finish-output out)
- (file-position out 0)
- (print (cons masterpos (get-universal-time)) out)
- (finish-output out)
- (close out)))
-
-\end{chunk}
-
-\defun{squeeze}{Compress an expression using the compress vector}
-This function is used to minimize the size of the databases by
-replacing symbols with indexes into the compression vector.
-\uses{squeeze}{*compressvector*}
-\begin{chunk}{defun squeeze}
-(defun squeeze (expr)
- (declare (special *compressvector*))
- (let (leaves pos (bound (length *compressvector*)))
- (labels (
- (flat (expr)
- (when (and (numberp expr) (< expr 0) (>= expr bound))
- (print expr)
- (break "squeeze found a negative number"))
- (if (atom expr)
- (unless (or (null expr)
- (and (symbolp expr) (char= (schar (symbol-name expr) 0) #\*)))
- (setq leaves (adjoin expr leaves)))
- (progn
- (flat (car expr))
- (flat (cdr expr))))))
- (setq leaves nil)
- (flat expr)
- (dolist (leaf leaves)
- (when (setq pos (position leaf *compressvector*))
- (nsubst (- pos) leaf expr)))
- expr)))
-
-\end{chunk}
-
-\defun{unsqueeze}{Uncompress an expression using the compress vector}
-This function is used to recover symbols from the databases by
-using integers as indexes into the compression vector.
-\uses{unsqueeze}{*compressvector*}
-\begin{chunk}{defun unsqueeze}
-(defun unsqueeze (expr)
- (declare (special *compressvector*))
- (cond ((atom expr)
- (cond ((and (numberp expr) (<= expr 0))
- (svref *compressVector* (- expr)))
- (t expr)))
- (t (rplaca expr (unsqueeze (car expr)))
- (rplacd expr (unsqueeze (cdr expr)))
- expr)))
-
-\end{chunk}
-
\subsection{Building the interp.daase from hash tables}
\begin{verbatim}
format of an entry in interp.daase:
@@ -38295,9 +38101,9 @@ Here I'll try to outline the interp database write procedure
(setq opalistpos (file-position out))
-; 5b. We get the "operationalist", compress it, and write it out
+; 5b. We get the "operationalist" and write it out
- (print (squeeze (database-operationalist struct)) out)
+ (print (database-operationalist struct) out)
; 5c. We make sure it was written
@@ -38308,9 +38114,9 @@ Here I'll try to outline the interp database write procedure
(setq cmodemappos (file-position out))
-; 6b. We get the "constructormodemap", compress it, and write it out
+; 6b. We get the "constructormodemap" and write it out
- (print (squeeze (database-constructormodemap struct)) out)
+ (print (database-constructormodemap struct) out)
; 6c. We make sure it was written
@@ -38321,9 +38127,9 @@ Here I'll try to outline the interp database write procedure
(setq modemapspos (file-position out))
-; 7b. We get the "modemaps", compress it, and write it out
+; 7b. We get the "modemaps" and write it out
- (print (squeeze (database-modemaps struct)) out)
+ (print (database-modemaps struct) out)
; 7c. We make sure it was written
@@ -38340,9 +38146,9 @@ Here I'll try to outline the interp database write procedure
(first (last (pathname-directory (database-object struct)))))))
; 9. We write the "constructorcategory", if it is a category, else nil
-; 9a. Get the constructorcategory and compress it
+; 9a. Get the constructorcategory
- (setq concategory (squeeze (database-constructorcategory struct)))
+ (setq concategory (database-constructorcategory struct))
; 9b. If we have any data we write it out, else we don't write it
; Note that if there is no data then the byte index for the
@@ -38378,11 +38184,11 @@ Here I'll try to outline the interp database write procedure
(setq defaultdomain (database-defaultdomain struct))
-; 11. The ancestor data might exist. If it does we fetch it,
-; compress it, and write it out. If it does not we place
+; 11. The ancestor data might exist. If it does we fetch it
+; and write it out. If it does not we place
; and immediate value of nil in the key-value table
- (setq ancestors (squeeze (gethash constructor *ancestors-hash*))) ;cattable.boot
+ (setq ancestors (gethash constructor *ancestors-hash*)) ;cattable.boot
(if ancestors
(progn
(setq ancestorspos (file-position out))
@@ -38407,9 +38213,9 @@ Here I'll try to outline the interp database write procedure
(setq masterpos (file-position out))
-; 15. We compress and print the key-value table
+; 15. We print the key-value table
- (print (mapcar #'squeeze master) out)
+ (print master out)
; 16. We make sure we write the table
@@ -38442,7 +38248,6 @@ Here I'll try to outline the interp database write procedure
\end{verbatim}
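Each write routine below follows essentially the recipe outlined in the
steps above: print each datum at the current file position, remember
that position in a master alist, print the master alist, and finally
back-patch the (position . time) stamp at the top of the file. A
condensed sketch (write-daase-sketch and its alist argument are
illustrative only, not code that is tangled into the system):
\begin{verbatim}
(defun write-daase-sketch (filename alist)
  (with-open-file (out filename :direction :output :if-exists :supersede)
    ;; reserve room at the top of the file for the (position . time) stamp
    (princ (make-string 40 :initial-element #\Space) out)
    (let (master masterpos)
      (dolist (pair alist)                     ; pair is (key . datum)
        (push (cons (car pair) (file-position out)) master)
        (print (cdr pair) out)                 ; datum lives at that position
        (finish-output out))
      (setq masterpos (file-position out))
      (print master out)                       ; the key/position index
      (finish-output out)
      (file-position out 0)                    ; back-patch the stamp
      (print (cons masterpos (get-universal-time)) out)
      (finish-output out))))
\end{verbatim}
With that layout a reader needs only the stamp and the master index;
any datum can then be fetched lazily with file-position and read.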
\defun{write-interpdb}{Write the interp database}
-\calls{write-interpdb}{squeeze}
\usesdollar{write-interpdb}{spadroot}
\uses{write-interpdb}{*ancestors-hash*}
\uses{write-interpdb}{*print-pretty*}
@@ -38462,13 +38267,13 @@ Here I'll try to outline the interp database write procedure
(let (struct)
(setq struct (get constructor 'database))
(setq opalistpos (file-position out))
- (print (squeeze (database-operationalist struct)) out)
+ (print (database-operationalist struct) out)
(finish-output out)
(setq cmodemappos (file-position out))
- (print (squeeze (database-constructormodemap struct)) out)
+ (print (database-constructormodemap struct) out)
(finish-output out)
(setq modemapspos (file-position out))
- (print (squeeze (database-modemaps struct)) out)
+ (print (database-modemaps struct) out)
(finish-output out)
(if (consp (database-object struct)) ; if asharp code ...
(setq obj
@@ -38477,7 +38282,7 @@ Here I'll try to outline the interp database write procedure
(setq obj
(pathname-name
(first (last (pathname-directory (database-object struct)))))))
- (setq concategory (squeeze (database-constructorcategory struct)))
+ (setq concategory (database-constructorcategory struct))
(if concategory ; if category then write data else write nil
(progn
(setq categorypos (file-position out))
@@ -38489,8 +38294,7 @@ Here I'll try to outline the interp database write procedure
(setq cosig (database-cosig struct))
(setq kind (database-constructorkind struct))
(setq defaultdomain (database-defaultdomain struct))
- (setq ancestors
- (squeeze (gethash constructor *ancestors-hash*))) ;cattable.boot
+ (setq ancestors (gethash constructor *ancestors-hash*)) ;cattable.boot
(if ancestors
(progn
(setq ancestorspos (file-position out))
@@ -38502,7 +38306,7 @@ Here I'll try to outline the interp database write procedure
ancestorspos) master)))
(finish-output out)
(setq masterpos (file-position out))
- (print (mapcar #'squeeze master) out)
+ (print master out)
(finish-output out)
(file-position out 0)
(print (cons masterpos (get-universal-time)) out)
@@ -38534,7 +38338,6 @@ time stamp at the top of the file and close the file.
\defun{write-browsedb}{Write the browse database}
\calls{write-browsedb}{allConstructors}
-\calls{write-browsedb}{squeeze}
\usesdollar{write-browsedb}{spadroot}
\uses{write-browsedb}{*sourcefiles*}
\uses{write-browsedb}{*print-pretty*}
@@ -38554,21 +38357,21 @@ time stamp at the top of the file and close the file.
; sourcefile is small. store the string directly
(setq src (gethash constructor *sourcefiles*))
(setq formpos (file-position out))
- (print (squeeze (database-constructorform struct)) out)
+ (print (database-constructorform struct) out)
(finish-output out)
(setq docpos (file-position out))
(print (database-documentation struct) out)
(finish-output out)
(setq attpos (file-position out))
- (print (squeeze (database-attributes struct)) out)
+ (print (database-attributes struct) out)
(finish-output out)
(setq predpos (file-position out))
- (print (squeeze (database-predicates struct)) out)
+ (print (database-predicates struct) out)
(finish-output out)
(push (list constructor src formpos docpos attpos predpos) master)))
(finish-output out)
(setq masterpos (file-position out))
- (print (mapcar #'squeeze master) out)
+ (print master out)
(finish-output out)
(file-position out 0)
(print (cons masterpos (get-universal-time)) out)
@@ -38583,7 +38386,6 @@ database format.
\defun{write-categorydb}{Write the category database}
\calls{write-categorydb}{genCategoryTable}
-\calls{write-categorydb}{squeeze}
\uses{write-categorydb}{*print-pretty*}
\uses{write-categorydb}{*hasCategory-hash*}
\begin{chunk}{defun write-categorydb}
@@ -38601,12 +38403,12 @@ database format.
(setq pos value)
(progn
(setq pos (file-position out))
- (print (squeeze value) out)
+ (print value out)
(finish-output out)))
(push (list key pos) master))
*hasCategory-hash*)
(setq pos (file-position out))
- (print (mapcar #'squeeze master) out)
+ (print master out)
(finish-output out)
(file-position out 0)
(print (cons pos (get-universal-time)) out)
@@ -38619,7 +38421,6 @@ database format.
This is a single table of operations hash table information, dumped in the
database format.
\defun{write-operationdb}{Write the operations database}
-\calls{write-operationdb}{squeeze}
\uses{write-operationdb}{*operation-hash*}
\begin{chunk}{defun write-operationdb}
(defun write-operationdb ()
@@ -38630,13 +38431,13 @@ database format.
(finish-output out)
(maphash #'(lambda (key value)
(setq pos (file-position out))
- (print (squeeze value) out)
+ (print value out)
(finish-output out)
(push (cons key pos) master))
*operation-hash*)
(finish-output out)
(setq pos (file-position out))
- (print (mapcar #'squeeze master) out)
+ (print master out)
(file-position out 0)
(print (cons pos (get-universal-time)) out)
(finish-output out)
@@ -53733,7 +53534,6 @@ digits in TechExplorer. Since Saturn is gone we can remove it.
\getchunk{defun compileBoot}
\getchunk{defun compiledLookup}
\getchunk{defun compiledLookupCheck}
-\getchunk{defun compressOpen}
\getchunk{defun computeDomainVariableAlist}
\getchunk{defun condErrorMsg}
\getchunk{defun conOpPage}
@@ -54980,7 +54780,6 @@ digits in TechExplorer. Since Saturn is gone we can remove it.
\getchunk{defun spleI}
\getchunk{defun spleI1}
\getchunk{defun splitIntoOptionBlocks}
-\getchunk{defun squeeze}
\getchunk{defun stackTraceOptionError}
\getchunk{defun startsComment?}
\getchunk{defun startsNegComment?}
@@ -55053,7 +54852,6 @@ digits in TechExplorer. Since Saturn is gone we can remove it.
\getchunk{defun unifyStruct}
\getchunk{defun unifyStructVar}
\getchunk{defun unparseInputForm}
-\getchunk{defun unsqueeze}
\getchunk{defun untrace}
\getchunk{defun untraceDomainConstructor}
\getchunk{defun untraceDomainConstructor,keepTraced?}
@@ -55084,7 +54882,6 @@ digits in TechExplorer. Since Saturn is gone we can remove it.
\getchunk{defun wrap}
\getchunk{defun write-browsedb}
\getchunk{defun write-categorydb}
-\getchunk{defun write-compress}
\getchunk{defun writeHiFi}
\getchunk{defun writeHistModesAndValues}
\getchunk{defun writeInputLines}
diff --git a/changelog b/changelog
index 6f89121..913ad30 100644
--- a/changelog
+++ b/changelog
@@ -1,3 +1,11 @@
+20140712 tpd src/axiom-website/patches.html 20140712.01.tpd.patch
+20140712 tpd books/bookvol5 remove compression
+20140712 tpd src/interp/util.lisp remove compression
+20140712 tpd src/share/algebra/browse.daase no compression
+20140712 tpd src/share/algebra/category.daase no compression
+20140712 tpd src/share/algebra/interp.daase no compression
+20140712 tpd src/share/algebra/operation.daase no compression
+20140712 tpd src/share/algebra/compress.daase deleted
20140710 tpd src/axiom-website/patches.html 20140710.04.tpd.patch
20140710 tpd books/bookvol5 GCL 2.6.10 assoc sematics change requires GETL fix
20140710 tpd src/axiom-website/patches.html 20140710.03.tpd.patch
diff --git a/patch b/patch
index 29c10d0..314945b 100644
--- a/patch
+++ b/patch
@@ -1,4 +1,3 @@
-books/bookvol5 GCL 2.6.10 assoc sematics change requires a GETL fix
+books/bookvol5, src/interp/util.lisp, *.daase
-port to GCL 2.6.10 runs into a semantics change in assoc handling
-of list arguments. GETL handles this with the new fix.
+remove the database compression
diff --git a/src/axiom-website/patches.html b/src/axiom-website/patches.html
index 477c1cb..6e150b4 100644
--- a/src/axiom-website/patches.html
+++ b/src/axiom-website/patches.html
@@ -4544,6 +4544,8 @@ books/bookvol4 add a section on changing GCL2 versions
zips/gcl-2.6.10.tgz add the latest gcl and all of its patches
20140710.04.tpd.patch
books/bookvol5 GCL 2.6.10 assoc sematics change requires GETL fix
+20140712.01.tpd.patch
+books/bookvol5 remove compression