Full Code of jankotek/JDBM3 for AI

master f50c92afe8ff cached
112 files
833.1 KB
193.8k tokens
1459 symbols
1 requests
Download .txt
Showing preview only (877K chars total). Download the full file or copy to clipboard to get everything.
Repository: jankotek/JDBM3
Branch: master
Commit: f50c92afe8ff
Files: 112
Total size: 833.1 KB

Directory structure:
gitextract_fufew3er/

├── LICENSE-2.0.html
├── README.md
├── pom.xml
└── src/
    ├── main/
    │   └── java/
    │       └── org/
    │           └── apache/
    │               └── jdbm/
    │                   ├── BTree.java
    │                   ├── BTreeLazyRecord.java
    │                   ├── BTreeMap.java
    │                   ├── BTreeNode.java
    │                   ├── BTreeSet.java
    │                   ├── DB.java
    │                   ├── DBAbstract.java
    │                   ├── DBCache.java
    │                   ├── DBCacheMRU.java
    │                   ├── DBCacheRef.java
    │                   ├── DBMaker.java
    │                   ├── DBStore.java
    │                   ├── DataInputOutput.java
    │                   ├── DataInputOutput2.java
    │                   ├── HTree.java
    │                   ├── HTreeBucket.java
    │                   ├── HTreeDirectory.java
    │                   ├── HTreeSet.java
    │                   ├── LinkedList2.java
    │                   ├── LogicalRowIdManager.java
    │                   ├── LongHashMap.java
    │                   ├── LongPacker.java
    │                   ├── Magic.java
    │                   ├── ObjectInputStream2.java
    │                   ├── ObjectOutputStream2.java
    │                   ├── PageFile.java
    │                   ├── PageIo.java
    │                   ├── PageManager.java
    │                   ├── PageTransactionManager.java
    │                   ├── PhysicalFreeRowIdManager.java
    │                   ├── PhysicalRowIdManager.java
    │                   ├── RecordHeader.java
    │                   ├── RecordListener.java
    │                   ├── SerialClassInfo.java
    │                   ├── Serialization.java
    │                   ├── SerializationHeader.java
    │                   ├── Serializer.java
    │                   ├── Storage.java
    │                   ├── StorageDisk.java
    │                   ├── StorageDiskMapped.java
    │                   ├── StorageMemory.java
    │                   ├── StorageZip.java
    │                   ├── Utils.java
    │                   └── packageXX.html
    └── test/
        └── java/
            └── org/
                └── apache/
                    └── jdbm/
                        ├── BTreeBench.java
                        ├── BTreeKeyCompressionTest.java
                        ├── BTreeLeadingValuePackTest.java
                        ├── BTreeMapNavigable2Test.java
                        ├── BTreeMapNavigableSubMapExclusiveTest.java
                        ├── BTreeMapNavigableSubMapInclusiveTest.java
                        ├── BTreeMapNavigableTest.java
                        ├── BTreeMapTest.java
                        ├── BTreeNodeTest.java
                        ├── BTreeSetTest.java
                        ├── BTreeTest.java
                        ├── ByteArrayComparator.java
                        ├── CompactTest.java
                        ├── ConcurrentBTreeReadTest.java
                        ├── ConcurrentMapInterfaceTest.java
                        ├── DBCacheMRUTest.java
                        ├── DBCacheTest.java
                        ├── DBMakerTest.java
                        ├── DBTest.java
                        ├── DataInputOutputTest.java
                        ├── DefragTest.java
                        ├── FileHeaderTest.java
                        ├── FileLockTest.java
                        ├── HTreeBucketTest.java
                        ├── HTreeDirectoryTest.java
                        ├── HTreeMapTest.java
                        ├── HTreeSetTest.java
                        ├── HTreeTest.java
                        ├── LinkedList2Test.java
                        ├── LogicalRowIdManagerTest.java
                        ├── LongHashMapTest.java
                        ├── LongTreeMap.java
                        ├── MapInterfaceTest.java
                        ├── ObjectOutputStream2Test.java
                        ├── PageFileTest.java
                        ├── PageIoTest.java
                        ├── PageManagerTest.java
                        ├── PageTransactionManagerTest.java
                        ├── PhysicalFreeRowIdManagerTest.java
                        ├── PhysicalRowIdManagerTest.java
                        ├── RecordHeaderTest.java
                        ├── RollbackTest.java
                        ├── SerialClassInfoTest.java
                        ├── Serialization2Bean.java
                        ├── Serialization2Test.java
                        ├── SerializationHeaderTest.java
                        ├── SerializationTest.java
                        ├── Serialized2DerivedBean.java
                        ├── StorageDiskMappedTest.java
                        ├── StorageZipTest.java
                        ├── StreamCorrupted.java
                        ├── TestCaseWithTestFile.java
                        ├── TestInsertPerf.java
                        ├── TestInsertUpdate.java
                        ├── TestIssues.java
                        ├── TestLargeData.java
                        ├── TestLazyRecordsInTree.java
                        ├── TestRollback.java
                        ├── TestStress.java
                        ├── UtilTT.java
                        ├── UtilsTest.java
                        └── junk/
                            ├── HugeData.java
                            ├── MappedBufferGrow.java
                            ├── MappedBufferVersusRaf.java
                            └── RandomInsertLongs.java

================================================
FILE CONTENTS
================================================

================================================
FILE: LICENSE-2.0.html
================================================
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN"
        "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html>
<head>
    <meta http-equiv="Content-Type" content="text/html; charset=iso-8859-1">
    <link rel="stylesheet" href="LICENSE-2.0_fichiers/style.css" type="text/css">
    <meta name="author" content="The Apache Software Foundation">
    <meta name="email" content="apache.AT.apache.DOT.org">
    <title>Apache License, Version 2.0 - The Apache Software Foundation</title>
</head>
<body>
<p align="center">
    Apache License<br>
    Version 2.0, January 2004<br>
    <a href="http://www.apache.org/licenses/">http://www.apache.org/licenses/</a>
</p>

<p>
    TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
</p>

<p><b><a name="definitions">1. Definitions</a></b>.</p>

<p>
    "License" shall mean the terms and conditions for use, reproduction,
    and distribution as defined by Sections 1 through 9 of this document.
</p>

<p>
    "Licensor" shall mean the copyright owner or entity authorized by
    the copyright owner that is granting the License.
</p>

<p>
    "Legal Entity" shall mean the union of the acting entity and all
    other entities that control, are controlled by, or are under common
    control with that entity. For the purposes of this definition,
    "control" means (i) the power, direct or indirect, to cause the
    direction or management of such entity, whether by contract or
    otherwise, or (ii) ownership of fifty percent (50%) or more of the
    outstanding shares, or (iii) beneficial ownership of such entity.
</p>

<p>
    "You" (or "Your") shall mean an individual or Legal Entity
    exercising permissions granted by this License.
</p>

<p>
    "Source" form shall mean the preferred form for making modifications,
    including but not limited to software source code, documentation
    source, and configuration files.
</p>

<p>
    "Object" form shall mean any form resulting from mechanical
    transformation or translation of a Source form, including but
    not limited to compiled object code, generated documentation,
    and conversions to other media types.
</p>

<p>
    "Work" shall mean the work of authorship, whether in Source or
    Object form, made available under the License, as indicated by a
    copyright notice that is included in or attached to the work
    (an example is provided in the Appendix below).
</p>

<p>
    "Derivative Works" shall mean any work, whether in Source or Object
    form, that is based on (or derived from) the Work and for which the
    editorial revisions, annotations, elaborations, or other modifications
    represent, as a whole, an original work of authorship. For the purposes
    of this License, Derivative Works shall not include works that remain
    separable from, or merely link (or bind by name) to the interfaces of,
    the Work and Derivative Works thereof.
</p>

<p>
    "Contribution" shall mean any work of authorship, including
    the original version of the Work and any modifications or additions
    to that Work or Derivative Works thereof, that is intentionally
    submitted to Licensor for inclusion in the Work by the copyright owner
    or by an individual or Legal Entity authorized to submit on behalf of
    the copyright owner. For the purposes of this definition, "submitted"
    means any form of electronic, verbal, or written communication sent
    to the Licensor or its representatives, including but not limited to
    communication on electronic mailing lists, source code control systems,
    and issue tracking systems that are managed by, or on behalf of, the
    Licensor for the purpose of discussing and improving the Work, but
    excluding communication that is conspicuously marked or otherwise
    designated in writing by the copyright owner as "Not a Contribution."
</p>

<p>
    "Contributor" shall mean Licensor and any individual or Legal Entity
    on behalf of whom a Contribution has been received by Licensor and
    subsequently incorporated within the Work.
</p>

<p><b><a name="copyright">2. Grant of Copyright License</a></b>.
    Subject to the terms and conditions of
    this License, each Contributor hereby grants to You a perpetual,
    worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    copyright license to reproduce, prepare Derivative Works of,
    publicly display, publicly perform, sublicense, and distribute the
    Work and such Derivative Works in Source or Object form.
</p>

<p><b><a name="patent">3. Grant of Patent License</a></b>.
    Subject to the terms and conditions of
    this License, each Contributor hereby grants to You a perpetual,
    worldwide, non-exclusive, no-charge, royalty-free, irrevocable
    (except as stated in this section) patent license to make, have made,
    use, offer to sell, sell, import, and otherwise transfer the Work,
    where such license applies only to those patent claims licensable
    by such Contributor that are necessarily infringed by their
    Contribution(s) alone or by combination of their Contribution(s)
    with the Work to which such Contribution(s) was submitted. If You
    institute patent litigation against any entity (including a
    cross-claim or counterclaim in a lawsuit) alleging that the Work
    or a Contribution incorporated within the Work constitutes direct
    or contributory patent infringement, then any patent licenses
    granted to You under this License for that Work shall terminate
    as of the date such litigation is filed.
</p>

<p><b><a name="redistribution">4. Redistribution</a></b>.
    You may reproduce and distribute copies of the
    Work or Derivative Works thereof in any medium, with or without
    modifications, and in Source or Object form, provided that You
    meet the following conditions:
</p>
<ol type="a">
    <li>You must give any other recipients of the Work or
        Derivative Works a copy of this License; and
        <br> <br></li>

    <li>You must cause any modified files to carry prominent notices
        stating that You changed the files; and
        <br> <br></li>

    <li>You must retain, in the Source form of any Derivative Works
        that You distribute, all copyright, patent, trademark, and
        attribution notices from the Source form of the Work,
        excluding those notices that do not pertain to any part of
        the Derivative Works; and
        <br> <br></li>

    <li>If the Work includes a "NOTICE" text file as part of its
        distribution, then any Derivative Works that You distribute must
        include a readable copy of the attribution notices contained
        within such NOTICE file, excluding those notices that do not
        pertain to any part of the Derivative Works, in at least one
        of the following places: within a NOTICE text file distributed
        as part of the Derivative Works; within the Source form or
        documentation, if provided along with the Derivative Works; or,
        within a display generated by the Derivative Works, if and
        wherever such third-party notices normally appear. The contents
        of the NOTICE file are for informational purposes only and
        do not modify the License. You may add Your own attribution
        notices within Derivative Works that You distribute, alongside
        or as an addendum to the NOTICE text from the Work, provided
        that such additional attribution notices cannot be construed
        as modifying the License.
    </li>
</ol>
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.

<p><b><a name="contributions">5. Submission of Contributions</a></b>.
    Unless You explicitly state otherwise,
    any Contribution intentionally submitted for inclusion in the Work
    by You to the Licensor shall be under the terms and conditions of
    this License, without any additional terms or conditions.
    Notwithstanding the above, nothing herein shall supersede or modify
    the terms of any separate license agreement you may have executed
    with Licensor regarding such Contributions.
</p>

<p><b><a name="trademarks">6. Trademarks</a></b>.
    This License does not grant permission to use the trade
    names, trademarks, service marks, or product names of the Licensor,
    except as required for reasonable and customary use in describing the
    origin of the Work and reproducing the content of the NOTICE file.
</p>

<p><b><a name="no-warranty">7. Disclaimer of Warranty</a></b>.
    Unless required by applicable law or
    agreed to in writing, Licensor provides the Work (and each
    Contributor provides its Contributions) on an "AS IS" BASIS,
    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
    implied, including, without limitation, any warranties or conditions
    of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
    PARTICULAR PURPOSE. You are solely responsible for determining the
    appropriateness of using or redistributing the Work and assume any
    risks associated with Your exercise of permissions under this License.
</p>

<p><b><a name="no-liability">8. Limitation of Liability</a></b>.
    In no event and under no legal theory,
    whether in tort (including negligence), contract, or otherwise,
    unless required by applicable law (such as deliberate and grossly
    negligent acts) or agreed to in writing, shall any Contributor be
    liable to You for damages, including any direct, indirect, special,
    incidental, or consequential damages of any character arising as a
    result of this License or out of the use or inability to use the
    Work (including but not limited to damages for loss of goodwill,
    work stoppage, computer failure or malfunction, or any and all
    other commercial damages or losses), even if such Contributor
    has been advised of the possibility of such damages.
</p>

<p><b><a name="additional">9. Accepting Warranty or Additional Liability</a></b>.
    While redistributing
    the Work or Derivative Works thereof, You may choose to offer,
    and charge a fee for, acceptance of support, warranty, indemnity,
    or other liability obligations and/or rights consistent with this
    License. However, in accepting such obligations, You may act only
    on Your own behalf and on Your sole responsibility, not on behalf
    of any other Contributor, and only if You agree to indemnify,
    defend, and hold each Contributor harmless for any liability
    incurred by, or claims asserted against, such Contributor by reason
    of your accepting any such warranty or additional liability.
</p>

<p>
    END OF TERMS AND CONDITIONS
</p>
</body>
</html>

================================================
FILE: README.md
================================================
**NOTE: this project is in maintenance mode (bug fix only), I redirected my effort to JDBM4 which should provide better concurrent scalability**

JDBM provides TreeMap, HashMap and other collections backed up by disk storage.
Now you can handle billions of items without ever running out of memory.
JDBM is probably the fastest and the simplest pure Java database. 

JDBM is tiny (160KB nodeps jar), but packed with features such as transactions, 
instance cache and space efficient serialization.
It also has outstanding performance with 1 million inserts per second and 10 million fetches per second (disk based!!). 
It is tightly optimized and has minimal overhead. 
It scales well from Android phones to multi-terabyte data sets.

JDBM is opensource and free-as-beer under Apache license. 
There is no catch and no strings attached. 

News
====
4th Sep 2012 - JDBM3 alpha4 was released. [Just bugfixes](https://groups.google.com/forum/?fromgroups=#!topic/jdbm/yBB4dLW54Pk)

18th Aug 2012 - First version of JDBM4 is available on [GitHub](https://github.com/jankotek/JDBM4)

30th Apr 2012 - JDBM3 [may soon become part of Apache Foundation](https://groups.google.com/forum/?fromgroups#!topic/jdbm/pb4LWr6qTxM). This will not affect github site, but package may be renamed in a few days (done).

10th Apr 2012 - Alpha3 was just released.  Get [binary jar](https://github.com/downloads/jankotek/JDBM3/JDBM-3.0-alpha3.jar) and [read some notes](http://groups.google.com/group/jdbm/browse_thread/thread/db1f0ed52ce5fb3c)

24th Feb 2012 - Alpha2 released with tons of bugfixes. Get [binary jar](https://github.com/downloads/jankotek/JDBM3/JDBM-3.0-alpha2.jar)

18th Jan 2012 - Alpha1 released, [announcement](http://kotek.net/blog/jdbm_3.0_alpha_1_released) and
[binary jar](https://github.com/downloads/jankotek/JDBM3/JDBM-3.0-alpha-1.jar)

Features
========
* B*Tree with `ConcurrentNavigableMap` interface
  * Very fast for sequential read/write.
  * Small values stored inside tree nodes
  * Small values stored inside tree nodes, large values lazily fetched.
  * Self-balancing, great performance even with 1e12 items.
  * Delta compression on keys
  * Submaps (aka cursors) to view limited collection subsets
  * Custom comparators
* H*Tree with `ConcurrentMap` interface
  * Optimized for random reads/writes
  * Small values stored inside tree nodes, large values lazily fetched.
  * Self-balancing, great performance even with 1e12 items.
* TreeSet and HashSet which uses BTree and HTree without values
* LinkedList, which implements bounded BlockingDeque (not implemented yet)
* Multi-core scalability (currently under testing)
  * Everything is thread safe
  * Reads should scale linearly with number of cores (as soon as it fits into cache)
  * All collections implement `Concurrent` interfaces
  * Some multi-core scalability with `ReentrantReadWriteLock`. 
* Instance cache
  * If data fits into cache, reads are almost as fast as in-memory collections.
  * Minimal overhead, works well even with 16MB heap.
  * Scales well into 64GB RAM heaps.
  * Various yet simple tuning options
* Transactions
  * Single transaction per store, avoids concurrent modification stuff
  * Transactions are ACID (with limits for single concurrent transaction)
  * Option to disable transactions for fast inserts/updates
* Low level key-value store
  * Various options for on-disk store (NIO, RAF, locking...)
  * Write performance not affected by store fragmentation
  * In-memory store option
  * Can read data from zip file with reasonable performance
  * Can read data from classpath resource, database is deployable over Java Web Start    
  * Advanced defragmentation
  * Print store statistics
  * Transparent data encryption
  * Only 9 bytes overhead per record (for example BTree node)
* Space efficient serialization
  * Custom code for most `java.util` and `java.lang` classes. For example Long(0) takes only single byte
  * Very small POJO serialization overhead, typically only 3 bytes per class + 1 byte for each field. 
  * Mimics java serialization, fields can be `transient`, all classes need to implement `Serializable` interface
  * Supports `Externalizable`  
  * Possible to plug your own `Serializer`
* Performance 
  * Blazing fast 1 million inserts / 10 million reads per second (on my 5GHz machine, but you should get 300000 inserts p.s. easily)
  * Tightly optimized code 
  * Uses NIO stuff you read about, but never see in action.
  * Minimal heap usage, prevents `java.lang.OutOfMemoryError: GC overhead limit`
  * Most logic done with primitives or arrays. Minimal stack usage.



Introduction
============
All classes are contained in package `org.apache.jdbm`. There are only two important classes: `DBMaker` is a builder which configures and opens the database. `DB` is the database itself; it opens collections and controls transactions. Collections in JDBM mimic their `java.util` counterparts. TreeMap uses an on-disk ordered auto-balanced B*Tree index, LinkedList is stored as self-referencing entries and so on. Everything should be thread safe (currently under testing).

Maven Dependency
----------------

JDBM is not currently in any Maven repository. TODO: We should have soon custom repo with nightly builds. 

Quick example
-------------

    import org.apache.jdbm.*;

    //Open database using builder pattern. 
    //All options are available with code autocompletion.
    DB db = DBMaker.openFile("test")  
        .deleteFilesAfterClose()
        .enableEncryption("password",false)
        .make();
  
    //open a collection, TreeMap has better performance than HashMap
    SortedMap<Integer,String> map = db.createTreeMap("collectionName");

    map.put(1,"one");
    map.put(2,"two");
    //map.keySet() is now [1,2] even before commit

    db.commit();  //persist changes into disk

    map.put(3,"three");
    //map.keySet() is now [1,2,3] 
    db.rollback(); //revert recent changes
    //map.keySet() is now [1,2] 

    db.close();  

A few quick tricks
------------------
* Disabling transaction increases write performance 6x. Do it by `DBMaker.disableTransactions()`. Do not forget to close store correctly in this case!
* When transactions are enabled all uncommitted instances are stored in memory. Make sure you commit on time. It is the most common cause of `OutOfMemoryError`.
* JDBM does not try to reclaim unused space after massive delete, you must call `DB.defrag(false)` yourself. 
* TreeMap usually has better performance than HashMap. 
* JDBM uses an instance cache with limited size by default. If you have enough memory and a large store, use the unbounded cache: `DBMaker.enableHardCache()`
* JDBM is optimized for small size records. Sizes: 16 bytes is recommended, 32KB is reasonable maximum, 8MB is hard limit.
* JDBM scales well up to 1e12 records. A batch insert overnight creates a multi-terabyte store.

DBMaker
-------

TODO

DB
--

TODO

Collections
-----------

TODO
 
Instance cache
--------------

JDBM caches created instances similar way as Hibernate or other ORM frameworks. This greatly reduces serialization overhead and speedups database. There are five cache types, each configurable with method on `DBMaker` builder:

*  **Most Recently Used** (MRU) cache. It is fixed size and stores the newest entries. This cache is on by default. You can configure its size, default size is 2048. This cache has the lowest GC overhead and may be surprisingly faster than other cache types. 

*  **No cache**. You may disable instance cache by using `DBMaker.disableCache()`

*  **Hard reference cache**. All instances fetched by JDBM are stored in cache until released. Good with large memory heaps. `Hard` cache is recommended over `soft` and `weak` as it has smaller overhead.  Use `DBMaker.enableHardCache()` to enable it.

*  **Weak reference cache**. Instances are referenced using `WeakReference`. When item is no longer referenced by other instances, it can be discarded by GC. Use `DBMaker.enableWeakCache()` to enable it.

*  **Soft reference cache**. Instances are referenced using `SoftReference`. Similar to `WeakReference` but holds longer, until systems starts running out of memory. Use `DBMaker.enableSoftCache()` to enable it.


With the Weak/Soft/Hard cache JDBM starts a background cleanup thread. It also checks memory usage every 10 seconds; if free memory is below 25%, it clears the cache. Our tests show that GC is not fast enough to prevent `OutOfMemoryError`. This may be disabled with `DBMaker.disableCacheAutoClear()`.

You may clear the cache manually using `DB.clearCache()`. This is useful after a massive delete, or when you are moving from one type of data to another. 

Transactions
------------

JDBM supports single transaction per store. It does not have multiple concurrent transactions with row/table locks, pessimistic locking and similar stuff. This trade off greatly simplifies design and speeds up operations. Transactions are still 'ACID' but in limited way.

The transaction implementation is sound and solid. Uncommitted data are stored in memory, then during commit appended to the end of the transaction log file. It is safe, as an append operation hardly ever corrupts a file. After the commit is finished, data are replayed from the transaction log file into the main storage file. If the user calls rollback, the transaction log file is discarded.

Keeping transaction log file brings some overhead. It is possible to disable transaction and write changes directly into main storage file. It makes inserts and updates about 6x faster. In this case no effort is made to protect file from corruption, all is sacrificed for maximal speed. It is absolutely necessary to properly close storage before exit. You may disable transactions by using `DBMaker.disableTransactions()`.

Uncommitted instances are stored in memory and flushed to disk during commit. So with large transactions you may run out of memory easily. With disabled transactions data are stored in a 10 MB memory buffer and flushed to the main storage file when the buffer is filled.


Serialization
-------------

JDBM has its own space-efficient serialization which tries to mimic standard implementation. All classes must implement `Serializable` interface. You may exclude field from serialization by `transient` keyword. Our serialization also handles cyclic references and some other advanced stuff. You may use your own binary format with `Externalizable` interface or custom `Serializer`.

JDBM has custom serialization code for most classes in `java.lang` and `java.util` packages. For `Date` JDBM writes only 9 bytes: 1-byte-long serialization header and 8-byte-long timestamp. For `true`, `String("")` or `Long(3)` JDBM writes only single-byte serialization header. For array list and other collections JDBM writes serialization header, packed size and data. Custom serializers have maximal space efficiency and low overhead.

Standard java serialization stores class structure data (field names, types...) with record data. This generates huge overhead which multiplies with number of records. JDBM serialization stores class structure data in single space and record data only contains reference. So space overhead for POJOs is typically only 3 bytes per class + 1 byte for each field. 

Our serialization is designed to be very fast on small chunks of data (a few POJOs glued together). With a couple of thousand nodes in the object tree it becomes slow (N^2 scalability). The maximal record size in JDBM is 8 MB, so it is good practice to store only small keys/values in the database. You should always use the filesystem for data larger than 500KB.  

Defragmentation
---------------

The store gets fragmented. JDBM is well designed, so this does not slow down write/update/delete operations. But fragmentation slows down read operations as more data needs to be read from disk. JDBM does not do any sort of magic to reclaim unused data. It relies on the user to call `DB.defrag` periodically or after massive updates/deletes/inserts. 

Defrag can be called at runtime, but the `DB.defrag` methods block other reads/writes until they finish. Defrag basically copies data from one store to a second store. Then it deletes the first store and renames the second.  

Defragmentation has two modes controlled by the `DB.defrag(boolean fullDefrag)` parameter:

**Quick defrag** is designed to be as fast as possible. It only reclaims unused space (compacts the store), but does not reorganize data inside the store. It copies all data from one store to another, without empty spaces between records. It is very fast, limited only by disk sequential write speed. Call it by `DB.defrag(false)`

**Full defrag** is designed to make store as fast as possible. It reorganizes data layout, so nodes from single collection are stored close to each other. This makes future reads from store faster as less data needs to be read. Full defrag is much slower than quick defrag, as it traverses and copies all collections unsequentially. 


Troubleshooting
===============

Please report bug into Github error tracker. There is [mail-group](mailto:jdbm@googlegroups.com) if you would have questions, you may also browse [group archive](http://groups.google.com/group/jdbm).

JDBM uses chained exceptions so the user does not have to write try/catch blocks. IOException is usually wrapped in IOError, which is unchecked. So please always check the first exception.

**OutOfMemoryError**
JDBM keeps uncommited data in memory, so you may need to commit more often. If your memory is limited use MRU cache (on by default). You may increase heap size by starting JVM with extra parameter `-Xmx500MB`.

**OutOfMemoryError: GC overhead limit exceeded**
Your app is creating new object instances faster than GC can collect them. When using the Soft/Weak cache, use the Hard cache to reduce GC overhead (it is auto-cleared when free memory is low). There is a JVM parameter to disable this assertion.

**File locking, OverlappingFileLockException, some IOError**
You are trying to open a file already opened by another JDBM. Make sure that you `DB.close()` the store correctly; the operating system may leave a lock after the JVM is terminated. You may try `DBMaker.useRandomAccessFile()` which is slower, but does not use such aggressive locking. In read-only mode you can also open the store multiple times. You may also disable file locks completely by `DB.disableFileLock()` (at your own risk of course)

**InternalError, Error, AssertionFailedError, IllegalArgumentException, StackOverflowError and so on**
There was a problem in JDBM. It is possible that the file store was corrupted due to an internal error or disk failure. Disabling the cache by `DBMaker.disableCache()` may work around the problem. Please submit a bug report to github. 

---
Special thanks to EJ-Technologies for donating us excellent
[JProfiler](http://www.ej-technologies.com/products/overview.html)







================================================
FILE: pom.xml
================================================
<project xmlns="http://maven.apache.org/POM/4.0.0"
         xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
         xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">

    <modelVersion>4.0.0</modelVersion>
    <groupId>org.apache.jdbm</groupId>
    <artifactId>jdbm</artifactId>
    <version>3.0-SNAPSHOT</version>


    <developers>
        <developer>
            <name>Jan Kotek</name>
            <id>jan</id>
        </developer>
    </developers>

    <licenses>
        <license>
            <name>Apache 2</name>
        </license>
    </licenses>

    <properties>
        <!-- single source-encoding property, reused by the compiler and resources plugins below -->
        <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
    </properties>

    <dependencies>
        <!-- JUnit is the only dependency and is used solely by the test suite -->
        <dependency>
            <groupId>junit</groupId>
            <artifactId>junit</artifactId>
            <version>4.8.2</version>
            <type>jar</type>
            <scope>test</scope>
            <optional>false</optional>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <!-- compile for Java 5 so the library stays usable on old JVMs -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>2.3.2</version>
                <configuration>
                    <target>1.5</target>
                    <source>1.5</source>
                    <encoding>${project.build.sourceEncoding}</encoding>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-resources-plugin</artifactId>
                <version>2.5</version>
                <configuration>
                    <encoding>${project.build.sourceEncoding}</encoding>
                </configuration>
            </plugin>

            <!-- attach source and test-source jars to the packaged artifacts -->
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-source-plugin</artifactId>
                <version>2.1.2</version>
                <executions>
                    <execution>
                        <id>attach-sources</id>
                        <phase>package</phase>
                        <goals>
                            <goal>jar</goal>
                            <goal>test-jar</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
<!-- uncomment to enable proguard (strip down jar)-->
<!--
            <plugin>
                <groupId>com.pyx4me</groupId>
                <artifactId>proguard-maven-plugin</artifactId>
                <executions>
                    <execution>
                        <phase>package</phase>
                        <goals>
                            <goal>proguard</goal>
                        </goals>
                    </execution>
                </executions>
                <configuration>
                    <options>
                        <option>-allowaccessmodification</option>
                        <option>-keep public class * { public *; public static *; }</option>
                    </options>
                    <libs>
                        <lib>${java.home}/lib/rt.jar</lib>
                        <lib>${java.home}/lib/jce.jar</lib>
                    </libs>
                </configuration>
            </plugin>
-->
        </plugins>
    </build>

</project>


================================================
FILE: src/main/java/org/apache/jdbm/BTree.java
================================================
/*******************************************************************************
 * Copyright 2010 Cees De Groot, Alex Boisvert, Jan Kotek
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/

package org.apache.jdbm;

import java.io.*;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.List;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;

/**
 * B+Tree persistent indexing data structure.  B+Trees are optimized for
 * block-based, random I/O storage because they store multiple keys on
 * one tree node (called <code>BTreeNode</code>).  In addition, the leaf nodes
 * directly contain (inline) small values associated with the keys, allowing a
 * single (or sequential) disk read of all the values on the node.
 * <p/>
 * B+Trees are n-ary, yielding log(N) search cost.  They are self-balancing,
 * preventing search performance degradation when the size of the tree grows.
 * <p/>
 * BTree stores its keys sorted. By default JDBM expects key to implement
 * <code>Comparable</code> interface but user may supply its own <code>Comparator</code>
 * at BTree creation time. Comparator is serialized and stored as part of BTree.
 * <p/>
 * The B+Tree allows traversing the keys in forward and reverse order using a
 * TupleBrowser obtained from the browse() methods. But it is better to use
 * <code>BTreeMap</code> wrapper which implements <code>SortedMap</code> interface
 * <p/>
 * This implementation does not directly support duplicate keys. It is
 * possible to handle duplicates by grouping values using an ArrayList as value.
 * This scenario is supported by JDBM serialization so there is no big performance penalty.
 * <p/>
 * There is no limit on key size or value size, but it is recommended to keep
 * keys as small as possible to reduce disk I/O. If serialized value exceeds 32 bytes,
 * it is stored in separate record and tree contains only recid reference to it.
 * BTree uses delta compression for its keys.
 *
 *
 * @author Alex Boisvert
 * @author Jan Kotek
 */
class BTree<K, V> {


    // enables verbose console output for debugging; compiled away when false
    private static final boolean DEBUG = false;


    /**
     * Default node size (number of entries per node)
     */
    public static final int DEFAULT_SIZE = 32; //TODO test optimal size, it has serious impact on sequential write and read


    /**
     * Record manager used to persist changes in BTreeNodes
     */
    protected transient DBAbstract _db;


    /**
     * This BTree's record ID in the DB.
     */
    private transient long _recid;


    /**
     * Comparator used to index entries (optional; natural ordering when null)
     */
    protected Comparator<K> _comparator;


    /**
     * Serializer used to serialize index keys (optional)
     */
    protected Serializer<K> keySerializer;


    /**
     * Serializer used to serialize index values (optional)
     */
    protected Serializer<V> valueSerializer;

    /**
     * indicates if values should be loaded during deserialization, set to false during defragmentation
     */
    boolean loadValues = true;

    /** if false map contains only keys, used for set*/
    boolean hasValues = true;

    /**
     * The number of structural modifications to the tree for fail fast iterators. This value is just for runtime, it is not persisted
     */
    transient int modCount = 0;

    /**
     * cached instance of an insert result, so we do not have to allocate new object on each insert
     */
    protected BTreeNode.InsertResult<K, V> insertResultReuse; //TODO investigate performance impact of removing this


    /** @return serializer used for index keys, or null when default serialization applies */
    public Serializer<K> getKeySerializer() {
        return keySerializer;
    }




    /** @return serializer used for index values, or null when default serialization applies */
    public Serializer<V> getValueSerializer() {
        return valueSerializer;
    }


    /**
     * Height of the B+Tree.  This is the number of BTreeNodes you have to traverse
     * to get to a leaf BTreeNode, starting from the root.
     */
    private int _height;


    /**
     * Recid of the root BTreeNode (0 when the tree is empty)
     */
    private transient long _root;


    /**
     * Total number of entries in the BTree
     */
    protected volatile long _entries;


    /**
     * Serializer used for BTreeNodes of this tree
     */
    private transient BTreeNode<K, V> _nodeSerializer = new BTreeNode();
    {
        // instance initializer: wire the node serializer back to this tree
        _nodeSerializer._btree = this;
    }


    /**
     * Listeners which are notified about changes in records
     */
    protected RecordListener[] recordListeners = new RecordListener[0];

    /** guards structural reads (read lock) and modifications (write lock) of this tree */
    final protected ReadWriteLock lock = new ReentrantReadWriteLock();

    /**
     * No-argument constructor used by serialization.
     */
    public BTree() {
        // empty
    }




    /**
     * Create a new persistent BTree using the natural ordering of keys
     * and default serializers.
     */
    @SuppressWarnings("unchecked")
    public static <K extends Comparable, V> BTree<K, V> createInstance(DBAbstract db)
            throws IOException {
        return createInstance(db, null, null, null,true);
    }


    /**
     * Create a new persistent BTree.
     *
     * @param db              record manager the tree is stored in (required)
     * @param comparator      key ordering, or null for natural ordering
     * @param keySerializer   custom key serializer, or null for default serialization
     * @param valueSerializer custom value serializer, or null for default serialization
     * @param hasValues       false when the tree backs a set and stores keys only
     */
    public static <K, V> BTree<K, V> createInstance(DBAbstract db,
                                                    Comparator<K> comparator,
                                                    Serializer<K> keySerializer,
                                                    Serializer<V> valueSerializer,
                                                    boolean hasValues)
            throws IOException {
        if (db == null) {
            throw new IllegalArgumentException("Argument 'db' is null");
        }

        final BTree<K, V> tree = new BTree<K, V>();
        tree._db = db;
        tree._comparator = comparator;
        tree.keySerializer = keySerializer;
        tree.valueSerializer = valueSerializer;
        tree.hasValues = hasValues;
        //persist the new (empty) tree and remember the recid it was stored under
        tree._recid = db.insert(tree, tree.getRecordManager().defaultSerializer(), false);

        return tree;
    }


    /**
     * Load a persistent BTree.
     *
     * @param db    DB used to store the persistent btree
     * @param recid Record id of the BTree
     */
    @SuppressWarnings("unchecked")
    public static <K, V> BTree<K, V> load(DBAbstract db, long recid)
            throws IOException {
        BTree<K, V> btree = (BTree<K, V>) db.fetch(recid);
        // transient wiring is not part of the serialized form, restore it here
        btree._recid = recid;
        btree._db = db;
        btree._nodeSerializer = new BTreeNode<K, V>();
        btree._nodeSerializer._btree = btree;
        return btree;
    }

    /**
     * Get the {@link ReadWriteLock} associated with this BTree.
     * This should be used with browsing operations to ensure
     * consistency.
     *
     * @return the lock guarding this tree's structure
     */
    public ReadWriteLock getLock() {
        return lock;
    }

    /**
     * Insert an entry in the BTree.
     * <p/>
     * The BTree cannot store duplicate entries.  An existing entry can be
     * replaced using the <code>replace</code> flag.   If an entry with the
     * same key already exists in the BTree, its value is returned.
     *
     * @param key     Insert key
     * @param value   Insert value
     * @param replace Set to true to replace an existing key-value pair.
     * @return Existing value, if any.
     * @throws IllegalArgumentException if key or value is null
     */
    public V insert(final K key, final V value,
                    final boolean replace)
            throws IOException {
        if (key == null) {
            throw new IllegalArgumentException("Argument 'key' is null");
        }
        if (value == null) {
            throw new IllegalArgumentException("Argument 'value' is null");
        }
        try {
            lock.writeLock().lock();
            BTreeNode<K, V> rootNode = getRoot();

            if (rootNode == null) {
                // BTree is currently empty, create a new root BTreeNode
                if (DEBUG) {
                    System.out.println("BTree.insert() new root BTreeNode");
                }
                rootNode = new BTreeNode<K, V>(this, key, value);
                _root = rootNode._recid;
                _height = 1;
                _entries = 1;
                _db.update(_recid, this);
                modCount++;
                //notify listeners
                for (RecordListener<K, V> l : recordListeners) {
                    l.recordInserted(key, value);
                }
                return null;
            } else {
                BTreeNode.InsertResult<K, V> insert = rootNode.insert(_height, key, value, replace);
                boolean dirty = false;
                if (insert._overflow != null) {
                    // current root node overflowed, we replace with a new root node
                    if (DEBUG) {
                        System.out.println("BTreeNode.insert() replace root BTreeNode due to overflow");
                    }
                    rootNode = new BTreeNode<K, V>(this, rootNode, insert._overflow);
                    _root = rootNode._recid;
                    _height += 1;
                    dirty = true;
                }
                if (insert._existing == null) {
                    // a brand new entry was added (not a replacement)
                    _entries++;
                    modCount++;
                    dirty = true;
                }
                if (dirty) {
                    // persist changed tree metadata (root recid, height, entry count)
                    _db.update(_recid, this);
                }
                //notify listeners
                for (RecordListener<K, V> l : recordListeners) {
                    if (insert._existing == null)
                        l.recordInserted(key, value);
                    else
                        l.recordUpdated(key, insert._existing, value);
                }

                // insert might have returned an existing value
                V ret = insert._existing;
                //zero out tuple and put it for reuse
                insert._existing = null;
                insert._overflow = null;
                this.insertResultReuse = insert;
                return ret;
            }
        } finally {
            lock.writeLock().unlock();
        }
    }


    /**
     * Remove an entry with the given key from the BTree.
     *
     * @param key Removal key
     * @return Value associated with the key, or null if no entry with given
     *         key existed in the BTree.
     * @throws IllegalArgumentException if key is null
     */
    public V remove(K key)
            throws IOException {
        if (key == null) {
            throw new IllegalArgumentException("Argument 'key' is null");
        }
        try {
            lock.writeLock().lock();
            BTreeNode<K, V> rootNode = getRoot();
            if (rootNode == null) {
                return null;
            }
            boolean dirty = false;
            BTreeNode.RemoveResult<K, V> remove = rootNode.remove(_height, key);
            if (remove._underflow && rootNode.isEmpty()) {
                // root node became empty: shrink the tree by one level
                _height -= 1;
                dirty = true;

                _db.delete(_root);
                if (_height == 0) {
                    _root = 0;
                } else {
                    _root = rootNode.loadLastChildNode()._recid;
                }
            }
            if (remove._value != null) {
                // an entry was actually removed
                _entries--;
                modCount++;
                dirty = true;
            }
            if (dirty) {
                // persist changed tree metadata (root recid, height, entry count)
                _db.update(_recid, this);
            }
            if (remove._value != null)
                for (RecordListener<K, V> l : recordListeners)
                    l.recordRemoved(key, remove._value);
            return remove._value;
        } finally {
            lock.writeLock().unlock();
        }
    }


    /**
     * Find the value associated with the given key.
     *
     * @param key Lookup key.
     * @return Value associated with the key, or null if not found.
     * @throws IllegalArgumentException if key is null
     */
    public V get(K key)
            throws IOException {
        if (key == null) {
            throw new IllegalArgumentException("Argument 'key' is null");
        }
        // acquire the lock BEFORE entering try: if lock() itself failed inside
        // the try block, the finally clause would unlock a lock we never held
        lock.readLock().lock();
        try {
            BTreeNode<K, V> rootNode = getRoot();
            if (rootNode == null) {
                // empty tree contains no keys
                return null;
            }

            return rootNode.findValue(_height, key);
        } finally {
            lock.readLock().unlock();
        }
    }


    /**
     * Find the value associated with the given key, or the entry immediately
     * following this key in the ordered BTree.
     *
     * @param key Lookup key.
     * @return Value associated with the key, or a greater entry, or null if no
     *         greater entry was found.
     */
    public BTreeTuple<K, V> findGreaterOrEqual(K key)
            throws IOException {
        if (key == null) {
            // there can't be a key greater than or equal to "null"
            // because null is considered an infinite key.
            return null;
        }

        final BTreeTuple<K, V> tuple = new BTreeTuple<K, V>(null, null);
        final BTreeTupleBrowser<K, V> browser = browse(key,true);
        //getNext fills the tuple with the first entry >= key, if one exists
        return browser.getNext(tuple) ? tuple : null;
    }


    /**
     * Get a browser initially positioned at the beginning of the BTree.
     * <p><b>
     * WARNING: If you make structural modifications to the BTree during
     * browsing, you will get inconsistent browsing results.
     * </b>
     *
     * @return Browser positioned at the beginning of the BTree.
     */
    @SuppressWarnings("unchecked")
    public BTreeTupleBrowser<K, V> browse()
            throws IOException {
        try {
            lock.readLock().lock();
            BTreeNode<K, V> rootNode = getRoot();
            if (rootNode == null) {
                // empty tree: return the shared browser with no elements
                return EMPTY_BROWSER;
            }
            return rootNode.findFirst();
        } finally {
            lock.readLock().unlock();
        }
    }


    /**
     * Get a browser initially positioned just before the given key.
     * <p><b>
     * WARNING: If you make structural modifications to the BTree during
     * browsing, you will get inconsistent browsing results.
     * </b>
     *
     * @param key Key used to position the browser.  If null, the browser
     *            will be positioned after the last entry of the BTree.
     *            (Null is considered to be an "infinite" key)
     * @param inclusive if true, an entry equal to the key is returned first
     *            (when present)
     * @return Browser positioned just before the given key.
     */
    @SuppressWarnings("unchecked")
    public BTreeTupleBrowser<K, V> browse(final K key, final boolean inclusive)
            throws IOException {
        try {
            lock.readLock().lock();
            BTreeNode<K, V> rootNode = getRoot();
            if (rootNode == null) {
                // empty tree: return the shared browser with no elements
                return EMPTY_BROWSER;
            }
            BTreeTupleBrowser<K, V> browser = rootNode.find(_height, key, inclusive);
            return browser;
        } finally {
            lock.readLock().unlock();
        }
    }


    /**
     * Return the persistent record identifier of the BTree.
     *
     * @return recid under which this tree's header record is stored
     */
    public long getRecid() {
        return _recid;
    }


    /**
     * Return the root BTreeNode, or null if it doesn't exist.
     */
    BTreeNode<K, V> getRoot()
            throws IOException {
        if (_root == 0) {
            // recid 0 marks an empty tree
            return null;
        }
        BTreeNode<K, V> root = _db.fetch(_root, _nodeSerializer);
        if (root != null) {
            // restore transient back-references that are not stored with the node
            root._recid = _root;
            root._btree = this;
        }
        return root;
    }


    /**
     * Deserializes the tree header; field order must mirror
     * {@link #writeExternal(DataOutput)} exactly.
     */
    static BTree readExternal(DataInput in, Serialization ser)
            throws IOException, ClassNotFoundException {
        BTree tree = new BTree();
        tree._db = ser.db;
        tree._height = in.readInt();
        tree._recid = in.readLong();
        tree._root = in.readLong();
        tree._entries = in.readLong();
        tree.hasValues = in.readBoolean();
        tree._comparator = (Comparator) ser.deserialize(in);
        tree.keySerializer = (Serializer) ser.deserialize(in);
        tree.valueSerializer = (Serializer) ser.deserialize(in);
        return tree;
    }


    /**
     * Serializes the tree header; field order must mirror
     * {@link #readExternal} exactly.
     */
    public void writeExternal(DataOutput out)
            throws IOException {
        out.writeInt(_height);
        out.writeLong(_recid);
        out.writeLong(_root);
        out.writeLong(_entries);
        out.writeBoolean(hasValues);
        _db.defaultSerializer().serialize(out, _comparator);
        _db.defaultSerializer().serialize(out, keySerializer);
        _db.defaultSerializer().serialize(out, valueSerializer);
    }

    /**
     * Copies a tree from one db to another, defragmenting it along the way.
     *
     * @param recid recid of the BTree header record in the source store
     * @param r1 source store
     * @param r2 target store
     * @throws IOException
     */
    public static void defrag(long recid, DBStore r1, DBStore r2) throws IOException {
        try {
            byte[] data = r1.fetchRaw(recid);
            // copy the tree header record into the target under the same recid
            r2.forceInsert(recid, data);
            DataInput in = new DataInputOutput(data);
            BTree t = (BTree) r1.defaultSerializer().deserialize(in);
            t.loadValues = false;
            t._db = r1;
            t._nodeSerializer = new BTreeNode(t, false);


            BTreeNode p = t.getRoot();
            if (p != null) {
                // copy the raw root node, then let it recursively copy the rest
                r2.forceInsert(t._root, r1.fetchRaw(t._root));
                p.defrag(r1, r2);
            }

        } catch (ClassNotFoundException e) {
            throw new IOError(e);
        }
    }


    /**
     * Browser returning no element. Shared immutable instance returned by
     * browse methods when the tree is empty.
     */
    private static final BTreeTupleBrowser EMPTY_BROWSER = new BTreeTupleBrowser() {

        public boolean getNext(BTreeTuple tuple) {
            return false;
        }

        public boolean getPrevious(BTreeTuple tuple) {
            return false;
        }

        public void remove(Object key) {
            // nothing can ever be removed through an empty browser
            throw new IndexOutOfBoundsException();
        }
    };


    /**
     * Register a RecordListener which is notified about record changes.
     *
     * @param listener listener to register
     */
    public void addRecordListener(RecordListener<K, V> listener) {
        //grow the array by one and append the new listener at the end
        final RecordListener[] extended = new RecordListener[recordListeners.length + 1];
        System.arraycopy(recordListeners, 0, extended, 0, recordListeners.length);
        extended[extended.length - 1] = listener;
        recordListeners = extended;
    }

    /**
     * Unregister a RecordListener which is notified about record changes.
     * <p>
     * Fix: the previous implementation called {@code remove()} on the
     * fixed-size list returned by {@link Arrays#asList}, which always throws
     * {@link UnsupportedOperationException}, and sized the result array
     * incorrectly ({@code new RecordListener[1]} even when empty).
     *
     * @param listener listener to unregister; no-op when it was not registered
     */
    public void removeRecordListener(RecordListener<K, V> listener) {
        List<RecordListener> l = new ArrayList<RecordListener>(Arrays.asList(recordListeners));
        l.remove(listener);
        recordListeners = l.toArray(new RecordListener[l.size()]);
    }


    /** @return underlying record manager (DB) used to persist this tree */
    public DBAbstract getRecordManager() {
        return _db;
    }


    /** @return comparator used to order keys, or null when natural ordering is used */
    public Comparator<K> getComparator() {
        return _comparator;
    }

    /**
     * Deletes all BTreeNodes in this BTree
     * <p>
     * NOTE(review): this resets {@code _entries} but does not set {@code _root}
     * to 0 nor persist the header via {@code _db.update} — presumably
     * {@code rootNode.delete()} takes care of that; verify against
     * BTreeNode.delete().
     */
    public void clear()
            throws IOException {
        try {
            lock.writeLock().lock();
            BTreeNode<K, V> rootNode = getRoot();
            if (rootNode != null)
                rootNode.delete();
            _entries = 0;
            modCount++;
        } finally {
            lock.writeLock().unlock();
        }
    }

    /**
     * Used for debugging and testing only.  Populates the 'out' list with
     * the recids of all child nodes in the BTree.
     *
     * @param out list to which recids are appended
     * @throws IOException
     */
    void dumpChildNodeRecIDs(List<Long> out) throws IOException {
        BTreeNode<K, V> root = getRoot();
        if (root != null) {
            out.add(root._recid);
            root.dumpChildNodeRecIDs(out, _height);
        }
    }

    /** @return false when this tree stores only keys (i.e. it backs a set, not a map) */
    public boolean hasValues() {
        return hasValues;
    }

    /**
     * Browser to traverse a collection of tuples.  The browser allows for
     * forward and reverse order traversal.
     */
    static interface BTreeTupleBrowser<K, V> {

        /**
         * Get the next tuple.
         *
         * @param tuple Tuple into which values are copied.
         * @return True if values have been copied in tuple, or false if there is no next tuple.
         */
        boolean getNext(BTree.BTreeTuple<K, V> tuple) throws IOException;

        /**
         * Get the previous tuple.
         *
         * @param tuple Tuple into which values are copied.
         * @return True if values have been copied in tuple, or false if there is no previous tuple.
         */
        boolean getPrevious(BTree.BTreeTuple<K, V> tuple) throws IOException;

        /**
         * Remove an entry with given key, and increase the browser's expectedModCount.
         * This method is here to support 'ConcurrentModificationException' on Map interface.
         *
         * @param key key of the entry to remove
         */
        void remove(K key) throws IOException;

    }

    /**
     * Tuple consisting of a key-value pair. Browsers copy values into an
     * existing instance (see {@link BTreeTupleBrowser#getNext}) so the same
     * tuple can be reused across iterations.
     */
    static final class BTreeTuple<K, V> {

        K key;

        V value;

        BTreeTuple() {
            // empty
        }

        BTreeTuple(K key, V value) {
            this.key = key;
            this.value = value;
        }

    }



}



================================================
FILE: src/main/java/org/apache/jdbm/BTreeLazyRecord.java
================================================
package org.apache.jdbm;

import java.io.*;

/**
 * A record lazily loaded from store.
 * This is used in BTree/HTree to store big records outside of index tree
 *
 * @author Jan Kotek
 */
class BTreeLazyRecord<E> {

    private E value = null;
    private DBAbstract db;
    private Serializer<E> serializer;
    final long recid;

    BTreeLazyRecord(DBAbstract db, long recid, Serializer<E> serializer) {
        this.db = db;
        this.recid = recid;
        this.serializer = serializer;
    }


    /** Fetches the record from store on first access and caches it afterwards. */
    E get() {
        if (value == null) {
            try {
                value = db.fetch(recid, serializer);
            } catch (IOException e) {
                throw new IOError(e);
            }
        }
        return value;
    }

    /** Deletes the backing record and drops all references held by this instance. */
    void delete() {
        try {
            db.delete(recid);
        } catch (IOException e) {
            throw new IOError(e);
        }
        value = null;
        serializer = null;
        db = null;
    }

    /**
     * Serializer used to insert already serialized data into store
     */
    static final Serializer FAKE_SERIALIZER = new Serializer() {

        public void serialize(DataOutput out, Object obj) throws IOException {
            //obj is an already serialized byte[]; write it through unchanged
            out.write((byte[]) obj);
        }

        public Object deserialize(DataInput in) throws IOException, ClassNotFoundException {
            throw new UnsupportedOperationException();
        }
    };


    /**
     * Deserialize a value directly on top of the shared buffer without copying
     * it out first. Verifies the serializer consumed no more than
     * {@code expectedSize} bytes and skips any remainder so the stream stays
     * positioned at the start of the next record.
     */
    static Object fastDeser(DataInputOutput in, Serializer serializer, int expectedSize) throws IOException, ClassNotFoundException {
        final int availableBefore = in.available();
        if (availableBefore == 0)
            throw new InternalError(); //backed by a byte[] buffer, so bytes should always be available
        final Object result = serializer.deserialize(in);
        final int consumed = availableBefore - in.available();
        //fail if the serializer read past this record into the next one
        if (consumed > expectedSize)
            throw new IOException("Serializer readed more bytes than is record size.");
        //serializer may legally consume fewer bytes; skip ahead to the record end
        for (int remaining = expectedSize - consumed; remaining > 0; remaining--)
            in.readUnsignedByte();
        return result;
    }


    /**
     * if value in tree is serialized in more bytes, it is stored as separate record outside of tree
     * This value must be always smaller than 250
     */
    static final int MAX_INTREE_RECORD_SIZE = 32;

    static {
        //enforce the documented "< 250" limit at class-load time
        if (MAX_INTREE_RECORD_SIZE > 250) throw new Error();
    }

    /** marker meaning the stored value is null */
    static final int NULL = 255;
    /** marker meaning the value lives in a separate record outside the tree */
    static final int LAZY_RECORD = 254;

}


================================================
FILE: src/main/java/org/apache/jdbm/BTreeMap.java
================================================
/*******************************************************************************
 * Copyright 2010 Cees De Groot, Alex Boisvert, Jan Kotek
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package org.apache.jdbm;

import java.io.IOError;
import java.io.IOException;
import java.util.*;
import java.util.concurrent.ConcurrentNavigableMap;


/**
 * Wrapper for <code>BTree</code> which implements <code>ConcurrentNavigableMap</code> interface
 *
 * @param <K> key type
 * @param <V> value type
 *
 * @author Jan Kotek
 */
class BTreeMap<K, V> extends AbstractMap<K, V> implements ConcurrentNavigableMap<K, V> {

    protected BTree<K, V> tree;

    /** lower bound of this submap view; null means unbounded below */
    protected final K fromKey;

    /** upper bound of this submap view; null means unbounded above */
    protected final K toKey;

    /** when true every mutating operation throws UnsupportedOperationException */
    protected final boolean readonly;

    /** lazily created key-set view, see navigableKeySet() */
    protected NavigableSet<K> keySet2;
    private final boolean toInclusive;
    private final boolean fromInclusive;

    /** Creates an unbounded view over the whole tree. */
    public BTreeMap(BTree<K, V> tree, boolean readonly) {
        this(tree, readonly, null, false, null, false);
    }

    /**
     * Creates a possibly bounded view over the tree; used by
     * headMap/tailMap/subMap. A null fromKey/toKey leaves that side unbounded.
     */
    protected BTreeMap(BTree<K, V> tree, boolean readonly, K fromKey, boolean fromInclusive, K toKey, boolean toInclusive) {
        this.tree = tree;
        this.fromKey = fromKey;
        this.fromInclusive = fromInclusive;
        this.toKey = toKey;
        this.toInclusive = toInclusive;
        this.readonly = readonly;
    }

    @Override
    public Set<Entry<K, V>> entrySet() {
        return _entrySet;
    }



    /** Live entry-set view; all operations delegate to the underlying BTree. */
    private final Set<java.util.Map.Entry<K, V>> _entrySet = new AbstractSet<Entry<K, V>>() {

        // entry whose setValue() writes through to the map
        protected Entry<K, V> newEntry(K k, V v) {
            return new SimpleEntry<K, V>(k, v) {
                private static final long serialVersionUID = 978651696969194154L;

                public V setValue(V arg0) {
                    BTreeMap.this.put(getKey(), arg0);
                    return super.setValue(arg0);
                }

            };
        }

        public boolean add(java.util.Map.Entry<K, V> e) {
            if (readonly)
                throw new UnsupportedOperationException("readonly");

            try {
                if (e.getKey() == null)
                    throw new NullPointerException("Can not add null key");
                if (!inBounds(e.getKey()))
                    throw new IllegalArgumentException("key outside of bounds");
                // true only when no previous value existed for the key
                return tree.insert(e.getKey(), e.getValue(), true) == null;
            } catch (IOException e1) {
                throw new IOError(e1);
            }
        }

        @SuppressWarnings("unchecked")
        public boolean contains(Object o) {

            if (o instanceof Entry) {
                Entry<K, V> e = (java.util.Map.Entry<K, V>) o;
                try {
                    if (!inBounds(e.getKey()))
                        return false;
                    // NOTE(review): only key presence is checked, the entry's
                    // value is never compared -- confirm this is intended
                    if (e.getKey() != null && tree.get(e.getKey()) != null)
                        return true;
                } catch (IOException e1) {
                    throw new IOError(e1);
                }
            }
            return false;
        }


        public Iterator<java.util.Map.Entry<K, V>> iterator() {
            try {
                // start browsing at this view's lower bound (or at tree start)
                final BTree.BTreeTupleBrowser<K, V> br = fromKey == null ?
                        tree.browse() : tree.browse(fromKey, fromInclusive);
                return new Iterator<Entry<K, V>>() {

                    private Entry<K, V> next;  // prefetched entry; null == exhausted
                    private K lastKey;         // key returned by last next(), used by remove()

                    // prefetch the following entry; stops at the upper bound
                    void ensureNext() {
                        try {
                            BTree.BTreeTuple<K, V> t = new BTree.BTreeTuple<K, V>();
                            if (br.getNext(t) && inBounds(t.key))
                                next = newEntry(t.key, t.value);
                            else
                                next = null;
                        } catch (IOException e1) {
                            throw new IOError(e1);
                        }
                    }

                    {
                        ensureNext();
                    }


                    public boolean hasNext() {
                        return next != null;
                    }

                    public java.util.Map.Entry<K, V> next() {
                        if (next == null)
                            throw new NoSuchElementException();
                        Entry<K, V> ret = next;
                        lastKey = ret.getKey();
                        //move to next position
                        ensureNext();
                        return ret;
                    }

                    public void remove() {
                        if (readonly)
                            throw new UnsupportedOperationException("readonly");

                        if (lastKey == null)
                            throw new IllegalStateException();
                        try {
                            br.remove(lastKey);
                            lastKey = null;
                        } catch (IOException e1) {
                            throw new IOError(e1);
                        }

                    }
                };

            } catch (IOException e) {
                throw new IOError(e);
            }

        }

        @SuppressWarnings("unchecked")
        public boolean remove(Object o) {
            if (readonly)
                throw new UnsupportedOperationException("readonly");

            if (o instanceof Entry) {
                Entry<K, V> e = (java.util.Map.Entry<K, V>) o;
                try {
                    //check for nulls
                    if (e.getKey() == null || e.getValue() == null)
                        return false;
                    if (!inBounds(e.getKey()))
                        throw new IllegalArgumentException("out of bounds");
                    //get old value, must be same as item in entry
                    V v = get(e.getKey());
                    if (v == null || !e.getValue().equals(v))
                        return false;
                    V v2 = tree.remove(e.getKey());
                    return v2 != null;
                } catch (IOException e1) {
                    throw new IOError(e1);
                }
            }
            return false;

        }

        public int size() {
            return BTreeMap.this.size();
        }

        public void clear(){
            // bounded views must remove entry by entry; an unbounded view can
            // drop the whole tree in one operation
            if(fromKey!=null || toKey!=null)
                super.clear();
            else
                try {
                    tree.clear();
                } catch (IOException e) {
                    throw new IOError(e);
                }
        }

    };


    /**
     * Tests whether the given key falls inside this view's [fromKey, toKey]
     * window, honouring the inclusive/exclusive flags of each bound.
     * An unbounded view accepts every key.
     */
    public boolean inBounds(K e) {
        if (fromKey == null && toKey == null)
            return true;

        Comparator comp = comparator();
        if (comp == null) comp = Utils.COMPARABLE_COMPARATOR;

        if (fromKey != null) {
            int c = comp.compare(e, fromKey);
            // below the lower bound, or exactly on an exclusive lower bound
            if (c < 0 || (c == 0 && !fromInclusive))
                return false;
        }
        if (toKey != null) {
            int c = comp.compare(e, toKey);
            // above the upper bound, or exactly on an exclusive upper bound
            if (c > 0 || (c == 0 && !toInclusive))
                return false;
        }
        return true;
    }

    /**
     * Returns the value mapped to key, or null when the key is null, outside
     * this view's bounds, of an incompatible type, or absent.
     */
    @SuppressWarnings("unchecked")
    @Override
    public V get(Object key) {
        try {
            if (key == null)
                return null;
            if (!inBounds((K) key))
                return null;
            return tree.get((K) key);
        } catch (ClassCastException e) {
            // key not comparable with tree keys -> treat as absent
            return null;
        } catch (IOException e) {
            throw new IOError(e);
        }
    }

    /**
     * Removes the key and returns its old value, or null when absent.
     *
     * @throws UnsupportedOperationException on a readonly map
     * @throws IllegalArgumentException when the key is present but outside bounds
     */
    @SuppressWarnings("unchecked")
    @Override
    public V remove(Object key) {
        if (readonly)
            throw new UnsupportedOperationException("readonly");

        try {
            if (key == null || tree.get((K) key) == null)
                return null;
            if (!inBounds((K) key))
                throw new IllegalArgumentException("out of bounds");

            return tree.remove((K) key);
        } catch (ClassCastException e) {
            return null;
        } catch (IOException e) {
            throw new IOError(e);
        }
    }

    /**
     * Associates value with key, replacing any existing value.
     *
     * @return the previous value, or null when the key was absent
     * @throws NullPointerException on null key or value
     * @throws IllegalArgumentException when the key is outside this view's bounds
     * @throws UnsupportedOperationException on a readonly map
     */
    public V put(K key, V value) {
        if (readonly)
            throw new UnsupportedOperationException("readonly");

        try {
            if (key == null || value == null)
                throw new NullPointerException("Null key or value");
            if (!inBounds(key))
                throw new IllegalArgumentException("out of bounds");
            return tree.insert(key, value, true);
        } catch (IOException e) {
            throw new IOError(e);
        }
    }

    /** Removes every entry visible through this view (delegates to the entry set). */
    public void clear(){
        entrySet().clear();
    }

    /**
     * Returns true when key is present and inside this view's bounds.
     * Null keys and keys of incompatible type yield false.
     */
    @SuppressWarnings("unchecked")
    @Override
    public boolean containsKey(Object key) {
        if (key == null)
            return false;
        try {
            if (!inBounds((K) key))
                return false;
            V v = tree.get((K) key);
            return v != null;
        } catch (IOException e) {
            throw new IOError(e);
        } catch (ClassCastException e) {
            return false;
        }
    }

    /** Comparator of the underlying tree, or null for natural ordering. */
    public Comparator<? super K> comparator() {
        return tree._comparator;
    }

    /**
     * First (smallest) key of this view.
     * NOTE(review): returns null on an empty map instead of throwing
     * NoSuchElementException as the SortedMap contract specifies; internal
     * callers (firstEntry, pollFirstEntry) rely on the null return, so the
     * behavior is kept.
     */
    public K firstKey() {
        if (isEmpty())
            return null;
        try {

            BTree.BTreeTupleBrowser<K, V> b = fromKey == null ? tree.browse() : tree.browse(fromKey,fromInclusive);
            BTree.BTreeTuple<K, V> t = new BTree.BTreeTuple<K, V>();
            b.getNext(t);
            return t.key;
        } catch (IOException e) {
            throw new IOError(e);
        }
    }

    /**
     * Last (greatest) key of this view.
     * NOTE(review): like firstKey(), returns null on an empty map instead of
     * throwing NoSuchElementException.
     */
    public K lastKey() {
        if (isEmpty())
            return null;
        try {
            // browse(null,true) positions the browser past the very last entry
            BTree.BTreeTupleBrowser<K, V> b = toKey == null ? tree.browse(null,true) : tree.browse(toKey,false);
            BTree.BTreeTuple<K, V> t = new BTree.BTreeTuple<K, V>();
            b.getPrevious(t);
            if(!toInclusive && toKey!=null){
                //make sure we wont return last key
                Comparator c = comparator();
                if(c==null) c=Utils.COMPARABLE_COMPARATOR;
                if(c.compare(t.key,toKey)==0)
                    b.getPrevious(t);
            }
            return t.key;
        } catch (IOException e) {
            throw new IOError(e);
        }
    }

    /**
     * Head view bounded above by the smaller of toKey2 and this view's own
     * upper bound; when this view's existing bound wins, its inclusiveness is
     * kept, otherwise the caller's flag is used.
     */
    public ConcurrentNavigableMap<K, V> headMap(K toKey2, boolean inclusive) {
        K toKey3 = Utils.min(this.toKey,toKey2,comparator());
        boolean inclusive2 = toKey3 == toKey? toInclusive : inclusive;
        return new BTreeMap<K, V>(tree, readonly, this.fromKey, this.fromInclusive, toKey3, inclusive2);
    }


    /** Head view with exclusive upper bound, per SortedMap convention. */
    public ConcurrentNavigableMap<K, V> headMap(K toKey) {
        return headMap(toKey,false);
    }


    /** Entry with greatest key strictly below the given key, or null. */
    public Entry<K, V> lowerEntry(K key) {
        K k = lowerKey(key);
        return k==null? null : new SimpleEntry<K, V>(k,get(k));
    }

    /** Greatest key strictly less than the given key, or null on empty map. */
    public K lowerKey(K key) {
        if (isEmpty())
            return null;
        // clamp the search key to this view's upper bound
        K key2 = Utils.min(key,toKey,comparator());
        try {
            BTree.BTreeTupleBrowser<K, V> b  = tree.browse(key2,true) ;
            BTree.BTreeTuple<K, V> t = new BTree.BTreeTuple<K, V>();
            b.getPrevious(t);

            return t.key;

        } catch (IOException e) {
            throw new IOError(e);
        }

    }

    /** Entry with greatest key at or below the given key, or null. */
    public Entry<K, V> floorEntry(K key) {
        K k = floorKey(key);
        return k==null? null : new SimpleEntry<K, V>(k,get(k));

    }

    /** Greatest key less than or equal to the given key, or null on empty map. */
    public K floorKey(K key) {
        if (isEmpty())
            return null;

        // clamp the search key to this view's lower bound
        K key2 = Utils.max(key,fromKey,comparator());
        try {
            BTree.BTreeTupleBrowser<K, V> b  = tree.browse(key2,true) ;
            BTree.BTreeTuple<K, V> t = new BTree.BTreeTuple<K, V>();
            b.getNext(t);
            Comparator comp = comparator();
            if (comp == null) comp = Utils.COMPARABLE_COMPARATOR;
            // NOTE(review): t.key may be null when the browser is past the last
            // entry -- confirm the comparator tolerates null before relying on it
            if(comp.compare(t.key,key2) == 0)
                return t.key;

            // no exact match: step back past the entry just visited
            b.getPrevious(t);
            b.getPrevious(t);
            return t.key;

        } catch (IOException e) {
            throw new IOError(e);
        }
    }

    /** Entry with smallest key at or above the given key, or null. */
    public Entry<K, V> ceilingEntry(K key) {
        K k = ceilingKey(key);
        return k==null? null : new SimpleEntry<K, V>(k,get(k));
    }

    /** Smallest key greater than or equal to the given key, or null on empty map. */
    public K ceilingKey(K key) {
        if (isEmpty())
            return null;
        K key2 = Utils.min(key,toKey,comparator());

        try {
            BTree.BTreeTupleBrowser<K, V> b  = tree.browse(key2,true) ;
            BTree.BTreeTuple<K, V> t = new BTree.BTreeTuple<K, V>();
            b.getNext(t);
            return t.key;

        } catch (IOException e) {
            throw new IOError(e);
        }
    }

    /** Entry with smallest key strictly above the given key, or null. */
    public Entry<K, V> higherEntry(K key) {
        K k = higherKey(key);
        return k==null? null : new SimpleEntry<K, V>(k,get(k));
    }

    /** Smallest key strictly greater than the given key, or null on empty map. */
    public K higherKey(K key) {
        if (isEmpty())
            return null;

        K key2 = Utils.max(key,fromKey,comparator());

        try {
            // exclusive browse positions after an exact match
            BTree.BTreeTupleBrowser<K, V> b  = tree.browse(key2,false) ;
            BTree.BTreeTuple<K, V> t = new BTree.BTreeTuple<K, V>();
            b.getNext(t);
            return t.key;

        } catch (IOException e) {
            throw new IOError(e);
        }
    }

    /** First entry of this view, or null when empty. */
    public Entry<K, V> firstEntry() {
        K k = firstKey();
        return k==null? null : new SimpleEntry<K, V>(k,get(k));
    }

    /** Last entry of this view, or null when empty. */
    public Entry<K, V> lastEntry() {
        K k = lastKey();
        return k==null? null : new SimpleEntry<K, V>(k,get(k));
    }

    /** Removes and returns the first entry, or null when empty. */
    public Entry<K, V> pollFirstEntry() {
        Entry<K,V> first = firstEntry();
        if(first!=null)
            remove(first.getKey());
        return first;
    }

    /** Removes and returns the last entry, or null when empty. */
    public Entry<K, V> pollLastEntry() {
        Entry<K,V> last = lastEntry();
        if(last!=null)
            remove(last.getKey());
        return last;
    }

    /** Not supported. */
    public ConcurrentNavigableMap<K, V> descendingMap() {
        throw new  UnsupportedOperationException("not implemented yet");
        //TODO implement descending (reverse order) map
    }


    /** Navigable key-set view of this map. */
    public NavigableSet<K> keySet() {
        return navigableKeySet();
    }

    /** Lazily created, cached key-set view backed by this map. */
    public NavigableSet<K> navigableKeySet() {
        if(keySet2 == null)
            keySet2 = new BTreeSet<K>((BTreeMap<K,Object>) this);
        return keySet2;
    }

    /** Not supported: depends on descendingMap(). */
    public NavigableSet<K> descendingKeySet() {
        return descendingMap().navigableKeySet();
    }



    /** Tail view with inclusive lower bound, per SortedMap convention. */
    public ConcurrentNavigableMap<K, V> tailMap(K fromKey) {
        return tailMap(fromKey,true);
    }


    /**
     * Tail view bounded below by the greater of fromKey2 and this view's own
     * lower bound; when this view's existing bound wins, its inclusiveness is
     * kept, otherwise the caller's flag is used (mirrors headMap).
     */
    public ConcurrentNavigableMap<K, V> tailMap(K fromKey2, boolean inclusive) {
        K fromKey3 = Utils.max(this.fromKey,fromKey2,comparator());
        // fixed: previously compared fromKey3 against toKey and picked
        // toInclusive -- a copy-paste slip from headMap that chose the wrong
        // inclusiveness flag for the new lower bound
        boolean inclusive2 = fromKey3 == this.fromKey? this.fromInclusive : inclusive;

        return new BTreeMap<K, V>(tree, readonly, fromKey3, inclusive2, toKey, toInclusive);
    }

    /**
     * Sub-map view with the given bounds.
     * NOTE(review): unlike headMap/tailMap, the new bounds are NOT clamped to
     * this view's existing bounds -- confirm whether escaping a bounded view
     * is intended.
     *
     * @throws IllegalArgumentException when fromKey is greater than toKey
     */
    public ConcurrentNavigableMap<K, V> subMap(K fromKey, boolean fromInclusive, K toKey, boolean toInclusive) {
        Comparator comp = comparator();
        if (comp == null) comp = Utils.COMPARABLE_COMPARATOR;
        if (comp.compare(fromKey, toKey) > 0)
            throw new IllegalArgumentException("fromKey is bigger then toKey");
        return new BTreeMap<K, V>(tree, readonly, fromKey, fromInclusive, toKey, toInclusive);
    }

    /** Sub-map with inclusive lower and exclusive upper bound. */
    public ConcurrentNavigableMap<K, V> subMap(K fromKey, K toKey) {
        return subMap(fromKey,true,toKey,false);
    }


    /** Underlying BTree backing this map. */
    public BTree<K, V> getTree() {
        return tree;
    }


    /** Registers a listener notified on record insert/update/delete. */
    public void addRecordListener(RecordListener<K, V> listener) {
        tree.addRecordListener(listener);
    }

    /** DB instance backing the underlying tree. */
    public DBAbstract getRecordManager() {
        return tree.getRecordManager();
    }

    /** Unregisters a previously added listener. */
    public void removeRecordListener(RecordListener<K, V> listener) {
        tree.removeRecordListener(listener);
    }


    /**
     * Number of entries visible through this view.  Unbounded views read the
     * tree's entry counter directly; bounded views must iterate and count.
     * NOTE(review): the counter is a long truncated by an int cast -- maps
     * with more than Integer.MAX_VALUE entries report a wrong size.
     */
    public int size() {
        if (fromKey == null && toKey == null)
            return (int) tree._entries; //use fast counter on tree if Map has no bounds
        else {
            //had to count items in iterator
            Iterator iter = keySet().iterator();
            int counter = 0;
            while (iter.hasNext()) {
                iter.next();
                counter++;
            }
            return counter;
        }

    }


    /**
     * Puts the value only when the key is absent; the tree's write lock makes
     * the check-then-act sequence atomic.
     *
     * @return existing value when present, otherwise result of put (null)
     */
    public V putIfAbsent(K key, V value) {
        tree.lock.writeLock().lock();
        try{
            if (!containsKey(key))
                 return put(key, value);
            else
                 return get(key);
        }finally {
            tree.lock.writeLock().unlock();
        }
    }

    /**
     * Removes the entry only when the key currently maps to the given value;
     * atomic under the tree's write lock.
     */
    public boolean remove(Object key, Object value) {
        tree.lock.writeLock().lock();
        try{
            if (containsKey(key) && get(key).equals(value)) {
                remove(key);
                return true;
            } else return false;
        }finally {
            tree.lock.writeLock().unlock();
        }


    }

    /**
     * Replaces the value only when the key currently maps to oldValue;
     * atomic under the tree's write lock.
     */
    public boolean replace(K key, V oldValue, V newValue) {
        tree.lock.writeLock().lock();
        try{
            if (containsKey(key) && get(key).equals(oldValue)) {
                put(key, newValue);
                return true;
            } else return false;
        }finally {
            tree.lock.writeLock().unlock();
        }

    }

    /**
     * Replaces the value only when the key is already present;
     * atomic under the tree's write lock.
     */
    public V replace(K key, V value) {
        tree.lock.writeLock().lock();
        try{
            if (containsKey(key)) {
                return put(key, value);
             } else return null;
        }finally {
            tree.lock.writeLock().unlock();
        }
    }
}


================================================
FILE: src/main/java/org/apache/jdbm/BTreeNode.java
================================================
/*******************************************************************************
 * Copyright 2010 Cees De Groot, Alex Boisvert, Jan Kotek
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/


package org.apache.jdbm;

import java.io.*;
import java.util.ConcurrentModificationException;
import java.util.List;

/**
 * Node of a BTree.
 * <p/>
 * The node contains a number of key-value pairs.  Keys are ordered to allow
 * dichotomic search. If value is too big, it is stored in separate record
 * and only recid reference is stored
 * <p/>
 * If the node is a leaf node, the keys and values are user-defined and
 * represent entries inserted by the user.
 * <p/>
 * If the node is non-leaf, each key represents the greatest key in the
 * underlying BTreeNode and the values are recids pointing to the children BTreeNodes.
 * The only exception is the rightmost BTreeNode, which is considered to have an
 * "infinite" key value, meaning that any insert will be to the left of this
 * pseudo-key
 *
 * @author Alex Boisvert
 * @author Jan Kotek
 */
final class BTreeNode<K, V>
        implements Serializer<BTreeNode<K, V>> {

    /** enables System.out tracing in insert(); compile-time constant */
    private static final boolean DEBUG = false;


    /**
     * Parent B+Tree.
     */
    transient BTree<K, V> _btree;


    /**
     * This BTreeNode's record ID in the DB.
     */
    protected transient long _recid;


    /**
     * Flag indicating if this is a leaf BTreeNode.
     */
    protected boolean _isLeaf;


    /**
     * Keys of children nodes
     */
    protected K[] _keys;


    /**
     * Values associated with keys.  (Only valid if leaf node)
     */
    protected Object[] _values;


    /**
     * Children nodes (recids) associated with keys.  (Only valid if non-leaf node)
     */
    protected long[] _children;


    /**
     * Index of first used item at the node.
     * Entries are stored right-aligned: slots below _first are unused.
     */
    protected byte _first;


    /**
     * Previous leaf node (only if this node is a leaf)
     */
    protected long _previous;


    /**
     * Next leaf node (only if this node is a leaf)
     */
    protected long _next;

    /**
     * Return the B+Tree that is the owner of this {@link BTreeNode}.
     */
    public BTree<K, V> getBTree() {
        return _btree;
    }

    /**
     * No-argument constructor used by serialization.
     */
    public BTreeNode() {
        // empty
    }


    /**
     * Root node overflow constructor.
     * Creates a new non-leaf root holding two children -- the overflow node
     * (lower keys) and the previous root (higher keys) -- and persists itself.
     */
    @SuppressWarnings("unchecked")
    BTreeNode(BTree<K, V> btree, BTreeNode<K, V> root, BTreeNode<K, V> overflow)
            throws IOException {
        _btree = btree;

        _isLeaf = false;

        _first = BTree.DEFAULT_SIZE - 2;

        _keys = (K[]) new Object[BTree.DEFAULT_SIZE];
        _keys[BTree.DEFAULT_SIZE - 2] = overflow.getLargestKey();
        _keys[BTree.DEFAULT_SIZE - 1] = root.getLargestKey();

        _children = new long[BTree.DEFAULT_SIZE];
        _children[BTree.DEFAULT_SIZE - 2] = overflow._recid;
        _children[BTree.DEFAULT_SIZE - 1] = root._recid;

        // the node acts as its own Serializer
        _recid = _btree._db.insert(this, this,false);
    }


    /**
     * Root node (first insert) constructor.
     * Creates a leaf root holding a single key/value pair and persists itself.
     */
    @SuppressWarnings("unchecked")
    BTreeNode(BTree<K, V> btree, K key, V value)
            throws IOException {
        _btree = btree;

        _isLeaf = true;

        _first = BTree.DEFAULT_SIZE - 2;

        _keys = (K[]) new Object[BTree.DEFAULT_SIZE];
        _keys[BTree.DEFAULT_SIZE - 2] = key;
        _keys[BTree.DEFAULT_SIZE - 1] = null;  // I am the root BTreeNode for now

        _values = new Object[BTree.DEFAULT_SIZE];
        _values[BTree.DEFAULT_SIZE - 2] = value;
        _values[BTree.DEFAULT_SIZE - 1] = null;  // I am the root BTreeNode for now

        _recid = _btree._db.insert(this, this,false);
    }


    /**
     * Overflow node constructor.  Creates an empty BTreeNode and persists it.
     */
    @SuppressWarnings("unchecked")
    BTreeNode(BTree<K, V> btree, boolean isLeaf){
        _btree = btree;

        _isLeaf = isLeaf;

        // node will initially be half-full
        _first = BTree.DEFAULT_SIZE / 2;

        _keys = (K[]) new Object[BTree.DEFAULT_SIZE];
        if (isLeaf) {
            _values = new Object[BTree.DEFAULT_SIZE];
        } else {
            _children = new long[BTree.DEFAULT_SIZE];
        }

        try{
            _recid = _btree._db.insert(this, this,false);
        }catch(IOException e ){
            throw new IOError(e);
        }
    }


    /**
     * Get largest key under this BTreeNode.  Null is considered to be the
     * greatest possible key.
     */
    K getLargestKey() {
        return _keys[BTree.DEFAULT_SIZE - 1];
    }


    /**
     * Tells whether this node holds no entries, i.e. the first-used index
     * points at the last slot of the node's storage array.
     */
    boolean isEmpty() {
        final int lastSlot = (_isLeaf ? _values.length : _children.length) - 1;
        return _first == lastSlot;
    }


    /**
     * Tells whether every slot of this node is occupied (the first-used
     * index has reached slot zero).
     */
    boolean isFull() {
        return _first == 0;
    }


    /**
     * Find the object associated with the given key.
     *
     * @param height Height of the current BTreeNode (zero is leaf node)
     * @param key    The key
     * @param inclusive passed through to findChildren / the leaf Browser
     * @return TupleBrowser positionned just before the given key, or before
     *         next greater key if key isn't found.
     */
    BTree.BTreeTupleBrowser<K, V> find(int height, final K key, final boolean inclusive)
            throws IOException {
        byte index = findChildren(key,inclusive);

        height -= 1;

        if (height == 0) {
            // leaf node
            return new Browser<K, V>(this, index);
        } else {
            // non-leaf node: descend into the child covering the key
            BTreeNode<K, V> child = loadNode(_children[index]);
            return child.find(height, key,inclusive);
        }
    }


    /**
     * Find value associated with the given key.
     *
     * @param height Height of the current BTreeNode (zero is leaf node)
     * @param key    The key
     * @return the value stored under the key, or null if there is no exact match
     */
    V findValue(int height, K key)
            throws IOException {
        byte index = findChildren(key,true);

        height -= 1;

        if (height == 0) {

            K key2 = _keys[index];
//          // get returns the matching key or the next ordered key, so we must
//          // check if we have an exact match
            if (key2 == null || compare(key, key2) != 0)
                return null;

            // leaf node; large values are stored as lazy records outside the tree
            if (_values[index] instanceof BTreeLazyRecord)
                return ((BTreeLazyRecord<V>) _values[index]).get();
            else
                return (V) _values[index];


        } else {
            // non-leaf node
            BTreeNode<K, V> child = loadNode(_children[index]);
            return child.findValue(height, key);
        }
    }

    /**
     * Find first entry and return a browser positioned before it.
     * Recurses down the left-most children until a leaf is reached.
     *
     * @return TupleBrowser positionned just before the first entry.
     */
    BTree.BTreeTupleBrowser<K, V> findFirst()
            throws IOException {
        if (_isLeaf) {
            return new Browser<K, V>(this, _first);
        } else {
            BTreeNode<K, V> child = loadNode(_children[_first]);
            return child.findFirst();
        }
    }

    /**
     * Deletes this BTreeNode and all children nodes from the record manager.
     * For a leaf, first unlinks it from the doubly-linked leaf list; for a
     * non-leaf, recursively deletes every child before removing this record.
     */
    void delete()
            throws IOException {
        if (_isLeaf) {
            if (_next != 0) {
                BTreeNode<K, V> nextNode = loadNode(_next);
                if (nextNode._previous == _recid) { // this consistency check can be removed in production code
                    nextNode._previous = _previous;
                    _btree._db.update(nextNode._recid, nextNode, nextNode);
                } else {
                    throw new Error("Inconsistent data in BTree");
                }
            }
            if (_previous != 0) {
                BTreeNode<K, V> previousNode = loadNode(_previous);
                // fixed: the condition was inverted ('!=') -- it relinked on
                // INCONSISTENT data and threw Error on every consistent delete;
                // this now mirrors the _next branch above
                if (previousNode._next == _recid) { // this consistency check can be removed in production code
                    previousNode._next = _next;
                    _btree._db.update(previousNode._recid, previousNode, previousNode);
                } else {
                    throw new Error("Inconsistent data in BTree");
                }
            }
        } else {
            int left = _first;
            int right = BTree.DEFAULT_SIZE - 1;

            // recursively delete all children before removing this node's record
            for (int i = left; i <= right; i++) {
                BTreeNode<K, V> childNode = loadNode(_children[i]);
                childNode.delete();
            }
        }

        _btree._db.delete(_recid);
    }

    /**
     * Insert the given key and value.
     * <p/>
     * Since the Btree does not support duplicate entries, the caller must
     * specify whether to replace the existing value.
     *
     * @param height  Height of the current BTreeNode (zero is leaf node)
     * @param key     Insert key
     * @param value   Insert value
     * @param replace Set to true to replace the existing value, if one exists.
     * @return Insertion result containing existing value OR a BTreeNode if the key
     *         was inserted and provoked a BTreeNode overflow.
     */
    InsertResult<K, V> insert(int height, K key, final V value, final boolean replace)
            throws IOException {
        InsertResult<K, V> result;
        long overflow;

        final byte index = findChildren(key,true);

        height -= 1;
        if (height == 0) {

            //reuse InsertResult instance to avoid GC trashing on massive inserts
            result = _btree.insertResultReuse;
            _btree.insertResultReuse = null;
            if (result == null)
                result = new InsertResult<K, V>();

            // inserting on a leaf BTreeNode
            overflow = -1;
            if (DEBUG) {
                System.out.println("BTreeNode.insert() Insert on leaf node key=" + key
                        + " value=" + value + " index=" + index);
            }
            if (compare(_keys[index], key) == 0) {
                // key already exists
                if (DEBUG) {
                    System.out.println("BTreeNode.insert() Key already exists.");
                }
                boolean isLazyRecord = _values[index] instanceof BTreeLazyRecord;
                if (isLazyRecord)
                    result._existing = ((BTreeLazyRecord<V>) _values[index]).get();
                else
                    result._existing = (V) _values[index];
                if (replace) {
                    //remove old lazy record if necesarry
                    if (isLazyRecord)
                        ((BTreeLazyRecord) _values[index]).delete();
                    _values[index] = value;
                    _btree._db.update(_recid, this, this);
                }
                // return the existing key
                return result;
            }
        } else {
            // non-leaf BTreeNode: recurse into the child covering the key
            BTreeNode<K, V> child = loadNode(_children[index]);
            result = child.insert(height, key, value, replace);

            if (result._existing != null) {
                // return existing key, if any.
                return result;
            }

            if (result._overflow == null) {
                // no overflow means we're done with insertion
                return result;
            }

            // there was an overflow, we need to insert the overflow node on this BTreeNode
            if (DEBUG) {
                System.out.println("BTreeNode.insert() Overflow node: " + result._overflow._recid);
            }
            key = result._overflow.getLargestKey();
            overflow = result._overflow._recid;

            // update child's largest key
            _keys[index] = child.getLargestKey();

            // clean result so we can reuse it
            result._overflow = null;
        }

        // if we get here, we need to insert a new entry on the BTreeNode before _children[ index ]
        if (!isFull()) {
            if (height == 0) {
                insertEntry(this, index - 1, key, value);
            } else {
                insertChild(this, index - 1, key, overflow);
            }
            _btree._db.update(_recid, this, this);
            return result;
        }

        // node is full, we must divide the node
        final byte half = BTree.DEFAULT_SIZE >> 1;
        BTreeNode<K, V> newNode = new BTreeNode<K, V>(_btree, _isLeaf);
        if (index < half) {
            // move lower-half of entries to overflow node, including new entry
            if (DEBUG) {
                System.out.println("BTreeNode.insert() move lower-half of entries to overflow BTreeNode, including new entry.");
            }
            if (height == 0) {
                copyEntries(this, 0, newNode, half, index);
                setEntry(newNode, half + index, key, value);
                copyEntries(this, index, newNode, half + index + 1, half - index - 1);
            } else {
                copyChildren(this, 0, newNode, half, index);
                setChild(newNode, half + index, key, overflow);
                copyChildren(this, index, newNode, half + index + 1, half - index - 1);
            }
        } else {
            // move lower-half of entries to overflow node, new entry stays on this node
            if (DEBUG) {
                System.out.println("BTreeNode.insert() move lower-half of entries to overflow BTreeNode. New entry stays");
            }
            if (height == 0) {
                copyEntries(this, 0, newNode, half, half);
                copyEntries(this, half, this, half - 1, index - half);
                setEntry(this, index - 1, key, value);
            } else {
                copyChildren(this, 0, newNode, half, half);
                copyChildren(this, half, this, half - 1, index - half);
                setChild(this, index - 1, key, overflow);
            }
        }

        _first = half - 1;

        // nullify lower half of entries
        for (int i = 0; i < _first; i++) {
            if (height == 0) {
                setEntry(this, i, null, null);
            } else {
                setChild(this, i, null, -1);
            }
        }

        if (_isLeaf) {
            // link newly created node into the doubly-linked leaf list
            newNode._previous = _previous;
            newNode._next = _recid;
            if (_previous != 0) {
                BTreeNode<K, V> previous = loadNode(_previous);
                previous._next = newNode._recid;
                _btree._db.update(_previous, previous, this);

            }
            _previous = newNode._recid;
        }

        _btree._db.update(_recid, this, this);
        _btree._db.update(newNode._recid, newNode, this);

        // hand the overflow node up to the parent for insertion
        result._overflow = newNode;
        return result;
    }


    /**
     * Remove the entry associated with the given key.
     * <p>
     * Recurses down to the leaf holding the key, deletes the entry there and
     * then, unwinding the recursion, repairs any underflow in the child by
     * either stealing entries from a sibling or merging the two nodes and
     * deleting the emptied one.
     *
     * @param height Height of the current BTreeNode (zero is leaf node)
     * @param key    Removal key
     * @return Remove result object carrying the removed value and an
     *         underflow flag for the caller to act upon
     * @throws IllegalArgumentException if the key is not present
     * @throws IOException on record store failure
     */
    RemoveResult<K, V> remove(int height, K key)
            throws IOException {
        RemoveResult<K, V> result;

        int half = BTree.DEFAULT_SIZE / 2;
        byte index = findChildren(key,true);

        height -= 1;
        if (height == 0) {
            // remove leaf entry
            if (compare(_keys[index], key) != 0) {
                throw new IllegalArgumentException("Key not found: " + key);
            }
            result = new RemoveResult<K, V>();

            // large values live in a separate record; fetch it for the
            // result, then free that record
            if (_values[index] instanceof BTreeLazyRecord) {
                BTreeLazyRecord<V> r = (BTreeLazyRecord<V>) _values[index];
                result._value = r.get();
                r.delete();
            } else {
                result._value = (V) _values[index];
            }
            removeEntry(this, index);

            // update this node
            _btree._db.update(_recid, this, this);

        } else {
            // recurse into Btree to remove entry on a children node
            BTreeNode<K, V> child = loadNode(_children[index]);
            result = child.remove(height, key);

            // update children: the child's largest key may have changed
            _keys[index] = child.getLargestKey();
            _btree._db.update(_recid, this, this);

            if (result._underflow) {
                // underflow occured
                if (child._first != half + 1) {
                    throw new IllegalStateException("Error during underflow [1]");
                }
                if (index < _children.length - 1) {
                    // exists greater brother node
                    BTreeNode<K, V> brother = loadNode(_children[index + 1]);
                    int bfirst = brother._first;
                    if (bfirst < half) {
                        // steal entries from "brother" node
                        int steal = (half - bfirst + 1) / 2;
                        brother._first += steal;
                        child._first -= steal;
                        if (child._isLeaf) {
                            copyEntries(child, half + 1, child, half + 1 - steal, half - 1);
                            copyEntries(brother, bfirst, child, 2 * half - steal, steal);
                        } else {
                            copyChildren(child, half + 1, child, half + 1 - steal, half - 1);
                            copyChildren(brother, bfirst, child, 2 * half - steal, steal);
                        }

                        // null out the slots just vacated in "brother"
                        for (int i = bfirst; i < bfirst + steal; i++) {
                            if (brother._isLeaf) {
                                setEntry(brother, i, null, null);
                            } else {
                                setChild(brother, i, null, -1);
                            }
                        }

                        // update child's largest key
                        _keys[index] = child.getLargestKey();

                        // no change in previous/next node

                        // update nodes
                        _btree._db.update(_recid, this, this);
                        _btree._db.update(brother._recid, brother, this);
                        _btree._db.update(child._recid, child, this);

                    } else {
                        // brother has nothing to spare:
                        // move all entries from node "child" to "brother"
                        if (brother._first != half) {
                            throw new IllegalStateException("Error during underflow [2]");
                        }

                        brother._first = 1;
                        if (child._isLeaf) {
                            copyEntries(child, half + 1, brother, 1, half - 1);
                        } else {
                            copyChildren(child, half + 1, brother, 1, half - 1);
                        }
                        _btree._db.update(brother._recid, brother, this);


                        // remove "child" from current node
                        if (_isLeaf) {
                            copyEntries(this, _first, this, _first + 1, index - _first);
                            setEntry(this, _first, null, null);
                        } else {
                            copyChildren(this, _first, this, _first + 1, index - _first);
                            setChild(this, _first, null, -1);
                        }
                        _first += 1;
                        _btree._db.update(_recid, this, this);

                        // re-link previous and next nodes in the leaf chain
                        if (child._previous != 0) {
                            BTreeNode<K, V> prev = loadNode(child._previous);
                            prev._next = child._next;
                            _btree._db.update(prev._recid, prev, this);
                        }
                        if (child._next != 0) {
                            BTreeNode<K, V> next = loadNode(child._next);
                            next._previous = child._previous;
                            _btree._db.update(next._recid, next, this);

                        }

                        // delete "child" node
                        _btree._db.delete(child._recid);
                    }
                } else {
                    // node "brother" is before "child"
                    BTreeNode<K, V> brother = loadNode(_children[index - 1]);
                    int bfirst = brother._first;
                    if (bfirst < half) {
                        // steal entries from "brother" node
                        int steal = (half - bfirst + 1) / 2;
                        brother._first += steal;
                        child._first -= steal;
                        if (child._isLeaf) {
                            copyEntries(brother, 2 * half - steal, child,
                                    half + 1 - steal, steal);
                            copyEntries(brother, bfirst, brother,
                                    bfirst + steal, 2 * half - bfirst - steal);
                        } else {
                            copyChildren(brother, 2 * half - steal, child,
                                    half + 1 - steal, steal);
                            copyChildren(brother, bfirst, brother,
                                    bfirst + steal, 2 * half - bfirst - steal);
                        }

                        // null out the slots just vacated in "brother"
                        for (int i = bfirst; i < bfirst + steal; i++) {
                            if (brother._isLeaf) {
                                setEntry(brother, i, null, null);
                            } else {
                                setChild(brother, i, null, -1);
                            }
                        }

                        // update brother's largest key
                        _keys[index - 1] = brother.getLargestKey();

                        // no change in previous/next node

                        // update nodes
                        _btree._db.update(_recid, this, this);
                        _btree._db.update(brother._recid, brother, this);
                        _btree._db.update(child._recid, child, this);

                    } else {
                        // brother has nothing to spare:
                        // move all entries from node "brother" to "child"
                        if (brother._first != half) {
                            throw new IllegalStateException("Error during underflow [3]");
                        }

                        child._first = 1;
                        if (child._isLeaf) {
                            copyEntries(brother, half, child, 1, half);
                        } else {
                            copyChildren(brother, half, child, 1, half);
                        }
                        _btree._db.update(child._recid, child, this);

                        // remove "brother" from current node
                        if (_isLeaf) {
                            copyEntries(this, _first, this, _first + 1, index - 1 - _first);
                            setEntry(this, _first, null, null);
                        } else {
                            copyChildren(this, _first, this, _first + 1, index - 1 - _first);
                            setChild(this, _first, null, -1);
                        }
                        _first += 1;
                        _btree._db.update(_recid, this, this);

                        // re-link previous and next nodes in the leaf chain
                        if (brother._previous != 0) {
                            BTreeNode<K, V> prev = loadNode(brother._previous);
                            prev._next = brother._next;
                            _btree._db.update(prev._recid, prev, this);
                        }
                        if (brother._next != 0) {
                            BTreeNode<K, V> next = loadNode(brother._next);
                            next._previous = brother._previous;
                            _btree._db.update(next._recid, next, this);
                        }

                        // delete "brother" node
                        _btree._db.delete(brother._recid);
                    }
                }
            }
        }

        // underflow if node is more than half-empty
        result._underflow = _first > half;

        return result;
    }


    /**
     * Locate the first occupied slot whose key is greater than (or, when
     * {@code inclusive} is set, greater than or equal to) the given key.
     * Null keys sort last (see {@link #compare}), so the search stays within
     * the array bounds even when the tail of the node is empty.
     *
     * @param key       search key
     * @param inclusive when true an exact match is accepted as well
     * @return index of first children with equal or greater key
     */
    private byte findChildren(final K key, final boolean inclusive) {
        int lo = _first;
        int hi = BTree.DEFAULT_SIZE - 1;
        // exact matches (compare == 0) keep hi in place only in inclusive mode
        final int threshold = inclusive ? 0 : 1;

        // standard lower-bound binary search over the occupied region
        while (lo < hi) {
            final int mid = (lo + hi) / 2;
            if (compare(_keys[mid], key) < threshold) {
                lo = mid + 1;
            } else {
                hi = mid;
            }
        }
        return (byte) hi;
    }


    /**
     * Insert a key/value pair at the given slot, sliding the occupied prefix
     * of the node one position to the left to make room.  The caller must
     * guarantee the node is not full, i.e. slot {@code _first - 1} is free.
     */
    private static <K, V> void insertEntry(BTreeNode<K, V> node, int index,
                                           K key, V value) {
        final int from = node._first;
        final int shifted = index - from + 1;

        // shift entries [_first .. index] one slot down
        System.arraycopy(node._keys, from, node._keys, from - 1, shifted);
        System.arraycopy(node._values, from, node._values, from - 1, shifted);
        node._first -= 1;
        node._keys[index] = key;
        node._values[index] = value;
    }


    /**
     * Insert a key/child-recid pair at the given slot, sliding the occupied
     * prefix of the node one position to the left to make room.  The caller
     * must guarantee the node is not full (slot {@code _first - 1} is free).
     */
    private static <K, V> void insertChild(BTreeNode<K, V> node, int index,
                                           K key, long child) {
        final int from = node._first;
        final int shifted = index - from + 1;

        // shift entries [_first .. index] one slot down
        System.arraycopy(node._keys, from, node._keys, from - 1, shifted);
        System.arraycopy(node._children, from, node._children, from - 1, shifted);
        node._first -= 1;
        node._keys[index] = key;
        node._children[index] = child;
    }

    /**
     * Remove the entry at the given slot by sliding everything before it one
     * position to the right, then clearing the vacated first slot so stale
     * references can be garbage collected.
     */
    private static <K, V> void removeEntry(BTreeNode<K, V> node, int index) {
        final int from = node._first;
        final int shifted = index - from;

        // shift entries [_first .. index-1] one slot up, overwriting slot index
        System.arraycopy(node._keys, from, node._keys, from + 1, shifted);
        System.arraycopy(node._values, from, node._values, from + 1, shifted);
        node._keys[from] = null;
        node._values[from] = null;
        node._first++;
    }


    /**
     * Overwrite the key/value pair stored at the given slot (no shifting).
     */
    private static <K, V> void setEntry(BTreeNode<K, V> node, int index, K key, V value) {
        node._values[index] = value;
        node._keys[index] = key;
    }


    /**
     * Overwrite the key/child-recid pair stored at the given slot
     * (no shifting).
     */
    private static <K, V> void setChild(BTreeNode<K, V> node, int index, K key, long recid) {
        node._children[index] = recid;
        node._keys[index] = key;
    }


    /**
     * Bulk-copy {@code count} key/value pairs between two (possibly
     * identical) nodes.  Keys and values move together so slots stay paired.
     */
    private static <K, V> void copyEntries(BTreeNode<K, V> source, int indexSource,
                                           BTreeNode<K, V> dest, int indexDest, int count) {
        System.arraycopy(source._values, indexSource, dest._values, indexDest, count);
        System.arraycopy(source._keys, indexSource, dest._keys, indexDest, count);
    }


    /**
     * Bulk-copy {@code count} key/child-recid pairs between two (possibly
     * identical) nodes.  Keys and recids move together so slots stay paired.
     */
    private static <K, V> void copyChildren(BTreeNode<K, V> source, int indexSource,
                                            BTreeNode<K, V> dest, int indexDest, int count) {
        System.arraycopy(source._children, indexSource, dest._children, indexDest, count);
        System.arraycopy(source._keys, indexSource, dest._keys, indexDest, count);
    }


    /**
     * Fetch the BTreeNode stored under the given recid and re-attach its
     * transient state: {@code _recid} and {@code _btree} are not part of the
     * serialized form, so they must be restored after every fetch.
     */
    private BTreeNode<K, V> loadNode(long recid)
            throws IOException {
        final BTreeNode<K, V> node = _btree._db.fetch(recid, this);
        node._btree = _btree;
        node._recid = recid;
        return node;
    }


    /**
     * Compare two keys.  A null key sorts after every non-null key, so the
     * empty slots at the tail of a node compare greater than all real keys.
     * Uses the tree's comparator when one was supplied, natural ordering
     * otherwise.
     */
    private  final int compare(final K value1, final K value2) {
        // nulls sort last; note that two nulls also yield 1
        if (value1 == null) {
            return 1;
        }
        if (value2 == null) {
            return -1;
        }

        return (_btree._comparator != null)
                ? _btree._comparator.compare(value1, value2)
                : ((Comparable) value1).compareTo(value2);
    }

    /**
     * Dump the structure of the tree on the screen.  This is used for debugging
     * purposes only.
     */
    private void dump(int height) {
        StringBuilder indent = new StringBuilder();
        for (int level = 0; level < height; level++) {
            indent.append("    ");
        }
        final String prefix = indent.toString();

        System.out.println(prefix + "-------------------------------------- BTreeNode recid=" + _recid);
        System.out.println(prefix + "first=" + _first);
        for (int i = 0; i < BTree.DEFAULT_SIZE; i++) {
            // leaves show values, interior nodes show child recids
            String slot = _isLeaf
                    ? (prefix + "BTreeNode [" + i + "] " + _keys[i] + " " + _values[i])
                    : (prefix + "BTreeNode [" + i + "] " + _keys[i] + " " + _children[i]);
            System.out.println(slot);
        }
        System.out.println(prefix + "--------------------------------------");
    }


    /**
     * Recursively dump the state of the BTree on screen.  This is used for
     * debugging purposes only.
     */
    void dumpRecursive(int height, int level)
            throws IOException {
        height -= 1;
        level += 1;
        if (height <= 0) {
            return;
        }
        for (byte i = _first; i < BTree.DEFAULT_SIZE; i++) {
            // a null key marks the end of the occupied region
            if (_keys[i] == null) {
                break;
            }
            BTreeNode<K, V> child = loadNode(_children[i]);
            child.dump(level);
            child.dumpRecursive(height, level);
        }
    }

    /**
     * Deserialize the content of a BTreeNode from the given input.
     * <p>
     * Wire format (mirrors {@link #serialize}): a leaf/non-leaf header byte,
     * packed previous/next sibling recids (leaves only), the {@code _first}
     * index, packed child recids (non-leaves only), then keys and — for
     * leaves — values.
     *
     * @param ois2 input to read from (must be a DataInputOutput)
     * @return the reconstructed node; {@code _recid}/{@code _btree} are left
     *         unset and restored by the caller (see loadNode)
     * @throws IOException on read or deserialization failure
     */
    @SuppressWarnings("unchecked")
    public BTreeNode<K, V> deserialize(DataInput ois2)
            throws IOException {
        DataInputOutput ois = (DataInputOutput) ois2;


        BTreeNode<K, V> node = new BTreeNode<K, V>();

        switch (ois.readUnsignedByte()) {
            case SerializationHeader.BTREE_NODE_LEAF:
                node._isLeaf = true;
                break;
            case SerializationHeader.BTREE_NODE_NONLEAF:
                node._isLeaf = false;
                break;
            default:
                throw new InternalError("wrong BTreeNode header");
        }

        // only leaves carry sibling links
        if (node._isLeaf) {
            node._previous = LongPacker.unpackLong(ois);
            node._next = LongPacker.unpackLong(ois);
        }


        node._first = ois.readByte();

        if (!node._isLeaf) {
            node._children = new long[BTree.DEFAULT_SIZE];
            for (int i = node._first; i < BTree.DEFAULT_SIZE; i++) {
                node._children[i] = LongPacker.unpackLong(ois);
            }
        }

        // structural scans can skip the (potentially expensive) key/value part
        if (!_btree.loadValues)
            return node;

        try {

            node._keys = readKeys(ois, node._first);

        } catch (ClassNotFoundException except) {
            // wrap with the cause preserved (was: new IOException(except.getMessage()),
            // which dropped the stack trace; now consistent with the values branch)
            throw new IOException(except);
        }

        if (node._isLeaf) {

            try {
                readValues(ois, node);
            } catch (ClassNotFoundException except) {
                throw new IOException(except);
            }
        }

        return node;

    }


    /**
     * Serialize the content of a BTreeNode into the given output stream.
     * <p>
     * Wire format: a leaf/non-leaf header byte, packed previous/next sibling
     * recids (leaves only), the {@code _first} index, packed child recids
     * (non-leaves only), then keys and — for leaves of trees that store
     * values — the values.
     * <p>
     * Note: it is assumed that the BTreeNode instance doing the serialization
     * is the parent of the BTreeNode object being serialized.
     *
     * @param oos output stream to write to
     * @param obj BTreeNode to serialize
     * @throws IOException on write or serialization failure
     */
    public void serialize(DataOutput oos, BTreeNode<K, V> obj)
            throws IOException {


        BTreeNode<K, V> node = obj;

        oos.writeByte(node._isLeaf ? SerializationHeader.BTREE_NODE_LEAF : SerializationHeader.BTREE_NODE_NONLEAF);
        // only leaves carry sibling links
        if (node._isLeaf) {
            LongPacker.packLong(oos, node._previous);
            LongPacker.packLong(oos, node._next);
        }

        oos.write(node._first);

        // non-leaf: child recids for the occupied slots only
        if (!node._isLeaf) {
            for (int i = node._first; i < BTree.DEFAULT_SIZE; i++) {
                LongPacker.packLong(oos, node._children[i]);
            }
        }

        writeKeys(oos, node._keys, node._first);

        // values are only written when the tree actually stores values
        if (node._isLeaf && _btree.hasValues()) {
            writeValues(oos, node);
        }
    }


    /**
     * Read the leaf values written by {@link #writeValues}.  Each slot starts
     * with a one-byte header: NULL, LAZY_RECORD (followed by a packed recid of
     * an out-of-line record), or the size of an inlined payload.  When the
     * tree stores no values (set mode), placeholder values are created for
     * every occupied key slot instead.
     */
    private void readValues(DataInputOutput ois, BTreeNode<K, V> node) throws IOException, ClassNotFoundException {
        node._values = new Object[BTree.DEFAULT_SIZE];
        if(_btree.hasValues()){
            Serializer<V> serializer = _btree.valueSerializer != null ? _btree.valueSerializer : (Serializer<V>) _btree.getRecordManager().defaultSerializer();
            for (int i = node._first; i < BTree.DEFAULT_SIZE; i++) {
                int header = ois.readUnsignedByte();
                if (header == BTreeLazyRecord.NULL) {
                    node._values[i] = null;
                } else if (header == BTreeLazyRecord.LAZY_RECORD) {
                    // value lives in its own record; defer loading until accessed
                    long recid = LongPacker.unpackLong(ois);
                    node._values[i] = new BTreeLazyRecord(_btree._db, recid, serializer);
                } else {
                    // header is the inline payload size written by writeValues()
                    node._values[i] = BTreeLazyRecord.fastDeser(ois, serializer, header);
                }
            }
        }else{
            // create fake values so occupied slots are non-null (set mode)
            for (int i = node._first; i < BTree.DEFAULT_SIZE; i++) {
                if(node._keys[i]!=null)
                    node._values[i] = Utils.EMPTY_STRING;
            }
        }
    }


    /**
     * Write leaf values.  Small values are inlined (one size byte followed by
     * the payload); values larger than MAX_INTREE_RECORD_SIZE are stored as
     * separate records and referenced by recid under the LAZY_RECORD header.
     * Null slots are written as a single NULL header byte.
     */
    private void writeValues(DataOutput oos, BTreeNode<K, V> node) throws IOException {


        DataInputOutput output = null;
        Serializer serializer = _btree.valueSerializer != null ? _btree.valueSerializer : _btree.getRecordManager().defaultSerializer();
        for (int i = node._first; i < BTree.DEFAULT_SIZE; i++) {
            if (node._values[i] instanceof BTreeLazyRecord) {
                // already stored out-of-line; just reference the existing record
                oos.write(BTreeLazyRecord.LAZY_RECORD);
                LongPacker.packLong(oos, ((BTreeLazyRecord) node._values[i]).recid);
            } else if (node._values[i] != null) {
                // scratch buffer allocated lazily and reused across iterations
                if (output == null) {
                    output = new DataInputOutput();
                } else {
                    output.reset();
                }

                serializer.serialize(output, node._values[i]);

                if (output.getPos() > BTreeLazyRecord.MAX_INTREE_RECORD_SIZE) {
                    //write as separate record
                    long recid = _btree._db.insert(output.toByteArray(), BTreeLazyRecord.FAKE_SERIALIZER,true);
                    oos.write(BTreeLazyRecord.LAZY_RECORD);
                    LongPacker.packLong(oos, recid);
                } else {
                    //write as part of btree
                    oos.write(output.getPos());
                    oos.write(output.getBuf(), 0, output.getPos());
                }
            } else {
                oos.write(BTreeLazyRecord.NULL);
            }
        }
    }


    // Key-array encoding headers written by writeKeys() and read by readKeys().
    // Values occupy the high bits of one byte (hence the << 5 shifts).
    private static final int ALL_NULL = 0;
    // all keys are Integers, delta + LongPacker encoded
    private static final int ALL_INTEGERS = 1 << 5;
    // as ALL_INTEGERS, but the first key is negative
    private static final int ALL_INTEGERS_NEGATIVE = 2 << 5;
    // all keys are Longs, delta + LongPacker encoded
    private static final int ALL_LONGS = 3 << 5;
    // as ALL_LONGS, but the first key is negative
    private static final int ALL_LONGS_NEGATIVE = 4 << 5;
    // all keys are Strings, leading-value (shared prefix) packed
    private static final int ALL_STRINGS = 5 << 5;
    // fallback: keys written through a serializer
    private static final int ALL_OTHER = 6 << 5;


    /**
     * Read the key array written by writeKeys().  The first byte selects the
     * encoding (ALL_NULL, ALL_INTEGERS[_NEGATIVE], ALL_LONGS[_NEGATIVE],
     * ALL_STRINGS or ALL_OTHER); numeric keys are stored as positive deltas
     * from their predecessor, string and serializer-written keys use
     * leading-value (shared prefix) packing.
     *
     * @param ois      input to read from
     * @param firstUse index of the first occupied slot; earlier slots stay null
     * @return key array of size BTree.DEFAULT_SIZE
     */
    private K[] readKeys(DataInput ois, final int firstUse) throws IOException, ClassNotFoundException {
        Object[] ret = new Object[BTree.DEFAULT_SIZE];
        final int type = ois.readUnsignedByte();
        if (type == ALL_NULL) {
            return (K[]) ret;
        } else if (type == ALL_INTEGERS || type == ALL_INTEGERS_NEGATIVE) {
            // first key is stored as an absolute value, sign carried by the header
            long first = LongPacker.unpackLong(ois);
            if (type == ALL_INTEGERS_NEGATIVE)
                first = -first;
            ret[firstUse] = Integer.valueOf((int) first);
            for (int i = firstUse + 1; i < BTree.DEFAULT_SIZE; i++) {
                // each following key is a positive delta from its predecessor
                long v = LongPacker.unpackLong(ois);
                if (v == 0) continue; //null
                v = v + first;
                ret[i] = Integer.valueOf((int) v);
                first = v;
            }
            return (K[]) ret;
        } else if (type == ALL_LONGS || type == ALL_LONGS_NEGATIVE) {
            long first = LongPacker.unpackLong(ois);
            if (type == ALL_LONGS_NEGATIVE)
                first = -first;

            ret[firstUse] = Long.valueOf(first);
            for (int i = firstUse + 1; i < BTree.DEFAULT_SIZE; i++) {
                // each following key is a positive delta from its predecessor
                long v = LongPacker.unpackLong(ois);
                if (v == 0) continue; //null
                v = v + first;
                ret[i] = Long.valueOf(v);
                first = v;
            }
            return (K[]) ret;
        } else if (type == ALL_STRINGS) {
            // each key shares a common prefix with the previous one
            byte[] previous = null;
            for (int i = firstUse; i < BTree.DEFAULT_SIZE; i++) {
                byte[] b = leadingValuePackRead(ois, previous, 0);
                if (b == null) continue;
                ret[i] = new String(b,Serialization.UTF8);
                previous = b;
            }
            return (K[]) ret;

        } else if (type == ALL_OTHER) {

            // TODO(review): unclear why the default-serializer keys skip the
            // leading-value packing used below — confirm against writeKeys()
            if (_btree.keySerializer == null || _btree.keySerializer == _btree.getRecordManager().defaultSerializer()) {
                for (int i = firstUse; i < BTree.DEFAULT_SIZE; i++) {
                    ret[i] = _btree.getRecordManager().defaultSerializer().deserialize(ois);
                }
                return (K[]) ret;
            }


            // custom serializer: payloads are prefix-packed, then deserialized
            // from a reusable in-memory buffer
            Serializer ser = _btree.keySerializer != null ? _btree.keySerializer : _btree.getRecordManager().defaultSerializer();
            DataInputOutput in2 = null;
            byte[] previous = null;
            for (int i = firstUse; i < BTree.DEFAULT_SIZE; i++) {
                byte[] b = leadingValuePackRead(ois, previous, 0);
                if (b == null) continue;
                if (in2 == null) {
                    in2 = new DataInputOutput();
                }
                in2.reset(b);
                ret[i] = ser.deserialize(in2);
                previous = b;
            }
            return (K[]) ret;

        } else {
            throw new InternalError("unknown BTreeNode header type: " + type);
        }

    }


    @SuppressWarnings("unchecked")
    private void writeKeys(DataOutput oos, K[] keys, final int firstUse) throws IOException {
        if (keys.length != BTree.DEFAULT_SIZE)
            throw new IllegalArgumentException("wrong keys size");

        //check if all items on key are null
        boolean allNull = true;
        for (int i = firstUse; i < BTree.DEFAULT_SIZE; i++) {
            if (keys[i] != null) {
                allNull = false;
                break;
            }
        }
        if (allNull) {
            oos.write(ALL_NULL);
            return;
        }

        /**
         * Special compression to compress Long and Integer
         */
        if ((_btree._comparator == Utils.COMPARABLE_COMPARATOR || _btree._comparator == null) &&
                (_btree.keySerializer == null || _btree.keySerializer == _btree.getRecordManager().defaultSerializer())) {
            boolean allInteger = true;
            for (int i = firstUse; i < BTree.DEFAULT_SIZE; i++) {
                if (keys[i] != null && keys[i].getClass() != Integer.class) {
                    allInteger = false;
                    break;
                }
            }
            boolean allLong = true;
            for (int i = firstUse; i < BTree.DEFAULT_SIZE; i++) {
                if (keys[i] != null && (keys[i].getClass() != Long.class ||
                        //special case to exclude Long.MIN_VALUE from conversion, causes problems to LongPacker
                        ((Long) keys[i]).longValue() == Long.MIN_VALUE)
                        ) {
                    allLong = false;
                    break;
                }
            }

            if (allLong) {
                //check that diff between MIN and MAX fits into PACKED_LONG
                long max = Long.MIN_VALUE;
                long min = Long.MAX_VALUE;
                for (int i = firstUse; i < BTree.DEFAULT_SIZE; i++) {
                    if (keys[i] == null) continue;
                    long v = (Long) keys[i];
                    if (v > max) max = v;
                    if (v < min) min = v;
                }
                //now convert to Double to prevent overflow errors
                double max2 = max;
                double min2 = min;
                double maxDiff = Long.MAX_VALUE;
                if (max2 - min2 > maxDiff / 2) // divide by two just to by sure
                    allLong = false;

            }

            if (allLong && allInteger)
                throw new InternalError();

            if (allLong || allInteger) {
                long first = ((Number) keys[firstUse]).longValue();
                //write header
                if (allInteger) {
                    if (first > 0) oos.write(ALL_INTEGERS);
                    else oos.write(ALL_INTEGERS_NEGATIVE);
                } else if (allLong) {
                    if (first > 0) oos.write(ALL_LONGS);
                    else oos.write(ALL_LONGS_NEGATIVE);
                } else {
                    throw new InternalError();
                }

                //write first
                LongPacker.packLong(oos, Math.abs(first));
                //write others
                for (int i = firstUse + 1; i < BTree.DEFAULT_SIZE; i++) {
//					Serialization.writeObject(oos, keys[i]);
                    if (keys[i] == null)
                        LongPacker.packLong(oos, 0);
                    else {
                        long v = ((Number) keys[i]).longValue();
                        if (v <= first) throw new InternalError("not ordered");
                        LongPacker.packLong(oos, v - first);
                        first = v;
                    }
                }
                return;
            } else {
                //another special case for Strings
                boolean allString = true;
                for (int i = firstUse; i < BTree.DEFAULT_SIZE; i++) {
                    if (keys[i] != null && (keys[i].getClass() != String.class)
                            ) {
                        allString = false;
                        break;
                    }
                }
                if (allString) {
                    oos.write(ALL_STRINGS);
                    byte[] previous = null;
                    for (int i = firstUse; i < BTree.DEFAULT_SIZE; i++) {
                        if (keys[i] == null) {
                            leadingValuePackWrite(oos, null, previous, 0);
                        } else {
                            byte[] b = ((String) keys[i]).getBytes(Serialization.UTF8);
                            leadingValuePackWrite(oos, b, previous, 0);
                            previous = b;
                        }
                    }
                    return;
                }
            }
        }

        /**
         * other case, serializer is provided or other stuff
         */
        oos.write(ALL_OTHER);
        if (_btree.keySerializer == null || _btree.keySerializer == _btree.getRecordManager().defaultSerializer()) {
            for (int i = firstUse; i < BTree.DEFAULT_SIZE; i++) {
                _btree.getRecordManager().defaultSerializer().serialize(oos, keys[i]);
            }
            return;
        }

        //custom serializer is provided, use it

        Serializer ser = _btree.keySerializer;
        byte[] previous = null;


        DataInputOutput out3 = new DataInputOutput();
        for (int i = firstUse; i < BTree.DEFAULT_SIZE; i++) {
            if (keys[i] == null) {
                leadingValuePackWrite(oos, null, previous, 0);
            } else {
                out3.reset();
                ser.serialize(out3, keys[i]);
                byte[] b = out3.toByteArray();
                leadingValuePackWrite(oos, b, previous, 0);
                previous = b;
            }
        }


    }

    /**
     * Recursively copies this node's subtree from store {@code r1} into store
     * {@code r2} during defragmentation. Raw record bytes are transferred
     * verbatim under the same recid; each child is deserialized only so the
     * recursion can descend into it.
     *
     * @param r1 source store to read raw records from
     * @param r2 target store receiving the records
     * @throws IOException when one of the underlying I/O operations fails
     */
    public void defrag(DBStore r1, DBStore r2) throws IOException {
        if (_children == null)
            return; // no child recids to transfer
        for (long childRecid : _children) {
            if (childRecid == 0)
                continue; // unused slot
            byte[] raw = r1.fetchRaw(childRecid);
            r2.forceInsert(childRecid, raw);
            BTreeNode childNode = deserialize(new DataInputOutput(raw));
            childNode._btree = _btree;
            childNode.defrag(r1, r2);
        }
    }


    /**
     * STATIC INNER CLASS
     * Result from insert() method call.
     * Simple holder with no behavior of its own; fields are filled in by the
     * insert logic elsewhere in this class.
     */
    static final class InsertResult<K, V> {

        /**
         * Overflow node.
         * NOTE(review): presumably set only when the insert caused a node
         * split — confirm against the insert() implementation.
         */
        BTreeNode<K, V> _overflow;

        /**
         * Existing value for the insertion key.
         */
        V _existing;

    }

    /**
     * STATIC INNER CLASS
     * Result from remove() method call.
     * Simple holder with no behavior of its own; fields are filled in by the
     * remove logic elsewhere in this class.
     */
    static final class RemoveResult<K, V> {

        /**
         * Set to true if underlying nodes underflowed
         */
        boolean _underflow;

        /**
         * Removed entry value
         */
        V _value;
    }


    /**
     * PRIVATE INNER CLASS
     * Browser to traverse leaf nodes.
     * <p/>
     * Fails fast with {@link ConcurrentModificationException} when the tree is
     * structurally modified by anything other than this browser's own
     * {@link #remove(Object)}.
     */
    static final class Browser<K, V>
            implements BTree.BTreeTupleBrowser<K, V> {

        /**
         * Current node. Null after {@link #remove(Object)} deleted the last
         * record, which leaves the browser positioned at the end.
         */
        private BTreeNode<K, V> _node;

        /**
         * Current index in the node.  The index positionned on the next
         * tuple to return.
         */
        private byte _index;

        /**
         * The tree being browsed. Held in its own field (rather than reading
         * {@code _node._btree}) so the modification check below still works
         * after {@code _node} has been set to null by remove(); previously
         * that dereference threw NullPointerException.
         */
        private final BTree<K, V> _btree;

        /**
         * Expected value of {@code _btree.modCount}; a mismatch means a
         * concurrent structural modification.
         */
        private int expectedModCount;


        /**
         * Create a browser.
         *
         * @param node  Current node
         * @param index Position of the next tuple to return.
         */
        Browser(BTreeNode<K, V> node, byte index) {
            _node = node;
            _index = index;
            _btree = node._btree;
            expectedModCount = _btree.modCount;
        }

        public boolean getNext(BTree.BTreeTuple<K, V> tuple)
                throws IOException {
            if (expectedModCount != _btree.modCount)
                throw new ConcurrentModificationException();
            if (_node == null) {
                //last record in iterator was deleted, so iterator is at end of node
                return false;
            }

            if (_index < BTree.DEFAULT_SIZE) {
                if (_node._keys[_index] == null) {
                    // reached end of the tree.
                    return false;
                }
            } else if (_node._next != 0) {
                // move to next node
                _node = _node.loadNode(_node._next);
                _index = _node._first;
            } else {
                // past the last slot of the last leaf; previously this fell
                // through and read _keys[DEFAULT_SIZE], throwing
                // ArrayIndexOutOfBoundsException on a completely full last leaf
                return false;
            }
            tuple.key = _node._keys[_index];
            if (_node._values[_index] instanceof BTreeLazyRecord)
                tuple.value = ((BTreeLazyRecord<V>) _node._values[_index]).get();
            else
                tuple.value = (V) _node._values[_index];
            _index++;
            return true;
        }

        public boolean getPrevious(BTree.BTreeTuple<K, V> tuple)
                throws IOException {
            if (expectedModCount != _btree.modCount)
                throw new ConcurrentModificationException();

            if (_node == null) {
                //deleted last record, but this situation is only supported on getNext
                throw new InternalError();
            }

            if (_index == _node._first) {

                if (_node._previous != 0) {
                    _node = _node.loadNode(_node._previous);
                    _index = BTree.DEFAULT_SIZE;
                } else {
                    // reached beginning of the tree
                    return false;
                }
            }
            _index--;
            tuple.key = _node._keys[_index];
            if (_node._values[_index] instanceof BTreeLazyRecord)
                tuple.value = ((BTreeLazyRecord<V>) _node._values[_index]).get();
            else
                tuple.value = (V) _node._values[_index];

            return true;

        }

        public void remove(K key) throws IOException {
            if (expectedModCount != _btree.modCount)
                throw new ConcurrentModificationException();

            _btree.remove(key);
            //remove() bumps modCount once; keep our expectation in sync
            expectedModCount++;

            //An entry was removed and this may trigger tree rebalance,
            //This would change current node layout, so find our position again
            BTree.BTreeTupleBrowser b = _btree.browse(key, true);
            //browser is positioned just before value which was currently deleted, so find if we have new value
            if (b.getNext(new BTree.BTreeTuple(null, null))) {
                //next value exists, copy its state
                Browser b2 = (Browser) b;
                this._node = b2._node;
                this._index = b2._index;
            } else {
                //no successor: mark browser as exhausted
                this._node = null;
                this._index = -1;
            }


        }
    }

    /**
     * Used for debugging and testing only.  Recursively obtains the recids of
     * all child BTreeNodes and adds them to the 'out' list.
     *
     * @param out    list collecting recids of descendant nodes
     * @param height remaining height below this node; recursion stops once it
     *               reaches the leaf level
     * @throws IOException when a child node cannot be loaded
     */
    void dumpChildNodeRecIDs(List out, int height)
            throws IOException {
        height -= 1;
        if (height > 0) {
            for (byte i = _first; i < BTree.DEFAULT_SIZE; i++) {
                if (_children[i] == 0) continue;

                BTreeNode child = loadNode(_children[i]);
                // Long.valueOf instead of the deprecated 'new Long(...)'
                // constructor; may reuse cached boxed instances
                out.add(Long.valueOf(child._recid));
                child.dumpChildNodeRecIDs(out, height);
            }
        }
    }


    /**
     * Read previously written data
     * Decodes one value produced by {@link #leadingValuePackWrite}: a packed
     * length (stored as len+1, 0 meaning null), a packed count of bytes shared
     * with the previous value, then the non-shared tail bytes.
     *
     * @author Kevin Day
     */
    static byte[] leadingValuePackRead(DataInput in, byte[] previous, int ignoreLeadingCount) throws IOException {
        // length is stored as len+1 so that a stored 0 can mark a null entry
        final int len = LongPacker.unpackInt(in) - 1;
        if (len == -1)
            return null;

        int common = LongPacker.unpackInt(in);
        final byte[] result = new byte[len];

        // without a previous value there is nothing to share
        if (previous == null)
            common = 0;

        if (common > 0) {
            // the first ignoreLeadingCount bytes were written verbatim; the
            // remainder of the shared prefix is copied from the previous value
            in.readFully(result, 0, ignoreLeadingCount);
            System.arraycopy(previous, ignoreLeadingCount, result, ignoreLeadingCount, common - ignoreLeadingCount);
        }
        in.readFully(result, common, len - common);
        return result;
    }

    /**
     * This method is used for delta compression for keys.
     * Writes the contents of buf to the DataOutput out, with special encoding if
     * there are common leading bytes in the previous group stored by this compressor.
     * Encoding: packed length (len+1, 0 meaning null), packed count of bytes
     * shared with {@code previous}, the first {@code ignoreLeadingCount} bytes
     * verbatim, then the non-shared tail.
     *
     * @author Kevin Day
     */
    static void leadingValuePackWrite(DataOutput out, byte[] buf, byte[] previous, int ignoreLeadingCount) throws IOException {
        // null entry is encoded as a bare 0 length
        if (buf == null) {
            LongPacker.packInt(out, 0);
            return;
        }

        int common = ignoreLeadingCount;
        if (previous != null) {
            // longest shared prefix, capped so it fits the encoding
            int limit = Math.min(buf.length, previous.length);
            if (limit > Short.MAX_VALUE)
                limit = Short.MAX_VALUE;
            while (common < limit && buf[common] == previous[common])
                common++;
        }

        LongPacker.packInt(out, buf.length + 1);// store as +1, 0 indicates null
        LongPacker.packInt(out, common);
        out.write(buf, 0, ignoreLeadingCount);
        out.write(buf, common, buf.length - common);

    }


    /** Loads the child stored in this node's last slot (the right-most child). */
    BTreeNode<K, V> loadLastChildNode() throws IOException {
        final long lastChildRecid = _children[BTree.DEFAULT_SIZE - 1];
        return loadNode(lastChildRecid);
    }


}


================================================
FILE: src/main/java/org/apache/jdbm/BTreeSet.java
================================================
/*
 *  Licensed to the Apache Software Foundation (ASF) under one or more
 *  contributor license agreements.  See the NOTICE file distributed with
 *  this work for additional information regarding copyright ownership.
 *  The ASF licenses this file to You under the Apache License, Version 2.0
 *  (the "License"); you may not use this file except in compliance with
 *  the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 *  Unless required by applicable law or agreed to in writing, software
 *  distributed under the License is distributed on an "AS IS" BASIS,
 *  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 *  See the License for the specific language governing permissions and
 *  limitations under the License.
 */

package org.apache.jdbm;

import java.util.*;

/**
 * Wrapper class exposing a <code>BTreeMap</code> through the
 * <code>NavigableSet</code> interface; the elements of this set are the keys
 * of the backing map.
 * <p/>
 * This code originally comes from Apache Harmony, was adapted by Jan Kotek for JDBM
 */
class BTreeSet<E> extends AbstractSet<E> implements NavigableSet<E> {


    /**
     * backing map; this set is a view over its keys
     */
    final BTreeMap<E, Object> map;

    BTreeSet(BTreeMap<E, Object> map) {
        this.map = map;
    }

    public boolean add(E object) {
        //a placeholder value marks the key as present
        Object previous = map.put(object, Utils.EMPTY_STRING);
        return previous == null;
    }


    public boolean addAll(Collection<? extends E> collection) {
        return super.addAll(collection);
    }


    public void clear() {
        map.clear();
    }

    public Comparator<? super E> comparator() {
        return map.comparator();
    }


    public boolean contains(Object object) {
        return map.containsKey(object);
    }


    public boolean isEmpty() {
        return map.isEmpty();
    }


    public E lower(E e) {
        return map.lowerKey(e);
    }

    public E floor(E e) {
        return map.floorKey(e);
    }

    public E ceiling(E e) {
        return map.ceilingKey(e);
    }

    public E higher(E e) {
        return map.higherKey(e);
    }

    public E pollFirst() {
        Map.Entry<E, Object> first = map.pollFirstEntry();
        if (first == null)
            return null;
        return first.getKey();
    }

    public E pollLast() {
        Map.Entry<E, Object> last = map.pollLastEntry();
        if (last == null)
            return null;
        return last.getKey();
    }

    public Iterator<E> iterator() {
        //delegate to the entry iterator and project out the key
        final Iterator<Map.Entry<E, Object>> entries = map.entrySet().iterator();
        return new Iterator<E>() {
            public boolean hasNext() {
                return entries.hasNext();
            }

            public E next() {
                Map.Entry<E, Object> entry = entries.next();
                if (entry == null)
                    return null;
                return entry.getKey();
            }

            public void remove() {
                entries.remove();
            }
        };
    }

    public NavigableSet<E> descendingSet() {
        return map.descendingKeySet();
    }

    public Iterator<E> descendingIterator() {
        return map.descendingKeySet().iterator();
    }

    public NavigableSet<E> subSet(E fromElement, boolean fromInclusive, E toElement, boolean toInclusive) {
        return map.subMap(fromElement, fromInclusive, toElement, toInclusive).navigableKeySet();
    }

    public NavigableSet<E> headSet(E toElement, boolean inclusive) {
        return map.headMap(toElement, inclusive).navigableKeySet();
    }

    public NavigableSet<E> tailSet(E fromElement, boolean inclusive) {
        return map.tailMap(fromElement, inclusive).navigableKeySet();
    }


    public boolean remove(Object object) {
        return map.remove(object) != null;
    }

    public int size() {
        return map.size();
    }


    public E first() {
        return map.firstKey();
    }


    public E last() {
        return map.lastKey();
    }


    public SortedSet<E> subSet(E start, E end) {
        //validate bound ordering; also triggers CCE/NPE for invalid arguments
        Comparator<? super E> c = map.comparator();
        int relation;
        if (c == null)
            relation = ((Comparable<E>) start).compareTo(end);
        else
            relation = c.compare(start, end);
        if (relation > 0)
            throw new IllegalArgumentException();
        return new BTreeSet<E>((BTreeMap<E, Object>) map.subMap(start, true, end, false));
    }


    public SortedSet<E> headSet(E end) {
        //comparing the bound against itself triggers CCE/NPE for invalid arguments
        Comparator<? super E> c = map.comparator();
        if (c == null)
            ((Comparable<E>) end).compareTo(end);
        else
            c.compare(end, end);
        return new BTreeSet<E>((BTreeMap<E, Object>) map.headMap(end, false));
    }


    public SortedSet<E> tailSet(E start) {
        //comparing the bound against itself triggers CCE/NPE for invalid arguments
        Comparator<? super E> c = map.comparator();
        if (c == null)
            ((Comparable<E>) start).compareTo(start);
        else
            c.compare(start, start);
        return new BTreeSet<E>((BTreeMap<E, Object>) map.tailMap(start, true));
    }


}


================================================
FILE: src/main/java/org/apache/jdbm/DB.java
================================================
package org.apache.jdbm;

import java.util.*;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentNavigableMap;

/**
 * Database is root class for creating and loading persistent collections. It also contains
 * transaction operations.
 * //TODO just write some readme
 * <p/>
 *
 * @author Jan Kotek
 * @author Alex Boisvert
 * @author Cees de Groot
 */
public interface DB {

    /**
     * Closes the DB and releases resources.
     * DB can not be used after it was closed.
     */
    void close();

    /** @return true if db was already closed */
    boolean isClosed();

    /**
     * Clear cache and remove all entries it contains.
     * This may be useful for some Garbage Collection when reference cache is used.
     */
    void clearCache();

    /**
     * Defragments storage so it consumes less space.
     * It basically copies all records into a different store and then renames it, replacing the original store.
     * <p/>
     * Defrag has two steps: In the first, collections are rearranged so records in a collection are close to each other,
     * and read speed is improved. In the second step all records are sequentially transferred, reclaiming all unused space.
     * The first step is optional and may slow down defragmentation significantly as it requires many random-access reads.
     * The second step reads and writes data sequentially and is very fast, comparable to copying files to a new location.
     *
     * <p/>
     * This commits any uncommitted data. Defrag also requires free space, as the store is basically recreated at a new location.
     *
     * @param sortCollections if collection records should be rearranged during defragment, this takes some extra time
     */
    void defrag(boolean sortCollections);

    /**
     * Commit (make persistent) all changes since beginning of transaction.
     * JDBM supports only single transaction.
     */
    void commit();

    /**
     * Rollback (cancel) all changes since beginning of transaction.
     * JDBM supports only single transaction.
     * This operation affects all maps created or loaded by this DB.
     */
    void rollback();

    /**
     * This calculates some database statistics such as collection sizes and record distributions.
     * Can be useful for performance optimizations and troubleshooting.
     * This method can run for a very long time.
     *
     * @return statistics contained in string
     */
    String calculateStatistics();


    /**
     * Copy database content into ZIP file
     * @param zipFile output ZIP file path
     */
    void copyToZip(String zipFile);



    /**
     * Get a <code>Map</code> which was already created and saved in DB.
     * This map uses disk based H*Tree and should have similar performance
     * as <code>HashMap</code>.
     *
     * @param name of hash map
     *
     * @return map, or null if no hash map with the given name exists
     */
    <K, V> ConcurrentMap<K, V> getHashMap(String name);

    /**
     * Creates Map which persists data into DB.
     *
     * @param name record name
     * @return newly created map
     */
    <K, V> ConcurrentMap<K, V> createHashMap(String name);


    /**
     * Creates  Hash Map which persists data into DB.
     * Map will use custom serializers for Keys and Values.
     * Leave keySerializer null to use default serializer for keys
     *
     * @param <K>             Key type
     * @param <V>             Value type
     * @param name            record name
     * @param keySerializer   serializer to be used for Keys, leave null to use default serializer
     * @param valueSerializer serializer to be used for Values
     * @return newly created map
     */
    <K, V> ConcurrentMap<K, V> createHashMap(String name, Serializer<K> keySerializer, Serializer<V> valueSerializer);

    <K> Set<K> createHashSet(String name);

    <K> Set<K> getHashSet(String name);

    <K> Set<K> createHashSet(String name, Serializer<K> keySerializer);

    <K, V> ConcurrentNavigableMap<K, V> getTreeMap(String name);

    /**
     * Create  TreeMap which persists data into DB.
     *
     * @param <K>  Key type
     * @param <V>  Value type
     * @param name record name
     * @return newly created map
     */
    <K extends Comparable, V> NavigableMap<K, V> createTreeMap(String name);

    /**
     * Creates TreeMap which persists data into DB.
     *
     * @param <K>             Key type
     * @param <V>             Value type
     * @param name            record name
     * @param keyComparator   Comparator used to sort keys
     * @param keySerializer   Serializer used for keys. This may reduce disk space usage
     * @param valueSerializer Serializer used for values. This may reduce disk space usage
     * @return newly created map
     */
    <K, V> ConcurrentNavigableMap<K, V> createTreeMap(String name,
                                         Comparator<K> keyComparator, Serializer<K> keySerializer, Serializer<V> valueSerializer);

    <K> NavigableSet<K> getTreeSet(String name);

    <K> NavigableSet<K> createTreeSet(String name);

    <K> NavigableSet<K> createTreeSet(String name, Comparator<K> keyComparator, Serializer<K> keySerializer);

    <K> List<K> createLinkedList(String name);

    <K> List<K> createLinkedList(String name, Serializer<K> serializer);

    <K> List<K> getLinkedList(String name);

    /** returns unmodifiable map which contains all collection names and the collections themselves */
    Map<String,Object> getCollections();

    /** completely remove collection from store*/
    void deleteCollection(String name);

    /** Java Collections return their size as int. This may not be enough for JDBM collections.
     * This method returns number of elements in JDBM collection as long.
     *
     * @param collection created by JDBM
     * @return number of elements in collection as long
     */
    long collectionSize(Object collection);

}


================================================
FILE: src/main/java/org/apache/jdbm/DBAbstract.java
================================================
/*******************************************************************************
 * Copyright 2010 Cees De Groot, Alex Boisvert, Jan Kotek
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/
package org.apache.jdbm;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOError;
import java.io.IOException;
import java.lang.ref.WeakReference;
import java.util.*;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.ConcurrentNavigableMap;

/**
 * An abstract class implementing most of DB.
 * It also has some JDBM package protected stuff (getNamedRecord)
 */
abstract class DBAbstract implements DB {


    /**
     * Reserved root slot holding the recid of the name directory
     * (maps collection names to recids).
     */
    static final byte NAME_DIRECTORY_ROOT = 0;


    /**
     * Reserved root slot for the store version number.
     */
    static final byte STORE_VERSION_NUMBER_ROOT = 1;

    /**
     * Reserved root slot for recid where Serial class info is stored.
     *
     * NOTE when introducing more roots, do not forget to update defrag
     */
    static final byte SERIAL_CLASS_INFO_RECID_ROOT = 2;

    /** To prevent double instances of the same collection, we use a weak-value map
     * keyed by collection name; cleared entries are pruned lazily on lookup.
     *
     * //TODO what to do when there is rollback?
     * //TODO clear on close
     */
    final private Map<String,WeakReference<Object>> collections = new HashMap<String,WeakReference<Object>>();



    /**
     * Inserts a new record using a custom serializer.
     *
     * @param obj          the object for the new record.
     * @param serializer   a custom serializer
     * @param disableCache true to bypass any caching mechanism for this insert
     * @return the rowid for the new record.
     * @throws java.io.IOException when one of the underlying I/O operations fails.
     */
    abstract <A> long insert(A obj, Serializer<A> serializer,boolean disableCache) throws IOException;

    /**
     * Deletes a record.
     *
     * @param recid the rowid for the record that should be deleted.
     * @throws java.io.IOException when one of the underlying I/O operations fails.
     */
    abstract void delete(long recid) throws IOException;


    /**
     * Updates a record using a custom serializer.
     * If given recid does not exist, IOException will be thrown before/during commit (cache).
     *
     * @param recid      the recid for the record that is to be updated.
     * @param obj        the new object for the record.
     * @param serializer a custom serializer
     * @throws java.io.IOException when one of the underlying I/O operations fails
     */
    abstract <A> void update(long recid, A obj, Serializer<A> serializer)
            throws IOException;


    /**
     * Fetches a record using a custom serializer.
     *
     * @param recid      the recid for the record that must be fetched.
     * @param serializer a custom serializer
     * @return the object contained in the record, null if given recid does not exist
     * @throws java.io.IOException when one of the underlying I/O operations fails.
     */
    abstract <A> A fetch(long recid, Serializer<A> serializer)
            throws IOException;

    /**
     * Fetches a record using a custom serializer and optionally disabled cache.
     *
     * @param recid        the recid for the record that must be fetched.
     * @param serializer   a custom serializer
     * @param disableCache true to disable any caching mechanism
     * @return the object contained in the record, null if given recid does not exist
     * @throws java.io.IOException when one of the underlying I/O operations fails.
     */
    abstract <A> A fetch(long recid, Serializer<A> serializer, boolean disableCache)
            throws IOException;


    /**
     * Inserts a new record serialized with the default serializer; caching enabled.
     *
     * @param obj the object for the new record
     * @return the recid for the new record
     * @throws IOException when one of the underlying I/O operations fails
     */
    public long insert(Object obj) throws IOException {
        return insert(obj, defaultSerializer(),false);
    }


    /**
     * Updates an existing record using the default serializer.
     *
     * @param recid the recid of the record to update
     * @param obj   the new object for the record
     * @throws IOException when one of the underlying I/O operations fails
     */
    public void update(long recid, Object obj) throws IOException {
        update(recid, obj, defaultSerializer());
    }


    /**
     * Fetches a record using the default serializer.
     *
     * @param recid the recid of the record to fetch
     * @return the deserialized record, null if the recid does not exist
     * @throws IOException when one of the underlying I/O operations fails
     */
    synchronized public <A> A fetch(long recid) throws IOException {
        return (A) fetch(recid, defaultSerializer());
    }

    /**
     * Returns an existing named HTree-backed hash map, or null if no named
     * object with that name exists. Live instances are cached so repeated
     * lookups return the same collection object.
     *
     * @throws ClassCastException when the name refers to a hash set
     */
    synchronized public <K, V> ConcurrentMap<K, V> getHashMap(String name) {
        Object cached = getCollectionInstance(name);
        if (cached != null)
            return (ConcurrentMap<K, V>) cached;

        try {
            long recid = getNamedObject(name);
            if (recid == 0)
                return null;

            HTree tree = fetch(recid);
            tree.setPersistenceContext(this);
            if (!tree.hasValues())
                throw new ClassCastException("HashSet is not HashMap");
            collections.put(name, new WeakReference<Object>(tree));
            return tree;
        } catch (IOException e) {
            throw new IOError(e);
        }
    }

    /**
     * Creates a named hash map using default serializers for keys and values.
     *
     * @param name record name
     * @return newly created map
     */
    synchronized public <K, V> ConcurrentMap<K, V> createHashMap(String name) {
        return createHashMap(name, null, null);
    }


    /**
     * Creates a named hash map with custom key/value serializers (null means
     * default serializer). The new HTree is inserted into the store, bound to
     * the given name and cached.
     *
     * @throws IllegalArgumentException when the name is already taken
     */
    public synchronized <K, V> ConcurrentMap<K, V> createHashMap(String name, Serializer<K> keySerializer, Serializer<V> valueSerializer) {
        try {
            assertNameNotExist(name);

            HTree<K, V> tree = new HTree(this, keySerializer, valueSerializer, true);
            long treeRecid = insert(tree);
            setNamedObject(name, treeRecid);
            collections.put(name, new WeakReference<Object>(tree));
            return tree;
        } catch (IOException e) {
            throw new IOError(e);
        }
    }

    /**
     * Returns an existing named HTree-backed hash set, or null if no named
     * object with that name exists. Live instances are cached.
     *
     * @throws ClassCastException when the name refers to a hash map
     */
    public synchronized <K> Set<K> getHashSet(String name) {
        Object cached = getCollectionInstance(name);
        if (cached != null)
            return (Set<K>) cached;

        try {
            long recid = getNamedObject(name);
            if (recid == 0)
                return null;

            HTree tree = fetch(recid);
            tree.setPersistenceContext(this);
            if (tree.hasValues())
                throw new ClassCastException("HashMap is not HashSet");
            Set<K> set = new HTreeSet(tree);
            collections.put(name, new WeakReference<Object>(set));
            return set;
        } catch (IOException e) {
            throw new IOError(e);
        }
    }

    /**
     * Creates a named hash set using the default key serializer.
     *
     * @param name record name
     * @return newly created set
     */
    public synchronized <K> Set<K> createHashSet(String name) {
        return createHashSet(name, null);
    }

    /**
     * Creates a named hash set with a custom key serializer (null means
     * default serializer). The backing HTree has no values.
     *
     * @throws IllegalArgumentException when the name is already taken
     */
    public synchronized <K> Set<K> createHashSet(String name, Serializer<K> keySerializer) {
        try {
            assertNameNotExist(name);

            HTree<K, Object> tree = new HTree(this, keySerializer, null, false);
            long treeRecid = insert(tree);
            setNamedObject(name, treeRecid);

            Set<K> set = new HTreeSet<K>(tree);
            collections.put(name, new WeakReference<Object>(set));
            return set;
        } catch (IOException e) {
            throw new IOError(e);
        }
    }

    /**
     * Returns an existing named BTree-backed sorted map, or null if no named
     * object with that name exists. Live instances are cached.
     *
     * @throws ClassCastException when the name refers to a tree set
     */
    synchronized public <K, V> ConcurrentNavigableMap<K, V> getTreeMap(String name) {
        Object cached = getCollectionInstance(name);
        if (cached != null)
            return (ConcurrentNavigableMap<K, V>) cached;

        try {
            long recid = getNamedObject(name);
            if (recid == 0)
                return null;

            BTree t = BTree.<K, V>load(this, recid);
            if (!t.hasValues())
                throw new ClassCastException("TreeSet is not TreeMap");
            ConcurrentNavigableMap<K, V> ret = new BTreeMap<K, V>(t, false); //TODO put readonly flag here
            collections.put(name, new WeakReference<Object>(ret));
            return ret;
        } catch (IOException e) {
            throw new IOError(e);
        }
    }

    /**
     * Creates a named sorted map using natural key ordering and default serializers.
     *
     * @param name record name
     * @return newly created map
     */
    synchronized public <K extends Comparable, V> ConcurrentNavigableMap<K, V> createTreeMap(String name) {
        return createTreeMap(name, null, null, null);
    }


    /**
     * Creates a named sorted map with a custom comparator and serializers
     * (null means default). The new BTree is bound to the given name and cached.
     *
     * @throws IllegalArgumentException when the name is already taken
     */
    public synchronized <K, V> ConcurrentNavigableMap<K, V> createTreeMap(String name,
                                                                          Comparator<K> keyComparator,
                                                                          Serializer<K> keySerializer,
                                                                          Serializer<V> valueSerializer) {
        try {
            assertNameNotExist(name);
            BTree<K, V> tree = BTree.createInstance(this, keyComparator, keySerializer, valueSerializer, true);
            setNamedObject(name, tree.getRecid());
            ConcurrentNavigableMap<K, V> ret = new BTreeMap<K, V>(tree, false); //TODO put readonly flag here
            collections.put(name, new WeakReference<Object>(ret));
            return ret;
        } catch (IOException e) {
            throw new IOError(e);
        }
    }


    /**
     * Returns an existing named BTree-backed sorted set, or null if no named
     * object with that name exists. Live instances are cached.
     *
     * @throws ClassCastException when the name refers to a tree map
     */
    public synchronized <K> NavigableSet<K> getTreeSet(String name) {
        Object cached = getCollectionInstance(name);
        if (cached != null)
            return (NavigableSet<K>) cached;

        try {
            long recid = getNamedObject(name);
            if (recid == 0)
                return null;

            BTree t = BTree.<K, Object>load(this, recid);
            if (t.hasValues())
                throw new ClassCastException("TreeMap is not TreeSet");
            BTreeSet<K> set = new BTreeSet<K>(new BTreeMap(t, false));
            collections.put(name, new WeakReference<Object>(set));
            return set;

        } catch (IOException e) {
            throw new IOError(e);
        }
    }

    /**
     * Creates a named sorted set using natural ordering and the default serializer.
     *
     * @param name record name
     * @return newly created set
     */
    public synchronized <K> NavigableSet<K> createTreeSet(String name) {
        return createTreeSet(name, null, null);
    }


    /**
     * Creates a named sorted set with a custom comparator and key serializer
     * (null means default). The backing BTree has no values.
     *
     * @throws IllegalArgumentException when the name is already taken
     */
    public synchronized <K> NavigableSet<K> createTreeSet(String name, Comparator<K> keyComparator, Serializer<K> keySerializer) {
        try {
            assertNameNotExist(name);
            BTree<K, Object> tree = BTree.createInstance(this, keyComparator, keySerializer, null, false);
            setNamedObject(name, tree.getRecid());
            BTreeSet<K> set = new BTreeSet<K>(new BTreeMap(tree, false));
            collections.put(name, new WeakReference<Object>(set));
            return set;

        } catch (IOException e) {
            throw new IOError(e);
        }

    }


    /**
     * Creates a named linked list using the default serializer.
     *
     * @param name record name
     * @return newly created list
     */
    synchronized public <K> List<K> createLinkedList(String name) {
        return createLinkedList(name, null);
    }

    /**
     * Creates a new named linked list.
     *
     * @param name unique name; must not already be taken by another named object
     * @param serializer element serializer, or null for the default
     * @throws IllegalArgumentException if the name is already in use
     */
    synchronized public <K> List<K> createLinkedList(String name, Serializer<K> serializer) {
        try {
            assertNameNotExist(name);

            // Persist the list header and register it under the given name.
            final LinkedList2<K> list = new LinkedList2<K>(this, serializer);
            final long recid = insert(list);
            setNamedObject(name, recid);

            collections.put(name, new WeakReference<Object>(list));
            return list;
        } catch (IOException e) {
            throw new IOError(e);
        }
    }

    /**
     * Loads a previously created named linked list.
     *
     * @return the list, or null if no named object uses this name
     */
    synchronized public <K> List<K> getLinkedList(String name) {
        final Object cached = getCollectionInstance(name);
        if (cached != null)
            return (List<K>) cached;

        try {
            final long recid = getNamedObject(name);
            if (recid == 0)
                return null;
            final LinkedList2<K> list = (LinkedList2<K>) fetch(recid);
            // The deserialized list lacks a back-reference to this DB; restore it.
            list.setPersistenceContext(this);
            collections.put(name, new WeakReference<Object>(list));
            return list;
        } catch (IOException e) {
            throw new IOError(e);
        }
    }

    /**
     * Returns the live (already instantiated) collection registered under
     * {@code name}, or null if it was never loaded or has been GCed.
     * Stale (cleared) entries are removed from the registry as a side effect.
     */
    private synchronized Object getCollectionInstance(String name) {
        // parameterized reference type instead of the raw WeakReference;
        // entries are always stored as WeakReference<Object>
        final WeakReference<Object> ref = collections.get(name);
        if (ref == null) return null;
        final Object o = ref.get();
        if (o != null) return o;
        // referent already GCed - drop the stale registry entry
        collections.remove(name);
        return null;
    }


    /**
     * Guard used by the create* factories.
     *
     * @throws IllegalArgumentException if a named object already uses {@code name}
     */
    private void assertNameNotExist(String name) throws IOException {
        if (getNamedObject(name) != 0) {
            throw new IllegalArgumentException("Object with name '" + name + "' already exists");
        }
    }



    /**
     * Obtain the record id of a named object. Returns 0 if named object
     * doesn't exist.
     * Named objects are used to store Map views and other well known objects.
     */
    synchronized protected long getNamedObject(String name) throws IOException {
        final long dirRecid = getRoot(NAME_DIRECTORY_ROOT);
        if (dirRecid == 0)
            return 0;  // name directory was never created, so nothing is registered
        final HTree<String, Long> dir = fetch(dirRecid);
        final Long recid = dir.get(name);
        return recid == null ? 0 : recid;
    }


    /**
     * Set the record id of a named object.
     * Named objects are used to store Map views and other well known objects.
     */
    synchronized protected void setNamedObject(String name, long recid) throws IOException {
        long dirRecid = getRoot(NAME_DIRECTORY_ROOT);
        final HTree<String, Long> dir;
        if (dirRecid == 0) {
            // name directory does not exist yet - create it lazily on first use
            dir = new HTree<String, Long>(this, null, null, true);
            dirRecid = insert(dir);
            setRoot(NAME_DIRECTORY_ROOT, dirRecid);
        } else {
            dir = fetch(dirRecid);
        }
        dir.put(name, recid);
    }




    /**
     * Returns a read-only map of all named collections in this DB, keyed by
     * collection name. BTree/HTree records are re-wrapped through the public
     * getters so callers receive proper Map/Set views rather than raw trees.
     */
    synchronized public Map<String,Object> getCollections(){
        try{
            Map<String,Object> ret = new LinkedHashMap<String, Object>();
            long nameDirectory_recid = getRoot(NAME_DIRECTORY_ROOT);
            if(nameDirectory_recid==0)
                return ret;
            HTree<String,Long> m = fetch(nameDirectory_recid);

            for(Map.Entry<String,Long> e:m.entrySet()){
                Object o = fetch(e.getValue());
                //trees with values are maps, without values they are sets
                if(o instanceof BTree){
                    if(((BTree) o).hasValues)
                        o = getTreeMap(e.getKey());
                    else
                        o = getTreeSet(e.getKey());
                }
                else if( o instanceof  HTree){
                    if(((HTree) o).hasValues)
                        o = getHashMap(e.getKey());
                    else
                        o = getHashSet(e.getKey());
                }
                //anything else (e.g. LinkedList2) is returned as fetched

                ret.put(e.getKey(), o);
            }
            return Collections.unmodifiableMap(ret);
        }catch(IOException e){
            throw new IOError(e);
        }

    }


    /**
     * Deletes a named collection and all of its records: clears the content,
     * deletes the collection's root record(s), evicts any cached instance and
     * finally removes the name from the name directory.
     *
     * @throws IOError (wrapping IOException) if no collection uses this name
     */
    synchronized public void deleteCollection(String name){
        try{
            long nameDirectory_recid = getRoot(NAME_DIRECTORY_ROOT);
            if(nameDirectory_recid==0)
                throw new IOException("Collection not found");
            HTree<String,Long> dir = fetch(nameDirectory_recid);

            Long recid = dir.get(name);
            if(recid == null) throw new IOException("Collection not found");

            Object o = fetch(recid);
            //we can not use O instance since it is not correctly initialized
            if(o instanceof LinkedList2){
                LinkedList2 l = (LinkedList2) o;
                l.clear();
                //delete the list's root record as well as its header below
                delete(l.rootRecid);
            }else if(o instanceof BTree){
                ((BTree) o).clear();
            } else if( o instanceof  HTree){
                HTree t = (HTree) o;
                t.clear();
                //remove the directory node tree before deleting its root
                HTreeDirectory n = (HTreeDirectory) fetch(t.rootRecid,t.SERIALIZER);
                n.deleteAllChildren();
                delete(t.rootRecid);
            }else{
                throw new InternalError("unknown collection type: "+(o==null?null:o.getClass()));
            }
            //delete the collection header record and evict the cached instance
            delete(recid);
            collections.remove(name);


            dir.remove(name);

        }catch(IOException e){
            throw new IOError(e);
        }

    }


    /** we need to set reference to this DB instance, so serializer needs to be here
     *  (an anonymous inner class captures DBAbstract.this for deserialization) */
    final Serializer<Serialization> defaultSerializationSerializer = new Serializer<Serialization>(){

        public void serialize(DataOutput out, Serialization obj) throws IOException {
            //write class-catalog recid followed by the registered class infos
            LongPacker.packLong(out,obj.serialClassInfoRecid);
            SerialClassInfo.serializer.serialize(out,obj.registered);
        }

        public Serialization deserialize(DataInput in) throws IOException, ClassNotFoundException {
            final long recid = LongPacker.unpackLong(in);
            final ArrayList<SerialClassInfo.ClassInfo> classes = SerialClassInfo.serializer.deserialize(in);
            //bind the deserialized Serialization to this DB instance
            return new Serialization(DBAbstract.this,recid,classes);
        }
    };

    /**
     * Returns the default serializer, lazily creating and persisting it on
     * first use under the SERIAL_CLASS_INFO_RECID_ROOT root slot.
     */
    public synchronized Serializer defaultSerializer() {

        try{
            long serialClassInfoRecid = getRoot(SERIAL_CLASS_INFO_RECID_ROOT);
            if (serialClassInfoRecid == 0) {
                //allocate new recid first: Serialization must know its own
                //recid, so the record is preallocated empty and updated below
                serialClassInfoRecid = insert(null,Utils.NULL_SERIALIZER,false);
                //and insert new serializer
                Serialization ser = new Serialization(this,serialClassInfoRecid,new ArrayList<SerialClassInfo.ClassInfo>());

                update(serialClassInfoRecid,ser, defaultSerializationSerializer);
                setRoot(SERIAL_CLASS_INFO_RECID_ROOT, serialClassInfoRecid);
                return ser;
            }else{
                return fetch(serialClassInfoRecid,defaultSerializationSerializer);
            }

        } catch (IOException e) {
            throw new IOError(e);
        }

    }


    /** @throws IllegalStateException if this DB has already been closed */
    final protected void checkNotClosed() {
        if (isClosed()) {
            throw new IllegalStateException("db was closed");
        }
    }

    /** Stores {@code recid} in the given root slot (e.g. NAME_DIRECTORY_ROOT). */
    protected abstract void setRoot(byte root, long recid);
    /** Returns the recid stored in the given root slot; 0 if the slot is unset. */
    protected abstract long getRoot(byte root);


    /**
     * Returns the number of elements in a JDBM collection without iterating it,
     * using the size counter stored in the collection's root.
     *
     * @throws IllegalArgumentException for BTree submaps or non-JDBM collections
     */
    synchronized public long collectionSize(Object collection) {
        if (collection instanceof BTreeMap) {
            final BTreeMap m = (BTreeMap) collection;
            // submaps only see a key range, so the tree's counter would be wrong
            if (m.fromKey != null || m.toKey != null)
                throw new IllegalArgumentException("collectionSize does not work on BTree submap");
            return m.tree._entries;
        }
        if (collection instanceof HTree)
            return ((HTree) collection).getRoot().size;
        if (collection instanceof HTreeSet)
            return collectionSize(((HTreeSet) collection).map);
        if (collection instanceof BTreeSet)
            return collectionSize(((BTreeSet) collection).map);
        if (collection instanceof LinkedList2)
            return ((LinkedList2) collection).getRoot().size;
        throw new IllegalArgumentException("Not JDBM collection");
    }


    /**
     * Registers a JVM shutdown hook that closes this DB if it is still open
     * when the VM exits. Idempotent: a no-op while a hook is already installed.
     */
    void addShutdownHook(){
        // BUG FIX: condition was '!=null'; the field starts out null, so the
        // hook was never registered. Also wire the hook to this DB instance -
        // run() checks dbToClose and close() clears it, so it must be set here.
        if(shutdownCloseThread==null){
            shutdownCloseThread = new ShutdownCloseThread();
            shutdownCloseThread.dbToClose = this;
            Runtime.getRuntime().addShutdownHook(shutdownCloseThread);
        }
    }

    /**
     * Unregisters the shutdown hook (if any) and detaches it from this DB so
     * a pending hook run becomes a no-op. Subclasses extend this with the
     * actual store shutdown.
     */
    public void close() {
        final ShutdownCloseThread hook = shutdownCloseThread;
        if (hook != null) {
            Runtime.getRuntime().removeShutdownHook(hook);
            hook.dbToClose = null;
            shutdownCloseThread = null;
        }
    }


    // Shutdown hook installed by addShutdownHook(); null when none is registered.
    ShutdownCloseThread shutdownCloseThread = null;

    /** JVM shutdown hook that closes a still-open DB on VM exit. */
    private static class ShutdownCloseThread extends Thread {

        // DB to close on shutdown; nulled out when the DB is closed normally.
        DBAbstract dbToClose = null;

        ShutdownCloseThread() {
            super("JDBM shutdown");
        }

        @Override
        public void run() {
            final DBAbstract db = dbToClose;
            if (db != null && !db.isClosed()) {
                // clear the field first so close() does not try to remove
                // this hook while the JVM is already shutting down
                db.shutdownCloseThread = null;
                db.close();
            }
        }

    }

    /**
     * Reloads the BTree instances held by cached BTreeMap/BTreeSet views after
     * a rollback, so their in-memory state matches the reverted store.
     */
    synchronized public void rollback() {
        try {
            for (final WeakReference<Object> ref : collections.values()) {
                final Object c = ref.get();
                if (c instanceof BTreeMap) {
                    // reload tree
                    final BTreeMap m = (BTreeMap) c;
                    m.tree = fetch(m.tree.getRecid());
                } else if (c instanceof BTreeSet) {
                    // reload tree backing the set's map view
                    final BTreeSet s = (BTreeSet) c;
                    s.map.tree = fetch(s.map.tree.getRecid());
                }
            }
        } catch (IOException e) {
            throw new IOError(e);
        }
    }
}


================================================
FILE: src/main/java/org/apache/jdbm/DBCache.java
================================================
package org.apache.jdbm;

import javax.crypto.Cipher;
import java.io.IOError;
import java.io.IOException;
import java.util.Comparator;
import java.util.Iterator;

/**
 * Abstract class with common cache functionality.
 * Keeps a map of dirty (modified but not yet written) records which is
 * flushed to the underlying {@link DBStore} on commit.
 */
abstract class DBCache extends DBStore{

    /** number of dirty records that triggers auto-commit when transactions are disabled */
    static final int NUM_OF_DIRTY_RECORDS_BEFORE_AUTOCOMIT = 1024;

    // cache type identifiers used by DBMaker
    static final byte NONE = 1;
    static final byte MRU = 2;
    static final byte WEAK = 3;
    static final byte SOFT = 4;
    static final byte HARD = 5;

    /** a modified record waiting to be written to the underlying store */
    static final class DirtyCacheEntry {
        long _recid; //TODO recid is already part of _hashDirties, so this field could be removed to save memory
        Object _obj;
        Serializer _serializer;
    }


    /**
     * Dirty status of _hash CacheEntry Values
     */
    final protected LongHashMap<DirtyCacheEntry> _hashDirties = new LongHashMap<DirtyCacheEntry>();

    /** lazily initialized cached copy of the default serializer */
    private Serializer cachedDefaultSerializer = null;


    /**
     * Construct a CacheRecordManager wrapping another DB and
     * using a given cache policy.
     */
    public DBCache(String filename, boolean readonly, boolean transactionDisabled,
                      Cipher cipherIn, Cipher cipherOut, boolean useRandomAccessFile,
                     boolean deleteFilesAfterClose,boolean lockingDisabled){

        super(filename, readonly, transactionDisabled,
                cipherIn, cipherOut, useRandomAccessFile,
                deleteFilesAfterClose,lockingDisabled);

    }


    @Override
    public synchronized Serializer defaultSerializer(){
        if(cachedDefaultSerializer==null)
            cachedDefaultSerializer = super.defaultSerializer();
        return cachedDefaultSerializer;
    }

    @Override
    boolean needsAutoCommit() {
        // also auto-commit once too many dirty records accumulated,
        // unless a commit is already flushing them
        return super.needsAutoCommit()||
                (transactionsDisabled &&  !commitInProgress && _hashDirties.size() > NUM_OF_DIRTY_RECORDS_BEFORE_AUTOCOMIT);
    }



    /**
     * Inserts a record. Unless the cache is disabled, only a recid is
     * preallocated now; the record data are written on commit via the
     * dirty-record map.
     */
    public synchronized <A> long insert(final A obj, final Serializer<A> serializer, final boolean disableCache)
            throws IOException {
        checkNotClosed();

        if(super.needsAutoCommit())
            commit();

        if(disableCache)
            return super.insert(obj, serializer, disableCache);


        //preallocate recid so we have something to return
        final long recid = super.insert(PREALOCATE_OBJ, null, disableCache);

        //and create new dirty record for future update
        final DirtyCacheEntry e = new DirtyCacheEntry();
        e._recid = recid;
        e._obj = obj;
        e._serializer = serializer;
        _hashDirties.put(recid,e);

        return recid;
    }



    public synchronized void commit() {
        try{
            commitInProgress = true;
            //flush dirty records before committing the underlying store
            updateCacheEntries();
            super.commit();
        }finally {
            commitInProgress = false;
        }
    }

    public synchronized void rollback(){
        cachedDefaultSerializer = null;
        //discard unwritten changes
        _hashDirties.clear();
        super.rollback();
    }


    /** orders dirty records by recid so they are written in file order */
    private static final Comparator<DirtyCacheEntry> DIRTY_COMPARATOR = new Comparator<DirtyCacheEntry>() {
        final public int compare(DirtyCacheEntry o1, DirtyCacheEntry o2) {
            // BUG FIX: was '(int) (o1._recid - o2._recid)', whose int cast
            // overflows for recids more than 2^31 apart and yields a wrong
            // ordering; compare the longs directly instead.
            return o1._recid < o2._recid ? -1 : (o1._recid == o2._recid ? 0 : 1);
        }
    };


    /**
     * Update all dirty cache objects to the underlying DB.
     */
    protected void updateCacheEntries() {
        try {
            synchronized(_hashDirties){

                while(!_hashDirties.isEmpty()){
                    //make defensive copy of values as _db.update() may trigger changes in db
                    //and this would modify dirties again
                    DirtyCacheEntry[] vals = new DirtyCacheEntry[_hashDirties.size()];
                    Iterator<DirtyCacheEntry> iter = _hashDirties.valuesIterator();

                    for(int i = 0;i<vals.length;i++){
                        vals[i] = iter.next();
                    }
                    iter = null;

                    java.util.Arrays.sort(vals,DIRTY_COMPARATOR);


                    for(int i = 0;i<vals.length;i++){
                        final DirtyCacheEntry entry = vals[i];
                        vals[i] = null; //allow GC of written entries
                        super.update(entry._recid, entry._obj, entry._serializer);
                        _hashDirties.remove(entry._recid);

                    }

                    //update may have triggered more records to be added into dirties, so repeat until all records are written.
                }
            }
        } catch (IOException e) {
            throw new IOError(e);
        }

    }



}


================================================
FILE: src/main/java/org/apache/jdbm/DBCacheMRU.java
================================================
/*******************************************************************************
 * Copyright 2010 Cees De Groot, Alex Boisvert, Jan Kotek
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/

package org.apache.jdbm;

import javax.crypto.Cipher;
import java.io.IOException;

/**
 * A DB wrapping and caching another DB.
 *
 * @author Jan Kotek
 * @author Alex Boisvert
 * @author Cees de Groot
 *
 * TODO add 'cache miss' statistics
 */
class DBCacheMRU
        extends DBCache {


    private static final boolean debug = false;



    /**
     * Cached object hashtable
     */
    protected LongHashMap<CacheEntry> _hash;



    /**
     * Maximum number of objects in the cache.
     */
    protected int _max;



    /**
     * Beginning of linked-list of cache elements.  First entry is element
     * which has been used least recently.
     */
    protected CacheEntry _first;

    /**
     * End of linked-list of cache elements.  Last entry is element
     * which has been used most recently.
     */
    protected CacheEntry _last;


    /**
     * Construct a CacheRecordManager wrapping another DB and
     * using a given cache policy.
     */
    public DBCacheMRU(String filename, boolean readonly, boolean transactionDisabled,
                      Cipher cipherIn, Cipher cipherOut, boolean useRandomAccessFile,
                     boolean deleteFilesAfterClose, int cacheMaxRecords, boolean lockingDisabled)  {
        super(filename, readonly, transactionDisabled,
            cipherIn, cipherOut, useRandomAccessFile,
            deleteFilesAfterClose,lockingDisabled);

        _hash = new LongHashMap<CacheEntry>(cacheMaxRecords);
        _max = cacheMaxRecords;

    }



    /** Fetch, optionally bypassing the MRU cache entirely. */
    public synchronized <A> A fetch(long recid, Serializer<A> serializer, boolean disableCache) throws IOException {

        if (disableCache)
            return super.fetch(recid, serializer, disableCache);
        else
            return fetch(recid, serializer);
    }


    /** Deletes a record and evicts it from both the clean cache and the dirty map. */
    public synchronized void delete(long recid)
            throws IOException {
        checkNotClosed();

        super.delete(recid);
        synchronized (_hash){
            CacheEntry entry = _hash.get(recid);
            if (entry != null) {
                removeEntry(entry);
                _hash.remove(entry._recid);
            }
            _hashDirties.remove(recid);
        }

        if(super.needsAutoCommit())
            commit();

    }

    /** Records an update as a dirty entry; the store is written on commit. */
    public synchronized <A> void update(final long recid, final A obj, final Serializer<A> serializer) throws IOException {
        checkNotClosed();

        synchronized (_hash){

            //remove entry if it already exists
            CacheEntry entry = cacheGet(recid);
            if (entry != null) {
                _hash.remove(recid);
                removeEntry(entry);
            }

            //check if entry is in dirties, in this case just update its object
            DirtyCacheEntry e = _hashDirties.get(recid);
            if(e!=null){
                if(recid!=e._recid) throw new Error();
                e._obj = obj;
                e._serializer = serializer;
                return;
            }
            
            //create new dirty entry
            e = new DirtyCacheEntry();
            e._recid = recid;
            e._obj = obj;
            e._serializer = serializer;
            _hashDirties.put(recid,e);
        }

        if(super.needsAutoCommit())
            commit();

    }


    /** Fetches a record, consulting the clean cache, then the dirty map, then the store. */
    public synchronized <A> A fetch(long recid, Serializer<A> serializer)
            throws IOException {

        checkNotClosed();

        final CacheEntry entry = cacheGet(recid);
        if (entry != null) {
            return (A) entry._obj;
        }

        //check dirties
        final DirtyCacheEntry entry2 = _hashDirties.get(recid);
        if(entry2!=null){
            return (A) entry2._obj;
        }



        A value = super.fetch(recid, serializer);

        if(super.needsAutoCommit())
            commit();


        //put record into MRU cache
        cachePut(recid, value);

        return value;
    }


    /** Flushes dirty records and closes the store; safe to call more than once. */
    public synchronized void close() {

        if(isClosed())
            return;

        updateCacheEntries();
        super.close();
        _hash = null;
    }



    public synchronized void rollback() {

        // discard all cache entries since we don't know which entries
        // where part of the transaction
        synchronized (_hash){
            _hash.clear();
            _first = null;
            _last = null;
        }

        super.rollback();
    }


    /**
     * Obtain an object in the cache
     */
    protected CacheEntry cacheGet(long key) {
        synchronized (_hash){
            CacheEntry entry = _hash.get(key);
            if ( entry != null && _last != entry) {
                //touch entry
                removeEntry(entry);
                addEntry(entry);
            }
            return entry;
        }
    }


    /**
     * Place an object in the cache.
     *
     * @throws IOException
     */
    protected void cachePut(final long recid, final Object value) throws IOException {
        synchronized (_hash){
            CacheEntry entry = _hash.get(recid);
            if (entry != null) {
                entry._obj = value;
                //touch entry
                if (_last != entry) {
                    removeEntry(entry);
                    addEntry(entry);
                }
            } else {

                if (_hash.size() >= _max) {
                    // purge and recycle entry to avoid allocation
                    entry = purgeEntry();
                    entry._recid = recid;
                    entry._obj = value;
                } else {
                    entry = new CacheEntry(recid, value);
                }
                addEntry(entry);
                _hash.put(entry._recid, entry);
            }
        }
    }

    /**
     * Add a CacheEntry.  Entry goes at the end of the list.
     */
    protected void addEntry(CacheEntry entry) {
        synchronized (_hash){
            if (_first == null) {
                _first = entry;
                _last = entry;
            } else {
                _last._next = entry;
                entry._previous = _last;
                _last = entry;
            }
        }
    }


    /**
     * Remove a CacheEntry from linked list
     */
    protected void removeEntry(CacheEntry entry) {
        synchronized (_hash){
            if (entry == _first) {
                _first = entry._next;
            }
            if (_last == entry) {
                _last = entry._previous;
            }
            CacheEntry previous = entry._previous;
            CacheEntry next = entry._next;
            if (previous != null) {
                previous._next = next;
            }
            if (next != null) {
                next._previous = previous;
            }
            entry._previous = null;
            entry._next = null;
        }
    }

    /**
     * Purge least recently used object from the cache
     *
     * @return recyclable CacheEntry
     */
    protected CacheEntry purgeEntry() {
        synchronized (_hash){
            CacheEntry entry = _first;
            if (entry == null)
                return new CacheEntry(-1, null);

            removeEntry(entry);
            _hash.remove(entry._recid);
            entry._obj = null;
            return entry;
        }
    }




    /** node of the doubly-linked LRU list; also the value stored in _hash */
    @SuppressWarnings("unchecked")
    static final class CacheEntry {

        protected long _recid;
        protected Object _obj;


        protected CacheEntry _previous;
        protected CacheEntry _next;


        CacheEntry(long recid, Object obj) {
            _recid = recid;
            _obj = obj;
        }

    }


    /** Drops all clean cache entries and flushes dirty records to the store. */
    public void clearCache() {
        if(debug)
            System.err.println("DBCache: Clear cache");

        // discard all cache entries since we don't know which entries
        // where part of the transaction
        synchronized (_hash){
            _hash.clear();
            _first = null;
            _last = null;

            //flush dirties to the underlying store (they are written, not discarded)
            updateCacheEntries();

        }
    }


}


================================================
FILE: src/main/java/org/apache/jdbm/DBCacheRef.java
================================================
/*******************************************************************************
 * Copyright 2010 Cees De Groot, Alex Boisvert, Jan Kotek
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/

package org.apache.jdbm;

import javax.crypto.Cipher;
import java.io.IOException;
import java.lang.ref.ReferenceQueue;
import java.lang.ref.SoftReference;
import java.lang.ref.WeakReference;
import java.util.Iterator;
import java.util.concurrent.atomic.AtomicInteger;

/**
 * A DB wrapping and caching another DB.
 * Clean records are held in {@code _softHash} via WEAK, SOFT or HARD
 * references; dirty records live in the inherited {@code _hashDirties}
 * until commit.
 *
 * @author Jan Kotek
 * @author Alex Boisvert
 * @author Cees de Groot
 *
 * TODO add 'cache miss' statistics
 */
public class DBCacheRef
        extends DBCache {


    private static final boolean debug = false;


    /**
     * If Soft Cache is enabled, this contains softly referenced clean entries.
     * If entry became dirty, it is moved to _hash with limited size.
     * This map is accessed from SoftCache Disposer thread, so all access must be
     * synchronized.
     * With HARD cache type the values are the records themselves; otherwise
     * they are ReferenceCacheEntry wrappers.
     */
    protected LongHashMap _softHash;

    /**
     * Reference queue used to collect Soft Cache entries
     */
    protected ReferenceQueue<ReferenceCacheEntry> _refQueue;

    /**
     * Thread in which Soft Cache references are disposed
     */
    protected Thread _softRefThread;

    /** generates unique names for disposer threads */
    protected static AtomicInteger threadCounter = new AtomicInteger(0);

    /** counter which counts number of insert since last 'action'*/
    protected int insertCounter = 0;

    /** if true, the reference cache is cleared when the JVM runs low on heap */
    private final boolean _autoClearReferenceCacheOnLowMem;
    /** one of DBCache.SOFT, DBCache.WEAK or DBCache.HARD */
    private final byte _cacheType;


    /**
     * Construct a CacheRecordManager wrapping another DB and
     * using a given cache policy.
     */
    public DBCacheRef(String filename, boolean readonly, boolean transactionDisabled,
                      Cipher cipherIn, Cipher cipherOut, boolean useRandomAccessFile,
                      boolean deleteFilesAfterClose,
                      byte cacheType, boolean cacheAutoClearOnLowMem, boolean lockingDisabled) {

        super(filename, readonly, transactionDisabled,
                cipherIn, cipherOut, useRandomAccessFile,
                deleteFilesAfterClose, lockingDisabled);


        this._cacheType = cacheType;
        _autoClearReferenceCacheOnLowMem = cacheAutoClearOnLowMem;


        _softHash = new LongHashMap<ReferenceCacheEntry>();
        _refQueue = new ReferenceQueue<ReferenceCacheEntry>();
        _softRefThread = new Thread(
                    new SoftRunnable(this, _refQueue),
                    "JDBM Soft Cache Disposer " + (threadCounter.incrementAndGet()));
        _softRefThread.setDaemon(true);
        _softRefThread.start();

    }


    /** Clears the reference cache when free heap is low (called by the disposer thread). */
    void clearCacheIfLowOnMem() {

        insertCounter = 0;

        if(!_autoClearReferenceCacheOnLowMem)
            return;

        Runtime r = Runtime.getRuntime();
        long max = r.maxMemory();
        if(max == Long.MAX_VALUE)
            return;

        double free = r.freeMemory();
        double total = r.totalMemory();
        //We believe that free refers to total not max.
        //Increasing heap size to max would increase to max
        free = free + (max-total);

        if(debug)
            System.err.println("DBCache: freemem = " +free + " = "+(free/max)+"%");

        if(free<1e7 || free*4 <max)
            clearCache();


    }

    /** Fetch, optionally bypassing the reference cache entirely. */
    public synchronized <A> A fetch(long recid, Serializer<A> serializer, boolean disableCache) throws IOException {

        if (disableCache)
            return super.fetch(recid, serializer, disableCache);
        else
            return fetch(recid, serializer);
    }


    /** Deletes a record and evicts it from both the dirty map and the reference cache. */
    public synchronized void delete(long recid)
            throws IOException {
        checkNotClosed();

        super.delete(recid);
        synchronized (_hashDirties){
            _hashDirties.remove(recid);
        }
        synchronized (_softHash) {
            Object e = _softHash.remove(recid);
            if (e != null && e instanceof ReferenceCacheEntry) {
                ((ReferenceCacheEntry)e).clear();
            }
        }

        if(needsAutoCommit())
            commit();

    }

    /** Records an update as a dirty entry; the store is written on commit. */
    public synchronized <A> void update(final long recid, A obj, Serializer<A> serializer) throws IOException {
        checkNotClosed();


        synchronized (_softHash) {
            //soft cache can not contain dirty objects
            Object e = _softHash.remove(recid);
            if (e != null && e instanceof ReferenceCacheEntry) {
                ((ReferenceCacheEntry)e).clear();
            }
        }
        synchronized (_hashDirties){
            //put into dirty cache
            final DirtyCacheEntry e = new DirtyCacheEntry();
            e._recid = recid;
            e._obj = obj;
            e._serializer = serializer;
            _hashDirties.put(recid,e);
        }

        if(needsAutoCommit())
            commit();

    }


    /** Fetches a record: reference cache first, then the dirty map, then the store. */
    public synchronized <A> A fetch(long recid, Serializer<A> serializer)
            throws IOException {
        checkNotClosed();

        synchronized (_softHash) {
            Object e = _softHash.get(recid);
            if (e != null) {
                //unwrap reference entries; HARD entries are the value itself
                if(e instanceof ReferenceCacheEntry)
                    e = ((ReferenceCacheEntry)e).get();
                if (e != null) {
                    return (A) e;
                }
            }
        }


        synchronized (_hashDirties){
            DirtyCacheEntry e2 = _hashDirties.get(recid);
            if(e2!=null){
                return (A) e2._obj;
            }
        }


        A value = super.fetch(recid, serializer);

        if(needsAutoCommit())
            commit();

        //cache the clean value, wrapped according to the cache type
        synchronized (_softHash) {
            if (_cacheType == SOFT)
                _softHash.put(recid, new SoftCacheEntry(recid, value, _refQueue));
            else if (_cacheType == WEAK)
                _softHash.put(recid, new WeakCacheEntry(recid, value, _refQueue));
            else
                _softHash.put(recid, value);
        }


        return value;
    }


    /** Flushes dirty records and closes the store; safe to call more than once. */
    public synchronized void close() {
        // CONSISTENCY FIX: was 'checkNotClosed()', which made a second close()
        // throw; DBCacheMRU.close() is idempotent, so behave the same way.
        if (isClosed())
            return;

        updateCacheEntries();
        super.close();
        _softHash = null;
        _softRefThread.interrupt();
    }


    public synchronized void rollback() {
        checkNotClosed();


        // discard all cache entries since we don't know which entries
        // where part of the transaction
        synchronized (_softHash) {
            // BUG FIX: with HARD cache type the map holds plain records, not
            // ReferenceCacheEntry instances, so iterating them as entries threw
            // ClassCastException; use the same guard as clearCache().
            if (_cacheType != HARD) {
                Iterator<ReferenceCacheEntry> iter = _softHash.valuesIterator();
                while (iter.hasNext()) {
                    iter.next().clear();
                }
            }
            _softHash.clear();
        }

        super.rollback();
    }


    /** true if the record is waiting to be written to the store */
    protected boolean isCacheEntryDirty(DirtyCacheEntry entry) {
        return _hashDirties.get(entry._recid) != null;
    }

    /** marks or unmarks a record as dirty */
    protected void setCacheEntryDirty(DirtyCacheEntry entry, boolean dirty) {
        if (dirty) {
            _hashDirties.put(entry._recid, entry);
        } else {
            _hashDirties.remove(entry._recid);
        }
    }


    /** common view over SoftCacheEntry and WeakCacheEntry */
    interface ReferenceCacheEntry {
        long getRecid();

        void clear();

        Object get();
    }

    @SuppressWarnings("unchecked")
    static final class SoftCacheEntry extends SoftReference implements ReferenceCacheEntry {
        protected final long _recid;

        public long getRecid() {
            return _recid;
        }

        SoftCacheEntry(long recid, Object obj, ReferenceQueue queue) {
            super(obj, queue);
            _recid = recid;
        }
    }

    @SuppressWarnings("unchecked")
    static final class WeakCacheEntry extends WeakReference implements ReferenceCacheEntry {
        protected final long _recid;

        public long getRecid() {
            return _recid;
        }

        WeakCacheEntry(long recid, Object obj, ReferenceQueue queue) {
            super(obj, queue);
            _recid = recid;
        }


    }


    /**
     * Runs in separate thread and cleans SoftCache.
     * Runnable auto exists when CacheRecordManager is GCed
     *
     * @author Jan Kotek
     */
    static final class SoftRunnable implements Runnable {

        private ReferenceQueue<ReferenceCacheEntry> entryQueue;
        //weak reference so this runnable does not keep the DB alive
        private WeakReference<DBCacheRef> db2;

        public SoftRunnable(DBCacheRef db,
                            ReferenceQueue<ReferenceCacheEntry> entryQueue) {
            this.db2 = new WeakReference<DBCacheRef>(db);
            this.entryQueue = entryQueue;
        }

        public void run() {
            while (true) try {

                //collect next item from cache,
                //limit 10000 ms is to keep periodically checking if db was GCed
                ReferenceCacheEntry e = (ReferenceCacheEntry) entryQueue.remove(10000);

                //check if  db was GCed, cancel in that case
                DBCacheRef db = db2.get();
                if (db == null)
                    return;

                if (e != null) {

                    synchronized (db._softHash) {
                        int counter = 0;
                        while (e != null) {
                            db._softHash.remove(e.getRecid());
                            // BUG FIX: was cast to SoftCacheEntry, which threw
                            // ClassCastException when the WEAK cache type
                            // enqueued WeakCacheEntry instances.
                            e = (ReferenceCacheEntry) entryQueue.poll();
                            if(debug)
                                counter++;
                        }
                        if(debug)
                            System.err.println("DBCache: "+counter+" objects released from ref cache.");
                    }
                }else{
                    //check memory consumption every 10 seconds
                    db.clearCacheIfLowOnMem();

                }


            } catch (InterruptedException e) {
                return;
            } catch (Throwable e) {
                //this thread must keep spinning,
                //otherwise SoftCacheEntries would not be disposed
                e.printStackTrace();
            }
        }

    }


    /** Drops all clean entries from the reference cache. */
    public void clearCache() {
        if(debug)
            System.err.println("DBCache: Clear cache");


        synchronized (_softHash) {
            //HARD entries are plain records and have nothing to clear
            if(_cacheType!=HARD){
                Iterator<ReferenceCacheEntry> iter = _softHash.valuesIterator();
                while (iter.hasNext()) {
                    ReferenceCacheEntry e = iter.next();
                    e.clear();
                }
            }
            _softHash.clear();
        }

    }



}


================================================
FILE: src/main/java/org/apache/jdbm/DBMaker.java
================================================
/*******************************************************************************
 * Copyright 2010 Cees De Groot, Alex Boisvert, Jan Kotek
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/

package org.apache.jdbm;

import javax.crypto.Cipher;
import javax.crypto.SecretKey;
import javax.crypto.SecretKeyFactory;
import javax.crypto.spec.IvParameterSpec;
import javax.crypto.spec.PBEKeySpec;
import javax.crypto.spec.SecretKeySpec;
import java.io.IOError;
import java.security.spec.KeySpec;

/**
 * Class used to configure and create DB. It uses builder pattern.
 */
public class DBMaker {

    private byte cacheType = DBCacheRef.MRU;
    private int mruCacheSize = 2048;

    private String location = null;

    private boolean disableTransactions = false;
    private boolean lockingDisabled = false;
    private boolean readonly = false;
    private String password = null;
    private boolean useAES256Bit = true;
    private boolean useRandomAccessFile = false;
    private boolean autoClearRefCacheOnLowMem = true;
    private boolean closeOnJVMExit = false;
    private boolean deleteFilesAfterCloseFlag = false;


    /**
     * Instances are obtained via the static factory methods
     * {@link #openFile(String)}, {@link #openMemory()} and {@link #openZip(String)}.
     */
    private DBMaker(){}

    /**
     * Creates new DBMaker and sets file to load data from.
     * @param file to load data from
     * @return new DBMaker
     */
    public static DBMaker openFile(String file){
        DBMaker m = new DBMaker();
        m.location = file;
        return m;
    }

    /**
     * Creates new DBMaker which uses in memory store. Data will be lost after JVM exits.
     * @return new DBMaker
     */
    public static DBMaker openMemory(){
        // location stays null; the store layer treats a null location as in-memory
        return new DBMaker();
    }

    /**
     * Open store in zip file
     *
     * @param zip file
     * @return new DBMaker
     */
    public static DBMaker openZip(String zip) {
        DBMaker m = new DBMaker();
        m.location = "$$ZIP$$://"+zip;
        return m;
    }

    /**
     * Extracts the zip file name from a location created by {@link #openZip(String)}.
     *
     * @param location store location, may be {@code null} for in-memory stores
     * @return zip file name, or {@code null} if the location is not a zip location
     */
    static String  isZipFileLocation(String location){
        String match = "$$ZIP$$://";
        // in-memory stores have no location at all; treat null as "not a zip"
        if( location == null){
            return null;
        }
        if( location.startsWith(match)){
            return location.substring(match.length());
        }
        return null;
    }

    /**
     * Use WeakReference for cache.
     * This cache does not improve performance much,
     * but prevents JDBM from creating multiple instances of the same object.
     *
     * @return this builder
     */
    public DBMaker enableWeakCache() {
        cacheType = DBCacheRef.WEAK;
        return this;
    }

    /**
     * Use SoftReference for cache.
     * This cache greatly improves performance if you have enough memory.
     * Instances in cache are Garbage Collected when memory gets low
     *
     * @return this builder
     */
    public DBMaker enableSoftCache() {
        cacheType = DBCacheRef.SOFT;
        return this;
    }

    /**
     * Use hard reference for cache.
     * This greatly improves performance if there is enough memory.
     * Hard cache has smaller memory overhead than Soft or Weak, because
     * reference objects and queue does not have to be maintained
     *
     * @return this builder
     */
    public DBMaker enableHardCache() {
        cacheType = DBCacheRef.HARD;
        return this;
    }


    /**
     * Use 'Most Recently Used' cache with limited size.
     * Oldest instances are released from cache when new instances are fetched.
     * This cache is not cleared by GC. Is good for systems with limited memory.
     * <p/>
     * Default size for MRU cache is 2048 records.
     *
     * @return this builder
     */
    public DBMaker enableMRUCache() {
        cacheType = DBCacheRef.MRU;
        return this;
    }

    /**
     *
     * Sets 'Most Recently Used' cache size. This cache is activated by default with size 2048
     *
     * @param cacheSize number of instances which will be kept in cache.
     * @return this builder
     * @throws IllegalArgumentException if cacheSize is negative
     */
    public DBMaker setMRUCacheSize(int cacheSize) {
        if (cacheSize < 0) throw new IllegalArgumentException("Cache size is smaller than zero");
        cacheType = DBCacheRef.MRU;
        mruCacheSize = cacheSize;
        return this;
    }

    /**
     * If reference (soft,weak or hard) cache is enabled,
     * GC may not release references fast enough (or not at all in case of hard cache).
     * So JDBM periodically checks amount of free heap memory.
     * If free memory is less than 25% or 10MB,
     * JDBM completely clears its reference cache to prevent possible memory issues.
     * <p>
     * Calling this method disables auto cache clearing when mem is low.
     * And of course it can cause some out of memory exceptions.
     *
     * @return this builder
     */
    public DBMaker disableCacheAutoClear(){
        this.autoClearRefCacheOnLowMem = false;
        return this;
    }


    /**
     * Enabled storage encryption using AES cipher. JDBM supports both 128 bit and 256 bit encryption if JRE provides it.
     * There are some restrictions on AES 256 bit and not all JREs have it  by default.
     * <p/>
     * Storage can not be read (decrypted), unless the key is provided next time it is opened
     *
     * @param password used to encrypt store
     * @param useAES256Bit if true strong AES 256 bit encryption is used. Otherwise more usual AES 128 bit is used.
     * @return this builder
     */
    public DBMaker enableEncryption(String password, boolean useAES256Bit) {
        this.password = password;
        this.useAES256Bit = useAES256Bit;
        return this;
    }




    /**
     * Make DB readonly.
     * Update/delete/insert operation will throw 'UnsupportedOperationException'
     *
     * @return this builder
     */
    public DBMaker readonly() {
        readonly = true;
        return this;
    }


    /**
     * Disable cache completely
     *
     * @return this builder
     */
    public DBMaker disableCache() {
        cacheType = DBCacheRef.NONE;
        return this;
    }


    /**
     * Option to disable transactions (to increase performance at the cost of potential data loss).
     * Transactions are enabled by default
     * <p/>
     * Switches off transactions for the record manager. This means
     * that a) a transaction log is not kept, and b) writes aren't
     * synch'ed after every update. Writes are cached in memory and then flushed
     * to disk every N writes. You may also flush writes manually by calling commit().
     * This is useful when batch inserting into a new database.
     * <p/>
     * When using this, database must be properly closed before JVM shutdown.
     * Failing to do so may and WILL corrupt store.
     *
     * @return this builder
     */
    public DBMaker disableTransactions() {
        this.disableTransactions = true;
        return this;
    }

    /**
     * Disable file system based locking (for file systems that do not support it).
     *
     * Locking is not supported by many remote or distributed file systems; such
     * as Lustre and NFS. Attempts to perform locks will result in an
     * IOException with the message "Function not implemented".
     *
     * Disabling locking will avoid this issue, though of course it comes with
     * all the issues of uncontrolled file access.
     *
     * @return this builder
     */
    public DBMaker disableLocking(){
        this.lockingDisabled = true;
        return this;
    }

    /**
     * By default JDBM uses mapped memory buffers to read from files.
     * But this may behave strangely on some platforms.
     * Safe alternative is to use old RandomAccessFile rather than mapped ByteBuffer.
     * This is typically slower (pages need to be copied into memory on every write).
     *
     * @return this builder
     */
    public DBMaker useRandomAccessFile(){
        this.useRandomAccessFile = true;
        return this;
    }


    /**
     * Registers shutdown hook and close database on JVM exit, if it was not already closed;
     *
     * @return this builder
     */
    public DBMaker closeOnExit(){
        this.closeOnJVMExit = true;
        return this;
    }

    /**
     * Delete all storage files after DB is closed
     *
     * @return this builder
     */
    public DBMaker deleteFilesAfterClose(){
        this.deleteFilesAfterCloseFlag = true;
        return this;
    }

    /**
     * Opens database with settings earlier specified in this builder.
     *
     * @return new DB
     * @throws java.io.IOError if db could not be opened
     */
    public DB make() {

        Cipher cipherIn = null;
        Cipher cipherOut = null;
        if (password != null) try {
            //initialize ciphers
            //this code comes from stack overflow
            //http://stackoverflow.com/questions/992019/java-256bit-aes-encryption/992413#992413
            byte[] salt = new byte[]{3, -34, 123, 53, 78, 121, -12, -1, 45, -12, -48, 89, 11, 100, 99, 8};

            SecretKeyFactory factory = SecretKeyFactory.getInstance("PBKDF2WithHmacSHA1");
            KeySpec spec = new PBEKeySpec(password.toCharArray(), salt, 1024, useAES256Bit?256:128);
            SecretKey tmp = factory.generateSecret(spec);
            SecretKey secret = new SecretKeySpec(tmp.getEncoded(), "AES");

            //NOTE(review): the hard-coded salt above doubles as the CBC IV below, so
            //every store is encrypted with the same key-derivation salt and IV.
            //This weakens confidentiality, but changing either value would make all
            //existing encrypted stores unreadable, so it is only flagged here.
            String transform = "AES/CBC/NoPadding";
            IvParameterSpec params = new IvParameterSpec(salt);

            cipherIn = Cipher.getInstance(transform);
            cipherIn.init(Cipher.ENCRYPT_MODE, secret, params);

            cipherOut = Cipher.getInstance(transform);
            cipherOut.init(Cipher.DECRYPT_MODE, secret, params);

            //sanity check: a round trip over one page must preserve the page size
            byte[] data = new byte[Storage.PAGE_SIZE];
            byte[] encData = cipherIn.doFinal(data);
            if (encData.length != Storage.PAGE_SIZE)
                throw new Error("Page size changed after encryption, make sure you use '/NoPadding'");
            byte[] data2 = cipherOut.doFinal(encData);
            for (int i = 0; i < data.length; i++) {
                if (data[i] != data2[i]) throw new Error("Encryption provided by JRE does not work");
            }

        } catch (Exception e) {
            throw new IOError(e);
        }

        DBAbstract db = null;

        //pick store implementation based on the configured cache type
        if (cacheType == DBCacheRef.MRU){
          db = new DBCacheMRU(location, readonly, disableTransactions, cipherIn, cipherOut,useRandomAccessFile,deleteFilesAfterCloseFlag, mruCacheSize,lockingDisabled);
        }else if( cacheType == DBCacheRef.SOFT || cacheType == DBCacheRef.HARD || cacheType == DBCacheRef.WEAK) {
            db = new DBCacheRef(location, readonly, disableTransactions, cipherIn, cipherOut,useRandomAccessFile,deleteFilesAfterCloseFlag, cacheType,autoClearRefCacheOnLowMem,lockingDisabled);
        } else if (cacheType == DBCacheRef.NONE) {
            db = new DBStore(location, readonly, disableTransactions, cipherIn, cipherOut,useRandomAccessFile,deleteFilesAfterCloseFlag,lockingDisabled);
        } else {
            throw new IllegalArgumentException("Unknown cache type: " + cacheType);
        }

        if(closeOnJVMExit){
            db.addShutdownHook();
        }

        return db;
    }

}


================================================
FILE: src/main/java/org/apache/jdbm/DBStore.java
================================================
/*******************************************************************************
 * Copyright 2010 Cees De Groot, Alex Boisvert, Jan Kotek
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *   http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 ******************************************************************************/

package org.apache.jdbm;


import javax.crypto.Cipher;
import java.io.*;
import java.util.*;
import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream;

/**
 * This class manages records, which are uninterpreted blobs of data. The
 * set of operations is simple and straightforward: you communicate with
 * the class using long "rowids" and byte[] data blocks. Rowids are returned
 * on inserts and you can stash them away someplace safe to be able to get
 * back to them. Data blocks can be as long as you wish, and may have
 * lengths different from the original when updating.
 * <p/>
 * Operations are synchronized, so that only one of them will happen
 * concurrently even if you hammer away from multiple threads. Operations
 * are made atomic by keeping a transaction log which is recovered after
 * a crash, so the operations specified by this interface all have ACID
 * properties.
 * <p/>
 * You identify a file by just the name. The package attaches <tt>.db</tt>
 * for the database file, and <tt>.lg</tt> for the transaction log. The
 * transaction log is synchronized regularly and then restarted, so don't
 * worry if you see the size going up and down.
 *
 * @author Alex Boisvert
 * @author Cees de Groot
 */
class DBStore
        extends DBAbstract {


    /**
     * Version of storage. It should be safe to open lower versions, but engine should throw exception
     * while opening new versions (as it contains unsupported features or serialization)
     */
    static final long STORE_FORMAT_VERSION = 1L;

    /**
     * Underlying file for store records.
     */
    private PageFile _file;

    /**
     * Page manager for physical manager.
     */
    private PageManager _pageman;

    /**
     * Physical row identifier manager.
     */
    private PhysicalRowIdManager _physMgr;

    /**
     * Indicated that store is opened for readonly operations
     * If true, store will throw UnsupportedOperationException when update/insert/delete operation is called
     */
    private final boolean readonly;
    final boolean transactionsDisabled;
    private final boolean deleteFilesAfterClose;

    private static final int AUTOCOMMIT_AFTER_N_PAGES = 1024 * 5;


    boolean commitInProgress = false;


    /**
     * cipher used for decryption, may be null
     */
    private Cipher cipherOut;
    /**
     * cipher used for encryption, may be null
     */
    private Cipher cipherIn;
    private boolean useRandomAccessFile;
    private boolean lockingDisabled;

    /**
     * Guard used by mutating operations: rejects the call when this store
     * was opened read-only.
     *
     * @throws UnsupportedOperationException if the store is read-only
     */
    void checkCanWrite() {
        if (!readonly)
            return;
        throw new UnsupportedOperationException("Could not write, store is opened as read-only");
    }




    /**
     * Logigal to Physical row identifier manager.
     */
    private LogicalRowIdManager _logicMgr;


    /**
     * Static debugging flag
     */
    public static final boolean DEBUG = false;


    
    static final long PREALOCATE_PHYS_RECID = Short.MIN_VALUE;
    
    static final Object PREALOCATE_OBJ = new Object();




    private final DataInputOutput buffer = new DataInputOutput();
    private boolean bufferInUse = false;


    private final String _filename;

    /**
     * Creates an unencrypted store backed by mapped memory buffers, with
     * files kept after close.
     * <p/>
     * Bug fix: the {@code lockingDisabled} argument was previously ignored
     * (a hard-coded {@code false} was forwarded), so callers could not
     * actually disable file locking through this constructor.
     *
     * @param filename base name of the store files
     * @param readonly open store in read-only mode
     * @param transactionDisabled disable the transaction log
     * @param lockingDisabled disable file-system based locking
     */
    public DBStore(String filename, boolean readonly, boolean transactionDisabled, boolean lockingDisabled) throws IOException {
        this(filename, readonly, transactionDisabled, null, null, false, false, lockingDisabled);
    }


    /**
     * Creates a record manager for the indicated file.
     * <p>
     * All arguments are stored into fields and the store is opened
     * immediately via {@code reopen()}.
     *
     * @param filename base name of the store files; semantics of a null or
     *                 special value are decided by the storage layer
     * @param readonly if true, update/insert/delete operations are rejected
     * @param transactionDisabled if true, no transaction log is kept
     * @param cipherIn cipher used for encryption, may be null
     * @param cipherOut cipher used for decryption, may be null
     * @param useRandomAccessFile use RandomAccessFile instead of mapped buffers
     * @param deleteFilesAfterClose delete all storage files when store is closed
     * @param lockingDisabled disable file-system based locking
     */
    public DBStore(String filename, boolean readonly, boolean transactionDisabled,
                   Cipher cipherIn, Cipher cipherOut, boolean useRandomAccessFile,
                   boolean deleteFilesAfterClose, boolean lockingDisabled){
        _filename = filename;
        this.readonly = readonly;
        this.transactionsDisabled = transactionDisabled;
        this.cipherIn = cipherIn;
        this.cipherOut = cipherOut;
        this.useRandomAccessFile = useRandomAccessFile;
        this.deleteFilesAfterClose = deleteFilesAfterClose;
        this.lockingDisabled = lockingDisabled;
        // opens page file, page manager and row-id managers; any IOException
        // raised there is handled inside reopen() (body continues past this chunk)
        reopen();
    }


    private void reopen()  {
        try{
        _file = new PageFile(_filename, readonly, transactionsDisabled, cipherIn, cipherOut,useRandomAccessFile,lockingDisabled);
        _pageman = new PageManager(_file);
        _physMgr = new PhysicalRowIdManager(_file, _pageman);

        _logicMgr = new LogicalRowIdManager(_file, _pageman);

        long versionNumber = getRoot(STORE_VERSION_NUMBER_ROOT);
        if (versionNumber > STORE_FORMAT_VERSION)
            throw new IOException("Unsupported version of store. Please update JDBM. Minimal supported ver:" + STORE_FORMAT_VERSION + ", store ver:" + versionNumber);
        if (!readonly)
            setRoot(STORE_VERSION_NUMBER_ROOT, STORE_FORMAT_VERSION);
        }catch(
Download .txt
gitextract_fufew3er/

├── LICENSE-2.0.html
├── README.md
├── pom.xml
└── src/
    ├── main/
    │   └── java/
    │       └── org/
    │           └── apache/
    │               └── jdbm/
    │                   ├── BTree.java
    │                   ├── BTreeLazyRecord.java
    │                   ├── BTreeMap.java
    │                   ├── BTreeNode.java
    │                   ├── BTreeSet.java
    │                   ├── DB.java
    │                   ├── DBAbstract.java
    │                   ├── DBCache.java
    │                   ├── DBCacheMRU.java
    │                   ├── DBCacheRef.java
    │                   ├── DBMaker.java
    │                   ├── DBStore.java
    │                   ├── DataInputOutput.java
    │                   ├── DataInputOutput2.java
    │                   ├── HTree.java
    │                   ├── HTreeBucket.java
    │                   ├── HTreeDirectory.java
    │                   ├── HTreeSet.java
    │                   ├── LinkedList2.java
    │                   ├── LogicalRowIdManager.java
    │                   ├── LongHashMap.java
    │                   ├── LongPacker.java
    │                   ├── Magic.java
    │                   ├── ObjectInputStream2.java
    │                   ├── ObjectOutputStream2.java
    │                   ├── PageFile.java
    │                   ├── PageIo.java
    │                   ├── PageManager.java
    │                   ├── PageTransactionManager.java
    │                   ├── PhysicalFreeRowIdManager.java
    │                   ├── PhysicalRowIdManager.java
    │                   ├── RecordHeader.java
    │                   ├── RecordListener.java
    │                   ├── SerialClassInfo.java
    │                   ├── Serialization.java
    │                   ├── SerializationHeader.java
    │                   ├── Serializer.java
    │                   ├── Storage.java
    │                   ├── StorageDisk.java
    │                   ├── StorageDiskMapped.java
    │                   ├── StorageMemory.java
    │                   ├── StorageZip.java
    │                   ├── Utils.java
    │                   └── packageXX.html
    └── test/
        └── java/
            └── org/
                └── apache/
                    └── jdbm/
                        ├── BTreeBench.java
                        ├── BTreeKeyCompressionTest.java
                        ├── BTreeLeadingValuePackTest.java
                        ├── BTreeMapNavigable2Test.java
                        ├── BTreeMapNavigableSubMapExclusiveTest.java
                        ├── BTreeMapNavigableSubMapInclusiveTest.java
                        ├── BTreeMapNavigableTest.java
                        ├── BTreeMapTest.java
                        ├── BTreeNodeTest.java
                        ├── BTreeSetTest.java
                        ├── BTreeTest.java
                        ├── ByteArrayComparator.java
                        ├── CompactTest.java
                        ├── ConcurrentBTreeReadTest.java
                        ├── ConcurrentMapInterfaceTest.java
                        ├── DBCacheMRUTest.java
                        ├── DBCacheTest.java
                        ├── DBMakerTest.java
                        ├── DBTest.java
                        ├── DataInputOutputTest.java
                        ├── DefragTest.java
                        ├── FileHeaderTest.java
                        ├── FileLockTest.java
                        ├── HTreeBucketTest.java
                        ├── HTreeDirectoryTest.java
                        ├── HTreeMapTest.java
                        ├── HTreeSetTest.java
                        ├── HTreeTest.java
                        ├── LinkedList2Test.java
                        ├── LogicalRowIdManagerTest.java
                        ├── LongHashMapTest.java
                        ├── LongTreeMap.java
                        ├── MapInterfaceTest.java
                        ├── ObjectOutputStream2Test.java
                        ├── PageFileTest.java
                        ├── PageIoTest.java
                        ├── PageManagerTest.java
                        ├── PageTransactionManagerTest.java
                        ├── PhysicalFreeRowIdManagerTest.java
                        ├── PhysicalRowIdManagerTest.java
                        ├── RecordHeaderTest.java
                        ├── RollbackTest.java
                        ├── SerialClassInfoTest.java
                        ├── Serialization2Bean.java
                        ├── Serialization2Test.java
                        ├── SerializationHeaderTest.java
                        ├── SerializationTest.java
                        ├── Serialized2DerivedBean.java
                        ├── StorageDiskMappedTest.java
                        ├── StorageZipTest.java
                        ├── StreamCorrupted.java
                        ├── TestCaseWithTestFile.java
                        ├── TestInsertPerf.java
                        ├── TestInsertUpdate.java
                        ├── TestIssues.java
                        ├── TestLargeData.java
                        ├── TestLazyRecordsInTree.java
                        ├── TestRollback.java
                        ├── TestStress.java
                        ├── UtilTT.java
                        ├── UtilsTest.java
                        └── junk/
                            ├── HugeData.java
                            ├── MappedBufferGrow.java
                            ├── MappedBufferVersusRaf.java
                            └── RandomInsertLongs.java
Download .txt
SYMBOL INDEX (1459 symbols across 107 files)

FILE: src/main/java/org/apache/jdbm/BTree.java
  class BTree (line 57) | class BTree<K, V> {
    method getKeySerializer (line 117) | public Serializer<K> getKeySerializer() {
    method getValueSerializer (line 124) | public Serializer<V> getValueSerializer() {
    method BTree (line 167) | public BTree() {
    method createInstance (line 177) | @SuppressWarnings("unchecked")
    method createInstance (line 187) | public static <K, V> BTree<K, V> createInstance(DBAbstract db,
    method load (line 217) | @SuppressWarnings("unchecked")
    method getLock (line 235) | public ReadWriteLock getLock() {
    method insert (line 251) | public V insert(final K key, final V value,
    method remove (line 330) | public V remove(K key)
    method get (line 378) | public V get(K key)
    method findGreaterOrEqual (line 405) | public BTreeTuple<K, V> findGreaterOrEqual(K key)
    method browse (line 435) | @SuppressWarnings("unchecked")
    method browse (line 463) | @SuppressWarnings("unchecked")
    method getRecid (line 483) | public long getRecid() {
    method getRoot (line 491) | BTreeNode<K, V> getRoot()
    method readExternal (line 505) | static BTree readExternal(DataInput in, Serialization ser)
    method writeExternal (line 521) | public void writeExternal(DataOutput out)
    method defrag (line 540) | public static void defrag(long recid, DBStore r1, DBStore r2) throws I...
    method getNext (line 568) | public boolean getNext(BTreeTuple tuple) {
    method getPrevious (line 572) | public boolean getPrevious(BTreeTuple tuple) {
    method remove (line 576) | public void remove(Object key) {
    method addRecordListener (line 587) | public void addRecordListener(RecordListener<K, V> listener) {
    method removeRecordListener (line 597) | public void removeRecordListener(RecordListener<K, V> listener) {
    method getRecordManager (line 604) | public DBAbstract getRecordManager() {
    method getComparator (line 609) | public Comparator<K> getComparator() {
    method clear (line 616) | public void clear()
    method dumpChildNodeRecIDs (line 637) | void dumpChildNodeRecIDs(List<Long> out) throws IOException {
    method hasValues (line 645) | public boolean hasValues() {
    type BTreeTupleBrowser (line 655) | static interface BTreeTupleBrowser<K, V> {
      method getNext (line 663) | boolean getNext(BTree.BTreeTuple<K, V> tuple) throws IOException;
      method getPrevious (line 671) | boolean getPrevious(BTree.BTreeTuple<K, V> tuple) throws IOException;
      method remove (line 679) | void remove(K key) throws IOException;
    class BTreeTuple (line 686) | static final class BTreeTuple<K, V> {
      method BTreeTuple (line 692) | BTreeTuple() {
      method BTreeTuple (line 696) | BTreeTuple(K key, V value) {

FILE: src/main/java/org/apache/jdbm/BTreeLazyRecord.java
  class BTreeLazyRecord (line 11) | class BTreeLazyRecord<E> {
    method BTreeLazyRecord (line 18) | BTreeLazyRecord(DBAbstract db, long recid, Serializer<E> serializer) {
    method get (line 25) | E get() {
    method delete (line 35) | void delete() {
    method serialize (line 51) | public void serialize(DataOutput out, Object obj) throws IOException {
    method deserialize (line 56) | public Object deserialize(DataInput in) throws IOException, ClassNotFo...
    method fastDeser (line 62) | static Object fastDeser(DataInputOutput in, Serializer serializer, int...

FILE: src/main/java/org/apache/jdbm/BTreeMap.java
  class BTreeMap (line 32) | class BTreeMap<K, V> extends AbstractMap<K, V> implements ConcurrentNavi...
    method BTreeMap (line 46) | public BTreeMap(BTree<K, V> tree, boolean readonly) {
    method BTreeMap (line 50) | protected BTreeMap(BTree<K, V> tree, boolean readonly, K fromKey, bool...
    method entrySet (line 59) | @Override
    method newEntry (line 68) | protected Entry<K, V> newEntry(K k, V v) {
    method add (line 80) | public boolean add(java.util.Map.Entry<K, V> e) {
    method contains (line 95) | @SuppressWarnings("unchecked")
    method iterator (line 113) | public Iterator<java.util.Map.Entry<K, V>> iterator() {
    method remove (line 175) | @SuppressWarnings("unchecked")
    method size (line 202) | public int size() {
    method clear (line 206) | public void clear(){
    method inBounds (line 220) | public boolean inBounds(K e) {
    method get (line 240) | @SuppressWarnings("unchecked")
    method remove (line 256) | @SuppressWarnings("unchecked")
    method put (line 276) | public V put(K key, V value) {
    method clear (line 291) | public void clear(){
    method containsKey (line 295) | @SuppressWarnings("unchecked")
    method comparator (line 312) | public Comparator<? super K> comparator() {
    method firstKey (line 316) | public K firstKey() {
    method lastKey (line 330) | public K lastKey() {
    method headMap (line 350) | public ConcurrentNavigableMap<K, V> headMap(K toKey2, boolean inclusiv...
    method headMap (line 357) | public ConcurrentNavigableMap<K, V> headMap(K toKey) {
    method lowerEntry (line 362) | public Entry<K, V> lowerEntry(K key) {
    method lowerKey (line 367) | public K lowerKey(K key) {
    method floorEntry (line 384) | public Entry<K, V> floorEntry(K key) {
    method floorKey (line 390) | public K floorKey(K key) {
    method ceilingEntry (line 413) | public Entry<K, V> ceilingEntry(K key) {
    method ceilingKey (line 418) | public K ceilingKey(K key) {
    method higherEntry (line 434) | public Entry<K, V> higherEntry(K key) {
    method higherKey (line 439) | public K higherKey(K key) {
    method firstEntry (line 456) | public Entry<K, V> firstEntry() {
    method lastEntry (line 461) | public Entry<K, V> lastEntry() {
    method pollFirstEntry (line 466) | public Entry<K, V> pollFirstEntry() {
    method pollLastEntry (line 473) | public Entry<K, V> pollLastEntry() {
    method descendingMap (line 480) | public ConcurrentNavigableMap<K, V> descendingMap() {
    method keySet (line 486) | public NavigableSet<K> keySet() {
    method navigableKeySet (line 490) | public NavigableSet<K> navigableKeySet() {
    method descendingKeySet (line 496) | public NavigableSet<K> descendingKeySet() {
    method tailMap (line 502) | public ConcurrentNavigableMap<K, V> tailMap(K fromKey) {
    method tailMap (line 507) | public ConcurrentNavigableMap<K, V> tailMap(K fromKey2, boolean inclus...
    method subMap (line 514) | public ConcurrentNavigableMap<K, V> subMap(K fromKey, boolean fromIncl...
    method subMap (line 522) | public ConcurrentNavigableMap<K, V> subMap(K fromKey, K toKey) {
    method getTree (line 527) | public BTree<K, V> getTree() {
    method addRecordListener (line 532) | public void addRecordListener(RecordListener<K, V> listener) {
    method getRecordManager (line 536) | public DBAbstract getRecordManager() {
    method removeRecordListener (line 540) | public void removeRecordListener(RecordListener<K, V> listener) {
    method size (line 545) | public int size() {
    method putIfAbsent (line 562) | public V putIfAbsent(K key, V value) {
    method remove (line 574) | public boolean remove(Object key, Object value) {
    method replace (line 588) | public boolean replace(K key, V oldValue, V newValue) {
    method replace (line 601) | public V replace(K key, V value) {

FILE: src/main/java/org/apache/jdbm/BTreeNode.java
  class BTreeNode (line 43) | final class BTreeNode<K, V>
    method getBTree (line 105) | public BTree<K, V> getBTree() {
    method BTreeNode (line 112) | public BTreeNode() {
    method BTreeNode (line 120) | @SuppressWarnings("unchecked")
    method BTreeNode (line 144) | @SuppressWarnings("unchecked")
    method BTreeNode (line 168) | @SuppressWarnings("unchecked")
    method getLargestKey (line 196) | K getLargestKey() {
    method isEmpty (line 204) | boolean isEmpty() {
    method isFull (line 216) | boolean isFull() {
    method find (line 229) | BTree.BTreeTupleBrowser<K, V> find(int height, final K key, final bool...
    method findValue (line 254) | V findValue(int height, K key)
    method findFirst (line 287) | BTree.BTreeTupleBrowser<K, V> findFirst()
    method delete (line 300) | void delete()
    method insert (line 347) | InsertResult<K, V> insert(int height, K key, final V value, final bool...
    method remove (line 501) | RemoveResult<K, V> remove(int height, K key)
    method findChildren (line 715) | private byte findChildren(final K key, final boolean inclusive) {
    method insertEntry (line 739) | private static <K, V> void insertEntry(BTreeNode<K, V> node, int index,
    method insertChild (line 758) | private static <K, V> void insertChild(BTreeNode<K, V> node, int index,
    method removeEntry (line 776) | private static <K, V> void removeEntry(BTreeNode<K, V> node, int index) {
    method setEntry (line 793) | private static <K, V> void setEntry(BTreeNode<K, V> node, int index, K...
    method setChild (line 802) | private static <K, V> void setChild(BTreeNode<K, V> node, int index, K...
    method copyEntries (line 811) | private static <K, V> void copyEntries(BTreeNode<K, V> source, int ind...
    method copyChildren (line 821) | private static <K, V> void copyChildren(BTreeNode<K, V> source, int in...
    method loadNode (line 831) | private BTreeNode<K, V> loadNode(long recid)
    method compare (line 840) | private  final int compare(final K value1, final K value2) {
    method dump (line 860) | private void dump(int height) {
    method dumpRecursive (line 882) | void dumpRecursive(int height, int level)
    method deserialize (line 899) | @SuppressWarnings("unchecked")
    method serialize (line 964) | public void serialize(DataOutput oos, BTreeNode<K, V> obj)
    method readValues (line 995) | private void readValues(DataInputOutput ois, BTreeNode<K, V> node) thr...
    method writeValues (line 1020) | private void writeValues(DataOutput oos, BTreeNode<K, V> node) throws ...
    method readKeys (line 1064) | private K[] readKeys(DataInput ois, final int firstUse) throws IOExcep...
    method writeKeys (line 1141) | @SuppressWarnings("unchecked")
    method defrag (line 1292) | public void defrag(DBStore r1, DBStore r2) throws IOException {
    class InsertResult (line 1309) | static final class InsertResult<K, V> {
    class RemoveResult (line 1327) | static final class RemoveResult<K, V> {
    class Browser (line 1345) | static final class Browser<K, V>
      method Browser (line 1369) | Browser(BTreeNode<K, V> node, byte index) {
      method getNext (line 1375) | public boolean getNext(BTree.BTreeTuple<K, V> tuple)
      method getPrevious (line 1403) | public boolean getPrevious(BTree.BTreeTuple<K, V> tuple)
      method remove (line 1434) | public void remove(K key) throws IOException {
    method dumpChildNodeRecIDs (line 1467) | void dumpChildNodeRecIDs(List out, int height)
    method leadingValuePackRead (line 1487) | static byte[] leadingValuePackRead(DataInput in, byte[] previous, int ...
    method leadingValuePackWrite (line 1516) | static void leadingValuePackWrite(DataOutput out, byte[] buf, byte[] p...
    method loadLastChildNode (line 1545) | BTreeNode<K, V> loadLastChildNode() throws IOException {

FILE: src/main/java/org/apache/jdbm/BTreeSet.java
  class BTreeSet (line 27) | class BTreeSet<E> extends AbstractSet<E> implements NavigableSet<E> {
    method BTreeSet (line 35) | BTreeSet(BTreeMap<E, Object> map) {
    method add (line 39) | public boolean add(E object) {
    method addAll (line 44) | public boolean addAll(Collection<? extends E> collection) {
    method clear (line 49) | public void clear() {
    method comparator (line 53) | public Comparator<? super E> comparator() {
    method contains (line 58) | public boolean contains(Object object) {
    method isEmpty (line 63) | public boolean isEmpty() {
    method lower (line 68) | public E lower(E e) {
    method floor (line 72) | public E floor(E e) {
    method ceiling (line 76) | public E ceiling(E e) {
    method higher (line 80) | public E higher(E e) {
    method pollFirst (line 84) | public E pollFirst() {
    method pollLast (line 89) | public E pollLast() {
    method iterator (line 94) | public Iterator<E> iterator() {
    method descendingSet (line 112) | public NavigableSet<E> descendingSet() {
    method descendingIterator (line 116) | public Iterator<E> descendingIterator() {
    method subSet (line 120) | public NavigableSet<E> subSet(E fromElement, boolean fromInclusive, E ...
    method headSet (line 124) | public NavigableSet<E> headSet(E toElement, boolean inclusive) {
    method tailSet (line 128) | public NavigableSet<E> tailSet(E fromElement, boolean inclusive) {
    method remove (line 133) | public boolean remove(Object object) {
    method size (line 137) | public int size() {
    method first (line 142) | public E first() {
    method last (line 147) | public E last() {
    method subSet (line 152) | public SortedSet<E> subSet(E start, E end) {
    method headSet (line 163) | public SortedSet<E> headSet(E end) {
    method tailSet (line 175) | public SortedSet<E> tailSet(E start) {

FILE: src/main/java/org/apache/jdbm/DB.java
  type DB (line 17) | public interface DB {
    method close (line 23) | void close();
    method isClosed (line 26) | boolean isClosed();
    method clearCache (line 32) | void clearCache();
    method defrag (line 48) | void defrag(boolean sortCollections);
    method commit (line 54) | void commit();
    method rollback (line 61) | void rollback();
    method calculateStatistics (line 70) | String calculateStatistics();
    method copyToZip (line 77) | void copyToZip(String zipFile);
    method getHashMap (line 90) | <K, V> ConcurrentMap<K, V> getHashMap(String name);
    method createHashMap (line 98) | <K, V> ConcurrentMap<K, V> createHashMap(String name);
    method createHashMap (line 113) | <K, V> ConcurrentMap<K, V> createHashMap(String name, Serializer<K> ke...
    method createHashSet (line 115) | <K> Set<K> createHashSet(String name);
    method getHashSet (line 117) | <K> Set<K> getHashSet(String name);
    method createHashSet (line 119) | <K> Set<K> createHashSet(String name, Serializer<K> keySerializer);
    method getTreeMap (line 121) | <K, V> ConcurrentNavigableMap<K, V> getTreeMap(String name);
    method createTreeMap (line 131) | <K extends Comparable, V> NavigableMap<K, V> createTreeMap(String name);
    method createTreeMap (line 144) | <K, V> ConcurrentNavigableMap<K, V> createTreeMap(String name,
    method getTreeSet (line 147) | <K> NavigableSet<K> getTreeSet(String name);
    method createTreeSet (line 149) | <K> NavigableSet<K> createTreeSet(String name);
    method createTreeSet (line 151) | <K> NavigableSet<K> createTreeSet(String name, Comparator<K> keyCompar...
    method createLinkedList (line 153) | <K> List<K> createLinkedList(String name);
    method createLinkedList (line 155) | <K> List<K> createLinkedList(String name, Serializer<K> serializer);
    method getLinkedList (line 157) | <K> List<K> getLinkedList(String name);
    method getCollections (line 160) | Map<String,Object> getCollections();
    method deleteCollection (line 163) | void deleteCollection(String name);
    method collectionSize (line 171) | long collectionSize(Object collection);

FILE: src/main/java/org/apache/jdbm/DBAbstract.java
  class DBAbstract (line 31) | abstract class DBAbstract implements DB {
    method insert (line 69) | abstract <A> long insert(A obj, Serializer<A> serializer,boolean disab...
    method delete (line 77) | abstract void delete(long recid) throws IOException;
    method update (line 89) | abstract <A> void update(long recid, A obj, Serializer<A> serializer)
    method fetch (line 101) | abstract <A> A fetch(long recid, Serializer<A> serializer)
    method fetch (line 113) | abstract <A> A fetch(long recid, Serializer<A> serializer, boolean dis...
    method insert (line 117) | public long insert(Object obj) throws IOException {
    method update (line 122) | public void update(long recid, Object obj) throws IOException {
    method fetch (line 127) | synchronized public <A> A fetch(long recid) throws IOException {
    method getHashMap (line 131) | synchronized public <K, V> ConcurrentMap<K, V> getHashMap(String name) {
    method createHashMap (line 152) | synchronized public <K, V> ConcurrentMap<K, V> createHashMap(String na...
    method createHashMap (line 157) | public synchronized <K, V> ConcurrentMap<K, V> createHashMap(String na...
    method getHashSet (line 171) | public synchronized <K> Set<K> getHashSet(String name) {
    method createHashSet (line 193) | public synchronized <K> Set<K> createHashSet(String name) {
    method createHashSet (line 197) | public synchronized <K> Set<K> createHashSet(String name, Serializer<K...
    method getTreeMap (line 213) | synchronized public <K, V> ConcurrentNavigableMap<K, V> getTreeMap(Str...
    method createTreeMap (line 233) | synchronized public <K extends Comparable, V> ConcurrentNavigableMap<K...
    method createTreeMap (line 238) | public synchronized <K, V> ConcurrentNavigableMap<K, V> createTreeMap(...
    method getTreeSet (line 255) | public synchronized <K> NavigableSet<K> getTreeSet(String name) {
    method createTreeSet (line 276) | public synchronized <K> NavigableSet<K> createTreeSet(String name) {
    method createTreeSet (line 281) | public synchronized <K> NavigableSet<K> createTreeSet(String name, Com...
    method createLinkedList (line 297) | synchronized public <K> List<K> createLinkedList(String name) {
    method createLinkedList (line 301) | synchronized public <K> List<K> createLinkedList(String name, Serializ...
    method getLinkedList (line 319) | synchronized public <K> List<K> getLinkedList(String name) {
    method getCollectionInstance (line 336) | private synchronized  Object getCollectionInstance(String name){
    method assertNameNotExist (line 347) | private void assertNameNotExist(String name) throws IOException {
    method getNamedObject (line 359) | synchronized protected long getNamedObject(String name) throws IOExcep...
    method setNamedObject (line 376) | synchronized protected void setNamedObject(String name, long recid) th...
    method getCollections (line 394) | synchronized public Map<String,Object> getCollections(){
    method deleteCollection (line 427) | synchronized public void deleteCollection(String name){
    method serialize (line 470) | public void serialize(DataOutput out, Serialization obj) throws IOExce...
    method deserialize (line 475) | public Serialization deserialize(DataInput in) throws IOException, Cla...
    method defaultSerializer (line 482) | public synchronized Serializer defaultSerializer() {
    method checkNotClosed (line 506) | final protected void checkNotClosed(){
    method setRoot (line 510) | protected abstract void setRoot(byte root, long recid);
    method getRoot (line 511) | protected abstract long getRoot(byte root);
    method collectionSize (line 514) | synchronized public long collectionSize(Object collection){
    method addShutdownHook (line 533) | void addShutdownHook(){
    method close (line 540) | public void close(){
    class ShutdownCloseThread (line 551) | private static class ShutdownCloseThread extends Thread{
      method ShutdownCloseThread (line 555) | ShutdownCloseThread(){
      method run (line 559) | public void run(){
    method rollback (line 568) | synchronized public void   rollback() {

FILE: src/main/java/org/apache/jdbm/DBCache.java
  class DBCache (line 12) | abstract class DBCache extends DBStore{
    class DirtyCacheEntry (line 22) | static final class DirtyCacheEntry {
    method DBCache (line 41) | public DBCache(String filename, boolean readonly, boolean transactionD...
    method defaultSerializer (line 52) | @Override
    method needsAutoCommit (line 59) | @Override
    method insert (line 67) | public synchronized <A> long insert(final A obj, final Serializer<A> s...
    method commit (line 97) | public synchronized void commit() {
    method rollback (line 107) | public synchronized  void rollback(){
    method compare (line 115) | final public int compare(DirtyCacheEntry o1, DirtyCacheEntry o2) {
    method updateCacheEntries (line 125) | protected void updateCacheEntries() {

FILE: src/main/java/org/apache/jdbm/DBCacheMRU.java
  class DBCacheMRU (line 31) | class DBCacheMRU
    method DBCacheMRU (line 70) | public DBCacheMRU(String filename, boolean readonly, boolean transacti...
    method fetch (line 84) | public synchronized <A> A fetch(long recid, Serializer<A> serializer, ...
    method delete (line 93) | public synchronized void delete(long recid)
    method update (line 112) | public synchronized <A> void update(final long recid, final A obj, fin...
    method fetch (line 147) | public synchronized <A> A fetch(long recid, Serializer<A> serializer)
    method close (line 178) | public synchronized void close() {
    method rollback (line 190) | public synchronized void rollback() {
    method cacheGet (line 207) | protected CacheEntry cacheGet(long key) {
    method cachePut (line 225) | protected void cachePut(final long recid, final Object value) throws I...
    method addEntry (line 254) | protected void addEntry(CacheEntry entry) {
    method removeEntry (line 271) | protected void removeEntry(CacheEntry entry) {
    method purgeEntry (line 297) | protected CacheEntry purgeEntry() {
    class CacheEntry (line 313) | @SuppressWarnings("unchecked")
      method CacheEntry (line 324) | CacheEntry(long recid, Object obj) {
    method clearCache (line 332) | public void clearCache() {

FILE: src/main/java/org/apache/jdbm/DBCacheRef.java
  class DBCacheRef (line 36) | public class DBCacheRef
    method DBCacheRef (line 79) | public DBCacheRef(String filename, boolean readonly, boolean transacti...
    method clearCacheIfLowOnMem (line 104) | void clearCacheIfLowOnMem() {
    method fetch (line 131) | public synchronized <A> A fetch(long recid, Serializer<A> serializer, ...
    method delete (line 140) | public synchronized void delete(long recid)
    method update (line 160) | public synchronized <A> void update(final long recid, A obj, Serialize...
    method fetch (line 186) | public synchronized <A> A fetch(long recid, Serializer<A> serializer)
    method close (line 233) | public synchronized void close() {
    method rollback (line 245) | public synchronized void rollback() {
    method isCacheEntryDirty (line 267) | protected boolean isCacheEntryDirty(DirtyCacheEntry entry) {
    method setCacheEntryDirty (line 271) | protected void setCacheEntryDirty(DirtyCacheEntry entry, boolean dirty) {
    type ReferenceCacheEntry (line 281) | interface ReferenceCacheEntry {
      method getRecid (line 282) | long getRecid();
      method clear (line 284) | void clear();
      method get (line 286) | Object get();
    class SoftCacheEntry (line 289) | @SuppressWarnings("unchecked")
      method getRecid (line 293) | public long getRecid() {
      method SoftCacheEntry (line 297) | SoftCacheEntry(long recid, Object obj, ReferenceQueue queue) {
    class WeakCacheEntry (line 303) | @SuppressWarnings("unchecked")
      method getRecid (line 307) | public long getRecid() {
      method WeakCacheEntry (line 311) | WeakCacheEntry(long recid, Object obj, ReferenceQueue queue) {
    class SoftRunnable (line 326) | static final class SoftRunnable implements Runnable {
      method SoftRunnable (line 331) | public SoftRunnable(DBCacheRef db,
      method run (line 337) | public void run() {
    method clearCache (line 381) | public void clearCache() {

FILE: src/main/java/org/apache/jdbm/DBMaker.java
  class DBMaker (line 31) | public class DBMaker {
    method DBMaker (line 49) | private DBMaker(){}
    method openFile (line 56) | public static DBMaker openFile(String file){
    method openMemory (line 66) | public static DBMaker openMemory(){
    method openZip (line 76) | public static DBMaker openZip(String zip) {
    method isZipFileLocation (line 82) | static String  isZipFileLocation(String location){
    method enableWeakCache (line 97) | public DBMaker enableWeakCache() {
    method enableSoftCache (line 109) | public DBMaker enableSoftCache() {
    method enableHardCache (line 122) | public DBMaker enableHardCache() {
    method enableMRUCache (line 137) | public DBMaker enableMRUCache() {
    method setMRUCacheSize (line 149) | public DBMaker setMRUCacheSize(int cacheSize) {
    method disableCacheAutoClear (line 168) | public DBMaker disableCacheAutoClear(){
    method enableEncryption (line 184) | public DBMaker enableEncryption(String password, boolean useAES256Bit) {
    method readonly (line 199) | public DBMaker readonly() {
    method disableCache (line 210) | public DBMaker disableCache() {
    method disableTransactions (line 231) | public DBMaker disableTransactions() {
    method disableLocking (line 248) | public DBMaker disableLocking(){
    method useRandomAccessFile (line 261) | public DBMaker useRandomAccessFile(){
    method closeOnExit (line 272) | public DBMaker closeOnExit(){
    method deleteFilesAfterClose (line 282) | public DBMaker deleteFilesAfterClose(){
    method make (line 293) | public DB make() {

FILE: src/main/java/org/apache/jdbm/DBStore.java
  class DBStore (line 48) | class DBStore
    method checkCanWrite (line 98) | void checkCanWrite() {
    method DBStore (line 132) | public DBStore(String filename, boolean readonly, boolean transactionD...
    method DBStore (line 143) | public DBStore(String filename, boolean readonly, boolean transactionD...
    method reopen (line 158) | private void reopen()  {
    method close (line 182) | public synchronized void close() {
    method isClosed (line 201) | public boolean isClosed() {
    method insert (line 206) | public synchronized <A> long insert(final A obj, final Serializer<A> s...
    method needsAutoCommit (line 233) | boolean needsAutoCommit() {
    method insert2 (line 239) | private <A> long insert2(A obj, Serializer<A> serializer, DataInputOut...
    method delete (line 267) | public synchronized void delete(long logRowId)
    method update (line 295) | public synchronized <A> void update(long recid, A obj, Serializer<A> s...
    method update2 (line 324) | private <A> void update2(long logRecid, final A obj, final Serializer<...
    method fetch (line 351) | public synchronized <A> A fetch(final long recid, final Serializer<A> ...
    method fetch (line 372) | public synchronized <A> A fetch(long recid, Serializer<A> serializer, ...
    method fetch2 (line 378) | private <A> A fetch2(long recid, final Serializer<A> serializer, final...
    method fetchRaw (line 406) | byte[] fetchRaw(long recid) throws IOException {
    method getRoot (line 419) | public synchronized long getRoot(final byte id){
    method setRoot (line 426) | public synchronized void setRoot(final byte id, final long rowid){
    method commit (line 436) | public synchronized void commit() {
    method rollback (line 457) | public synchronized void rollback() {
    method copyToZip (line 474) | public void copyToZip(String zipFile) {
    method clearCache (line 556) | public synchronized void clearCache() {
    method statisticsCountPages (line 561) | private long statisticsCountPages(short pageType) throws IOException {
    method calculateStatistics (line 576) | public synchronized String calculateStatistics() {
    method defrag (line 668) | public synchronized void defrag(boolean sortCollections) {
    method forceInsert (line 849) | void forceInsert(long logicalRowId, byte[] data) throws IOException {
    method countRecords (line 866) | long countRecords() throws IOException {
    method compressRecid (line 906) | static long compressRecid(final long recid) {
    method decompressRecid (line 919) | static long decompressRecid(final long recid) {

FILE: src/main/java/org/apache/jdbm/DataInputOutput.java
  class DataInputOutput (line 13) | class DataInputOutput implements DataInput, DataOutput, ObjectInput, Obj...
    method DataInputOutput (line 20) | public DataInputOutput() {
    method DataInputOutput (line 24) | public DataInputOutput(byte[] data) {
    method getBuf (line 29) | public byte[] getBuf() {
    method getPos (line 33) | public int getPos() {
    method reset (line 38) | public void reset() {
    method resetForReading (line 44) | public void resetForReading() {
    method reset (line 49) | public void reset(byte[] b) {
    method toByteArray (line 55) | public byte[] toByteArray() {
    method available (line 61) | public int available() {
    method readFully (line 66) | public void readFully(byte[] b) throws IOException {
    method readFully (line 70) | public void readFully(byte[] b, int off, int len) throws IOException {
    method skipBytes (line 75) | public int skipBytes(int n) throws IOException {
    method readBoolean (line 80) | public boolean readBoolean() throws IOException {
    method readByte (line 84) | public byte readByte() throws IOException {
    method readUnsignedByte (line 88) | public int readUnsignedByte() throws IOException {
    method readShort (line 92) | public short readShort() throws IOException {
    method readUnsignedShort (line 99) | public int readUnsignedShort() throws IOException {
    method readChar (line 104) | public char readChar() throws IOException {
    method readInt (line 108) | public int readInt() throws IOException {
    method readLong (line 117) | public long readLong() throws IOException {
    method readFloat (line 130) | public float readFloat() throws IOException {
    method readDouble (line 134) | public double readDouble() throws IOException {
    method readLine (line 138) | public String readLine() throws IOException {
    method readUTF (line 142) | public String readUTF() throws IOException {
    method ensureAvail (line 149) | private void ensureAvail(int n) {
    method write (line 158) | public void write(int b) throws IOException {
    method write (line 163) | public void write(byte[] b) throws IOException {
    method write (line 167) | public void write(byte[] b, int off, int len) throws IOException {
    method writeBoolean (line 173) | public void writeBoolean(boolean v) throws IOException {
    method writeByte (line 178) | public void writeByte(int v) throws IOException {
    method writeShort (line 183) | public void writeShort(int v) throws IOException {
    method writeChar (line 190) | public void writeChar(int v) throws IOException {
    method writeInt (line 194) | public void writeInt(int v) throws IOException {
    method writeLong (line 203) | public void writeLong(long v) throws IOException {
    method writeFloat (line 215) | public void writeFloat(float v) throws IOException {
    method writeDouble (line 220) | public void writeDouble(double v) throws IOException {
    method writeBytes (line 225) | public void writeBytes(String s) throws IOException {
    method writeChars (line 229) | public void writeChars(String s) throws IOException {
    method writeUTF (line 233) | public void writeUTF(String s) throws IOException {
    method writeFromByteBuffer (line 238) | public void writeFromByteBuffer(ByteBuffer b, int offset, int length) {
    method readObject (line 251) | public Object readObject() throws ClassNotFoundException, IOException {
    method read (line 258) | public int read() throws IOException {
    method read (line 263) | public int read(byte[] b) throws IOException {
    method read (line 269) | public int read(byte[] b, int off, int len) throws IOException {
    method skip (line 275) | public long skip(long n) throws IOException {
    method close (line 281) | public void close() throws IOException {
    method writeObject (line 286) | public void writeObject(Object obj) throws IOException {
    method flush (line 292) | public void flush() throws IOException {

FILE: src/main/java/org/apache/jdbm/HTree.java
  class HTree (line 33) | class HTree<K, V> extends AbstractMap<K, V> implements ConcurrentMap<K, ...
    method deserialize (line 37) | public Object deserialize(DataInput ds2) throws IOException {
    method serialize (line 64) | public void serialize(DataOutput out, Object obj) throws IOException {
    method getKeySerializer (line 110) | public Serializer<K> getKeySerializer() {
    method getValueSerializer (line 114) | public Serializer<V> getValueSerializer() {
    method HTree (line 128) | public HTree(DBAbstract db, Serializer<K> keySerializer, Serializer<V>...
    method HTree (line 144) | public HTree(DBAbstract db,long rootRecid, Serializer<K> keySerializer...
    method setPersistenceContext (line 153) | void setPersistenceContext(DBAbstract db) {
    method put (line 158) | public V put(K key, V value) {
    method get (line 194) | public V get(Object key) {
    method remove (line 210) | public V remove(Object key) {
    method containsKey (line 244) | public boolean containsKey(Object key) {
    method clear (line 252) | public void clear() {
    method keys (line 272) | public Iterator<K> keys()
    method getRecordManager (line 283) | public DBAbstract getRecordManager() {
    method addRecordListener (line 292) | public void addRecordListener(RecordListener<K, V> listener) {
    method removeRecordListener (line 302) | public void removeRecordListener(RecordListener<K, V> listener) {
    method entrySet (line 309) | public Set<Entry<K, V>> entrySet() {
    method newEntry (line 315) | protected Entry<K, V> newEntry(K k, V v) {
    method add (line 328) | public boolean add(java.util.Map.Entry<K, V> e) {
    method contains (line 344) | @SuppressWarnings("unchecked")
    method iterator (line 357) | public Iterator<java.util.Map.Entry<K, V>> iterator() {
    method remove (line 384) | @SuppressWarnings("unchecked")
    method size (line 411) | @Override
    method getRoot (line 433) | HTreeDirectory<K, V> getRoot() {
    method deserialize (line 444) | public static HTree deserialize(DataInput is, Serialization ser) throw...
    method serialize (line 453) | void serialize(DataOutput out) throws IOException {
    method defrag (line 461) | static void defrag(Long recid, DBStore r1, DBStore r2) throws IOExcept...
    method size (line 483) | public int size(){
    method hasValues (line 487) | public boolean hasValues() {
    method putIfAbsent (line 491) | public V putIfAbsent(K key, V value) {
    method remove (line 503) | public boolean remove(Object key, Object value) {
    method replace (line 517) | public boolean replace(K key, V oldValue, V newValue) {
    method replace (line 530) | public V replace(K key, V value) {

FILE: src/main/java/org/apache/jdbm/HTreeBucket.java
  class HTreeBucket (line 43) | final class HTreeBucket<K, V> {
    method HTreeBucket (line 72) | public HTreeBucket(HTree<K, V> tree) {
    method HTreeBucket (line 81) | public HTreeBucket(HTree<K, V> tree, byte level) {
    method getElementCount (line 96) | public int getElementCount() {
    method isLeaf (line 104) | public boolean isLeaf() {
    method hasRoom (line 112) | public boolean hasRoom() {
    method addElement (line 129) | public V addElement(K key, V value) {
    method removeElement (line 165) | public V removeElement(K key) {
    method getValue (line 206) | public V getValue(K key) {
    method getKeys (line 236) | ArrayList<K> getKeys() {
    method getValues (line 252) | ArrayList<V> getValues() {
    method writeExternal (line 262) | public void writeExternal(DataOutput out)
    method readExternal (line 316) | public void readExternal(DataInputOutput in) throws IOException, Class...

FILE: src/main/java/org/apache/jdbm/HTreeDirectory.java
  class HTreeDirectory (line 27) | final class HTreeDirectory<K, V> {
    method HTreeDirectory (line 82) | public HTreeDirectory(HTree<K, V> tree) {
    method HTreeDirectory (line 91) | HTreeDirectory(HTree<K, V> tree, byte depth) {
    method setPersistenceContext (line 104) | void setPersistenceContext(long recid) {
    method getRecid (line 112) | long getRecid() {
    method isEmpty (line 121) | boolean isEmpty() {
    method get (line 141) | V get(K key)
    method getRecid (line 165) | private long getRecid(int hash) {
    method putRecid (line 170) | private void putRecid(int hash, long recid) {
    method put (line 189) | Object put(final Object key, final Object value)
    method remove (line 267) | Object remove(Object key) throws IOException {
    method hashCode (line 314) | private int hashCode(Object key) {
    method hashMask (line 334) | int hashMask() {
    method keys (line 347) | Iterator<K> keys()
    method values (line 355) | Iterator<V> values()
    method writeExternal (line 361) | public void writeExternal(DataOutput out)
    method readExternal (line 395) | public void readExternal(DataInputOutput in)
    method defrag (line 415) | public void defrag(DBStore r1, DBStore r2) throws IOException, ClassNo...
    method deleteAllChildren (line 430) | void deleteAllChildren() throws IOException {
    class HDIterator (line 451) | class HDIterator<A> implements Iterator<A> {
      method HDIterator (line 494) | HDIterator(boolean iterateKeys)
      method next2 (line 512) | public A next2() {
      method prepareNext (line 537) | private void prepareNext() throws IOException {
      method hasNext (line 589) | public boolean hasNext() {
      method next (line 594) | public A next() {
      method remove (line 604) | public void remove() {

FILE: src/main/java/org/apache/jdbm/HTreeSet.java
  class HTreeSet (line 10) | class HTreeSet<E> extends AbstractSet<E> {
    method HTreeSet (line 14) | HTreeSet(HTree map) {
    method iterator (line 18) | public Iterator<E> iterator() {
    method size (line 22) | public int size() {
    method isEmpty (line 27) | public boolean isEmpty() {
    method contains (line 31) | public boolean contains(Object o) {
    method add (line 35) | public boolean add(E e) {
    method remove (line 39) | public boolean remove(Object o) {
    method clear (line 43) | public void clear() {

FILE: src/main/java/org/apache/jdbm/LinkedList2.java
  class LinkedList2 (line 30) | class LinkedList2<E> extends AbstractSequentialList<E> {
    class Root (line 42) | static final class Root{
    method serialize (line 50) | public void serialize(DataOutput out, Root obj) throws IOException {
    method deserialize (line 56) | public Root deserialize(DataInput in) throws IOException, ClassNotFoun...
    method LinkedList2 (line 73) | LinkedList2(DBAbstract db,long rootRecid, Serializer<E> valueSerialize...
    method LinkedList2 (line 80) | LinkedList2(DBAbstract db, Serializer<E> valueSerializer) throws IOExc...
    method setPersistenceContext (line 89) | void setPersistenceContext(DBAbstract db) {
    method listIterator (line 94) | public ListIterator<E> listIterator(int index) {
    method getRoot (line 118) | Root getRoot(){
    method size (line 129) | public int size() {
    method descendingIterator (line 140) | public Iterator<E> descendingIterator() {
    method add (line 144) | public boolean add(Object value) {
    method fetch (line 175) | private Entry<E> fetch(long recid) {
    method deserialize (line 189) | static LinkedList2 deserialize(DataInput is, Serialization ser) throws...
    method serialize (line 197) | void serialize(DataOutput out) throws IOException {
    method serialize (line 205) | public void serialize(DataOutput out, Entry e) throws IOException {
    method deserialize (line 214) | public Entry<E> deserialize(DataInput in) throws IOException, ClassNot...
    class Entry (line 224) | static class Entry<E> {
      method Entry (line 230) | public Entry(long prev, long next, E value) {
    class Iter (line 237) | private final class Iter implements ListIterator<E> {
      method hasNext (line 247) | public boolean hasNext() {
      method next (line 252) | public E next() {
      method hasPrevious (line 265) | public boolean hasPrevious() {
      method previous (line 269) | public E previous() {
      method nextIndex (line 279) | public int nextIndex() {
      method previousIndex (line 283) | public int previousIndex() {
      method remove (line 287) | public void remove() {
      method set (line 365) | public void set(E value) {
      method add (line 391) | public void add(E value) {
      method checkForComodification (line 438) | final void checkForComodification() {
    method defrag (line 447) | static void defrag(long recid, DBStore r1, DBStore r2) throws IOExcept...

FILE: src/main/java/org/apache/jdbm/LogicalRowIdManager.java
  class LogicalRowIdManager (line 25) | final class LogicalRowIdManager {
    method LogicalRowIdManager (line 45) | LogicalRowIdManager(PageFile file, PageManager pageman) throws IOExcep...
    method insert (line 56) | long insert(final long physloc) throws IOException {
    method forceInsert (line 87) | void forceInsert(final long logicalRowId, final long physLoc) throws I...
    method delete (line 98) | void delete(final long logicalrowid) throws IOException {
    method update (line 113) | void update(final long logicalrowid, final long physloc) throws IOExce...
    method fetch (line 127) | long fetch(long logicalrowid) throws IOException {
    method commit (line 143) | void commit() throws IOException {
    method clearFreeRecidsInTransaction (line 175) | private void clearFreeRecidsInTransaction() {
    method rollback (line 181) | void rollback() throws IOException {
    method getFreeSlot (line 190) | long getFreeSlot() throws IOException {
    method putFreeSlot (line 228) | void putFreeSlot(long rowid) throws IOException {

FILE: src/main/java/org/apache/jdbm/LongHashMap.java
  class LongHashMap (line 34) | class LongHashMap<V> implements Serializable {
    class Entry (line 49) | static final class Entry<V> implements  Serializable{
      method Entry (line 58) | Entry(long theKey) {
    class HashMapIterator (line 67) | static class HashMapIterator<V> implements Iterator<V> {
      method HashMapIterator (line 79) | HashMapIterator(LongHashMap<V> hm) {
      method hasNext (line 83) | public boolean hasNext() {
      method next (line 106) | public V next() {
      method remove (line 128) | public void remove() {
    method newElementArray (line 157) | @SuppressWarnings("unchecked")
    method LongHashMap (line 167) | public LongHashMap() {
    method LongHashMap (line 178) | public LongHashMap(int capacity) {
    method clear (line 201) | public void clear() {
    method computeMaxSize (line 221) | private void computeMaxSize() {
    method get (line 235) | public V get(final long key) {
    method isEmpty (line 262) | public boolean isEmpty() {
    method put (line 291) | public V put(final long key, final V value) {
    method createHashedEntry (line 317) | Entry<V> createHashedEntry(final long key, final int index) {
    method rehash (line 333) | void rehash(final int capacity) {
    method rehash (line 351) | void rehash() {
    method remove (line 364) | public V remove(final long key) {
    method removeEntry (line 376) | Entry<V> removeEntry(final long key) {
    method size (line 410) | public int size() {
    method valuesIterator (line 417) | public Iterator<V> valuesIterator() {
    method powerHash (line 422) | static final private int powerHash(final long key){

FILE: src/main/java/org/apache/jdbm/LongPacker.java
  class LongPacker (line 25) | public final class LongPacker {
    method packLong (line 36) | static public void packLong(DataOutput os, long value) throws IOExcept...
    method unpackLong (line 57) | static public long unpackLong(DataInput is) throws IOException {
    method packInt (line 80) | static public void packInt(DataOutput os, int value) throws IOException {
    method unpackInt (line 94) | static public int unpackInt(DataInput is) throws IOException {

FILE: src/main/java/org/apache/jdbm/Magic.java
  type Magic (line 22) | interface Magic {

FILE: src/main/java/org/apache/jdbm/ObjectInputStream2.java
  class ObjectInputStream2 (line 12) | public class ObjectInputStream2 extends DataInputStream implements Objec...
    method ObjectInputStream2 (line 15) | public ObjectInputStream2(InputStream in) {
    method readObject (line 19) | public Object readObject() throws ClassNotFoundException, IOException {

FILE: src/main/java/org/apache/jdbm/ObjectOutputStream2.java
  class ObjectOutputStream2 (line 9) | public class ObjectOutputStream2 extends DataOutputStream implements Obj...
    method ObjectOutputStream2 (line 11) | public ObjectOutputStream2(OutputStream out) {
    method writeObject (line 15) | public void writeObject(Object obj) throws IOException {

FILE: src/main/java/org/apache/jdbm/PageFile.java
  class PageFile (line 37) | final class PageFile {
    method PageFile (line 84) | PageFile(String fileName, boolean readonly, boolean transactionsDisabl...
    method PageFile (line 108) | public PageFile(String filename) throws IOException {
    method get (line 122) | PageIo get(long pageId) throws IOException {
    method release (line 181) | void release(final long pageId, final boolean isDirty) throws IOExcept...
    method release (line 199) | void release(final PageIo page) throws IOException {
    method discard (line 215) | void discard(PageIo page) {
    method commit (line 224) | void commit() throws IOException {
    method rollback (line 277) | void rollback() throws IOException {
    method close (line 300) | void close() throws IOException {
    method forceClose (line 335) | void forceClose() throws IOException {
    method showList (line 345) | private void showList(Iterator<PageIo> i) {
    method synch (line 357) | void synch(PageIo node) throws IOException {
    method releaseFromTransaction (line 371) | void releaseFromTransaction(PageIo node)
    method sync (line 379) | void sync() throws IOException {
    method getDirtyPageCount (line 383) | public int getDirtyPageCount() {
    method deleteAllFiles (line 387) | public void deleteAllFiles() throws IOException {

FILE: src/main/java/org/apache/jdbm/PageIo.java
  class PageIo (line 37) | final class PageIo {
    method PageIo (line 51) | public PageIo() {
    method PageIo (line 59) | PageIo(long pageId, byte[] data) {
    method PageIo (line 64) | public PageIo(long pageId, ByteBuffer data) {
    method ensureHeapBuffer (line 72) | void ensureHeapBuffer(){
    method getData (line 85) | ByteBuffer getData() {
    method getPageId (line 92) | long getPageId() {
    method setDirty (line 99) | void setDirty() {
    method setClean (line 113) | void setClean() {
    method isDirty (line 120) | boolean isDirty() {
    method isInTransaction (line 128) | boolean isInTransaction() {
    method incrementTransactionCount (line 137) | void incrementTransactionCount() {
    method decrementTransactionCount (line 145) | void decrementTransactionCount() {
    method readByte (line 156) | public byte readByte(int pos) {
    method writeByte (line 163) | public void writeByte(int pos, byte value) {
    method readShort (line 171) | public short readShort(int pos) {
    method writeShort (line 178) | public void writeShort(int pos, short value) {
    method readInt (line 186) | public int readInt(int pos) {
    method writeInt (line 193) | public void writeInt(int pos, int value) {
    method readLong (line 201) | public long readLong(int pos) {
    method writeLong (line 208) | public void writeLong(int pos, long value) {
    method readSixByteLong (line 217) | public long readSixByteLong(int pos) {
    method writeSixByteLong (line 235) | public void writeSixByteLong(int pos, long value) {
    method toString (line 258) | public String toString() {
    method readExternal (line 264) | public void readExternal(DataInputStream in, Cipher cipherOut) throws ...
    method writeExternal (line 278) | public void writeExternal(DataOutput out, Cipher cipherIn) throws IOEx...
    method getByteArray (line 284) | public byte[] getByteArray() {
    method writeByteArray (line 293) | public void writeByteArray(byte[] buf, int srcOffset, int offset, int ...
    method fileHeaderCheckHead (line 300) | public void fileHeaderCheckHead(boolean isNew){
    method fileHeaderGetFirstOf (line 313) | long fileHeaderGetFirstOf(int list) {
    method fileHeaderSetFirstOf (line 320) | void fileHeaderSetFirstOf(int list, long value) {
    method fileHeaderGetLastOf (line 327) | long fileHeaderGetLastOf(int list) {
    method fileHeaderSetLastOf (line 334) | void fileHeaderSetLastOf(int list, long value) {
    method fileHeaderOffsetOfFirst (line 342) | private short fileHeaderOffsetOfFirst(int list) {
    method fileHeaderOffsetOfLast (line 349) | private short fileHeaderOffsetOfLast(int list) {
    method fileHeaderGetRoot (line 364) | long fileHeaderGetRoot(final int root) {
    method fileHeaderSetRoot (line 373) | void fileHeaderSetRoot(final int root, final long rowid) {
    method pageHeaderMagicOk (line 382) | boolean pageHeaderMagicOk() {
    method pageHeaderParanoiaMagicOk (line 390) | protected void pageHeaderParanoiaMagicOk() {
    method pageHeaderGetMagic (line 395) | short pageHeaderGetMagic() {
    method pageHeaderGetNext (line 399) | long pageHeaderGetNext() {
    method pageHeaderSetNext (line 404) | void pageHeaderSetNext(long next) {
    method pageHeaderGetPrev (line 409) | long pageHeaderGetPrev() {
    method pageHeaderSetPrev (line 415) | void pageHeaderSetPrev(long prev) {
    method pageHeaderSetType (line 419) | void pageHeaderSetType(short type) {
    method pageHeaderGetLocation (line 423) | long pageHeaderGetLocation(final short pos){
    method pageHeaderSetLocation (line 428) | void pageHeaderSetLocation(short pos, long value) {
    method dataPageGetFirst (line 434) | short dataPageGetFirst() {
    method dataPageSetFirst (line 438) | void dataPageSetFirst(short value) {

FILE: src/main/java/org/apache/jdbm/PageManager.java
  class PageManager (line 26) | final class PageManager {
    method PageManager (line 35) | PageManager(PageFile file) throws IOException {
    method allocate (line 49) | long allocate(short type) throws IOException {
    method free (line 118) | void free(short type, long recid) throws IOException {
    method getNext (line 162) | long getNext(long page) throws IOException {
    method getPrev (line 173) | long getPrev(long page) throws IOException {
    method getFirst (line 184) | long getFirst(short type) throws IOException {
    method getLast (line 191) | long getLast(short type) throws IOException {
    method commit (line 201) | void commit() throws IOException {
    method rollback (line 216) | void rollback() throws IOException {
    method close (line 230) | void close() throws IOException {
    method getHeaderBufData (line 240) | ByteBuffer getHeaderBufData() {
    method getFileHeader (line 244) | public PageIo getFileHeader() {

FILE: src/main/java/org/apache/jdbm/PageTransactionManager.java
  class PageTransactionManager (line 38) | final class PageTransactionManager {
    method PageTransactionManager (line 67) | PageTransactionManager(PageFile owner, Storage storage, Cipher cipherI...
    method synchronizeLog (line 84) | public void synchronizeLog()
    method synchronizeLogFromMemory (line 95) | private void synchronizeLogFromMemory() throws IOException {
    method open (line 131) | private void open() throws IOException {
    method recover (line 142) | private void recover() throws IOException {
    method synchronizePages (line 174) | private void synchronizePages(Iterable<PageIo> pages, boolean fromCore)
    method setClean (line 192) | private void setClean(ArrayList<PageIo> pages)
    method discardPages (line 202) | private void discardPages(ArrayList<PageIo> pages)
    method start (line 218) | void start() throws IOException {
    method add (line 230) | void add(PageIo page) throws IOException {
    method commit (line 238) | void commit() throws IOException {
    method sync (line 258) | private void sync() throws IOException {
    method shutdown (line 266) | void shutdown() throws IOException {
    method close (line 274) | private void close() throws IOException {
    method forceClose (line 284) | void forceClose() throws IOException {
    method synchronizeLogFromDisk (line 294) | void synchronizeLogFromDisk() throws IOException {
    method compare (line 316) | public int compare(PageIo page1, PageIo page2) {

FILE: src/main/java/org/apache/jdbm/PhysicalFreeRowIdManager.java
  class PhysicalFreeRowIdManager (line 26) | final class PhysicalFreeRowIdManager {
    method PhysicalFreeRowIdManager (line 63) | PhysicalFreeRowIdManager(PageFile file, PageManager pageman) throws IO...
    method getFreeRecord (line 68) | long getFreeRecord(final int size) throws IOException {
    method sizeToRootOffset (line 112) | static final  int sizeToRootOffset(int size) {
    method putFreeRecord (line 120) | void putFreeRecord(final long rowid, final int size) throws IOException {
    method commit (line 130) | public void commit() throws IOException {
    method rollback (line 191) | public void rollback() {
    method clearFreeInTrans (line 195) | private void clearFreeInTrans() {
    method getRootPage (line 202) | final PageIo getRootPage() throws IOException {

FILE: src/main/java/org/apache/jdbm/PhysicalRowIdManager.java
  class PhysicalRowIdManager (line 26) | final class PhysicalRowIdManager {
    method PhysicalRowIdManager (line 40) | PhysicalRowIdManager(PageFile file, PageManager pageManager) throws IO...
    method insert (line 51) | long insert(final byte[] data, final int start, final int length) thro...
    method update (line 65) | long update(long rowid, final byte[] data, final int start, final int ...
    method fetch (line 89) | void fetch(final DataInputOutput out, final long rowid) throws IOExcep...
    method alloc (line 134) | private long alloc(int size) throws IOException {
    method allocNew (line 147) | private long allocNew(int size, long start) throws IOException {
    method free (line 262) | void free(final long id) throws IOException {
    method write (line 307) | private void write(final long rowid, final  byte[] data,final  int sta...
    method rollback (line 344) | void rollback() throws IOException {
    method commit (line 351) | void commit() throws IOException {

FILE: src/main/java/org/apache/jdbm/RecordHeader.java
  class RecordHeader (line 31) | final class RecordHeader {
    method getCurrentSize (line 47) | static int getCurrentSize(final PageIo page, final short pos) {
    method setCurrentSize (line 57) | static void setCurrentSize(final PageIo page, final short pos, int val...
    method getAvailableSize (line 71) | static int getAvailableSize(final PageIo page, final short pos) {
    method setAvailableSize (line 78) | static void setAvailableSize(final PageIo page, final short pos, int v...
    method convertAvailSize (line 88) | static short convertAvailSize(final int recordSize) {
    method deconvertAvailSize (line 103) | static int deconvertAvailSize(final short converted) {
    method roundAvailableSize (line 116) | static int roundAvailableSize(int value) {

FILE: src/main/java/org/apache/jdbm/RecordListener.java
  type RecordListener (line 30) | interface RecordListener<K, V> {
    method recordInserted (line 32) | void recordInserted(K key, V value) throws IOException;
    method recordUpdated (line 34) | void recordUpdated(K key, V oldValue, V newValue) throws IOException;
    method recordRemoved (line 36) | void recordRemoved(K key, V value) throws IOException;

FILE: src/main/java/org/apache/jdbm/SerialClassInfo.java
  class SerialClassInfo (line 17) | abstract class SerialClassInfo {
    method serialize (line 21) | public void serialize(DataOutput out, ArrayList<ClassInfo> obj) throws...
    method deserialize (line 38) | public ArrayList<ClassInfo> deserialize(DataInput in) throws IOExcepti...
    method SerialClassInfo (line 62) | public SerialClassInfo(DBAbstract db, long serialClassInfoRecid, Array...
    class ClassInfo (line 72) | static class ClassInfo {
      method ClassInfo (line 84) | ClassInfo(final String name, final FieldInfo[] fields, final boolean...
      method getName (line 96) | public String getName() {
      method getFields (line 100) | public FieldInfo[] getFields() {
      method getField (line 104) | public FieldInfo getField(String name) {
      method getFieldId (line 108) | public int getFieldId(String name) {
      method getField (line 115) | public FieldInfo getField(int serialId) {
      method addFieldInfo (line 119) | public int addFieldInfo(FieldInfo field) {
      method getObjectStreamFields (line 126) | public ObjectStreamField[] getObjectStreamFields() {
      method setObjectStreamFields (line 130) | public void setObjectStreamFields(ObjectStreamField[] objectStreamFi...
    class FieldInfo (line 141) | static class FieldInfo {
      method FieldInfo (line 153) | public FieldInfo(String name, boolean primitive, String type, Class ...
      method initSetter (line 167) | private void initSetter() {
      method initGetter (line 203) | private void initGetter() {
      method FieldInfo (line 239) | public FieldInfo(ObjectStreamField sf, Class clazz) {
      method getName (line 243) | public String getName() {
      method isPrimitive (line 247) | public boolean isPrimitive() {
      method getType (line 251) | public String getType() {
      method firstCharCap (line 255) | private String firstCharCap(String s) {
    method registerClass (line 268) | public void registerClass(Class clazz) throws IOException {
    method getFields (line 293) | private ObjectStreamField[] getFields(Class clazz) {
    method assertClassSerializable (line 322) | private void assertClassSerializable(Class clazz) throws NotSerializab...
    method getFieldValue (line 330) | public Object getFieldValue(String fieldName, Object object) {
    method getFieldValue (line 340) | public Object getFieldValue(FieldInfo fieldInfo, Object object) {
    method setFieldValue (line 358) | public void setFieldValue(String fieldName, Object object, Object valu...
    method setFieldValue (line 368) | public void setFieldValue(FieldInfo fieldInfo, Object object, Object v...
    method containsClass (line 387) | public boolean containsClass(Class clazz) {
    method getClassId (line 391) | public int getClassId(Class clazz) {
    method writeObject (line 399) | public void writeObject(DataOutput out, Object obj, FastArrayList obje...
    method readObject (line 447) | public Object readObject(DataInput in, FastArrayList objectStack) thro...
    method createInstance (line 507) | private static <T> T createInstance(Class<T> clazz, Class<? super T> p...
    method deserialize (line 527) | protected abstract Object deserialize(DataInput in, FastArrayList obje...
    method serialize (line 529) | protected abstract void serialize(DataOutput out, Object fieldValue, F...

FILE: src/main/java/org/apache/jdbm/Serialization.java
  class Serialization (line 40) | @SuppressWarnings("unchecked")
    method Serialization (line 52) | Serialization(DBAbstract db, long serialClassInfoRecid, ArrayList<Clas...
    method Serialization (line 56) | public Serialization() {
    method serialize (line 65) | public byte[] serialize(Object obj)
    method isSerializable (line 74) | boolean isSerializable(Object obj) {
    method serialize (line 85) | public void serialize(final DataOutput out, final Object obj) throws I...
    method serialize (line 90) | public void serialize(final DataOutput out, final Object obj, FastArra...
    method serializeString (line 404) | static void serializeString(DataOutput out, String obj) throws IOExcep...
    method serializeUUID (line 414) | private void serializeUUID(DataOutput out, UUID uuid) throws IOException
    method serializeMap (line 420) | private void serializeMap(int header, DataOutput out, Object obj, Fast...
    method serializeCollection (line 430) | private void serializeCollection(int header, DataOutput out, Object ob...
    method serializeByteArrayInt (line 440) | private void serializeByteArrayInt(DataOutput out, byte[] b) throws IO...
    method writeLongArray (line 445) | private void writeLongArray(DataOutput da, long[] obj) throws IOExcept...
    method writeIntArray (line 483) | private void writeIntArray(DataOutput da, int[] obj) throws IOException {
    method writeInteger (line 525) | private void writeInteger(DataOutput da, final int val) throws IOExcep...
    method writeLong (line 560) | private void writeLong(DataOutput da, final long val) throws IOExcepti...
    method deserialize (line 602) | public Object deserialize(byte[] buf) throws ClassNotFoundException, I...
    method deserializeString (line 612) | static String deserializeString(DataInput buf) throws IOException {
    method deserialize (line 622) | public Object deserialize(DataInput is) throws IOException, ClassNotFo...
    method deserialize (line 626) | public Object deserialize(DataInput is, FastArrayList objectStack) thr...
    method deserializeClass (line 975) | private Class deserializeClass(DataInput is) throws IOException, Class...
    method deserializeArrayByteInt (line 982) | private byte[] deserializeArrayByteInt(DataInput is) throws IOException {
    method deserializeArrayLongL (line 990) | private long[] deserializeArrayLongL(DataInput is) throws IOException {
    method deserializeArrayLongI (line 999) | private long[] deserializeArrayLongI(DataInput is) throws IOException {
    method deserializeArrayLongS (line 1008) | private long[] deserializeArrayLongS(DataInput is) throws IOException {
    method deserializeArrayLongB (line 1017) | private long[] deserializeArrayLongB(DataInput is) throws IOException {
    method deserializeArrayIntIInt (line 1029) | private int[] deserializeArrayIntIInt(DataInput is) throws IOException {
    method deserializeArrayIntSInt (line 1038) | private int[] deserializeArrayIntSInt(DataInput is) throws IOException {
    method deserializeArrayIntBInt (line 1047) | private int[] deserializeArrayIntBInt(DataInput is) throws IOException {
    method deserializeArrayIntPack (line 1059) | private int[] deserializeArrayIntPack(DataInput is) throws IOException {
    method deserializeArrayLongPack (line 1071) | private long[] deserializeArrayLongPack(DataInput is) throws IOExcepti...
    method deserializeUUID (line 1083) | private UUID deserializeUUID(DataInput is) throws IOException
    method deserializeArrayIntB255 (line 1088) | private int[] deserializeArrayIntB255(DataInput is) throws IOException {
    method deserializeArrayObject (line 1103) | private Object[] deserializeArrayObject(DataInput is, FastArrayList ob...
    method deserializeArrayObjectPackedLong (line 1117) | private Object[] deserializeArrayObjectPackedLong(DataInput is) throws...
    method deserializeArrayList (line 1131) | private ArrayList<Object> deserializeArrayList(DataInput is, FastArray...
    method deserializeArrayListPackedLong (line 1141) | private ArrayList<Object> deserializeArrayListPackedLong(DataInput is)...
    method deserializeLinkedList (line 1158) | private java.util.LinkedList deserializeLinkedList(DataInput is, FastA...
    method deserializeVector (line 1168) | private Vector<Object> deserializeVector(DataInput is, FastArrayList o...
    method deserializeHashSet (line 1178) | private HashSet<Object> deserializeHashSet(DataInput is, FastArrayList...
    method deserializeLinkedHashSet (line 1188) | private LinkedHashSet<Object> deserializeLinkedHashSet(DataInput is, F...
    method deserializeTreeSet (line 1198) | private TreeSet<Object> deserializeTreeSet(DataInput is, FastArrayList...
    method deserializeTreeMap (line 1212) | private TreeMap<Object, Object> deserializeTreeMap(DataInput is, FastA...
    method deserializeHashMap (line 1226) | private HashMap<Object, Object> deserializeHashMap(DataInput is, FastA...
    method deserializeIdentityHashMap (line 1236) | private IdentityHashMap<Object, Object> deserializeIdentityHashMap(Dat...
    method deserializeLinkedHashMap (line 1246) | private LinkedHashMap<Object, Object> deserializeLinkedHashMap(DataInp...
    method deserializeHashtable (line 1257) | private Hashtable<Object, Object> deserializeHashtable(DataInput is, F...
    method deserializeProperties (line 1268) | private Properties deserializeProperties(DataInput is, FastArrayList o...
    class FastArrayList (line 1282) | static class FastArrayList<K> {
      method get (line 1287) | K get(int index) {
      method add (line 1292) | void add(K o) {
      method size (line 1302) | int size() {
      method identityIndexOf (line 1319) | int identityIndexOf(Object obj) {

FILE: src/main/java/org/apache/jdbm/SerializationHeader.java
  class SerializationHeader (line 7) | final class SerializationHeader {

FILE: src/main/java/org/apache/jdbm/Serializer.java
  type Serializer (line 28) | public interface Serializer<A> {
    method serialize (line 36) | public void serialize(DataOutput out, A obj)
    method deserialize (line 48) | public A deserialize(DataInput in)

FILE: src/main/java/org/apache/jdbm/Storage.java
  type Storage (line 9) | interface Storage {
    method write (line 37) | void write(long pageNumber, ByteBuffer data) throws IOException;
    method read (line 39) | ByteBuffer read(long pageNumber) throws IOException;
    method forceClose (line 41) | void forceClose() throws IOException;
    method isReadonly (line 43) | boolean isReadonly();
    method readTransactionLog (line 45) | DataInputStream readTransactionLog();
    method deleteTransactionLog (line 47) | void deleteTransactionLog();
    method sync (line 49) | void sync() throws IOException;
    method openTransactionLog (line 51) | DataOutputStream openTransactionLog() throws IOException;
    method deleteAllFiles (line 53) | void deleteAllFiles() throws IOException;

FILE: src/main/java/org/apache/jdbm/StorageDisk.java
  class StorageDisk (line 13) | class StorageDisk implements Storage {
    method StorageDisk (line 26) | public StorageDisk(String fileName,boolean readonly, boolean lockingDi...
    method getRaf (line 43) | RandomAccessFile getRaf(long pageNumber) throws IOException {
    method write (line 65) | public void write(long pageNumber, ByteBuffer data) throws IOException {
    method read (line 80) | public ByteBuffer read(long pageNumber) throws IOException {
    method openTransactionLog (line 107) | public DataOutputStream openTransactionLog() throws IOException {
    method deleteAllFiles (line 122) | public void deleteAllFiles() {
    method sync (line 131) | public void sync() throws IOException {
    method forceClose (line 140) | public void forceClose() throws IOException {
    method readTransactionLog (line 154) | public DataInputStream readTransactionLog() {
    method deleteTransactionLog (line 183) | public void deleteTransactionLog() {
    method isReadonly (line 189) | public boolean isReadonly() {

FILE: src/main/java/org/apache/jdbm/StorageDiskMapped.java
  class StorageDiskMapped (line 17) | class StorageDiskMapped implements Storage {
    method StorageDiskMapped (line 43) | public StorageDiskMapped(String fileName, boolean readonly, boolean tr...
    method getChannel (line 61) | private FileChannel getChannel(long pageNumber) throws IOException {
    method makeFileName (line 81) | static String makeFileName(String fileName, long pageNumber, int fileN...
    method write (line 86) | public void write(long pageNumber, ByteBuffer data) throws IOException {
    method unmapBuffer (line 123) | private void unmapBuffer(MappedByteBuffer b) {
    method read (line 131) | public ByteBuffer read(long pageNumber) throws IOException {
    method forceClose (line 157) | public void forceClose() throws IOException {
    method sync (line 174) | public void sync() throws IOException {
    method openTransactionLog (line 181) | public DataOutputStream openTransactionLog() throws IOException {
    method deleteAllFiles (line 196) | public void deleteAllFiles() throws IOException {
    method deleteFiles (line 201) | static void deleteFiles(String fileName) {
    method readTransactionLog (line 219) | public DataInputStream readTransactionLog() {
    method deleteTransactionLog (line 248) | public void deleteTransactionLog() {
    method isReadonly (line 254) | public boolean isReadonly() {

FILE: src/main/java/org/apache/jdbm/StorageMemory.java
  class StorageMemory (line 10) | class StorageMemory implements Storage {
    method StorageMemory (line 15) | StorageMemory(boolean transactionsDisabled){
    method read (line 20) | public ByteBuffer read(long pageNumber) throws IOException {
    method write (line 37) | public void write(long pageNumber, ByteBuffer data) throws IOException {
    method sync (line 56) | public void sync() throws IOException {
    method forceClose (line 60) | public void forceClose() throws IOException {
    method readTransactionLog (line 66) | public DataInputStream readTransactionLog() {
    method deleteTransactionLog (line 80) | public void deleteTransactionLog() {
    method openTransactionLog (line 84) | public DataOutputStream openTransactionLog() throws IOException {
    method deleteAllFiles (line 90) | public void deleteAllFiles() throws IOException {
    method isReadonly (line 93) | public boolean isReadonly() {

FILE: src/main/java/org/apache/jdbm/StorageZip.java
  class StorageZip (line 14) | class StorageZip implements Storage {
    method StorageZip (line 21) | StorageZip(String zipFile) throws IOException {
    method write (line 27) | public void write(long pageNumber, ByteBuffer data) throws IOException {
    method read (line 31) | public ByteBuffer read(long pageNumber) throws IOException {
    method forceClose (line 44) | public void forceClose() throws IOException {
    method readTransactionLog (line 49) | public DataInputStream readTransactionLog() {
    method deleteTransactionLog (line 53) | public void deleteTransactionLog() {
    method sync (line 57) | public void sync() throws IOException {
    method openTransactionLog (line 61) | public DataOutputStream openTransactionLog() throws IOException {
    method deleteAllFiles (line 65) | public void deleteAllFiles() throws IOException {
    method isReadonly (line 68) | public boolean isReadonly() {

FILE: src/main/java/org/apache/jdbm/Utils.java
  class Utils (line 14) | class Utils {
    method encrypt (line 23) | public static byte[] encrypt(Cipher cipherIn, ByteBuffer b) {
    method encrypt (line 32) | public static byte[] encrypt(Cipher cipherIn, byte[] b) {
    method compare (line 49) | public int compare(Comparable o1, Comparable o2) {
    method formatSpaceUsage (line 55) | static String formatSpaceUsage(long size) {
    method allZeros (line 67) | static boolean allZeros(byte[] b) {
    method max (line 75) | static <E> E max(E e1, E e2, Comparator comp){
    method min (line 84) | static <E> E min(E e1, E e2, Comparator comp){
    method serialize (line 96) | public void serialize(DataOutput out, Object obj) throws IOException {
    method deserialize (line 100) | public Object deserialize(DataInput in) throws IOException, ClassNotFo...

FILE: src/test/java/org/apache/jdbm/BTreeBench.java
  class BTreeBench (line 29) | public class BTreeBench extends TestCaseWithTestFile {
    method test_001 (line 39) | public void test_001() throws IOException {
    method doTest (line 47) | public static void doTest(DB db, BTree<Long, Long> tree, int ITERATIONS)
    method random (line 79) | static long random(int min, int max) {
    method compare (line 83) | static void compare(BTree<Long, Long> tree, Hashtable<Long, Long> hash...

FILE: src/test/java/org/apache/jdbm/BTreeKeyCompressionTest.java
  class BTreeKeyCompressionTest (line 8) | public class BTreeKeyCompressionTest extends TestCaseWithTestFile {
    method testExpand (line 13) | public void testExpand() throws IOException {
    method testCornersLimitsLong (line 29) | public void testCornersLimitsLong() throws IOException {
    method testCornersLimitsInt (line 67) | public void testCornersLimitsInt() throws IOException {
    method testStrings (line 104) | public void testStrings() throws IOException {

FILE: src/test/java/org/apache/jdbm/BTreeLeadingValuePackTest.java
  class BTreeLeadingValuePackTest (line 13) | public class BTreeLeadingValuePackTest extends TestCase {
    class ByteArraySource (line 15) | public static class ByteArraySource {
      method ByteArraySource (line 19) | public ByteArraySource(long seed) {
      method getBytesWithCommonPrefix (line 24) | public byte[] getBytesWithCommonPrefix(int len, int common) {
    method doCompressUncompressTestFor (line 40) | private void doCompressUncompressTestFor(byte[][] groups) throws IOExc...
    method getIncrementingGroups (line 62) | private byte[][] getIncrementingGroups(int groupCount, long seed, int ...
    method testCompDecompEqualLenEqualCommon (line 73) | public void testCompDecompEqualLenEqualCommon() throws IOException {
    method testCompDecompEqualLenIncrCommon (line 86) | public void testCompDecompEqualLenIncrCommon() throws IOException {
    method testCompDecompEqualLenDecrCommon (line 99) | public void testCompDecompEqualLenDecrCommon() throws IOException {
    method testCompDecompIncrLenEqualCommon (line 112) | public void testCompDecompIncrLenEqualCommon() throws IOException {
    method testCompDecompDecrLenEqualCommon (line 125) | public void testCompDecompDecrLenEqualCommon() throws IOException {
    method testCompDecompNoCommon (line 138) | public void testCompDecompNoCommon() throws IOException {
    method testCompDecompNullGroups (line 151) | public void testCompDecompNullGroups() throws IOException {

FILE: src/test/java/org/apache/jdbm/BTreeMapNavigable2Test.java
  class BTreeMapNavigable2Test (line 5) | public  class BTreeMapNavigable2Test extends TestCaseWithTestFile
    method setUp (line 12) | public void setUp() throws Exception
    method testSize (line 28) | public void testSize()
    method testContainsKey (line 40) | public void testContainsKey()
    method testContainsValue (line 58) | public void testContainsValue()
    method testPut (line 75) | public void testPut()
    method testLowerEntry (line 85) | public void testLowerEntry()
    method testLowerKey (line 92) | public void testLowerKey()
    method testFloorEntry (line 99) | public void testFloorEntry()
    method testFloorKey (line 109) | public void testFloorKey()
    method testCeilingEntry (line 119) | public void testCeilingEntry()
    method testCeilingKey (line 129) | public void testCeilingKey()
    method testHigherEntry (line 139) | public void testHigherEntry()
    method testHigherKey (line 149) | public void testHigherKey()
    method testFirstEntry (line 159) | public void testFirstEntry()
    method testLastEntry (line 167) | public void testLastEntry()
    method testPollFirstEntry (line 175) | public void testPollFirstEntry()
    method testPollLastEntry (line 188) | public void testPollLastEntry()
    method testNavigableKeySet (line 219) | public void testNavigableKeySet()
    method testSubMap (line 251) | public void testSubMap()
    method testSubMap2 (line 276) | public void testSubMap2()
    method testSubMap3 (line 301) | public void testSubMap3()
    method testSubMap4 (line 326) | public void testSubMap4()
    method testHeadMap (line 351) | public void testHeadMap()
    method testHeadMap2 (line 369) | public void testHeadMap2()
    method testHeadMap3 (line 387) | public void testHeadMap3()
    method testHeadMap4 (line 405) | public void testHeadMap4()
    method testTailMap (line 423) | public void testTailMap()
    method testTailMap2 (line 432) | public void testTailMap2()
    method testTailMap3 (line 441) | public void testTailMap3()
    method testTailMap4 (line 450) | public void testTailMap4()
    method testIsEmpty (line 459) | public void testIsEmpty()
    method testClearSubmap (line 467) | public void testClearSubmap()
    method testConcurrentModification (line 479) | public void testConcurrentModification()

FILE: src/test/java/org/apache/jdbm/BTreeMapNavigableSubMapExclusiveTest.java
  class BTreeMapNavigableSubMapExclusiveTest (line 4) | public class BTreeMapNavigableSubMapExclusiveTest extends BTreeMapNaviga...
    method setUp (line 6) | public void setUp() throws Exception {
    method testPut (line 16) | public void testPut(){

FILE: src/test/java/org/apache/jdbm/BTreeMapNavigableSubMapInclusiveTest.java
  class BTreeMapNavigableSubMapInclusiveTest (line 4) | public class BTreeMapNavigableSubMapInclusiveTest extends BTreeMapNaviga...
    method setUp (line 6) | public void setUp() throws Exception {
    method testPut (line 14) | public void testPut(){

FILE: src/test/java/org/apache/jdbm/BTreeMapNavigableTest.java
  class BTreeMapNavigableTest (line 59) | public class BTreeMapNavigableTest extends TestCaseWithTestFile {
    method setUp (line 80) | public void setUp() throws IOException {
    method testLowerEntry (line 85) | public void testLowerEntry() {
    method testLowerKey (line 94) | public void testLowerKey() {
    method testFloorEntry (line 101) | public void testFloorEntry() {
    method testFloorKey (line 113) | public void testFloorKey() {
    method testCeilingEntry (line 121) | public void testCeilingEntry() {
    method testCeilingKey (line 133) | public void testCeilingKey() {
    method testHigherEntry (line 141) | public void testHigherEntry() {
    method testHigherKey (line 151) | public void testHigherKey() {
    method testFirstEntry (line 158) | public void testFirstEntry() {
    method testLastEntry (line 167) | public void testLastEntry() {
    method testPollFirstEntry (line 176) | public void testPollFirstEntry() {
    method testPollLastEntry (line 189) | public void testPollLastEntry() {
    method testNavigableKeySet (line 244) | public void testNavigableKeySet() {
    method testSubMap (line 286) | public void testSubMap() {
    method testHeadMap (line 305) | public void testHeadMap() {
    method testTailMap (line 324) | public void testTailMap() {

FILE: src/test/java/org/apache/jdbm/BTreeMapTest.java
  class BTreeMapTest (line 52) | public class BTreeMapTest
    method BTreeMapTest (line 55) | public BTreeMapTest() {
    method setUp (line 61) | public void setUp() throws Exception {
    method getKeyNotInPopulatedMap (line 65) | @Override
    method getValueNotInPopulatedMap (line 70) | @Override
    method getSecondValueNotInPopulatedMap (line 75) | @Override
    method makeEmptyMap (line 80) | @Override
    method makePopulatedMap (line 90) | @Override
    method makeEitherMap (line 100) | @Override
    method testOrdering (line 109) | @SuppressWarnings("unchecked") // Needed for null comparator
    method testFirstKeyEmpty (line 132) | public void testFirstKeyEmpty() {
    method testFirstKeyNonEmpty (line 145) | public void testFirstKeyNonEmpty() {
    method testLastKeyEmpty (line 157) | public void testLastKeyEmpty() {
    method testLastKeyNonEmpty (line 168) | public void testLastKeyNonEmpty() {
    method toList (line 183) | private static <E> List<E> toList(Collection<E> collection) {
    method subListSnapshot (line 187) | private static <E> List<E> subListSnapshot(
    method testHeadMap (line 196) | public void testHeadMap() {
    method testTailMap (line 227) | public void testTailMap() {
    method testSubMap (line 257) | public void testSubMap() {
    method testSubMapIllegal (line 309) | public void testSubMapIllegal() {

FILE: src/test/java/org/apache/jdbm/BTreeNodeTest.java
  class BTreeNodeTest (line 27) | public class BTreeNodeTest extends TestCaseWithTestFile {
    method testBasics (line 33) | public void testBasics() throws IOException {

FILE: src/test/java/org/apache/jdbm/BTreeSetTest.java
  class BTreeSetTest (line 28) | public class BTreeSetTest extends TestCaseWithTestFile {
    class ReversedIntegerComparator (line 32) | public static class ReversedIntegerComparator implements Comparator, S...
      method compare (line 33) | public int compare(Object o1, Object o2) {
      method equals (line 37) | public boolean equals(Object o1, Object o2) {
    method test_Constructor (line 51) | public void test_Constructor() {
    method test_ConstructorLjava_util_Comparator (line 60) | public void test_ConstructorLjava_util_Comparator() {
    method test_ConstructorLjava_util_SortedSet (line 77) | public void test_ConstructorLjava_util_SortedSet() {
    method test_addLjava_lang_Object (line 99) | public void test_addLjava_lang_Object() {
    method test_addAllLjava_util_Collection (line 111) | public void test_addAllLjava_util_Collection() {
    method test_clear (line 126) | public void test_clear() {
    method test_comparator (line 136) | public void test_comparator() {
    method test_containsLjava_lang_Object (line 147) | public void test_containsLjava_lang_Object() {
    method test_first (line 158) | public void test_first() {
    method test_headSetLjava_lang_Object (line 168) | public void test_headSetLjava_lang_Object() {
    method test_isEmpty (line 180) | public void test_isEmpty() {
    method test_iterator (line 189) | public void test_iterator() {
    method test_last (line 204) | public void test_last() {
    method test_removeLjava_lang_Object (line 213) | public void test_removeLjava_lang_Object() {
    method test_size (line 224) | public void test_size() {
    method test_subSetLjava_lang_ObjectLjava_lang_Object (line 232) | public void test_subSetLjava_lang_ObjectLjava_lang_Object() {
    method test_tailSetLjava_lang_Object (line 257) | public void test_tailSetLjava_lang_Object() {
    method test_equals (line 271) | public void test_equals() throws Exception {
    method setUp (line 297) | public void setUp() throws Exception {
    method tearDown (line 311) | public void tearDown() throws Exception {

FILE: src/test/java/org/apache/jdbm/BTreeTest.java
  class BTreeTest (line 35) | public class BTreeTest
    method run (line 60) | public void run(TestResult result) {
    method handleThreadException (line 78) | protected void handleThreadException(final Throwable t) {
    method testBasics (line 91) | public void testBasics() throws IOException {
    method testBasics2 (line 143) | public void testBasics2() throws IOException {
    method testClose (line 181) | public void testClose()
    method testInsert (line 262) | public void testInsert()
    method testRemove (line 303) | public void testRemove()
    method testFind (line 349) | public void testFind()
    method testDelete (line 381) | public void testDelete()
    method testLargeDataAmount (line 403) | public void testLargeDataAmount()
    method testRecordListener (line 445) | public void testRecordListener() throws IOException {
    method testDeleteAllNodes (line 512) | public void testDeleteAllNodes() throws Exception {
    method testMultithreadAccess (line 575) | public void testMultithreadAccess()
    method containsKey (line 633) | protected static boolean containsKey(Object key, BTree btree)
    method containsValue (line 642) | protected static boolean containsValue(Object value, BTree btree)
    method contains (line 664) | protected static boolean contains(Map.Entry entry, BTree btree)
    class TestThread (line 678) | class TestThread
      method TestThread (line 686) | TestThread(String name, BTree btree, Map content) {
      method setStop (line 692) | public void setStop() {
      method action (line 696) | private void action()
      method run (line 737) | public void run() {
    class ObjectTT (line 761) | static class ObjectTT
      method ObjectTT (line 766) | private ObjectTT() {
      method ObjectTT (line 771) | public ObjectTT(Object content) {
      method getContent (line 776) | Object getContent() {
      method equals (line 781) | public boolean equals(Object obj) {
      method toString (line 788) | public String toString() {
    method testIssue2 (line 794) | public void testIssue2(){

FILE: src/test/java/org/apache/jdbm/ByteArrayComparator.java
  class ByteArrayComparator (line 26) | public final class ByteArrayComparator
    method compare (line 43) | public int compare(byte[] obj1, byte[] obj2) {
    method compareByteArray (line 59) | public static int compareByteArray(byte[] thisKey, byte[] otherKey) {

FILE: src/test/java/org/apache/jdbm/CompactTest.java
  class CompactTest (line 7) | public class CompactTest extends TestCaseWithTestFile {
    method testHashCompaction (line 11) | public void testHashCompaction() throws IOException{
    method testBTreeCompaction (line 45) | public void testBTreeCompaction() throws IOException{

FILE: src/test/java/org/apache/jdbm/ConcurrentBTreeReadTest.java
  class ConcurrentBTreeReadTest (line 24) | public class ConcurrentBTreeReadTest extends TestCaseWithTestFile {
    class Dummy (line 27) | public static class Dummy implements Serializable {
      method Dummy (line 34) | public Dummy() {
      method Dummy (line 37) | public Dummy(long key) {
      method hashCode (line 41) | @Override
      method equals (line 46) | @Override
    method setUp (line 66) | public void setUp() throws Exception {
    method testConcurrent (line 74) | public void testConcurrent() throws Exception {
    method fetch (line 112) | private Object fetch(Long id) throws IOException {
    method commit (line 122) | private void commit() throws IOException {
    method read (line 126) | private void read() {

FILE: src/test/java/org/apache/jdbm/ConcurrentMapInterfaceTest.java
  class ConcurrentMapInterfaceTest (line 32) | public abstract class ConcurrentMapInterfaceTest<K, V>
    method ConcurrentMapInterfaceTest (line 35) | protected ConcurrentMapInterfaceTest(boolean allowsNullKeys,
    method getSecondValueNotInPopulatedMap (line 51) | protected abstract V getSecondValueNotInPopulatedMap()
    method makeEmptyMap (line 54) | @Override protected abstract ConcurrentMap<K, V> makeEmptyMap()
    method makePopulatedMap (line 57) | @Override protected abstract ConcurrentMap<K, V> makePopulatedMap()
    method makeEitherMap (line 60) | @Override protected ConcurrentMap<K, V> makeEitherMap() {
    method testPutIfAbsentNewKey (line 68) | public void testPutIfAbsentNewKey() {
    method testPutIfAbsentExistingKey (line 98) | public void testPutIfAbsentExistingKey() {
    method testPutIfAbsentNullKey (line 129) | public void testPutIfAbsentNullKey() {
    method testPutIfAbsentNewKeyNullValue (line 163) | public void testPutIfAbsentNewKeyNullValue() {
    method testRemoveKeyValueExisting (line 198) | public void testRemoveKeyValueExisting() {
    method testRemoveKeyValueMissingKey (line 224) | public void testRemoveKeyValueMissingKey() {
    method testRemoveKeyValueDifferentValue (line 250) | public void testRemoveKeyValueDifferentValue() {
    method testRemoveKeyValueNullKey (line 279) | public void testRemoveKeyValueNullKey() {
    method testRemoveKeyValueExistingKeyNullValue (line 311) | public void testRemoveKeyValueExistingKeyNullValue() {
    method testRemoveKeyValueMissingKeyNullValue (line 343) | public void testRemoveKeyValueMissingKeyNullValue() {
    method testReplace2ExistingKey (line 377) | public void testReplace2ExistingKey() {
    method testReplace2MissingKey (line 407) | public void testReplace2MissingKey() {
    method testReplace2NullKey (line 436) | public void testReplace2NullKey() {
    method testReplace2ExistingKeyNullValue (line 468) | public void testReplace2ExistingKeyNullValue() {
    method testReplace2MissingKeyNullValue (line 502) | public void testReplace2MissingKeyNullValue() {
    method testReplace3ExistingKeyValue (line 538) | public void testReplace3ExistingKeyValue() {
    method testReplace3ExistingKeyDifferentValue (line 570) | public void testReplace3ExistingKeyDifferentValue() {
    method testReplace3MissingKey (line 603) | public void testReplace3MissingKey() {
    method testReplace3NullKey (line 634) | public void testReplace3NullKey() {
    method testReplace3ExistingKeyNullOldValue (line 668) | public void testReplace3ExistingKeyNullOldValue() {
    method testReplace3MissingKeyNullOldValue (line 704) | public void testReplace3MissingKeyNullOldValue() {
    method testReplace3MissingKeyNullNewValue (line 738) | public void testReplace3MissingKeyNullNewValue() {
    method testReplace3ExistingKeyValueNullNewValue (line 772) | public void testReplace3ExistingKeyValueNullNewValue() {

FILE: src/test/java/org/apache/jdbm/DBCacheMRUTest.java
  class DBCacheMRUTest (line 6) | public class DBCacheMRUTest extends TestCaseWithTestFile {
    method testPurgeEntryClearsCache (line 8) | public void testPurgeEntryClearsCache() throws IOException {
    method newEntry (line 21) | DBCacheMRU.CacheEntry newEntry(long i){
    method testCacheMaxSize (line 26) | public void testCacheMaxSize() throws IOException {

FILE: src/test/java/org/apache/jdbm/DBCacheTest.java
  class DBCacheTest (line 7) | public class DBCacheTest extends TestCaseWithTestFile {
    method test_Issue_11_soft_cache_record_disappear (line 11) | public void test_Issue_11_soft_cache_record_disappear(){
    method test_issue_xyz (line 41) | public void test_issue_xyz(){

FILE: src/test/java/org/apache/jdbm/DBMakerTest.java
  class DBMakerTest (line 6) | public class DBMakerTest extends TestCaseWithTestFile {
    method testMemory (line 8) | public void testMemory() throws IOException {
    method testDisk (line 35) | public void testDisk() throws IOException {
    method testEncrypt (line 50) | public void testEncrypt(){

FILE: src/test/java/org/apache/jdbm/DBTest.java
  class DBTest (line 26) | public class DBTest extends TestCaseWithTestFile {
    method testCtor (line 32) | public void testCtor()
    method testBasics (line 43) | public void testBasics()
    method testDeleteAndReuse (line 85) | public void testDeleteAndReuse()
    method testRollback (line 128) | public void testRollback()
    method testNonExistingRecid (line 177) | public void testNonExistingRecid() throws IOException {
    class Serial (line 197) | public static class Serial implements Serializer<String>,Serializable {
      method deserialize (line 199) | public String deserialize(DataInput in) throws IOException, ClassNot...
      method serialize (line 204) | public void serialize(DataOutput out, String obj) throws IOException {
    method testTreeMapValueSerializer (line 210) | public void testTreeMapValueSerializer() throws Exception {
    method testCountRecid (line 223) | public void testCountRecid() throws Exception {
    method testGetCollections (line 238) | public void testGetCollections() throws IOException {
    method testRegisterShutdown (line 255) | public void testRegisterShutdown(){
    method testDeleteAfterExit (line 260) | public void testDeleteAfterExit(){
    method testDeleteAfterExitRAF (line 279) | public void testDeleteAfterExitRAF(){
    method testDeleteLinkedList (line 298) | public void testDeleteLinkedList() throws IOException {
    method testDeleteTreeMap (line 313) | public void testDeleteTreeMap() throws IOException {
    method testDeleteHashMap (line 328) | public void testDeleteHashMap() throws IOException {
    method testDeleteEmptyLinkedList (line 343) | public void testDeleteEmptyLinkedList() throws IOException {
    method testDeleteEmptyTreeMap (line 356) | public void testDeleteEmptyTreeMap() throws IOException {
    method testDeleteEmptyHashMap (line 369) | public void testDeleteEmptyHashMap() throws IOException {
    method testHugeRecord (line 383) | public void testHugeRecord() throws IOException {
    method testCompressRecid (line 396) | public void testCompressRecid(){
    method testCollectionSize (line 408) | public void testCollectionSize() throws IOException {
    method testDeleteAndPutCollection (line 440) | public void testDeleteAndPutCollection() throws IOException {

FILE: src/test/java/org/apache/jdbm/DataInputOutputTest.java
  class DataInputOutputTest (line 7) | public class DataInputOutputTest extends TestCase {
    method testInt (line 11) | public void testInt() throws IOException {
    method testLong (line 18) | public void testLong() throws IOException {
    method testBooelean (line 26) | public void testBooelean() throws IOException {
    method testByte (line 38) | public void testByte() throws IOException {
    method testUnsignedByte (line 49) | public void testUnsignedByte() throws IOException {
    method testLongPacker (line 60) | public void testLongPacker() throws IOException {

FILE: src/test/java/org/apache/jdbm/DefragTest.java
  class DefragTest (line 6) | public class DefragTest extends TestCaseWithTestFile {
    method testDefrag1 (line 9) | public void testDefrag1() throws IOException {
    method testDefrag2 (line 20) | public void testDefrag2() throws IOException {
    method testDefragBtree (line 39) | public void testDefragBtree() throws IOException {
    method testDefragLinkedList (line 56) | public void testDefragLinkedList() throws Exception {

FILE: src/test/java/org/apache/jdbm/FileHeaderTest.java
  class FileHeaderTest (line 21) | public class FileHeaderTest extends TestCase {
    method testSetWriteRead (line 27) | public void testSetWriteRead() throws Exception {
    method testRootRowids (line 45) | public void testRootRowids() throws Exception {

FILE: src/test/java/org/apache/jdbm/FileLockTest.java
  class FileLockTest (line 22) | public class FileLockTest extends TestCaseWithTestFile {
    method testLock (line 24) | public void testLock() throws IOException {

FILE: src/test/java/org/apache/jdbm/HTreeBucketTest.java
  class HTreeBucketTest (line 28) | public class HTreeBucketTest extends TestCaseWithTestFile {
    method testBasics (line 34) | public void testBasics() throws IOException {
    class LongSerializer (line 68) | public static class LongSerializer implements Serializer<Long>,Seriali...
      method LongSerializer (line 70) | public LongSerializer() {
      method serialize (line 74) | public void serialize(DataOutput out, Long obj) throws IOException {
      method deserialize (line 78) | public Long deserialize(DataInput in) throws IOException, ClassNotFo...
    method testCustomSerializer (line 83) | public void testCustomSerializer() throws IOException {

FILE: src/test/java/org/apache/jdbm/HTreeDirectoryTest.java
  class HTreeDirectoryTest (line 28) | public class HTreeDirectoryTest extends TestCaseWithTestFile {
    method testBasics (line 34) | public void testBasics() throws IOException {
    method testMixed (line 52) | public void testMixed() throws IOException {
    method checkEnumerations (line 121) | void checkEnumerations(Hashtable hash, HTreeDirectory dir)

FILE: src/test/java/org/apache/jdbm/HTreeMapTest.java
  class HTreeMapTest (line 20) | public class HTreeMapTest extends ConcurrentMapInterfaceTest<Integer, St...
    method HTreeMapTest (line 22) | public HTreeMapTest() {
    method setUp (line 28) | public void setUp() throws Exception {
    method getKeyNotInPopulatedMap (line 32) | @Override
    method getValueNotInPopulatedMap (line 37) | @Override
    method getSecondValueNotInPopulatedMap (line 42) | @Override
    method makeEmptyMap (line 47) | @Override
    method makePopulatedMap (line 53) | @Override

FILE: src/test/java/org/apache/jdbm/HTreeSetTest.java
  class HTreeSetTest (line 29) | public class HTreeSetTest extends TestCaseWithTestFile {
    method test_Constructor (line 45) | public void test_Constructor() {
    method test_addLjava_lang_Object (line 55) | public void test_addLjava_lang_Object() {
    method test_clear (line 69) | public void test_clear() {
    method test_containsLjava_lang_Object (line 83) | public void test_containsLjava_lang_Object() {
    method test_isEmpty (line 94) | public void test_isEmpty() {
    method test_iterator (line 103) | public void test_iterator() {
    method test_removeLjava_lang_Object (line 119) | public void test_removeLjava_lang_Object() {
    method test_size (line 131) | public void test_size() {
    method setUp (line 143) | public void setUp() throws Exception {
    method tearDown (line 155) | public void tearDown() throws Exception {
    method testContains (line 161) | public void testContains(){

FILE: src/test/java/org/apache/jdbm/HTreeTest.java
  class HTreeTest (line 32) | public class HTreeTest extends TestCaseWithTestFile {
    method testIterator (line 38) | public void testIterator() throws IOException {
    method testRecordListener (line 62) | public void testRecordListener() throws IOException {
    method testIssue (line 119) | public void testIssue(){

FILE: src/test/java/org/apache/jdbm/LinkedList2Test.java
  class LinkedList2Test (line 31) | public class LinkedList2Test extends TestCaseWithTestFile {
    method test_addILjava_lang_Object (line 61) | public void test_addILjava_lang_Object() {
    method test_addAllILjava_util_Collection (line 105) | public void test_addAllILjava_util_Collection() {
    method test_addAllILjava_util_Collection_2 (line 145) | public void test_addAllILjava_util_Collection_2() {
    method test_addAllLjava_util_Collection (line 158) | public void test_addAllLjava_util_Collection() {
    method test_clear (line 204) | public void test_clear() {
    method test_containsLjava_lang_Object (line 215) | public void test_containsLjava_lang_Object() {
    method test_getI (line 232) | public void test_getI() {
    method test_indexOfLjava_lang_Object (line 246) | public void test_indexOfLjava_lang_Object() {
    method test_lastIndexOfLjava_lang_Object (line 260) | public void test_lastIndexOfLjava_lang_Object() {
    method test_listIteratorI (line 277) | public void test_listIteratorI() {
    method test_removeI (line 327) | public void test_removeI() {
    method test_removeLjava_lang_Object (line 346) | public void test_removeLjava_lang_Object() {
    method test_setILjava_lang_Object (line 360) | public void test_setILjava_lang_Object() {
    method test_size (line 370) | public void test_size() {
    method test_toArray (line 389) | public void test_toArray() {
    method test_toArray$Ljava_lang_Object (line 404) | public void test_toArray$Ljava_lang_Object() {
    method test_remove (line 442) | public void test_remove() {
    method setUp (line 460) | public void setUp() throws Exception {

FILE: src/test/java/org/apache/jdbm/LogicalRowIdManagerTest.java
  class LogicalRowIdManagerTest (line 22) | public class LogicalRowIdManagerTest extends TestCaseWithTestFile {
    method testCtor (line 28) | public void testCtor() throws Exception {
    method testBasics (line 42) | public void testBasics() throws Exception {
    method testFreeBasics (line 63) | public void testFreeBasics() throws Exception {

FILE: src/test/java/org/apache/jdbm/LongHashMapTest.java
  class LongHashMapTest (line 24) | public class LongHashMapTest extends TestCase {
    method testAll (line 26) | public void testAll() {
    method testRandomCompare (line 64) | public void testRandomCompare() {
    method checkEquals (line 85) | public void checkEquals(LongHashMap<String> v1, TreeMap<Long, String> ...
    method test2 (line 104) | public void test2() {
    method testMemoryConsuptio (line 116) | public void testMemoryConsuptio() {

FILE: src/test/java/org/apache/jdbm/LongTreeMap.java
  class LongTreeMap (line 53) | public class LongTreeMap<V> {
    method get (line 75) | public V get(long key) {
    method put (line 96) | public V put(long key, V value) {
    method remove (line 115) | public V remove(long key) {
    method clear (line 135) | public void clear() {
    class Entry (line 145) | private static class Entry<V> {
      method Entry (line 153) | Entry(long key, V value) {
      method toString (line 158) | public String toString() {
    method valuesIterator (line 167) | public Iterator<V> valuesIterator() {
    method keyIterator (line 174) | public LongIterator keyIterator() {
    class MapIterator (line 179) | private class MapIterator {
      method MapIterator (line 185) | MapIterator() {
      method hasNext (line 191) | public boolean hasNext() {
      method remove (line 195) | final public void remove() {
      method makeNext (line 209) | final void makeNext() {
    class ValueIterator (line 220) | private class ValueIterator extends MapIterator implements Iterator<V> {
      method next (line 221) | public V next() {
    class LongIterator (line 227) | public class LongIterator extends MapIterator implements Iterator<Long> {
      method next (line 228) | public Long next() {
      method nextLong (line 233) | public long nextLong() {
    method isEmpty (line 240) | public boolean isEmpty() {
    method size (line 244) | public int size() {
    method toString (line 249) | public String toString() {
    method find (line 268) | private Entry<V> find(long object) {
    method minimum (line 284) | private Entry<V> minimum(Entry<V> x) {
    method successor (line 291) | Entry<V> successor(Entry<V> x) {
    method rbDelete (line 303) | void rbDelete(Entry<V> z) {
    method fixup (line 331) | private void fixup(Entry<V> x) {
    method leftRotate (line 405) | private void leftRotate(Entry<V> x) {
    method rightRotate (line 425) | private void rightRotate(Entry<V> x) {
    method rbInsert (line 445) | private Entry<V> rbInsert(long object) {
    method balance (line 486) | void balance(Entry<V> x) {

FILE: src/test/java/org/apache/jdbm/MapInterfaceTest.java
  class MapInterfaceTest (line 48) | public abstract class MapInterfaceTest<K, V> extends TestCase {
    method makeEmptyMap (line 63) | protected abstract Map<K, V> makeEmptyMap()
    method makePopulatedMap (line 73) | protected abstract Map<K, V> makePopulatedMap()
    method getKeyNotInPopulatedMap (line 84) | protected abstract K getKeyNotInPopulatedMap()
    method getValueNotInPopulatedMap (line 95) | protected abstract V getValueNotInPopulatedMap()
    method MapInterfaceTest (line 102) | protected MapInterfaceTest(
    method makeEitherMap (line 123) | protected Map<K, V> makeEitherMap() {
    method supportsValuesHashCode (line 131) | protected final boolean supportsValuesHashCode(Map<K, V> map) {
    method assertInvariants (line 155) | protected final void assertInvariants(Map<K, V> map) {
    method assertMoreInvariants (line 247) | protected void assertMoreInvariants(Map<K, V> map) {
    method testClear (line 250) | public void testClear() {
    method testContainsKey (line 272) | public void testContainsKey() {
    method testContainsValue (line 294) | public void testContainsValue() {
    method testEntrySet (line 316) | public void testEntrySet() {
    method testEntrySetForEmptyMap (line 341) | public void testEntrySetForEmptyMap() {
    method testEntrySetContainsEntryNullKeyPresent (line 351) | public void testEntrySetContainsEntryNullKeyPresent() {
    method testEntrySetContainsEntryNullKeyMissing (line 378) | public void testEntrySetContainsEntryNullKeyMissing() {
    method testEntrySetIteratorRemove (line 400) | public void testEntrySetIteratorRemove() {
    method testEntrySetRemove (line 435) | public void testEntrySetRemove() {
    method testEntrySetRemoveMissingKey (line 460) | public void testEntrySetRemoveMissingKey() {
    method testEntrySetRemoveDifferentValue (line 489) | public void testEntrySetRemoveDifferentValue() {
    method testEntrySetRemoveNullKeyPresent (line 517) | public void testEntrySetRemoveNullKeyPresent() {
    method testEntrySetRemoveNullKeyMissing (line 547) | public void testEntrySetRemoveNullKeyMissing() {
    method testEntrySetRemoveAll (line 573) | public void testEntrySetRemoveAll() {
    method testEntrySetRemoveAllNullFromEmpty (line 603) | public void testEntrySetRemoveAllNullFromEmpty() {
    method testEntrySetRetainAll (line 632) | public void testEntrySetRetainAll() {
    method testEntrySetRetainAllNullFromEmpty (line 662) | public void testEntrySetRetainAllNullFromEmpty() {
    method testEntrySetClear (line 691) | public void testEntrySetClear() {
    method testEntrySetAddAndAddAll (line 714) | public void testEntrySetAddAndAddAll() {
    method testEntrySetSetValue (line 740) | public void testEntrySetSetValue() {
    method testEntrySetSetValueSameValue (line 767) | public void testEntrySetSetValueSameValue() {
    method testEqualsForEqualMap (line 792) | public void testEqualsForEqualMap() {
    method testEqualsForLargerMap (line 807) | public void testEqualsForLargerMap() {
    method testEqualsForSmallerMap (line 825) | public void testEqualsForSmallerMap() {
    method testEqualsForEmptyMap (line 844) | public void testEqualsForEmptyMap() {
    method testGet (line 860) | public void testGet() {
    method testGetForEmptyMap (line 881) | public void testGetForEmptyMap() {
    method testGetNull (line 893) | public void testGetNull() {
    method testHashCode (line 910) | public void testHashCode() {
    method testHashCodeForEmptyMap (line 920) | public void testHashCodeForEmptyMap() {
    method testPutNewKey (line 930) | public void testPutNewKey() {
    method testPutExistingKey (line 959) | public void testPutExistingKey() {
    method testPutNullKey (line 988) | public void testPutNullKey() {
    method testPutNullValue (line 1017) | public void testPutNullValue() {
    method testPutNullValueForExistingKey (line 1048) | public void testPutNullValueForExistingKey() {
    method testPutAllNewKey (line 1080) | public void testPutAllNewKey() {
    method testPutAllExistingKey (line 1109) | public void testPutAllExistingKey() {
    method testRemove (line 1139) | public void testRemove() {
    method testRemoveMissingKey (line 1166) | public void testRemoveMissingKey() {
    method testSize (line 1190) | public void testSize() {
    method testKeySetClear (line 1194) | public void testKeySetClear() {
    method testKeySetRemoveAllNullFromEmpty (line 1217) | public void testKeySetRemoveAllNullFromEmpty() {
    method testKeySetRetainAllNullFromEmpty (line 1246) | public void testKeySetRetainAllNullFromEmpty() {
    method testValues (line 1275) | public void testValues() {
    method testValuesIteratorRemove (line 1297) | public void testValuesIteratorRemove() {
    method testValuesRemove (line 1334) | public void testValuesRemove() {
    method testValuesRemoveMissing (line 1361) | public void testValuesRemoveMissing() {
    method testValuesRemoveAll (line 1386) | public void testValuesRemoveAll() {
    method testValuesRemoveAllNullFromEmpty (line 1415) | public void testValuesRemoveAllNullFromEmpty() {
    method testValuesRetainAll (line 1444) | public void testValuesRetainAll() {
    method testValuesRetainAllNullFromEmpty (line 1473) | public void testValuesRetainAllNullFromEmpty() {
    method testValuesClear (line 1502) | public void testValuesClear() {
    method mapEntry (line 1525) | private static <K, V> Entry<K, V> mapEntry(K key, V value) {

FILE: src/test/java/org/apache/jdbm/ObjectOutputStream2Test.java
  class ObjectOutputStream2Test (line 10) | public class ObjectOutputStream2Test extends TestCase {
    method neser (line 13) | <E> E neser(E e) throws IOException, ClassNotFoundException {
    method testSimple (line 19) | public void testSimple() throws ClassNotFoundException, IOException {

FILE: src/test/java/org/apache/jdbm/PageFileTest.java
  class PageFileTest (line 24) | final public class PageFileTest
    method deleteFile (line 28) | public static void deleteFile(String filename) {
    method testCtor (line 47) | public void testCtor()
    method testAddZero (line 57) | public void testAddZero()
    method testWithHoles (line 76) | public void testWithHoles()
    method testWrongRelease (line 118) | public void testWrongRelease()

FILE: src/test/java/org/apache/jdbm/PageIoTest.java
  class PageIoTest (line 30) | public class PageIoTest extends TestCase {
    method testWrite (line 41) | public void testWrite() throws Exception {
    method testRead (line 66) | public void testRead() throws Exception {
    method testNegativeSixByte (line 83) | public void testNegativeSixByte(){
    method testPageHeaderSetWriteRead (line 97) | public void testPageHeaderSetWriteRead() throws Exception {

FILE: src/test/java/org/apache/jdbm/PageManagerTest.java
  class PageManagerTest (line 22) | public class PageManagerTest extends TestCaseWithTestFile {
    method testCtor (line 28) | public void testCtor() throws Exception {
    method testAllocSingleList (line 38) | public void testAllocSingleList() throws Exception {

FILE: src/test/java/org/apache/jdbm/PageTransactionManagerTest.java
  class PageTransactionManagerTest (line 25) | public class PageTransactionManagerTest extends TestCaseWithTestFile {
    method testCtor (line 33) | public void testCtor() throws Exception {
    method XtestRecovery (line 42) | public void XtestRecovery() throws Exception {
    method XtestSynching (line 75) | public void XtestSynching() throws Exception {
    method assertDataSizeEquals (line 105) | void assertDataSizeEquals(String msg, long size) {
    method assertLogSizeEquals (line 111) | void assertLogSizeEquals(String msg, long size) {
    method assertLogSizeNotZero (line 117) | void assertLogSizeNotZero(String msg) {

FILE: src/test/java/org/apache/jdbm/PhysicalFreeRowIdManagerTest.java
  class PhysicalFreeRowIdManagerTest (line 25) | public class PhysicalFreeRowIdManagerTest extends TestCaseWithTestFile {
    method testCtor (line 30) | public void testCtor() throws Exception {
    method testBasics (line 43) | public void testBasics() throws Exception {
    method testPhysRecRootPage (line 57) | public void testPhysRecRootPage() throws IOException {
    method test_size_to_root_offset (line 73) | public void test_size_to_root_offset(){
    method test_record_reallocation (line 82) | public void test_record_reallocation() throws IOException {
    method test_all_sizes_deallocation (line 96) | public void test_all_sizes_deallocation() throws IOException {

FILE: src/test/java/org/apache/jdbm/PhysicalRowIdManagerTest.java
  class PhysicalRowIdManagerTest (line 26) | public class PhysicalRowIdManagerTest extends TestCaseWithTestFile {
    method testCtor (line 34) | public void testCtor() throws Exception {
    method testBasics (line 48) | public void testBasics() throws Exception {
    method testTwoRecords (line 96) | public void testTwoRecords() throws IOException {
    method testDeleteRecord (line 108) | public void testDeleteRecord() throws IOException {
    method testTwoLargeRecord (line 124) | public void testTwoLargeRecord() throws IOException {
    method testManyLargeRecord (line 138) | public void testManyLargeRecord() throws IOException {
    method testSplitRecordAcrossPage (line 158) | public void testSplitRecordAcrossPage() throws IOException {
    method testFreeMidPages (line 179) | public void testFreeMidPages() throws IOException {
    method listRecords (line 203) | List<Integer> listRecords(PageManager pageman) throws IOException {
    method arrayList (line 236) | List<Integer> arrayList(Integer... args){

FILE: src/test/java/org/apache/jdbm/RecordHeaderTest.java
  class RecordHeaderTest (line 26) | public class RecordHeaderTest extends TestCase {
    method testReadWrite (line 32) | public void testReadWrite() throws Exception {
    method testRecordSize (line 43) | public void testRecordSize() {
    method testMaxRecordSize (line 91) | public void testMaxRecordSize() {
    method testRoundingSmall (line 104) | public void testRoundingSmall() {
    method testRounding (line 110) | public void testRounding() {
    method testSetCurrentSize (line 120) | public void testSetCurrentSize() {

FILE: src/test/java/org/apache/jdbm/RollbackTest.java
  class RollbackTest (line 8) | public class RollbackTest extends TestCaseWithTestFile{
    method test_treemap (line 10) | public void test_treemap() throws IOException {
    method test_hashmap (line 26) | public void test_hashmap() throws IOException {
    method test_treeset (line 42) | public void test_treeset() throws IOException {
    method test_hashset (line 59) | public void test_hashset() throws IOException {
    method test_linkedlist (line 75) | public void test_linkedlist() throws IOException {

FILE: src/test/java/org/apache/jdbm/SerialClassInfoTest.java
  class SerialClassInfoTest (line 7) | public class SerialClassInfoTest extends TestCaseWithTestFile {
    class Bean1 (line 9) | static class Bean1 implements Serializable {
      method equals (line 11) | public boolean equals(Object o) {
      method getField2 (line 39) | public String getField2() {
      method setField2 (line 44) | public void setField2(String field2) {
      method Bean1 (line 49) | Bean1(String field1, String field2) {
      method Bean1 (line 54) | Bean1() {
    class Bean2 (line 58) | static class Bean2 extends Bean1 {
      method equals (line 60) | public boolean equals(Object o) {
      method hashCode (line 72) | @Override
      method Bean2 (line 79) | Bean2(String field1, String field2, String field3) {
      method Bean2 (line 84) | Bean2() {
    method setUp (line 91) | public void setUp() throws IOException {
    method testGetFieldValue1 (line 98) | public void testGetFieldValue1() throws Exception {
    method testGetFieldValue2 (line 102) | public void testGetFieldValue2() throws Exception {
    method testGetFieldValue3 (line 107) | public void testGetFieldValue3() throws Exception {
    method testGetFieldValue4 (line 111) | public void testGetFieldValue4() throws Exception {
    method testGetFieldValue5 (line 116) | public void testGetFieldValue5() throws Exception {
    method testSetFieldValue1 (line 120) | public void testSetFieldValue1() {
    method testSetFieldValue2 (line 125) | public void testSetFieldValue2() {
    method testSetFieldValue3 (line 131) | public void testSetFieldValue3() {
    method testSetFieldValue4 (line 136) | public void testSetFieldValue4() {
    method testSetFieldValue5 (line 142) | public void testSetFieldValue5() {
    method testGetPrimitiveField (line 147) | public void testGetPrimitiveField() {
    method testSetPrimitiveField (line 155) | public void testSetPrimitiveField() {
    method serialize (line 167) | <E> E serialize(E e) throws ClassNotFoundException, IOException {
    method testSerializable (line 177) | public void testSerializable() throws Exception {
    method testRecursion (line 183) | public void testRecursion() throws Exception {
    method testRecursion2 (line 193) | public void testRecursion2() throws Exception {
    method testRecursion3 (line 204) | public void testRecursion3() throws Exception {
    method testPersistedSimple (line 216) | public void testPersistedSimple() throws Exception {
    method testPersisted (line 233) | public void testPersisted() throws Exception {

FILE: src/test/java/org/apache/jdbm/Serialization2Bean.java
  class Serialization2Bean (line 6) | public class Serialization2Bean implements Serializable {
    method hashCode (line 19) | @Override
    method equals (line 33) | @Override

FILE: src/test/java/org/apache/jdbm/Serialization2Test.java
  class Serialization2Test (line 8) | public class Serialization2Test extends TestCaseWithTestFile {
    method test2 (line 11) | public void test2() throws IOException {
    method test3 (line 29) | public void test3() throws IOException {
    class AAA (line 51) | static class AAA implements Serializable {
    method testReopenWithDefrag (line 56) | public void testReopenWithDefrag(){

FILE: src/test/java/org/apache/jdbm/SerializationHeaderTest.java
  class SerializationHeaderTest (line 10) | public class SerializationHeaderTest extends TestCase {
    method testUnique (line 12) | public void testUnique() throws IllegalAccessException {

FILE: src/test/java/org/apache/jdbm/SerializationTest.java
  class SerializationTest (line 28) | @SuppressWarnings("unchecked")
    method SerializationTest (line 33) | public SerializationTest() throws IOException {
    method testInt (line 37) | public void testInt() throws IOException, ClassNotFoundException {
    method testShort (line 55) | public void testShort() throws IOException, ClassNotFoundException {
    method testDouble (line 71) | public void testDouble() throws IOException, ClassNotFoundException {
    method testFloat (line 84) | public void testFloat() throws IOException, ClassNotFoundException {
    method testChar (line 96) | public void testChar() throws IOException, ClassNotFoundException {
    method testLong (line 109) | public void testLong() throws IOException, ClassNotFoundException {
    method testBoolean1 (line 128) | public void testBoolean1() throws IOException, ClassNotFoundException {
    method testString (line 141) | public void testString() throws IOException, ClassNotFoundException {
    method testBigString (line 147) | public void testBigString() throws IOException, ClassNotFoundException {
    method testObject (line 157) | public void testObject() throws ClassNotFoundException, IOException {
    method testNoArgumentConstructorInJavaSerialization (line 164) | public void testNoArgumentConstructorInJavaSerialization() throws Clas...
    method testArrayList (line 174) | public void testArrayList() throws ClassNotFoundException, IOException {
    method testLinkedList (line 184) | public void testLinkedList() throws ClassNotFoundException, IOException {
    method testVector (line 194) | public void testVector() throws ClassNotFoundException, IOException {
    method testTreeSet (line 205) | public void testTreeSet() throws ClassNotFoundException, IOException {
    method testHashSet (line 215) | public void testHashSet() throws ClassNotFoundException, IOException {
    method testLinkedHashSet (line 225) | public void testLinkedHashSet() throws ClassNotFoundException, IOExcep...
    method testHashMap (line 235) | public void testHashMap() throws ClassNotFoundException, IOException {
    method testTreeMap (line 245) | public void testTreeMap() throws ClassNotFoundException, IOException {
    method testLinkedHashMap (line 255) | public void testLinkedHashMap() throws ClassNotFoundException, IOExcep...
    method testHashtable (line 265) | public void testHashtable() throws ClassNotFoundException, IOException {
    method testProperties (line 275) | public void testProperties() throws ClassNotFoundException, IOException {
    method testClass (line 286) | public void testClass() throws IOException, ClassNotFoundException {
    method testClass2 (line 292) | public void testClass2() throws IOException, ClassNotFoundException {
    method testUnicodeString (line 299) | public void testUnicodeString() throws ClassNotFoundException, IOExcep...
    method testSerializationHeader (line 307) | public void testSerializationHeader() throws IOException {
    method testPackedLongCollection (line 322) | public void testPackedLongCollection() throws ClassNotFoundException, ...
    method testNegativeLongsArray (line 332) | public void testNegativeLongsArray() throws ClassNotFoundException, IO...
    method testNegativeIntArray (line 339) | public void testNegativeIntArray() throws ClassNotFoundException, IOEx...
    method testNegativeShortArray (line 346) | public void testNegativeShortArray() throws ClassNotFoundException, IO...
    method testBooleanArray (line 352) | public void testBooleanArray() throws ClassNotFoundException, IOExcept...
    method testDoubleArray (line 358) | public void testDoubleArray() throws ClassNotFoundException, IOExcepti...
    method testFloatArray (line 364) | public void testFloatArray() throws ClassNotFoundException, IOException {
    method testByteArray (line 370) | public void testByteArray() throws ClassNotFoundException, IOException {
    method testCharArray (line 376) | public void testCharArray() throws ClassNotFoundException, IOException {
    method testDate (line 383) | public void testDate() throws IOException, ClassNotFoundException {
    method testBigDecimal (line 390) | public void testBigDecimal() throws IOException, ClassNotFoundException {
    method testBigInteger (line 397) | public void testBigInteger() throws IOException, ClassNotFoundException {
    method testUUID (line 404) | public void testUUID() throws IOException, ClassNotFoundException {
    method testLocale (line 417) | public void testLocale() throws Exception{
    type Order (line 424) | enum Order
    method testEnum (line 429) | public void testEnum() throws Exception{
    class Extr (line 445) | static class Extr implements  Externalizable{
      method writeExternal (line 450) | public void writeExternal(ObjectOutput out) throws IOException {
      method readExternal (line 456) | public void readExternal(ObjectInput in) throws IOException, ClassNo...
    method testExternalizable (line 463) | public void testExternalizable() throws Exception{
    method testObjectArrayArray (line 474) | public void testObjectArrayArray() throws IOException, ClassNotFoundEx...

FILE: src/test/java/org/apache/jdbm/Serialized2DerivedBean.java
  class Serialized2DerivedBean (line 3) | public class Serialized2DerivedBean extends Serialization2Bean {
    method hashCode (line 13) | @Override
    method equals (line 26) | @Override

FILE: src/test/java/org/apache/jdbm/StorageDiskMappedTest.java
  class StorageDiskMappedTest (line 6) | public class StorageDiskMappedTest extends TestCase {
    method testNumberOfPages (line 8) | public void testNumberOfPages(){

FILE: src/test/java/org/apache/jdbm/StorageZipTest.java
  class StorageZipTest (line 7) | public class StorageZipTest extends TestCaseWithTestFile {
    method test_archive_creation (line 11) | public void test_archive_creation() throws IOException {

FILE: src/test/java/org/apache/jdbm/StreamCorrupted.java
  class StreamCorrupted (line 55) | public class StreamCorrupted
    method testStreamCorrupted (line 62) | public void testStreamCorrupted()

FILE: src/test/java/org/apache/jdbm/TestCaseWithTestFile.java
  class TestCaseWithTestFile (line 29) | abstract class TestCaseWithTestFile extends TestCase {
    method setUp (line 35) | public void setUp() throws Exception {
    method tearDown (line 41) | public void tearDown() throws Exception {
    method newTestFile (line 51) | static public String newTestFile() {
    method newRecordFile (line 55) | static public PageFile newRecordFile() throws IOException {
    method newDBCache (line 59) | static public DBAbstract newDBCache() throws IOException {
    method newDBNoCache (line 63) | static public DBStore newDBNoCache() throws IOException {

FILE: src/test/java/org/apache/jdbm/TestInsertPerf.java
  class TestInsertPerf (line 23) | public class TestInsertPerf extends TestCaseWithTestFile {
    method testInsert (line 29) | public void testInsert() throws IOException {

FILE: src/test/java/org/apache/jdbm/TestInsertUpdate.java
  class TestInsertUpdate (line 8) | public class TestInsertUpdate extends TestCaseWithTestFile {
    method testInsertUpdateWithCustomSerializer (line 15) | @Test

FILE: src/test/java/org/apache/jdbm/TestIssues.java
  class TestIssues (line 7) | public class TestIssues extends TestCaseWithTestFile {
    method testHTreeClear (line 14) | public void testHTreeClear() throws IOException {
    method testBTreeClear (line 31) | public void testBTreeClear() throws IOException {
    method test_issue_17_double_concurrent_get (line 48) | public void test_issue_17_double_concurrent_get() throws InterruptedEx...
    method test_issue_84_reopen_after_close (line 73) | public void test_issue_84_reopen_after_close(){

FILE: src/test/java/org/apache/jdbm/TestLargeData.java
  class TestLargeData (line 6) | public class TestLargeData extends TestCaseWithTestFile {
    method testLargeData (line 8) | public void testLargeData() throws IOException {
    method testAllSizes (line 46) | public void testAllSizes() throws IOException {

FILE: src/test/java/org/apache/jdbm/TestLazyRecordsInTree.java
  class TestLazyRecordsInTree (line 6) | public class TestLazyRecordsInTree extends TestCaseWithTestFile {
    method makeString (line 8) | String makeString(int size) {
    method doIt (line 16) | void doIt(DBStore r, Map<Integer, String> m) throws IOException {
    method testBTree (line 45) | public void testBTree() throws IOException {
    method testHTree (line 51) | public void testHTree() throws IOException {

FILE: src/test/java/org/apache/jdbm/TestRollback.java
  class TestRollback (line 23) | public class TestRollback
    method testRollback1 (line 30) | public void testRollback1()
    method testRollback2 (line 56) | public void testRollback2()
    method testRollback1b (line 79) | public void testRollback1b()
    method testRollback2b (line 105) | public void testRollback2b()

FILE: src/test/java/org/apache/jdbm/TestStress.java
  class TestStress (line 24) | public class TestStress extends TestCaseWithTestFile {
    class RecordData (line 37) | class RecordData {
      method RecordData (line 42) | RecordData(long rowid, int size, byte b) {
      method toString (line 48) | public String toString() {
    method getRandomAllocatedSlot (line 53) | private int getRandomAllocatedSlot(RecordData[] d) {
    method getRandomAllocatedRoot (line 66) | private int getRandomAllocatedRoot() {
    method testBasics (line 79) | public void testBasics() throws Exception {

FILE: src/test/java/org/apache/jdbm/UtilTT.java
  class UtilTT (line 24) | public class UtilTT {
    method makeRecord (line 28) | public static byte[] makeRecord(int length, byte b) {
    method checkRecord (line 38) | public static boolean checkRecord(byte[] data, int length, byte b) {

FILE: src/test/java/org/apache/jdbm/UtilsTest.java
  class UtilsTest (line 5) | public class UtilsTest extends TestCase {
    method testFormatSpaceUsage (line 7) | public void testFormatSpaceUsage() {

FILE: src/test/java/org/apache/jdbm/junk/HugeData.java
  class HugeData (line 12) | public class HugeData {
    method main (line 14) | static public void main(String[] args) throws IOException, Interrupted...

FILE: src/test/java/org/apache/jdbm/junk/MappedBufferGrow.java
  class MappedBufferGrow (line 12) | public class MappedBufferGrow {
    method main (line 14) | public static void main(String[] args) throws IOException {

FILE: src/test/java/org/apache/jdbm/junk/MappedBufferVersusRaf.java
  class MappedBufferVersusRaf (line 13) | public class MappedBufferVersusRaf {
    method main (line 19) | public static void main(String[] args) throws IOException {

FILE: src/test/java/org/apache/jdbm/junk/RandomInsertLongs.java
  class RandomInsertLongs (line 11) | public class RandomInsertLongs {
    method main (line 13) | public static void main(String[] args) throws IOException {
    method makeLong (line 50) | public static Long makeLong(long value){
Condensed preview — 112 files, each showing path, character count, and a content snippet. Download the .json file or copy for the full structured content (894K chars).
[
  {
    "path": "LICENSE-2.0.html",
    "chars": 10998,
    "preview": "<!DOCTYPE html PUBLIC \"-//W3C//DTD XHTML 1.0 Transitional//EN\"\n        \"http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitio"
  },
  {
    "path": "README.md",
    "chars": 14777,
    "preview": "**NOTE: this project is in maintenance mode (bug fix only), I redirected my effort to JDBM4 which should provide better "
  },
  {
    "path": "pom.xml",
    "chars": 3412,
    "preview": "<project xmlns=\"http://maven.apache.org/POM/4.0.0\"\n         xmlns:xsi=\"http://www.w3.org/2001/XMLSchema-instance\"\n      "
  },
  {
    "path": "src/main/java/org/apache/jdbm/BTree.java",
    "chars": 21334,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/BTreeLazyRecord.java",
    "chars": 2994,
    "preview": "package org.apache.jdbm;\n\nimport java.io.*;\n\n/**\n * An record lazily loaded from store.\n * This is used in BTree/HTree t"
  },
  {
    "path": "src/main/java/org/apache/jdbm/BTreeMap.java",
    "chars": 18266,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/BTreeNode.java",
    "chars": 53256,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/BTreeSet.java",
    "chars": 4868,
    "preview": "/*\n *  Licensed to the Apache Software Foundation (ASF) under one or more\n *  contributor license agreements.  See the N"
  },
  {
    "path": "src/main/java/org/apache/jdbm/DB.java",
    "chars": 5780,
    "preview": "package org.apache.jdbm;\n\nimport java.util.*;\nimport java.util.concurrent.ConcurrentMap;\nimport java.util.concurrent.Con"
  },
  {
    "path": "src/main/java/org/apache/jdbm/DBAbstract.java",
    "chars": 20242,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/DBCache.java",
    "chars": 4738,
    "preview": "package org.apache.jdbm;\n\nimport javax.crypto.Cipher;\nimport java.io.IOError;\nimport java.io.IOException;\nimport java.ut"
  },
  {
    "path": "src/main/java/org/apache/jdbm/DBCacheMRU.java",
    "chars": 8763,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/DBCacheRef.java",
    "chars": 11020,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/DBMaker.java",
    "chars": 11697,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/DBStore.java",
    "chars": 31554,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/DataInputOutput.java",
    "chars": 7669,
    "preview": "package org.apache.jdbm;\n\nimport java.io.*;\nimport java.nio.ByteBuffer;\nimport java.util.Arrays;\n\n/**\n * Utility class w"
  },
  {
    "path": "src/main/java/org/apache/jdbm/DataInputOutput2.java",
    "chars": 5150,
    "preview": "///*\n//package org.apache.jdbm;\n//\n//import java.io.DataInput;\n//import java.io.DataOutput;\n//import java.io.IOException"
  },
  {
    "path": "src/main/java/org/apache/jdbm/HTree.java",
    "chars": 16207,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/HTreeBucket.java",
    "chars": 11498,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/HTreeDirectory.java",
    "chars": 18965,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/HTreeSet.java",
    "chars": 822,
    "preview": "package org.apache.jdbm;\n\n\nimport java.util.AbstractSet;\nimport java.util.Iterator;\n\n/**\n * Wrapper for HTree to impleme"
  },
  {
    "path": "src/main/java/org/apache/jdbm/LinkedList2.java",
    "chars": 15406,
    "preview": "/*\n *  Licensed to the Apache Software Foundation (ASF) under one or more\n *  contributor license agreements.  See the N"
  },
  {
    "path": "src/main/java/org/apache/jdbm/LogicalRowIdManager.java",
    "chars": 8272,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/LongHashMap.java",
    "chars": 11363,
    "preview": "/*\n *  Licensed to the Apache Software Foundation (ASF) under one or more\n *  contributor license agreements.  See the N"
  },
  {
    "path": "src/main/java/org/apache/jdbm/LongPacker.java",
    "chars": 3905,
    "preview": "/*\nCopyright (c) 2008, Nathan Sweet\nAll rights reserved.\n\nRedistribution and use in source and binary forms, with or wit"
  },
  {
    "path": "src/main/java/org/apache/jdbm/Magic.java",
    "chars": 2885,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/ObjectInputStream2.java",
    "chars": 749,
    "preview": "package org.apache.jdbm;\n\nimport java.io.DataInputStream;\nimport java.io.IOException;\nimport java.io.InputStream;\nimport"
  },
  {
    "path": "src/main/java/org/apache/jdbm/ObjectOutputStream2.java",
    "chars": 714,
    "preview": "package org.apache.jdbm;\n\nimport java.io.*;\nimport java.util.ArrayList;\n\n/**\n * An alternative to <code>java.io.ObjectOu"
  },
  {
    "path": "src/main/java/org/apache/jdbm/PageFile.java",
    "chars": 12136,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/PageIo.java",
    "chars": 12173,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/PageManager.java",
    "chars": 7769,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/PageTransactionManager.java",
    "chars": 8967,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/PhysicalFreeRowIdManager.java",
    "chars": 7729,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/PhysicalRowIdManager.java",
    "chars": 13195,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/RecordHeader.java",
    "chars": 4427,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/RecordListener.java",
    "chars": 1369,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/SerialClassInfo.java",
    "chars": 16683,
    "preview": "package org.apache.jdbm;\n\nimport org.apache.jdbm.Serialization.FastArrayList;\n\nimport java.io.*;\nimport java.lang.reflec"
  },
  {
    "path": "src/main/java/org/apache/jdbm/Serialization.java",
    "chars": 45655,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/SerializationHeader.java",
    "chars": 4819,
    "preview": "package org.apache.jdbm;\n\n/**\n * Header byte, is used at start of each record to indicate data type\n * WARNING !!! value"
  },
  {
    "path": "src/main/java/org/apache/jdbm/Serializer.java",
    "chars": 1579,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/main/java/org/apache/jdbm/Storage.java",
    "chars": 1078,
    "preview": "package org.apache.jdbm;\n\nimport java.io.*;\nimport java.nio.ByteBuffer;\n\n/**\n *\n */\ninterface Storage {\n\n    /**\n     * "
  },
  {
    "path": "src/main/java/org/apache/jdbm/StorageDisk.java",
    "chars": 6004,
    "preview": "package org.apache.jdbm;\n\nimport java.io.*;\nimport java.nio.ByteBuffer;\nimport java.nio.channels.OverlappingFileLockExce"
  },
  {
    "path": "src/main/java/org/apache/jdbm/StorageDiskMapped.java",
    "chars": 8447,
    "preview": "package org.apache.jdbm;\n\nimport sun.misc.Cleaner;\n\nimport java.io.*;\nimport java.nio.ByteBuffer;\nimport java.nio.Mapped"
  },
  {
    "path": "src/main/java/org/apache/jdbm/StorageMemory.java",
    "chars": 2426,
    "preview": "package org.apache.jdbm;\n\nimport java.io.*;\nimport java.nio.ByteBuffer;\n\n/**\n * Storage which keeps all data in memory.\n"
  },
  {
    "path": "src/main/java/org/apache/jdbm/StorageZip.java",
    "chars": 1840,
    "preview": "package org.apache.jdbm;\n\nimport java.io.*;\nimport java.nio.ByteBuffer;\nimport java.util.zip.ZipEntry;\nimport java.util."
  },
  {
    "path": "src/main/java/org/apache/jdbm/Utils.java",
    "chars": 2717,
    "preview": "package org.apache.jdbm;\n\nimport javax.crypto.Cipher;\nimport java.io.DataInput;\nimport java.io.DataOutput;\nimport java.i"
  },
  {
    "path": "src/main/java/org/apache/jdbm/packageXX.html",
    "chars": 10931,
    "preview": "<html>\n<body>\n<h1>WARNING incomplete and missleading doc!!!</h1>\n<p>This package contains public API and introduction</p"
  },
  {
    "path": "src/test/java/org/apache/jdbm/BTreeBench.java",
    "chars": 3608,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/BTreeKeyCompressionTest.java",
    "chars": 4410,
    "preview": "package org.apache.jdbm;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.ArrayList;\nimport java.util."
  },
  {
    "path": "src/test/java/org/apache/jdbm/BTreeLeadingValuePackTest.java",
    "chars": 5345,
    "preview": "package org.apache.jdbm;\n\nimport java.io.ByteArrayInputStream;\nimport java.io.ByteArrayOutputStream;\nimport java.io.Data"
  },
  {
    "path": "src/test/java/org/apache/jdbm/BTreeMapNavigable2Test.java",
    "chars": 10861,
    "preview": "package org.apache.jdbm;\n\nimport java.util.*;\n\npublic  class BTreeMapNavigable2Test extends TestCaseWithTestFile\n{\n\tstat"
  },
  {
    "path": "src/test/java/org/apache/jdbm/BTreeMapNavigableSubMapExclusiveTest.java",
    "chars": 420,
    "preview": "package org.apache.jdbm;\n\n\npublic class BTreeMapNavigableSubMapExclusiveTest extends BTreeMapNavigable2Test{\n\n    public"
  },
  {
    "path": "src/test/java/org/apache/jdbm/BTreeMapNavigableSubMapInclusiveTest.java",
    "chars": 360,
    "preview": "package org.apache.jdbm;\n\n\npublic class BTreeMapNavigableSubMapInclusiveTest extends BTreeMapNavigable2Test{\n\n    public"
  },
  {
    "path": "src/test/java/org/apache/jdbm/BTreeMapNavigableTest.java",
    "chars": 13699,
    "preview": "/*\n* Copyright 2012 Luc Peuvrier\n* All rights reserved.\n*\n* This file is a part of JOAFIP.\n*\n* JOAFIP is free software: "
  },
  {
    "path": "src/test/java/org/apache/jdbm/BTreeMapTest.java",
    "chars": 11461,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/BTreeNodeTest.java",
    "chars": 2144,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/BTreeSetTest.java",
    "chars": 11276,
    "preview": "/*\n *  Licensed to the Apache Software Foundation (ASF) under one or more\n *  contributor license agreements.  See the N"
  },
  {
    "path": "src/test/java/org/apache/jdbm/BTreeTest.java",
    "chars": 26497,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/ByteArrayComparator.java",
    "chars": 3072,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/CompactTest.java",
    "chars": 1988,
    "preview": "package org.apache.jdbm;\n\n\nimport java.io.IOException;\nimport java.util.Map;\n\npublic class CompactTest extends TestCaseW"
  },
  {
    "path": "src/test/java/org/apache/jdbm/ConcurrentBTreeReadTest.java",
    "chars": 3804,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/ConcurrentMapInterfaceTest.java",
    "chars": 23479,
    "preview": "/*\n * Copyright (C) 2008 Google Inc.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may no"
  },
  {
    "path": "src/test/java/org/apache/jdbm/DBCacheMRUTest.java",
    "chars": 1054,
    "preview": "package org.apache.jdbm;\n\nimport java.io.IOException;\nimport java.util.ArrayList;\n\npublic class DBCacheMRUTest extends T"
  },
  {
    "path": "src/test/java/org/apache/jdbm/DBCacheTest.java",
    "chars": 1920,
    "preview": "package org.apache.jdbm;\n\nimport java.util.Map;\nimport java.util.Set;\n\n\npublic class DBCacheTest extends TestCaseWithTes"
  },
  {
    "path": "src/test/java/org/apache/jdbm/DBMakerTest.java",
    "chars": 1805,
    "preview": "package org.apache.jdbm;\n\nimport java.io.IOException;\nimport java.util.Set;\n\npublic class DBMakerTest extends TestCaseWi"
  },
  {
    "path": "src/test/java/org/apache/jdbm/DBTest.java",
    "chars": 14089,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/DataInputOutputTest.java",
    "chars": 1545,
    "preview": "package org.apache.jdbm;\n\nimport junit.framework.TestCase;\n\nimport java.io.IOException;\n\npublic class DataInputOutputTes"
  },
  {
    "path": "src/test/java/org/apache/jdbm/DefragTest.java",
    "chars": 2577,
    "preview": "package org.apache.jdbm;\n\nimport java.io.IOException;\nimport java.util.*;\n\npublic class DefragTest extends TestCaseWithT"
  },
  {
    "path": "src/test/java/org/apache/jdbm/FileHeaderTest.java",
    "chars": 1988,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/FileLockTest.java",
    "chars": 1467,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/HTreeBucketTest.java",
    "chars": 2831,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/HTreeDirectoryTest.java",
    "chars": 4138,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/HTreeMapTest.java",
    "chars": 2028,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/HTreeSetTest.java",
    "chars": 5152,
    "preview": "/*\n *  Licensed to the Apache Software Foundation (ASF) under one or more\n *  contributor license agreements.  See the N"
  },
  {
    "path": "src/test/java/org/apache/jdbm/HTreeTest.java",
    "chars": 4500,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/LinkedList2Test.java",
    "chars": 16983,
    "preview": "/*\n *  Licensed to the Apache Software Foundation (ASF) under one or more\n *  contributor license agreements.  See the N"
  },
  {
    "path": "src/test/java/org/apache/jdbm/LogicalRowIdManagerTest.java",
    "chars": 2455,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/LongHashMapTest.java",
    "chars": 4457,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/LongTreeMap.java",
    "chars": 15497,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/MapInterfaceTest.java",
    "chars": 49557,
    "preview": "/*\n * Copyright (C) 2008 Google Inc.\n *\n * Licensed under the Apache License, Version 2.0 (the \"License\");\n * you may no"
  },
  {
    "path": "src/test/java/org/apache/jdbm/ObjectOutputStream2Test.java",
    "chars": 740,
    "preview": "package org.apache.jdbm;\n\nimport junit.framework.TestCase;\nimport org.apache.jdbm.SerialClassInfoTest.*;\n\nimport java.io"
  },
  {
    "path": "src/test/java/org/apache/jdbm/PageFileTest.java",
    "chars": 3838,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/PageIoTest.java",
    "chars": 3633,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/PageManagerTest.java",
    "chars": 1956,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/PageTransactionManagerTest.java",
    "chars": 3678,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/PhysicalFreeRowIdManagerTest.java",
    "chars": 3547,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/PhysicalRowIdManagerTest.java",
    "chars": 7828,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/RecordHeaderTest.java",
    "chars": 5339,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/RollbackTest.java",
    "chars": 2319,
    "preview": "package org.apache.jdbm;\n\nimport java.io.IOException;\nimport java.util.List;\nimport java.util.Map;\nimport java.util.Set;"
  },
  {
    "path": "src/test/java/org/apache/jdbm/SerialClassInfoTest.java",
    "chars": 7074,
    "preview": "package org.apache.jdbm;\n\nimport java.io.*;\nimport java.util.AbstractMap;\nimport java.util.ArrayList;\n\npublic class Seri"
  },
  {
    "path": "src/test/java/org/apache/jdbm/Serialization2Bean.java",
    "chars": 2806,
    "preview": "package org.apache.jdbm;\n\nimport java.io.Serializable;\n\n\npublic class Serialization2Bean implements Serializable {\n    /"
  },
  {
    "path": "src/test/java/org/apache/jdbm/Serialization2Test.java",
    "chars": 1827,
    "preview": "package org.apache.jdbm;\n\n\nimport java.io.IOException;\nimport java.io.Serializable;\nimport java.util.Map;\n\npublic class "
  },
  {
    "path": "src/test/java/org/apache/jdbm/SerializationHeaderTest.java",
    "chars": 632,
    "preview": "package org.apache.jdbm;\n\nimport junit.framework.TestCase;\n\nimport java.lang.reflect.Field;\nimport java.util.Set;\nimport"
  },
  {
    "path": "src/test/java/org/apache/jdbm/SerializationTest.java",
    "chars": 17648,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/Serialized2DerivedBean.java",
    "chars": 2173,
    "preview": "package org.apache.jdbm;\n\npublic class Serialized2DerivedBean extends Serialization2Bean {\n    private static final long"
  },
  {
    "path": "src/test/java/org/apache/jdbm/StorageDiskMappedTest.java",
    "chars": 268,
    "preview": "package org.apache.jdbm;\n\nimport junit.framework.TestCase;\n\n\npublic class StorageDiskMappedTest extends TestCase {\n    \n"
  },
  {
    "path": "src/test/java/org/apache/jdbm/StorageZipTest.java",
    "chars": 1025,
    "preview": "package org.apache.jdbm;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.util.Set;\n//TODO Make this work!!"
  },
  {
    "path": "src/test/java/org/apache/jdbm/StreamCorrupted.java",
    "chars": 3121,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/TestCaseWithTestFile.java",
    "chars": 2219,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/TestInsertPerf.java",
    "chars": 1767,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/TestInsertUpdate.java",
    "chars": 740,
    "preview": "package org.apache.jdbm;\n\nimport java.io.*;\nimport java.util.Map;\n\nimport org.junit.Test;\n\npublic class TestInsertUpdate"
  },
  {
    "path": "src/test/java/org/apache/jdbm/TestIssues.java",
    "chars": 2044,
    "preview": "package org.apache.jdbm;\n\n\nimport java.io.IOException;\nimport java.util.Map;\n\npublic class TestIssues extends TestCaseWi"
  },
  {
    "path": "src/test/java/org/apache/jdbm/TestLargeData.java",
    "chars": 1897,
    "preview": "package org.apache.jdbm;\n\nimport java.io.IOException;\nimport java.util.Arrays;\n\npublic class TestLargeData extends TestC"
  },
  {
    "path": "src/test/java/org/apache/jdbm/TestLazyRecordsInTree.java",
    "chars": 1734,
    "preview": "package org.apache.jdbm;\n\nimport java.io.IOException;\nimport java.util.Map;\n\npublic class TestLazyRecordsInTree extends "
  },
  {
    "path": "src/test/java/org/apache/jdbm/TestRollback.java",
    "chars": 3716,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/TestStress.java",
    "chars": 7799,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/UtilTT.java",
    "chars": 1571,
    "preview": "/*******************************************************************************\n * Copyright 2010 Cees De Groot, Alex B"
  },
  {
    "path": "src/test/java/org/apache/jdbm/UtilsTest.java",
    "chars": 400,
    "preview": "package org.apache.jdbm;\n\nimport junit.framework.TestCase;\n\npublic class UtilsTest extends TestCase {\n\n    public void t"
  },
  {
    "path": "src/test/java/org/apache/jdbm/junk/HugeData.java",
    "chars": 1160,
    "preview": "package org.apache.jdbm.junk;\n\nimport org.apache.jdbm.DB;\nimport org.apache.jdbm.DBMaker;\n\nimport java.io.IOException;\ni"
  },
  {
    "path": "src/test/java/org/apache/jdbm/junk/MappedBufferGrow.java",
    "chars": 1109,
    "preview": "package org.apache.jdbm.junk;\n\nimport java.io.File;\nimport java.io.IOException;\nimport java.io.RandomAccessFile;\nimport "
  },
  {
    "path": "src/test/java/org/apache/jdbm/junk/MappedBufferVersusRaf.java",
    "chars": 2278,
    "preview": "package org.apache.jdbm.junk;\n\n\nimport java.io.*;\nimport java.nio.ByteBuffer;\nimport java.nio.MappedByteBuffer;\nimport j"
  },
  {
    "path": "src/test/java/org/apache/jdbm/junk/RandomInsertLongs.java",
    "chars": 1500,
    "preview": "package org.apache.jdbm.junk;\n\n\nimport org.apache.jdbm.*;\n\nimport java.io.IOException;\nimport java.util.Random;\nimport j"
  }
]

About this extraction

This page contains the full source code of the jankotek/JDBM3 GitHub repository, extracted and formatted as plain text for AI agents and large language models (LLMs). The extraction includes 112 files (833.1 KB, approximately 193.8k tokens) and a symbol index of 1459 extracted functions, classes, methods, constants, and types. Use it with OpenClaw, Claude, ChatGPT, Cursor, Windsurf, or any other AI tool that accepts text input. You can copy the full output to your clipboard or download it as a .txt file.

Extracted by GitExtract — a free GitHub-repository-to-text converter for AI. Built by Nikandr Surkov.

Copied to clipboard!