diff --git a/common/source/java/ch/systemsx/cisd/common/hdf5/CompressingHdf5WriterWrapper.java b/common/source/java/ch/systemsx/cisd/common/hdf5/CompressingHdf5WriterWrapper.java
deleted file mode 100644
index 1560bfe643061702d41d48b5f837300089aaf7e2..0000000000000000000000000000000000000000
--- a/common/source/java/ch/systemsx/cisd/common/hdf5/CompressingHdf5WriterWrapper.java
+++ /dev/null
@@ -1,68 +0,0 @@
-/*
- * Copyright 2010 ETH Zuerich, CISD
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package ch.systemsx.cisd.common.hdf5;
-
-import ch.systemsx.cisd.hdf5.HDF5GenericStorageFeatures;
-import ch.systemsx.cisd.hdf5.HDF5IntStorageFeatures;
-import ch.systemsx.cisd.hdf5.IHDF5Writer;
-
-/**
- * @author Chandrasekhar Ramakrishnan
- */
-class CompressingHdf5WriterWrapper implements IHDF5ContainerWriter
-{
-    private final IHDF5Writer writer;
-
-    // Store this, though I'm not yet using it
-    @SuppressWarnings("unused")
-    private final HDF5GenericStorageFeatures genericStorageFeatures;
-
-    private final HDF5IntStorageFeatures intStorageFeatures;
-    
-    CompressingHdf5WriterWrapper(Hdf5Container parent, IHDF5Writer writer, boolean compress)
-    {
-        this.writer = writer;
-        if (compress)
-        {
-            this.genericStorageFeatures = HDF5GenericStorageFeatures.GENERIC_DEFLATE;
-            this.intStorageFeatures = HDF5IntStorageFeatures.INT_DEFLATE;
-        } else
-        {
-            this.genericStorageFeatures = HDF5GenericStorageFeatures.GENERIC_CONTIGUOUS;
-            this.intStorageFeatures = HDF5IntStorageFeatures.INT_CONTIGUOUS;
-        }
-    }
-
-    /**
-     * @param objectPath
-     * @param data
-     * @see ch.systemsx.cisd.hdf5.IHDF5SimpleWriter#writeByteArray(java.lang.String, byte[])
-     */
-    public void writeByteArray(String objectPath, byte[] data)
-    {
-        writer.writeByteArray(objectPath, data, intStorageFeatures);
-    }
-
-    /**
-     * @see ch.systemsx.cisd.hdf5.IHDF5SimpleReader#close()
-     */
-    public void close()
-    {
-        writer.close();
-    }
-
-}
diff --git a/common/source/java/ch/systemsx/cisd/common/hdf5/Hdf5Container.java b/common/source/java/ch/systemsx/cisd/common/hdf5/HDF5Container.java
similarity index 69%
rename from common/source/java/ch/systemsx/cisd/common/hdf5/Hdf5Container.java
rename to common/source/java/ch/systemsx/cisd/common/hdf5/HDF5Container.java
index 9f14f263cb84227c61b096be01e64f9173bb47b5..3dd949d3a4777cb070652503286a38e050f24803 100644
--- a/common/source/java/ch/systemsx/cisd/common/hdf5/Hdf5Container.java
+++ b/common/source/java/ch/systemsx/cisd/common/hdf5/HDF5Container.java
@@ -17,10 +17,9 @@
 package ch.systemsx.cisd.common.hdf5;
 
 import java.io.File;
-import java.util.List;
+
 
 import ch.systemsx.cisd.hdf5.HDF5FactoryProvider;
-import ch.systemsx.cisd.hdf5.IHDF5SimpleReader;
 import ch.systemsx.cisd.hdf5.IHDF5SimpleWriter;
 
 /**
@@ -30,7 +29,7 @@ import ch.systemsx.cisd.hdf5.IHDF5SimpleWriter;
  * 
  * @author Chandrasekhar Ramakrishnan
  */
-public class Hdf5Container
+public class HDF5Container
 {
     private final File hdf5Container;
 
@@ -39,7 +38,7 @@ public class Hdf5Container
      * 
      * @author Chandrasekhar Ramakrishnan
      */
-    public static interface IHdf5WriterClient
+    public static interface IHDF5WriterClient
     {
         /**
          * Run code using a writer. Implementations do <b>not</b> need to close the writer.
@@ -52,7 +51,7 @@ public class Hdf5Container
      * 
      * @author Chandrasekhar Ramakrishnan
      */
-    public static interface IHdf5ReaderClient
+    public static interface IHDF5ReaderClient
     {
         /**
          * Run code using a reader. Implementations do <b>not</b> need to close the reader.
@@ -66,12 +65,12 @@ public class Hdf5Container
      * @param hdf5Container A file designated to be the hdf5 container. The file need not exist --
      *            it will be created when a writer is accessed.
      */
-    public Hdf5Container(File hdf5Container)
+    public HDF5Container(File hdf5Container)
     {
         this.hdf5Container = hdf5Container;
     }
 
-    public File getHdf5File()
+    public File getHDF5File()
     {
         return hdf5Container;
     }
@@ -83,35 +82,7 @@ public class Hdf5Container
      */
     public IHDF5ContainerReader createSimpleReader()
     {
-        return new IHDF5ContainerReader()
-            {
-                final IHDF5SimpleReader innerReader = HDF5FactoryProvider.get().openForReading(hdf5Container);
-
-                public void close()
-                {
-                    innerReader.close();
-                }
-
-                public boolean exists(String objectPath)
-                {
-                    return innerReader.exists(objectPath);
-                }
-
-                public boolean isGroup(String objectPath)
-                {
-                    return innerReader.isGroup(objectPath);
-                }
-
-                public List<String> getGroupMembers(String groupPath)
-                {
-                    return innerReader.getGroupMembers(groupPath);
-                }
-
-                public byte[] readAsByteArray(String objectPath)
-                {
-                    return innerReader.readAsByteArray(objectPath);
-                }
-            };
+        return new HDF5ContainerReader(hdf5Container);
     }
 
     /**
@@ -123,15 +94,15 @@ public class Hdf5Container
      */
     private IHDF5ContainerWriter createSimpleWriter(boolean isContentCompressed)
     {
-        return new CompressingHdf5WriterWrapper(this, HDF5FactoryProvider.get().open(
-                hdf5Container), isContentCompressed);
+        return new HDF5ContainerWriter(this, HDF5FactoryProvider.get().open(hdf5Container),
+                isContentCompressed);
     }
 
     /**
      * Run a writer client on this Hdf5 container. Ensures that the writer is closed when the client
      * finishes running.
      */
-    public void runWriterClient(boolean isContentCompressed, IHdf5WriterClient client)
+    public void runWriterClient(boolean isContentCompressed, IHDF5WriterClient client)
     {
         IHDF5ContainerWriter writer = createSimpleWriter(isContentCompressed);
         try
@@ -147,7 +118,7 @@ public class Hdf5Container
      * Run a reader client on this Hdf5 container. Ensures that the reader is closed when the client
      * finishes running.
      */
-    public void runReaderClient(IHdf5ReaderClient client)
+    public void runReaderClient(IHDF5ReaderClient client)
     {
         IHDF5ContainerReader reader = createSimpleReader();
         try
diff --git a/common/source/java/ch/systemsx/cisd/common/hdf5/HDF5ContainerReader.java b/common/source/java/ch/systemsx/cisd/common/hdf5/HDF5ContainerReader.java
new file mode 100644
index 0000000000000000000000000000000000000000..fe28c5f90f8c41c61ee22b988bd81fec5947eb60
--- /dev/null
+++ b/common/source/java/ch/systemsx/cisd/common/hdf5/HDF5ContainerReader.java
@@ -0,0 +1,91 @@
+/*
+ * Copyright 2011 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.common.hdf5;
+
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.util.List;
+
+import org.apache.commons.io.IOUtils;
+
+import ch.systemsx.cisd.base.exceptions.IOExceptionUnchecked;
+import ch.systemsx.cisd.hdf5.HDF5FactoryProvider;
+import ch.systemsx.cisd.hdf5.IHDF5Reader;
+import ch.systemsx.cisd.hdf5.io.HDF5IOAdapterFactory;
+
+/**
+ * An implementation of {@link IHDF5ContainerReader}.
+ * 
+ * @author Bernd Rinn
+ */
+final class HDF5ContainerReader implements IHDF5ContainerReader
+{
+    private final IHDF5Reader innerReader;
+
+    HDF5ContainerReader(final File hdf5Container)
+    {
+        this.innerReader = HDF5FactoryProvider.get().openForReading(hdf5Container);
+    }
+
+    public void close()
+    {
+        innerReader.close();
+    }
+
+    public boolean exists(String objectPath)
+    {
+        return innerReader.exists(objectPath);
+    }
+
+    public boolean isGroup(String objectPath)
+    {
+        return innerReader.isGroup(objectPath);
+    }
+
+    public List<String> getGroupMembers(String groupPath)
+    {
+        return innerReader.getGroupMembers(groupPath);
+    }
+
+    public void readFromHDF5Container(String objectPath, OutputStream ostream)
+    {
+        final InputStream istream = HDF5IOAdapterFactory.asInputStream(innerReader, objectPath);
+        Exception e = null;
+        try
+        {
+            IOUtils.copyLarge(istream, ostream);
+        } catch (IOException ex)
+        {
+            e = ex;
+            throw new IOExceptionUnchecked(ex);
+        } finally
+        {
+            try
+            {
+                istream.close();
+            } catch (IOException ex)
+            {
+                if (e == null)
+                {
+                    throw new IOExceptionUnchecked(ex);
+                }
+            }
+        }
+    }
+}
\ No newline at end of file
diff --git a/common/source/java/ch/systemsx/cisd/common/hdf5/HDF5ContainerWriter.java b/common/source/java/ch/systemsx/cisd/common/hdf5/HDF5ContainerWriter.java
new file mode 100644
index 0000000000000000000000000000000000000000..127cbea8c5c0ff5327e8839944c128752a775ac7
--- /dev/null
+++ b/common/source/java/ch/systemsx/cisd/common/hdf5/HDF5ContainerWriter.java
@@ -0,0 +1,110 @@
+/*
+ * Copyright 2010 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.common.hdf5;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
+import org.apache.commons.io.IOUtils;
+
+import ch.systemsx.cisd.base.exceptions.IOExceptionUnchecked;
+import ch.systemsx.cisd.hdf5.HDF5GenericStorageFeatures;
+import ch.systemsx.cisd.hdf5.IHDF5Writer;
+import ch.systemsx.cisd.hdf5.io.HDF5IOAdapterFactory;
+
+/**
+ * An implementation of {@link IHDF5ContainerWriter}.
+ * 
+ * @author Chandrasekhar Ramakrishnan
+ */
+class HDF5ContainerWriter implements IHDF5ContainerWriter
+{
+    private final static int KB = 1024;
+
+    private final static int MB = 1024 * KB;
+
+    private static final int COMPACT_SIZE_LIMIT = 8 * KB;
+
+    private static final String OPAQUE_TAG_FILE = "FILE";
+
+    final static int BUFFER_SIZE = 10 * MB;
+
+    private final IHDF5Writer writer;
+
+    private final HDF5GenericStorageFeatures genericStorageFeatures;
+
+    HDF5ContainerWriter(HDF5Container parent, IHDF5Writer writer, boolean compress)
+    {
+        this.writer = writer;
+        if (compress)
+        {
+            this.genericStorageFeatures = HDF5GenericStorageFeatures.GENERIC_DEFLATE;
+        } else
+        {
+            this.genericStorageFeatures = HDF5GenericStorageFeatures.GENERIC_CHUNKED;
+        }
+    }
+
+    public void writeToHDF5Container(String objectPath, InputStream istream, long size)
+            throws IOExceptionUnchecked
+    {
+        final OutputStream ostream;
+        if (size <= COMPACT_SIZE_LIMIT)
+        {
+            ostream =
+                    HDF5IOAdapterFactory.asOutputStream(writer, objectPath,
+                            HDF5GenericStorageFeatures.GENERIC_COMPACT_DELETE, (int) size,
+                            OPAQUE_TAG_FILE);
+        } else
+        {
+            ostream =
+                    HDF5IOAdapterFactory.asOutputStream(writer, objectPath, genericStorageFeatures,
+                            (int) Math.min(size, BUFFER_SIZE), OPAQUE_TAG_FILE);
+        }
+        IOException e = null;
+        try
+        {
+            IOUtils.copyLarge(istream, ostream);
+        } catch (IOException ex)
+        {
+            e = ex;
+            throw new IOExceptionUnchecked(ex);
+        } finally
+        {
+            try
+            {
+                ostream.close();
+            } catch (IOException ex)
+            {
+                if (e == null)
+                {
+                    throw new IOExceptionUnchecked(ex);
+                }
+            }
+        }
+    }
+
+    /**
+     * @see ch.systemsx.cisd.hdf5.IHDF5SimpleReader#close()
+     */
+    public void close()
+    {
+        writer.close();
+    }
+
+}
diff --git a/common/source/java/ch/systemsx/cisd/common/hdf5/HierarchicalStructureDuplicatorFileToHdf5.java b/common/source/java/ch/systemsx/cisd/common/hdf5/HierarchicalStructureDuplicatorFileToHDF5.java
similarity index 78%
rename from common/source/java/ch/systemsx/cisd/common/hdf5/HierarchicalStructureDuplicatorFileToHdf5.java
rename to common/source/java/ch/systemsx/cisd/common/hdf5/HierarchicalStructureDuplicatorFileToHDF5.java
index b956b85e52081f3bdab6e1cf589004491f78667c..66612878627048b5bae296af756a9b41bf56ac70 100644
--- a/common/source/java/ch/systemsx/cisd/common/hdf5/HierarchicalStructureDuplicatorFileToHdf5.java
+++ b/common/source/java/ch/systemsx/cisd/common/hdf5/HierarchicalStructureDuplicatorFileToHDF5.java
@@ -17,9 +17,9 @@
 package ch.systemsx.cisd.common.hdf5;
 
 import java.io.File;
+import java.io.FileInputStream;
 import java.io.IOException;
-
-import org.apache.commons.io.FileUtils;
+import java.io.InputStream;
 
 import ch.systemsx.cisd.base.exceptions.CheckedExceptionTunnel;
 import ch.systemsx.cisd.base.exceptions.IOExceptionUnchecked;
@@ -30,7 +30,7 @@ import ch.systemsx.cisd.common.filesystem.FileUtilities;
  * 
  * @author Chandrasekhar Ramakrishnan
  */
-public class HierarchicalStructureDuplicatorFileToHdf5
+public class HierarchicalStructureDuplicatorFileToHDF5
 {
     private final File file;
 
@@ -41,7 +41,7 @@ public class HierarchicalStructureDuplicatorFileToHdf5
      * 
      * @author Chandrasekhar Ramakrishnan
      */
-    public static class DuplicatorWriterClient implements Hdf5Container.IHdf5WriterClient
+    public static class DuplicatorWriterClient implements HDF5Container.IHDF5WriterClient
     {
         private final File file;
 
@@ -52,7 +52,7 @@ public class HierarchicalStructureDuplicatorFileToHdf5
 
         public void runWithSimpleWriter(IHDF5ContainerWriter writer)
         {
-            HierarchicalStructureDuplicatorFileToHdf5.makeDuplicate(file, writer);
+            HierarchicalStructureDuplicatorFileToHDF5.makeDuplicate(file, writer);
         }
 
     }
@@ -68,10 +68,10 @@ public class HierarchicalStructureDuplicatorFileToHdf5
      */
     public static void makeDuplicate(File file, IHDF5ContainerWriter writer)
     {
-        new HierarchicalStructureDuplicatorFileToHdf5(file, writer).makeDuplicate();
+        new HierarchicalStructureDuplicatorFileToHDF5(file, writer).makeDuplicate();
     }
 
-    private HierarchicalStructureDuplicatorFileToHdf5(File file, IHDF5ContainerWriter writer)
+    private HierarchicalStructureDuplicatorFileToHDF5(File file, IHDF5ContainerWriter writer)
     {
         this.file = file;
         this.writer = writer;
@@ -97,7 +97,6 @@ public class HierarchicalStructureDuplicatorFileToHdf5
             // Mirror the whole file structure
             mirrorGroup(file, "/");
         }
-
     }
 
     private void mirrorGroup(File directory, String groupPath)
@@ -127,13 +126,31 @@ public class HierarchicalStructureDuplicatorFileToHdf5
                     "Symbolic links are not supported for mirroring in a HDF5 container.");
         }
 
+        InputStream istream = null;
+        IOException e = null;
         try
         {
-            byte[] data = FileUtils.readFileToByteArray(normalFile);
-            writer.writeByteArray(hdf5Path, data);
+            istream = new FileInputStream(normalFile); 
+            writer.writeToHDF5Container(hdf5Path, istream, normalFile.length());
         } catch (IOException ex)
         {
+            e = ex;
             throw new IOExceptionUnchecked(ex);
+        } finally
+        {
+            if (istream != null)
+            {
+                try
+                {
+                    istream.close();
+                } catch (IOException ex)
+                {
+                    if (e == null)
+                    {
+                        throw new IOExceptionUnchecked(ex);
+                    }
+                }
+            }
         }
 
     }
diff --git a/common/source/java/ch/systemsx/cisd/common/hdf5/IHDF5ContainerReader.java b/common/source/java/ch/systemsx/cisd/common/hdf5/IHDF5ContainerReader.java
index 61759ed72a82525169188c85c7aff5bf7fb067d8..9521002511075fa6ce649e47b5eb39b0ebe40553 100644
--- a/common/source/java/ch/systemsx/cisd/common/hdf5/IHDF5ContainerReader.java
+++ b/common/source/java/ch/systemsx/cisd/common/hdf5/IHDF5ContainerReader.java
@@ -16,6 +16,7 @@
 
 package ch.systemsx.cisd.common.hdf5;
 
+import java.io.OutputStream;
 import java.util.List;
 
 /**
@@ -52,11 +53,10 @@ public interface IHDF5ContainerReader
     public boolean isGroup(final String objectPath);
 
     /**
-     * Reads the data set <var>objectPath</var> as byte array (of rank 1).
+     * Reads the data set <var>objectPath</var> into the <var>ostream</var>.
      * 
      * @param objectPath The name (including path information) of the data set object in the file.
-     * @return The data read from the data set.
      */
-    public byte[] readAsByteArray(final String objectPath);
+    public void readFromHDF5Container(final String objectPath, final OutputStream ostream);
 
 }
diff --git a/common/source/java/ch/systemsx/cisd/common/hdf5/IHDF5ContainerWriter.java b/common/source/java/ch/systemsx/cisd/common/hdf5/IHDF5ContainerWriter.java
index 18ec61e7bcfcd4af81e125e6e865ab9740609081..511fd0937faa12cd65255a6b8c8bad7a6cfe85f5 100644
--- a/common/source/java/ch/systemsx/cisd/common/hdf5/IHDF5ContainerWriter.java
+++ b/common/source/java/ch/systemsx/cisd/common/hdf5/IHDF5ContainerWriter.java
@@ -16,21 +16,27 @@
 
 package ch.systemsx.cisd.common.hdf5;
 
+import java.io.InputStream;
+
+import ch.systemsx.cisd.base.exceptions.IOExceptionUnchecked;
+
 /**
  * A simple abstraction of the methods needed to write an HDF5 container.
- *
+ * 
  * @author Bernd Rinn
  */
 public interface IHDF5ContainerWriter
 {
     /**
-     * Writes out a <code>byte</code> array (of rank 1). Uses a compact storage layout. Should only
-     * be used for small data sets.
+     * Write the given <code>istream</code> to a new data set named <code>objectPath</code>.
      * 
-     * @param objectPath The name (including path information) of the data set object in the file.
-     * @param data The data to write. Must not be <code>null</code>.
+     * @param objectPath The path of the data set to write the {@link InputStream} to.
+     * @param istream The stream to get the data from. This method will <i>not</i> close the
+     *            <code>istream</code>!
+     * @param size The size of the file represented by the <var>istream</var>.
      */
-    public void writeByteArray(final String objectPath, final byte[] data);
+    void writeToHDF5Container(final String objectPath, final InputStream istream, final long size)
+            throws IOExceptionUnchecked;
 
     /**
      * Closes this object and the file referenced by this object. This object must not be used after
diff --git a/common/source/java/ch/systemsx/cisd/common/io/HDF5DataSetBasedContent.java b/common/source/java/ch/systemsx/cisd/common/io/HDF5DataSetBasedContent.java
index 116c471dbc0c77584431fbaf420de895102200e2..57339d44bc1ae8d9042ab82e1535052d8f298c0a 100644
--- a/common/source/java/ch/systemsx/cisd/common/io/HDF5DataSetBasedContent.java
+++ b/common/source/java/ch/systemsx/cisd/common/io/HDF5DataSetBasedContent.java
@@ -28,6 +28,7 @@ import ch.systemsx.cisd.common.filesystem.FileUtilities;
 import ch.systemsx.cisd.hdf5.HDF5FactoryProvider;
 import ch.systemsx.cisd.hdf5.IHDF5Reader;
 import ch.systemsx.cisd.hdf5.io.HDF5DataSetRandomAccessFile;
+import ch.systemsx.cisd.hdf5.io.HDF5IOAdapterFactory;
 
 /**
  * An {@link IContent} implementation based on an HDF5 dataset.
@@ -85,7 +86,7 @@ public class HDF5DataSetBasedContent implements IContent, Closeable
     public IRandomAccessFile getReadOnlyRandomAccessFile()
     {
         final HDF5DataSetRandomAccessFile randomAccessFile =
-                HDF5DataSetRandomAccessFile.createForReading(hdf5File, dataSetPath);
+                HDF5IOAdapterFactory.asRandomAccessFileReadOnly(hdf5File, dataSetPath);
         randomAccessFiles.add(randomAccessFile);
         return randomAccessFile;
     }
diff --git a/common/source/java/ch/systemsx/cisd/common/io/hierarchical_content/HDF5ContainerBasedHierarchicalContentNode.java b/common/source/java/ch/systemsx/cisd/common/io/hierarchical_content/HDF5ContainerBasedHierarchicalContentNode.java
index b33c9b3e61a2e4376c41b06174d6de1af0e3d766..a703304c21f813e391fd73ff6ec26efea8a42101 100644
--- a/common/source/java/ch/systemsx/cisd/common/io/hierarchical_content/HDF5ContainerBasedHierarchicalContentNode.java
+++ b/common/source/java/ch/systemsx/cisd/common/io/hierarchical_content/HDF5ContainerBasedHierarchicalContentNode.java
@@ -23,7 +23,7 @@ import java.util.List;
 
 import ch.systemsx.cisd.base.io.IRandomAccessFile;
 import ch.systemsx.cisd.common.filesystem.FileUtilities;
-import ch.systemsx.cisd.common.hdf5.Hdf5Container;
+import ch.systemsx.cisd.common.hdf5.HDF5Container;
 import ch.systemsx.cisd.common.hdf5.IHDF5ContainerReader;
 import ch.systemsx.cisd.common.io.HDF5DataSetBasedContent;
 import ch.systemsx.cisd.common.io.IContent;
@@ -38,13 +38,13 @@ import ch.systemsx.cisd.common.io.hierarchical_content.api.IHierarchicalContentN
 public class HDF5ContainerBasedHierarchicalContentNode extends
         DefaultFileBasedHierarchicalContentNode
 {
-    private final Hdf5Container hdf5Container;
+    private final HDF5Container hdf5Container;
 
     public HDF5ContainerBasedHierarchicalContentNode(IHierarchicalContent root,
             File hdf5ContainerFile)
     {
         super(root, hdf5ContainerFile);
-        this.hdf5Container = new Hdf5Container(hdf5ContainerFile);
+        this.hdf5Container = new HDF5Container(hdf5ContainerFile);
     }
 
     private IHDF5ContainerReader createReader()
@@ -112,7 +112,7 @@ public class HDF5ContainerBasedHierarchicalContentNode extends
     public String toString()
     {
         return "HDF5ContainerBasedHierarchicalContentNode [root=" + root + ", container="
-                + hdf5Container.getHdf5File() + "]";
+                + hdf5Container.getHDF5File() + "]";
     }
 
     @Override
@@ -120,7 +120,7 @@ public class HDF5ContainerBasedHierarchicalContentNode extends
     {
         final int prime = 31;
         int result = 1;
-        result = prime * result + hdf5Container.getHdf5File().hashCode();
+        result = prime * result + hdf5Container.getHDF5File().hashCode();
         result = prime * result + root.hashCode();
         return result;
     }
@@ -142,7 +142,7 @@ public class HDF5ContainerBasedHierarchicalContentNode extends
         }
         HDF5ContainerBasedHierarchicalContentNode other =
                 (HDF5ContainerBasedHierarchicalContentNode) obj;
-        if (!hdf5Container.getHdf5File().equals(other.hdf5Container.getHdf5File()))
+        if (!hdf5Container.getHDF5File().equals(other.hdf5Container.getHDF5File()))
         {
             return false;
         }
diff --git a/common/sourceTest/java/ch/systemsx/cisd/common/hdf5/FileToHdf5DuplicationVerifier.java b/common/sourceTest/java/ch/systemsx/cisd/common/hdf5/FileToHDF5DuplicationVerifier.java
similarity index 77%
rename from common/sourceTest/java/ch/systemsx/cisd/common/hdf5/FileToHdf5DuplicationVerifier.java
rename to common/sourceTest/java/ch/systemsx/cisd/common/hdf5/FileToHDF5DuplicationVerifier.java
index c115b7e20aec2da72a4313f73c439ba47d19b2ce..f1ca3248309292d8293dbc91451f1f4ac46fab03 100644
--- a/common/sourceTest/java/ch/systemsx/cisd/common/hdf5/FileToHdf5DuplicationVerifier.java
+++ b/common/sourceTest/java/ch/systemsx/cisd/common/hdf5/FileToHDF5DuplicationVerifier.java
@@ -16,41 +16,42 @@
 
 package ch.systemsx.cisd.common.hdf5;
 
+import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.IOException;
 
 import org.apache.commons.io.FileUtils;
 import org.testng.AssertJUnit;
 
-import ch.systemsx.cisd.common.hdf5.Hdf5Container.IHdf5ReaderClient;
+import ch.systemsx.cisd.common.hdf5.HDF5Container.IHDF5ReaderClient;
 
 /**
  * Helper class that verifies that a file structure is matched by the HDF5 structure.
  * 
  * @author Chandrasekhar Ramakrishnan
  */
-public class FileToHdf5DuplicationVerifier extends AssertJUnit
+public class FileToHDF5DuplicationVerifier extends AssertJUnit
 {
 
     private final File sourceFolderOrFile;
 
-    private final Hdf5Container container;
+    private final HDF5Container container;
 
     private final IHDF5ContainerReader reader;
 
-    public static IHdf5ReaderClient createVerifierClient(File sourceFolderOrFile,
-            Hdf5Container container)
+    public static IHDF5ReaderClient createVerifierClient(File sourceFolderOrFile,
+            HDF5Container container)
     {
         return new ReaderClient(sourceFolderOrFile, container);
     }
 
-    private static class ReaderClient implements IHdf5ReaderClient
+    private static class ReaderClient implements IHDF5ReaderClient
     {
         private final File sourceFolderOrFile;
 
-        private final Hdf5Container container;
+        private final HDF5Container container;
 
-        private ReaderClient(File sourceFolderOrFile, Hdf5Container container)
+        private ReaderClient(File sourceFolderOrFile, HDF5Container container)
         {
             this.sourceFolderOrFile = sourceFolderOrFile;
             this.container = container;
@@ -58,12 +59,12 @@ public class FileToHdf5DuplicationVerifier extends AssertJUnit
 
         public void runWithSimpleReader(IHDF5ContainerReader reader)
         {
-            new FileToHdf5DuplicationVerifier(sourceFolderOrFile, container, reader)
+            new FileToHDF5DuplicationVerifier(sourceFolderOrFile, container, reader)
                     .verifyDuplicate();
         }
     }
 
-    public FileToHdf5DuplicationVerifier(File sourceFolderOrFile, Hdf5Container container,
+    public FileToHDF5DuplicationVerifier(File sourceFolderOrFile, HDF5Container container,
             IHDF5ContainerReader reader)
     {
         this.sourceFolderOrFile = sourceFolderOrFile;
@@ -73,7 +74,7 @@ public class FileToHdf5DuplicationVerifier extends AssertJUnit
 
     public void verifyDuplicate()
     {
-        assertTrue(container.getHdf5File().length() > 0);
+        assertTrue(container.getHDF5File().length() > 0);
 
         if (sourceFolderOrFile.isFile())
         {
@@ -112,7 +113,9 @@ public class FileToHdf5DuplicationVerifier extends AssertJUnit
         try
         {
             byte[] fileContent = FileUtils.readFileToByteArray(file);
-            byte[] content = reader.readAsByteArray(hdf5Path);
+            final ByteArrayOutputStream ostream = new ByteArrayOutputStream();
+            reader.readFromHDF5Container(hdf5Path, ostream);
+            byte[] content = ostream.toByteArray();
             assertEquals(file.getAbsolutePath() + " does not equal " + hdf5Path, fileContent,
                     content);
         } catch (IOException ex)
diff --git a/common/sourceTest/java/ch/systemsx/cisd/common/hdf5/HDF5ContainerTest.java b/common/sourceTest/java/ch/systemsx/cisd/common/hdf5/HDF5ContainerTest.java
new file mode 100644
index 0000000000000000000000000000000000000000..0eb7529da4ae7d305bd8a0f92962bc1ef42493df
--- /dev/null
+++ b/common/sourceTest/java/ch/systemsx/cisd/common/hdf5/HDF5ContainerTest.java
@@ -0,0 +1,182 @@
+/*
+ * Copyright 2010 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.common.hdf5;
+
+import java.io.ByteArrayInputStream;
+import java.io.ByteArrayOutputStream;
+import java.io.File;
+import java.io.IOException;
+
+import org.testng.annotations.BeforeMethod;
+import org.testng.annotations.Test;
+
+import ch.systemsx.cisd.base.tests.AbstractFileSystemTestCase;
+import ch.systemsx.cisd.common.hdf5.HDF5Container.IHDF5ReaderClient;
+import ch.systemsx.cisd.common.hdf5.HDF5Container.IHDF5WriterClient;
+
+/**
+ * @author Chandrasekhar Ramakrishnan
+ */
+public class HDF5ContainerTest extends AbstractFileSystemTestCase
+{
+    private final static int KB = 1024;
+
+    @Override
+    @BeforeMethod
+    public void setUp() throws IOException
+    {
+        super.setUp();
+    }
+
+    @Test
+    public void testReadWriteUncompressedLarge()
+    {
+        File hdf5File = new File(workingDirectory, "testLarge.h5");
+        hdf5File.delete();
+        HDF5Container hdf5Content = new HDF5Container(hdf5File);
+        final byte[] byteArray = createByteArray(1500 * KB);
+
+        hdf5Content.runWriterClient(false, new IHDF5WriterClient()
+            {
+                public void runWithSimpleWriter(IHDF5ContainerWriter writer)
+                {
+                    writer.writeToHDF5Container("/test-bytes", new ByteArrayInputStream(byteArray),
+                            byteArray.length);
+                }
+            });
+
+        hdf5Content.runReaderClient(new IHDF5ReaderClient()
+            {
+                public void runWithSimpleReader(IHDF5ContainerReader reader)
+                {
+                    final ByteArrayOutputStream ostream = new ByteArrayOutputStream();
+                    reader.readFromHDF5Container("/test-bytes", ostream);
+                    byte[] readData = ostream.toByteArray();
+                    assertEquals(byteArray, readData);
+                }
+            });
+    }
+
+    @Test
+    public void testReadWriteUncompressed()
+    {
+        File hdf5File = new File(workingDirectory, "test.h5");
+        hdf5File.delete();
+        HDF5Container hdf5Content = new HDF5Container(hdf5File);
+        final byte[] byteArray = createByteArray();
+
+        hdf5Content.runWriterClient(false, new IHDF5WriterClient()
+            {
+                public void runWithSimpleWriter(IHDF5ContainerWriter writer)
+                {
+                    writer.writeToHDF5Container("/test-bytes", new ByteArrayInputStream(byteArray),
+                            byteArray.length);
+                }
+            });
+
+        hdf5Content.runReaderClient(new IHDF5ReaderClient()
+            {
+                public void runWithSimpleReader(IHDF5ContainerReader reader)
+                {
+                    final ByteArrayOutputStream ostream = new ByteArrayOutputStream();
+                    reader.readFromHDF5Container("/test-bytes", ostream);
+                    byte[] readData = ostream.toByteArray();
+                    assertEquals(byteArray, readData);
+                }
+            });
+    }
+
+    @Test
+    public void testReadWriteCompressed()
+    {
+        File hdf5File = new File(workingDirectory, "test.h5");
+        hdf5File.delete();
+        HDF5Container hdf5Content = new HDF5Container(hdf5File);
+        final byte[] byteArray = createByteArray();
+
+        hdf5Content.runWriterClient(true, new IHDF5WriterClient()
+            {
+                public void runWithSimpleWriter(IHDF5ContainerWriter writer)
+                {
+                    writer.writeToHDF5Container("/test-bytes", new ByteArrayInputStream(byteArray),
+                            byteArray.length);
+                }
+            });
+
+        hdf5Content.runReaderClient(new IHDF5ReaderClient()
+            {
+                public void runWithSimpleReader(IHDF5ContainerReader reader)
+                {
+                    final ByteArrayOutputStream ostream = new ByteArrayOutputStream();
+                    reader.readFromHDF5Container("/test-bytes", ostream);
+                    byte[] readData = ostream.toByteArray();
+                    assertEquals(byteArray, readData);
+                }
+            });
+    }
+
+    @Test
+    public void testSizeComparison()
+    {
+        final byte[] byteArray = createByteArray();
+
+        File hdf5FileUncompressed = new File(workingDirectory, "test-uncompressed.h5");
+        hdf5FileUncompressed.delete();
+        HDF5Container hdf5ContentUncompressed = new HDF5Container(hdf5FileUncompressed);
+        hdf5ContentUncompressed.runWriterClient(false, new IHDF5WriterClient()
+            {
+                public void runWithSimpleWriter(IHDF5ContainerWriter writer)
+                {
+                    writer.writeToHDF5Container("/test-bytes", new ByteArrayInputStream(byteArray),
+                            byteArray.length);
+                }
+            });
+
+        File hdf5FileCompressed = new File(workingDirectory, "test-compressed.h5");
+        HDF5Container hdf5ContentCompressed = new HDF5Container(hdf5FileCompressed);
+        hdf5ContentCompressed.runWriterClient(true, new IHDF5WriterClient()
+            {
+                public void runWithSimpleWriter(IHDF5ContainerWriter writer)
+                {
+                    writer.writeToHDF5Container("/test-bytes", new ByteArrayInputStream(byteArray),
+                            byteArray.length);
+                }
+            });
+
+        long uncompressedLength = hdf5FileUncompressed.length();
+        long compressedLength = hdf5FileCompressed.length();
+
+        assertTrue("" + uncompressedLength + " <= " + compressedLength,
+                uncompressedLength > compressedLength);
+    }
+
+    private byte[] createByteArray()
+    {
+        return createByteArray(16 * KB);
+    }
+
+    private byte[] createByteArray(int numberOfBytes)
+    {
+        ByteArrayOutputStream bos = new ByteArrayOutputStream(numberOfBytes);
+        for (int i = 0; i < numberOfBytes; ++i)
+        {
+            bos.write(1);
+        }
+        return bos.toByteArray();
+    }
+
+}
diff --git a/common/sourceTest/java/ch/systemsx/cisd/common/hdf5/Hdf5ContainerTest.java b/common/sourceTest/java/ch/systemsx/cisd/common/hdf5/Hdf5ContainerTest.java
deleted file mode 100644
index e665da188ac6a06b15b6e2c29cd79d80f12d393f..0000000000000000000000000000000000000000
--- a/common/sourceTest/java/ch/systemsx/cisd/common/hdf5/Hdf5ContainerTest.java
+++ /dev/null
@@ -1,139 +0,0 @@
-/*
- * Copyright 2010 ETH Zuerich, CISD
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package ch.systemsx.cisd.common.hdf5;
-
-import java.io.ByteArrayOutputStream;
-import java.io.File;
-import java.io.IOException;
-
-import org.testng.annotations.BeforeMethod;
-import org.testng.annotations.Test;
-
-import ch.systemsx.cisd.base.tests.AbstractFileSystemTestCase;
-import ch.systemsx.cisd.common.hdf5.Hdf5Container.IHdf5ReaderClient;
-import ch.systemsx.cisd.common.hdf5.Hdf5Container.IHdf5WriterClient;
-
-/**
- * @author Chandrasekhar Ramakrishnan
- */
-public class Hdf5ContainerTest extends AbstractFileSystemTestCase
-{
-    @Override
-    @BeforeMethod
-    public void setUp() throws IOException
-    {
-        super.setUp();
-    }
-
-    @Test
-    public void testReadWriteUncompressed()
-    {
-        File hdf5File = new File(workingDirectory, "test.h5");
-        Hdf5Container hdf5Content = new Hdf5Container(hdf5File);
-        final byte[] byteArray = createByteArray();
-
-        hdf5Content.runWriterClient(false, new IHdf5WriterClient()
-            {
-                public void runWithSimpleWriter(IHDF5ContainerWriter writer)
-                {
-                    writer.writeByteArray("/test-bytes", byteArray);
-
-                }
-            });
-
-        hdf5Content.runReaderClient(new IHdf5ReaderClient()
-            {
-                public void runWithSimpleReader(IHDF5ContainerReader reader)
-                {
-                    byte[] readData = reader.readAsByteArray("/test-bytes");
-                    assertEquals(byteArray, readData);
-                }
-            });
-    }
-
-    @Test
-    public void testReadWriteCompressed()
-    {
-        File hdf5File = new File(workingDirectory, "test.h5");
-        Hdf5Container hdf5Content = new Hdf5Container(hdf5File);
-        final byte[] byteArray = createByteArray();
-
-        hdf5Content.runWriterClient(true, new IHdf5WriterClient()
-            {
-                public void runWithSimpleWriter(IHDF5ContainerWriter writer)
-                {
-                    writer.writeByteArray("/test-bytes", byteArray);
-
-                }
-            });
-
-        hdf5Content.runReaderClient(new IHdf5ReaderClient()
-            {
-                public void runWithSimpleReader(IHDF5ContainerReader reader)
-                {
-                    byte[] readData = reader.readAsByteArray("/test-bytes");
-                    assertEquals(byteArray, readData);
-                }
-            });
-    }
-
-    @Test
-    public void testSizeComparison()
-    {
-        final byte[] byteArray = createByteArray();
-
-        File hdf5FileUncompressed = new File(workingDirectory, "test-uncompressed.h5");
-        Hdf5Container hdf5ContentUncompressed = new Hdf5Container(hdf5FileUncompressed);
-        hdf5ContentUncompressed.runWriterClient(false, new IHdf5WriterClient()
-            {
-                public void runWithSimpleWriter(IHDF5ContainerWriter writer)
-                {
-                    writer.writeByteArray("/test-bytes", byteArray);
-
-                }
-            });
-
-        File hdf5FileCompressed = new File(workingDirectory, "test-compressed.h5");
-        Hdf5Container hdf5ContentCompressed = new Hdf5Container(hdf5FileCompressed);
-        hdf5ContentCompressed.runWriterClient(true, new IHdf5WriterClient()
-            {
-                public void runWithSimpleWriter(IHDF5ContainerWriter writer)
-                {
-                    writer.writeByteArray("/test-bytes", byteArray);
-
-                }
-            });
-
-        long uncompressedLength = hdf5FileUncompressed.length();
-        long compressedLength = hdf5FileCompressed.length();
-
-        assertTrue("" + uncompressedLength + " <= " + compressedLength,
-                uncompressedLength > compressedLength);
-    }
-
-    private byte[] createByteArray()
-    {
-        int numberOfBytes = 1024 * 1024;
-        ByteArrayOutputStream bos = new ByteArrayOutputStream(1024);
-        for (int i = 0; i < numberOfBytes; ++i)
-        {
-            bos.write(1);
-        }
-        return bos.toByteArray();
-    }
-
-}
diff --git a/common/sourceTest/java/ch/systemsx/cisd/common/hdf5/HierarchicalStructureDuplicatorFileToHdf5Test.java b/common/sourceTest/java/ch/systemsx/cisd/common/hdf5/HierarchicalStructureDuplicatorFileToHDF5Test.java
similarity index 82%
rename from common/sourceTest/java/ch/systemsx/cisd/common/hdf5/HierarchicalStructureDuplicatorFileToHdf5Test.java
rename to common/sourceTest/java/ch/systemsx/cisd/common/hdf5/HierarchicalStructureDuplicatorFileToHDF5Test.java
index eebcebfdf0cc532b3ab502c48394793f9bc5ec08..99147a18575b749c6dc6497559eab8ca01d6597c 100644
--- a/common/sourceTest/java/ch/systemsx/cisd/common/hdf5/HierarchicalStructureDuplicatorFileToHdf5Test.java
+++ b/common/sourceTest/java/ch/systemsx/cisd/common/hdf5/HierarchicalStructureDuplicatorFileToHDF5Test.java
@@ -23,16 +23,16 @@ import org.testng.annotations.BeforeMethod;
 import org.testng.annotations.Test;
 
 import ch.systemsx.cisd.base.tests.AbstractFileSystemTestCase;
-import ch.systemsx.cisd.common.hdf5.Hdf5Container.IHdf5ReaderClient;
+import ch.systemsx.cisd.common.hdf5.HDF5Container.IHDF5ReaderClient;
 
 /**
  * @author Chandrasekhar Ramakrishnan
  */
-public class HierarchicalStructureDuplicatorFileToHdf5Test extends AbstractFileSystemTestCase
+public class HierarchicalStructureDuplicatorFileToHDF5Test extends AbstractFileSystemTestCase
 {
     private File containerFile;
 
-    private Hdf5Container container;
+    private HDF5Container container;
 
     @BeforeMethod
     @Override
@@ -41,7 +41,7 @@ public class HierarchicalStructureDuplicatorFileToHdf5Test extends AbstractFileS
         super.setUp();
 
         containerFile = new File(workingDirectory, "test-container.h5");
-        container = new Hdf5Container(containerFile);
+        container = new HDF5Container(containerFile);
     }
 
     /**
@@ -53,7 +53,7 @@ public class HierarchicalStructureDuplicatorFileToHdf5Test extends AbstractFileS
     {
         File sourceFolder = getTestData("basic-file-structure");
         container.runWriterClient(false,
-                new HierarchicalStructureDuplicatorFileToHdf5.DuplicatorWriterClient(sourceFolder));
+                new HierarchicalStructureDuplicatorFileToHDF5.DuplicatorWriterClient(sourceFolder));
         verifyDuplicate(sourceFolder);
     }
 
@@ -66,7 +66,7 @@ public class HierarchicalStructureDuplicatorFileToHdf5Test extends AbstractFileS
     {
         File sourceFolder = getTestData("basic-file-structure");
         container.runWriterClient(true,
-                new HierarchicalStructureDuplicatorFileToHdf5.DuplicatorWriterClient(sourceFolder));
+                new HierarchicalStructureDuplicatorFileToHDF5.DuplicatorWriterClient(sourceFolder));
         verifyDuplicate(sourceFolder);
     }
 
@@ -78,7 +78,7 @@ public class HierarchicalStructureDuplicatorFileToHdf5Test extends AbstractFileS
     {
         File sourceFolder = getTestData("basic-file-structure/file0.txt");
         container.runWriterClient(true,
-                new HierarchicalStructureDuplicatorFileToHdf5.DuplicatorWriterClient(sourceFolder));
+                new HierarchicalStructureDuplicatorFileToHDF5.DuplicatorWriterClient(sourceFolder));
         verifyDuplicate(sourceFolder);
     }
 
@@ -90,11 +90,11 @@ public class HierarchicalStructureDuplicatorFileToHdf5Test extends AbstractFileS
     {
         File sourceFile = getTestData("basic-file-structure/file0.txt");
         container.runWriterClient(true,
-                new HierarchicalStructureDuplicatorFileToHdf5.DuplicatorWriterClient(sourceFile));
+                new HierarchicalStructureDuplicatorFileToHDF5.DuplicatorWriterClient(sourceFile));
 
         File sourceFolder = getTestData("basic-file-structure");
         container.runWriterClient(true,
-                new HierarchicalStructureDuplicatorFileToHdf5.DuplicatorWriterClient(sourceFolder));
+                new HierarchicalStructureDuplicatorFileToHDF5.DuplicatorWriterClient(sourceFolder));
 
         verifyDuplicate(sourceFolder);
     }
@@ -108,7 +108,7 @@ public class HierarchicalStructureDuplicatorFileToHdf5Test extends AbstractFileS
     {
         File sourceFolder = getTestData("file-structure-with-links");
         container.runWriterClient(false,
-                new HierarchicalStructureDuplicatorFileToHdf5.DuplicatorWriterClient(sourceFolder));
+                new HierarchicalStructureDuplicatorFileToHDF5.DuplicatorWriterClient(sourceFolder));
     }
 
     @Test(expectedExceptions =
@@ -117,7 +117,7 @@ public class HierarchicalStructureDuplicatorFileToHdf5Test extends AbstractFileS
     {
         File sourceFolder = getTestData("does-not-exist");
         container.runWriterClient(false,
-                new HierarchicalStructureDuplicatorFileToHdf5.DuplicatorWriterClient(sourceFolder));
+                new HierarchicalStructureDuplicatorFileToHDF5.DuplicatorWriterClient(sourceFolder));
     }
 
     private File getTestData(String folderOrFile)
@@ -131,8 +131,8 @@ public class HierarchicalStructureDuplicatorFileToHdf5Test extends AbstractFileS
             {
                 public void runWithSimpleReader(IHDF5ContainerReader reader)
                 {
-                    FileToHdf5DuplicationVerifier verifier =
-                            new FileToHdf5DuplicationVerifier(sourceFolderOrFile, container, reader);
+                    FileToHDF5DuplicationVerifier verifier =
+                            new FileToHDF5DuplicationVerifier(sourceFolderOrFile, container, reader);
                     verifier.verifyDuplicate();
                 }
             });
diff --git a/common/sourceTest/java/ch/systemsx/cisd/common/io/hierarchical_content/DefaultFileBasedHierarchicalContentTest.java b/common/sourceTest/java/ch/systemsx/cisd/common/io/hierarchical_content/DefaultFileBasedHierarchicalContentTest.java
index 71233719c9a2dafb169d6b6075e5b778310383a2..f1c3c81694d907812a0bea5d97d1cc179e5a3f18 100644
--- a/common/sourceTest/java/ch/systemsx/cisd/common/io/hierarchical_content/DefaultFileBasedHierarchicalContentTest.java
+++ b/common/sourceTest/java/ch/systemsx/cisd/common/io/hierarchical_content/DefaultFileBasedHierarchicalContentTest.java
@@ -34,10 +34,8 @@ import ch.systemsx.cisd.base.exceptions.IOExceptionUnchecked;
 import ch.systemsx.cisd.base.io.IRandomAccessFile;
 import ch.systemsx.cisd.base.tests.AbstractFileSystemTestCase;
 import ch.systemsx.cisd.common.filesystem.FileUtilities;
-import ch.systemsx.cisd.common.hdf5.Hdf5Container;
-import ch.systemsx.cisd.common.hdf5.HierarchicalStructureDuplicatorFileToHdf5;
-import ch.systemsx.cisd.common.io.hierarchical_content.DefaultFileBasedHierarchicalContent;
-import ch.systemsx.cisd.common.io.hierarchical_content.IHierarchicalContentFactory;
+import ch.systemsx.cisd.common.hdf5.HDF5Container;
+import ch.systemsx.cisd.common.hdf5.HierarchicalStructureDuplicatorFileToHDF5;
 import ch.systemsx.cisd.common.io.hierarchical_content.api.IHierarchicalContentNode;
 import ch.systemsx.cisd.common.utilities.HierarchicalContentUtils;
 import ch.systemsx.cisd.common.utilities.IDelegatedAction;
@@ -496,7 +494,10 @@ public class DefaultFileBasedHierarchicalContentTest extends AbstractFileSystemT
         // file info access
         assertEquals("File: " + expectedFile, expectedFile.getName(), fileNode.getName());
         assertEquals("File: " + expectedFile, expectedFile.length(), fileNode.getFileLength());
-        assertEquals("File: " + expectedFile, expectedFile.lastModified(), fileNode.getLastModified());
+        assertTrue("File: " + expectedFile,
+                fileNode.getLastModified() >= expectedFile.lastModified());
+        assertTrue("File: " + expectedFile,
+                fileNode.getLastModified() - expectedFile.lastModified() <= 1000);
 
         final String expectedFileData = expectedFile.getName() + " data";
         // check random access to file content
@@ -513,9 +514,9 @@ public class DefaultFileBasedHierarchicalContentTest extends AbstractFileSystemT
     /** creates HDF5 container file with <var>containedDir</var> content */
     private static void createHDF5Container(File containerFile, File containedDir)
     {
-        Hdf5Container container = new Hdf5Container(containerFile);
+        HDF5Container container = new HDF5Container(containerFile);
         container.runWriterClient(true,
-                new HierarchicalStructureDuplicatorFileToHdf5.DuplicatorWriterClient(containedDir));
+                new HierarchicalStructureDuplicatorFileToHDF5.DuplicatorWriterClient(containedDir));
     }
 
     private static IHierarchicalContentNode createDummyFileBasedRootNode(final File root)
diff --git a/datastore_server/source/java/ch/systemsx/cisd/etlserver/hdf5/Hdf5StorageProcessor.java b/datastore_server/source/java/ch/systemsx/cisd/etlserver/hdf5/HDF5StorageProcessor.java
similarity index 87%
rename from datastore_server/source/java/ch/systemsx/cisd/etlserver/hdf5/Hdf5StorageProcessor.java
rename to datastore_server/source/java/ch/systemsx/cisd/etlserver/hdf5/HDF5StorageProcessor.java
index c69a9714a87c6468b3539f9b98ce54f2a351f1fe..bf565e1b5366d80e6146111e4b9dda4de59e4eeb 100644
--- a/datastore_server/source/java/ch/systemsx/cisd/etlserver/hdf5/Hdf5StorageProcessor.java
+++ b/datastore_server/source/java/ch/systemsx/cisd/etlserver/hdf5/HDF5StorageProcessor.java
@@ -20,8 +20,8 @@ import java.io.File;
 import java.util.Properties;
 
 import ch.systemsx.cisd.common.filesystem.FileUtilities;
-import ch.systemsx.cisd.common.hdf5.Hdf5Container;
-import ch.systemsx.cisd.common.hdf5.HierarchicalStructureDuplicatorFileToHdf5;
+import ch.systemsx.cisd.common.hdf5.HDF5Container;
+import ch.systemsx.cisd.common.hdf5.HierarchicalStructureDuplicatorFileToHDF5;
 import ch.systemsx.cisd.common.mail.IMailClient;
 import ch.systemsx.cisd.common.utilities.PropertyUtils;
 import ch.systemsx.cisd.etlserver.AbstractStorageProcessor;
@@ -35,7 +35,7 @@ import ch.systemsx.cisd.openbis.dss.generic.shared.dto.DataSetInformation;
  * 
  * @author Chandrasekhar Ramakrishnan
  */
-public class Hdf5StorageProcessor extends AbstractStorageProcessor
+public class HDF5StorageProcessor extends AbstractStorageProcessor
 {
     private static final String HDF5_CONTAINER_FILE_NAME = "container.h5";
 
@@ -48,7 +48,7 @@ public class Hdf5StorageProcessor extends AbstractStorageProcessor
      * 
      * @param properties
      */
-    public Hdf5StorageProcessor(Properties properties)
+    public HDF5StorageProcessor(Properties properties)
     {
         super(properties);
         isDataCompressed = PropertyUtils.getBoolean(properties, COMPRESS_DATA_PROPERTY, false);
@@ -75,9 +75,9 @@ public class Hdf5StorageProcessor extends AbstractStorageProcessor
                 {
                     checkParameters(incomingDataSetDirectory, rootDir);
 
-                    Hdf5Container container = getHdf5Container(rootDir);
+                    HDF5Container container = getHdf5Container(rootDir);
                     container.runWriterClient(isDataCompressed,
-                            new HierarchicalStructureDuplicatorFileToHdf5.DuplicatorWriterClient(
+                            new HierarchicalStructureDuplicatorFileToHDF5.DuplicatorWriterClient(
                                     incomingDataSetDirectory));
 
                     fileBeingProcessed = incomingDataSetDirectory;
@@ -88,7 +88,7 @@ public class Hdf5StorageProcessor extends AbstractStorageProcessor
                 {
                     // Just delete the file in the store -- no need to touch the incomingDataSet
                     // because we haven't done anything to it.
-                    File storedFile = getHdf5ContainerFile(storedDirectory);
+                    File storedFile = getHDF5ContainerFile(storedDirectory);
                     storedFile.delete();
 
                     fileBeingProcessed = null;
@@ -123,19 +123,19 @@ public class Hdf5StorageProcessor extends AbstractStorageProcessor
      * 
      * @return A file with HDF5 content
      */
-    public static File getHdf5ContainerFile(final File storedDataDirectory)
+    public static File getHDF5ContainerFile(final File storedDataDirectory)
     {
         return new File(storedDataDirectory, HDF5_CONTAINER_FILE_NAME);
     }
 
     /**
-     * Given a directory in the store, return an {@link Hdf5Container} object wrapping the file.
+     * Given a directory in the store, return an {@link HDF5Container} object wrapping the file.
      * 
-     * @return An Hdf5Container object.
+     * @return An HDF5Container object.
      */
-    public static Hdf5Container getHdf5Container(final File storedDataDirectory)
+    public static HDF5Container getHdf5Container(final File storedDataDirectory)
     {
-        return new Hdf5Container(getHdf5ContainerFile(storedDataDirectory));
+        return new HDF5Container(getHDF5ContainerFile(storedDataDirectory));
     }
 
 }
diff --git a/datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/hdf5/AbstractHdf5StorageProcessorTest.java b/datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/hdf5/AbstractHDF5StorageProcessorTest.java
similarity index 81%
rename from datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/hdf5/AbstractHdf5StorageProcessorTest.java
rename to datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/hdf5/AbstractHDF5StorageProcessorTest.java
index 2f4de9f9d2f94bfad6b63ec32f48666d54cbef1f..1fd4f29f4541baebf295c5eca20f076f756433ff 100644
--- a/datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/hdf5/AbstractHdf5StorageProcessorTest.java
+++ b/datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/hdf5/AbstractHDF5StorageProcessorTest.java
@@ -21,28 +21,28 @@ import java.util.Properties;
 
 import ch.systemsx.cisd.base.tests.AbstractFileSystemTestCase;
 import ch.systemsx.cisd.common.filesystem.FileUtilities;
-import ch.systemsx.cisd.common.hdf5.FileToHdf5DuplicationVerifier;
-import ch.systemsx.cisd.common.hdf5.Hdf5Container;
-import ch.systemsx.cisd.common.hdf5.Hdf5Container.IHdf5ReaderClient;
+import ch.systemsx.cisd.common.hdf5.FileToHDF5DuplicationVerifier;
+import ch.systemsx.cisd.common.hdf5.HDF5Container;
+import ch.systemsx.cisd.common.hdf5.HDF5Container.IHDF5ReaderClient;
 import ch.systemsx.cisd.common.hdf5.IHDF5ContainerReader;
 import ch.systemsx.cisd.etlserver.IStorageProcessorTransactional.IStorageProcessorTransaction;
 
 /**
- * Tests for {@link Hdf5StorageProcessor}.
+ * Tests for {@link HDF5StorageProcessor}.
  * 
  * @author Chandrasekhar Ramakrishnan
  */
-abstract class AbstractHdf5StorageProcessorTest extends AbstractFileSystemTestCase
+abstract class AbstractHDF5StorageProcessorTest extends AbstractFileSystemTestCase
 {
-    protected final Hdf5StorageProcessor storageProcessor;
+    protected final HDF5StorageProcessor storageProcessor;
 
     protected final IStorageProcessorTransaction transaction;
 
-    protected AbstractHdf5StorageProcessorTest(Properties properties)
+    protected AbstractHDF5StorageProcessorTest(Properties properties)
     {
         super();
 
-        storageProcessor = new Hdf5StorageProcessor(properties);
+        storageProcessor = new HDF5StorageProcessor(properties);
         storageProcessor.setStoreRootDirectory(workingDirectory);
         transaction = storageProcessor.createTransaction();
     }
@@ -63,19 +63,19 @@ abstract class AbstractHdf5StorageProcessorTest extends AbstractFileSystemTestCa
         assertTrue(incomingDataSetDirectory.exists());
         assertTrue(transaction.getStoredDataDirectory().isDirectory());
 
-        File hdf5ContainerFile = Hdf5StorageProcessor.getHdf5ContainerFile(rootDir);
+        File hdf5ContainerFile = HDF5StorageProcessor.getHDF5ContainerFile(rootDir);
         assertTrue(hdf5ContainerFile.exists());
         assertTrue(hdf5ContainerFile.isFile());
 
-        final Hdf5Container container = Hdf5StorageProcessor.getHdf5Container(rootDir);
+        final HDF5Container container = HDF5StorageProcessor.getHdf5Container(rootDir);
 
-        container.runReaderClient(new IHdf5ReaderClient()
+        container.runReaderClient(new IHDF5ReaderClient()
             {
 
                 public void runWithSimpleReader(IHDF5ContainerReader reader)
                 {
-                    FileToHdf5DuplicationVerifier verifier =
-                            new FileToHdf5DuplicationVerifier(incomingDataSetDirectory, container,
+                    FileToHDF5DuplicationVerifier verifier =
+                            new FileToHDF5DuplicationVerifier(incomingDataSetDirectory, container,
                                     reader);
                     verifier.verifyDuplicate();
                 }
@@ -96,7 +96,7 @@ abstract class AbstractHdf5StorageProcessorTest extends AbstractFileSystemTestCa
         assertTrue(incomingDataSetDirectory.exists());
         assertTrue(transaction.getStoredDataDirectory().isDirectory());
 
-        File hdf5ContainerFile = Hdf5StorageProcessor.getHdf5ContainerFile(rootDir);
+        File hdf5ContainerFile = HDF5StorageProcessor.getHDF5ContainerFile(rootDir);
         assertTrue(hdf5ContainerFile.exists());
         assertTrue(hdf5ContainerFile.isFile());
 
diff --git a/datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/hdf5/Hdf5StorageProcessorNoCompressionTest.java b/datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/hdf5/HDF5StorageProcessorNoCompressionTest.java
similarity index 90%
rename from datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/hdf5/Hdf5StorageProcessorNoCompressionTest.java
rename to datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/hdf5/HDF5StorageProcessorNoCompressionTest.java
index 2eda6e7f2d8890628b0824aa777d25b02d22f99f..fc8a957bc6e83006fe960e534e8c5836cf6b42e5 100644
--- a/datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/hdf5/Hdf5StorageProcessorNoCompressionTest.java
+++ b/datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/hdf5/HDF5StorageProcessorNoCompressionTest.java
@@ -24,10 +24,10 @@ import org.testng.annotations.Test;
 /**
  * @author Chandrasekhar Ramakrishnan
  */
-public class Hdf5StorageProcessorNoCompressionTest extends AbstractHdf5StorageProcessorTest
+public class HDF5StorageProcessorNoCompressionTest extends AbstractHDF5StorageProcessorTest
 {
 
-    public Hdf5StorageProcessorNoCompressionTest()
+    public HDF5StorageProcessorNoCompressionTest()
     {
         super(createProperties());
     }
diff --git a/datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/hdf5/Hdf5StorageProcessorWithCompressionTest.java b/datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/hdf5/HDF5StorageProcessorWithCompressionTest.java
similarity index 87%
rename from datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/hdf5/Hdf5StorageProcessorWithCompressionTest.java
rename to datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/hdf5/HDF5StorageProcessorWithCompressionTest.java
index fbb100dee2144e84483db3d07c25f02b7482e971..41b226d46172dfd90dcd6161f447efb93913661e 100644
--- a/datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/hdf5/Hdf5StorageProcessorWithCompressionTest.java
+++ b/datastore_server/sourceTest/java/ch/systemsx/cisd/etlserver/hdf5/HDF5StorageProcessorWithCompressionTest.java
@@ -24,10 +24,10 @@ import org.testng.annotations.Test;
 /**
  * @author Chandrasekhar Ramakrishnan
  */
-public class Hdf5StorageProcessorWithCompressionTest extends AbstractHdf5StorageProcessorTest
+public class HDF5StorageProcessorWithCompressionTest extends AbstractHDF5StorageProcessorTest
 {
 
-    public Hdf5StorageProcessorWithCompressionTest()
+    public HDF5StorageProcessorWithCompressionTest()
     {
         super(createProperties());
     }
@@ -35,7 +35,7 @@ public class Hdf5StorageProcessorWithCompressionTest extends AbstractHdf5Storage
     private static Properties createProperties()
     {
         Properties props = new Properties();
-        props.setProperty(Hdf5StorageProcessor.COMPRESS_DATA_PROPERTY, "true");
+        props.setProperty(HDF5StorageProcessor.COMPRESS_DATA_PROPERTY, "true");
         return props;
     }
 
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AbstractImageStorageProcessor.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AbstractImageStorageProcessor.java
index d3bfa4e9646e82544360e407581931b6c46142af..056a87cbb70d4361af6856cfd15e087b646a6da6 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AbstractImageStorageProcessor.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AbstractImageStorageProcessor.java
@@ -43,8 +43,8 @@ import ch.systemsx.cisd.common.filesystem.FileOperations;
 import ch.systemsx.cisd.common.filesystem.FileUtilities;
 import ch.systemsx.cisd.common.filesystem.IFileOperations;
 import ch.systemsx.cisd.common.filesystem.SoftLinkMaker;
-import ch.systemsx.cisd.common.hdf5.Hdf5Container;
-import ch.systemsx.cisd.common.hdf5.HierarchicalStructureDuplicatorFileToHdf5;
+import ch.systemsx.cisd.common.hdf5.HDF5Container;
+import ch.systemsx.cisd.common.hdf5.HierarchicalStructureDuplicatorFileToHDF5;
 import ch.systemsx.cisd.common.logging.LogCategory;
 import ch.systemsx.cisd.common.logging.LogFactory;
 import ch.systemsx.cisd.common.mail.IMailClient;
@@ -510,9 +510,9 @@ abstract class AbstractImageStorageProcessor extends AbstractStorageProcessor im
     private static void saveInHdf5(File sourceFolder, File hdf5DestinationFile,
             boolean compressFiles)
     {
-        Hdf5Container container = new Hdf5Container(hdf5DestinationFile);
+        HDF5Container container = new HDF5Container(hdf5DestinationFile);
         container.runWriterClient(compressFiles,
-                new HierarchicalStructureDuplicatorFileToHdf5.DuplicatorWriterClient(sourceFolder));
+                new HierarchicalStructureDuplicatorFileToHDF5.DuplicatorWriterClient(sourceFolder));
     }
 
     private File moveToStore(File incomingDataSetDirectory, File rootDirectory)
@@ -541,7 +541,7 @@ abstract class AbstractImageStorageProcessor extends AbstractStorageProcessor im
                 imageStorageConfiguraton.getThumbnailsStorageFormat();
         if (thumbnailsStorageFormatOrNull != null)
         {
-            Hdf5Container container = new Hdf5Container(thumbnailsFile);
+            HDF5Container container = new HDF5Container(thumbnailsFile);
             ImageLibraryInfo imageLibrary = imageStorageConfiguraton.tryGetImageLibrary();
             Hdf5ThumbnailGenerator thumbnailsGenerator =
                     new Hdf5ThumbnailGenerator(plateImages, imagesInStoreFolder,
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/Hdf5ThumbnailGenerator.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/Hdf5ThumbnailGenerator.java
index e5a14d2c714a9e510d73b7c30a2fd3c6cbab8f8d..7f9bbeb72d7a1068ad3ec815a72c210d3c53806d 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/Hdf5ThumbnailGenerator.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/Hdf5ThumbnailGenerator.java
@@ -17,6 +17,7 @@
 package ch.systemsx.cisd.openbis.dss.etl;
 
 import java.awt.image.BufferedImage;
+import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.File;
 import java.io.IOException;
@@ -35,7 +36,7 @@ import ch.systemsx.cisd.common.concurrent.FailureRecord;
 import ch.systemsx.cisd.common.concurrent.ITaskExecutor;
 import ch.systemsx.cisd.common.concurrent.ParallelizedExecutor;
 import ch.systemsx.cisd.common.exceptions.Status;
-import ch.systemsx.cisd.common.hdf5.Hdf5Container.IHdf5WriterClient;
+import ch.systemsx.cisd.common.hdf5.HDF5Container.IHDF5WriterClient;
 import ch.systemsx.cisd.common.hdf5.IHDF5ContainerWriter;
 import ch.systemsx.cisd.common.io.FileBasedContent;
 import ch.systemsx.cisd.common.logging.LogCategory;
@@ -52,7 +53,7 @@ import ch.systemsx.cisd.openbis.dss.generic.shared.utils.ImageUtil;
  * 
  * @author Chandrasekhar Ramakrishnan
  */
-class Hdf5ThumbnailGenerator implements IHdf5WriterClient
+class Hdf5ThumbnailGenerator implements IHDF5WriterClient
 {
     private static final File convertUtilityOrNull = OSUtilities.findExecutable("convert");
 
@@ -117,7 +118,8 @@ class Hdf5ThumbnailGenerator implements IHdf5WriterClient
             }
             synchronized (writer)
             {
-                writer.writeByteArray(thumbnailPath, byteArray);
+                writer.writeToHDF5Container(thumbnailPath, new ByteArrayInputStream(byteArray),
+                        byteArray.length);
             }
         } catch (IOException ex)
         {