diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSDatasetUploader.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AbstractImageDatasetUploader.java
similarity index 57%
rename from screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSDatasetUploader.java
rename to screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AbstractImageDatasetUploader.java
index f78b0be136c1aece06684a77ed6822479dfbabcf..fbe52428198330b3628b83c23db672f56e17aa24 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSDatasetUploader.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AbstractImageDatasetUploader.java
@@ -23,47 +23,27 @@ import java.util.Map;
 import java.util.Map.Entry;
 import java.util.Set;
 
-import ch.systemsx.cisd.openbis.dss.etl.ScreeningContainerDatasetInfoHelper.ExperimentWithChannelsAndContainer;
+import ch.systemsx.cisd.openbis.dss.etl.ImagingDatabaseHelper.ImagingChannelsMap;
 import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingQueryDAO;
 import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgAcquiredImageDTO;
 import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgChannelStackDTO;
 import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgImageDTO;
-import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgSpotDTO;
 
 /**
+ * Abstract superclass for uploaders of image datasets into the imaging database.
+ * 
  * @author Tomasz Pylak
  */
-public class HCSDatasetUploader
+abstract class AbstractImageDatasetUploader
 {
-    public static void upload(IImagingQueryDAO dao, ImageDatasetInfo info,
-            List<AcquiredPlateImage> images, List<HCSImageFileExtractionResult.Channel> channels)
-    {
-        new HCSDatasetUploader(dao).upload(info, images, channels);
-    }
+    protected final IImagingQueryDAO dao;
 
-    private final IImagingQueryDAO dao;
-
-    private HCSDatasetUploader(IImagingQueryDAO dao)
+    protected AbstractImageDatasetUploader(IImagingQueryDAO dao)
     {
         this.dao = dao;
     }
 
-    private void upload(ImageDatasetInfo info, List<AcquiredPlateImage> images,
-            List<HCSImageFileExtractionResult.Channel> channels)
-    {
-        ExperimentWithChannelsAndContainer basicStruct =
-                ScreeningContainerDatasetInfoHelper.getOrCreateExperimentWithChannelsAndContainer(
-                        dao, info, channels);
-        long contId = basicStruct.getContainerId();
-        Map<String, Long/* (tech id */> channelsMap = basicStruct.getChannelsMap();
-
-        Long[][] spotIds = getOrCreateSpots(contId, info, images);
-        long datasetId = createDataset(contId, info);
-
-        createImages(images, spotIds, channelsMap, datasetId);
-    }
-
-    private static class AcquiredImageInStack
+    protected static class AcquiredImageInStack
     {
         private final String channelCode;
 
@@ -95,23 +75,28 @@ public class HCSDatasetUploader
         }
     }
 
-    private void createImages(List<AcquiredPlateImage> images, Long[][] spotIds,
-            Map<String, Long> channelsMap, long datasetId)
+    protected static interface ISpotProvider
+    {
+        Long tryGetSpotId(AcquiredPlateImage image);
+    }
+
+    protected final void createImages(List<AcquiredPlateImage> images, ISpotProvider spotProvider,
+            ImagingChannelsMap channelsMap, long datasetId)
     {
         Map<ImgChannelStackDTO, List<AcquiredImageInStack>> stackImagesMap =
-                makeStackImagesMap(images, spotIds, datasetId);
+                makeStackImagesMap(images, spotProvider, datasetId);
         dao.addChannelStacks(new ArrayList<ImgChannelStackDTO>(stackImagesMap.keySet()));
         createImages(stackImagesMap, channelsMap);
     }
 
     private Map<ImgChannelStackDTO, List<AcquiredImageInStack>> makeStackImagesMap(
-            List<AcquiredPlateImage> images, Long[][] spotIds, long datasetId)
+            List<AcquiredPlateImage> images, ISpotProvider spotProvider, long datasetId)
     {
         Map<ImgChannelStackDTO, List<AcquiredImageInStack>> map =
                 new HashMap<ImgChannelStackDTO, List<AcquiredImageInStack>>();
         for (AcquiredPlateImage image : images)
         {
-            ImgChannelStackDTO stackDTO = makeStackDtoWithouId(image, spotIds, datasetId);
+            ImgChannelStackDTO stackDTO = makeStackDtoWithouId(image, spotProvider, datasetId);
             List<AcquiredImageInStack> stackImages = map.get(stackDTO);
             if (stackImages == null)
             {
@@ -138,26 +123,17 @@ public class HCSDatasetUploader
                 image.getThumbnailFilePathOrNull());
     }
 
-    private ImgChannelStackDTO makeStackDtoWithouId(AcquiredPlateImage image, Long[][] spotIds,
-            long datasetId)
+    private ImgChannelStackDTO makeStackDtoWithouId(AcquiredPlateImage image,
+            ISpotProvider spotProvider, long datasetId)
     {
-        long spotId = getSpotId(image, spotIds);
+        Long spotId = spotProvider.tryGetSpotId(image);
         int dummyId = 0;
         return new ImgChannelStackDTO(dummyId, image.getTileRow(), image.getTileColumn(),
                 datasetId, spotId, image.tryGetTimePoint(), image.tryGetDepth());
     }
 
-    private static long getSpotId(AcquiredPlateImage image, Long[][] spotIds)
-    {
-        int wellRow = image.getWellRow();
-        int wellColumn = image.getWellColumn();
-        Long spotId = spotIds[wellRow - 1][wellColumn - 1];
-        assert spotId != null : "no spot for " + image;
-        return spotId;
-    }
-
     private void createImages(Map<ImgChannelStackDTO, List<AcquiredImageInStack>> stackImagesMap,
-            Map<String, Long> channelsMap)
+            ImagingChannelsMap channelsMap)
     {
         ImagesToCreate imagesToCreate =
                 new ImagesToCreate(new ArrayList<ImgImageDTO>(),
@@ -178,13 +154,13 @@ public class HCSDatasetUploader
      * Then we can save everything in one go.
      */
     private void addImagesToCreate(ImagesToCreate imagesToCreate, long stackId,
-            Map<String, Long> channelsMap, List<AcquiredImageInStack> images)
+            ImagingChannelsMap channelsMap, List<AcquiredImageInStack> images)
     {
         List<ImgImageDTO> imageDTOs = imagesToCreate.getImages();
         List<ImgAcquiredImageDTO> acquiredImageDTOs = imagesToCreate.getAcquiredImages();
         for (AcquiredImageInStack image : images)
         {
-            long channelTechId = channelsMap.get(image.getChannelCode());
+            long channelTechId = channelsMap.getChannelId(image.getChannelCode());
 
             ImgImageDTO imageDTO = mkImageWithIdDTO(image.getImageFilePath());
             ImgImageDTO thumbnailDTO = tryMkImageWithIdDTO(image.getThumbnailPathOrNull());
@@ -253,92 +229,4 @@ public class HCSDatasetUploader
                         imageReferenceOrNull.tryGetColorComponent());
         return dto;
     }
-
-    // returns a matrix of spot tech ids. The matrix[row][col] contains null is
-    // spot at (row,col)
-    // does not exist. Spot coordinates are 0-based in the matrix.
-    private Long[][] getOrCreateSpots(long contId, ScreeningContainerDatasetInfo info,
-            List<AcquiredPlateImage> images)
-    {
-        List<ImgSpotDTO> oldSpots = dao.listSpots(contId);
-        List<ImgSpotDTO> newSpots =
-                createNewSpots(contId, images, oldSpots, info.getContainerRows(),
-                        info.getContainerColumns(), info.getContainerPermId());
-        newSpots.addAll(oldSpots);
-        return makeTechIdMatrix(newSpots, info.getContainerRows(), info.getContainerColumns());
-    }
-
-    private List<ImgSpotDTO> createNewSpots(long contId, List<AcquiredPlateImage> images,
-            List<ImgSpotDTO> existingSpots, int rows, int columns, String containerPermId)
-    {
-        Boolean[][] newSpotMatrix = extractNewSpots(rows, columns, images, existingSpots);
-        List<ImgSpotDTO> newSpots = makeSpotDTOs(newSpotMatrix, contId);
-        for (ImgSpotDTO spot : newSpots)
-        {
-            long id = dao.addSpot(spot);
-            spot.setId(id);
-        }
-        return newSpots;
-    }
-
-    private static Boolean[][] extractNewSpots(int rows, int columns,
-            List<AcquiredPlateImage> images, List<ImgSpotDTO> existingSpots)
-    {
-        Boolean[][] spots = extractExistingSpots(rows, columns, images);
-        unmarkSpots(existingSpots, spots);
-        return spots;
-    }
-
-    private static Boolean[][] extractExistingSpots(int rows, int columns,
-            List<AcquiredPlateImage> images)
-    {
-        Boolean[][] spots = new Boolean[rows][columns];
-        for (AcquiredPlateImage image : images)
-        {
-            spots[image.getWellRow() - 1][image.getWellColumn() - 1] = true;
-        }
-        return spots;
-    }
-
-    private static Long[][] makeTechIdMatrix(List<ImgSpotDTO> existingSpots, int rows, int columns)
-    {
-        Long[][] matrix = new Long[rows][columns];
-        for (ImgSpotDTO spot : existingSpots)
-        {
-            matrix[spot.getRow() - 1][spot.getColumn() - 1] = spot.getId();
-        }
-        return matrix;
-    }
-
-    private static List<ImgSpotDTO> makeSpotDTOs(Boolean[][] spots, long contId)
-    {
-
-        List<ImgSpotDTO> newSpots = new ArrayList<ImgSpotDTO>();
-        for (int row = 0; row < spots.length; row++)
-        {
-            Boolean[] spotRow = spots[row];
-            for (int col = 0; col < spotRow.length; col++)
-            {
-                Boolean wanted = spotRow[col];
-                if (wanted != null && wanted)
-                {
-                    newSpots.add(new ImgSpotDTO(row + 1, col + 1, contId));
-                }
-            }
-        }
-        return newSpots;
-    }
-
-    private static void unmarkSpots(List<ImgSpotDTO> existingSpots, Boolean[][] spotMatrix)
-    {
-        for (ImgSpotDTO existingSpot : existingSpots)
-        {
-            spotMatrix[existingSpot.getRow() - 1][existingSpot.getColumn() - 1] = false;
-        }
-    }
-
-    private long createDataset(long contId, ImageDatasetInfo info)
-    {
-        return ScreeningContainerDatasetInfoHelper.createImageDataset(dao, info, contId);
-    }
 }
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AbstractHCSImageFileExtractor.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AbstractImageFileExtractor.java
similarity index 52%
rename from screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AbstractHCSImageFileExtractor.java
rename to screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AbstractImageFileExtractor.java
index 0d4305b795904473caa71b56a91cd1fafc46ec2b..b5d7ecfa665ff2d1e4fbfd6633c491dd56d6a5e0 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AbstractHCSImageFileExtractor.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AbstractImageFileExtractor.java
@@ -23,8 +23,6 @@ import java.util.LinkedList;
 import java.util.List;
 import java.util.Properties;
 
-import org.apache.commons.io.FilenameUtils;
-import org.apache.commons.lang.StringUtils;
 import org.apache.log4j.Logger;
 
 import ch.systemsx.cisd.base.exceptions.InterruptedExceptionUnchecked;
@@ -36,194 +34,38 @@ import ch.systemsx.cisd.common.exceptions.UserFailureException;
 import ch.systemsx.cisd.common.filesystem.FileUtilities;
 import ch.systemsx.cisd.common.logging.LogCategory;
 import ch.systemsx.cisd.common.logging.LogFactory;
-import ch.systemsx.cisd.common.utilities.AbstractHashable;
 import ch.systemsx.cisd.common.utilities.PropertyUtils;
 import ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractionResult.Channel;
+import ch.systemsx.cisd.openbis.dss.etl.dto.ImageFileInfo;
 import ch.systemsx.cisd.openbis.dss.generic.shared.dto.DataSetInformation;
 import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifier;
 import ch.systemsx.cisd.openbis.plugin.screening.shared.basic.dto.ChannelDescription;
 import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ColorComponent;
 
 /**
- * Abstract superclass for <code>IHCSImageFileExtractor</code> implementations.
+ * Abstract superclass for {@link IImageFileExtractor} implementations.<br>
+ * <br>
+ * Assumes that image names have a file extension present in
+ * {@link ImageFileExtractorUtils#IMAGE_EXTENSIONS} constant. <br>
+ * <br>
+ * If 'extract-single-image-channels' property is specified for storage processor then the channels
+ * are extracted from the color components and the channel in the image file name is ignored.
  * 
  * @author Tomasz Pylak
  */
-abstract public class AbstractHCSImageFileExtractor implements IHCSImageFileExtractor
+abstract public class AbstractImageFileExtractor implements IImageFileExtractor
 {
-    abstract protected List<AcquiredPlateImage> getImages(ImageFileInfo imageInfo);
-
-    abstract protected List<Channel> getAllChannels();
-
-    /** Should log all the syntax problems in image names. */
-    abstract protected ImageFileInfo tryExtractImageInfo(File imageFile,
-            File incomingDataSetDirectory, SampleIdentifier datasetSample);
-
     /**
-     * Extracts the plate location from argument. Returns <code>null</code> if the operation fails.
+     * Extracts {@link ImageFileInfo} for a given image file. Should log all the syntax problems in
+     * image names.
      */
-    protected static Location tryGetPlateLocation(final String plateLocation)
-    {
-        return Location.tryCreateLocationFromTransposedMatrixCoordinate(plateLocation);
-    }
-
-    /**
-     * Intermediate structure containing tokens from which image info {@link ImageFileInfo} can be
-     * extracted if one finds it useful.
-     */
-    public static class UnparsedImageFileInfo extends AbstractHashable
-    {
-        private String wellLocationToken;
-
-        private String tileLocationToken;
-
-        private String channelToken;
-
-        private String timepointToken;
-
-        private String depthToken;
-
-        private String imageRelativePath;
-
-        public String getWellLocationToken()
-        {
-            return wellLocationToken;
-        }
-
-        public void setWellLocationToken(String wellLocationToken)
-        {
-            this.wellLocationToken = wellLocationToken;
-        }
-
-        public String getTileLocationToken()
-        {
-            return tileLocationToken;
-        }
-
-        public void setTileLocationToken(String tileLocationToken)
-        {
-            this.tileLocationToken = tileLocationToken;
-        }
-
-        public String getChannelToken()
-        {
-            return channelToken;
-        }
-
-        public void setChannelToken(String channelToken)
-        {
-            this.channelToken = channelToken;
-        }
-
-        public String getTimepointToken()
-        {
-            return timepointToken;
-        }
-
-        public void setTimepointToken(String timepointToken)
-        {
-            this.timepointToken = timepointToken;
-        }
-
-        public String getDepthToken()
-        {
-            return depthToken;
-        }
-
-        public void setDepthToken(String depthToken)
-        {
-            this.depthToken = depthToken;
-        }
-
-        public String getImageRelativePath()
-        {
-            return imageRelativePath;
-        }
-
-        public void setImageRelativePath(String imageRelativePath)
-        {
-            this.imageRelativePath = imageRelativePath;
-        }
-    }
-
-    /** Information about one image file */
-    public static final class ImageFileInfo
-    {
-        private final Location wellLocation;
-
-        private final Location tileLocation;
-
-        private String channelCode;
-
-        private final String imageRelativePath;
-
-        private final Float timepointOrNull;
-
-        private final Float depthOrNull;
-
-        public ImageFileInfo(Location wellLocation, String channelCode, Location tileLocation,
-                String imageRelativePath, Float timepointOrNull, Float depthOrNull)
-        {
-            assert wellLocation != null;
-            assert channelCode != null;
-            assert tileLocation != null;
-            assert imageRelativePath != null;
-
-            this.wellLocation = wellLocation;
-            this.channelCode = channelCode;
-            this.tileLocation = tileLocation;
-            this.imageRelativePath = imageRelativePath;
-            this.timepointOrNull = timepointOrNull;
-            this.depthOrNull = depthOrNull;
-        }
-
-        public Location getWellLocation()
-        {
-            return wellLocation;
-        }
-
-        public Location getTileLocation()
-        {
-            return tileLocation;
-        }
-
-        public String getChannelCode()
-        {
-            return channelCode;
-        }
-
-        public String getImageRelativePath()
-        {
-            return imageRelativePath;
-        }
-
-        public Float tryGetTimepoint()
-        {
-            return timepointOrNull;
-        }
-
-        public Float tryGetDepth()
-        {
-            return depthOrNull;
-        }
-
-        public void setChannelCode(String channelCode)
-        {
-            this.channelCode = channelCode;
-        }
-
-        @Override
-        public String toString()
-        {
-            return "ImageFileInfo [well=" + wellLocation + ", tile=" + tileLocation + ", channel="
-                    + channelCode + ", path=" + imageRelativePath + ", timepoint="
-                    + timepointOrNull + ", depth=" + depthOrNull + "]";
-        }
+    abstract protected ImageFileInfo tryExtractImageInfo(File imageFile,
+            File incomingDataSetDirectory, SampleIdentifier datasetSample);
 
-    }
+    // -----------
 
     protected static final Logger operationLog = LogFactory.getLogger(LogCategory.OPERATION,
-            AbstractHCSImageFileExtractor.class);
+            AbstractImageFileExtractor.class);
 
     protected static final String IMAGE_FILE_NOT_STANDARDIZABLE =
             "Information about the image could not be extracted for the file '%s'.";
@@ -240,63 +82,68 @@ abstract public class AbstractHCSImageFileExtractor implements IHCSImageFileExtr
     protected static final String EXTRACT_SINGLE_IMAGE_CHANNELS_PROPERTY =
             "extract-single-image-channels";
 
+    protected static final String TILE_MAPPING_PROPERTY = "tile_mapping";
+
     protected static final char TOKEN_SEPARATOR = '_';
 
-    public AbstractHCSImageFileExtractor(final Properties properties)
+    // -----------------------------------------
+
+    private final List<ChannelDescription> channelDescriptions;
+
+    private final List<ColorComponent> channelColorComponentsOrNull;
+
+    protected final TileMapper tileMapperOrNull;
+
+    protected final Geometry wellGeometry;
+
+    protected AbstractImageFileExtractor(final Properties properties)
     {
+        this.channelDescriptions = tryExtractChannelDescriptions(properties);
+        this.channelColorComponentsOrNull = tryGetChannelComponents(properties);
+        checkChannelsAndColorComponents();
+        this.wellGeometry = getWellGeometry(properties);
+        this.tileMapperOrNull =
+                TileMapper.tryCreate(properties.getProperty(TILE_MAPPING_PROPERTY), wellGeometry);
     }
 
-    /**
-     * Splits specified image file name into at least four tokens. Only the last four tokens will be
-     * considered. They are sample code, plate location, well location, and channel. Note, that
-     * sample code could be <code>null</code>.
-     * 
-     * @param shouldValidatePlateName if true it will be checked if the plate code in the file name
-     *            matches the datasetSample plate code.
-     * @return <code>null</code> if the argument could not be splitted into tokens.
-     */
-    protected final static UnparsedImageFileInfo tryExtractDefaultImageInfo(File imageFile,
-            File incomingDataSetDirectory, SampleIdentifier datasetSample,
-            boolean shouldValidatePlateName)
+    private static Geometry getWellGeometry(final Properties properties)
     {
-        final String baseName = FilenameUtils.getBaseName(imageFile.getPath());
-        final String[] tokens = StringUtils.split(baseName, TOKEN_SEPARATOR);
-        if (tokens == null || tokens.length < 4)
+        final String property = properties.getProperty(WellGeometry.WELL_GEOMETRY);
+        if (property == null)
         {
-            if (operationLog.isInfoEnabled())
-            {
-                operationLog.info(String.format(IMAGE_FILE_NOT_ENOUGH_ENTITIES, imageFile));
-            }
-            return null;
+            throw new ConfigurationFailureException(String.format(
+                    "No '%s' property has been specified.", WellGeometry.WELL_GEOMETRY));
         }
-        final String sampleCode = tokens[tokens.length - 4];
-        if (shouldValidatePlateName && sampleCode != null
-                && sampleCode.equalsIgnoreCase(datasetSample.getSampleCode()) == false)
+        final Geometry geometry = WellGeometry.createFromString(property);
+        if (geometry == null)
         {
-            if (operationLog.isInfoEnabled())
-            {
-                operationLog.info(String.format(IMAGE_FILE_BELONGS_TO_WRONG_SAMPLE, imageFile,
-                        datasetSample, sampleCode));
-            }
-            return null;
+            throw new ConfigurationFailureException(String.format(
+                    "Could not create a geometry from property value '%s'.", property));
         }
-        String channelToken = tokens[tokens.length - 1];
-        if (StringUtils.isBlank(channelToken))
+        return geometry;
+    }
+
+    protected final Location tryGetTileLocation(int tileNumber)
+    {
+        if (tileMapperOrNull != null)
+        {
+            return tileMapperOrNull.tryGetLocation(tileNumber);
+        } else
         {
-            operationLog.info("Channel token is empty for image: " + imageFile);
             return null;
         }
+    }
 
-        UnparsedImageFileInfo info = new UnparsedImageFileInfo();
-        info.setWellLocationToken(tokens[tokens.length - 3]);
-        info.setTileLocationToken(tokens[tokens.length - 2]);
-        info.setChannelToken(channelToken);
-        info.setTimepointToken(null);
-        info.setDepthToken(null);
-
-        String imageRelativePath = getRelativeImagePath(incomingDataSetDirectory, imageFile);
-        info.setImageRelativePath(imageRelativePath);
-        return info;
+    private void checkChannelsAndColorComponents()
+    {
+        if (channelColorComponentsOrNull != null
+                && channelColorComponentsOrNull.size() != channelDescriptions.size())
+        {
+            throw ConfigurationFailureException.fromTemplate(
+                    "There should be exactly one color component for each channel name."
+                            + " Correct the list of values for '%s' property.",
+                    AbstractImageFileExtractor.EXTRACT_SINGLE_IMAGE_CHANNELS_PROPERTY);
+        }
     }
 
     public ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractionResult extract(
@@ -338,7 +185,38 @@ abstract public class AbstractHCSImageFileExtractor implements IHCSImageFileExtr
 
     }
 
-    protected static String getRelativeImagePath(File incomingDataSetDirectory, final File imageFile)
+    private List<AcquiredPlateImage> getImages(ImageFileInfo imageInfo)
+    {
+        checkChannelsAndColorComponents();
+
+        if (channelColorComponentsOrNull != null)
+        {
+            List<AcquiredPlateImage> images = new ArrayList<AcquiredPlateImage>();
+            for (int i = 0; i < channelColorComponentsOrNull.size(); i++)
+            {
+                ColorComponent colorComponent = channelColorComponentsOrNull.get(i);
+                ChannelDescription channelDescription = channelDescriptions.get(i);
+                imageInfo.setChannelCode(channelDescription.getCode());
+                images.add(createImage(imageInfo, colorComponent));
+            }
+            return images;
+        } else
+        {
+            ensureChannelExist(channelDescriptions, imageInfo.getChannelCode());
+            return createImagesWithNoColorComponent(imageInfo);
+        }
+
+    }
+
+    private List<Channel> getAllChannels()
+    {
+        return createChannels(channelDescriptions);
+    }
+
+    // ------- static helper methods
+
+    protected final static String getRelativeImagePath(File incomingDataSetDirectory,
+            final File imageFile)
     {
         String imageRelativePath =
                 FileUtilities.getRelativeFile(incomingDataSetDirectory,
@@ -347,9 +225,7 @@ abstract public class AbstractHCSImageFileExtractor implements IHCSImageFileExtr
         return imageRelativePath;
     }
 
-    // ------- static helper methods
-
-    public static List<ColorComponent> tryGetChannelComponents(final Properties properties)
+    protected final static List<ColorComponent> tryGetChannelComponents(final Properties properties)
     {
         List<String> componentNames =
                 PropertyUtils.tryGetList(properties, EXTRACT_SINGLE_IMAGE_CHANNELS_PROPERTY);
@@ -397,38 +273,52 @@ abstract public class AbstractHCSImageFileExtractor implements IHCSImageFileExtr
                 channelDescriptions);
     }
 
-    protected final static Geometry getWellGeometry(final Properties properties)
-    {
-        final String property = properties.getProperty(WellGeometry.WELL_GEOMETRY);
-        if (property == null)
-        {
-            throw new ConfigurationFailureException(String.format(
-                    "No '%s' property has been specified.", WellGeometry.WELL_GEOMETRY));
-        }
-        final Geometry geometry = WellGeometry.createFromString(property);
-        if (geometry == null)
-        {
-            throw new ConfigurationFailureException(String.format(
-                    "Could not create a geometry from property value '%s'.", property));
-        }
-        return geometry;
-    }
-
-    protected static List<AcquiredPlateImage> getDefaultImages(ImageFileInfo imageInfo)
+    protected final static List<AcquiredPlateImage> createImagesWithNoColorComponent(
+            ImageFileInfo imageInfo)
     {
         List<AcquiredPlateImage> images = new ArrayList<AcquiredPlateImage>();
         images.add(createImage(imageInfo, null));
         return images;
     }
 
-    protected static final AcquiredPlateImage createImage(ImageFileInfo imageInfo,
+    protected final static AcquiredPlateImage createImage(ImageFileInfo imageInfo,
             ColorComponent colorComponentOrNull)
     {
         RelativeImageReference relativeImageRef =
                 new RelativeImageReference(imageInfo.getImageRelativePath(), null,
                         colorComponentOrNull);
-        return new AcquiredPlateImage(imageInfo.getWellLocation(), imageInfo.getTileLocation(),
+        return new AcquiredPlateImage(imageInfo.tryGetWellLocation(), imageInfo.getTileLocation(),
                 imageInfo.getChannelCode(), imageInfo.tryGetTimepoint(), imageInfo.tryGetDepth(),
                 relativeImageRef);
     }
+
+    protected static Integer tryAsInt(String valueOrNull)
+    {
+        if (valueOrNull == null)
+        {
+            return null;
+        }
+        try
+        {
+            return Integer.parseInt(valueOrNull);
+        } catch (NumberFormatException e)
+        {
+            return null;
+        }
+    }
+
+    protected static Float tryAsFloat(String valueOrNull)
+    {
+        if (valueOrNull == null)
+        {
+            return null;
+        }
+        try
+        {
+            return Float.parseFloat(valueOrNull);
+        } catch (NumberFormatException e)
+        {
+            return null;
+        }
+    }
 }
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AbstractImageStorageProcessor.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AbstractImageStorageProcessor.java
new file mode 100644
index 0000000000000000000000000000000000000000..40a1702209749ebd335d3f76ccf06e469c13438c
--- /dev/null
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AbstractImageStorageProcessor.java
@@ -0,0 +1,731 @@
+/*
+ * Copyright 2007 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.openbis.dss.etl;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.List;
+import java.util.Properties;
+
+import javax.sql.DataSource;
+
+import net.lemnik.eodsql.QueryTool;
+
+import org.apache.commons.lang.time.DurationFormatUtils;
+import org.apache.log4j.Logger;
+
+import ch.systemsx.cisd.bds.hcs.Geometry;
+import ch.systemsx.cisd.common.collections.CollectionUtils;
+import ch.systemsx.cisd.common.exceptions.ConfigurationFailureException;
+import ch.systemsx.cisd.common.exceptions.EnvironmentFailureException;
+import ch.systemsx.cisd.common.exceptions.UserFailureException;
+import ch.systemsx.cisd.common.filesystem.FileOperations;
+import ch.systemsx.cisd.common.filesystem.FileUtilities;
+import ch.systemsx.cisd.common.filesystem.IFileOperations;
+import ch.systemsx.cisd.common.filesystem.SoftLinkMaker;
+import ch.systemsx.cisd.common.logging.LogCategory;
+import ch.systemsx.cisd.common.logging.LogFactory;
+import ch.systemsx.cisd.common.mail.IMailClient;
+import ch.systemsx.cisd.common.utilities.ClassUtils;
+import ch.systemsx.cisd.common.utilities.PropertyUtils;
+import ch.systemsx.cisd.etlserver.AbstractStorageProcessor;
+import ch.systemsx.cisd.etlserver.ITypeExtractor;
+import ch.systemsx.cisd.etlserver.hdf5.Hdf5Container;
+import ch.systemsx.cisd.etlserver.hdf5.HierarchicalStructureDuplicatorFileToHdf5;
+import ch.systemsx.cisd.openbis.dss.Constants;
+import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingQueryDAO;
+import ch.systemsx.cisd.openbis.dss.generic.shared.ServiceProvider;
+import ch.systemsx.cisd.openbis.dss.generic.shared.dto.DataSetInformation;
+import ch.systemsx.cisd.openbis.generic.shared.dto.StorageFormat;
+import ch.systemsx.cisd.openbis.plugin.screening.shared.basic.dto.ChannelDescription;
+import ch.systemsx.cisd.openbis.plugin.screening.shared.basic.dto.ScreeningConstants;
+
+/**
+ * Abstract superclass for a storage processor which stores images in a special-purpose imaging
+ * database besides putting them into the store. It has the ability to compress the whole dataset as
+ * an HDF5 container. It can also generate thumbnails for each image.
+ * <p>
+ * Accepts following properties:
+ * <ul>
+ * <li>generate-thumbnails - should the thumbnails be generated? It slows down the dataset
+ * registration, but increases the performance when the user wants to see the image. Can be 'true'
+ * or 'false', 'false' is the default value
+ * <li>compress-thumbnails - should the thumbnails be compressed? Used if generate-thumbnails is
+ * true, otherwise ignored
+ * <li>thumbnail-max-width, thumbnail-max-height - thumbnails size in pixels
+ * <li>[deprecated] channel-names - names of the channels in which images have been acquired
+ * <li>channel-codes - codes of the channels in which images have been acquired
+ * <li>channel-labels - labels of the channels in which images have been acquired
+ * <li>well_geometry - format: [width]x[height], e.g. 3x4. Specifies the grid into which a
+ * microscope divided the well to acquire images.
+ * <li>file-extractor - implementation of the {@link IImageFileExtractor} interface which maps
+ * images to the location on the plate and particular channel
+ * <li>data-source - specification of the imaging db
+ * <li>extract-single-image-channels - optional comma separated list of color components. Available
+ * values: RED, GREEN or BLUE. If specified then the channels are extracted from the color
+ * components and override 'file-extractor' results.
+ * </ul>
+ * 
+ * @author Tomasz Pylak
+ */
+abstract class AbstractImageStorageProcessor extends AbstractStorageProcessor
+{
+    /**
+     * Stores the references to the extracted images in the imaging database.
+     * 
+     * @param dao should not be committed or rolled back; that is done outside of this method.
+     */
+    abstract protected void storeInDatabase(IImagingQueryDAO dao,
+            DataSetInformation dataSetInformation, HCSImageFileExtractionResult extractedImages);
+
+    /**
+     * Additional image validation (e.g. are all expected images present?). Prints warnings
+     * to the log, does not throw exceptions.
+     */
+    abstract protected void validateImages(DataSetInformation dataSetInformation,
+            IMailClient mailClient, File incomingDataSetDirectory,
+            HCSImageFileExtractionResult extractionResult);
+
+    // --------------------------------------------
+
+    /** The directory where <i>original</i> data could be found. */
+    private static final String DIR_ORIGINAL = ScreeningConstants.ORIGINAL_DATA_DIR;
+
+    protected static final Logger operationLog = LogFactory.getLogger(LogCategory.OPERATION,
+            PlateStorageProcessor.class);
+
+    protected static final Logger notificationLog = LogFactory.getLogger(LogCategory.NOTIFY,
+            PlateStorageProcessor.class);
+
+    // tiles geometry, e.g. 3x4 if the well is divided into 12 tiles (3 rows, 4 columns)
+    private static final String SPOT_GEOMETRY_PROPERTY = "well_geometry";
+
+    private static final String GENERATE_THUMBNAILS_PROPERTY = "generate-thumbnails";
+
+    private final static String COMPRESS_THUMBNAILS_PROPERTY = "compress-thumbnails";
+
+    private final static String ORIGINAL_DATA_STORAGE_FORMAT_PROPERTY =
+            "original-data-storage-format";
+
+    private static final String THUMBNAIL_MAX_WIDTH_PROPERTY = "thumbnail-max-width";
+
+    private static final int DEFAULT_THUMBNAIL_MAX_WIDTH = 200;
+
+    private static final String THUMBNAIL_MAX_HEIGHT_PROPERTY = "thumbnail-max-height";
+
+    private static final int DEFAULT_THUMBNAIL_MAX_HEIGHT = 120;
+
+    protected static final String FILE_EXTRACTOR_PROPERTY = "file-extractor";
+
+    // comma separated list of channel names, order matters
+    @Deprecated
+    public static final String CHANNEL_NAMES = "channel-names";
+
+    // comma separated list of channel codes, order matters
+    public static final String CHANNEL_CODES = "channel-codes";
+
+    // comma separated list of channel labels, order matters
+    public static final String CHANNEL_LABELS = "channel-labels";
+
+    // how the original data should be stored
+    private static enum OriginalDataStorageFormat
+    {
+        UNCHANGED, HDF5, HDF5_COMPRESSED;
+
+        public boolean isHdf5()
+        {
+            return this == OriginalDataStorageFormat.HDF5
+                    || this == OriginalDataStorageFormat.HDF5_COMPRESSED;
+        }
+    }
+
+    // --- private --------
+
+    private final DataSource dataSource;
+
+    private final int thumbnailMaxWidth;
+
+    private final int thumbnailMaxHeight;
+
+    private final boolean generateThumbnails;
+
+    private final boolean areThumbnailsCompressed;
+
+    private final OriginalDataStorageFormat originalDataStorageFormat;
+
+    // --- protected --------
+
+    protected final IImageFileExtractor imageFileExtractor;
+
+    protected final Geometry spotGeometry;
+
+    protected final List<ChannelDescription> channelDescriptions;
+
+    // --- internal state -------------
+
+    private IImagingQueryDAO currentTransaction;
+
+    // ---
+
+    public AbstractImageStorageProcessor(final Properties properties)
+    {
+        super(properties);
+        String spotGeometryText = getMandatoryProperty(SPOT_GEOMETRY_PROPERTY);
+        this.spotGeometry = Geometry.createFromString(spotGeometryText);
+        channelDescriptions = extractChannelDescriptions(properties);
+        thumbnailMaxWidth =
+                PropertyUtils.getInt(properties, THUMBNAIL_MAX_WIDTH_PROPERTY,
+                        DEFAULT_THUMBNAIL_MAX_WIDTH);
+        thumbnailMaxHeight =
+                PropertyUtils.getInt(properties, THUMBNAIL_MAX_HEIGHT_PROPERTY,
+                        DEFAULT_THUMBNAIL_MAX_HEIGHT);
+        generateThumbnails =
+                PropertyUtils.getBoolean(properties, GENERATE_THUMBNAILS_PROPERTY, false);
+        areThumbnailsCompressed =
+                PropertyUtils.getBoolean(properties, COMPRESS_THUMBNAILS_PROPERTY, false);
+        originalDataStorageFormat = getOriginalDataStorageFormat(properties);
+
+        this.imageFileExtractor = tryCreateImageExtractor(properties);
+        this.dataSource = ServiceProvider.getDataSourceProvider().getDataSource(properties);
+        this.currentTransaction = null;
+    }
+
+    private static IImageFileExtractor tryCreateImageExtractor(final Properties properties)
+    {
+        String fileExtractorClass = PropertyUtils.getProperty(properties, FILE_EXTRACTOR_PROPERTY);
+        if (fileExtractorClass != null)
+        {
+            return ClassUtils.create(IImageFileExtractor.class, fileExtractorClass, properties);
+        } else
+        {
+            return null;
+        }
+    }
+
+    private static OriginalDataStorageFormat getOriginalDataStorageFormat(
+            final Properties properties)
+    {
+        String defaultValue = OriginalDataStorageFormat.UNCHANGED.name();
+        String textValue =
+                PropertyUtils.getProperty(properties, ORIGINAL_DATA_STORAGE_FORMAT_PROPERTY,
+                        defaultValue);
+        return OriginalDataStorageFormat.valueOf(textValue.toUpperCase());
+    }
+
+    private final static List<String> tryGetListOfLabels(Properties properties, String propertyKey)
+    {
+        String itemsList = PropertyUtils.getProperty(properties, propertyKey);
+        if (itemsList == null)
+        {
+            return null;
+        }
+        String[] items = itemsList.split(",");
+        for (int i = 0; i < items.length; i++)
+        {
+            items[i] = items[i].trim();
+        }
+        return Arrays.asList(items);
+    }
+
+    public final static List<ChannelDescription> extractChannelDescriptions(
+            final Properties properties)
+    {
+        List<String> names = PropertyUtils.tryGetList(properties, CHANNEL_NAMES);
+        List<String> codes = PropertyUtils.tryGetList(properties, CHANNEL_CODES);
+        List<String> labels = tryGetListOfLabels(properties, CHANNEL_LABELS);
+        if (names != null && (codes != null || labels != null))
+        {
+            throw new ConfigurationFailureException(String.format(
+                    "Configure either '%s' or ('%s','%s') but not both.", CHANNEL_NAMES,
+                    CHANNEL_CODES, CHANNEL_LABELS));
+        }
+        if (names != null)
+        {
+            List<ChannelDescription> descriptions = new ArrayList<ChannelDescription>();
+            for (String name : names)
+            {
+                descriptions.add(new ChannelDescription(name));
+            }
+            return descriptions;
+        }
+        if (codes == null || labels == null)
+        {
+            throw new ConfigurationFailureException(String.format(
+                    "Both '%s' and '%s' should be configured", CHANNEL_CODES, CHANNEL_LABELS));
+        }
+        if (codes.size() != labels.size())
+        {
+            throw new ConfigurationFailureException(String.format(
+                    "Number of configured '%s' should be the same as number of '%s'.",
+                    CHANNEL_CODES, CHANNEL_LABELS));
+        }
+        List<ChannelDescription> descriptions = new ArrayList<ChannelDescription>();
+        for (int i = 0; i < codes.size(); i++)
+        {
+            descriptions.add(new ChannelDescription(codes.get(i), labels.get(i)));
+        }
+        return descriptions;
+    }
+
+    private IImagingQueryDAO createQuery()
+    {
+        return QueryTool.getQuery(dataSource, IImagingQueryDAO.class);
+    }
+
+    private void checkDataSetInformation(final DataSetInformation dataSetInformation)
+    {
+        assert dataSetInformation != null : "Unspecified data set information";
+        assert dataSetInformation.getSampleIdentifier() != null : "Unspecified sample identifier";
+
+        final ch.systemsx.cisd.openbis.generic.shared.dto.identifier.ExperimentIdentifier experimentIdentifier =
+                dataSetInformation.getExperimentIdentifier();
+        assert experimentIdentifier != null : "Unspecified experiment identifier";
+        assert dataSetInformation.tryToGetExperiment() != null : "experiment not set";
+        checkExperimentIdentifier(experimentIdentifier);
+    }
+
+    private final static void checkExperimentIdentifier(
+            final ch.systemsx.cisd.openbis.generic.shared.dto.identifier.ExperimentIdentifier experimentIdentifier)
+    {
+        assert experimentIdentifier.getSpaceCode() != null : "Space code is null";
+        assert experimentIdentifier.getExperimentCode() != null : "Experiment code is null";
+        assert experimentIdentifier.getProjectCode() != null : "Project code is null";
+    }
+
+    // ---------------------------------
+
+    public final File storeData(final DataSetInformation dataSetInformation,
+            final ITypeExtractor typeExtractor, final IMailClient mailClient,
+            final File incomingDataSetDirectory, final File rootDirectory)
+    {
+        checkDataSetInformation(dataSetInformation);
+        assert rootDirectory != null : "Root directory can not be null.";
+        assert incomingDataSetDirectory != null : "Incoming data set directory can not be null.";
+        assert typeExtractor != null : "Unspecified IProcedureAndDataTypeExtractor implementation.";
+
+        HCSImageFileExtractionResult extractionResult =
+                extractImages(dataSetInformation, incomingDataSetDirectory);
+
+        validateImages(dataSetInformation, mailClient, incomingDataSetDirectory, extractionResult);
+        List<AcquiredPlateImage> plateImages = extractionResult.getImages();
+
+        File imagesInStoreFolder = moveToStore(incomingDataSetDirectory, rootDirectory);
+
+        // NOTE: plateImages will be changed by reference
+        processImages(rootDirectory, plateImages, imagesInStoreFolder);
+
+        storeInDatabase(dataSetInformation, extractionResult);
+        return rootDirectory;
+    }
+
+    private void processImages(final File rootDirectory, List<AcquiredPlateImage> plateImages,
+            File imagesInStoreFolder)
+    {
+        generateThumbnails(plateImages, rootDirectory, imagesInStoreFolder);
+        String relativeImagesDirectory =
+                packageImagesIfNecessary(rootDirectory, plateImages, imagesInStoreFolder);
+        updateImagesRelativePath(relativeImagesDirectory, plateImages);
+    }
+
+    // returns the prefix which should be added before each image path to create a path relative to
+    // the dataset folder
+    private String packageImagesIfNecessary(final File rootDirectory,
+            List<AcquiredPlateImage> plateImages, File imagesInStoreFolder)
+    {
+        if (originalDataStorageFormat.isHdf5())
+        {
+            File hdf5OriginalContainer = createHdf5OriginalContainer(rootDirectory);
+            boolean isDataCompressed =
+                    originalDataStorageFormat == OriginalDataStorageFormat.HDF5_COMPRESSED;
+            saveInHdf5(imagesInStoreFolder, hdf5OriginalContainer, isDataCompressed);
+            String hdf5ArchivePathPrefix =
+                    hdf5OriginalContainer.getName() + ContentRepository.ARCHIVE_DELIMITER;
+            return hdf5ArchivePathPrefix;
+        } else
+        {
+            return getRelativeImagesDirectory(rootDirectory, imagesInStoreFolder) + "/";
+        }
+    }
+
+    private static File createHdf5OriginalContainer(final File rootDirectory)
+    {
+        return new File(rootDirectory, Constants.HDF5_CONTAINER_ORIGINAL_FILE_NAME);
+    }
+
+    private void saveInHdf5(File sourceFolder, File hdf5DestinationFile, boolean compressFiles)
+    {
+        Hdf5Container container = new Hdf5Container(hdf5DestinationFile);
+        container.runWriterClient(compressFiles,
+                new HierarchicalStructureDuplicatorFileToHdf5.DuplicatorWriterClient(sourceFolder));
+    }
+
+    private File moveToStore(File incomingDataSetDirectory, File rootDirectory)
+    {
+        File originalFolder = getOriginalFolder(rootDirectory);
+        originalFolder.mkdirs();
+        if (originalFolder.exists() == false)
+        {
+            throw new UserFailureException("Cannot create a directory: " + originalFolder);
+        }
+        return moveFileToDirectory(incomingDataSetDirectory, originalFolder);
+
+    }
+
+    // modifies plateImages by setting the path to thumbnails
+    private void generateThumbnails(final List<AcquiredPlateImage> plateImages,
+            final File rootDirectory, final File imagesInStoreFolder)
+    {
+        final File thumbnailsFile =
+                new File(rootDirectory, Constants.HDF5_CONTAINER_THUMBNAILS_FILE_NAME);
+        final String relativeThumbnailFilePath =
+                getRelativeImagesDirectory(rootDirectory, thumbnailsFile);
+
+        if (generateThumbnails)
+        {
+            Hdf5Container container = new Hdf5Container(thumbnailsFile);
+            container.runWriterClient(areThumbnailsCompressed, new Hdf5ThumbnailGenerator(
+                    plateImages, imagesInStoreFolder, thumbnailMaxWidth, thumbnailMaxHeight,
+                    relativeThumbnailFilePath, operationLog));
+        }
+    }
+
+    private void updateImagesRelativePath(String folderPathPrefix,
+            final List<AcquiredPlateImage> plateImages)
+    {
+        for (AcquiredPlateImage plateImage : plateImages)
+        {
+            RelativeImageReference imageReference = plateImage.getImageReference();
+            imageReference.setRelativeImageFolder(folderPathPrefix);
+        }
+    }
+
+    private String getRelativeImagesDirectory(File rootDirectory, File imagesInStoreFolder)
+    {
+        String root = rootDirectory.getAbsolutePath();
+        String imgDir = imagesInStoreFolder.getAbsolutePath();
+        if (imgDir.startsWith(root) == false)
+        {
+            throw UserFailureException.fromTemplate(
+                    "Directory %s should be a subdirectory of directory %s.", imgDir, root);
+        }
+        return imgDir.substring(root.length());
+    }
+
+    private HCSImageFileExtractionResult extractImages(final DataSetInformation dataSetInformation,
+            final File incomingDataSetDirectory)
+    {
+        long extractionStart = System.currentTimeMillis();
+        final HCSImageFileExtractionResult result =
+                getImageFileExtractor(incomingDataSetDirectory).extract(incomingDataSetDirectory,
+                        dataSetInformation);
+
+        if (operationLog.isInfoEnabled())
+        {
+            long duration = System.currentTimeMillis() - extractionStart;
+            operationLog.info(String.format("Extraction of %d files took %s.", result.getImages()
+                    .size(), DurationFormatUtils.formatDurationHMS(duration)));
+        }
+        if (result.getImages().size() == 0)
+        {
+            throw new UserFailureException("No images found in the incoming directory: "
+                    + incomingDataSetDirectory);
+        }
+        return result;
+    }
+
+    protected IImageFileExtractor getImageFileExtractor(File incomingDataSetDirectory)
+    {
+        assert imageFileExtractor != null : "imageFileExtractor is null";
+        return imageFileExtractor;
+    }
+
+    @Override
+    public void commit(File incomingDataSetDirectory, File storedDataDirectory)
+    {
+        if (originalDataStorageFormat.isHdf5())
+        {
+            commitHdf5StorageFormatChanges(storedDataDirectory);
+        }
+        commitDatabaseChanges();
+    }
+
+    private static void commitHdf5StorageFormatChanges(File storedDataDirectory)
+    {
+        File originalFolder = getOriginalFolder(storedDataDirectory);
+        File hdf5OriginalContainer = createHdf5OriginalContainer(storedDataDirectory);
+        if (hdf5OriginalContainer.exists())
+        {
+            final IFileOperations fileOps = FileOperations.getMonitoredInstanceForCurrentThread();
+            if (fileOps.removeRecursivelyQueueing(originalFolder) == false)
+            {
+                operationLog.error("Cannot delete '" + originalFolder.getAbsolutePath() + "'.");
+            }
+        } else
+        {
+            notificationLog.error(String.format("HDF5 container with original data '%s' does not "
+                    + "exist, keeping the original directory '%s'.", hdf5OriginalContainer,
+                    originalFolder));
+        }
+    }
+
+    private void commitDatabaseChanges()
+    {
+        if (currentTransaction == null)
+        {
+            throw new IllegalStateException("there is no transaction to commit");
+        }
+        try
+        {
+            currentTransaction.close(true);
+        } finally
+        {
+            currentTransaction = null;
+        }
+    }
+
+    public UnstoreDataAction rollback(File incomingDataSetDirectory, File storedDataDirectory,
+            Throwable exception)
+    {
+        unstoreFiles(incomingDataSetDirectory, storedDataDirectory);
+        rollbackDatabaseChanges();
+        return UnstoreDataAction.MOVE_TO_ERROR;
+    }
+
+    private final void unstoreFiles(final File incomingDataSetDirectory,
+            final File storedDataDirectory)
+    {
+        checkParameters(incomingDataSetDirectory, storedDataDirectory);
+
+        final File originalDataFile = tryGetProprietaryData(storedDataDirectory);
+        if (originalDataFile == null)
+        {
+            // nothing has been stored in the file system yet,
+            // e.g. because images could not be validated
+            return;
+        }
+        // Move the data from the 'original' directory back to the 'incoming' directory.
+        final File incomingDirectory = incomingDataSetDirectory.getParentFile();
+        try
+        {
+            moveFileToDirectory(originalDataFile, incomingDirectory);
+            if (operationLog.isInfoEnabled())
+            {
+                operationLog.info(String.format(
+                        "Directory '%s' has moved to incoming directory '%s'.", originalDataFile,
+                        incomingDirectory.getAbsolutePath()));
+            }
+        } catch (final EnvironmentFailureException ex)
+        {
+            notificationLog.error(String.format("Could not move '%s' to incoming directory '%s'.",
+                    originalDataFile, incomingDirectory.getAbsolutePath()), ex);
+            return;
+        }
+        // Remove the dataset directory from the store
+        final IFileOperations fileOps = FileOperations.getMonitoredInstanceForCurrentThread();
+        if (fileOps.exists(incomingDataSetDirectory))
+        {
+            if (fileOps.removeRecursivelyQueueing(storedDataDirectory) == false)
+            {
+                operationLog
+                        .error("Cannot delete '" + storedDataDirectory.getAbsolutePath() + "'.");
+            }
+        } else
+        {
+            notificationLog.error(String.format("Incoming data set directory '%s' does not "
+                    + "exist, keeping store directory '%s'.", incomingDataSetDirectory,
+                    storedDataDirectory));
+        }
+    }
+
+    private void storeInDatabase(DataSetInformation dataSetInformation,
+            HCSImageFileExtractionResult extractionResult)
+    {
+        if (currentTransaction != null)
+        {
+            throw new IllegalStateException("previous transaction has not been committed!");
+        }
+        currentTransaction = createQuery();
+
+        storeInDatabase(currentTransaction, dataSetInformation, extractionResult);
+    }
+
+    private void rollbackDatabaseChanges()
+    {
+        if (currentTransaction == null)
+        {
+            return; // storing in the imaging db has not started
+        }
+        try
+        {
+            currentTransaction.rollback();
+        } finally
+        {
+            currentTransaction.close();
+            currentTransaction = null;
+        }
+    }
+
+    /**
+     * Moves source file/folder to the destination directory. If the source is a symbolic links to
+     * the original data then we do not move any data. Instead we create symbolic link to original
+     * data which points to the same place as the source link.
+     * 
+     * @return the new location of the source file/folder inside the destination directory
+     */
+    private static File moveFileToDirectory(final File source, final File directory)
+            throws EnvironmentFailureException
+    {
+        assert source != null;
+        IFileOperations fileOperations = FileOperations.getMonitoredInstanceForCurrentThread();
+        assert directory != null && fileOperations.isDirectory(directory);
+        final String newName = source.getName();
+        final File destination = new File(directory, newName);
+        if (fileOperations.exists(destination) == false)
+        {
+            if (FileUtilities.isSymbolicLink(source))
+            {
+                moveSymbolicLink(source, destination);
+            } else
+            {
+                final boolean successful = fileOperations.rename(source, destination);
+                if (successful == false)
+                {
+                    throw EnvironmentFailureException.fromTemplate(
+                            "Can not move file '%s' to directory '%s'.", source.getAbsolutePath(),
+                            directory.getAbsolutePath());
+                }
+            }
+            return destination;
+        } else
+        {
+            throw EnvironmentFailureException
+                    .fromTemplate(
+                            "Can not move file '%s' to directory '%s' because the destination directory already exists.",
+                            source.getAbsolutePath(), directory.getAbsolutePath());
+        }
+    }
+
+    // WORKAROUND there were cases where it was impossible to move an absolute symbolic link
+    // It happened on a CIFS share. So instead of moving the link we create a file which points to
+    // the same place and delete the link.
+    private static void moveSymbolicLink(File source, File destination)
+    {
+        File referencedSource;
+        try
+        {
+            referencedSource = source.getCanonicalFile();
+        } catch (IOException ex)
+        {
+            throw new EnvironmentFailureException("cannot get the canonical path of " + source);
+        }
+        boolean ok = SoftLinkMaker.createSymbolicLink(referencedSource, destination);
+        if (ok == false)
+        {
+            throw EnvironmentFailureException.fromTemplate(
+                    "Can not create symbolic link to '%s' in '%s'.", referencedSource.getPath(),
+                    destination.getPath());
+        }
+        ok = source.delete();
+        if (ok == false)
+        {
+            throw EnvironmentFailureException.fromTemplate("Can not delete symbolic link '%s'.",
+                    source.getPath());
+        }
+    }
+
+    public final File tryGetProprietaryData(final File storedDataDirectory)
+    {
+        assert storedDataDirectory != null : "Unspecified stored data directory.";
+
+        File originalFolder = getOriginalFolder(storedDataDirectory);
+        File[] content = originalFolder.listFiles();
+        if (content == null || content.length == 0)
+        {
+            return null;
+        }
+        if (content.length > 1)
+        {
+            operationLog.error("There should be exactly one original folder inside '"
+                    + originalFolder + "', but " + content.length + " have been found.");
+            return null;
+        }
+        File originalDataFile = content[0];
+        if (originalDataFile.exists() == false)
+        {
+            operationLog.error("Original data set file '" + originalDataFile.getAbsolutePath()
+                    + "' does not exist.");
+            return null;
+        }
+        return originalDataFile;
+    }
+
+    private static File getOriginalFolder(File storedDataDirectory)
+    {
+        return new File(storedDataDirectory, DIR_ORIGINAL);
+    }
+
+    public final StorageFormat getStorageFormat()
+    {
+        return StorageFormat.PROPRIETARY;
+    }
+
+    protected static List<String> extractChannelCodes(final List<ChannelDescription> descriptions)
+    {
+        List<String> channelCodes = new ArrayList<String>();
+        for (ChannelDescription cd : descriptions)
+        {
+            channelCodes.add(cd.getCode());
+        }
+        return channelCodes;
+    }
+
+    protected static List<String> extractChannelLabels(final List<ChannelDescription> descriptions)
+    {
+        List<String> channelLabels = new ArrayList<String>();
+        for (ChannelDescription cd : descriptions)
+        {
+            channelLabels.add(cd.getLabel());
+        }
+        return channelLabels;
+    }
+
+    protected static String getChannelCodeOrLabel(final List<String> channelCodes, int channelId)
+    {
+        if (channelId > channelCodes.size())
+        {
+            throw UserFailureException.fromTemplate(
+                    "Too large channel number %d, configured channels: %s.", channelId,
+                    CollectionUtils.abbreviate(channelCodes, -1));
+        }
+        return channelCodes.get(channelId - 1);
+    }
+
+    protected static boolean hasImageSeries(List<AcquiredPlateImage> images)
+    {
+        for (AcquiredPlateImage image : images)
+        {
+            if (image.tryGetTimePoint() != null || image.tryGetDepth() != null)
+            {
+                return true;
+            }
+        }
+        return false;
+    }
+
+}
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AcquiredPlateImage.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AcquiredPlateImage.java
index b5612d067d1242758da12a9d4f76d32e2a385f43..b8a0e0de2c9139d1d58af06bf0f7b8d7c3e343c8 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AcquiredPlateImage.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/AcquiredPlateImage.java
@@ -20,13 +20,16 @@ import ch.systemsx.cisd.bds.hcs.Location;
 import ch.systemsx.cisd.common.utilities.AbstractHashable;
 
 /**
- * Describes properties extracted for one screening image.
+ * Describes properties extracted for one logical screening image (note that one file can contain
+ * many logical images).
  * 
  * @author Tomasz Pylak
  */
+// TODO 2010-12-08, Tomasz Pylak: rename to AcquiredSingleImage
 public class AcquiredPlateImage extends AbstractHashable
 {
-    private final Location wellLocation;
+    // null for non-HCS images
+    private final Location wellLocationOrNull;
 
     private final Location tileLocation;
 
@@ -39,11 +42,12 @@ public class AcquiredPlateImage extends AbstractHashable
                                                         // directory
 
     private RelativeImageReference thumbnailFilePathOrNull;
-    
-    public AcquiredPlateImage(Location wellLocation, Location tileLocation, String channelCode,
-            Float timePointOrNull, Float depthOrNull, RelativeImageReference imageFilePath)
+
+    public AcquiredPlateImage(Location wellLocationOrNull, Location tileLocation,
+            String channelCode, Float timePointOrNull, Float depthOrNull,
+            RelativeImageReference imageFilePath)
     {
-        this.wellLocation = wellLocation;
+        this.wellLocationOrNull = wellLocationOrNull;
         this.tileLocation = tileLocation;
         this.channelCode = channelCode.toUpperCase();
         this.timePointOrNull = timePointOrNull;
@@ -51,14 +55,18 @@ public class AcquiredPlateImage extends AbstractHashable
         this.imageFilePath = imageFilePath;
     }
 
+    /** Valid only in HCS case, do not call this method for microscopy images. */
     public int getWellRow()
     {
-        return wellLocation.getY();
+        assert wellLocationOrNull != null : "wellLocationOrNull is null";
+        return wellLocationOrNull.getY();
     }
 
+    /** Valid only in HCS case, do not call this method for microscopy images. */
     public int getWellColumn()
     {
-        return wellLocation.getX();
+        assert wellLocationOrNull != null : "wellLocationOrNull is null";
+        return wellLocationOrNull.getX();
     }
 
     public int getTileRow()
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/biozentrum/HCSImageFileExtractor.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/FlexibleHCSImageFileExtractor.java
similarity index 55%
rename from screening/source/java/ch/systemsx/cisd/openbis/dss/etl/biozentrum/HCSImageFileExtractor.java
rename to screening/source/java/ch/systemsx/cisd/openbis/dss/etl/FlexibleHCSImageFileExtractor.java
index 16b11e1940c1ec23688c6bbad4135558accee193..f9e77839594bc0c0c6ad5484d67113b888028294 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/biozentrum/HCSImageFileExtractor.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/FlexibleHCSImageFileExtractor.java
@@ -14,23 +14,29 @@
  * limitations under the License.
  */
 
-package ch.systemsx.cisd.openbis.dss.etl.biozentrum;
+package ch.systemsx.cisd.openbis.dss.etl;
 
 import java.io.File;
 import java.util.Properties;
 
-import ch.systemsx.cisd.openbis.dss.etl.UnparsedImageFileInfoLexer;
+import ch.systemsx.cisd.openbis.dss.etl.dto.ImageFileInfo;
+import ch.systemsx.cisd.openbis.dss.etl.dto.UnparsedImageFileInfo;
 import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifier;
 
 /**
- * A <code>IHCSImageFileExtractor</code> implementation suitable for <i>iBrain</i>.
+ * An <code>IHCSImageFileExtractor</code> implementation which optionally supports time points
+ * and depth-scans.
+ * <p>
+ * Suitable for images processed by <i>iBrain2</i>, but not only.
+ * </p>
  * 
- * @author Izabela Adamczyk
+ * @author Tomasz Pylak
  */
-public class HCSImageFileExtractor extends ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractor
+public class FlexibleHCSImageFileExtractor extends
+        ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractor
 {
 
-    public HCSImageFileExtractor(Properties properties)
+    public FlexibleHCSImageFileExtractor(Properties properties)
     {
         super(properties);
     }
@@ -39,12 +45,14 @@ public class HCSImageFileExtractor extends ch.systemsx.cisd.openbis.dss.etl.HCSI
     protected final ImageFileInfo tryExtractImageInfo(File imageFile,
             File incomingDataSetDirectory, SampleIdentifier datasetSample)
     {
-        UnparsedImageFileInfo unparsedInfo = UnparsedImageFileInfoLexer.extractImageFileInfo(imageFile);
+        UnparsedImageFileInfo unparsedInfo =
+                UnparsedImageFileInfoLexer.tryExtractHCSImageFileInfo(imageFile,
+                        incomingDataSetDirectory);
         if (unparsedInfo == null)
         {
             return null;
         }
-        return tryExtractImageInfo(unparsedInfo);
+        return tryExtractHCSImageInfo(unparsedInfo, imageFile, incomingDataSetDirectory);
     }
 
 }
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ScreeningContainerDatasetInfo.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSContainerDatasetInfo.java
similarity index 90%
rename from screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ScreeningContainerDatasetInfo.java
rename to screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSContainerDatasetInfo.java
index 202b9868ba88db8e02b41100b2097af39431e5fc..fa215a3b10988bd38cedca367be314ea5f3b54a2 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ScreeningContainerDatasetInfo.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSContainerDatasetInfo.java
@@ -25,11 +25,11 @@ import ch.systemsx.cisd.openbis.plugin.screening.shared.dto.PlateDimension;
 import ch.systemsx.cisd.openbis.plugin.screening.shared.dto.PlateDimensionParser;
 
 /**
- * Describes one dataset container (e.g. plate) with images.
+ * Describes one HCS dataset container (e.g. plate) with images.
  * 
  * @author Tomasz Pylak
  */
-public class ScreeningContainerDatasetInfo
+public class HCSContainerDatasetInfo
 {
     private String experimentPermId;
 
@@ -89,13 +89,13 @@ public class ScreeningContainerDatasetInfo
         this.containerColumns = containerColumns;
     }
 
-    public static ScreeningContainerDatasetInfo createScreeningDatasetInfo(
+    public static HCSContainerDatasetInfo createScreeningDatasetInfo(
             DataSetInformation dataSetInformation)
     {
         Sample sample = dataSetInformation.tryToGetSample();
         assert sample != null : "no sample connected to a dataset";
         PlateDimension plateGeometry = getPlateGeometry(dataSetInformation);
-        ScreeningContainerDatasetInfo info =
+        HCSContainerDatasetInfo info =
                 createBasicScreeningDataSetInfo(dataSetInformation, sample, plateGeometry);
         return info;
     }
@@ -103,22 +103,22 @@ public class ScreeningContainerDatasetInfo
     /**
      * Create a screening data set info given sample.
      */
-    public static ScreeningContainerDatasetInfo createScreeningDatasetInfoWithSample(
+    public static HCSContainerDatasetInfo createScreeningDatasetInfoWithSample(
             DataSetInformation dataSetInformation, Sample containingSample)
     {
         Sample sample = containingSample;
         assert sample != null : "no sample connected to a dataset";
         PlateDimension plateGeometry = getPlateGeometry(sample);
-        ScreeningContainerDatasetInfo info =
+        HCSContainerDatasetInfo info =
                 createBasicScreeningDataSetInfo(dataSetInformation, sample, plateGeometry);
         return info;
     }
 
-    private static ScreeningContainerDatasetInfo createBasicScreeningDataSetInfo(
+    private static HCSContainerDatasetInfo createBasicScreeningDataSetInfo(
             DataSetInformation dataSetInformation, Sample sample, PlateDimension plateGeometry)
     {
         Experiment experiment = dataSetInformation.tryToGetExperiment();
-        ScreeningContainerDatasetInfo info = new ScreeningContainerDatasetInfo();
+        HCSContainerDatasetInfo info = new HCSContainerDatasetInfo();
         info.setExperimentPermId(experiment.getPermId());
         info.setContainerPermId(sample.getPermId());
         info.setDatasetPermId(dataSetInformation.getDataSetCode());
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageCheckList.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageCheckList.java
index 1808f7d316eeb3f2263e6bd02ef31698a8856623..2d2876e034a0f9cac9b4af31195cdcaf1bccafa0 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageCheckList.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageCheckList.java
@@ -18,8 +18,10 @@ package ch.systemsx.cisd.openbis.dss.etl;
 
 import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 import org.apache.log4j.Logger;
 
@@ -92,7 +94,8 @@ public final class HCSImageCheckList
             throw new IllegalArgumentException("Invalid channel/well/tile: " + image);
         }
         Float timepointOrNull = image.tryGetTimePoint();
-        if (check.isCheckedOff(timepointOrNull))
+        Float depthOrNull = image.tryGetDepth();
+        if (check.isCheckedOff(timepointOrNull, depthOrNull))
         {
             throw new IllegalArgumentException("Image already handled: " + image);
         }
@@ -101,7 +104,7 @@ public final class HCSImageCheckList
             operationLog.debug("Checking location " + location
                     + (timepointOrNull == null ? "" : " timepoint " + timepointOrNull));
         }
-        check.checkOff(timepointOrNull);
+        check.checkOff(timepointOrNull, depthOrNull);
     }
 
     private static FullLocation createLocation(AcquiredPlateImage image)
@@ -115,7 +118,7 @@ public final class HCSImageCheckList
         final List<FullLocation> fullLocations = new ArrayList<FullLocation>();
         for (final Map.Entry<FullLocation, Check> entry : imageMap.entrySet())
         {
-            if (entry.getValue().isCheckedOff(null) == false)
+            if (entry.getValue().isCheckedOff(null, null) == false)
             {
                 fullLocations.add(entry.getKey());
             }
@@ -127,21 +130,74 @@ public final class HCSImageCheckList
     // Helper classes
     //
 
+    private static final class CheckDimension
+    {
+        private final Float timeOrNull;
+
+        private final Float depthOrNull;
+
+        public CheckDimension(Float timeOrNull, Float depthOrNull)
+        {
+            this.timeOrNull = timeOrNull;
+            this.depthOrNull = depthOrNull;
+        }
+
+        @Override
+        public int hashCode()
+        {
+            final int prime = 31;
+            int result = 1;
+            result = prime * result + ((depthOrNull == null) ? 0 : depthOrNull.hashCode());
+            result = prime * result + ((timeOrNull == null) ? 0 : timeOrNull.hashCode());
+            return result;
+        }
+
+        @Override
+        public boolean equals(Object obj)
+        {
+            if (this == obj)
+                return true;
+            if (obj == null)
+                return false;
+            if (getClass() != obj.getClass())
+                return false;
+            CheckDimension other = (CheckDimension) obj;
+            if (depthOrNull == null)
+            {
+                if (other.depthOrNull != null)
+                    return false;
+            } else if (!depthOrNull.equals(other.depthOrNull))
+                return false;
+            if (timeOrNull == null)
+            {
+                if (other.timeOrNull != null)
+                    return false;
+            } else if (!timeOrNull.equals(other.timeOrNull))
+                return false;
+            return true;
+        }
+    }
+
     private static final class Check
     {
         private boolean checkedOff;
 
-        private final List<Float> timepoints = new ArrayList<Float>();
+        private final Set<CheckDimension> dimensions = new HashSet<CheckDimension>();
 
-        final void checkOff(Float timepointOrNull)
+        final void checkOff(Float timepointOrNull, Float depthOrNull)
         {
-            timepoints.add(timepointOrNull);
+            dimensions.add(new CheckDimension(timepointOrNull, depthOrNull));
             checkedOff = true;
         }
 
-        final boolean isCheckedOff(Float timepointOrNull)
+        final boolean isCheckedOff(Float timepointOrNull, Float depthOrNull)
         {
-            return checkedOff && (timepointOrNull == null || timepoints.contains(timepointOrNull));
+            CheckDimension dim = null;
+            if (timepointOrNull != null || depthOrNull != null)
+            {
+                dim = new CheckDimension(timepointOrNull, depthOrNull);
+            }
+            return checkedOff && (dim == null || dimensions.contains(dim));
         }
     }
 
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ImageDatasetInfo.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageDatasetInfo.java
similarity index 87%
rename from screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ImageDatasetInfo.java
rename to screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageDatasetInfo.java
index 3dfcfd3014c560bfb56f86b7841e6893f32e51a6..6a1e9c56dd5648e5886c05b76be55b824d8b778d 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ImageDatasetInfo.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageDatasetInfo.java
@@ -17,18 +17,18 @@
 package ch.systemsx.cisd.openbis.dss.etl;
 
 /**
- * Describes one image dataset from imaging database.
+ * Describes one HCS image dataset from imaging database.
  * 
  * @author Tomasz Pylak
  */
-public class ImageDatasetInfo extends ScreeningContainerDatasetInfo
+public class HCSImageDatasetInfo extends HCSContainerDatasetInfo
 {
     private final int tileRows, tileColumns;
 
     // has any well timepoints or depth stack images?
     private final boolean hasImageSeries;
 
-    public ImageDatasetInfo(ScreeningContainerDatasetInfo info, int tileRows, int tileColumns,
+    public HCSImageDatasetInfo(HCSContainerDatasetInfo info, int tileRows, int tileColumns,
             boolean hasImageSeries)
     {
         super.setContainerRows(info.getContainerRows());
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageDatasetUploader.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageDatasetUploader.java
new file mode 100644
index 0000000000000000000000000000000000000000..b50739bc1f8300e512e81773e6f1452aea9fb87b
--- /dev/null
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageDatasetUploader.java
@@ -0,0 +1,172 @@
+/*
+ * Copyright 2010 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.openbis.dss.etl;
+
+import java.util.ArrayList;
+import java.util.List;
+
+import ch.systemsx.cisd.openbis.dss.etl.ImagingDatabaseHelper.ExperimentWithChannelsAndContainer;
+import ch.systemsx.cisd.openbis.dss.etl.ImagingDatabaseHelper.ImagingChannelsMap;
+import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingQueryDAO;
+import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgDatasetDTO;
+import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgSpotDTO;
+
+/**
+ * Uploads HCS images (with spots in the container) into imaging database.
+ * 
+ * @author Tomasz Pylak
+ */
+public class HCSImageDatasetUploader extends AbstractImageDatasetUploader
+{
+    public static void upload(IImagingQueryDAO dao, HCSImageDatasetInfo info,
+            List<AcquiredPlateImage> images, List<HCSImageFileExtractionResult.Channel> channels)
+    {
+        new HCSImageDatasetUploader(dao).upload(info, images, channels);
+    }
+
+    private HCSImageDatasetUploader(IImagingQueryDAO dao)
+    {
+        super(dao);
+    }
+
+    private void upload(HCSImageDatasetInfo info, List<AcquiredPlateImage> images,
+            List<HCSImageFileExtractionResult.Channel> channels)
+    {
+        ExperimentWithChannelsAndContainer basicStruct =
+                ImagingDatabaseHelper.getOrCreateExperimentWithChannelsAndContainer(
+                        dao, info, channels);
+        long contId = basicStruct.getContainerId();
+        ImagingChannelsMap channelsMap = basicStruct.getChannelsMap();
+
+        Long[][] spotIds = getOrCreateSpots(contId, info, images);
+        ISpotProvider spotProvider = getSpotProvider(spotIds);
+        long datasetId = createDataset(contId, info);
+
+        createImages(images, spotProvider, channelsMap, datasetId);
+    }
+
+    private static ISpotProvider getSpotProvider(final Long[][] spotIds)
+    {
+        return new ISpotProvider()
+            {
+                public Long tryGetSpotId(AcquiredPlateImage image)
+                {
+                    return findSpotId(image, spotIds);
+                }
+            };
+    }
+
+    private static long findSpotId(AcquiredPlateImage image, Long[][] spotIds)
+    {
+        int wellRow = image.getWellRow();
+        int wellColumn = image.getWellColumn();
+        Long spotId = spotIds[wellRow - 1][wellColumn - 1];
+        assert spotId != null : "no spot for " + image;
+        return spotId;
+    }
+
+    // Returns a matrix of spot tech ids. The matrix[row][col] contains null if the
+    // spot at (row,col) does not exist.
+    // Spot coordinates are 0-based in the matrix.
+    private Long[][] getOrCreateSpots(long contId, HCSContainerDatasetInfo info,
+            List<AcquiredPlateImage> images)
+    {
+        List<ImgSpotDTO> oldSpots = dao.listSpots(contId);
+        List<ImgSpotDTO> newSpots =
+                createNewSpots(contId, images, oldSpots, info.getContainerRows(),
+                        info.getContainerColumns(), info.getContainerPermId());
+        newSpots.addAll(oldSpots);
+        return makeTechIdMatrix(newSpots, info.getContainerRows(), info.getContainerColumns());
+    }
+
+    private List<ImgSpotDTO> createNewSpots(long contId, List<AcquiredPlateImage> images,
+            List<ImgSpotDTO> existingSpots, int rows, int columns, String containerPermId)
+    {
+        Boolean[][] newSpotMatrix = extractNewSpots(rows, columns, images, existingSpots);
+        List<ImgSpotDTO> newSpots = makeSpotDTOs(newSpotMatrix, contId);
+        for (ImgSpotDTO spot : newSpots)
+        {
+            long id = dao.addSpot(spot);
+            spot.setId(id);
+        }
+        return newSpots;
+    }
+
+    private static Boolean[][] extractNewSpots(int rows, int columns,
+            List<AcquiredPlateImage> images, List<ImgSpotDTO> existingSpots)
+    {
+        Boolean[][] spots = extractExistingSpots(rows, columns, images);
+        unmarkSpots(existingSpots, spots);
+        return spots;
+    }
+
+    private static Boolean[][] extractExistingSpots(int rows, int columns,
+            List<AcquiredPlateImage> images)
+    {
+        Boolean[][] spots = new Boolean[rows][columns];
+        for (AcquiredPlateImage image : images)
+        {
+            spots[image.getWellRow() - 1][image.getWellColumn() - 1] = true;
+        }
+        return spots;
+    }
+
+    private static Long[][] makeTechIdMatrix(List<ImgSpotDTO> existingSpots, int rows, int columns)
+    {
+        Long[][] matrix = new Long[rows][columns];
+        for (ImgSpotDTO spot : existingSpots)
+        {
+            matrix[spot.getRow() - 1][spot.getColumn() - 1] = spot.getId();
+        }
+        return matrix;
+    }
+
+    private static List<ImgSpotDTO> makeSpotDTOs(Boolean[][] spots, long contId)
+    {
+
+        List<ImgSpotDTO> newSpots = new ArrayList<ImgSpotDTO>();
+        for (int row = 0; row < spots.length; row++)
+        {
+            Boolean[] spotRow = spots[row];
+            for (int col = 0; col < spotRow.length; col++)
+            {
+                Boolean wanted = spotRow[col];
+                if (wanted != null && wanted)
+                {
+                    newSpots.add(new ImgSpotDTO(row + 1, col + 1, contId));
+                }
+            }
+        }
+        return newSpots;
+    }
+
+    private static void unmarkSpots(List<ImgSpotDTO> existingSpots, Boolean[][] spotMatrix)
+    {
+        for (ImgSpotDTO existingSpot : existingSpots)
+        {
+            spotMatrix[existingSpot.getRow() - 1][existingSpot.getColumn() - 1] = false;
+        }
+    }
+
+    private long createDataset(long contId, HCSImageDatasetInfo info)
+    {
+        ImgDatasetDTO dataset =
+                new ImgDatasetDTO(info.getDatasetPermId(), info.getTileRows(),
+                        info.getTileColumns(), contId, info.hasImageSeries());
+        return dao.addDataset(dataset);
+    }
+}
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageFileExtractionResult.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageFileExtractionResult.java
index 021d5f94b4456f8daba9db3240ab18cce75eab6b..ae25bb962f8555ab027b592ba69c10a0b9981910 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageFileExtractionResult.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageFileExtractionResult.java
@@ -23,6 +23,7 @@ import java.util.List;
  * 
  * @author Tomasz Pylak
  */
+// TODO 2010-12-08, Tomasz Pylak: change to ImageFileExtractionResult 
 public final class HCSImageFileExtractionResult
 {
     /** The images files with description. */
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageFileExtractor.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageFileExtractor.java
index 3079c718833399a0ac62b95ed4eb37ded1103c6e..b02502a15e9034811c2812843a17f7052c82b92c 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageFileExtractor.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/HCSImageFileExtractor.java
@@ -17,39 +17,29 @@
 package ch.systemsx.cisd.openbis.dss.etl;
 
 import java.io.File;
-import java.util.ArrayList;
-import java.util.List;
 import java.util.Properties;
 
-import ch.systemsx.cisd.bds.hcs.Geometry;
+import org.apache.commons.io.FilenameUtils;
+import org.apache.commons.lang.StringUtils;
+
 import ch.systemsx.cisd.bds.hcs.Location;
-import ch.systemsx.cisd.common.exceptions.ConfigurationFailureException;
 import ch.systemsx.cisd.common.utilities.PropertyUtils;
-import ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractionResult.Channel;
+import ch.systemsx.cisd.openbis.dss.etl.dto.ImageFileInfo;
+import ch.systemsx.cisd.openbis.dss.etl.dto.UnparsedImageFileInfo;
 import ch.systemsx.cisd.openbis.dss.generic.shared.utils.CodeAndLabelUtil;
 import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifier;
-import ch.systemsx.cisd.openbis.plugin.screening.shared.basic.dto.ChannelDescription;
-import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ColorComponent;
 
 /**
- * Generic image extractor implementation. The images names should have an extension present in
- * {@link ImageFileExtractorUtils#IMAGE_EXTENSIONS} constant. Each image name should adhere to the
- * schema:<br>
+ * Each image name should adhere to the schema:<br>
  * 
  * <pre>
  * &lt;any-text&gt;_&lt;plate-code&gt;_&lt;well-code&gt;_&lt;tile-code&gt;_&lt;channel-name&gt;.&lt;allowed-image-extension&gt;
  * </pre>
  * 
- * If 'extract-single-image-channels' property is specified for storage processor then the channels
- * are extracted from the color components and the token &lt;channel-name&gt; from the image file
- * name is ignored.
- * 
  * @author Tomasz Pylak
  */
-public class HCSImageFileExtractor extends AbstractHCSImageFileExtractor
+public class HCSImageFileExtractor extends AbstractImageFileExtractor
 {
-    private static final String TILE_MAPPING = "tile_mapping";
-
     // boolean property, if true the names of the plate in file name and directory name have to
     // match.
     // True by default.
@@ -57,93 +47,91 @@ public class HCSImageFileExtractor extends AbstractHCSImageFileExtractor
 
     private final boolean shouldValidatePlateName;
 
-    private final TileMapper tileMapperOrNull;
-
-    private final List<ChannelDescription> channelDescriptions;
-
-    private final List<ColorComponent> channelColorComponentsOrNull;
-
-    protected final Geometry wellGeometry;
-
     public HCSImageFileExtractor(final Properties properties)
     {
         super(properties);
-        this.channelDescriptions = tryExtractChannelDescriptions(properties);
-        this.channelColorComponentsOrNull = tryGetChannelComponents(properties);
-        checkChannelsAndColorComponents();
-        this.wellGeometry = getWellGeometry(properties);
-        this.tileMapperOrNull =
-                TileMapper.tryCreate(properties.getProperty(TILE_MAPPING), wellGeometry);
         this.shouldValidatePlateName =
                 PropertyUtils.getBoolean(properties, CHECK_PLATE_NAME_FLAG_PROPERTY_NAME, true);
     }
 
-    private void checkChannelsAndColorComponents()
+    /**
+     * Extracts the plate location from argument. Returns <code>null</code> if the operation fails.
+     */
+    protected static Location tryGetWellLocation(final String plateLocation)
     {
-        if (channelColorComponentsOrNull != null
-                && channelColorComponentsOrNull.size() != channelDescriptions.size())
-        {
-            throw ConfigurationFailureException.fromTemplate(
-                    "There should be exactly one color component for each channel name."
-                            + " Correct the list of values for '%s' property.",
-                    AbstractHCSImageFileExtractor.EXTRACT_SINGLE_IMAGE_CHANNELS_PROPERTY);
-        }
+        return Location.tryCreateLocationFromTransposedMatrixCoordinate(plateLocation);
     }
 
     /**
      * Extracts the well location from given token. Returns <code>null</code> if the operation
      * fails.<br>
      * Can be overwritten in the subclasses if they use
-     * {@link #tryExtractImageInfo(UnparsedImageFileInfo)} internally.
+     * {@link #tryExtractHCSImageInfo(UnparsedImageFileInfo, File, File)} internally.
      */
-    protected Location tryGetWellLocation(final String wellLocation)
+    protected Location tryGetTileLocation(final String wellLocation)
     {
-        try
+        Integer tileNumber = tryAsInt(wellLocation);
+        if (tileNumber == null)
         {
-            int tileNumber = Integer.parseInt(wellLocation);
-
-            if (tileMapperOrNull != null)
-            {
-                return tileMapperOrNull.tryGetLocation(tileNumber);
-            } else
-            {
-                return Location.tryCreateLocationFromRowwisePosition(tileNumber, wellGeometry);
-            }
-        } catch (final NumberFormatException ex)
+            return null;
+        }
+        Location tileLoc = tryGetTileLocation(tileNumber);
+        if (tileLoc == null)
         {
-            // Nothing to do here. Rest of the code can handle this.
+            tileLoc = Location.tryCreateLocationFromRowwisePosition(tileNumber, wellGeometry);
         }
-        return null;
+        return tileLoc;
     }
 
-    @Override
-    protected List<AcquiredPlateImage> getImages(ImageFileInfo imageInfo)
+    /**
+     * Splits the specified image file name into at least four tokens. Only the last four tokens
+     * are considered: sample code, well location, tile location, and channel. Note that the
+     * sample code may be <code>null</code>.
+     * 
+     * @param shouldValidatePlateName if true it will be checked if the plate code in the file name
+     *            matches the datasetSample plate code.
+     * @return <code>null</code> if the argument could not be split into tokens.
+     */
+    private final static UnparsedImageFileInfo tryExtractImageInfo(File imageFile,
+            File incomingDataSetDirectory, SampleIdentifier datasetSample,
+            boolean shouldValidatePlateName)
     {
-        checkChannelsAndColorComponents();
-
-        if (channelColorComponentsOrNull != null)
+        final String baseName = FilenameUtils.getBaseName(imageFile.getPath());
+        final String[] tokens = StringUtils.split(baseName, TOKEN_SEPARATOR);
+        if (tokens == null || tokens.length < 4)
+        {
+            if (operationLog.isInfoEnabled())
+            {
+                operationLog.info(String.format(IMAGE_FILE_NOT_ENOUGH_ENTITIES, imageFile));
+            }
+            return null;
+        }
+        final String sampleCode = tokens[tokens.length - 4];
+        if (shouldValidatePlateName && sampleCode != null
+                && sampleCode.equalsIgnoreCase(datasetSample.getSampleCode()) == false)
         {
-            List<AcquiredPlateImage> images = new ArrayList<AcquiredPlateImage>();
-            for (int i = 0; i < channelColorComponentsOrNull.size(); i++)
+            if (operationLog.isInfoEnabled())
             {
-                ColorComponent colorComponent = channelColorComponentsOrNull.get(i);
-                ChannelDescription channelDescription = channelDescriptions.get(i);
-                imageInfo.setChannelCode(channelDescription.getCode());
-                images.add(createImage(imageInfo, colorComponent));
+                operationLog.info(String.format(IMAGE_FILE_BELONGS_TO_WRONG_SAMPLE, imageFile,
+                        datasetSample, sampleCode));
             }
-            return images;
-        } else
+            return null;
+        }
+        String channelToken = tokens[tokens.length - 1];
+        if (StringUtils.isBlank(channelToken))
         {
-            ensureChannelExist(channelDescriptions, imageInfo.getChannelCode());
-            return getDefaultImages(imageInfo);
+            operationLog.info("Channel token is empty for image: " + imageFile);
+            return null;
         }
 
-    }
+        UnparsedImageFileInfo info = new UnparsedImageFileInfo();
+        info.setWellLocationToken(tokens[tokens.length - 3]);
+        info.setTileLocationToken(tokens[tokens.length - 2]);
+        info.setChannelToken(channelToken);
+        info.setTimepointToken(null);
+        info.setDepthToken(null);
 
-    @Override
-    protected List<Channel> getAllChannels()
-    {
-        return createChannels(channelDescriptions);
+        return info;
     }
 
     @Override
@@ -152,28 +140,29 @@ public class HCSImageFileExtractor extends AbstractHCSImageFileExtractor
             SampleIdentifier datasetSample)
     {
         UnparsedImageFileInfo unparsedInfo =
-                tryExtractDefaultImageInfo(imageFile, incomingDataSetDirectory, datasetSample,
+                tryExtractImageInfo(imageFile, incomingDataSetDirectory, datasetSample,
                         shouldValidatePlateName);
         if (unparsedInfo == null)
         {
             return null;
         }
-        return tryExtractImageInfo(unparsedInfo);
+        return tryExtractHCSImageInfo(unparsedInfo, imageFile, incomingDataSetDirectory);
     }
 
-    protected final ImageFileInfo tryExtractImageInfo(UnparsedImageFileInfo unparsedInfo)
+    protected final ImageFileInfo tryExtractHCSImageInfo(UnparsedImageFileInfo unparsedInfo,
+            File imageFile, File incomingDataSetDirectory)
     {
         assert unparsedInfo != null;
 
-        Location plateLocation = tryGetPlateLocation(unparsedInfo.getWellLocationToken());
-        if (plateLocation == null)
+        Location wellLocation = tryGetWellLocation(unparsedInfo.getWellLocationToken());
+        if (wellLocation == null)
         {
             operationLog.info("Cannot extract well location from token "
                     + unparsedInfo.getWellLocationToken());
             return null;
         }
-        Location wellLocation = tryGetWellLocation(unparsedInfo.getTileLocationToken());
-        if (wellLocation == null)
+        Location tileLocation = tryGetTileLocation(unparsedInfo.getTileLocationToken());
+        if (tileLocation == null)
         {
             operationLog.info("Cannot extract tile location (a.k.a. tile/field/side) from token "
                     + unparsedInfo.getTileLocationToken());
@@ -183,23 +172,9 @@ public class HCSImageFileExtractor extends AbstractHCSImageFileExtractor
 
         Float timepointOrNull = tryAsFloat(unparsedInfo.getTimepointToken());
         Float depthOrNull = tryAsFloat(unparsedInfo.getDepthToken());
+        String imageRelativePath = getRelativeImagePath(incomingDataSetDirectory, imageFile);
 
-        return new ImageFileInfo(plateLocation, channelCode, wellLocation,
-                unparsedInfo.getImageRelativePath(), timepointOrNull, depthOrNull);
-    }
-
-    private static Float tryAsFloat(String valueOrNull)
-    {
-        if (valueOrNull == null)
-        {
-            return null;
-        }
-        try
-        {
-            return Float.parseFloat(valueOrNull);
-        } catch (NumberFormatException e)
-        {
-            return null;
-        }
+        return new ImageFileInfo(wellLocation, channelCode, tileLocation, imageRelativePath,
+                timepointOrNull, depthOrNull);
     }
 }
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/IHCSImageFileExtractor.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/IImageFileExtractor.java
similarity index 84%
rename from screening/source/java/ch/systemsx/cisd/openbis/dss/etl/IHCSImageFileExtractor.java
rename to screening/source/java/ch/systemsx/cisd/openbis/dss/etl/IImageFileExtractor.java
index 06fba542a207121a87e02ad9288278808481d688..68ba156266abe4e97aa656ffaae958c65b8b325c 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/IHCSImageFileExtractor.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/IImageFileExtractor.java
@@ -22,13 +22,13 @@ import java.util.Properties;
 import ch.systemsx.cisd.openbis.dss.generic.shared.dto.DataSetInformation;
 
 /**
- * This role is supposed to be implemented by classes that can extract HCS image files from an
- * incoming data set directory. Implementations of this interface need to have a constructor that
- * takes {@link Properties} to initialize itself.
+ * This role is supposed to be implemented by classes that can extract HCS or Microscopy image files
+ * from an incoming data set directory. Implementations of this interface need to have a constructor
+ * that takes {@link Properties} to initialize itself.
  * 
  * @author Tomasz Pylak
  */
-public interface IHCSImageFileExtractor
+public interface IImageFileExtractor
 {
 
     /**
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ImagingDatabaseHelper.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ImagingDatabaseHelper.java
new file mode 100644
index 0000000000000000000000000000000000000000..fc3050ed642c7f677af14521af0d97a105949b71
--- /dev/null
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ImagingDatabaseHelper.java
@@ -0,0 +1,413 @@
+/*
+ * Copyright 2010 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.openbis.dss.etl;
+
+import java.util.HashMap;
+import java.util.List;
+import java.util.Map;
+
+import ch.systemsx.cisd.common.exceptions.UserFailureException;
+import ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractionResult.Channel;
+import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingQueryDAO;
+import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgChannelDTO;
+import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgContainerDTO;
+import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgExperimentDTO;
+
+/**
+ * Helper class for retrieving and/or creating entities associated with the imaging database:
+ * experiments, containers, channels and datasets.
+ * 
+ * @author Chandrasekhar Ramakrishnan
+ * @author Tomasz Pylak
+ */
+public class ImagingDatabaseHelper
+{
+    private final IImagingQueryDAO dao;
+
+    private ImagingDatabaseHelper(IImagingQueryDAO dao)
+    {
+        this.dao = dao;
+    }
+
+    /**
+     * Creates channels connected to the specified dataset id.
+     */
+    public static ImagingChannelsMap createDatasetChannels(IImagingQueryDAO dao, long datasetId,
+            List<HCSImageFileExtractionResult.Channel> channels)
+    {
+        ChannelOwner channelOwner = ChannelOwner.createDataset(datasetId);
+        return new ImagingChannelsCreator(dao).getOrCreateChannelsMap(channelOwner, channels);
+    }
+
+    /** @return container id */
+    public static long getOrCreateExperimentAndContainer(IImagingQueryDAO dao,
+            HCSContainerDatasetInfo info)
+    {
+        return doGetOrCreateExperimentAndContainer(dao, info).getContainerId();
+    }
+
+    /**
+     * NOTE: Code responsible for trying to get sample and experiment from the DB and creating them
+     * if they don't exist is in synchronized block and uses currently opened transaction. Then the
+     * transaction is closed and data set is added to the DB in second transaction. If second
+     * transaction will be rolled back sample and experiment created in first transaction will stay
+     * in the DB.
+     */
+    private static ExperimentAndContainerIds doGetOrCreateExperimentAndContainer(
+            IImagingQueryDAO dao, HCSContainerDatasetInfo info)
+    {
+        synchronized (IImagingQueryDAO.class)
+        {
+            CreatedOrFetchedEntity exp = getOrCreateExperiment(dao, info);
+            CreatedOrFetchedEntity cont = getOrCreateContainer(dao, info, exp.getId());
+            if (exp.hasAlreadyExisted() == false || cont.hasAlreadyExisted() == false)
+            {
+                // without this commit other threads will not see the new experiment/sample when the
+                // synchronized block ends
+                dao.commit();
+            }
+            return new ExperimentAndContainerIds(exp.getId(), cont.getId());
+        }
+    }
+
+    /**
+     * NOTE: Code responsible for trying to get sample and experiment from the DB and creating them
+     * if they don't exist is in synchronized block and uses currently opened transaction. Then the
+     * transaction is closed and data set is added to the DB in second transaction. If second
+     * transaction will be rolled back sample and experiment created in first transaction will stay
+     * in the DB.
+     */
+    public static ExperimentWithChannelsAndContainer getOrCreateExperimentWithChannelsAndContainer(
+            IImagingQueryDAO dao, HCSContainerDatasetInfo info,
+            List<HCSImageFileExtractionResult.Channel> channels)
+    {
+        ImagingDatabaseHelper helper = new ImagingDatabaseHelper(dao);
+        synchronized (IImagingQueryDAO.class)
+        {
+            CreatedOrFetchedEntity exp = getOrCreateExperiment(dao, info);
+            long expId = exp.getId();
+            CreatedOrFetchedEntity cont = getOrCreateContainer(dao, info, expId);
+            ImagingChannelsMap channelsMap =
+                    helper.getOrCreateChannels(ChannelOwner.createExperiment(expId), channels);
+            if (exp.hasAlreadyExisted() == false || cont.hasAlreadyExisted() == false)
+            {
+                // without this commit other threads will not see the new experiment/sample when the
+                // synchronized block ends
+                dao.commit();
+            }
+            return new ExperimentWithChannelsAndContainer(expId, cont.getId(), channelsMap);
+        }
+    }
+
+    private ImagingChannelsMap getOrCreateChannels(ChannelOwner channelOwner, List<Channel> channels)
+    {
+        return new ImagingChannelsCreator(dao).getOrCreateChannelsMap(channelOwner, channels);
+    }
+
+    private static CreatedOrFetchedEntity getOrCreateContainer(IImagingQueryDAO dao,
+            HCSContainerDatasetInfo info, long expId)
+    {
+        String containerPermId = info.getContainerPermId();
+        Long containerId = dao.tryGetContainerIdPermId(containerPermId);
+        if (containerId != null)
+        {
+            return new CreatedOrFetchedEntity(true, containerId);
+        } else
+        {
+            ImgContainerDTO container =
+                    new ImgContainerDTO(containerPermId, info.getContainerRows(),
+                            info.getContainerColumns(), expId);
+            containerId = dao.addContainer(container);
+            return new CreatedOrFetchedEntity(false, containerId);
+        }
+    }
+
+    private static CreatedOrFetchedEntity getOrCreateExperiment(IImagingQueryDAO dao,
+            HCSContainerDatasetInfo info)
+    {
+        String experimentPermId = info.getExperimentPermId();
+        ImgExperimentDTO experiment = dao.tryGetExperimentByPermId(experimentPermId);
+        if (experiment != null)
+        {
+            return new CreatedOrFetchedEntity(true, experiment.getId());
+        } else
+        {
+            Long expId = dao.addExperiment(experimentPermId);
+            return new CreatedOrFetchedEntity(false, expId);
+        }
+    }
+
+    private static class CreatedOrFetchedEntity
+    {
+        private final boolean alreadyExisted;
+
+        private final long id;
+
+        public CreatedOrFetchedEntity(boolean alreadyExisted, long id)
+        {
+            this.alreadyExisted = alreadyExisted;
+            this.id = id;
+        }
+
+        public boolean hasAlreadyExisted()
+        {
+            return alreadyExisted;
+        }
+
+        public long getId()
+        {
+            return id;
+        }
+    }
+
+    private static class ExperimentAndContainerIds
+    {
+        private final long experimentId;
+
+        private final long containerId;
+
+        public ExperimentAndContainerIds(long experimentId, long containerId)
+        {
+            this.experimentId = experimentId;
+            this.containerId = containerId;
+        }
+
+        public long getExperimentId()
+        {
+            return experimentId;
+        }
+
+        public long getContainerId()
+        {
+            return containerId;
+        }
+    }
+
+    public static class ImagingChannelsMap
+    {
+        private final Map<String/* channel code */, Long/* tech id */> channelsMap;
+
+        public ImagingChannelsMap(Map<String, Long> channelsMap)
+        {
+            this.channelsMap = channelsMap;
+        }
+
+        /** channel must be defined */
+        public long getChannelId(String channelCode)
+        {
+            Long channelId = channelsMap.get(channelCode);
+            if (channelId == null)
+            {
+                throw new UserFailureException("Undefined channel " + channelCode);
+            }
+            return channelId;
+        }
+    }
+
+    public static class ExperimentWithChannelsAndContainer extends ExperimentAndContainerIds
+    {
+        private final ImagingChannelsMap channelsMap;
+
+        public ExperimentWithChannelsAndContainer(long experimentId, long containerId,
+                ImagingChannelsMap channelsMap)
+        {
+            super(experimentId, containerId);
+            this.channelsMap = channelsMap;
+        }
+
+        public ImagingChannelsMap getChannelsMap()
+        {
+            return channelsMap;
+        }
+    }
+
+    /** Logic to find or create channels */
+    private static class ImagingChannelsCreator
+    {
+        private final IImagingQueryDAO dao;
+
+        public ImagingChannelsCreator(IImagingQueryDAO dao)
+        {
+            this.dao = dao;
+        }
+
+        public ImagingChannelsMap getOrCreateChannelsMap(ChannelOwner channelOwner,
+                List<HCSImageFileExtractionResult.Channel> channels)
+        {
+            Map<String, Long> map = getOrCreateChannels(channelOwner, channels);
+            return new ImagingChannelsMap(map);
+        }
+
+        private Map<String, Long> getOrCreateChannels(ChannelOwner channelOwner,
+                List<HCSImageFileExtractionResult.Channel> channels)
+        {
+            if (channelOwner.tryGetExperimentId() != null)
+            {
+                long expId = channelOwner.tryGetExperimentId();
+                List<ImgChannelDTO> allChannels = dao.getChannelsByExperimentId(expId);
+                if (allChannels.size() == 0)
+                {
+                    return createChannels(channelOwner, channels);
+                } else
+                {
+                    return updateExperimentChannels(expId, channels, allChannels);
+                }
+            } else
+            {
+                // dataset is always a new one, so we always create new channels.
+                return createChannels(channelOwner, channels);
+            }
+        }
+
+        private Map<String, Long> updateExperimentChannels(long expId, List<Channel> channels,
+                List<ImgChannelDTO> allChannels)
+        {
+            Map<String/* name */, ImgChannelDTO> existingChannels = asNameMap(allChannels);
+            Map<String, Long> map = new HashMap<String, Long>();
+            for (HCSImageFileExtractionResult.Channel channel : channels)
+            {
+                ImgChannelDTO channelDTO =
+                        updateExperimentChannel(channel, expId, existingChannels);
+                addChannel(map, channelDTO);
+            }
+            return map;
+        }
+
+        private Map<String, Long> createChannels(ChannelOwner channelOwner, List<Channel> channels)
+        {
+            Map<String, Long> map = new HashMap<String, Long>();
+            for (HCSImageFileExtractionResult.Channel channel : channels)
+            {
+                ImgChannelDTO channelDTO = createChannel(channel, channelOwner);
+                addChannel(map, channelDTO);
+            }
+            return map;
+        }
+
+        private static void addChannel(Map<String, Long> map, ImgChannelDTO channelDTO)
+        {
+            map.put(channelDTO.getCode(), channelDTO.getId());
+        }
+
+        private static Map<String, ImgChannelDTO> asNameMap(List<ImgChannelDTO> channels)
+        {
+            Map<String, ImgChannelDTO> nameMap = new HashMap<String, ImgChannelDTO>();
+            for (ImgChannelDTO channel : channels)
+            {
+                nameMap.put(channel.getCode(), channel);
+            }
+            return nameMap;
+        }
+
+        private ImgChannelDTO updateExperimentChannel(HCSImageFileExtractionResult.Channel channel,
+                long expId, Map<String, ImgChannelDTO> existingChannels)
+        {
+            ImgChannelDTO channelDTO =
+                    makeChannelDTO(channel, ChannelOwner.createExperiment(expId));
+            String channelCode = channelDTO.getCode();
+            ImgChannelDTO existingChannel = existingChannels.get(channelCode);
+            if (existingChannel == null)
+            {
+                throw createInvalidNewExperimentChannelException(expId, existingChannels,
+                        channelCode);
+            }
+            // a channel with a specified name already exists for an experiment, its description
+            // will be updated. Wavelength will be updated only if it was null before.
+            if (channelDTO.getWavelength() == null)
+            {
+                channelDTO.setWavelength(existingChannel.getWavelength());
+            }
+            if (existingChannel.getWavelength() != null
+                    && existingChannel.getWavelength().equals(channelDTO.getWavelength()) == false)
+            {
+                throw UserFailureException
+                        .fromTemplate(
+                                "There are already datasets registered for the experiment "
+                                        + "which use the same channel code, but with a different wavelength! "
+                                        + "Channel %s, old wavelength %d, new wavelength %d.",
+                                channelCode, existingChannel.getWavelength(),
+                                channelDTO.getWavelength());
+            }
+            channelDTO.setId(existingChannel.getId());
+            dao.updateChannel(channelDTO);
+            return channelDTO;
+        }
+
+        private static UserFailureException createInvalidNewExperimentChannelException(long expId,
+                Map<String, ImgChannelDTO> existingChannels, String channelName)
+        {
+            return UserFailureException.fromTemplate(
+                    "Experiment with id '%d' has already some channels registered "
+                            + "and does not have a channel with a code '%s'. "
+                            + "Register a new experiment to use new channels. "
+                            + "Available channel names in this experiment: %s.", expId,
+                    channelName, existingChannels.keySet());
+        }
+
+        private ImgChannelDTO createChannel(HCSImageFileExtractionResult.Channel channel,
+                ChannelOwner channelOwner)
+        {
+            ImgChannelDTO channelDTO = makeChannelDTO(channel, channelOwner);
+            long channelId = dao.addChannel(channelDTO);
+            channelDTO.setId(channelId);
+            return channelDTO;
+        }
+
+        private static ImgChannelDTO makeChannelDTO(HCSImageFileExtractionResult.Channel channel,
+                ChannelOwner channelOwner)
+        {
+            return new ImgChannelDTO(channel.getCode(), channel.tryGetDescription(),
+                    channel.tryGetWavelength(), channelOwner.tryGetDatasetId(),
+                    channelOwner.tryGetExperimentId(), channel.getLabel());
+        }
+    }
+
+    /** DTO to store channel owner: dataset id or experiment id */
+    private static class ChannelOwner
+    {
+        private final Long expIdOrNull;
+
+        private final Long datasetIdOrNull;
+
+        public static ChannelOwner createDataset(long datasetId)
+        {
+            return new ChannelOwner(null, datasetId);
+        }
+
+        public static ChannelOwner createExperiment(long expId)
+        {
+            return new ChannelOwner(expId, null);
+        }
+
+        private ChannelOwner(Long expIdOrNull, Long datasetIdOrNull)
+        {
+            this.expIdOrNull = expIdOrNull;
+            this.datasetIdOrNull = datasetIdOrNull;
+        }
+
+        public Long tryGetExperimentId()
+        {
+            return expIdOrNull;
+        }
+
+        public Long tryGetDatasetId()
+        {
+            return datasetIdOrNull;
+        }
+
+    }
+}
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ImagingDatabaseVersionHolder.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ImagingDatabaseVersionHolder.java
index 4531667bbb083f26c388e96d5c58ce6c71b4ec72..d27f9e8f21da655ebe7b9d5b76062dcef7c6f05c 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ImagingDatabaseVersionHolder.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ImagingDatabaseVersionHolder.java
@@ -28,7 +28,7 @@ public class ImagingDatabaseVersionHolder implements IDatabaseVersionHolder
 
     public String getDatabaseVersion()
     {
-        return "008"; // changed in S95
+        return "009"; // changed in S96
     }
 
 }
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/MicroscopyImageDatasetInfo.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/MicroscopyImageDatasetInfo.java
new file mode 100644
index 0000000000000000000000000000000000000000..b5aa8a199d440d65add59392746844ca85877d33
--- /dev/null
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/MicroscopyImageDatasetInfo.java
@@ -0,0 +1,61 @@
+/*
+ * Copyright 2010 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.openbis.dss.etl;
+
+/**
+ * Info about one microscopy image dataset.
+ * 
+ * @author Tomasz Pylak
+ */
+public class MicroscopyImageDatasetInfo
+{
+    private final String datasetPermId;
+
+    private final int tileRows, tileColumns;
+
+    // has any well timepoints or depth stack images?
+    private final boolean hasImageSeries;
+
+    public MicroscopyImageDatasetInfo(String datasetPermId, int tileRows, int tileColumns,
+            boolean hasImageSeries)
+    {
+        this.datasetPermId = datasetPermId;
+        this.tileRows = tileRows;
+        this.tileColumns = tileColumns;
+        this.hasImageSeries = hasImageSeries;
+    }
+
+    public int getTileRows()
+    {
+        return tileRows;
+    }
+
+    public int getTileColumns()
+    {
+        return tileColumns;
+    }
+
+    public boolean hasImageSeries()
+    {
+        return hasImageSeries;
+    }
+
+    public String getDatasetPermId()
+    {
+        return datasetPermId;
+    }
+}
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/MicroscopyImageDatasetUploader.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/MicroscopyImageDatasetUploader.java
new file mode 100644
index 0000000000000000000000000000000000000000..80bcf0fd2be2123bfc621d128490d1ad81618730
--- /dev/null
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/MicroscopyImageDatasetUploader.java
@@ -0,0 +1,71 @@
+/*
+ * Copyright 2010 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.openbis.dss.etl;
+
+import java.util.List;
+
+import ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractionResult.Channel;
+import ch.systemsx.cisd.openbis.dss.etl.ImagingDatabaseHelper.ImagingChannelsMap;
+import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingQueryDAO;
+import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgDatasetDTO;
+
+/**
+ * Uploads microscopy images (no spots, no container) into imaging database.
+ * 
+ * @author Tomasz Pylak
+ */
+public class MicroscopyImageDatasetUploader extends AbstractImageDatasetUploader
+{
+    public static void upload(IImagingQueryDAO dao, MicroscopyImageDatasetInfo dataset,
+            List<AcquiredPlateImage> images, List<HCSImageFileExtractionResult.Channel> channels)
+    {
+        new MicroscopyImageDatasetUploader(dao).upload(dataset, images, channels);
+    }
+
+    private MicroscopyImageDatasetUploader(IImagingQueryDAO dao)
+    {
+        super(dao);
+    }
+
+    private void upload(MicroscopyImageDatasetInfo dataset, List<AcquiredPlateImage> images,
+            List<Channel> channels)
+    {
+        long datasetId = createMicroscopyDataset(dataset);
+        ImagingChannelsMap channelsMap =
+                ImagingDatabaseHelper.createDatasetChannels(dao, datasetId, channels);
+        createImages(images, createDummySpotProvider(), channelsMap, datasetId);
+    }
+
+    private static ISpotProvider createDummySpotProvider()
+    {
+        return new ISpotProvider()
+            {
+                public Long tryGetSpotId(AcquiredPlateImage image)
+                {
+                    return null;
+                }
+            };
+    }
+
+    private long createMicroscopyDataset(MicroscopyImageDatasetInfo dataset)
+    {
+        ImgDatasetDTO datasetDTO =
+                new ImgDatasetDTO(dataset.getDatasetPermId(), dataset.getTileRows(),
+                        dataset.getTileColumns(), null, dataset.hasImageSeries());
+        return dao.addDataset(datasetDTO);
+    }
+}
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/MicroscopyImageFileExtractor.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/MicroscopyImageFileExtractor.java
new file mode 100644
index 0000000000000000000000000000000000000000..c908f2c3fdef402d9021c3191a898a590ac5420c
--- /dev/null
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/MicroscopyImageFileExtractor.java
@@ -0,0 +1,83 @@
+/*
+ * Copyright 2010 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.openbis.dss.etl;
+
+import java.io.File;
+import java.util.Properties;
+
+import ch.systemsx.cisd.bds.hcs.Location;
+import ch.systemsx.cisd.common.exceptions.ConfigurationFailureException;
+import ch.systemsx.cisd.openbis.dss.etl.dto.ImageFileInfo;
+import ch.systemsx.cisd.openbis.dss.etl.dto.UnparsedImageFileInfo;
+import ch.systemsx.cisd.openbis.dss.generic.shared.utils.CodeAndLabelUtil;
+import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifier;
+
+/**
+ * {@link IImageFileExtractor} implementation for microscopy images.
+ * 
+ * @author Tomasz Pylak
+ */
+public class MicroscopyImageFileExtractor extends AbstractImageFileExtractor
+{
+
+    public MicroscopyImageFileExtractor(Properties properties)
+    {
+        super(properties);
+        if (this.tileMapperOrNull == null)
+        {
+            throw new ConfigurationFailureException("Tile mapping property not defined: "
+                    + TILE_MAPPING_PROPERTY);
+        }
+    }
+
+    @Override
+    protected ImageFileInfo tryExtractImageInfo(File imageFile, File incomingDataSetDirectory,
+            SampleIdentifier datasetSample)
+    {
+        UnparsedImageFileInfo unparsedInfo =
+                UnparsedImageFileInfoLexer.tryExtractMicroscopyImageFileInfo(imageFile,
+                        incomingDataSetDirectory);
+        if (unparsedInfo == null)
+        {
+            return null;
+        }
+
+        // extract tile
+        Location tileLocation = null;
+        Integer tileNumber = tryAsInt(unparsedInfo.getTileLocationToken());
+        if (tileNumber != null)
+        {
+            tileLocation = tryGetTileLocation(tileNumber);
+        }
+        if (tileLocation == null)
+        {
+            operationLog.info("Cannot extract tile location (a.k.a. tile/field/side) from token "
+                    + unparsedInfo.getTileLocationToken());
+            return null;
+        }
+
+        String channelCode = CodeAndLabelUtil.normalize(unparsedInfo.getChannelToken());
+
+        Float timepointOrNull = tryAsFloat(unparsedInfo.getTimepointToken());
+        Float depthOrNull = tryAsFloat(unparsedInfo.getDepthToken());
+        String imageRelativePath = getRelativeImagePath(incomingDataSetDirectory, imageFile);
+
+        return new ImageFileInfo(null, channelCode, tileLocation, imageRelativePath,
+                timepointOrNull, depthOrNull);
+    }
+
+}
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/MicroscopyStorageProcessor.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/MicroscopyStorageProcessor.java
new file mode 100644
index 0000000000000000000000000000000000000000..03eb7962fb787c3a07fd60cc8b6d4fe93fc48217
--- /dev/null
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/MicroscopyStorageProcessor.java
@@ -0,0 +1,75 @@
+/*
+ * Copyright 2010 ETH Zuerich, CISD
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package ch.systemsx.cisd.openbis.dss.etl;
+
+import java.io.File;
+import java.util.List;
+import java.util.Properties;
+
+import ch.systemsx.cisd.common.exceptions.ConfigurationFailureException;
+import ch.systemsx.cisd.common.mail.IMailClient;
+import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingQueryDAO;
+import ch.systemsx.cisd.openbis.dss.generic.shared.dto.DataSetInformation;
+
+/**
+ * Storage processor which stores microscopy images in a special-purpose imaging database.
+ * <p>
+ * See {@link AbstractImageStorageProcessor} documentation.
+ * 
+ * @author Tomasz Pylak
+ */
+public class MicroscopyStorageProcessor extends AbstractImageStorageProcessor
+{
+
+    public MicroscopyStorageProcessor(Properties properties)
+    {
+        super(properties);
+        if (imageFileExtractor == null)
+        {
+            throw ConfigurationFailureException
+                    .fromTemplate("Image file extractor property is not configured: "
+                            + FILE_EXTRACTOR_PROPERTY);
+        }
+    }
+
+    @Override
+    protected void storeInDatabase(IImagingQueryDAO dao, DataSetInformation dataSetInformation,
+            HCSImageFileExtractionResult extractedImages)
+    {
+        List<AcquiredPlateImage> images = extractedImages.getImages();
+        MicroscopyImageDatasetInfo dataset =
+                createMicroscopyImageDatasetInfo(dataSetInformation, images);
+
+        MicroscopyImageDatasetUploader.upload(dao, dataset, images, extractedImages.getChannels());
+    }
+
+    private MicroscopyImageDatasetInfo createMicroscopyImageDatasetInfo(
+            DataSetInformation dataSetInformation, List<AcquiredPlateImage> images)
+    {
+        boolean hasImageSeries = hasImageSeries(images);
+        return new MicroscopyImageDatasetInfo(dataSetInformation.getDataSetCode(),
+                spotGeometry.getRows(), spotGeometry.getColumns(), hasImageSeries);
+    }
+
+    @Override
+    protected void validateImages(DataSetInformation dataSetInformation, IMailClient mailClient,
+            File incomingDataSetDirectory, HCSImageFileExtractionResult extractionResult)
+    {
+        // do nothing - for now we do not have good examples of real data
+    }
+
+}
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/PlateStorageProcessor.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/PlateStorageProcessor.java
index 3170a472543f7881d4bf3a963a774dac14706d68..68f45b1efea6f9edef8d58e525b40a5993d6c3aa 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/PlateStorageProcessor.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/PlateStorageProcessor.java
@@ -17,325 +17,193 @@
 package ch.systemsx.cisd.openbis.dss.etl;
 
 import java.io.File;
-import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Arrays;
 import java.util.List;
 import java.util.Properties;
 import java.util.Set;
 
-import javax.sql.DataSource;
-
-import net.lemnik.eodsql.QueryTool;
-
 import org.apache.commons.lang.StringUtils;
-import org.apache.commons.lang.time.DurationFormatUtils;
-import org.apache.log4j.Logger;
 
 import ch.systemsx.cisd.bds.hcs.Channel;
-import ch.systemsx.cisd.bds.hcs.Geometry;
 import ch.systemsx.cisd.bds.hcs.Location;
 import ch.systemsx.cisd.bds.storage.IFile;
 import ch.systemsx.cisd.bds.storage.filesystem.NodeFactory;
 import ch.systemsx.cisd.common.collections.CollectionUtils;
-import ch.systemsx.cisd.common.exceptions.ConfigurationFailureException;
 import ch.systemsx.cisd.common.exceptions.EnvironmentFailureException;
 import ch.systemsx.cisd.common.exceptions.UserFailureException;
-import ch.systemsx.cisd.common.filesystem.FileOperations;
 import ch.systemsx.cisd.common.filesystem.FileUtilities;
-import ch.systemsx.cisd.common.filesystem.IFileOperations;
-import ch.systemsx.cisd.common.filesystem.SoftLinkMaker;
-import ch.systemsx.cisd.common.logging.LogCategory;
-import ch.systemsx.cisd.common.logging.LogFactory;
 import ch.systemsx.cisd.common.mail.IMailClient;
 import ch.systemsx.cisd.common.utilities.ClassUtils;
-import ch.systemsx.cisd.common.utilities.PropertyUtils;
-import ch.systemsx.cisd.etlserver.AbstractStorageProcessor;
 import ch.systemsx.cisd.etlserver.IHCSImageFileAccepter;
-import ch.systemsx.cisd.etlserver.ITypeExtractor;
-import ch.systemsx.cisd.etlserver.hdf5.Hdf5Container;
-import ch.systemsx.cisd.etlserver.hdf5.HierarchicalStructureDuplicatorFileToHdf5;
-import ch.systemsx.cisd.openbis.dss.Constants;
 import ch.systemsx.cisd.openbis.dss.etl.HCSImageCheckList.FullLocation;
 import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingQueryDAO;
-import ch.systemsx.cisd.openbis.dss.generic.shared.ServiceProvider;
 import ch.systemsx.cisd.openbis.dss.generic.shared.dto.DataSetInformation;
 import ch.systemsx.cisd.openbis.generic.shared.basic.dto.Experiment;
-import ch.systemsx.cisd.openbis.generic.shared.dto.StorageFormat;
 import ch.systemsx.cisd.openbis.plugin.screening.shared.basic.dto.ChannelDescription;
-import ch.systemsx.cisd.openbis.plugin.screening.shared.basic.dto.ScreeningConstants;
 import ch.systemsx.cisd.openbis.plugin.screening.shared.dto.PlateDimension;
 
 /**
- * Storage processor which stores HCS images in a special-purpose database.
+ * Storage processor which stores HCS plate images in a special-purpose imaging database.
  * <p>
- * Accepts following properties:
- * <ul>
- * <li>generate-thumbnails - should the thumbnails be generated? It slows down the dataset
- * registration, but increases the performance when the user wants to see the image. Can be 'true'
- * or 'false', 'false' is the default value
- * <li>compress-thumbnails - should the thumbnails be compressed? Used if generate-thumbnails is
- * true, otherwise ignored
- * <li>thumbnail-max-width, thumbnail-max-height - thumbnails size in pixels
- * <li>[deprecated] channel-names - names of the channels in which images have been acquired
- * <li>channel-codes - codes of the channels in which images have been acquired
- * <li>channel-labels - labels of the channels in which images have been acquired
- * <li>well_geometry - format: [width]>x[height], e.g. 3x4. Specifies the grid into which a
- * microscope divided the well to acquire images.
- * <li>file-extractor - implementation of the {@link IHCSImageFileExtractor} interface which maps
- * images to the location on the plate and particular channel
- * <li>data-source - specification of the imaging db
- * <li>extract-single-image-channels - optional comma separated list of color components. Available
- * values: RED, GREEN or BLUE. If specified then the channels are extracted from the color
- * components and override 'file-extractor' results.
- * </p>
+ * See {@link AbstractImageStorageProcessor} documentation.
  * 
  * @author Tomasz Pylak
  */
-public final class PlateStorageProcessor extends AbstractStorageProcessor
+public final class PlateStorageProcessor extends AbstractImageStorageProcessor
 {
-
-    /** The directory where <i>original</i> data could be found. */
-    private static final String DIR_ORIGINAL = ScreeningConstants.ORIGINAL_DATA_DIR;
-
-    private static final Logger operationLog = LogFactory.getLogger(LogCategory.OPERATION,
-            PlateStorageProcessor.class);
-
-    private static final Logger notificationLog = LogFactory.getLogger(LogCategory.NOTIFY,
-            PlateStorageProcessor.class);
-
-    // tiles geometry, e.g. 3x4 if the well is divided into 12 tiles (3 rows, 4 columns)
-    private static final String SPOT_GEOMETRY_PROPERTY = "well_geometry";
-
-    private static final String GENERATE_THUMBNAILS_PROPERTY = "generate-thumbnails";
-
-    private final static String COMPRESS_THUMBNAILS_PROPERTY = "compress-thumbnails";
-
-    private final static String ORIGINAL_DATA_STORAGE_FORMAT_PROPERTY =
-            "original-data-storage-format";
-
-    private static final String THUMBNAIL_MAX_WIDTH_PROPERTY = "thumbnail-max-width";
-
-    private static final int DEFAULT_THUMBNAIL_MAX_WIDTH = 200;
-
-    private static final String THUMBNAIL_MAX_HEIGHT_PROPERTY = "thumbnail-max-height";
-
-    private static final int DEFAULT_THUMBNAIL_MAX_HEIGHT = 120;
-
-    private static final String FILE_EXTRACTOR_PROPERTY = "file-extractor";
-
     // a class of the old-style image extractor
     private static final String DEPRECATED_FILE_EXTRACTOR_PROPERTY = "deprecated-file-extractor";
 
-    // comma separated list of channel names, order matters
-    @Deprecated
-    public static final String CHANNEL_NAMES = "channel-names";
-
-    // comma separated list of channel codes, order matters
-    public static final String CHANNEL_CODES = "channel-codes";
-
-    // comma separated list of channel labels, order matters
-    public static final String CHANNEL_LABELS = "channel-labels";
-
-    // how the original data should be stored
-    private static enum OriginalDataStorageFormat
-    {
-        UNCHANGED, HDF5, HDF5_COMPRESSED;
-
-        public boolean isHdf5()
-        {
-            return this == OriginalDataStorageFormat.HDF5
-                    || this == OriginalDataStorageFormat.HDF5_COMPRESSED;
-        }
-    }
-
-    // -----------
-
-    private final DataSource dataSource;
-
-    private final Geometry spotGeometry;
-
-    private final int thumbnailMaxWidth;
-
-    private final int thumbnailMaxHeight;
-
-    private final boolean generateThumbnails;
-
-    private final boolean areThumbnailsCompressed;
-
-    private final OriginalDataStorageFormat originalDataStorageFormat;
-
-    // one of the extractors is always null and one not null
-    private final IHCSImageFileExtractor imageFileExtractor;
+    // ---
 
     private final ch.systemsx.cisd.etlserver.IHCSImageFileExtractor deprecatedImageFileExtractor;
 
-    private final List<ChannelDescription> channelDescriptions;
-
-    // --- internal state -------------
-
-    private IImagingQueryDAO currentTransaction;
-
-    // ---
-
-    public PlateStorageProcessor(final Properties properties)
+    public PlateStorageProcessor(Properties properties)
     {
         super(properties);
-        String spotGeometryText = getMandatoryProperty(SPOT_GEOMETRY_PROPERTY);
-        this.spotGeometry = Geometry.createFromString(spotGeometryText);
-        channelDescriptions = extractChannelDescriptions(properties);
-        thumbnailMaxWidth =
-                PropertyUtils.getInt(properties, THUMBNAIL_MAX_WIDTH_PROPERTY,
-                        DEFAULT_THUMBNAIL_MAX_WIDTH);
-        thumbnailMaxHeight =
-                PropertyUtils.getInt(properties, THUMBNAIL_MAX_HEIGHT_PROPERTY,
-                        DEFAULT_THUMBNAIL_MAX_HEIGHT);
-        generateThumbnails =
-                PropertyUtils.getBoolean(properties, GENERATE_THUMBNAILS_PROPERTY, false);
-        areThumbnailsCompressed =
-                PropertyUtils.getBoolean(properties, COMPRESS_THUMBNAILS_PROPERTY, false);
-        originalDataStorageFormat = getOriginalDataStorageFormat(properties);
-
-        String fileExtractorClass = PropertyUtils.getProperty(properties, FILE_EXTRACTOR_PROPERTY);
-        if (fileExtractorClass != null)
+        if (imageFileExtractor == null)
         {
-            this.imageFileExtractor =
-                    ClassUtils.create(IHCSImageFileExtractor.class, fileExtractorClass, properties);
-            this.deprecatedImageFileExtractor = null;
-        } else
-        {
-            this.imageFileExtractor = null;
-            fileExtractorClass = getMandatoryProperty(DEPRECATED_FILE_EXTRACTOR_PROPERTY);
+            String fileExtractorClass = getMandatoryProperty(DEPRECATED_FILE_EXTRACTOR_PROPERTY);
             this.deprecatedImageFileExtractor =
                     ClassUtils.create(ch.systemsx.cisd.etlserver.IHCSImageFileExtractor.class,
                             fileExtractorClass, properties);
+        } else
+        {
+            this.deprecatedImageFileExtractor = null;
         }
-        this.dataSource = ServiceProvider.getDataSourceProvider().getDataSource(properties);
-        this.currentTransaction = null;
     }
 
-    private static OriginalDataStorageFormat getOriginalDataStorageFormat(
-            final Properties properties)
+    private static final class HCSImageFileAccepter implements IHCSImageFileAccepter
     {
-        String defaultValue = OriginalDataStorageFormat.UNCHANGED.name();
-        String textValue =
-                PropertyUtils.getProperty(properties, ORIGINAL_DATA_STORAGE_FORMAT_PROPERTY,
-                        defaultValue);
-        return OriginalDataStorageFormat.valueOf(textValue.toUpperCase());
-    }
+        private final List<AcquiredPlateImage> images = new ArrayList<AcquiredPlateImage>();
 
-    private final static List<String> tryGetListOfLabels(Properties properties, String propertyKey)
-    {
-        String itemsList = PropertyUtils.getProperty(properties, propertyKey);
-        if (itemsList == null)
-        {
-            return null;
-        }
-        String[] items = itemsList.split(",");
-        for (int i = 0; i < items.length; i++)
-        {
-            items[i] = items[i].trim();
-        }
-        return Arrays.asList(items);
-    }
+        private final File imageFileRootDirectory;
 
-    public final static List<ChannelDescription> extractChannelDescriptions(
-            final Properties properties)
-    {
-        List<String> names = PropertyUtils.tryGetList(properties, CHANNEL_NAMES);
-        List<String> codes = PropertyUtils.tryGetList(properties, CHANNEL_CODES);
-        List<String> labels = tryGetListOfLabels(properties, CHANNEL_LABELS);
-        if (names != null && (codes != null || labels != null))
-        {
-            throw new ConfigurationFailureException(String.format(
-                    "Configure either '%s' or ('%s','%s') but not both.", CHANNEL_NAMES,
-                    CHANNEL_CODES, CHANNEL_LABELS));
-        }
-        if (names != null)
-        {
-            List<ChannelDescription> descriptions = new ArrayList<ChannelDescription>();
-            for (String name : names)
-            {
-                descriptions.add(new ChannelDescription(name));
-            }
-            return descriptions;
-        }
-        if (codes == null || labels == null)
+        private final List<String> channelCodes;
+
+        public HCSImageFileAccepter(File imageFileRootDirectory, List<String> channelCodes)
         {
-            throw new ConfigurationFailureException(String.format(
-                    "Both '%s' and '%s' should be configured", CHANNEL_CODES, CHANNEL_LABELS));
+            this.imageFileRootDirectory = imageFileRootDirectory;
+            this.channelCodes = channelCodes;
         }
-        if (codes.size() != labels.size())
+
+        public final void accept(final int channel, final Location wellLocation,
+                final Location tileLocation, final IFile imageFile)
         {
-            throw new ConfigurationFailureException(String.format(
-                    "Number of configured '%s' should be the same as number of '%s'.",
-                    CHANNEL_CODES, CHANNEL_LABELS));
+            final String imageRelativePath =
+                    FileUtilities.getRelativeFile(imageFileRootDirectory,
+                            new File(imageFile.getPath()));
+            assert imageRelativePath != null : "Image relative path should not be null.";
+            String channelCode = getChannelCodeOrLabel(channelCodes, channel);
+            AcquiredPlateImage imageDesc =
+                    new AcquiredPlateImage(wellLocation, tileLocation, channelCode, null, null,
+                            new RelativeImageReference(imageRelativePath, null, null));
+            images.add(imageDesc);
         }
-        List<ChannelDescription> descriptions = new ArrayList<ChannelDescription>();
-        for (int i = 0; i < codes.size(); i++)
+
+        public List<AcquiredPlateImage> getImages()
         {
-            descriptions.add(new ChannelDescription(codes.get(i), labels.get(i)));
+            return images;
         }
-        return descriptions;
     }
 
-    private IImagingQueryDAO createQuery()
+    // Adapts the old-style (stateful) image extractor to the new stateless IImageFileExtractor interface.
+    private static IImageFileExtractor adapt(
+            final ch.systemsx.cisd.etlserver.IHCSImageFileExtractor extractor,
+            final File imageFileRootDirectory, final List<ChannelDescription> descriptions)
     {
-        return QueryTool.getQuery(dataSource, IImagingQueryDAO.class);
-    }
+        return new IImageFileExtractor()
+            {
+                public HCSImageFileExtractionResult extract(File incomingDataSetDirectory,
+                        DataSetInformation dataSetInformation)
+                {
+                    HCSImageFileAccepter accepter =
+                            new HCSImageFileAccepter(imageFileRootDirectory,
+                                    extractChannelCodes(descriptions));
+                    ch.systemsx.cisd.etlserver.HCSImageFileExtractionResult originalResult =
+                            extractor.process(
+                                    NodeFactory.createDirectoryNode(incomingDataSetDirectory),
+                                    dataSetInformation, accepter);
+                    List<HCSImageFileExtractionResult.Channel> channels =
+                            convert(originalResult.getChannels());
+                    return new HCSImageFileExtractionResult(accepter.getImages(),
+                            asRelativePaths(originalResult.getInvalidFiles()), channels);
+                }
 
-    private final static void checkDataSetInformation(final DataSetInformation dataSetInformation)
-    {
-        assert dataSetInformation != null : "Unspecified data set information";
-        assert dataSetInformation.getSampleIdentifier() != null : "Unspecified sample identifier";
-        final ch.systemsx.cisd.openbis.generic.shared.dto.identifier.ExperimentIdentifier experimentIdentifier =
-                dataSetInformation.getExperimentIdentifier();
-        assert experimentIdentifier != null : "Unspecified experiment identifier";
-        assert dataSetInformation.tryToGetExperiment() != null : "experiment not set";
-        checkExperimentIdentifier(experimentIdentifier);
-    }
+                private List<HCSImageFileExtractionResult.Channel> convert(Set<Channel> channels)
+                {
+                    List<HCSImageFileExtractionResult.Channel> result =
+                            new ArrayList<HCSImageFileExtractionResult.Channel>();
+                    for (Channel channel : channels)
+                    {
+                        result.add(new HCSImageFileExtractionResult.Channel(getChannelCodeOrLabel(
+                                extractChannelCodes(descriptions), channel.getCounter()), null,
+                                channel.getWavelength(), getChannelCodeOrLabel(
+                                        extractChannelLabels(descriptions), channel.getCounter())));
+                    }
+                    return result;
+                }
 
-    private final static void checkExperimentIdentifier(
-            final ch.systemsx.cisd.openbis.generic.shared.dto.identifier.ExperimentIdentifier experimentIdentifier)
-    {
-        assert experimentIdentifier.getSpaceCode() != null : "Space code is null";
-        assert experimentIdentifier.getExperimentCode() != null : "Experiment code is null";
-        assert experimentIdentifier.getProjectCode() != null : "Project code is null";
+                private List<File> asRelativePaths(List<IFile> files)
+                {
+                    List<File> result = new ArrayList<File>();
+                    for (IFile file : files)
+                    {
+                        result.add(new File(file.getPath()));
+                    }
+                    return result;
+                }
+            };
     }
 
-    // ---------------------------------
-
-    private ImageDatasetInfo createImageDatasetInfo(Experiment experiment,
-            DataSetInformation dataSetInformation, List<AcquiredPlateImage> acquiredImages)
+    @Override
+    protected void validateImages(DataSetInformation dataSetInformation, IMailClient mailClient,
+            File incomingDataSetDirectory, HCSImageFileExtractionResult extractionResult)
     {
-        ScreeningContainerDatasetInfo info =
-                ScreeningContainerDatasetInfo.createScreeningDatasetInfo(dataSetInformation);
-        boolean hasImageSeries = hasImageSeries(acquiredImages);
-        return new ImageDatasetInfo(info, spotGeometry.getRows(), spotGeometry.getColumns(),
-                hasImageSeries);
+        HCSImageCheckList imageCheckList = createImageCheckList(dataSetInformation);
+        checkImagesForDuplicates(extractionResult, imageCheckList);
+        if (extractionResult.getInvalidFiles().size() > 0)
+        {
+            throw UserFailureException.fromTemplate("Following invalid files %s have been found.",
+                    CollectionUtils.abbreviate(extractionResult.getInvalidFiles(), 10));
+        }
+        if (extractionResult.getImages().size() == 0)
+        {
+            throw UserFailureException.fromTemplate(
+                    "No extractable files were found inside a dataset '%s'."
+                            + " Have you changed your naming convention?",
+                    incomingDataSetDirectory.getAbsolutePath());
+        }
+        checkCompleteness(imageCheckList, dataSetInformation, incomingDataSetDirectory.getName(),
+                mailClient);
     }
 
-    private boolean hasImageSeries(List<AcquiredPlateImage> images)
+    private static void checkImagesForDuplicates(HCSImageFileExtractionResult extractionResult,
+            HCSImageCheckList imageCheckList)
     {
+        List<AcquiredPlateImage> images = extractionResult.getImages();
         for (AcquiredPlateImage image : images)
         {
-            if (image.tryGetTimePoint() != null || image.tryGetDepth() != null)
-            {
-                return true;
-            }
+            imageCheckList.checkOff(image);
         }
-        return false;
     }
 
     private PlateDimension getPlateGeometry(final DataSetInformation dataSetInformation)
     {
-        return ScreeningContainerDatasetInfo.getPlateGeometry(dataSetInformation);
+        return HCSContainerDatasetInfo.getPlateGeometry(dataSetInformation);
     }
 
-    // ---------------------------------
+    private HCSImageCheckList createImageCheckList(DataSetInformation dataSetInformation)
+    {
+        PlateDimension plateGeometry = getPlateGeometry(dataSetInformation);
+        List<String> channelCodes = new ArrayList<String>();
+        for (ChannelDescription cd : channelDescriptions)
+        {
+            channelCodes.add(cd.getCode());
+        }
+        return new HCSImageCheckList(channelCodes, plateGeometry, spotGeometry);
+    }
 
-    // Although this check should be performed in the BDS library when closing is performed, we set
-    // the complete flag here as we want to inform the registrator about the incompleteness.
     private void checkCompleteness(HCSImageCheckList imageCheckList,
             final DataSetInformation dataSetInformation, final String dataSetFileName,
             final IMailClient mailClientOrNull)
@@ -376,538 +244,38 @@ public final class PlateStorageProcessor extends AbstractStorageProcessor
         }
     }
 
-    public final File storeData(final DataSetInformation dataSetInformation,
-            final ITypeExtractor typeExtractor, final IMailClient mailClient,
-            final File incomingDataSetDirectory, final File rootDirectory)
-    {
-        checkDataSetInformation(dataSetInformation);
-        assert rootDirectory != null : "Root directory can not be null.";
-        assert incomingDataSetDirectory != null : "Incoming data set directory can not be null.";
-        assert typeExtractor != null : "Unspecified IProcedureAndDataTypeExtractor implementation.";
-
-        Experiment experiment = dataSetInformation.tryToGetExperiment();
-        if (experiment == null)
-        {
-            throw new UserFailureException("Experiment unknown for data set " + dataSetInformation);
-        }
-        HCSImageFileExtractionResult extractionResult =
-                extractImages(dataSetInformation, incomingDataSetDirectory);
-
-        validateImages(dataSetInformation, mailClient, incomingDataSetDirectory, extractionResult);
-        List<AcquiredPlateImage> plateImages = extractionResult.getImages();
-
-        File imagesInStoreFolder = moveToStore(incomingDataSetDirectory, rootDirectory);
-
-        processImages(rootDirectory, plateImages, imagesInStoreFolder);
-
-        storeInDatabase(experiment, dataSetInformation, plateImages, extractionResult.getChannels());
-        return rootDirectory;
-    }
-
-    private void processImages(final File rootDirectory, List<AcquiredPlateImage> plateImages,
-            File imagesInStoreFolder)
-    {
-        generateThumbnails(plateImages, rootDirectory, imagesInStoreFolder);
-        String relativeImagesDirectory =
-                packageImagesIfNecessary(rootDirectory, plateImages, imagesInStoreFolder);
-        updateImagesRelativePath(relativeImagesDirectory, plateImages);
-    }
-
-    // returns the prefix which should be added before each image path to create a path relative to
-    // the dataset folder
-    private String packageImagesIfNecessary(final File rootDirectory,
-            List<AcquiredPlateImage> plateImages, File imagesInStoreFolder)
-    {
-        if (originalDataStorageFormat.isHdf5())
-        {
-            File hdf5OriginalContainer = createHdf5OriginalContainer(rootDirectory);
-            boolean isDataCompressed =
-                    originalDataStorageFormat == OriginalDataStorageFormat.HDF5_COMPRESSED;
-            saveInHdf5(imagesInStoreFolder, hdf5OriginalContainer, isDataCompressed);
-            String hdf5ArchivePathPrefix =
-                    hdf5OriginalContainer.getName() + ContentRepository.ARCHIVE_DELIMITER;
-            return hdf5ArchivePathPrefix;
-        } else
-        {
-            return getRelativeImagesDirectory(rootDirectory, imagesInStoreFolder) + "/";
-        }
-    }
-
-    private static File createHdf5OriginalContainer(final File rootDirectory)
-    {
-        return new File(rootDirectory, Constants.HDF5_CONTAINER_ORIGINAL_FILE_NAME);
-    }
-
-    private void saveInHdf5(File sourceFolder, File hdf5DestinationFile, boolean compressFiles)
-    {
-        Hdf5Container container = new Hdf5Container(hdf5DestinationFile);
-        container.runWriterClient(compressFiles,
-                new HierarchicalStructureDuplicatorFileToHdf5.DuplicatorWriterClient(sourceFolder));
-    }
-
-    private File moveToStore(File incomingDataSetDirectory, File rootDirectory)
-    {
-        File originalFolder = getOriginalFolder(rootDirectory);
-        originalFolder.mkdirs();
-        if (originalFolder.exists() == false)
-        {
-            throw new UserFailureException("Cannot create a directory: " + originalFolder);
-        }
-        return moveFileToDirectory(incomingDataSetDirectory, originalFolder);
-
-    }
-
-    // modifies plateImages by setting the path to thumbnails
-    private void generateThumbnails(final List<AcquiredPlateImage> plateImages,
-            final File rootDirectory, final File imagesInStoreFolder)
-    {
-        final File thumbnailsFile =
-                new File(rootDirectory, Constants.HDF5_CONTAINER_THUMBNAILS_FILE_NAME);
-        final String relativeThumbnailFilePath =
-                getRelativeImagesDirectory(rootDirectory, thumbnailsFile);
-
-        if (generateThumbnails)
-        {
-            Hdf5Container container = new Hdf5Container(thumbnailsFile);
-            container.runWriterClient(areThumbnailsCompressed, new Hdf5ThumbnailGenerator(
-                    plateImages, imagesInStoreFolder, thumbnailMaxWidth, thumbnailMaxHeight,
-                    relativeThumbnailFilePath, operationLog));
-        }
-    }
-
-    private void updateImagesRelativePath(String folderPathPrefix,
-            final List<AcquiredPlateImage> plateImages)
-    {
-        for (AcquiredPlateImage plateImage : plateImages)
-        {
-            RelativeImageReference imageReference = plateImage.getImageReference();
-            imageReference.setRelativeImageFolder(folderPathPrefix);
-        }
-    }
-
-    private String getRelativeImagesDirectory(File rootDirectory, File imagesInStoreFolder)
-    {
-        String root = rootDirectory.getAbsolutePath();
-        String imgDir = imagesInStoreFolder.getAbsolutePath();
-        if (imgDir.startsWith(root) == false)
-        {
-            throw UserFailureException.fromTemplate(
-                    "Directory %s should be a subdirectory of directory %s.", imgDir, root);
-        }
-        return imgDir.substring(root.length());
-    }
-
-    private void validateImages(final DataSetInformation dataSetInformation,
-            final IMailClient mailClient, final File incomingDataSetDirectory,
-            HCSImageFileExtractionResult extractionResult)
-    {
-        HCSImageCheckList imageCheckList = createImageCheckList(dataSetInformation);
-        checkImagesForDuplicates(extractionResult, imageCheckList);
-        if (extractionResult.getInvalidFiles().size() > 0)
-        {
-            throw UserFailureException.fromTemplate("Following invalid files %s have been found.",
-                    CollectionUtils.abbreviate(extractionResult.getInvalidFiles(), 10));
-        }
-        if (extractionResult.getImages().size() == 0)
-        {
-            throw UserFailureException.fromTemplate(
-                    "No extractable files were found inside a dataset '%s'."
-                            + " Have you changed your naming convention?",
-                    incomingDataSetDirectory.getAbsolutePath());
-        }
-        checkCompleteness(imageCheckList, dataSetInformation, incomingDataSetDirectory.getName(),
-                mailClient);
-    }
-
-    private static void checkImagesForDuplicates(HCSImageFileExtractionResult extractionResult,
-            HCSImageCheckList imageCheckList)
-    {
-        List<AcquiredPlateImage> images = extractionResult.getImages();
-        for (AcquiredPlateImage image : images)
-        {
-            imageCheckList.checkOff(image);
-        }
-    }
-
-    private HCSImageCheckList createImageCheckList(DataSetInformation dataSetInformation)
-    {
-        PlateDimension plateGeometry = getPlateGeometry(dataSetInformation);
-        List<String> channelCodes = new ArrayList<String>();
-        for (ChannelDescription cd : channelDescriptions)
-        {
-            channelCodes.add(cd.getCode());
-        }
-        return new HCSImageCheckList(channelCodes, plateGeometry, spotGeometry);
-    }
-
-    private HCSImageFileExtractionResult extractImages(final DataSetInformation dataSetInformation,
-            final File incomingDataSetDirectory)
+    @Override
+    protected IImageFileExtractor getImageFileExtractor(File incomingDataSetDirectory)
     {
-        long extractionStart = System.currentTimeMillis();
-        IHCSImageFileExtractor extractor = imageFileExtractor;
+        IImageFileExtractor extractor = imageFileExtractor;
         if (extractor == null)
         {
             extractor =
                     adapt(deprecatedImageFileExtractor, incomingDataSetDirectory,
                             channelDescriptions);
         }
-        final HCSImageFileExtractionResult result =
-                extractor.extract(incomingDataSetDirectory, dataSetInformation);
-
-        if (operationLog.isInfoEnabled())
-        {
-            long duration = System.currentTimeMillis() - extractionStart;
-            operationLog.info(String.format("Extraction of %d files took %s.", result.getImages()
-                    .size(), DurationFormatUtils.formatDurationHMS(duration)));
-        }
-        return result;
+        return extractor;
     }
 
     @Override
-    public void commit(File incomingDataSetDirectory, File storedDataDirectory)
-    {
-        if (originalDataStorageFormat.isHdf5())
-        {
-            commitHdf5StorageFormatChanges(storedDataDirectory);
-        }
-        commitDatabaseChanges();
-    }
-
-    private static void commitHdf5StorageFormatChanges(File storedDataDirectory)
-    {
-        File originalFolder = getOriginalFolder(storedDataDirectory);
-        File hdf5OriginalContainer = createHdf5OriginalContainer(storedDataDirectory);
-        if (hdf5OriginalContainer.exists())
-        {
-            final IFileOperations fileOps = FileOperations.getMonitoredInstanceForCurrentThread();
-            if (fileOps.removeRecursivelyQueueing(originalFolder) == false)
-            {
-                operationLog.error("Cannot delete '" + originalFolder.getAbsolutePath() + "'.");
-            }
-        } else
-        {
-            notificationLog.error(String.format("HDF5 container with original data '%s' does not "
-                    + "exist, keeping the original directory '%s'.", hdf5OriginalContainer,
-                    originalFolder));
-        }
-    }
-
-    private void commitDatabaseChanges()
-    {
-        if (currentTransaction == null)
-        {
-            throw new IllegalStateException("there is no transaction to commit");
-        }
-        try
-        {
-            currentTransaction.close(true);
-        } finally
-        {
-            currentTransaction = null;
-        }
-    }
-
-    public UnstoreDataAction rollback(File incomingDataSetDirectory, File storedDataDirectory,
-            Throwable exception)
-    {
-        unstoreFiles(incomingDataSetDirectory, storedDataDirectory);
-        rollbackDatabaseChanges();
-        return UnstoreDataAction.MOVE_TO_ERROR;
-    }
-
-    private final void unstoreFiles(final File incomingDataSetDirectory,
-            final File storedDataDirectory)
-    {
-        checkParameters(incomingDataSetDirectory, storedDataDirectory);
-
-        final File originalDataFile = tryGetProprietaryData(storedDataDirectory);
-        if (originalDataFile == null)
-        {
-            // nothing has been stored in the file system yet,
-            // e.g. because images could not be validated
-            return;
-        }
-        // Move the data from the 'original' directory back to the 'incoming' directory.
-        final File incomingDirectory = incomingDataSetDirectory.getParentFile();
-        try
-        {
-            moveFileToDirectory(originalDataFile, incomingDirectory);
-            if (operationLog.isInfoEnabled())
-            {
-                operationLog.info(String.format(
-                        "Directory '%s' has moved to incoming directory '%s'.", originalDataFile,
-                        incomingDirectory.getAbsolutePath()));
-            }
-        } catch (final EnvironmentFailureException ex)
-        {
-            notificationLog.error(String.format("Could not move '%s' to incoming directory '%s'.",
-                    originalDataFile, incomingDirectory.getAbsolutePath()), ex);
-            return;
-        }
-        // Remove the dataset directory from the store
-        final IFileOperations fileOps = FileOperations.getMonitoredInstanceForCurrentThread();
-        if (fileOps.exists(incomingDataSetDirectory))
-        {
-            if (fileOps.removeRecursivelyQueueing(storedDataDirectory) == false)
-            {
-                operationLog
-                        .error("Cannot delete '" + storedDataDirectory.getAbsolutePath() + "'.");
-            }
-        } else
-        {
-            notificationLog.error(String.format("Incoming data set directory '%s' does not "
-                    + "exist, keeping store directory '%s'.", incomingDataSetDirectory,
-                    storedDataDirectory));
-        }
-    }
-
-    private void storeInDatabase(Experiment experiment, DataSetInformation dataSetInformation,
-            List<AcquiredPlateImage> acquiredImages,
-            List<HCSImageFileExtractionResult.Channel> channels)
-    {
-        ImageDatasetInfo info =
-                createImageDatasetInfo(experiment, dataSetInformation, acquiredImages);
-
-        if (currentTransaction != null)
-        {
-            throw new IllegalStateException("previous transaction has not been commited!");
-        }
-        currentTransaction = createQuery();
-
-        HCSDatasetUploader.upload(currentTransaction, info, acquiredImages, channels);
-    }
-
-    private void rollbackDatabaseChanges()
-    {
-        if (currentTransaction == null)
-        {
-            return; // storing in the imaging db has not started
-        }
-        try
-        {
-            currentTransaction.rollback();
-        } finally
-        {
-            currentTransaction.close();
-            currentTransaction = null;
-        }
-    }
-
-    /**
-     * Moves source file/folder to the destination directory. If the source is a symbolic links to
-     * the original data then we do not move any data. Instead we create symbolic link to original
-     * data which points to the same place as the source link.
-     * 
-     * @return
-     */
-    private static File moveFileToDirectory(final File source, final File directory)
-            throws EnvironmentFailureException
-    {
-        assert source != null;
-        IFileOperations fileOperations = FileOperations.getMonitoredInstanceForCurrentThread();
-        assert directory != null && fileOperations.isDirectory(directory);
-        final String newName = source.getName();
-        final File destination = new File(directory, newName);
-        if (fileOperations.exists(destination) == false)
-        {
-            if (FileUtilities.isSymbolicLink(source))
-            {
-                moveSymbolicLink(source, destination);
-            } else
-            {
-                final boolean successful = fileOperations.rename(source, destination);
-                if (successful == false)
-                {
-                    throw EnvironmentFailureException.fromTemplate(
-                            "Can not move file '%s' to directory '%s'.", source.getAbsolutePath(),
-                            directory.getAbsolutePath());
-                }
-            }
-            return destination;
-        } else
-        {
-            throw EnvironmentFailureException
-                    .fromTemplate(
-                            "Can not move file '%s' to directory '%s' because the destination directory already exists.",
-                            source.getAbsolutePath(), directory.getAbsolutePath());
-        }
-    }
-
-    // WORKAROUND there were cases where it was impossible to move an absolute symbolic link
-    // It happened on a CIFS share. So instead of moving the link we create a file which points to
-    // the same place and delete the link.
-    private static void moveSymbolicLink(File source, File destination)
-    {
-        File referencedSource;
-        try
-        {
-            referencedSource = source.getCanonicalFile();
-        } catch (IOException ex)
-        {
-            throw new EnvironmentFailureException("cannot get the canonical path of " + source);
-        }
-        boolean ok = SoftLinkMaker.createSymbolicLink(referencedSource, destination);
-        if (ok == false)
-        {
-            throw EnvironmentFailureException.fromTemplate(
-                    "Can not create symbolic link to '%s' in '%s'.", referencedSource.getPath(),
-                    destination.getPath());
-        }
-        ok = source.delete();
-        if (ok == false)
-        {
-            throw EnvironmentFailureException.fromTemplate("Can not delete symbolic link '%s'.",
-                    source.getPath());
-        }
-    }
-
-    public final File tryGetProprietaryData(final File storedDataDirectory)
+    protected void storeInDatabase(IImagingQueryDAO dao, DataSetInformation dataSetInformation,
+            HCSImageFileExtractionResult extractedImages)
     {
-        assert storedDataDirectory != null : "Unspecified stored data directory.";
-
-        File originalFolder = getOriginalFolder(storedDataDirectory);
-        File[] content = originalFolder.listFiles();
-        if (content == null || content.length == 0)
-        {
-            return null;
-        }
-        if (content.length > 1)
-        {
-            operationLog.error("There should be exactly one original folder inside '"
-                    + originalFolder + "', but " + originalFolder.length() + " has been found.");
-            return null;
-        }
-        File originalDataFile = content[0];
-        if (originalDataFile.exists() == false)
-        {
-            operationLog.error("Original data set file '" + originalDataFile.getAbsolutePath()
-                    + "' does not exist.");
-            return null;
-        }
-        return originalDataFile;
-    }
-
-    private static File getOriginalFolder(File storedDataDirectory)
-    {
-        return new File(storedDataDirectory, DIR_ORIGINAL);
-    }
-
-    public final StorageFormat getStorageFormat()
-    {
-        return StorageFormat.PROPRIETARY;
-    }
-
-    private static List<String> extractChannelCodes(final List<ChannelDescription> descriptions)
-    {
-        List<String> channelCodes = new ArrayList<String>();
-        for (ChannelDescription cd : descriptions)
-        {
-            channelCodes.add(cd.getCode());
-        }
-        return channelCodes;
-    }
-
-    private static List<String> extractChannelLabels(final List<ChannelDescription> descriptions)
-    {
-        List<String> channelLabels = new ArrayList<String>();
-        for (ChannelDescription cd : descriptions)
-        {
-            channelLabels.add(cd.getLabel());
-        }
-        return channelLabels;
-    }
-
-    // adapts old-style image extractor to the new one which is stateless
-    private static IHCSImageFileExtractor adapt(
-            final ch.systemsx.cisd.etlserver.IHCSImageFileExtractor extractor,
-            final File imageFileRootDirectory, final List<ChannelDescription> descriptions)
-    {
-        return new IHCSImageFileExtractor()
-            {
-                public HCSImageFileExtractionResult extract(File incomingDataSetDirectory,
-                        DataSetInformation dataSetInformation)
-                {
-                    HCSImageFileAccepter accepter =
-                            new HCSImageFileAccepter(imageFileRootDirectory,
-                                    extractChannelCodes(descriptions));
-                    ch.systemsx.cisd.etlserver.HCSImageFileExtractionResult originalResult =
-                            extractor.process(
-                                    NodeFactory.createDirectoryNode(incomingDataSetDirectory),
-                                    dataSetInformation, accepter);
-                    List<HCSImageFileExtractionResult.Channel> channels =
-                            convert(originalResult.getChannels());
-                    return new HCSImageFileExtractionResult(accepter.getImages(),
-                            asRelativePaths(originalResult.getInvalidFiles()), channels);
-                }
-
-                private List<HCSImageFileExtractionResult.Channel> convert(Set<Channel> channels)
-                {
-                    List<HCSImageFileExtractionResult.Channel> result =
-                            new ArrayList<HCSImageFileExtractionResult.Channel>();
-                    for (Channel channel : channels)
-                    {
-                        result.add(new HCSImageFileExtractionResult.Channel(getChannelCodeOrLabel(
-                                extractChannelCodes(descriptions), channel.getCounter()), null,
-                                channel.getWavelength(), getChannelCodeOrLabel(
-                                        extractChannelLabels(descriptions), channel.getCounter())));
-                    }
-                    return result;
-                }
-
-                private List<File> asRelativePaths(List<IFile> files)
-                {
-                    List<File> result = new ArrayList<File>();
-                    for (IFile file : files)
-                    {
-                        result.add(new File(file.getPath()));
-                    }
-                    return result;
-                }
-            };
-    }
+        Experiment experiment = dataSetInformation.tryToGetExperiment();
+        assert experiment != null : "experiment is null";
+        List<AcquiredPlateImage> images = extractedImages.getImages();
+        HCSImageDatasetInfo info = createImageDatasetInfo(experiment, dataSetInformation, images);
 
-    private static String getChannelCodeOrLabel(final List<String> channelCodes, int channelId)
-    {
-        if (channelId > channelCodes.size())
-        {
-            throw UserFailureException.fromTemplate(
-                    "Too large channel number %d, configured channels: %s.", channelId,
-                    CollectionUtils.abbreviate(channelCodes, -1));
-        }
-        return channelCodes.get(channelId - 1);
+        HCSImageDatasetUploader.upload(dao, info, images, extractedImages.getChannels());
     }
 
-    private static final class HCSImageFileAccepter implements IHCSImageFileAccepter
+    private HCSImageDatasetInfo createImageDatasetInfo(Experiment experiment,
+            DataSetInformation dataSetInformation, List<AcquiredPlateImage> acquiredImages)
     {
-        private final List<AcquiredPlateImage> images = new ArrayList<AcquiredPlateImage>();
-
-        private final File imageFileRootDirectory;
-
-        private final List<String> channelCodes;
-
-        public HCSImageFileAccepter(File imageFileRootDirectory, List<String> channelCodes)
-        {
-            this.imageFileRootDirectory = imageFileRootDirectory;
-            this.channelCodes = channelCodes;
-        }
-
-        public final void accept(final int channel, final Location wellLocation,
-                final Location tileLocation, final IFile imageFile)
-        {
-            final String imageRelativePath =
-                    FileUtilities.getRelativeFile(imageFileRootDirectory,
-                            new File(imageFile.getPath()));
-            assert imageRelativePath != null : "Image relative path should not be null.";
-            String channelCode = getChannelCodeOrLabel(channelCodes, channel);
-            AcquiredPlateImage imageDesc =
-                    new AcquiredPlateImage(wellLocation, tileLocation, channelCode, null, null,
-                            new RelativeImageReference(imageRelativePath, null, null));
-            images.add(imageDesc);
-        }
-
-        public List<AcquiredPlateImage> getImages()
-        {
-            return images;
-        }
+        HCSContainerDatasetInfo info =
+                HCSContainerDatasetInfo.createScreeningDatasetInfo(dataSetInformation);
+        boolean hasImageSeries = hasImageSeries(acquiredImages);
+        return new HCSImageDatasetInfo(info, spotGeometry.getRows(), spotGeometry.getColumns(),
+                hasImageSeries);
     }
 }
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ScreeningContainerDatasetInfoHelper.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ScreeningContainerDatasetInfoHelper.java
deleted file mode 100644
index fb279e1cbef6b2293a97bae2786b17fc1cde0c40..0000000000000000000000000000000000000000
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/ScreeningContainerDatasetInfoHelper.java
+++ /dev/null
@@ -1,328 +0,0 @@
-/*
- * Copyright 2010 ETH Zuerich, CISD
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package ch.systemsx.cisd.openbis.dss.etl;
-
-import java.util.HashMap;
-import java.util.List;
-import java.util.Map;
-
-import ch.systemsx.cisd.common.exceptions.UserFailureException;
-import ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractionResult.Channel;
-import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingQueryDAO;
-import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgChannelDTO;
-import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgContainerDTO;
-import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgDatasetDTO;
-import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgExperimentDTO;
-
-/**
- * Helper class for retrieving and/or creating entities associated with the screening container data
- * set info in the DB.
- * 
- * @author Chandrasekhar Ramakrishnan
- */
-public class ScreeningContainerDatasetInfoHelper
-{
-    private final IImagingQueryDAO dao;
-
-    public ScreeningContainerDatasetInfoHelper(IImagingQueryDAO dao)
-    {
-        this.dao = dao;
-    }
-
-    public ExperimentAndContainerIds getOrCreateExperimentAndContainer(
-            ScreeningContainerDatasetInfo info)
-    {
-        return getOrCreateExperimentAndContainer(dao, info);
-    }
-
-    public long createFeatureVectorDataset(long contId, ScreeningContainerDatasetInfo info)
-    {
-        boolean isMultidimensional = false;
-        ImgDatasetDTO dataset =
-                new ImgDatasetDTO(info.getDatasetPermId(), 0, 0, contId, isMultidimensional);
-        return dao.addDataset(dataset);
-    }
-
-    // Package-visible static methods
-
-    public static long createImageDataset(IImagingQueryDAO dao, ImageDatasetInfo info, long contId)
-    {
-        ImgDatasetDTO dataset =
-                new ImgDatasetDTO(info.getDatasetPermId(), info.getTileRows(),
-                        info.getTileColumns(), contId, info.hasImageSeries());
-        return dao.addDataset(dataset);
-    }
-
-    /**
-     * NOTE: Code responsible for trying to get sample and experiment from the DB and creating them
-     * if they don't exist is in synchronized block and uses currently opened transaction. Then the
-     * transaction is closed and data set is added to the DB in second transaction. If second
-     * transaction will be rolled back sample and experiment created in first transaction will stay
-     * in the DB.
-     */
-    public static ExperimentAndContainerIds getOrCreateExperimentAndContainer(IImagingQueryDAO dao,
-            ScreeningContainerDatasetInfo info)
-    {
-        synchronized (IImagingQueryDAO.class)
-        {
-            CreatedOrFetchedEntity exp = getOrCreateExperiment(dao, info);
-            CreatedOrFetchedEntity cont = getOrCreateContainer(dao, info, exp.getId());
-            if (exp.hasAlreadyExisted() == false || cont.hasAlreadyExisted() == false)
-            {
-                // without this commit other threads will not see the new experiment/sample when the
-                // synchronized block ends
-                dao.commit();
-            }
-            return new ExperimentAndContainerIds(exp.getId(), cont.getId());
-        }
-    }
-
-    /**
-     * NOTE: Code responsible for trying to get sample and experiment from the DB and creating them
-     * if they don't exist is in synchronized block and uses currently opened transaction. Then the
-     * transaction is closed and data set is added to the DB in second transaction. If second
-     * transaction will be rolled back sample and experiment created in first transaction will stay
-     * in the DB.
-     */
-    public static ExperimentWithChannelsAndContainer getOrCreateExperimentWithChannelsAndContainer(
-            IImagingQueryDAO dao, ScreeningContainerDatasetInfo info,
-            List<HCSImageFileExtractionResult.Channel> channels)
-    {
-        ScreeningContainerDatasetInfoHelper helper = new ScreeningContainerDatasetInfoHelper(dao);
-        synchronized (IImagingQueryDAO.class)
-        {
-            CreatedOrFetchedEntity exp = getOrCreateExperiment(dao, info);
-            long expId = exp.getId();
-            CreatedOrFetchedEntity cont = getOrCreateContainer(dao, info, expId);
-            Map<String, Long/* (tech id */> channelsMap =
-                    helper.getOrCreateChannels(expId, channels);
-            if (exp.hasAlreadyExisted() == false || cont.hasAlreadyExisted() == false)
-            {
-                // without this commit other threads will not see the new experiment/sample when the
-                // synchronized block ends
-                dao.commit();
-            }
-            return new ExperimentWithChannelsAndContainer(expId, cont.getId(), channelsMap);
-        }
-    }
-
-    private static CreatedOrFetchedEntity getOrCreateContainer(IImagingQueryDAO dao,
-            ScreeningContainerDatasetInfo info, long expId)
-    {
-        String containerPermId = info.getContainerPermId();
-        Long containerId = dao.tryGetContainerIdPermId(containerPermId);
-        if (containerId != null)
-        {
-            return new CreatedOrFetchedEntity(true, containerId);
-        } else
-        {
-            ImgContainerDTO container =
-                    new ImgContainerDTO(containerPermId, info.getContainerRows(),
-                            info.getContainerColumns(), expId);
-            containerId = dao.addContainer(container);
-            return new CreatedOrFetchedEntity(false, containerId);
-        }
-    }
-
-    private static CreatedOrFetchedEntity getOrCreateExperiment(IImagingQueryDAO dao,
-            ScreeningContainerDatasetInfo info)
-    {
-        String experimentPermId = info.getExperimentPermId();
-        ImgExperimentDTO experiment = dao.tryGetExperimentByPermId(experimentPermId);
-        if (experiment != null)
-        {
-            return new CreatedOrFetchedEntity(true, experiment.getId());
-        } else
-        {
-            Long expId = dao.addExperiment(experimentPermId);
-            return new CreatedOrFetchedEntity(false, expId);
-        }
-    }
-
-    private static class CreatedOrFetchedEntity
-    {
-        private final boolean alreadyExisted;
-
-        private final long id;
-
-        public CreatedOrFetchedEntity(boolean alreadyExisted, long id)
-        {
-            this.alreadyExisted = alreadyExisted;
-            this.id = id;
-        }
-
-        public boolean hasAlreadyExisted()
-        {
-            return alreadyExisted;
-        }
-
-        public long getId()
-        {
-            return id;
-        }
-    }
-
-    public static class ExperimentAndContainerIds
-    {
-        private final long experimentId;
-
-        private final long containerId;
-
-        public ExperimentAndContainerIds(long experimentId, long containerId)
-        {
-            this.experimentId = experimentId;
-            this.containerId = containerId;
-        }
-
-        public long getExperimentId()
-        {
-            return experimentId;
-        }
-
-        public long getContainerId()
-        {
-            return containerId;
-        }
-    }
-
-    public static class ExperimentWithChannelsAndContainer extends ExperimentAndContainerIds
-    {
-        private final Map<String, Long/* (tech id */> channelsMap;
-
-        public ExperimentWithChannelsAndContainer(long experimentId, long containerId,
-                Map<String, Long> channelsMap)
-        {
-            super(experimentId, containerId);
-            this.channelsMap = channelsMap;
-        }
-
-        public Map<String, Long> getChannelsMap()
-        {
-            return channelsMap;
-        }
-    }
-
-    // ------ channels creation ------------------------------
-
-    private Map<String, Long> getOrCreateChannels(long expId,
-            List<HCSImageFileExtractionResult.Channel> channels)
-    {
-        List<ImgChannelDTO> allChannels = dao.getChannelsByExperimentId(expId);
-        if (allChannels.size() == 0)
-        {
-            return createChannels(expId, channels);
-        } else
-        {
-            return updateChannels(expId, channels, allChannels);
-        }
-    }
-
-    private Map<String, Long> updateChannels(long expId, List<Channel> channels,
-            List<ImgChannelDTO> allChannels)
-    {
-        Map<String/* name */, ImgChannelDTO> existingChannels = asNameMap(allChannels);
-        Map<String, Long> map = new HashMap<String, Long>();
-        for (HCSImageFileExtractionResult.Channel channel : channels)
-        {
-            ImgChannelDTO channelDTO = updateChannel(channel, expId, existingChannels);
-            addChannel(map, channelDTO);
-        }
-        return map;
-    }
-
-    private Map<String, Long> createChannels(long expId, List<Channel> channels)
-    {
-        Map<String, Long> map = new HashMap<String, Long>();
-        for (HCSImageFileExtractionResult.Channel channel : channels)
-        {
-            ImgChannelDTO channelDTO = createChannel(expId, channel);
-            addChannel(map, channelDTO);
-        }
-        return map;
-    }
-
-    private static void addChannel(Map<String, Long> map, ImgChannelDTO channelDTO)
-    {
-        map.put(channelDTO.getCode(), channelDTO.getId());
-    }
-
-    private static Map<String, ImgChannelDTO> asNameMap(List<ImgChannelDTO> channels)
-    {
-        Map<String, ImgChannelDTO> nameMap = new HashMap<String, ImgChannelDTO>();
-        for (ImgChannelDTO channel : channels)
-        {
-            nameMap.put(channel.getCode(), channel);
-        }
-        return nameMap;
-    }
-
-    private ImgChannelDTO updateChannel(HCSImageFileExtractionResult.Channel channel, long expId,
-            Map<String, ImgChannelDTO> existingChannels)
-    {
-        ImgChannelDTO channelDTO = makeChannelDTO(channel, expId);
-        String channelCode = channelDTO.getCode();
-        ImgChannelDTO existingChannel = existingChannels.get(channelCode);
-        if (existingChannel == null)
-        {
-            throw createInvalidNewChannelException(expId, existingChannels, channelCode);
-        }
-        // a channel with a specified name already exists for an experiment, its description
-        // will be updated. Wavelength will be updated only if it was null before.
-        if (channelDTO.getWavelength() == null)
-        {
-            channelDTO.setWavelength(existingChannel.getWavelength());
-        }
-        if (existingChannel.getWavelength() != null
-                && existingChannel.getWavelength().equals(channelDTO.getWavelength()) == false)
-        {
-            throw UserFailureException.fromTemplate(
-                    "There are already datasets registered for the experiment "
-                            + "which use the same channel code, but with a different wavelength! "
-                            + "Channel %s, old wavelength %d, new wavelength %d.", channelCode,
-                    existingChannel.getWavelength(), channelDTO.getWavelength());
-        }
-        channelDTO.setId(existingChannel.getId());
-        dao.updateChannel(channelDTO);
-        return channelDTO;
-    }
-
-    private static UserFailureException createInvalidNewChannelException(long expId,
-            Map<String, ImgChannelDTO> existingChannels, String channelName)
-    {
-        return UserFailureException.fromTemplate(
-                "Experiment with id '%d' has already some channels registered "
-                        + "and does not have a channel with a code '%s'. "
-                        + "Register a new experiment to use new channels. "
-                        + "Available channel names in this experiment: %s.", expId, channelName,
-                existingChannels.keySet());
-    }
-
-    private ImgChannelDTO createChannel(long expId, HCSImageFileExtractionResult.Channel channel)
-    {
-        ImgChannelDTO channelDTO = makeChannelDTO(channel, expId);
-        long channelId = dao.addChannel(channelDTO);
-        channelDTO.setId(channelId);
-        return channelDTO;
-    }
-
-    private static ImgChannelDTO makeChannelDTO(HCSImageFileExtractionResult.Channel channel,
-            long expId)
-    {
-        return ImgChannelDTO.createExperimentChannel(channel.getCode(),
-                channel.tryGetDescription(), channel.tryGetWavelength(), expId, channel.getLabel());
-    }
-}
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/UnparsedImageFileInfoLexer.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/UnparsedImageFileInfoLexer.java
index 61c8a149af9dba3c39bbb10e6f766a0b2d116c25..f32bcbc8b68a80322c1b9fc00f867b1fafe0ee62 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/UnparsedImageFileInfoLexer.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/UnparsedImageFileInfoLexer.java
@@ -24,12 +24,13 @@ import org.apache.commons.io.FilenameUtils;
 import org.apache.commons.lang.StringUtils;
 
 import ch.rinn.restrictions.Private;
-import ch.systemsx.cisd.openbis.dss.etl.AbstractHCSImageFileExtractor.UnparsedImageFileInfo;
+import ch.systemsx.cisd.openbis.dss.etl.dto.UnparsedImageFileInfo;
 
 /**
- * Utility to parse information about the image from its name. Assumes that tokens are separated by
- * '_'. The first letter of the token tells which type of information the token contains. <br>
- * The convention is compatible with the one adopted in Biozentrum.
+ * Utility to parse information about the image from its file name. Assumes that tokens are
+ * separated by '_'. The first letter of the token tells which type of information the token
+ * contains. <br>
+ * The convention is compatible with the one adopted in Biozentrum by iBrain2.
  * 
  * <pre>
  * example: bDZ01-1A_wD17_s3_z0_t0_cGFP.tif
@@ -58,9 +59,40 @@ public class UnparsedImageFileInfoLexer
 
     private static final char TIME_MARKER = 't';
 
-    public static UnparsedImageFileInfo extractImageFileInfo(File imageFile)
+    public static UnparsedImageFileInfo tryExtractHCSImageFileInfo(File imageFile,
+            File incomingDataSetPath)
     {
-        return extractImageFileInfo(FilenameUtils.getBaseName(imageFile.getPath()));
+        UnparsedImageFileInfo info = tryExtractImageFileInfo(imageFile, incomingDataSetPath);
+        if (info.getWellLocationToken() == null || info.getTileLocationToken() == null
+                || info.getChannelToken() == null)
+        {
+            return null;
+        }
+        return info;
+    }
+
+    public static UnparsedImageFileInfo tryExtractMicroscopyImageFileInfo(File imageFile,
+            File incomingDataSetPath)
+    {
+        UnparsedImageFileInfo info = tryExtractImageFileInfo(imageFile, incomingDataSetPath);
+        if (info.getTileLocationToken() == null || info.getChannelToken() == null)
+        {
+            return null;
+        }
+        return info;
+    }
+
+    private static UnparsedImageFileInfo tryExtractImageFileInfo(File imageFile,
+            File incomingDataSetPath)
+    {
+        UnparsedImageFileInfo info = extractImageFileInfo(getFileBaseName(imageFile));
+
+        return info;
+    }
+
+    private static String getFileBaseName(File imageFile)
+    {
+        return FilenameUtils.getBaseName(imageFile.getPath());
     }
 
     /**
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/biozentrum/BZDataSetInfoExtractor.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/biozentrum/BZDataSetInfoExtractor.java
index b56db31ce7505494fed90146bb5a768187f53be1..7450afa0d8ae753af7ea990943cfbfa3c56f5818 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/biozentrum/BZDataSetInfoExtractor.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/biozentrum/BZDataSetInfoExtractor.java
@@ -31,6 +31,7 @@ import ch.systemsx.cisd.common.utilities.PropertyUtils;
 import ch.systemsx.cisd.etlserver.IDataSetInfoExtractor;
 import ch.systemsx.cisd.openbis.dss.etl.ImageFileExtractorUtils;
 import ch.systemsx.cisd.openbis.dss.etl.UnparsedImageFileInfoLexer;
+import ch.systemsx.cisd.openbis.dss.etl.dto.UnparsedImageFileInfo;
 import ch.systemsx.cisd.openbis.dss.generic.shared.IEncapsulatedOpenBISService;
 import ch.systemsx.cisd.openbis.dss.generic.shared.dto.DataSetInformation;
 import ch.systemsx.cisd.openbis.generic.shared.basic.dto.DataType;
@@ -229,10 +230,14 @@ public class BZDataSetInfoExtractor implements IDataSetInfoExtractor
         List<Location> plateLocations = new ArrayList<Location>();
         for (File imageFile : imageFiles)
         {
-            String plateLocationToken =
-                    UnparsedImageFileInfoLexer.extractImageFileInfo(imageFile).getWellLocationToken();
-            plateLocations.add(Location
-                    .tryCreateLocationFromTransposedMatrixCoordinate(plateLocationToken));
+            UnparsedImageFileInfo imageInfo =
+                    UnparsedImageFileInfoLexer.tryExtractHCSImageFileInfo(imageFile, incomingDataSetPath);
+            if (imageInfo != null)
+            {
+                String wellLocationToken = imageInfo.getWellLocationToken();
+                plateLocations.add(Location
+                        .tryCreateLocationFromTransposedMatrixCoordinate(wellLocationToken));
+            }
         }
         return plateLocations;
     }
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/dto/ImageFileInfo.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/dto/ImageFileInfo.java
new file mode 100644
index 0000000000000000000000000000000000000000..12ff5b1cac3cf08d9527d3943417a023a2860449
--- /dev/null
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/dto/ImageFileInfo.java
@@ -0,0 +1,82 @@
+package ch.systemsx.cisd.openbis.dss.etl.dto;
+
+import ch.systemsx.cisd.bds.hcs.Location;
+
+/**
+ * DTO with information about one image file.
+ * 
+ * @author Tomasz Pylak
+ */
+public final class ImageFileInfo
+{
+    private final Location wellLocationOrNull;
+
+    private final Location tileLocation;
+
+    private String channelCode;
+
+    private final String imageRelativePath;
+
+    private final Float timepointOrNull;
+
+    private final Float depthOrNull;
+
+    public ImageFileInfo(Location wellLocationOrNull, String channelCode, Location tileLocation,
+            String imageRelativePath, Float timepointOrNull, Float depthOrNull)
+    {
+        assert channelCode != null;
+        assert tileLocation != null;
+        assert imageRelativePath != null;
+
+        this.wellLocationOrNull = wellLocationOrNull;
+        this.channelCode = channelCode;
+        this.tileLocation = tileLocation;
+        this.imageRelativePath = imageRelativePath;
+        this.timepointOrNull = timepointOrNull;
+        this.depthOrNull = depthOrNull;
+    }
+
+    public Location tryGetWellLocation()
+    {
+        return wellLocationOrNull;
+    }
+
+    public Location getTileLocation()
+    {
+        return tileLocation;
+    }
+
+    public String getChannelCode()
+    {
+        return channelCode;
+    }
+
+    public String getImageRelativePath()
+    {
+        return imageRelativePath;
+    }
+
+    public Float tryGetTimepoint()
+    {
+        return timepointOrNull;
+    }
+
+    public Float tryGetDepth()
+    {
+        return depthOrNull;
+    }
+
+    public void setChannelCode(String channelCode)
+    {
+        this.channelCode = channelCode;
+    }
+
+    @Override
+    public String toString()
+    {
+        return "ImageFileInfo [well=" + wellLocationOrNull + ", tile=" + tileLocation
+                + ", channel=" + channelCode + ", path=" + imageRelativePath + ", timepoint="
+                + timepointOrNull + ", depth=" + depthOrNull + "]";
+    }
+
+}
\ No newline at end of file
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/dto/UnparsedImageFileInfo.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/dto/UnparsedImageFileInfo.java
new file mode 100644
index 0000000000000000000000000000000000000000..df02548da37b63d90c25dd03f385d7b9bd437bff
--- /dev/null
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/dto/UnparsedImageFileInfo.java
@@ -0,0 +1,78 @@
+package ch.systemsx.cisd.openbis.dss.etl.dto;
+
+import ch.systemsx.cisd.common.utilities.AbstractHashable;
+
+/**
+ * Intermediate DTO containing the raw file-name tokens from which an {@link ImageFileInfo} can be
+ * parsed when needed.
+ * 
+ * @author Tomasz Pylak
+ */
+public class UnparsedImageFileInfo extends AbstractHashable
+{
+    // can be null
+    private String wellLocationToken;
+
+    private String tileLocationToken;
+
+    private String channelToken;
+
+    // can be null
+    private String timepointToken;
+
+    // can be null
+    private String depthToken;
+
+    /** can be null */
+    public String getWellLocationToken()
+    {
+        return wellLocationToken;
+    }
+
+    public void setWellLocationToken(String wellLocationToken)
+    {
+        this.wellLocationToken = wellLocationToken;
+    }
+
+    public String getTileLocationToken()
+    {
+        return tileLocationToken;
+    }
+
+    public void setTileLocationToken(String tileLocationToken)
+    {
+        this.tileLocationToken = tileLocationToken;
+    }
+
+    public String getChannelToken()
+    {
+        return channelToken;
+    }
+
+    public void setChannelToken(String channelToken)
+    {
+        this.channelToken = channelToken;
+    }
+
+    /** can be null */
+    public String getTimepointToken()
+    {
+        return timepointToken;
+    }
+
+    public void setTimepointToken(String timepointToken)
+    {
+        this.timepointToken = timepointToken;
+    }
+
+    /** can be null */
+    public String getDepthToken()
+    {
+        return depthToken;
+    }
+
+    public void setDepthToken(String depthToken)
+    {
+        this.depthToken = depthToken;
+    }
+}
\ No newline at end of file
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/dynamix/HCSImageFileExtractor.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/dynamix/HCSImageFileExtractor.java
index 7b3e80c9f47857129b65fa1054fcf2144e1d2797..3802e195bcbce39dc9eaadb64636fdf7936684d3 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/dynamix/HCSImageFileExtractor.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/dynamix/HCSImageFileExtractor.java
@@ -22,7 +22,6 @@ import java.text.SimpleDateFormat;
 import java.util.Arrays;
 import java.util.Date;
 import java.util.HashMap;
-import java.util.List;
 import java.util.Map;
 import java.util.Properties;
 
@@ -32,12 +31,10 @@ import org.apache.commons.lang.StringUtils;
 import ch.rinn.restrictions.Private;
 import ch.systemsx.cisd.bds.hcs.Location;
 import ch.systemsx.cisd.common.exceptions.EnvironmentFailureException;
-import ch.systemsx.cisd.openbis.dss.etl.AbstractHCSImageFileExtractor;
-import ch.systemsx.cisd.openbis.dss.etl.AcquiredPlateImage;
-import ch.systemsx.cisd.openbis.dss.etl.HCSImageFileExtractionResult.Channel;
+import ch.systemsx.cisd.openbis.dss.etl.AbstractImageFileExtractor;
+import ch.systemsx.cisd.openbis.dss.etl.dto.ImageFileInfo;
 import ch.systemsx.cisd.openbis.dss.etl.dynamix.WellLocationMappingUtils.DynamixWellPosition;
 import ch.systemsx.cisd.openbis.generic.shared.dto.identifier.SampleIdentifier;
-import ch.systemsx.cisd.openbis.plugin.screening.shared.basic.dto.ChannelDescription;
 import ch.systemsx.cisd.openbis.plugin.screening.shared.basic.dto.WellLocation;
 
 /**
@@ -45,14 +42,12 @@ import ch.systemsx.cisd.openbis.plugin.screening.shared.basic.dto.WellLocation;
  * 
  * @author Tomasz Pylak
  */
-public class HCSImageFileExtractor extends AbstractHCSImageFileExtractor
+public class HCSImageFileExtractor extends AbstractImageFileExtractor
 {
     private static final String DYNAMIX_TOKEN_SEPARATOR = "_";
 
     private static final String POSITION_MAPPING_FILE_NAME = "pos2loc.tsv";
 
-    private final List<ChannelDescription> channelDescriptions;
-
     private final Map<File/* mapping file */, Map<DynamixWellPosition, WellLocation>> wellLocationMapCache;
 
     // date when the first timepoint image has been acquired
@@ -61,25 +56,10 @@ public class HCSImageFileExtractor extends AbstractHCSImageFileExtractor
     public HCSImageFileExtractor(final Properties properties)
     {
         super(properties);
-        this.channelDescriptions = tryExtractChannelDescriptions(properties);
         this.wellLocationMapCache = new HashMap<File, Map<DynamixWellPosition, WellLocation>>();
         this.firstMeasurementDateCache = new HashMap<File, Date>();
     }
 
-    @Override
-    protected final List<Channel> getAllChannels()
-    {
-        return createChannels(channelDescriptions);
-    }
-
-    @Override
-    protected final List<AcquiredPlateImage> getImages(ImageFileInfo imageInfo)
-    {
-        ensureChannelExist(channelDescriptions, imageInfo.getChannelCode());
-
-        return getDefaultImages(imageInfo);
-    }
-
     @Override
     protected ImageFileInfo tryExtractImageInfo(File imageFile, File incomingDataSetDirectory,
             SampleIdentifier datasetSample)
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/AbstractFeatureVectorMigrator.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/AbstractFeatureVectorMigrator.java
index f0070d06defc31fa2d3d793666ba40d29033bd2c..436f02bf13baf5437f6aa6a847f90c90a7b49c51 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/AbstractFeatureVectorMigrator.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/AbstractFeatureVectorMigrator.java
@@ -28,7 +28,7 @@ import net.lemnik.eodsql.QueryTool;
 
 import ch.systemsx.cisd.common.exceptions.EnvironmentFailureException;
 import ch.systemsx.cisd.etlserver.plugins.IMigrator;
-import ch.systemsx.cisd.openbis.dss.etl.ScreeningContainerDatasetInfo;
+import ch.systemsx.cisd.openbis.dss.etl.HCSContainerDatasetInfo;
 import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingQueryDAO;
 import ch.systemsx.cisd.openbis.dss.generic.shared.IEncapsulatedOpenBISService;
 import ch.systemsx.cisd.openbis.dss.generic.shared.ServiceProvider;
@@ -116,16 +116,16 @@ public abstract class AbstractFeatureVectorMigrator implements IMigrator
     protected abstract AbstractMigrationDecision createMigrationDecision(File dataset);
 
     protected abstract AbstractImageDbImporter createImporter(
-            ScreeningContainerDatasetInfo dataSetInfo, File fileToMigrate);
+            HCSContainerDatasetInfo dataSetInfo, File fileToMigrate);
 
-    private ScreeningContainerDatasetInfo createScreeningDatasetInfo(
+    private HCSContainerDatasetInfo createScreeningDatasetInfo(
             SimpleDataSetInformationDTO dataSetInfo)
     {
         Sample sample = findSampleCodeForDataSet(dataSetInfo);
         assert sample != null : "no sample connected to a dataset";
 
         Experiment experiment = sample.getExperiment();
-        ScreeningContainerDatasetInfo info = new ScreeningContainerDatasetInfo();
+        HCSContainerDatasetInfo info = new HCSContainerDatasetInfo();
         info.setExperimentPermId(experiment.getPermId());
         info.setContainerPermId(sample.getPermId());
         info.setDatasetPermId(dataSetInfo.getDataSetCode());
@@ -255,14 +255,14 @@ public abstract class AbstractFeatureVectorMigrator implements IMigrator
     {
         protected final IImagingQueryDAO dao;
 
-        protected final ScreeningContainerDatasetInfo screeningDataSetInfo;
+        protected final HCSContainerDatasetInfo screeningDataSetInfo;
 
         protected final File fileToMigrate;
 
         protected boolean isSuccessful = false;
 
         protected AbstractImageDbImporter(IImagingQueryDAO dao,
-                ScreeningContainerDatasetInfo screeningDataSetInfo, File fileToMigrate)
+                HCSContainerDatasetInfo screeningDataSetInfo, File fileToMigrate)
         {
             this.dao = dao;
             this.screeningDataSetInfo = screeningDataSetInfo;
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/CsvFeatureVectorMigrator.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/CsvFeatureVectorMigrator.java
index 1ebcb450341cce6235723e695b339cb5b1a39b5b..06484e4911aac44662053ca24c5c5b8a6361e58b 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/CsvFeatureVectorMigrator.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/CsvFeatureVectorMigrator.java
@@ -24,7 +24,7 @@ import java.util.Properties;
 
 import ch.systemsx.cisd.base.exceptions.IOExceptionUnchecked;
 import ch.systemsx.cisd.etlserver.DefaultStorageProcessor;
-import ch.systemsx.cisd.openbis.dss.etl.ScreeningContainerDatasetInfo;
+import ch.systemsx.cisd.openbis.dss.etl.HCSContainerDatasetInfo;
 import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingQueryDAO;
 import ch.systemsx.cisd.openbis.dss.etl.featurevector.CsvToCanonicalFeatureVector.CsvToCanonicalFeatureVectorConfiguration;
 import ch.systemsx.cisd.openbis.dss.generic.server.plugins.tasks.DatasetFileLines;
@@ -53,7 +53,7 @@ public class CsvFeatureVectorMigrator extends AbstractFeatureVectorMigrator
     }
 
     @Override
-    protected AbstractImageDbImporter createImporter(ScreeningContainerDatasetInfo dataSetInfo,
+    protected AbstractImageDbImporter createImporter(HCSContainerDatasetInfo dataSetInfo,
             File fileToMigrate)
     {
         AbstractImageDbImporter importer;
@@ -120,7 +120,7 @@ public class CsvFeatureVectorMigrator extends AbstractFeatureVectorMigrator
         private final CsvToCanonicalFeatureVectorConfiguration convertorConfig;
 
         protected ImporterCsv(IImagingQueryDAO dao,
-                ScreeningContainerDatasetInfo screeningDataSetInfo, File fileToMigrate,
+                HCSContainerDatasetInfo screeningDataSetInfo, File fileToMigrate,
                 FeatureVectorStorageProcessorConfiguration configuration,
                 CsvToCanonicalFeatureVectorConfiguration convertorConfig)
         {
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorStorageProcessor.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorStorageProcessor.java
index 326cd5db600405c425618e15b24d7090387f8822..a06b436e9595c3f584c74a696c3b2f7aead38f75 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorStorageProcessor.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorStorageProcessor.java
@@ -30,7 +30,7 @@ import ch.systemsx.cisd.common.exceptions.UserFailureException;
 import ch.systemsx.cisd.common.mail.IMailClient;
 import ch.systemsx.cisd.etlserver.AbstractDelegatingStorageProcessor;
 import ch.systemsx.cisd.etlserver.ITypeExtractor;
-import ch.systemsx.cisd.openbis.dss.etl.ScreeningContainerDatasetInfo;
+import ch.systemsx.cisd.openbis.dss.etl.HCSContainerDatasetInfo;
 import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingQueryDAO;
 import ch.systemsx.cisd.openbis.dss.etl.featurevector.CsvToCanonicalFeatureVector.CsvToCanonicalFeatureVectorConfiguration;
 import ch.systemsx.cisd.openbis.dss.generic.server.plugins.tasks.DatasetFileLines;
@@ -98,7 +98,7 @@ public class FeatureVectorStorageProcessor extends AbstractDelegatingStorageProc
     private void loadDataSetIntoDatabase(File dataSet, DataSetInformation dataSetInformation)
             throws IOException
     {
-        ScreeningContainerDatasetInfo datasetInfo = createScreeningDatasetInfo(dataSetInformation);
+        HCSContainerDatasetInfo datasetInfo = createScreeningDatasetInfo(dataSetInformation);
         DatasetFileLines fileLines = getDatasetFileLines(dataSet);
         CsvToCanonicalFeatureVector convertor =
                 new CsvToCanonicalFeatureVector(fileLines, convertorConfig, datasetInfo
@@ -110,11 +110,11 @@ public class FeatureVectorStorageProcessor extends AbstractDelegatingStorageProc
         uploader.uploadFeatureVectors(fvecs);
     }
 
-    private ScreeningContainerDatasetInfo createScreeningDatasetInfo(
+    private HCSContainerDatasetInfo createScreeningDatasetInfo(
             DataSetInformation dataSetInformation)
     {
         Sample sampleOrNull = tryFindSampleForDataSet(dataSetInformation);
-        return ScreeningContainerDatasetInfo.createScreeningDatasetInfoWithSample(
+        return HCSContainerDatasetInfo.createScreeningDatasetInfoWithSample(
                 dataSetInformation, sampleOrNull);
     }
 
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorUploader.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorUploader.java
index 3d08827b7a6b7034d993373df670a741c13ef27c..5d77e1cbf6ee3c1bfb083ebf0550f0606fbd6549 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorUploader.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorUploader.java
@@ -18,9 +18,10 @@ package ch.systemsx.cisd.openbis.dss.etl.featurevector;
 
 import java.util.List;
 
-import ch.systemsx.cisd.openbis.dss.etl.ScreeningContainerDatasetInfo;
-import ch.systemsx.cisd.openbis.dss.etl.ScreeningContainerDatasetInfoHelper;
+import ch.systemsx.cisd.openbis.dss.etl.HCSContainerDatasetInfo;
+import ch.systemsx.cisd.openbis.dss.etl.ImagingDatabaseHelper;
 import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingQueryDAO;
+import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgDatasetDTO;
 import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgFeatureDefDTO;
 import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgFeatureValuesDTO;
 import ch.systemsx.cisd.openbis.plugin.screening.shared.imaging.dataaccess.ImgFeatureVocabularyTermDTO;
@@ -34,9 +35,9 @@ public class FeatureVectorUploader
 {
     private final IImagingQueryDAO dao;
 
-    private final ScreeningContainerDatasetInfo info;
+    private final HCSContainerDatasetInfo info;
 
-    public FeatureVectorUploader(IImagingQueryDAO imagingDao, ScreeningContainerDatasetInfo info)
+    public FeatureVectorUploader(IImagingQueryDAO imagingDao, HCSContainerDatasetInfo info)
     {
         this.dao = imagingDao;
         this.info = info;
@@ -48,13 +49,19 @@ public class FeatureVectorUploader
      */
     public void uploadFeatureVectors(List<CanonicalFeatureVector> fvecs)
     {
-        ScreeningContainerDatasetInfoHelper helper = new ScreeningContainerDatasetInfoHelper(dao);
-        long contId = helper.getOrCreateExperimentAndContainer(info).getContainerId();
-        long dataSetId = helper.createFeatureVectorDataset(contId, info);
-
+        long contId = ImagingDatabaseHelper.getOrCreateExperimentAndContainer(dao, info);
+        long dataSetId = createFeatureVectorDataset(contId);
         uploadFeatureVectors(dao, fvecs, dataSetId);
     }
 
+    private long createFeatureVectorDataset(long contId)
+    {
+        boolean isMultidimensional = false;
+        ImgDatasetDTO dataset =
+                new ImgDatasetDTO(info.getDatasetPermId(), 0, 0, contId, isMultidimensional);
+        return dao.addDataset(dataset);
+    }
+
     /** Uploads feature vectors for a given dataset id. Commit on the dao is NOT performed. */
     public static void uploadFeatureVectors(IImagingQueryDAO dao,
             List<CanonicalFeatureVector> fvecs, long dataSetId)
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/genedata/FeatureStorageProcessor.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/genedata/FeatureStorageProcessor.java
index 55324f84c57bac9a2b92acfbece39904fd837172..1da9d71953ab7b4a1ebd5efc97793c97fd1dfef7 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/genedata/FeatureStorageProcessor.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/genedata/FeatureStorageProcessor.java
@@ -36,7 +36,7 @@ import ch.systemsx.cisd.etlserver.DefaultStorageProcessor;
 import ch.systemsx.cisd.etlserver.ITypeExtractor;
 import ch.systemsx.cisd.etlserver.utils.Column;
 import ch.systemsx.cisd.etlserver.utils.TableBuilder;
-import ch.systemsx.cisd.openbis.dss.etl.ScreeningContainerDatasetInfo;
+import ch.systemsx.cisd.openbis.dss.etl.HCSContainerDatasetInfo;
 import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingQueryDAO;
 import ch.systemsx.cisd.openbis.dss.etl.featurevector.CanonicalFeatureVector;
 import ch.systemsx.cisd.openbis.dss.etl.featurevector.FeatureVectorUploader;
@@ -221,7 +221,7 @@ public class FeatureStorageProcessor extends AbstractDelegatingStorageProcessor
         uploader.uploadFeatureVectors(fvecs);
     }
 
-    private ScreeningContainerDatasetInfo createScreeningDatasetInfo(
+    private HCSContainerDatasetInfo createScreeningDatasetInfo(
             DataSetInformation dataSetInformation)
     {
         Sample sampleOrNull = tryFindSampleForDataSet(dataSetInformation);
@@ -231,7 +231,7 @@ public class FeatureStorageProcessor extends AbstractDelegatingStorageProcessor
                     "Cannot find a sample to which a plate should be (directly or indirectly) connected: "
                             + dataSetInformation);
         }
-        return ScreeningContainerDatasetInfo.createScreeningDatasetInfoWithSample(
+        return HCSContainerDatasetInfo.createScreeningDatasetInfoWithSample(
                 dataSetInformation, sampleOrNull);
     }
 
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/genedata/GenedataFeatureVectorMigrator.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/genedata/GenedataFeatureVectorMigrator.java
index b90d123427a4502a4d4777bda712206f5f72a52e..4aa15db2ba049bc2848029dbcd7fb77abbd8aaaa 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/genedata/GenedataFeatureVectorMigrator.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/genedata/GenedataFeatureVectorMigrator.java
@@ -24,7 +24,7 @@ import java.util.Properties;
 
 import ch.systemsx.cisd.common.filesystem.FileUtilities;
 import ch.systemsx.cisd.etlserver.DefaultStorageProcessor;
-import ch.systemsx.cisd.openbis.dss.etl.ScreeningContainerDatasetInfo;
+import ch.systemsx.cisd.openbis.dss.etl.HCSContainerDatasetInfo;
 import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingQueryDAO;
 import ch.systemsx.cisd.openbis.dss.etl.featurevector.AbstractFeatureVectorMigrator;
 import ch.systemsx.cisd.openbis.dss.etl.featurevector.CanonicalFeatureVector;
@@ -50,7 +50,7 @@ public class GenedataFeatureVectorMigrator extends AbstractFeatureVectorMigrator
     }
 
     @Override
-    protected AbstractImageDbImporter createImporter(ScreeningContainerDatasetInfo dataSetInfo,
+    protected AbstractImageDbImporter createImporter(HCSContainerDatasetInfo dataSetInfo,
             File fileToMigrate)
     {
         AbstractImageDbImporter importer;
@@ -113,7 +113,7 @@ public class GenedataFeatureVectorMigrator extends AbstractFeatureVectorMigrator
          * @param fileToMigrate
          */
         private ImporterGenedata(IImagingQueryDAO dao,
-                ScreeningContainerDatasetInfo screeningDataSetInfo, File fileToMigrate)
+                HCSContainerDatasetInfo screeningDataSetInfo, File fileToMigrate)
         {
             super(dao, screeningDataSetInfo, fileToMigrate);
         }
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/lmc/HCSImageFileExtractor.java b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/lmc/HCSImageFileExtractor.java
index c1f51ff735fd5df305c99f2974f495ee0d0e4d23..f283b20f0a989fee2c447bdb8c35919acbb43a23 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/lmc/HCSImageFileExtractor.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/dss/etl/lmc/HCSImageFileExtractor.java
@@ -33,7 +33,7 @@ public class HCSImageFileExtractor extends ch.systemsx.cisd.openbis.dss.etl.HCSI
     }
 
     @Override
-    protected Location tryGetWellLocation(final String wellLocation)
+    protected Location tryGetTileLocation(final String wellLocation)
     {
         int tileNumber;
         try
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/IImagingReadonlyQueryDAO.java b/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/IImagingReadonlyQueryDAO.java
index 014e4424cf9a1969f6fbe5ede77ad5e588ad331a..44ae4cfc17d7c0ef63ebccaff356137bcac61c22 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/IImagingReadonlyQueryDAO.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/IImagingReadonlyQueryDAO.java
@@ -35,7 +35,7 @@ public interface IImagingReadonlyQueryDAO extends BaseQuery
 {
     public static final int FETCH_SIZE = 1000;
 
-    public static final String SQL_IMAGE =
+    public static final String SQL_HCS_IMAGE =
             "select i.* from CHANNEL_STACKS, SPOTS, ACQUIRED_IMAGES, IMAGES as i "
                     + "where                                                                "
                     + "ACQUIRED_IMAGES.CHANNEL_ID = ?{1} and CHANNEL_STACKS.DS_ID = ?{2} and "
@@ -52,7 +52,8 @@ public interface IImagingReadonlyQueryDAO extends BaseQuery
      * @return an image for the specified chanel, well and tile. If many images for different
      *         timepoints or depths exist, the first one is returned.
      */
-    @Select(SQL_IMAGE + " and ACQUIRED_IMAGES.IMG_ID = i.ID " + SQL_NO_MULTIDIMENTIONAL_DATA_COND)
+    @Select(SQL_HCS_IMAGE + " and ACQUIRED_IMAGES.IMG_ID = i.ID "
+            + SQL_NO_MULTIDIMENTIONAL_DATA_COND)
     public ImgImageDTO tryGetImage(long channelId, long datasetId, Location tileLocation,
             Location wellLocation);
 
@@ -60,12 +61,12 @@ public interface IImagingReadonlyQueryDAO extends BaseQuery
      * @return a thumbnail for the specified chanel, well and tile. If many images for different
      *         timepoints or depths exist, the first one is returned.
      */
-    @Select(SQL_IMAGE + " and ACQUIRED_IMAGES.THUMBNAIL_ID = i.ID "
+    @Select(SQL_HCS_IMAGE + " and ACQUIRED_IMAGES.THUMBNAIL_ID = i.ID "
             + SQL_NO_MULTIDIMENTIONAL_DATA_COND)
     public ImgImageDTO tryGetThumbnail(long channelId, long datasetId, Location tileLocation,
             Location wellLocation);
 
-    /** @return an image for the specified chanel and channel stack or null */
+    /** @return an image for the specified channel and channel stack or null */
     @Select("select i.* from IMAGES as i "
             + "join ACQUIRED_IMAGES on ACQUIRED_IMAGES.IMG_ID = i.ID "
             + "join CHANNEL_STACKS on ACQUIRED_IMAGES.CHANNEL_STACK_ID = CHANNEL_STACKS.ID "
@@ -111,6 +112,10 @@ public interface IImagingReadonlyQueryDAO extends BaseQuery
             + "where cs.ds_id = ?{1} and s.x = ?{2} and s.y = ?{3}")
     public List<ImgChannelStackDTO> listChannelStacks(long datasetId, int spotX, int spotY);
 
+    @Select("select cs.* from CHANNEL_STACKS cs               "
+            + "where cs.ds_id = ?{1} and cs.spot_id is NULL")
+    public List<ImgChannelStackDTO> listSpotlessChannelStacks(long datasetId);
+
     @Select("select count(*) from CHANNELS where DS_ID = ?{1} or EXP_ID = ?{2}")
     public int countChannelByDatasetIdOrExperimentId(long datasetId, long experimentId);
 
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/ImgChannelDTO.java b/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/ImgChannelDTO.java
index dc9a6216900861ae79c6dc88bf1a724f98f6224f..54be6a6a756fb327844e0d99dcc4df1b390f5373 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/ImgChannelDTO.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/ImgChannelDTO.java
@@ -46,26 +46,15 @@ public class ImgChannelDTO extends AbstractImageTransformerFactoryHolder
     // can be null if datasetId is not null
     @ResultColumn("EXP_ID")
     private Long experimentIdOrNull;
-    
-    public static ImgChannelDTO createDatasetChannel(String code, String descriptionOrNull,
-            Integer wavelengthOrNull, long datasetId, String label)
-    {
-        return new ImgChannelDTO(code, descriptionOrNull, wavelengthOrNull, datasetId, null, label);
-    }
-
-    public static ImgChannelDTO createExperimentChannel(String code, String descriptionOrNull,
-            Integer wavelengthOrNull, long experimentId, String label)
-    {
-        return new ImgChannelDTO(code, descriptionOrNull, wavelengthOrNull, null, experimentId,
-                label);
-    }
 
+    // GWT only
+    @SuppressWarnings("unused")
     private ImgChannelDTO()
     {
         // All Data-Object classes must have a default constructor.
     }
 
-    private ImgChannelDTO(String code, String descriptionOrNull, Integer wavelengthOrNull,
+    public ImgChannelDTO(String code, String descriptionOrNull, Integer wavelengthOrNull,
             Long datasetIdOrNull, Long experimentIdOrNull, String label)
     {
         assert (datasetIdOrNull == null && experimentIdOrNull != null)
@@ -149,5 +138,4 @@ public class ImgChannelDTO extends AbstractImageTransformerFactoryHolder
         this.experimentIdOrNull = experimentId;
     }
 
-
 }
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/ImgChannelStackDTO.java b/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/ImgChannelStackDTO.java
index 7f7fbf71229b8fc8e90d8434630590d9ad11840f..f262b732a3f21c4f1ae93225f8353e83a75a3898 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/ImgChannelStackDTO.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/ImgChannelStackDTO.java
@@ -51,7 +51,7 @@ public class ImgChannelStackDTO
     private long datasetId;
 
     @ResultColumn("SPOT_ID")
-    private long spotId;
+    private Long spotId;
 
     @SuppressWarnings("unused")
     private ImgChannelStackDTO()
@@ -59,14 +59,14 @@ public class ImgChannelStackDTO
         // All Data-Object classes must have a default constructor.
     }
 
-    public ImgChannelStackDTO(long id, int row, int column, long datasetId, long spotId,
+    public ImgChannelStackDTO(long id, int row, int column, long datasetId, Long spotIdOrNull,
             Float tOrNull, Float zOrNull)
     {
         this.id = id;
         this.row = row;
         this.column = column;
         this.datasetId = datasetId;
-        this.spotId = spotId;
+        this.spotId = spotIdOrNull;
         this.t = tOrNull;
         this.z = zOrNull;
     }
@@ -121,12 +121,13 @@ public class ImgChannelStackDTO
         this.datasetId = datasetId;
     }
 
-    public long getSpotId()
+    /** can be null */
+    public Long getSpotId()
     {
         return spotId;
     }
 
-    public void setSpotId(long spotId)
+    public void setSpotId(Long spotId)
     {
         this.spotId = spotId;
     }
@@ -140,7 +141,7 @@ public class ImgChannelStackDTO
         result = prime * result + (int) (datasetId ^ (datasetId >>> 32));
         result = prime * result + ((column == null) ? 0 : column.hashCode());
         result = prime * result + ((row == null) ? 0 : row.hashCode());
-        result = prime * result + (int) (spotId ^ (spotId >>> 32));
+        result = prime * result + ((spotId == null) ? 0 : spotId.hashCode());
         result = prime * result + ((t == null) ? 0 : t.hashCode());
         result = prime * result + ((z == null) ? 0 : z.hashCode());
         return result;
@@ -171,8 +172,14 @@ public class ImgChannelStackDTO
                 return false;
         } else if (!row.equals(other.row))
             return false;
-        if (spotId != other.spotId)
+
+        if (spotId == null)
+        {
+            if (other.spotId != null)
+                return false;
+        } else if (!spotId.equals(other.spotId))
             return false;
+
         if (t == null)
         {
             if (other.t != null)
diff --git a/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/ImgDatasetDTO.java b/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/ImgDatasetDTO.java
index 0cdc5b7e641415c97984261fe6896fcbd5472e84..1c4cf9a511fc2623e655230ac4c289cfab195a72 100644
--- a/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/ImgDatasetDTO.java
+++ b/screening/source/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/ImgDatasetDTO.java
@@ -39,7 +39,7 @@ public class ImgDatasetDTO extends AbstractHashable
     private Integer fieldNumberOfRowsOrNull;
 
     @ResultColumn("CONT_ID")
-    private long containerId;
+    private Long containerId;
 
     // a redundant information if there are timepoint or depth stack data for any spots in this
     // dataset
@@ -53,7 +53,7 @@ public class ImgDatasetDTO extends AbstractHashable
     }
 
     public ImgDatasetDTO(String permId, Integer fieldNumberOfRowsOrNull,
-            Integer fieldNumberOfColumnsOrNull, long containerId, boolean isMultidimensional)
+            Integer fieldNumberOfColumnsOrNull, Long containerId, boolean isMultidimensional)
     {
         this.permId = permId;
         this.fieldNumberOfColumnsOrNull = fieldNumberOfColumnsOrNull;
@@ -102,12 +102,13 @@ public class ImgDatasetDTO extends AbstractHashable
         this.fieldNumberOfRowsOrNull = numberOfRows;
     }
 
-    public long getContainerId()
+    /** Container id; can be null for datasets not linked to a plate container (microscopy). */
+    public Long getContainerId()
     {
         return containerId;
     }
 
-    public void setContainerId(long containerId)
+    public void setContainerId(Long containerId)
     {
         this.containerId = containerId;
     }
diff --git a/screening/source/sql/postgresql/009/schema-009.sql b/screening/source/sql/postgresql/009/schema-009.sql
index 7f7e367d3ff316411ddc5841a4f2c6b92e6a3866..d3ad7a98d3f4877047acb4a0a7b17ee0456d6b4f 100644
--- a/screening/source/sql/postgresql/009/schema-009.sql
+++ b/screening/source/sql/postgresql/009/schema-009.sql
@@ -113,7 +113,9 @@ CREATE TABLE CHANNEL_STACKS (
 		Z_in_M REAL,
 		-- we use the fixed dimension seconds here
 		T_in_SEC REAL,
-
+		-- SERIES_NUMBER is not null if and only if both T_in_SEC and Z_in_M are null
+		-- SERIES_NUMBER INTEGER,
+    
     DS_ID TECH_ID	NOT NULL,
 		SPOT_ID TECH_ID,
 
diff --git a/screening/source/sql/postgresql/migration/migration-008-009.sql b/screening/source/sql/postgresql/migration/migration-008-009.sql
index 93e1bf105e37bc6ad1dd4ffe218ffc3b2d100d56..9f8694a5391216465f32fd760271f64cc3f12900 100644
--- a/screening/source/sql/postgresql/migration/migration-008-009.sql
+++ b/screening/source/sql/postgresql/migration/migration-008-009.sql
@@ -1,13 +1,10 @@
 -- Migration from 008 to 009
 
+--- ADD MICROSCOPY SUPPORT -----------------------------------------------------------------------
+
 ALTER TABLE channel_stacks ALTER COLUMN spot_id DROP NOT NULL;
 ALTER TABLE data_sets ALTER COLUMN cont_id DROP NOT NULL;
 
-------------------------------------------------------------------------------------
---  Purpose:  Create trigger CHANNEL_STACKS_CHECK which checks if both spot_id and dataset.cont_id 
---            are both null or not null.
-------------------------------------------------------------------------------------
-
 CREATE OR REPLACE FUNCTION CHANNEL_STACKS_CHECK() RETURNS trigger AS $$
 DECLARE
    v_cont_id  CODE;
@@ -30,4 +27,8 @@ END;
 $$ LANGUAGE 'plpgsql';
 
 CREATE TRIGGER CHANNEL_STACKS_CHECK BEFORE INSERT OR UPDATE ON CHANNEL_STACKS
-    FOR EACH ROW EXECUTE PROCEDURE CHANNEL_STACKS_CHECK();
\ No newline at end of file
+    FOR EACH ROW EXECUTE PROCEDURE CHANNEL_STACKS_CHECK();
+    
+--- ADD SERIES SEQUENCE NUMBER -----------------------------------------------------------------------
+
+-- ALTER TABLE channel_stacks ADD COLUMN series_number INTEGER;    
\ No newline at end of file
diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/UnparsedImageFileInfoLexerTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/UnparsedImageFileInfoLexerTest.java
index 83b5b5cf30580c906366df48d6ea708e611f6518..f9a6c855f4b4d4c05a9cff08900037f0b7d91769 100644
--- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/UnparsedImageFileInfoLexerTest.java
+++ b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/UnparsedImageFileInfoLexerTest.java
@@ -21,7 +21,7 @@ import org.testng.annotations.Test;
 
 import ch.rinn.restrictions.Friend;
 import ch.systemsx.cisd.openbis.dss.etl.UnparsedImageFileInfoLexer;
-import ch.systemsx.cisd.openbis.dss.etl.AbstractHCSImageFileExtractor.UnparsedImageFileInfo;
+import ch.systemsx.cisd.openbis.dss.etl.dto.UnparsedImageFileInfo;
 
 /**
  * Test cases for {@link UnparsedImageFileInfoLexer}.
@@ -35,8 +35,19 @@ public class UnparsedImageFileInfoLexerTest extends AssertJUnit
     public void testExtractFileInfoCorrectFileName() throws Exception
     {
         UnparsedImageFileInfo info =
-                UnparsedImageFileInfoLexer
-                        .extractImageFileInfo("bDZ01-1A_wD17_s3_z123_t321_cGFP");
+                UnparsedImageFileInfoLexer.extractImageFileInfo("bDZ01-1A_wD17_s3_z123_t321_cGFP");
+        assertEquals("well location token", "D17", info.getWellLocationToken());
+        assertEquals("channel token", "GFP", info.getChannelToken());
+        assertEquals("tile location token", "3", info.getTileLocationToken());
+        assertEquals("time point token", "321", info.getTimepointToken());
+        assertEquals("depth token", "123", info.getDepthToken());
+    }
+
+    @Test
+    public void testExtractFileInfoPartialFileName() throws Exception
+    {
+        UnparsedImageFileInfo info =
+                UnparsedImageFileInfoLexer.extractImageFileInfo("bDZ01-1A_wD17_s3_z123_t321_cGFP");
         assertEquals("well location token", "D17", info.getWellLocationToken());
         assertEquals("channel token", "GFP", info.getChannelToken());
         assertEquals("tile location token", "3", info.getTileLocationToken());
diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/dataaccess/ImagingQueryDAOTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/dataaccess/ImagingQueryDAOTest.java
index a01629d43d027a6466ad8d225df3d41a476864be..6020ffcdc130b1e197a0cb528476335d6a18dd4e 100644
--- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/dataaccess/ImagingQueryDAOTest.java
+++ b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/dataaccess/ImagingQueryDAOTest.java
@@ -102,13 +102,28 @@ public class ImagingQueryDAOTest extends AbstractDBTest
 
     // adding rows to tables
 
+    @Test
+    public void testCreateMicroscopyDataset()
+    {
+        long datasetId = addDataset(DS_PERM_ID + "2", null);
+        long channelId1 = addDatasetChannel(datasetId);
+        long channelStackId = addChannelStack(datasetId, null);
+        long imageId1 = addImage("pathXXX", ColorComponent.BLUE);
+        addAcquiredImage(imageId1, channelStackId, channelId1);
+
+        List<ImgChannelStackDTO> stack = dao.listSpotlessChannelStacks(datasetId);
+        assertEquals(1, stack.size());
+        ImgChannelStackDTO stackDTO = stack.get(0);
+        assertEquals(channelStackId, stackDTO.getId());
+    }
+
     @Test
     public void testCreateFullExperimentAndGetImages()
     {
         // create experiment, container, dataset, spot, ds channel and exp channel
         final long experimentId = addExperiment();
         final long containerId = addContainer(experimentId);
-        final long datasetId = addDataset(containerId);
+        final long datasetId = addDataset(DS_PERM_ID, containerId);
         final long spotId = addSpot(containerId);
         final long channelId1 = addDatasetChannel(datasetId);
         final long channelId2 = addExperimentChannel(experimentId);
@@ -123,17 +138,14 @@ public class ImagingQueryDAOTest extends AbstractDBTest
         addAcquiredImage(imageId2, channelStackId, channelId2);
 
         testGetImage(datasetId, channelStackId, channelId1, channelId2);
-        testListChannelStacks(datasetId, channelStackId, spotId);
+        testListChannelStacksAndSpots(datasetId, channelStackId, spotId);
     }
 
-    private void testListChannelStacks(long datasetId, long channelStackId, long spotId)
+    private void testListChannelStacksAndSpots(long datasetId, long channelStackId, long spotId)
     {
-        List<ImgChannelStackDTO> stack =
-                dao.listChannelStacks(datasetId, X_WELL_COLUMN, Y_WELL_ROW);
-        assertEquals(1, stack.size());
-        ImgChannelStackDTO stackDTO = stack.get(0);
-        assertEquals(channelStackId, stackDTO.getId());
-        assertEquals(spotId, stackDTO.getSpotId());
+        ImgChannelStackDTO stackDTO = testListChannelStacks(datasetId, channelStackId);
+
+        assertEquals(spotId, stackDTO.getSpotId().longValue());
         assertEquals(datasetId, stackDTO.getDatasetId());
         assertEquals(TIMEPOINT, stackDTO.getT());
         assertNull(stackDTO.getZ());
@@ -141,6 +153,16 @@ public class ImagingQueryDAOTest extends AbstractDBTest
         assertEquals(X_WELL_COLUMN, stackDTO.getColumn().intValue());
     }
 
+    private ImgChannelStackDTO testListChannelStacks(long datasetId, long channelStackId)
+    {
+        List<ImgChannelStackDTO> stack =
+                dao.listChannelStacks(datasetId, X_WELL_COLUMN, Y_WELL_ROW);
+        assertEquals(1, stack.size());
+        ImgChannelStackDTO stackDTO = stack.get(0);
+        assertEquals(channelStackId, stackDTO.getId());
+        return stackDTO;
+    }
+
     private void testGetImage(final long datasetId, final long channelStackId,
             final long channelId1, final long channelId2)
     {
@@ -242,21 +264,20 @@ public class ImagingQueryDAOTest extends AbstractDBTest
         return containerId;
     }
 
-    private long addDataset(long containerId)
+    private long addDataset(String permId, Long containerIdOrNull)
     {
-        final String permId = DS_PERM_ID;
         final Integer fieldsWidth = 1;
         final Integer fieldsHeight = 2;
         final ImgDatasetDTO dataset =
-                new ImgDatasetDTO(permId, fieldsHeight, fieldsWidth, containerId, false);
+                new ImgDatasetDTO(permId, fieldsHeight, fieldsWidth, containerIdOrNull, false);
         final long datasetId = dao.addDataset(dataset);
 
-        final ImgDatasetDTO loadedDataset = dao.tryGetDatasetByPermId(DS_PERM_ID);
+        final ImgDatasetDTO loadedDataset = dao.tryGetDatasetByPermId(permId);
         assertNotNull(loadedDataset);
         assertEquals(permId, loadedDataset.getPermId());
         assertEquals(fieldsWidth, loadedDataset.getFieldNumberOfColumns());
         assertEquals(fieldsHeight, loadedDataset.getFieldNumberOfRows());
-        assertEquals(containerId, loadedDataset.getContainerId());
+        assertEquals(containerIdOrNull, loadedDataset.getContainerId());
 
         return datasetId;
     }
@@ -270,24 +291,24 @@ public class ImagingQueryDAOTest extends AbstractDBTest
     private long addDatasetChannel(long datasetId)
     {
         final ImgChannelDTO channel =
-                ImgChannelDTO.createDatasetChannel(DS_CHANNEL, CHANNEL_DESCRIPTION, WAVELENGTH,
-                        datasetId, CHANNEL_LABEL);
+                new ImgChannelDTO(DS_CHANNEL, CHANNEL_DESCRIPTION, WAVELENGTH, datasetId, null,
+                        CHANNEL_LABEL);
         return dao.addChannel(channel);
     }
 
     private long addExperimentChannel(long experimentId)
     {
         final ImgChannelDTO channel =
-                ImgChannelDTO.createExperimentChannel(EXP_CHANNEL, CHANNEL_DESCRIPTION, WAVELENGTH,
-                        experimentId, CHANNEL_LABEL);
+                new ImgChannelDTO(EXP_CHANNEL, CHANNEL_DESCRIPTION, WAVELENGTH, null, experimentId,
+                        CHANNEL_LABEL);
         return dao.addChannel(channel);
     }
 
-    private long addChannelStack(long datasetId, long spotId)
+    private long addChannelStack(long datasetId, Long spotIdOrNull)
     {
         final ImgChannelStackDTO channelStack =
                 new ImgChannelStackDTO(dao.createChannelStackId(), Y_TILE_ROW, X_TILE_COLUMN,
-                        datasetId, spotId, TIMEPOINT, DEPTH);
+                        datasetId, spotIdOrNull, TIMEPOINT, DEPTH);
         dao.addChannelStacks(Arrays.asList(channelStack));
         return channelStack.getId();
     }
diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorUploaderTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorUploaderTest.java
index 576e124b571c0eef10f3f6f3b9509a3b819978ae..325c65dbb01c7d6f3c6ae4a5bdd3fa9bc1e27df1 100644
--- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorUploaderTest.java
+++ b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/featurevector/FeatureVectorUploaderTest.java
@@ -28,7 +28,7 @@ import net.lemnik.eodsql.QueryTool;
 import org.testng.annotations.BeforeClass;
 import org.testng.annotations.Test;
 
-import ch.systemsx.cisd.openbis.dss.etl.ScreeningContainerDatasetInfo;
+import ch.systemsx.cisd.openbis.dss.etl.HCSContainerDatasetInfo;
 import ch.systemsx.cisd.openbis.dss.etl.dataaccess.IImagingQueryDAO;
 import ch.systemsx.cisd.openbis.plugin.screening.shared.api.v1.dto.Geometry;
 import ch.systemsx.cisd.openbis.plugin.screening.shared.dto.PlateFeatureValues;
@@ -66,7 +66,7 @@ public class FeatureVectorUploaderTest extends AbstractDBTest
     @Test
     public void testCreateFeatureValues()
     {
-        ScreeningContainerDatasetInfo info = new ScreeningContainerDatasetInfo();
+        HCSContainerDatasetInfo info = new HCSContainerDatasetInfo();
         info.setExperimentPermId(EXP_PERM_ID);
         info.setContainerPermId(CONTAINER_PERM_ID);
         info.setDatasetPermId(DS_PERM_ID);
diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/genedata/FeatureStorageProcessorTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/genedata/FeatureStorageProcessorTest.java
index 644336be67432b596587c5290c9edfac0866b79e..1fac61957c265323d5b26d2448484546dc473aae 100644
--- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/genedata/FeatureStorageProcessorTest.java
+++ b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/etl/genedata/FeatureStorageProcessorTest.java
@@ -93,7 +93,7 @@ public class FeatureStorageProcessorTest extends AbstractFileSystemTestCase
                     one(dao).tryGetContainerIdPermId(CONTAINER_PERM_ID);
                     will(returnValue((long) 1));
 
-                    ImgDatasetDTO dataSetDTO = new ImgDatasetDTO(DATA_SET_PERM_ID, 3, 2, 1, false);
+                    ImgDatasetDTO dataSetDTO = new ImgDatasetDTO(DATA_SET_PERM_ID, 3, 2, 1L, false);
                     dataSetDTO.setId(1);
                     one(dao).tryGetDatasetByPermId(DATA_SET_PERM_ID);
                     will(returnValue(dataSetDTO));
diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/screening/server/DssServiceRpcScreeningTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/screening/server/DssServiceRpcScreeningTest.java
index 66ccfd51cf5404f5c0a414c894867ea143103d9f..0c17b2407455e0809a88da705cb96e0559f8b81f 100644
--- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/screening/server/DssServiceRpcScreeningTest.java
+++ b/screening/sourceTest/java/ch/systemsx/cisd/openbis/dss/screening/server/DssServiceRpcScreeningTest.java
@@ -339,7 +339,7 @@ public class DssServiceRpcScreeningTest extends AssertJUnit
                     one(dao).tryGetChannelByChannelCodeAndExperimentPermId(EXPERIMENT_PERM_ID,
                             channel);
                     ImgChannelDTO channelDTO =
-                            ImgChannelDTO.createDatasetChannel("dapi", null, null, 42, "dapi");
+                            new ImgChannelDTO("dapi", null, null, new Long(42), null, "dapi");
                     channelDTO.setSerializedImageTransformerFactory(SerializationUtils
                             .serialize(transformerFactory));
                     will(returnValue(channelDTO));
@@ -458,7 +458,8 @@ public class DssServiceRpcScreeningTest extends AssertJUnit
         assertEquals(expectedRowNumber, featureVector.getWellPosition().getWellRow());
         assertEquals(expectedColumnNumber, featureVector.getWellPosition().getWellColumn());
 
-        assertEquals(Arrays.asList(expectedValues).toString(), featureVector.getValueObjects().toString());
+        assertEquals(Arrays.asList(expectedValues).toString(), featureVector.getValueObjects()
+                .toString());
     }
 
     private void prepareGetHomeDatabaseInstance()
diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/FeatureTableBuilderTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/FeatureTableBuilderTest.java
index 9aa6e488602c2e6d846c366d46b4d70a86241c05..12ea3964cdd593a2897050150082f7e8dbbe3b3d 100644
--- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/FeatureTableBuilderTest.java
+++ b/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/FeatureTableBuilderTest.java
@@ -139,7 +139,7 @@ public class FeatureTableBuilderTest extends AssertJUnit
                 {
                     String dataSetCode = "ds" + dataSetID;
                     one(dao).tryGetDatasetByPermId(dataSetCode);
-                    int containerId = dataSetID + 100;
+                    long containerId = dataSetID + 100;
                     ImgDatasetDTO dataSet =
                             new ImgDatasetDTO(dataSetCode, null, null, containerId, false);
                     dataSet.setId(dataSetID);
diff --git a/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/ImgChannelStackDTOTest.java b/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/ImgChannelStackDTOTest.java
index 6fa6f1e175f3817ffad83a22debc50b5bf1826a6..0748606ef44f2bb63652cc6dc8a0453ed157f59f 100644
--- a/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/ImgChannelStackDTOTest.java
+++ b/screening/sourceTest/java/ch/systemsx/cisd/openbis/plugin/screening/shared/imaging/dataaccess/ImgChannelStackDTOTest.java
@@ -40,8 +40,21 @@ public class ImgChannelStackDTOTest
         AssertJUnit.assertEquals(createStackChannel().hashCode(), createStackChannel().hashCode());
     }
 
+    @Test
+    public void testSpotlessHashCode()
+    {
+        AssertJUnit.assertEquals(createStackChannel(null).hashCode(), createStackChannel(null)
+                .hashCode());
+    }
+
     private ImgChannelStackDTO createStackChannel()
     {
-        return new ImgChannelStackDTO(0, 1, 1, 1, 1, 123F, null);
+        return createStackChannel(new Long(1));
     }
+
+    private ImgChannelStackDTO createStackChannel(Long spotId)
+    {
+        return new ImgChannelStackDTO(0, 1, 1, 1, spotId, 123F, null);
+    }
+
 }